Hide keyboard shortcuts

Hot-keys on this page

r m x p   toggle line displays

j k   next/prev highlighted chunk

0   (zero) top of page

1   (one) first highlighted chunk

1

2

3

4

5

6

7

8

9

10

11

12

13

14

15

16

17

18

19

20

21

22

23

24

25

26

27

28

29

30

31

32

33

34

35

36

37

38

39

40

41

42

43

44

45

46

47

48

49

50

51

52

53

54

55

56

57

58

59

60

61

62

63

64

65

66

67

68

69

70

71

72

73

74

75

76

77

78

79

80

81

82

83

84

85

86

87

88

89

90

91

# 

# Licensed to the Apache Software Foundation (ASF) under one or more 

# contributor license agreements. See the NOTICE file distributed with 

# this work for additional information regarding copyright ownership. 

# The ASF licenses this file to You under the Apache License, Version 2.0 

# (the "License"); you may not use this file except in compliance with 

# the License. You may obtain a copy of the License at 

# 

# http://www.apache.org/licenses/LICENSE-2.0 

# 

# Unless required by applicable law or agreed to in writing, software 

# distributed under the License is distributed on an "AS IS" BASIS, 

# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 

# See the License for the specific language governing permissions and 

# limitations under the License. 

# 

 

import sys 

 

from pyspark import since, _NoValue 

 

 

class RuntimeConfig(object):
    """User-facing configuration API, accessible through `SparkSession.conf`.

    Options set here are automatically propagated to the Hadoop configuration during I/O.

    This is a thin wrapper: every operation delegates to the underlying JVM-side
    config object (``jconf``) via Py4J.
    """

    def __init__(self, jconf):
        """Create a new RuntimeConfig that wraps the underlying JVM object.

        :param jconf: the JVM ``RuntimeConfig`` proxy this object delegates to.
        """
        self._jconf = jconf

    @since(2.0)
    def set(self, key, value):
        """Sets the given Spark runtime configuration property."""
        self._jconf.set(key, value)

    @since(2.0)
    def get(self, key, default=_NoValue):
        """Returns the value of Spark runtime configuration property for the given key,
        assuming it is set.

        :param key: the configuration key; must be a string.
        :param default: value returned when the key is not set. The ``_NoValue``
            sentinel (not ``None``) marks "no default supplied", because ``None``
            itself is a legal default.
        """
        self._checkType(key, "key")
        if default is _NoValue:
            # No default supplied: let the JVM side raise if the key is unset.
            return self._jconf.get(key)
        else:
            # ``None`` is an accepted default and is passed through unchecked.
            if default is not None:
                self._checkType(default, "default")
            return self._jconf.get(key, default)

    @since(2.0)
    def unset(self, key):
        """Resets the configuration property for the given key."""
        self._jconf.unset(key)

    def _checkType(self, obj, identifier):
        """Raise ``TypeError`` unless ``obj`` is a string.

        :param obj: the value to validate.
        :param identifier: short label (e.g. ``"key"``) used in the error message.
        """
        if not isinstance(obj, str):
            raise TypeError("expected %s '%s' to be a string (was '%s')" %
                            (identifier, obj, type(obj).__name__))

    @since(2.4)
    def isModifiable(self, key):
        """Indicates whether the configuration property with the given key
        is modifiable in the current session.
        """
        return self._jconf.isModifiable(key)

 

 

def _test():
    """Run this module's doctests against a local SparkSession.

    Exits the process with status -1 if any doctest fails. Requires the
    ``SPARK_HOME`` environment variable to be set.
    """
    import os
    import doctest
    from pyspark.sql.session import SparkSession
    import pyspark.sql.conf

    # Doctests assume relative paths from the Spark installation root.
    os.chdir(os.environ["SPARK_HOME"])

    globs = pyspark.sql.conf.__dict__.copy()
    spark = SparkSession.builder\
        .master("local[4]")\
        .appName("sql.conf tests")\
        .getOrCreate()
    globs['sc'] = spark.sparkContext
    globs['spark'] = spark
    (failure_count, test_count) = doctest.testmod(pyspark.sql.conf, globs=globs)
    spark.stop()
    if failure_count:
        sys.exit(-1)

 

# Allow running this module directly to execute its doctest suite.
if __name__ == "__main__":
    _test()