#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements.  See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License.  You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import sys

from pyspark import since, _NoValue
from pyspark.rdd import ignore_unicode_prefix

if sys.version_info[0] >= 3:
    basestring = str


class RuntimeConfig(object):
    """User-facing configuration API, accessible through `SparkSession.conf`.

    Options set here are automatically propagated to the Hadoop configuration during I/O.
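
    A usage sketch, written as a doctest; it assumes the active ``spark``
    session created by ``_test`` below, and the value shown is illustrative:

    >>> spark.conf.set("spark.sql.shuffle.partitions", "20")
    >>> spark.conf.get("spark.sql.shuffle.partitions") == "20"
    True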
0031 """
0032
0033 def __init__(self, jconf):
0034 """Create a new RuntimeConfig that wraps the underlying JVM object."""
0035 self._jconf = jconf
0036
0037 @ignore_unicode_prefix
0038 @since(2.0)
0039 def set(self, key, value):
0040 """Sets the given Spark runtime configuration property."""
        self._jconf.set(key, value)

    @ignore_unicode_prefix
    @since(2.0)
    def get(self, key, default=_NoValue):
        """Returns the value of the Spark runtime configuration property for the given key,
        assuming it is set.
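
        A doctest sketch (``spark`` session from ``_test`` below; the unset key
        and fallback value are illustrative):

        >>> spark.conf.set("spark.sql.shuffle.partitions", "30")
        >>> spark.conf.get("spark.sql.shuffle.partitions")
        u'30'
        >>> spark.conf.get("spark.this.key.is.not.set", "fallback")
        u'fallback'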
0048 """
0049 self._checkType(key, "key")
0050 if default is _NoValue:
0051 return self._jconf.get(key)
0052 else:
0053 if default is not None:
0054 self._checkType(default, "default")
0055 return self._jconf.get(key, default)
0056
0057 @ignore_unicode_prefix
0058 @since(2.0)
0059 def unset(self, key):
0060 """Resets the configuration property for the given key."""
        self._jconf.unset(key)

    def _checkType(self, obj, identifier):
        """Assert that an object is a string.
        if not isinstance(obj, basestring):
            raise TypeError("expected %s '%s' to be a string (was '%s')" %
                            (identifier, obj, type(obj).__name__))

    @ignore_unicode_prefix
    @since(2.4)
    def isModifiable(self, key):
        """Indicates whether the configuration property with the given key
        is modifiable in the current session.
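
        A doctest sketch; the keys name one runtime-modifiable SQL conf and one
        static conf, assuming their Spark 2.x behavior:

        >>> spark.conf.isModifiable("spark.sql.shuffle.partitions")
        True
        >>> spark.conf.isModifiable("spark.sql.warehouse.dir")
        False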
0074 """
0075 return self._jconf.isModifiable(key)
0076
0077
0078 def _test():
0079 import os
0080 import doctest
0081 from pyspark.sql.session import SparkSession
0082 import pyspark.sql.conf
0083
0084 os.chdir(os.environ["SPARK_HOME"])
0085
0086 globs = pyspark.sql.conf.__dict__.copy()
0087 spark = SparkSession.builder\
0088 .master("local[4]")\
0089 .appName("sql.conf tests")\
0090 .getOrCreate()
0091 globs['sc'] = spark.sparkContext
0092 globs['spark'] = spark
0093 (failure_count, test_count) = doctest.testmod(pyspark.sql.conf, globs=globs)
0094 spark.stop()
0095 if failure_count:
0096 sys.exit(-1)
0097
0098 if __name__ == "__main__":
0099 _test()