0018 """
0019 Module defining global singleton classes.
0020
0021 This module raises a RuntimeError if an attempt to reload it is made. In that
0022 way the identities of the classes defined here are fixed and will remain so
0023 even if pyspark itself is reloaded. In particular, a function like the following
0024 will still work correctly after pyspark is reloaded:
0025
0026 def foo(arg=pyspark._NoValue):
0027 if arg is pyspark._NoValue:
0028 ...
0029
0030 See gh-7844 for a discussion of the reload problem that motivated this module.
0031
Note that this approach is adapted from NumPy.
0033 """
0034
# Public API of this module. Note the lowercase ``__all__`` spelling:
# ``__ALL__`` has no meaning to the import system, so the original line
# silently failed to declare the export list for ``from ... import *``.
__all__ = ['_NoValue']
0036
0037
0038
0039
# Reload guard: ``_is_loaded`` persists in this module's namespace after the
# first execution, so finding it already present means the module body is
# being run a second time (a reload), which would recreate the classes below
# and break identity checks such as ``arg is pyspark._NoValue``.
if '_is_loaded' not in globals():
    _is_loaded = True
else:
    raise RuntimeError('Reloading pyspark._globals is not allowed')
0043
0044
0045 class _NoValueType(object):
0046 """Special keyword value.
0047
0048 The instance of this class may be used as the default value assigned to a
0049 deprecated keyword in order to check if it has been given a user defined
0050 value.
0051
0052 This class was copied from NumPy.
0053 """
0054 __instance = None
0055
0056 def __new__(cls):
0057
0058 if not cls.__instance:
0059 cls.__instance = super(_NoValueType, cls).__new__(cls)
0060 return cls.__instance
0061
0062
0063 def __reduce__(self):
0064 return (self.__class__, ())
0065
0066 def __repr__(self):
0067 return "<no value>"
0068
0069
# The unique sentinel instance; callers compare with identity,
# e.g. ``if arg is _NoValue: ...``.
_NoValue = _NoValueType()