Back to home page

OSCL-LXR

 
 

    


0001 """
0002 This class is defined to override standard pickle functionality
0003 
0004 The goals of it follow:
0005 -Serialize lambdas and nested functions to compiled byte code
0006 -Deal with main module correctly
0007 -Deal with other non-serializable objects
0008 
0009 It does not include an unpickler, as standard python unpickling suffices.
0010 
0011 This module was extracted from the `cloud` package, developed by `PiCloud, Inc.
0012 <https://web.archive.org/web/20140626004012/http://www.picloud.com/>`_.
0013 
0014 Copyright (c) 2012, Regents of the University of California.
0015 Copyright (c) 2009 `PiCloud, Inc. <https://web.archive.org/web/20140626004012/http://www.picloud.com/>`_.
0016 All rights reserved.
0017 
0018 Redistribution and use in source and binary forms, with or without
0019 modification, are permitted provided that the following conditions
0020 are met:
0021     * Redistributions of source code must retain the above copyright
0022       notice, this list of conditions and the following disclaimer.
0023     * Redistributions in binary form must reproduce the above copyright
0024       notice, this list of conditions and the following disclaimer in the
0025       documentation and/or other materials provided with the distribution.
0026     * Neither the name of the University of California, Berkeley nor the
0027       names of its contributors may be used to endorse or promote
0028       products derived from this software without specific prior written
0029       permission.
0030 
0031 THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
0032 "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
0033 LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
0034 A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
0035 HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
0036 SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
0037 TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
0038 PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
0039 LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
0040 NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
0041 SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
0042 """
0043 from __future__ import print_function
0044 
0045 import dis
0046 from functools import partial
0047 import io
0048 import itertools
0049 import logging
0050 import opcode
0051 import operator
0052 import pickle
0053 import struct
0054 import sys
0055 import traceback
0056 import types
0057 import weakref
0058 import uuid
0059 import threading
0060 
0061 
try:
    from enum import Enum
except ImportError:
    # Python 2 without the ``enum34`` backport: Enum is set to None and the
    # Enum-specific pickling paths below are skipped.
    Enum = None

# cloudpickle is meant for inter process communication: we expect all
# communicating processes to run the same Python version hence we favor
# communication speed over compatibility:
DEFAULT_PROTOCOL = pickle.HIGHEST_PROTOCOL

# Track the provenance of reconstructed dynamic classes to make it possible to
# reconstruct instances from the matching singleton class definition when
# appropriate and preserve the usual "isinstance" semantics of Python objects.
# Weak references on both sides so tracking never keeps a class alive.
_DYNAMIC_CLASS_TRACKER_BY_CLASS = weakref.WeakKeyDictionary()
_DYNAMIC_CLASS_TRACKER_BY_ID = weakref.WeakValueDictionary()
_DYNAMIC_CLASS_TRACKER_LOCK = threading.Lock()

# Compatibility shim: normalize the Pickler implementation, the in-memory
# string buffer and the string types across Python 2 and Python 3.
if sys.version_info[0] < 3:  # pragma: no branch
    from pickle import Pickler
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    string_types = (basestring,)  # noqa
    PY3 = False
    PY2 = True
    PY2_WRAPPER_DESCRIPTOR_TYPE = type(object.__init__)
    PY2_METHOD_WRAPPER_TYPE = type(object.__eq__)
    PY2_CLASS_DICT_BLACKLIST = (PY2_METHOD_WRAPPER_TYPE,
                                PY2_WRAPPER_DESCRIPTOR_TYPE)
else:
    types.ClassType = type
    from pickle import _Pickler as Pickler
    from io import BytesIO as StringIO
    string_types = (str,)
    PY3 = True
    PY2 = False
0099 
0100 
def _ensure_tracking(class_def):
    """Return the tracker id of *class_def*, registering it on first sight.

    The id is stable for the lifetime of the class object and is recorded in
    both tracker mappings so pickling and unpickling can resolve the same
    singleton class definition.
    """
    with _DYNAMIC_CLASS_TRACKER_LOCK:
        tracker_id = _DYNAMIC_CLASS_TRACKER_BY_CLASS.get(class_def)
        if tracker_id is None:
            # First time this class is seen: mint a fresh id and register the
            # mapping in both directions.
            tracker_id = uuid.uuid4().hex
            _DYNAMIC_CLASS_TRACKER_BY_CLASS[class_def] = tracker_id
            _DYNAMIC_CLASS_TRACKER_BY_ID[tracker_id] = class_def
    return tracker_id
0109 
0110 
def _lookup_class_or_track(class_tracker_id, class_def):
    """Return the canonical class for *class_tracker_id*.

    If the id was never seen before, *class_def* becomes the canonical
    definition; otherwise the previously registered class is returned so
    repeated unpicklings share one class object.
    """
    if class_tracker_id is None:
        return class_def
    with _DYNAMIC_CLASS_TRACKER_LOCK:
        canonical = _DYNAMIC_CLASS_TRACKER_BY_ID.setdefault(
            class_tracker_id, class_def)
        _DYNAMIC_CLASS_TRACKER_BY_CLASS[canonical] = class_tracker_id
    return canonical
0118 
0119 
def _make_cell_set_template_code():
    """Get the Python compiler to emit LOAD_FAST(arg); STORE_DEREF

    Notes
    -----
    In Python 3, we could use an easier function:

    .. code-block:: python

       def f():
           cell = None

           def _stub(value):
               nonlocal cell
               cell = value

           return _stub

        _cell_set_template_code = f().__code__

    This function is _only_ a LOAD_FAST(arg); STORE_DEREF, but that is
    invalid syntax on Python 2. If we use this function we also don't need
    to do the weird freevars/cellvars swap below
    """
    def inner(value):
        lambda: cell  # make ``cell`` a closure so that we get a STORE_DEREF
        cell = value

    co = inner.__code__

    # NOTE: we are marking the cell variable as a free variable intentionally
    # so that we simulate an inner function instead of the outer function. This
    # is what gives us the ``nonlocal`` behavior in a Python 2 compatible way.
    # The three branches below only differ in the CodeType constructor
    # signature of the running interpreter; each rebuilds ``co`` with
    # co_freevars set to the old co_cellvars and co_cellvars emptied.
    if PY2:  # pragma: no branch
        # Python 2 CodeType: no kwonlyargcount / posonlyargcount slots.
        return types.CodeType(
            co.co_argcount,
            co.co_nlocals,
            co.co_stacksize,
            co.co_flags,
            co.co_code,
            co.co_consts,
            co.co_names,
            co.co_varnames,
            co.co_filename,
            co.co_name,
            co.co_firstlineno,
            co.co_lnotab,
            co.co_cellvars,  # this is the trickery
            (),
        )
    else:
        if hasattr(types.CodeType, "co_posonlyargcount"):  # pragma: no branch
            # Python 3.8+: PEP 570 added co_posonlyargcount to the signature.
            return types.CodeType(
                co.co_argcount,
                co.co_posonlyargcount,  # Python3.8 with PEP570
                co.co_kwonlyargcount,
                co.co_nlocals,
                co.co_stacksize,
                co.co_flags,
                co.co_code,
                co.co_consts,
                co.co_names,
                co.co_varnames,
                co.co_filename,
                co.co_name,
                co.co_firstlineno,
                co.co_lnotab,
                co.co_cellvars,  # this is the trickery
                (),
            )
        else:
            # Python 3.0-3.7: kwonlyargcount but no posonlyargcount.
            return types.CodeType(
                co.co_argcount,
                co.co_kwonlyargcount,
                co.co_nlocals,
                co.co_stacksize,
                co.co_flags,
                co.co_code,
                co.co_consts,
                co.co_names,
                co.co_varnames,
                co.co_filename,
                co.co_name,
                co.co_firstlineno,
                co.co_lnotab,
                co.co_cellvars,  # this is the trickery
                (),
            )
0208 
# Built once at import time; ``cell_set`` wraps this code object in a fresh
# function for every cell assignment.
_cell_set_template_code = _make_cell_set_template_code()
0210 
0211 
def cell_set(cell, value):
    """Set the value of a closure cell.

    Instantiates a throwaway function from ``_cell_set_template_code`` with
    *cell* as its single free-variable cell; calling it stores *value* into
    the cell via STORE_DEREF.
    """
    return types.FunctionType(
        _cell_set_template_code,
        {},
        '_cell_set_inner',
        (),
        (cell,),
    )(value)
0222 
0223 
# Relevant opcodes: the three bytecode instructions that read or write a
# module-level (global) name.  ``_walk_global_ops`` scans code objects for
# these to discover which globals a function actually references.
STORE_GLOBAL = opcode.opmap['STORE_GLOBAL']
DELETE_GLOBAL = opcode.opmap['DELETE_GLOBAL']
LOAD_GLOBAL = opcode.opmap['LOAD_GLOBAL']
GLOBAL_OPS = (STORE_GLOBAL, DELETE_GLOBAL, LOAD_GLOBAL)
HAVE_ARGUMENT = dis.HAVE_ARGUMENT
EXTENDED_ARG = dis.EXTENDED_ARG
0231 
0232 
def islambda(func):
    """Tell whether *func* is a lambda, i.e. its ``__name__`` is ``'<lambda>'``."""
    func_name = getattr(func, '__name__')
    return func_name == '<lambda>'
0235 
0236 
# Map each type object exposed by the ``types`` module back to its attribute
# name there, so such types can be serialized by name (see ``_builtin_type``
# for the reverse lookup).
_BUILTIN_TYPE_NAMES = {}
for k, v in types.__dict__.items():
    if type(v) is type:
        _BUILTIN_TYPE_NAMES[v] = k
0241 
0242 
0243 def _builtin_type(name):
0244     return getattr(types, name)
0245 
0246 
0247 def _make__new__factory(type_):
0248     def _factory():
0249         return type_.__new__
0250     return _factory
0251 
0252 
# NOTE: These need to be module globals so that they're pickleable as globals.
_get_dict_new = _make__new__factory(dict)
_get_frozenset_new = _make__new__factory(frozenset)
_get_list_new = _make__new__factory(list)
_get_set_new = _make__new__factory(set)
_get_tuple_new = _make__new__factory(tuple)
_get_object_new = _make__new__factory(object)

# Pre-defined set of builtin_function_or_method instances that can be
# serialized.  ``save_function`` special-cases any function found in this
# mapping and pickles it as a call to the matching factory above.
_BUILTIN_TYPE_CONSTRUCTORS = {
    dict.__new__: _get_dict_new,
    frozenset.__new__: _get_frozenset_new,
    set.__new__: _get_set_new,
    list.__new__: _get_list_new,
    tuple.__new__: _get_tuple_new,
    object.__new__: _get_object_new,
}
0271 
0272 
if sys.version_info < (3, 4):  # pragma: no branch
    def _walk_global_ops(code):
        """
        Yield (opcode, argument number) tuples for all
        global-referencing instructions in *code*.

        Manual bytecode walk for interpreters predating
        ``dis.get_instructions`` (added in Python 3.4).
        """
        code = getattr(code, 'co_code', b'')
        if PY2:  # pragma: no branch
            # Python 2 bytecode is a str; convert to a sequence of ints so
            # indexing behaves like Python 3 bytes.
            code = map(ord, code)

        n = len(code)
        i = 0
        extended_arg = 0
        while i < n:
            op = code[i]
            i += 1
            if op >= HAVE_ARGUMENT:
                # Pre-3.6 bytecode: the argument occupies the next two bytes,
                # little-endian.
                oparg = code[i] + code[i + 1] * 256 + extended_arg
                extended_arg = 0
                i += 2
                if op == EXTENDED_ARG:
                    # EXTENDED_ARG supplies the high 16 bits of the next
                    # instruction's argument.
                    extended_arg = oparg * 65536
                if op in GLOBAL_OPS:
                    yield op, oparg

else:
    def _walk_global_ops(code):
        """
        Yield (opcode, argument number) tuples for all
        global-referencing instructions in *code*.
        """
        for instr in dis.get_instructions(code):
            op = instr.opcode
            if op in GLOBAL_OPS:
                yield op, instr.arg
0308 
0309 
def _extract_class_dict(cls):
    """Return a copy of ``cls.__dict__`` stripped of inherited attributes."""
    clsdict = dict(cls.__dict__)  # materialize the mappingproxy into a dict
    bases = cls.__bases__
    if len(bases) == 1:
        inherited_dict = bases[0].__dict__
    else:
        # Merge the base dicts in reverse order so that earlier bases win,
        # mirroring ordinary attribute lookup precedence.
        inherited_dict = {}
        for base in reversed(bases):
            inherited_dict.update(base.__dict__)

    doomed = []
    for name, value in clsdict.items():
        if name not in inherited_dict:
            continue
        base_value = inherited_dict[name]
        if value is base_value:
            # Exact same object as the inherited one: not redefined here.
            doomed.append(name)
        elif PY2:
            # backward compat for Python 2
            if hasattr(value, "im_func"):
                if value.im_func is getattr(base_value, "im_func", None):
                    doomed.append(name)
            elif isinstance(value, PY2_CLASS_DICT_BLACKLIST):
                # On Python 2 those specific method types can neither be
                # pickled nor checked for inheritance, so assume they are
                # always inherited from builtin types.
                doomed.append(name)

    for name in doomed:
        clsdict.pop(name)
    return clsdict
0341 
0342 
0343 class CloudPickler(Pickler):
0344 
0345     dispatch = Pickler.dispatch.copy()
0346 
    def __init__(self, file, protocol=None):
        """Create a pickler writing to the binary stream *file*.

        :param file: writable file-like object receiving the pickle stream.
        :param protocol: pickle protocol number; defaults to
            ``DEFAULT_PROTOCOL`` (the highest available) when None.
        """
        if protocol is None:
            protocol = DEFAULT_PROTOCOL
        Pickler.__init__(self, file, protocol=protocol)
        # map ids to dictionary. used to ensure that functions can share global env
        self.globals_ref = {}
0353 
0354     def dump(self, obj):
0355         self.inject_addons()
0356         try:
0357             return Pickler.dump(self, obj)
0358         except RuntimeError as e:
0359             if 'recursion' in e.args[0]:
0360                 msg = """Could not pickle object as excessively deep recursion required."""
0361                 raise pickle.PicklingError(msg)
0362             else:
0363                 raise
0364 
    def save_memoryview(self, obj):
        """Save a memoryview by pickling a bytes copy of its contents."""
        self.save(obj.tobytes())

    dispatch[memoryview] = save_memoryview
0369 
    if PY2:  # pragma: no branch
        def save_buffer(self, obj):
            """Save a Python 2 ``buffer`` via its string representation."""
            self.save(str(obj))

        dispatch[buffer] = save_buffer  # noqa: F821 'buffer' was removed in Python 3
0375 
    def save_module(self, obj):
        """
        Save a module as an import

        Modules that ``_is_dynamic`` flags are pickled together with their
        full ``vars()`` via ``dynamic_subimport``; all other modules are
        reduced to a plain re-import of their name via ``subimport``.
        """
        if _is_dynamic(obj):
            self.save_reduce(dynamic_subimport, (obj.__name__, vars(obj)),
                             obj=obj)
        else:
            self.save_reduce(subimport, (obj.__name__,), obj=obj)

    dispatch[types.ModuleType] = save_module
0387 
    def save_codeobject(self, obj):
        """
        Save a code object by reducing to ``types.CodeType(*args)``.

        The argument tuple must match the running interpreter's ``CodeType``
        constructor signature: Python 3.8+ inserts ``co_posonlyargcount``
        (PEP 570), Python 3 includes ``co_kwonlyargcount``, and Python 2 has
        neither.
        """
        if PY3:  # pragma: no branch
            if hasattr(obj, "co_posonlyargcount"):  # pragma: no branch
                args = (
                    obj.co_argcount, obj.co_posonlyargcount,
                    obj.co_kwonlyargcount, obj.co_nlocals, obj.co_stacksize,
                    obj.co_flags, obj.co_code, obj.co_consts, obj.co_names,
                    obj.co_varnames, obj.co_filename, obj.co_name,
                    obj.co_firstlineno, obj.co_lnotab, obj.co_freevars,
                    obj.co_cellvars
                )
            else:
                args = (
                    obj.co_argcount, obj.co_kwonlyargcount, obj.co_nlocals,
                    obj.co_stacksize, obj.co_flags, obj.co_code, obj.co_consts,
                    obj.co_names, obj.co_varnames, obj.co_filename,
                    obj.co_name, obj.co_firstlineno, obj.co_lnotab,
                    obj.co_freevars, obj.co_cellvars
                )
        else:
            # Python 2 code objects carry no keyword-only argument count.
            args = (
                obj.co_argcount, obj.co_nlocals, obj.co_stacksize, obj.co_flags, obj.co_code,
                obj.co_consts, obj.co_names, obj.co_varnames, obj.co_filename, obj.co_name,
                obj.co_firstlineno, obj.co_lnotab, obj.co_freevars, obj.co_cellvars
            )
        self.save_reduce(types.CodeType, args, obj=obj)

    dispatch[types.CodeType] = save_codeobject
0419 
    def save_function(self, obj, name=None):
        """ Registered with the dispatch to handle all function types.

        Determines what kind of function obj is (e.g. lambda, defined at
        interactive prompt, etc) and handles the pickling appropriately.
        """
        try:
            should_special_case = obj in _BUILTIN_TYPE_CONSTRUCTORS
        except TypeError:
            # Methods of builtin types aren't hashable in python 2.
            should_special_case = False

        if should_special_case:
            # We keep a special-cased cache of built-in type constructors at
            # global scope, because these functions are structured very
            # differently in different python versions and implementations (for
            # example, they're instances of types.BuiltinFunctionType in
            # CPython, but they're ordinary types.FunctionType instances in
            # PyPy).
            #
            # If the function we've received is in that cache, we just
            # serialize it as a lookup into the cache.
            return self.save_reduce(_BUILTIN_TYPE_CONSTRUCTORS[obj], (), obj=obj)

        write = self.write

        if name is None:
            name = obj.__name__
        try:
            # whichmodule() could fail, see
            # https://bitbucket.org/gutworth/six/issues/63/importing-six-breaks-pickling
            modname = pickle.whichmodule(obj, name)
        except Exception:
            modname = None
        try:
            themodule = sys.modules[modname]
        except KeyError:
            # eval'd items such as namedtuple give invalid items for their function __module__
            modname = '__main__'

        if modname == '__main__':
            themodule = None

        try:
            lookedup_by_name = getattr(themodule, name, None)
        except Exception:
            lookedup_by_name = None

        if themodule:
            # The function is importable from its module under its own name:
            # a plain global reference suffices.
            if lookedup_by_name is obj:
                return self.save_global(obj, name)

        # a builtin_function_or_method which comes in as an attribute of some
        # object (e.g., itertools.chain.from_iterable) will end
        # up with modname "__main__" and so end up here. But these functions
        # have no __code__ attribute in CPython, so the handling for
        # user-defined functions below will fail.
        # So we pickle them here using save_reduce; have to do it differently
        # for different python versions.
        if not hasattr(obj, '__code__'):
            if PY3:  # pragma: no branch
                rv = obj.__reduce_ex__(self.proto)
            else:
                if hasattr(obj, '__self__'):
                    rv = (getattr, (obj.__self__, name))
                else:
                    raise pickle.PicklingError("Can't pickle %r" % obj)
            return self.save_reduce(obj=obj, *rv)

        # if func is lambda, def'ed at prompt, is in main, or is nested, then
        # we'll pickle the actual function object rather than simply saving a
        # reference (as is done in default pickler), via save_function_tuple.
        if (islambda(obj)
                or getattr(obj.__code__, 'co_filename', None) == '<stdin>'
                or themodule is None):
            self.save_function_tuple(obj)
            return
        else:
            # func is nested
            if lookedup_by_name is None or lookedup_by_name is not obj:
                self.save_function_tuple(obj)
                return

        if obj.__dict__:
            # essentially save_reduce, but workaround needed to avoid recursion
            # NOTE(review): pickle.MARK/GLOBAL/TUPLE/REDUCE are bytes on
            # Python 3, so these str concatenations look Python 2-only --
            # confirm this branch is unreachable on Python 3.
            self.save(_restore_attr)
            write(pickle.MARK + pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)
            self.save(obj.__dict__)
            write(pickle.TUPLE + pickle.REDUCE)
        else:
            write(pickle.GLOBAL + modname + '\n' + name + '\n')
            self.memoize(obj)

    dispatch[types.FunctionType] = save_function
0516 
0517     def _save_subimports(self, code, top_level_dependencies):
0518         """
0519         Save submodules used by a function but not listed in its globals.
0520 
0521         In the example below:
0522 
0523         ```
0524         import concurrent.futures
0525         import cloudpickle
0526 
0527 
0528         def func():
0529             x = concurrent.futures.ThreadPoolExecutor
0530 
0531 
0532         if __name__ == '__main__':
0533             cloudpickle.dumps(func)
0534         ```
0535 
0536         the globals extracted by cloudpickle in the function's state include
0537         the concurrent module, but not its submodule (here,
0538         concurrent.futures), which is the module used by func.
0539 
0540         To ensure that calling the depickled function does not raise an
0541         AttributeError, this function looks for any currently loaded submodule
0542         that the function uses and whose parent is present in the function
0543         globals, and saves it before saving the function.
0544         """
0545 
0546         # check if any known dependency is an imported package
0547         for x in top_level_dependencies:
0548             if isinstance(x, types.ModuleType) and hasattr(x, '__package__') and x.__package__:
0549                 # check if the package has any currently loaded sub-imports
0550                 prefix = x.__name__ + '.'
0551                 # A concurrent thread could mutate sys.modules,
0552                 # make sure we iterate over a copy to avoid exceptions
0553                 for name in list(sys.modules):
0554                     # Older versions of pytest will add a "None" module to sys.modules.
0555                     if name is not None and name.startswith(prefix):
0556                         # check whether the function can address the sub-module
0557                         tokens = set(name[len(prefix):].split('.'))
0558                         if not tokens - set(code.co_names):
0559                             # ensure unpickler executes this import
0560                             self.save(sys.modules[name])
0561                             # then discards the reference to it
0562                             self.write(pickle.POP)
0563 
0564     def _save_dynamic_enum(self, obj, clsdict):
0565         """Special handling for dynamic Enum subclasses
0566 
0567         Use a dedicated Enum constructor (inspired by EnumMeta.__call__) as the
0568         EnumMeta metaclass has complex initialization that makes the Enum
0569         subclasses hold references to their own instances.
0570         """
0571         members = dict((e.name, e.value) for e in obj)
0572 
0573         # Python 2.7 with enum34 can have no qualname:
0574         qualname = getattr(obj, "__qualname__", None)
0575 
0576         self.save_reduce(_make_skeleton_enum,
0577                          (obj.__bases__, obj.__name__, qualname, members,
0578                           obj.__module__, _ensure_tracking(obj), None),
0579                          obj=obj)
0580 
0581         # Cleanup the clsdict that will be passed to _rehydrate_skeleton_class:
0582         # Those attributes are already handled by the metaclass.
0583         for attrname in ["_generate_next_value_", "_member_names_",
0584                          "_member_map_", "_member_type_",
0585                          "_value2member_map_"]:
0586             clsdict.pop(attrname, None)
0587         for member in members:
0588             clsdict.pop(member)
0589 
    def save_dynamic_class(self, obj):
        """Save a class that can't be stored as module global.

        This method is used to serialize classes that are defined inside
        functions, or that otherwise can't be serialized as attribute lookups
        from global modules.

        :param obj: the class object to serialize.
        """
        clsdict = _extract_class_dict(obj)
        clsdict.pop('__weakref__', None)

        # For ABCMeta in python3.7+, remove _abc_impl as it is not picklable.
        # This is a fix which breaks the cache but this only makes the first
        # calls to issubclass slower.
        if "_abc_impl" in clsdict:
            import abc
            (registry, _, _, _) = abc._get_dump(obj)
            clsdict["_abc_impl"] = [subclass_weakref()
                                    for subclass_weakref in registry]

        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
        # the initial skeleton class.  This is safe because we know that the
        # doc can't participate in a cycle with the original class.
        type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

        if hasattr(obj, "__slots__"):
            type_kwargs['__slots__'] = obj.__slots__
            # pickle string length optimization: member descriptors of obj are
            # created automatically from obj's __slots__ attribute, no need to
            # save them in obj's state
            if isinstance(obj.__slots__, string_types):
                # A lone string in __slots__ names a single slot.
                clsdict.pop(obj.__slots__)
            else:
                for k in obj.__slots__:
                    clsdict.pop(k, None)

        # If type overrides __dict__ as a property, include it in the type
        # kwargs. In Python 2, we can't set this attribute after construction.
        __dict__ = clsdict.pop('__dict__', None)
        if isinstance(__dict__, property):
            type_kwargs['__dict__'] = __dict__

        save = self.save
        write = self.write

        # We write pickle instructions explicitly here to handle the
        # possibility that the type object participates in a cycle with its own
        # __dict__. We first write an empty "skeleton" version of the class and
        # memoize it before writing the class' __dict__ itself. We then write
        # instructions to "rehydrate" the skeleton class by restoring the
        # attributes from the __dict__.
        #
        # A type can appear in a cycle with its __dict__ if an instance of the
        # type appears in the type's __dict__ (which happens for the stdlib
        # Enum class), or if the type defines methods that close over the name
        # of the type, (which is common for Python 2-style super() calls).

        # Push the rehydration function.
        save(_rehydrate_skeleton_class)

        # Mark the start of the args tuple for the rehydration function.
        write(pickle.MARK)

        # Create and memoize a skeleton class with obj's name and bases.
        if Enum is not None and issubclass(obj, Enum):
            # Special handling of Enum subclasses
            self._save_dynamic_enum(obj, clsdict)
        else:
            # "Regular" class definition:
            tp = type(obj)
            self.save_reduce(_make_skeleton_class,
                             (tp, obj.__name__, obj.__bases__, type_kwargs,
                              _ensure_tracking(obj), None),
                             obj=obj)

        # Now save the rest of obj's __dict__. Any references to obj
        # encountered while saving will point to the skeleton class.
        save(clsdict)

        # Write a tuple of (skeleton_class, clsdict).
        write(pickle.TUPLE)

        # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
        write(pickle.REDUCE)
0673 
    def save_function_tuple(self, func):
        """  Pickles an actual func object.

        A func comprises: code, globals, defaults, closure, and dict.  We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object.  Keep in mind that some of these pieces
        can contain a ref to the func itself.  Thus, a naive save on these
        pieces could trigger an infinite loop of save's.  To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created.  The other stuff can then be filled in later.
        """
        if is_tornado_coroutine(func):
            # Tornado coroutines pickle via their wrapped function instead.
            self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                             obj=func)
            return

        save = self.save
        write = self.write

        code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

        save(_fill_function)  # skeleton function updater
        write(pickle.MARK)    # beginning of tuple that _fill_function expects

        # Save any loaded submodules the code needs before the skeleton, so
        # the unpickler imports them first (each is immediately POPped).
        self._save_subimports(
            code,
            itertools.chain(f_globals.values(), closure_values or ()),
        )

        # create a skeleton function object and memoize it
        save(_make_skel_func)
        save((
            code,
            # -1 encodes "no closure" (as opposed to an empty closure tuple).
            len(closure_values) if closure_values is not None else -1,
            base_globals,
        ))
        write(pickle.REDUCE)
        # Memoize BEFORE saving the state dict: any self-reference inside the
        # state then resolves to the memoized skeleton instead of recursing.
        self.memoize(func)

        # save the rest of the func data needed by _fill_function
        state = {
            'globals': f_globals,
            'defaults': defaults,
            'dict': dct,
            'closure_values': closure_values,
            'module': func.__module__,
            'name': func.__name__,
            'doc': func.__doc__,
        }
        if hasattr(func, '__annotations__') and sys.version_info >= (3, 7):
            state['annotations'] = func.__annotations__
        if hasattr(func, '__qualname__'):
            state['qualname'] = func.__qualname__
        if hasattr(func, '__kwdefaults__'):
            state['kwdefaults'] = func.__kwdefaults__
        save(state)
        write(pickle.TUPLE)
        write(pickle.REDUCE)  # applies _fill_function on the tuple
    # Cache of code object -> set of referenced global names.  PyPy code
    # objects cannot be weakly referenced, so fall back to a regular
    # (unbounded) dict there.
    _extract_code_globals_cache = (
        weakref.WeakKeyDictionary()
        if not hasattr(sys, "pypy_version_info")
        else {})

    @classmethod
    def extract_code_globals(cls, co):
        """
        Find all globals names read or written to by codeblock co

        Recurses into nested code objects found in ``co_consts`` (inner
        functions, lambdas) and memoizes results in
        ``_extract_code_globals_cache``.
        """
        out_names = cls._extract_code_globals_cache.get(co)
        if out_names is None:
            try:
                names = co.co_names
            except AttributeError:
                # PyPy "builtin-code" object
                out_names = set()
            else:
                out_names = {names[oparg] for _, oparg in _walk_global_ops(co)}

                # see if nested function have any global refs
                if co.co_consts:
                    for const in co.co_consts:
                        if type(const) is types.CodeType:
                            out_names |= cls.extract_code_globals(const)

            cls._extract_code_globals_cache[co] = out_names

        return out_names
0763 
0764     def extract_func_data(self, func):
0765         """
0766         Turn the function into a tuple of data necessary to recreate it:
0767             code, globals, defaults, closure_values, dict
0768         """
0769         code = func.__code__
0770 
0771         # extract all global ref's
0772         func_global_refs = self.extract_code_globals(code)
0773 
0774         # process all variables referenced by global environment
0775         f_globals = {}
0776         for var in func_global_refs:
0777             if var in func.__globals__:
0778                 f_globals[var] = func.__globals__[var]
0779 
0780         # defaults requires no processing
0781         defaults = func.__defaults__
0782 
0783         # process closure
0784         closure = (
0785             list(map(_get_cell_contents, func.__closure__))
0786             if func.__closure__ is not None
0787             else None
0788         )
0789 
0790         # save the dict
0791         dct = func.__dict__
0792 
0793         # base_globals represents the future global namespace of func at
0794         # unpickling time. Looking it up and storing it in globals_ref allow
0795         # functions sharing the same globals at pickling time to also
0796         # share them once unpickled, at one condition: since globals_ref is
0797         # an attribute of a Cloudpickler instance, and that a new CloudPickler is
0798         # created each time pickle.dump or pickle.dumps is called, functions
0799         # also need to be saved within the same invokation of
0800         # cloudpickle.dump/cloudpickle.dumps (for example: cloudpickle.dumps([f1, f2])). There
0801         # is no such limitation when using Cloudpickler.dump, as long as the
0802         # multiple invokations are bound to the same Cloudpickler.
0803         base_globals = self.globals_ref.setdefault(id(func.__globals__), {})
0804 
0805         if base_globals == {}:
0806             # Add module attributes used to resolve relative imports
0807             # instructions inside func.
0808             for k in ["__package__", "__name__", "__path__", "__file__"]:
0809                 # Some built-in functions/methods such as object.__new__  have
0810                 # their __globals__ set to None in PyPy
0811                 if func.__globals__ is not None and k in func.__globals__:
0812                     base_globals[k] = func.__globals__[k]
0813 
0814         return (code, f_globals, defaults, closure, dct, base_globals)
0815 
0816     def save_builtin_function(self, obj):
0817         if obj.__module__ == "__builtin__":
0818             return self.save_global(obj)
0819         return self.save_function(obj)
0820 
0821     dispatch[types.BuiltinFunctionType] = save_builtin_function
0822 
    def save_global(self, obj, name=None, pack=struct.pack):
        """
        Save a "global".

        The name of this method is somewhat misleading: all types get
        dispatched here.

        Strategy: singleton types are rebuilt via ``type(...)``; classes
        defined in __main__ are always serialized by value; everything else
        is first tried by reference through the stock pickler, falling back
        to by-value serialization for dynamic classes.
        """
        # Singleton types: rebuild each from its unique instance.
        if obj is type(None):
            return self.save_reduce(type, (None,), obj=obj)
        elif obj is type(Ellipsis):
            return self.save_reduce(type, (Ellipsis,), obj=obj)
        elif obj is type(NotImplemented):
            return self.save_reduce(type, (NotImplemented,), obj=obj)

        # __main__ is not importable on the unpickling side, so classes
        # defined there must be serialized by value.
        if obj.__module__ == "__main__":
            return self.save_dynamic_class(obj)

        try:
            # Fast path: pickle by reference (module/attribute lookup at
            # load time).
            return Pickler.save_global(self, obj, name=name)
        except Exception:
            # Builtin types the stock pickler cannot locate by name are
            # rebuilt through the _builtin_type helper.
            if obj.__module__ == "__builtin__" or obj.__module__ == "builtins":
                if obj in _BUILTIN_TYPE_NAMES:
                    return self.save_reduce(
                        _builtin_type, (_BUILTIN_TYPE_NAMES[obj],), obj=obj)

            # Last resort: serialize dynamic (e.g. interactively defined)
            # classes by value. On Python 3 types.ClassType is aliased to
            # ``type`` earlier in this module.
            typ = type(obj)
            if typ is not obj and isinstance(obj, (type, types.ClassType)):
                return self.save_dynamic_class(obj)

            raise

    dispatch[type] = save_global
    dispatch[types.ClassType] = save_global
0856 
0857     def save_instancemethod(self, obj):
0858         # Memoization rarely is ever useful due to python bounding
0859         if obj.__self__ is None:
0860             self.save_reduce(getattr, (obj.im_class, obj.__name__))
0861         else:
0862             if PY3:  # pragma: no branch
0863                 self.save_reduce(types.MethodType, (obj.__func__, obj.__self__), obj=obj)
0864             else:
0865                 self.save_reduce(types.MethodType, (obj.__func__, obj.__self__, obj.__self__.__class__),
0866                                  obj=obj)
0867 
0868     dispatch[types.MethodType] = save_instancemethod
0869 
    def save_inst(self, obj):
        """Inner logic to save instance. Based off pickle.save_inst

        Handles old-style (Python 2) class instances: emits the class and
        the __getinitargs__ arguments, an OBJ/INST opcode, then the instance
        state followed by BUILD. The opcode ordering mirrors
        pickle.Pickler.save_inst and must not be changed.
        """
        cls = obj.__class__

        # Try the dispatch table (pickle module doesn't do it)
        f = self.dispatch.get(cls)
        if f:
            f(self, obj)  # Call unbound method with explicit self
            return

        memo = self.memo
        write = self.write
        save = self.save

        if hasattr(obj, '__getinitargs__'):
            args = obj.__getinitargs__()
            len(args)  # XXX Assert it's a sequence
            pickle._keep_alive(args, memo)
        else:
            args = ()

        write(pickle.MARK)

        if self.bin:
            # Binary protocol: class object then args, combined by OBJ.
            save(cls)
            for arg in args:
                save(arg)
            write(pickle.OBJ)
        else:
            # Text protocol: INST carries module and class name inline.
            for arg in args:
                save(arg)
            write(pickle.INST + cls.__module__ + '\n' + cls.__name__ + '\n')

        self.memoize(obj)

        # Instance state: result of __getstate__ if defined, else __dict__.
        try:
            getstate = obj.__getstate__
        except AttributeError:
            stuff = obj.__dict__
        else:
            stuff = getstate()
            pickle._keep_alive(stuff, memo)
        save(stuff)
        write(pickle.BUILD)

    if PY2:  # pragma: no branch
        dispatch[types.InstanceType] = save_inst
0917 
0918     def save_property(self, obj):
0919         # properties not correctly saved in python
0920         self.save_reduce(property, (obj.fget, obj.fset, obj.fdel, obj.__doc__), obj=obj)
0921 
0922     dispatch[property] = save_property
0923 
0924     def save_classmethod(self, obj):
0925         orig_func = obj.__func__
0926         self.save_reduce(type(obj), (orig_func,), obj=obj)
0927 
0928     dispatch[classmethod] = save_classmethod
0929     dispatch[staticmethod] = save_classmethod
0930 
0931     def save_itemgetter(self, obj):
0932         """itemgetter serializer (needed for namedtuple support)"""
0933         class Dummy:
0934             def __getitem__(self, item):
0935                 return item
0936         items = obj(Dummy())
0937         if not isinstance(items, tuple):
0938             items = (items,)
0939         return self.save_reduce(operator.itemgetter, items)
0940 
0941     if type(operator.itemgetter) is type:
0942         dispatch[operator.itemgetter] = save_itemgetter
0943 
    def save_attrgetter(self, obj):
        """attrgetter serializer

        The attribute names are recovered by calling the getter on a probe
        object whose __getattribute__ records every accessed name into a
        shared list; dotted (chained) accesses keep extending the entry at
        their recorded index.
        """
        class Dummy(object):
            def __init__(self, attrs, index=None):
                # attrs: shared list accumulating attribute paths;
                # index: position in attrs this probe extends, or None for
                # a probe that starts a brand-new path.
                self.attrs = attrs
                self.index = index
            def __getattribute__(self, item):
                # Use object.__getattribute__ to bypass this very hook.
                attrs = object.__getattribute__(self, "attrs")
                index = object.__getattribute__(self, "index")
                if index is None:
                    # First segment of a new attribute path.
                    index = len(attrs)
                    attrs.append(item)
                else:
                    # Continuation of a dotted path: extend in place.
                    attrs[index] = ".".join([attrs[index], item])
                # Return a new probe bound to the same slot so further dots
                # keep extending this path.
                return type(self)(attrs, index)
        attrs = []
        obj(Dummy(attrs))
        return self.save_reduce(operator.attrgetter, tuple(attrs))

    if type(operator.attrgetter) is type:
        dispatch[operator.attrgetter] = save_attrgetter
0965 
    def save_file(self, obj):
        """Save a file

        Only real, named, open, readable, non-tty files are supported: the
        entire content is read and pickled as an in-memory StringIO carrying
        the original name and read position. The std streams stdout/stderr
        are pickled by reference instead; stdin cannot be pickled.
        """
        try:
            import StringIO as pystringIO  # we can't use cStringIO as it lacks the name attribute
        except ImportError:
            import io as pystringIO

        # Reject anything that does not look like an actual file object.
        if not hasattr(obj, 'name') or not hasattr(obj, 'mode'):
            raise pickle.PicklingError("Cannot pickle files that do not map to an actual file")
        if obj is sys.stdout:
            return self.save_reduce(getattr, (sys, 'stdout'), obj=obj)
        if obj is sys.stderr:
            return self.save_reduce(getattr, (sys, 'stderr'), obj=obj)
        if obj is sys.stdin:
            raise pickle.PicklingError("Cannot pickle standard input")
        if obj.closed:
            raise pickle.PicklingError("Cannot pickle closed files")
        if hasattr(obj, 'isatty') and obj.isatty():
            raise pickle.PicklingError("Cannot pickle files that map to tty objects")
        if 'r' not in obj.mode and '+' not in obj.mode:
            raise pickle.PicklingError("Cannot pickle files that are not opened for reading: %s" % obj.mode)

        name = obj.name

        # Surrogate in-memory stream that will be pickled in obj's place.
        retval = pystringIO.StringIO()

        try:
            # Read the whole file
            curloc = obj.tell()
            obj.seek(0)
            contents = obj.read()
            obj.seek(curloc)
        except IOError:
            raise pickle.PicklingError("Cannot pickle file %s as it cannot be read" % name)
        retval.write(contents)
        # Restore the surrogate to the original read position.
        retval.seek(curloc)

        retval.name = name
        self.save(retval)
        # Memoize the original so later references reuse the surrogate.
        self.memoize(obj)
1006 
    def save_ellipsis(self, obj):
        """``Ellipsis`` is a singleton; rebuild it via a helper callable."""
        self.save_reduce(_gen_ellipsis, ())

    def save_not_implemented(self, obj):
        """``NotImplemented`` is a singleton; rebuild via a helper callable."""
        self.save_reduce(_gen_not_implemented, ())

    # Register the file saver for whichever file type this Python exposes.
    try:               # Python 2
        dispatch[file] = save_file
    except NameError:  # Python 3  # pragma: no branch
        dispatch[io.TextIOWrapper] = save_file

    dispatch[type(Ellipsis)] = save_ellipsis
    dispatch[type(NotImplemented)] = save_not_implemented
1020 
1021     def save_weakset(self, obj):
1022         self.save_reduce(weakref.WeakSet, (list(obj),))
1023 
1024     dispatch[weakref.WeakSet] = save_weakset
1025 
    def save_logger(self, obj):
        """Named loggers are process-level singletons; pickle them by name
        so unpickling yields the equivalent logger in the target process."""
        self.save_reduce(logging.getLogger, (obj.name,), obj=obj)

    dispatch[logging.Logger] = save_logger
1030 
    def save_root_logger(self, obj):
        """The root logger has no name; rebuild it via logging.getLogger()."""
        self.save_reduce(logging.getLogger, (), obj=obj)

    dispatch[logging.RootLogger] = save_root_logger
1035 
    if hasattr(types, "MappingProxyType"):  # pragma: no branch
        def save_mappingproxy(self, obj):
            """Rebuild a read-only mapping proxy from a plain dict copy."""
            self.save_reduce(types.MappingProxyType, (dict(obj),), obj=obj)

        dispatch[types.MappingProxyType] = save_mappingproxy
1041 
    """Special functions for Add-on libraries"""
    def inject_addons(self):
        """Plug in system. Register additional pickling functions if modules already loaded"""
        # Intentionally a no-op; subclasses or add-on code override this to
        # register extra dispatch entries.
        pass
1046 
1047 
1048 # Tornado support
1049 
def is_tornado_coroutine(func):
    """
    Return whether *func* is a Tornado coroutine function.
    Running coroutines are not supported.
    """
    gen = sys.modules.get('tornado.gen')
    if gen is None:
        # tornado.gen was never imported, so func cannot be a coroutine.
        return False
    if not hasattr(gen, "is_coroutine_function"):
        # Tornado version is too old to expose the check.
        return False
    return gen.is_coroutine_function(func)
1062 
1063 
def _rebuild_tornado_coroutine(func):
    # Re-wrap the plain function with Tornado's coroutine decorator at
    # unpickling time; tornado is imported lazily on purpose so that it is
    # only required when such an object is actually unpickled.
    from tornado import gen
    return gen.coroutine(func)
1067 
1068 
1069 # Shorthands for legacy support
1070 
def dump(obj, file, protocol=None):
    """Serialize obj as bytes streamed into file

    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
    between processes running the same Python version.

    Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
    compatibility with older versions of Python.

    Parameters
    ----------
    obj : object
        Object graph to serialize.
    file : file-like
        Writable binary file object that the pickle bytes are streamed into.
    protocol : int, optional
        Pickle protocol number; None selects the default described above.
    """
    CloudPickler(file, protocol=protocol).dump(obj)
1082 
1083 
def dumps(obj, protocol=None):
    """Serialize obj as a string of bytes allocated in memory

    protocol defaults to cloudpickle.DEFAULT_PROTOCOL which is an alias to
    pickle.HIGHEST_PROTOCOL. This setting favors maximum communication speed
    between processes running the same Python version.

    Set protocol=pickle.DEFAULT_PROTOCOL instead if you need to ensure
    compatibility with older versions of Python.
    """
    buffer = StringIO()
    try:
        CloudPickler(buffer, protocol=protocol).dump(obj)
        return buffer.getvalue()
    finally:
        # Always release the in-memory buffer, even if pickling failed.
        buffer.close()
1101 
1102 
# including pickles unloading functions in this namespace
# cloudpickle output is ordinary pickle data, so the standard unpickler is
# sufficient; re-export it for API symmetry with dump/dumps.
load = pickle.load
loads = pickle.loads
1106 
1107 
1108 # hack for __import__ not working as desired
def subimport(name):
    """Import *name* (possibly a dotted path) and return that exact module
    object — bare ``__import__`` would return the top-level package instead.
    """
    __import__(name)
    return sys.modules[name]
1112 
1113 
def dynamic_subimport(name, vars):
    """Recreate a dynamic (non-importable) module called *name*, populating
    its namespace from the *vars* mapping."""
    module = types.ModuleType(name)
    module.__dict__.update(vars)
    return module
1118 
1119 
1120 # restores function attributes
1121 def _restore_attr(obj, attr):
1122     for key, val in attr.items():
1123         setattr(obj, key, val)
1124     return obj
1125 
1126 
1127 def _gen_ellipsis():
1128     return Ellipsis
1129 
1130 
1131 def _gen_not_implemented():
1132     return NotImplemented
1133 
1134 
1135 def _get_cell_contents(cell):
1136     try:
1137         return cell.cell_contents
1138     except ValueError:
1139         # sentinel used by ``_fill_function`` which will leave the cell empty
1140         return _empty_cell_value
1141 
1142 
def instance(cls):
    """Instantiate *cls* with no arguments (usable as a class decorator that
    binds the class name to a singleton instance).

    Parameters
    ----------
    cls : type
        Class to instantiate.

    Returns
    -------
    cls
        A freshly constructed instance of ``cls``.
    """
    return cls()
1157 
1158 
@instance
class _empty_cell_value(object):
    """sentinel for empty closures

    The ``@instance`` decorator binds this module-level name to the
    singleton instance rather than the class. ``__reduce__`` returns the
    bare class name, so pickling the sentinel unpickles back to this very
    module-level singleton.
    """
    @classmethod
    def __reduce__(cls):
        return cls.__name__
1166 
1167 
def _fill_function(*args):
    """Fills in the rest of function data into the skeleton function object

    The skeleton itself is create by _make_skel_func().
    """
    # Dispatch on arity for backward compatibility with pickles produced by
    # older cloudpickle versions.
    if len(args) == 2:
        func = args[0]
        state = args[1]
    elif len(args) == 5:
        # Backwards compat for cloudpickle v0.4.0, after which the `module`
        # argument was introduced
        func = args[0]
        keys = ['globals', 'defaults', 'dict', 'closure_values']
        state = dict(zip(keys, args[1:]))
    elif len(args) == 6:
        # Backwards compat for cloudpickle v0.4.1, after which the function
        # state was passed as a dict to the _fill_function it-self.
        func = args[0]
        keys = ['globals', 'defaults', 'dict', 'module', 'closure_values']
        state = dict(zip(keys, args[1:]))
    else:
        raise ValueError('Unexpected _fill_value arguments: %r' % (args,))

    # - At pickling time, any dynamic global variable used by func is
    #   serialized by value (in state['globals']).
    # - At unpickling time, func's __globals__ attribute is initialized by
    #   first retrieving an empty isolated namespace that will be shared
    #   with other functions pickled from the same original module
    #   by the same CloudPickler instance and then updated with the
    #   content of state['globals'] to populate the shared isolated
    #   namespace with all the global variables that are specifically
    #   referenced for this function.
    func.__globals__.update(state['globals'])

    func.__defaults__ = state['defaults']
    func.__dict__ = state['dict']
    # The remaining attributes are optional in older pickles, hence the
    # key-presence guards.
    if 'annotations' in state:
        func.__annotations__ = state['annotations']
    if 'doc' in state:
        func.__doc__  = state['doc']
    if 'name' in state:
        func.__name__ = state['name']
    if 'module' in state:
        func.__module__ = state['module']
    if 'qualname' in state:
        func.__qualname__ = state['qualname']
    if 'kwdefaults' in state:
        func.__kwdefaults__ = state['kwdefaults']

    # Populate the existing closure cells in place; cells flagged with the
    # _empty_cell_value sentinel are deliberately left empty.
    cells = func.__closure__
    if cells is not None:
        for cell, value in zip(cells, state['closure_values']):
            if value is not _empty_cell_value:
                cell_set(cell, value)

    return func
1224 
1225 
def _make_empty_cell():
    """Return a fresh, empty closure cell object."""
    if False:
        # trick the compiler into creating an empty cell in our lambda
        cell = None
        raise AssertionError('this route should not be executed')

    # `cell` is a free variable of the lambda but is never assigned, so the
    # closure cell the lambda captures is empty.
    return (lambda: cell).__closure__[0]
1233 
1234 
def _make_skel_func(code, cell_count, base_globals=None):
    """ Creates a skeleton function object that contains just the provided
        code and the correct number of cells in func_closure.  All other
        func attributes (e.g. func_globals) are empty.
    """
    # This is backward-compatibility code: for cloudpickle versions between
    # 0.5.4 and 0.7, base_globals could be a string or None. base_globals
    # should now always be a dictionary.
    if base_globals is None or isinstance(base_globals, str):
        base_globals = {}

    # The rebuilt function must be able to resolve builtins from its globals.
    base_globals['__builtins__'] = __builtins__

    # cell_count == -1 encodes "no closure at all", as opposed to an empty
    # closure tuple (cell_count == 0).
    closure = (
        tuple(_make_empty_cell() for _ in range(cell_count))
        if cell_count >= 0 else
        None
    )
    return types.FunctionType(code, base_globals, None, None, closure)
1254 
1255 
def _make_skeleton_class(type_constructor, name, bases, type_kwargs,
                         class_tracker_id, extra):
    """Build dynamic class with an empty __dict__ to be filled once memoized

    If class_tracker_id is not None, try to lookup an existing class definition
    matching that id. If none is found, track a newly reconstructed class
    definition under that id so that other instances stemming from the same
    class id will also reuse this class definition.

    The "extra" variable is meant to be a dict (or None) that can be used for
    forward compatibility shall the need arise.
    """
    # The attributes are filled in later (see _rehydrate_skeleton_class) so
    # that self-referencing class members can be resolved through the memo.
    skeleton_class = type_constructor(name, bases, type_kwargs)
    return _lookup_class_or_track(class_tracker_id, skeleton_class)
1270 
1271 
1272 def _rehydrate_skeleton_class(skeleton_class, class_dict):
1273     """Put attributes from `class_dict` back on `skeleton_class`.
1274 
1275     See CloudPickler.save_dynamic_class for more info.
1276     """
1277     registry = None
1278     for attrname, attr in class_dict.items():
1279         if attrname == "_abc_impl":
1280             registry = attr
1281         else:
1282             setattr(skeleton_class, attrname, attr)
1283     if registry is not None:
1284         for subclass in registry:
1285             skeleton_class.register(subclass)
1286 
1287     return skeleton_class
1288 
1289 
def _make_skeleton_enum(bases, name, qualname, members, module,
                        class_tracker_id, extra):
    """Build dynamic enum with an empty __dict__ to be filled once memoized

    The creation of the enum class is inspired by the code of
    EnumMeta._create_.

    If class_tracker_id is not None, try to lookup an existing enum definition
    matching that id. If none is found, track a newly reconstructed enum
    definition under that id so that other instances stemming from the same
    class id will also reuse this enum definition.

    The "extra" variable is meant to be a dict (or None) that can be used for
    forward compatibility shall the need arise.
    """
    # enums always inherit from their base Enum class at the last position in
    # the list of base classes:
    enum_base = bases[-1]
    metacls = enum_base.__class__
    classdict = metacls.__prepare__(name, bases)

    # Insert members through the prepared classdict so the enum metaclass
    # performs its usual member wrapping when the class is created below.
    for member_name, member_value in members.items():
        classdict[member_name] = member_value
    enum_class = metacls.__new__(metacls, name, bases, classdict)
    enum_class.__module__ = module

    # Python 2.7 compat
    if qualname is not None:
        enum_class.__qualname__ = qualname

    return _lookup_class_or_track(class_tracker_id, enum_class)
1321 
1322 
1323 def _is_dynamic(module):
1324     """
1325     Return True if the module is special module that cannot be imported by its
1326     name.
1327     """
1328     # Quick check: module that have __file__ attribute are not dynamic modules.
1329     if hasattr(module, '__file__'):
1330         return False
1331 
1332     if hasattr(module, '__spec__'):
1333         return module.__spec__ is None
1334     else:
1335         # Backward compat for Python 2
1336         import imp
1337         try:
1338             path = None
1339             for part in module.__name__.split('.'):
1340                 if path is not None:
1341                     path = [path]
1342                 f, path, description = imp.find_module(part, path)
1343                 if f is not None:
1344                     f.close()
1345         except ImportError:
1346             return True
1347         return False
1348 
1349 
""" Use copy_reg to extend global pickle definitions """

if sys.version_info < (3, 4):  # pragma: no branch
    # Before Python 3.4, method descriptors such as str.upper are not
    # picklable; register a reducer that rebuilds them by attribute lookup
    # on the owning class.
    method_descriptor = type(str.upper)

    def _reduce_method_descriptor(obj):
        # Rebuild as getattr(owning_class, name) at unpickling time.
        return (getattr, (obj.__objclass__, obj.__name__))

    # copy_reg was renamed to copyreg in Python 3.
    try:
        import copy_reg as copyreg
    except ImportError:
        import copyreg
    copyreg.pickle(method_descriptor, _reduce_method_descriptor)