Python pickle.REDUCE() Examples

The following code examples show how to use pickle.REDUCE(). They are taken from open-source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: deepjets   Author: deepjets   File: tasksystem.py    BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def save_func(self, obj):
        """Pickle a Python 2 function object.

        First tries the standard by-reference pickling (global name lookup);
        if that fails, serializes the function by reconstruction:
        ``types.FunctionType(code, globals, name, defaults, closure)``.
        """
        try:
            # Fast path: importable functions pickle as a global reference.
            self.save_global(obj)
            return
        except pickle.PicklingError:
            pass
        assert type(obj) is types.FunctionType
        # Push the constructor and its argument tuple, then emit REDUCE so
        # unpickling calls types.FunctionType(*args).
        self.save(types.FunctionType)
        self.save((
            obj.func_code,      # Python 2 attribute names (func_*, not __*__)
            obj.func_globals,
            obj.func_name,
            obj.func_defaults,
            obj.func_closure,
        ))
        self.write(pickle.REDUCE)
        if id(obj) not in self.memo:    # Could be if we recursively landed here. See also pickle.save_tuple().
            self.memoize(obj) 
Example 2
Project: deepjets   Author: deepjets   File: tasksystem.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def save_method(self, obj):
        """Pickle a bound/unbound method (Python 2 ``types.MethodType``).

        Falls back from by-reference pickling to reconstruction via
        ``types.MethodType(im_func, im_self, im_class)`` using REDUCE.
        """
        try:
            # Fast path: methods reachable by qualified name.
            self.save_global(obj)
            return
        except pickle.PicklingError:
            pass
        assert type(obj) is types.MethodType
        self.save(types.MethodType)
        # Python 2 method attributes: underlying function, bound instance, class.
        self.save((obj.im_func, obj.im_self, obj.im_class))
        self.write(pickle.REDUCE)
        self.memoize(obj) 
Example 3
Project: deepjets   Author: deepjets   File: tasksystem.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def save_code(self, obj):
        """Pickle a code object by marshalling its bytecode.

        Unpickling calls ``marshal.loads(dumped_bytes)`` to rebuild the code
        object.  NOTE(review): marshal output is CPython-version specific, so
        the resulting pickle is only loadable by a matching interpreter.
        """
        assert type(obj) is types.CodeType
        self.save(marshal.loads)
        self.save((marshal.dumps(obj),))
        self.write(pickle.REDUCE)
        self.memoize(obj) 
Example 4
Project: deepjets   Author: deepjets   File: tasksystem.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def save_cell(self, obj):
        """Pickle a closure cell by boxing its contents.

        Rebuilt on load via ``makeFuncCell(contents)`` — a helper defined
        elsewhere in this module (not visible here).
        """
        assert type(obj) is CellType
        self.save(makeFuncCell)
        self.save((obj.cell_contents,))
        self.write(pickle.REDUCE)
        self.memoize(obj) 
Example 5
Project: deepjets   Author: deepjets   File: tasksystem.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def intellisave_dict(self, obj):
        """Pickle a dict, specializing module ``__dict__`` namespaces.

        If *obj* is recognized as some module's namespace dict (via
        ``getModNameForModDict``), pickle it as a by-name reference
        (``getModuleDict(modname)``) instead of by value; otherwise defer
        to the regular dict handler.
        """
        modname = getModNameForModDict(obj)
        if modname:
            self.save(getModuleDict)
            self.save((modname,))
            self.write(pickle.REDUCE)
            self.memoize(obj)
            return
        self.save_dict(obj) 
Example 6
Project: deepjets   Author: deepjets   File: tasksystem.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def save_buffer(self, obj):
        """Pickle a Python 2 ``buffer`` by round-tripping through ``str``.

        Rebuilt on load as ``buffer(str_data)``.  NOTE(review): unlike the
        sibling handlers, this one does not memoize, so repeated references
        to the same buffer re-serialize its data each time — confirm this
        is intentional.
        """
        self.save(buffer)
        self.save((str(obj),))
        self.write(pickle.REDUCE) 
Example 7
Project: WrapSQLMAP   Author: THSamurai   File: convert.py    GNU General Public License v3.0 5 votes vote down vote up
def base64unpickle(value, unsafe=False):
    """
    Decodes value from Base64 to plain format and deserializes (with pickle) its content

    >>> base64unpickle('gAJVBmZvb2JhcnEBLg==')
    'foobar'
    """

    retVal = None

    # Replacement handler for the REDUCE opcode: before executing the
    # reduce, peek at the callable sitting just below the argument tuple
    # on the unpickler stack and reject anything not whitelisted.  This
    # is a (partial) mitigation against pickle RCE gadgets — unpickling
    # untrusted data remains fundamentally unsafe.
    def _(self):
        if len(self.stack) > 1:
            func = self.stack[-2]
            if func not in PICKLE_REDUCE_WHITELIST:
                raise Exception, "abusing reduce() is bad, Mkay!"
        self.load_reduce()

    def loads(str):
        f = StringIO.StringIO(str)
        if unsafe:
            # "unsafe" mode uses the pure-Python unpickler so its REDUCE
            # dispatch entry can be patched with the whitelist check above;
            # the C unpickler's dispatch table cannot be patched.
            unpickler = picklePy.Unpickler(f)
            unpickler.dispatch[picklePy.REDUCE] = _
        else:
            unpickler = pickle.Unpickler(f)
        return unpickler.load()

    try:
        retVal = loads(base64decode(value))
    except TypeError: 
        # presumably value arrived as a non-str type; retry after coercion
        retVal = loads(base64decode(bytes(value)))

    return retVal 
Example 8
Project: mitogen   Author: dw   File: core.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def save_exc_inst(self, obj):
        """Pickle an exception instance, special-casing ``CallError``.

        ``CallError`` supplies its own ``__reduce__`` pair, which is emitted
        explicitly as func/args + REDUCE; every other exception instance is
        delegated to the stock ``Pickler.save_inst``.
        """
        if isinstance(obj, CallError):
            func, args = obj.__reduce__()
            self.save(func)
            self.save(args)
            self.write(py_pickle.REDUCE)
        else:
            py_pickle.Pickler.save_inst(self, obj) 
Example 9
Project: FATE   Author: FederatedAI   File: cloudpickle.py    Apache License 2.0 4 votes vote down vote up
def save_dynamic_class(self, obj):
        """
        Save a class that can't be stored as module global.
        This method is used to serialize classes that are defined inside
        functions, or that otherwise can't be serialized as attribute lookups
        from global modules.
        """
        clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
        clsdict.pop('__weakref__', None)

        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
        # the initial skeleton class.  This is safe because we know that the
        # doc can't participate in a cycle with the original class.
        type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

        # If type overrides __dict__ as a property, include it in the type kwargs.
        # In Python 2, we can't set this attribute after construction.
        __dict__ = clsdict.pop('__dict__', None)
        if isinstance(__dict__, property):
            type_kwargs['__dict__'] = __dict__

        save = self.save
        write = self.write

        # We write pickle instructions explicitly here to handle the
        # possibility that the type object participates in a cycle with its own
        # __dict__. We first write an empty "skeleton" version of the class and
        # memoize it before writing the class' __dict__ itself. We then write
        # instructions to "rehydrate" the skeleton class by restoring the
        # attributes from the __dict__.
        #
        # A type can appear in a cycle with its __dict__ if an instance of the
        # type appears in the type's __dict__ (which happens for the stdlib
        # Enum class), or if the type defines methods that close over the name
        # of the type, (which is common for Python 2-style super() calls).

        # Push the rehydration function.
        save(_rehydrate_skeleton_class)

        # Mark the start of the args tuple for the rehydration function.
        write(pickle.MARK)

        # Create and memoize an skeleton class with obj's name and bases.
        # save_reduce memoizes via obj=obj, so later references to the class
        # (e.g. from inside clsdict) resolve to the skeleton.
        tp = type(obj)
        self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)

        # Now save the rest of obj's __dict__. Any references to obj
        # encountered while saving will point to the skeleton class.
        save(clsdict)

        # Write a tuple of (skeleton_class, clsdict).
        write(pickle.TUPLE)

        # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
        write(pickle.REDUCE) 
Example 10
Project: FATE   Author: FederatedAI   File: cloudpickle.py    Apache License 2.0 4 votes vote down vote up
def save_function_tuple(self, func):
        """  Pickles an actual func object.
        A func comprises: code, globals, defaults, closure, and dict.  We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object.  Keep in mind that some of these pieces
        can contain a ref to the func itself.  Thus, a naive save on these
        pieces could trigger an infinite loop of save's.  To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created.  The other stuff can then be filled in later.
        """
        if is_tornado_coroutine(func):
            # Tornado-decorated coroutines are rebuilt from the wrapped
            # function rather than dissected like a plain function.
            self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                             obj=func)
            return

        save = self.save
        write = self.write

        code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

        save(_fill_function)  # skeleton function updater
        write(pickle.MARK)  # beginning of tuple that _fill_function expects

        # Ensure modules referenced by the code/closure are importable on load.
        self._save_subimports(
            code,
            itertools.chain(f_globals.values(), closure_values or ()),
        )

        # create a skeleton function object and memoize it
        save(_make_skel_func)
        save((
            code,
            # -1 signals "no closure" to _make_skel_func (distinct from
            # an empty closure of length 0).
            len(closure_values) if closure_values is not None else -1,
            base_globals,
        ))
        write(pickle.REDUCE)
        self.memoize(func)

        # save the rest of the func data needed by _fill_function
        state = {
            'globals': f_globals,
            'defaults': defaults,
            'dict': dct,
            'module': func.__module__,
            'closure_values': closure_values,
        }
        if hasattr(func, '__qualname__'):
            state['qualname'] = func.__qualname__
        save(state)
        write(pickle.TUPLE)  # closes the MARK opened above
        write(pickle.REDUCE)  # applies _fill_function on the tuple 
Example 11
Project: LearningApacheSpark   Author: runawayhorse001   File: cloudpickle.py    MIT License 4 votes vote down vote up
def save_dynamic_class(self, obj):
        """
        Save a class that can't be stored as module global.

        This method is used to serialize classes that are defined inside
        functions, or that otherwise can't be serialized as attribute lookups
        from global modules.
        """
        clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
        clsdict.pop('__weakref__', None)

        # On PyPy, __doc__ is a readonly attribute, so we need to include it in
        # the initial skeleton class.  This is safe because we know that the
        # doc can't participate in a cycle with the original class.
        type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

        # If type overrides __dict__ as a property, include it in the type kwargs.
        # In Python 2, we can't set this attribute after construction.
        __dict__ = clsdict.pop('__dict__', None)
        if isinstance(__dict__, property):
            type_kwargs['__dict__'] = __dict__

        save = self.save
        write = self.write

        # We write pickle instructions explicitly here to handle the
        # possibility that the type object participates in a cycle with its own
        # __dict__. We first write an empty "skeleton" version of the class and
        # memoize it before writing the class' __dict__ itself. We then write
        # instructions to "rehydrate" the skeleton class by restoring the
        # attributes from the __dict__.
        #
        # A type can appear in a cycle with its __dict__ if an instance of the
        # type appears in the type's __dict__ (which happens for the stdlib
        # Enum class), or if the type defines methods that close over the name
        # of the type, (which is common for Python 2-style super() calls).

        # Push the rehydration function.
        save(_rehydrate_skeleton_class)

        # Mark the start of the args tuple for the rehydration function.
        write(pickle.MARK)

        # Create and memoize an skeleton class with obj's name and bases.
        # save_reduce memoizes via obj=obj, so later references to the class
        # (e.g. from inside clsdict) resolve to the skeleton.
        tp = type(obj)
        self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)

        # Now save the rest of obj's __dict__. Any references to obj
        # encountered while saving will point to the skeleton class.
        save(clsdict)

        # Write a tuple of (skeleton_class, clsdict).
        write(pickle.TUPLE)

        # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
        write(pickle.REDUCE) 
Example 12
Project: LearningApacheSpark   Author: runawayhorse001   File: cloudpickle.py    MIT License 4 votes vote down vote up
def save_function_tuple(self, func):
        """  Pickles an actual func object.

        A func comprises: code, globals, defaults, closure, and dict.  We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object.  Keep in mind that some of these pieces
        can contain a ref to the func itself.  Thus, a naive save on these
        pieces could trigger an infinite loop of save's.  To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created.  The other stuff can then be filled in later.
        """
        if is_tornado_coroutine(func):
            # Tornado-decorated coroutines are rebuilt from the wrapped
            # function rather than dissected like a plain function.
            self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                             obj=func)
            return

        save = self.save
        write = self.write

        code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

        save(_fill_function)  # skeleton function updater
        write(pickle.MARK)    # beginning of tuple that _fill_function expects

        # Ensure modules referenced by the code/closure are importable on load.
        self._save_subimports(
            code,
            itertools.chain(f_globals.values(), closure_values or ()),
        )

        # create a skeleton function object and memoize it
        save(_make_skel_func)
        save((
            code,
            # -1 signals "no closure" (distinct from a closure of length 0).
            len(closure_values) if closure_values is not None else -1,
            base_globals,
        ))
        write(pickle.REDUCE)
        self.memoize(func)

        # save the rest of the func data needed by _fill_function
        state = {
            'globals': f_globals,
            'defaults': defaults,
            'dict': dct,
            'module': func.__module__,
            'closure_values': closure_values,
        }
        if hasattr(func, '__qualname__'):
            state['qualname'] = func.__qualname__
        save(state)
        write(pickle.TUPLE)   # closes the MARK opened above
        write(pickle.REDUCE)  # applies _fill_function on the tuple 
Example 13
Project: cloudpickle-generators   Author: llllllllll   File: __init__.py    BSD 3-Clause "New" or "Revised" License 4 votes vote down vote up
def _save_generator_impl(self, frame, gen, filler):
    """Pickle a (possibly suspended) generator/coroutine.

    Strategy mirrors cloudpickle's skeleton-then-fill approach: create and
    memoize a skeleton generator first (so self-references resolve), then
    emit the frame state (f_lasti, locals, private frame data) and call
    *filler* to restore it on load.
    """
    if frame is None:
        # frame is None when the generator is fully consumed; take a fast path
        self.save_reduce(
            _restore_spent_generator,
            (gen.__name__, getattr(gen, '__qualname__', None)),
            obj=gen,
        )
        return

    f_locals = frame.f_locals
    f_code = frame.f_code

    # Create a copy of generator function without the closure to serve as a box
    # to serialize the code, globals, name, and closure. Cloudpickle already
    # handles things like closures and complicated globals so just rely on
    # cloudpickle to serialize this function.
    gen_func = FunctionType(
        f_code,
        frame.f_globals,
        gen.__name__,
        (),
        # placeholder empty cells, one per free variable of the code object
        (_empty_cell(),) * len(f_code.co_freevars),
    )
    try:
        gen_func.__qualname__ = gen.__qualname__
    except AttributeError:
        # there is no __qualname__ on generators in Python < 3.5
        pass

    save = self.save
    write = self.write

    # push a function onto the stack to fill up our skeleton generator
    # or coroutine
    save(filler)

    # the start of the tuple to pass to ``_fill_generator`` (or
    # ``_fill_coroutine``, ``_fill_async_generator``)
    write(pickle.MARK)

    # Build the skeleton and memoize it BEFORE saving frame state, since
    # f_locals may contain references back to the generator itself.
    save(_create_skeleton_generator)
    save((gen_func,))
    write(pickle.REDUCE)
    self.memoize(gen)

    # push the rest of the arguments to ``_fill_generator`` (or
    # ``_fill_coroutine``, ``_fill_async_generator``)
    save(frame.f_lasti)
    save(f_locals)
    save(private_frame_data(frame))

    # call ``_fill_generator`` (or ``_fill_coroutine``,
    # _fill_async_generator``)
    write(pickle.TUPLE)
    write(pickle.REDUCE) 
Example 14
Project: pywren   Author: pywren   File: cloudpickle.py    Apache License 2.0 4 votes vote down vote up
def save_function_tuple(self, func):
        """  Pickles an actual func object.

        A func comprises: code, globals, defaults, closure, and dict.  We
        extract and save these, injecting reducing functions at certain points
        to recreate the func object.  Keep in mind that some of these pieces
        can contain a ref to the func itself.  Thus, a naive save on these
        pieces could trigger an infinite loop of save's.  To get around that,
        we first create a skeleton func object using just the code (this is
        safe, since this won't contain a ref to the func), and memoize it as
        soon as it's created.  The other stuff can then be filled in later.
        """
        if is_tornado_coroutine(func):
            # Tornado-decorated coroutines are rebuilt from the wrapped
            # function rather than dissected like a plain function.
            self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                             obj=func)
            return

        save = self.save
        write = self.write

        code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

        save(_fill_function)  # skeleton function updater
        write(pickle.MARK)    # beginning of tuple that _fill_function expects

        # Ensure modules referenced by the code/closure are importable on load.
        self._save_subimports(
            code,
            itertools.chain(f_globals.values(), closure_values or ()),
        )

        # create a skeleton function object and memoize it
        save(_make_skel_func)
        save((
            code,
            # -1 signals "no closure" (distinct from a closure of length 0).
            len(closure_values) if closure_values is not None else -1,
            base_globals,
        ))
        write(pickle.REDUCE)
        self.memoize(func)

        # save the rest of the func data needed by _fill_function
        # (older cloudpickle variant: positional args, not a state dict)
        save(f_globals)
        save(defaults)
        save(dct)
        save(func.__module__)
        save(closure_values)
        write(pickle.TUPLE)   # closes the MARK opened above
        write(pickle.REDUCE)  # applies _fill_function on the tuple 
Example 15
Project: pywren   Author: pywren   File: cloudpickle.py    Apache License 2.0 4 votes vote down vote up
def save_reduce(self, func, args, state=None,
                    listitems=None, dictitems=None, obj=None):
        """Emit pickle instructions for a ``__reduce__``-style tuple.

        Mirrors the stdlib ``Pickler.save_reduce``: validates *func*/*args*,
        uses the protocol-2 NEWOBJ opcode for ``__newobj__`` reductions,
        otherwise emits func, args, REDUCE; memoizes *obj* if given, then
        appends *listitems*/*dictitems* and applies *state* via BUILD.
        """
        # Assert that args is a tuple or None
        if not isinstance(args, tuple):
            raise pickle.PicklingError("args from reduce() should be a tuple")

        # Assert that func is callable
        if not hasattr(func, '__call__'):
            raise pickle.PicklingError("func from reduce should be callable")

        save = self.save
        write = self.write

        # Protocol 2 special case: if func's name is __newobj__, use NEWOBJ
        if self.proto >= 2 and getattr(func, "__name__", "") == "__newobj__":
            cls = args[0]
            if not hasattr(cls, "__new__"):
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has no __new__")
            if obj is not None and cls is not obj.__class__:
                raise pickle.PicklingError(
                    "args[0] from __newobj__ args has the wrong class")
            # NEWOBJ calls cls.__new__(cls, *args), so drop cls from args.
            args = args[1:]
            save(cls)

            save(args)
            write(pickle.NEWOBJ)
        else:
            save(func)
            save(args)
            write(pickle.REDUCE)

        if obj is not None:
            self.memoize(obj)

        # More new special cases (that work with older protocols as
        # well): when __reduce__ returns a tuple with 4 or 5 items,
        # the 4th and 5th item should be iterators that provide list
        # items and dict items (as (key, value) tuples), or None.

        if listitems is not None:
            self._batch_appends(listitems)

        if dictitems is not None:
            self._batch_setitems(dictitems)

        if state is not None:
            save(state)
            write(pickle.BUILD)