Python pickle.TUPLE Examples
The following are 24 code examples showing how to use pickle.TUPLE (the TUPLE pickle opcode constant). They are extracted from open source projects; the project, author, file, and license are noted above each example.
You may also want to check out all available functions/classes of the module pickle, or try the search function.
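Before the project examples, here is a minimal standalone sketch (not taken from any of the projects below) of what pickle.TUPLE is and how an opcode check can be written with pickletools. The helper name opcode_in_pickle is chosen to mirror the helper used in the pickletester examples, but it is reimplemented here purely for illustration.

import pickle
import pickletools

# pickle.TUPLE is the protocol-0/1 opcode byte (b't') that builds a tuple
# from every stack item pushed since the last MARK.
print(pickle.TUPLE)  # b't'

def opcode_in_pickle(code, data):
    # Return True if the opcode whose code byte is `code` occurs in `data`.
    for op, _arg, _pos in pickletools.genops(data):
        if op.code == code.decode("latin-1"):
            return True
    return False

# Protocol 0 uses TUPLE for tuples of any length; protocol 2 and later prefer
# the specialized TUPLE1/TUPLE2/TUPLE3 opcodes for short tuples.
s0 = pickle.dumps((1, 2, 3, 4), protocol=0)
s2 = pickle.dumps((1, 2), protocol=2)
assert opcode_in_pickle(pickle.TUPLE, s0)
assert opcode_in_pickle(pickle.TUPLE2, s2)
assert not opcode_in_pickle(pickle.TUPLE, s2)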
Example 1
Project: ironpython2 Author: IronLanguages File: pickletester.py License: Apache License 2.0

def test_bad_stack(self):
    badpickles = [
        '.',      # STOP
        '0',      # POP
        '1',      # POP_MARK
        '2',      # DUP
        # '(2',   # PyUnpickler doesn't raise
        'R',      # REDUCE
        ')R',
        'a',      # APPEND
        'Na',
        'b',      # BUILD
        'Nb',
        'd',      # DICT
        'e',      # APPENDS
        # '(e',   # PyUnpickler raises AttributeError
        'i__builtin__\nlist\n',   # INST
        'l',      # LIST
        'o',      # OBJ
        '(o',
        'p1\n',   # PUT
        'q\x00',  # BINPUT
        'r\x00\x00\x00\x00',      # LONG_BINPUT
        's',      # SETITEM
        'Ns',
        'NNs',
        't',      # TUPLE
        'u',      # SETITEMS
        # '(u',   # PyUnpickler doesn't raise
        '}(Nu',
        '\x81',   # NEWOBJ
        ')\x81',
        '\x85',   # TUPLE1
        '\x86',   # TUPLE2
        'N\x86',
        '\x87',   # TUPLE3
        'N\x87',
        'NN\x87',
    ]
    for p in badpickles:
        self.check_unpickling_error(self.bad_stack_errors, p)
Example 2
Project: ironpython2 Author: IronLanguages File: pickletester.py License: Apache License 2.0

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assertEqual(x, y, (proto, x, s, y))
            expected = expected_opcode[proto, len(x)]
            self.assertEqual(opcode_in_pickle(expected, s), True)
Example 3
Project: BinderFilter Author: dxwu File: pickletester.py License: MIT License

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assertEqual(x, y, (proto, x, s, y))
            expected = expected_opcode[proto, len(x)]
            self.assertEqual(opcode_in_pickle(expected, s), True)
Example 4
Project: oss-ftp Author: aliyun File: pickletester.py License: MIT License

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assertEqual(x, y, (proto, x, s, y))
            expected = expected_opcode[proto, len(x)]
            self.assertEqual(opcode_in_pickle(expected, s), True)
Example 5
Project: Fluid-Designer Author: Microvellum File: pickletester.py License: GNU General Public License v3.0

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       (3, 0): pickle.EMPTY_TUPLE,
                       (3, 1): pickle.TUPLE1,
                       (3, 2): pickle.TUPLE2,
                       (3, 3): pickle.TUPLE3,
                       (3, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assert_is_copy(x, y)
            expected = expected_opcode[min(proto, 3), len(x)]
            self.assertTrue(opcode_in_pickle(expected, s))
Example 6
Project: ironpython3 Author: IronLanguages File: pickletester.py License: Apache License 2.0

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       (3, 0): pickle.EMPTY_TUPLE,
                       (3, 1): pickle.TUPLE1,
                       (3, 2): pickle.TUPLE2,
                       (3, 3): pickle.TUPLE3,
                       (3, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assert_is_copy(x, y)
            expected = expected_opcode[min(proto, 3), len(x)]
            self.assertTrue(opcode_in_pickle(expected, s))
Example 7
Project: gcblue Author: gcblue File: pickletester.py License: BSD 3-Clause "New" or "Revised" License

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assertEqual(x, y, (proto, x, s, y))
            expected = expected_opcode[proto, len(x)]
            self.assertEqual(opcode_in_pickle(expected, s), True)
Example 8
Project: Project-New-Reign---Nemesis-Main Author: ShikyoKira File: pickletester.py License: GNU General Public License v3.0

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       (3, 0): pickle.EMPTY_TUPLE,
                       (3, 1): pickle.TUPLE1,
                       (3, 2): pickle.TUPLE2,
                       (3, 3): pickle.TUPLE3,
                       (3, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assert_is_copy(x, y)
            expected = expected_opcode[min(proto, 3), len(x)]
            self.assertTrue(opcode_in_pickle(expected, s))
Example 9
Project: medicare-demo Author: ofermend File: pickletester.py License: Apache License 2.0

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assertEqual(x, y, (proto, x, s, y))
            expected = expected_opcode[proto, len(x)]
            self.assertEqual(opcode_in_pickle(expected, s), True)
Example 10
Project: CTFCrackTools-V2 Author: Acmesec File: pickletester.py License: GNU General Public License v3.0

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assertEqual(x, y, (proto, x, s, y))
            expected = expected_opcode[proto, len(x)]
            self.assertEqual(opcode_in_pickle(expected, s), True)
Example 11
Project: CTFCrackTools Author: Acmesec File: pickletester.py License: GNU General Public License v3.0

def test_short_tuples(self):
    # Map (proto, len(tuple)) to expected opcode.
    expected_opcode = {(0, 0): pickle.TUPLE,
                       (0, 1): pickle.TUPLE,
                       (0, 2): pickle.TUPLE,
                       (0, 3): pickle.TUPLE,
                       (0, 4): pickle.TUPLE,
                       (1, 0): pickle.EMPTY_TUPLE,
                       (1, 1): pickle.TUPLE,
                       (1, 2): pickle.TUPLE,
                       (1, 3): pickle.TUPLE,
                       (1, 4): pickle.TUPLE,
                       (2, 0): pickle.EMPTY_TUPLE,
                       (2, 1): pickle.TUPLE1,
                       (2, 2): pickle.TUPLE2,
                       (2, 3): pickle.TUPLE3,
                       (2, 4): pickle.TUPLE,
                       }
    a = ()
    b = (1,)
    c = (1, 2)
    d = (1, 2, 3)
    e = (1, 2, 3, 4)
    for proto in protocols:
        for x in a, b, c, d, e:
            s = self.dumps(x, proto)
            y = self.loads(s)
            self.assertEqual(x, y, (proto, x, s, y))
            expected = expected_opcode[proto, len(x)]
            self.assertEqual(opcode_in_pickle(expected, s), True)
Example 12
Project: FATE Author: FederatedAI File: cloudpickle.py License: Apache License 2.0

def save_dynamic_class(self, obj):
    """
    Save a class that can't be stored as module global.

    This method is used to serialize classes that are defined inside
    functions, or that otherwise can't be serialized as attribute lookups
    from global modules.
    """
    clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
    clsdict.pop('__weakref__', None)

    # On PyPy, __doc__ is a readonly attribute, so we need to include it in
    # the initial skeleton class.  This is safe because we know that the
    # doc can't participate in a cycle with the original class.
    type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

    # If type overrides __dict__ as a property, include it in the type kwargs.
    # In Python 2, we can't set this attribute after construction.
    __dict__ = clsdict.pop('__dict__', None)
    if isinstance(__dict__, property):
        type_kwargs['__dict__'] = __dict__

    save = self.save
    write = self.write

    # We write pickle instructions explicitly here to handle the
    # possibility that the type object participates in a cycle with its own
    # __dict__. We first write an empty "skeleton" version of the class and
    # memoize it before writing the class' __dict__ itself. We then write
    # instructions to "rehydrate" the skeleton class by restoring the
    # attributes from the __dict__.
    #
    # A type can appear in a cycle with its __dict__ if an instance of the
    # type appears in the type's __dict__ (which happens for the stdlib
    # Enum class), or if the type defines methods that close over the name
    # of the type, (which is common for Python 2-style super() calls).

    # Push the rehydration function.
    save(_rehydrate_skeleton_class)

    # Mark the start of the args tuple for the rehydration function.
    write(pickle.MARK)

    # Create and memoize an skeleton class with obj's name and bases.
    tp = type(obj)
    self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)

    # Now save the rest of obj's __dict__. Any references to obj
    # encountered while saving will point to the skeleton class.
    save(clsdict)

    # Write a tuple of (skeleton_class, clsdict).
    write(pickle.TUPLE)

    # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
    write(pickle.REDUCE)
Example 13
Project: FATE Author: FederatedAI File: cloudpickle.py License: Apache License 2.0

def save_function_tuple(self, func):
    """  Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.

    Keep in mind that some of these pieces can contain a ref to the func
    itself.  Thus, a naive save on these pieces could trigger an infinite
    loop of save's.  To get around that, we first create a skeleton func
    object using just the code (this is safe, since this won't contain a
    ref to the func), and memoize it as soon as it's created.  The other
    stuff can then be filled in later.
    """
    if is_tornado_coroutine(func):
        self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                         obj=func)
        return

    save = self.save
    write = self.write

    code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    self._save_subimports(
        code,
        itertools.chain(f_globals.values(), closure_values or ()),
    )

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((
        code,
        len(closure_values) if closure_values is not None else -1,
        base_globals,
    ))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    state = {
        'globals': f_globals,
        'defaults': defaults,
        'dict': dct,
        'module': func.__module__,
        'closure_values': closure_values,
    }
    if hasattr(func, '__qualname__'):
        state['qualname'] = func.__qualname__
    save(state)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
Example 14
Project: FATE Author: FederatedAI File: cloudpickle.py License: Apache License 2.0

def save_dynamic_class(self, obj):
    """
    Save a class that can't be stored as module global.

    This method is used to serialize classes that are defined inside
    functions, or that otherwise can't be serialized as attribute lookups
    from global modules.
    """
    clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
    clsdict.pop('__weakref__', None)

    # On PyPy, __doc__ is a readonly attribute, so we need to include it in
    # the initial skeleton class.  This is safe because we know that the
    # doc can't participate in a cycle with the original class.
    type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

    # If type overrides __dict__ as a property, include it in the type kwargs.
    # In Python 2, we can't set this attribute after construction.
    __dict__ = clsdict.pop('__dict__', None)
    if isinstance(__dict__, property):
        type_kwargs['__dict__'] = __dict__

    save = self.save
    write = self.write

    # We write pickle instructions explicitly here to handle the
    # possibility that the type object participates in a cycle with its own
    # __dict__. We first write an empty "skeleton" version of the class and
    # memoize it before writing the class' __dict__ itself. We then write
    # instructions to "rehydrate" the skeleton class by restoring the
    # attributes from the __dict__.
    #
    # A type can appear in a cycle with its __dict__ if an instance of the
    # type appears in the type's __dict__ (which happens for the stdlib
    # Enum class), or if the type defines methods that close over the name
    # of the type, (which is common for Python 2-style super() calls).

    # Push the rehydration function.
    save(_rehydrate_skeleton_class)

    # Mark the start of the args tuple for the rehydration function.
    write(pickle.MARK)

    # Create and memoize an skeleton class with obj's name and bases.
    tp = type(obj)
    self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)

    # Now save the rest of obj's __dict__. Any references to obj
    # encountered while saving will point to the skeleton class.
    save(clsdict)

    # Write a tuple of (skeleton_class, clsdict).
    write(pickle.TUPLE)

    # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
    write(pickle.REDUCE)
Example 15
Project: FATE Author: FederatedAI File: cloudpickle.py License: Apache License 2.0

def save_function_tuple(self, func):
    """  Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.

    Keep in mind that some of these pieces can contain a ref to the func
    itself.  Thus, a naive save on these pieces could trigger an infinite
    loop of save's.  To get around that, we first create a skeleton func
    object using just the code (this is safe, since this won't contain a
    ref to the func), and memoize it as soon as it's created.  The other
    stuff can then be filled in later.
    """
    if is_tornado_coroutine(func):
        self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                         obj=func)
        return

    save = self.save
    write = self.write

    code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    self._save_subimports(
        code,
        itertools.chain(f_globals.values(), closure_values or ()),
    )

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((
        code,
        len(closure_values) if closure_values is not None else -1,
        base_globals,
    ))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    state = {
        'globals': f_globals,
        'defaults': defaults,
        'dict': dct,
        'module': func.__module__,
        'closure_values': closure_values,
    }
    if hasattr(func, '__qualname__'):
        state['qualname'] = func.__qualname__
    save(state)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
Example 16
Project: LearningApacheSpark Author: runawayhorse001 File: cloudpickle.py License: MIT License

def save_dynamic_class(self, obj):
    """
    Save a class that can't be stored as module global.

    This method is used to serialize classes that are defined inside
    functions, or that otherwise can't be serialized as attribute lookups
    from global modules.
    """
    clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
    clsdict.pop('__weakref__', None)

    # On PyPy, __doc__ is a readonly attribute, so we need to include it in
    # the initial skeleton class.  This is safe because we know that the
    # doc can't participate in a cycle with the original class.
    type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

    # If type overrides __dict__ as a property, include it in the type kwargs.
    # In Python 2, we can't set this attribute after construction.
    __dict__ = clsdict.pop('__dict__', None)
    if isinstance(__dict__, property):
        type_kwargs['__dict__'] = __dict__

    save = self.save
    write = self.write

    # We write pickle instructions explicitly here to handle the
    # possibility that the type object participates in a cycle with its own
    # __dict__. We first write an empty "skeleton" version of the class and
    # memoize it before writing the class' __dict__ itself. We then write
    # instructions to "rehydrate" the skeleton class by restoring the
    # attributes from the __dict__.
    #
    # A type can appear in a cycle with its __dict__ if an instance of the
    # type appears in the type's __dict__ (which happens for the stdlib
    # Enum class), or if the type defines methods that close over the name
    # of the type, (which is common for Python 2-style super() calls).

    # Push the rehydration function.
    save(_rehydrate_skeleton_class)

    # Mark the start of the args tuple for the rehydration function.
    write(pickle.MARK)

    # Create and memoize an skeleton class with obj's name and bases.
    tp = type(obj)
    self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)

    # Now save the rest of obj's __dict__. Any references to obj
    # encountered while saving will point to the skeleton class.
    save(clsdict)

    # Write a tuple of (skeleton_class, clsdict).
    write(pickle.TUPLE)

    # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
    write(pickle.REDUCE)
Example 17
Project: LearningApacheSpark Author: runawayhorse001 File: cloudpickle.py License: MIT License

def save_function_tuple(self, func):
    """  Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.

    Keep in mind that some of these pieces can contain a ref to the func
    itself.  Thus, a naive save on these pieces could trigger an infinite
    loop of save's.  To get around that, we first create a skeleton func
    object using just the code (this is safe, since this won't contain a
    ref to the func), and memoize it as soon as it's created.  The other
    stuff can then be filled in later.
    """
    if is_tornado_coroutine(func):
        self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                         obj=func)
        return

    save = self.save
    write = self.write

    code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    self._save_subimports(
        code,
        itertools.chain(f_globals.values(), closure_values or ()),
    )

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((
        code,
        len(closure_values) if closure_values is not None else -1,
        base_globals,
    ))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    state = {
        'globals': f_globals,
        'defaults': defaults,
        'dict': dct,
        'module': func.__module__,
        'closure_values': closure_values,
    }
    if hasattr(func, '__qualname__'):
        state['qualname'] = func.__qualname__
    save(state)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
Example 18
Project: ironpython3 Author: IronLanguages File: pickletester.py License: Apache License 2.0

def test_bad_stack(self):
    badpickles = [
        b'.',      # STOP
        b'0',      # POP
        b'1',      # POP_MARK
        b'2',      # DUP
        # b'(2',   # PyUnpickler doesn't raise
        b'R',      # REDUCE
        b')R',
        b'a',      # APPEND
        b'Na',
        b'b',      # BUILD
        b'Nb',
        b'd',      # DICT
        b'e',      # APPENDS
        # b'(e',   # PyUnpickler raises AttributeError
        b'ibuiltins\nlist\n',   # INST
        b'l',      # LIST
        b'o',      # OBJ
        b'(o',
        b'p1\n',   # PUT
        b'q\x00',  # BINPUT
        b'r\x00\x00\x00\x00',   # LONG_BINPUT
        b's',      # SETITEM
        b'Ns',
        b'NNs',
        b't',      # TUPLE
        b'u',      # SETITEMS
        # b'(u',   # PyUnpickler doesn't raise
        b'}(Nu',
        b'\x81',   # NEWOBJ
        b')\x81',
        b'\x85',   # TUPLE1
        b'\x86',   # TUPLE2
        b'N\x86',
        b'\x87',   # TUPLE3
        b'N\x87',
        b'NN\x87',
        b'\x90',   # ADDITEMS
        # b'(\x90',   # PyUnpickler raises AttributeError
        b'\x91',   # FROZENSET
        b'\x92',   # NEWOBJ_EX
        b')}\x92',
        b'\x93',   # STACK_GLOBAL
        b'Vlist\n\x93',
        b'\x94',   # MEMOIZE
    ]
    for p in badpickles:
        self.check_unpickling_error(self.bad_stack_errors, p)
Example 19
Project: BentoML Author: bentoml File: cloudpickle.py License: Apache License 2.0

def save_function_tuple(self, func):
    """  Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.

    Keep in mind that some of these pieces can contain a ref to the func
    itself.  Thus, a naive save on these pieces could trigger an infinite
    loop of save's.  To get around that, we first create a skeleton func
    object using just the code (this is safe, since this won't contain a
    ref to the func), and memoize it as soon as it's created.  The other
    stuff can then be filled in later.
    """
    if is_tornado_coroutine(func):
        self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                         obj=func)
        return

    save = self.save
    write = self.write

    code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    self._save_subimports(
        code,
        itertools.chain(f_globals.values(), closure_values or ()),
    )

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((
        code,
        len(closure_values) if closure_values is not None else -1,
        base_globals,
    ))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    state = {
        'globals': f_globals,
        'defaults': defaults,
        'dict': dct,
        'closure_values': closure_values,
        'module': func.__module__,
        'name': func.__name__,
        'doc': func.__doc__,
    }
    if hasattr(func, '__annotations__') and sys.version_info >= (3, 7):
        state['annotations'] = func.__annotations__
    if hasattr(func, '__qualname__'):
        state['qualname'] = func.__qualname__
    if hasattr(func, '__kwdefaults__'):
        state['kwdefaults'] = func.__kwdefaults__
    save(state)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
Example 20
Project: Project-New-Reign---Nemesis-Main Author: ShikyoKira File: pickletester.py License: GNU General Public License v3.0

def test_bad_stack(self):
    badpickles = [
        b'.',      # STOP
        b'0',      # POP
        b'1',      # POP_MARK
        b'2',      # DUP
        # b'(2',   # PyUnpickler doesn't raise
        b'R',      # REDUCE
        b')R',
        b'a',      # APPEND
        b'Na',
        b'b',      # BUILD
        b'Nb',
        b'd',      # DICT
        b'e',      # APPENDS
        # b'(e',   # PyUnpickler raises AttributeError
        b'ibuiltins\nlist\n',   # INST
        b'l',      # LIST
        b'o',      # OBJ
        b'(o',
        b'p1\n',   # PUT
        b'q\x00',  # BINPUT
        b'r\x00\x00\x00\x00',   # LONG_BINPUT
        b's',      # SETITEM
        b'Ns',
        b'NNs',
        b't',      # TUPLE
        b'u',      # SETITEMS
        # b'(u',   # PyUnpickler doesn't raise
        b'}(Nu',
        b'\x81',   # NEWOBJ
        b')\x81',
        b'\x85',   # TUPLE1
        b'\x86',   # TUPLE2
        b'N\x86',
        b'\x87',   # TUPLE3
        b'N\x87',
        b'NN\x87',
        b'\x90',   # ADDITEMS
        # b'(\x90',   # PyUnpickler raises AttributeError
        b'\x91',   # FROZENSET
        b'\x92',   # NEWOBJ_EX
        b')}\x92',
        b'\x93',   # STACK_GLOBAL
        b'Vlist\n\x93',
        b'\x94',   # MEMOIZE
    ]
    for p in badpickles:
        self.check_unpickling_error(self.bad_stack_errors, p)
Example 21
Project: spark-cluster-deployment Author: adobe-research File: cloudpickle.py License: Apache License 2.0

def save_function_tuple(self, func, forced_imports):
    """  Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.

    Keep in mind that some of these pieces can contain a ref to the func
    itself.  Thus, a naive save on these pieces could trigger an infinite
    loop of save's.  To get around that, we first create a skeleton func
    object using just the code (this is safe, since this won't contain a
    ref to the func), and memoize it as soon as it's created.  The other
    stuff can then be filled in later.
    """
    save = self.save
    write = self.write

    # save the modules (if any)
    if forced_imports:
        write(pickle.MARK)
        save(_modules_to_main)
        # print 'forced imports are', forced_imports

        forced_names = map(lambda m: m.__name__, forced_imports)
        save((forced_names,))
        # save((forced_imports,))
        write(pickle.REDUCE)
        write(pickle.POP_MARK)

    code, f_globals, defaults, closure, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((code, len(closure), base_globals))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    save(f_globals)
    save(defaults)
    save(closure)
    save(dct)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
Example 22
Project: pywren Author: pywren File: cloudpickle.py License: Apache License 2.0

def save_function_tuple(self, func):
    """  Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.

    Keep in mind that some of these pieces can contain a ref to the func
    itself.  Thus, a naive save on these pieces could trigger an infinite
    loop of save's.  To get around that, we first create a skeleton func
    object using just the code (this is safe, since this won't contain a
    ref to the func), and memoize it as soon as it's created.  The other
    stuff can then be filled in later.
    """
    if is_tornado_coroutine(func):
        self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                         obj=func)
        return

    save = self.save
    write = self.write

    code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    self._save_subimports(
        code,
        itertools.chain(f_globals.values(), closure_values or ()),
    )

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((
        code,
        len(closure_values) if closure_values is not None else -1,
        base_globals,
    ))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    save(f_globals)
    save(defaults)
    save(dct)
    save(func.__module__)
    save(closure_values)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple
Example 23
Project: eggroll Author: WeBankFinTech File: cloudpickle.py License: Apache License 2.0

def save_dynamic_class(self, obj):
    """
    Save a class that can't be stored as module global.

    This method is used to serialize classes that are defined inside
    functions, or that otherwise can't be serialized as attribute lookups
    from global modules.
    """
    clsdict = dict(obj.__dict__)  # copy dict proxy to a dict
    clsdict.pop('__weakref__', None)

    # On PyPy, __doc__ is a readonly attribute, so we need to include it in
    # the initial skeleton class.  This is safe because we know that the
    # doc can't participate in a cycle with the original class.
    type_kwargs = {'__doc__': clsdict.pop('__doc__', None)}

    # If type overrides __dict__ as a property, include it in the type kwargs.
    # In Python 2, we can't set this attribute after construction.
    __dict__ = clsdict.pop('__dict__', None)
    if isinstance(__dict__, property):
        type_kwargs['__dict__'] = __dict__

    save = self.save
    write = self.write

    # We write pickle instructions explicitly here to handle the
    # possibility that the type object participates in a cycle with its own
    # __dict__. We first write an empty "skeleton" version of the class and
    # memoize it before writing the class' __dict__ itself. We then write
    # instructions to "rehydrate" the skeleton class by restoring the
    # attributes from the __dict__.
    #
    # A type can appear in a cycle with its __dict__ if an instance of the
    # type appears in the type's __dict__ (which happens for the stdlib
    # Enum class), or if the type defines methods that close over the name
    # of the type, (which is common for Python 2-style super() calls).

    # Push the rehydration function.
    save(_rehydrate_skeleton_class)

    # Mark the start of the args tuple for the rehydration function.
    write(pickle.MARK)

    # Create and memoize an skeleton class with obj's name and bases.
    tp = type(obj)
    self.save_reduce(tp, (obj.__name__, obj.__bases__, type_kwargs), obj=obj)

    # Now save the rest of obj's __dict__. Any references to obj
    # encountered while saving will point to the skeleton class.
    save(clsdict)

    # Write a tuple of (skeleton_class, clsdict).
    write(pickle.TUPLE)

    # Call _rehydrate_skeleton_class(skeleton_class, clsdict)
    write(pickle.REDUCE)
Example 24
Project: eggroll Author: WeBankFinTech File: cloudpickle.py License: Apache License 2.0

def save_function_tuple(self, func):
    """  Pickles an actual func object.

    A func comprises: code, globals, defaults, closure, and dict.  We
    extract and save these, injecting reducing functions at certain points
    to recreate the func object.

    Keep in mind that some of these pieces can contain a ref to the func
    itself.  Thus, a naive save on these pieces could trigger an infinite
    loop of save's.  To get around that, we first create a skeleton func
    object using just the code (this is safe, since this won't contain a
    ref to the func), and memoize it as soon as it's created.  The other
    stuff can then be filled in later.
    """
    if is_tornado_coroutine(func):
        self.save_reduce(_rebuild_tornado_coroutine, (func.__wrapped__,),
                         obj=func)
        return

    save = self.save
    write = self.write

    code, f_globals, defaults, closure_values, dct, base_globals = self.extract_func_data(func)

    save(_fill_function)  # skeleton function updater
    write(pickle.MARK)    # beginning of tuple that _fill_function expects

    self._save_subimports(
        code,
        itertools.chain(f_globals.values(), closure_values or ()),
    )

    # create a skeleton function object and memoize it
    save(_make_skel_func)
    save((
        code,
        len(closure_values) if closure_values is not None else -1,
        base_globals,
    ))
    write(pickle.REDUCE)
    self.memoize(func)

    # save the rest of the func data needed by _fill_function
    state = {
        'globals': f_globals,
        'defaults': defaults,
        'dict': dct,
        'module': func.__module__,
        'closure_values': closure_values,
    }
    if hasattr(func, '__qualname__'):
        state['qualname'] = func.__qualname__
    save(state)
    write(pickle.TUPLE)
    write(pickle.REDUCE)  # applies _fill_function on the tuple