Python pickle.UnpicklingError() Examples

The following code examples show how to use pickle.UnpicklingError(). They are taken from open-source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: Pikax   Author: Redcxx   File: webclient.py    BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def _local_cookies_login(self):
        """Attempt to authenticate the session from a pickled cookies file.

        Raises:
            LoginError: if the cookies file is missing, or the cookies do
                not produce a logged-in session.
        """
        if not os.path.exists(self.cookies_file):
            raise LoginError('Local Cookies file not found')

        # A cookies file is present; try to authenticate with it.
        util.log(f'Cookie file found: {self.cookies_file}, attempt to login with local cookie')
        try:
            with open(self.cookies_file, 'rb') as cookie_fh:
                self._session.cookies = pickle.load(cookie_fh)
            if self._check_is_logged():
                util.log('Logged in successfully with local cookies', inform=True)
                return
            # Cookies deserialized fine but are no longer accepted; drop them.
            os.remove(self.cookies_file)
            util.log('Removed outdated cookies', inform=True)
        except pickle.UnpicklingError as err:
            # Corrupt pickle payload; delete the file so the next run starts clean.
            os.remove(self.cookies_file)
            util.log('Removed corrupted cookies file, message: {}'.format(err))

        # Neither path produced a logged-in session.
        raise LoginError('Login with cookies failed')
Example 2
Project: Pikax   Author: Redcxx   File: webclient.py    BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def _local_cookies_login(self):
        """Attempt to log in using cookies pickled on disk.

        Raises:
            LoginError: if the cookies file is missing or login fails.
        """
        if not os.path.exists(self.cookies_file):
            raise LoginError('Local Cookies file not found')

        # cookies exists
        util.log(f'Cookie file found: {self.cookies_file}, attempt to login with local cookie')
        try:
            with open(self.cookies_file, 'rb') as f:
                # NOTE(review): pickle.load on a local file; assumes the
                # cookies file was written by this application (trusted).
                local_cookies = pickle.load(f)
                self._session.cookies = local_cookies
            if self._check_is_logged():
                util.log('Logged in successfully with local cookies', inform=True)
                return
            else:
                # Cookies loaded but the session is no longer authenticated;
                # discard the stale file.
                os.remove(self.cookies_file)
                util.log('Removed outdated cookies', inform=True)
        except pickle.UnpicklingError as e:
            # Corrupt pickle data; delete the file so the next run starts clean.
            os.remove(self.cookies_file)
            util.log('Removed corrupted cookies file, message: {}'.format(e))

        # local cookies failed
        raise LoginError('Login with cookies failed')
Example 3
Project: SEM   Author: YoannDupont   File: dictionaryfeatures.py    MIT License 6 votes vote down vote up
def __init__(self, getter=DEFAULT_GETTER, *args, **kwargs):
        """Build the token dictionary feature.

        The dictionary (``self._value``) is loaded from a pickle at
        ``self._path`` when one is given, falling back to compiling the raw
        source file when the pickle cannot be read; otherwise it is built
        from the in-memory iterable ``self._entries``.
        """
        super(TokenDictionaryFeature, self).__init__(getter=getter, *args, **kwargs)
        self._is_boolean = True

        if self._path is not None:
            try:
                # fix: use a context manager so the file handle is closed;
                # the original leaked it via pickle.load(open(...)).
                with open(self._path) as value_file:
                    self._value = pickle.load(value_file)
            except (pickle.UnpicklingError, ImportError, EOFError, IndexError, TypeError):
                # Not a (compatible) pickle: compile from the raw source file.
                self._value = compile_token(self._path, "utf-8")
            self._entries = None
        elif self._entries is not None:
            # Keep only non-empty, stripped entries.
            self._value = set()
            for entry in self._entries:
                entry = entry.strip()
                if entry:
                    self._value.add(entry)

        assert self._value is not None
Example 4
Project: SEM   Author: YoannDupont   File: dictionaryfeatures.py    MIT License 6 votes vote down vote up
def __init__(self, *args, **kwargs):
        """Build the multiword dictionary feature (a Trie of token sequences).

        kwargs:
            entry: the feature's entry name (required).
            appendice: optional suffix for matches (default "").
        """
        super(MultiwordDictionaryFeature, self).__init__(*args, **kwargs)
        self._is_sequence = True
        self._entry       = kwargs["entry"]
        self._appendice   = kwargs.get("appendice", "")

        if self._path is not None:
            try:
                # fix: use a context manager so the file handle is closed;
                # the original leaked it via pickle.load(open(...)).
                with open(self._path) as value_file:
                    self._value = pickle.load(value_file)
            except (pickle.UnpicklingError, ImportError, EOFError):
                # Not a (compatible) pickle: compile from the raw source file.
                self._value = compile_multiword(self._path, "utf-8")
            self._entries = None
        elif self._entries:
            # Each entry is a whitespace-separated multiword expression.
            self._value = Trie()
            for entry in self._entries:
                entry = entry.strip()
                if entry:
                    self._value.add(entry.split())
        else:
            self._value = Trie()
Example 5
Project: bob   Author: BobBuildTool   File: audit.py    GNU General Public License v3.0 6 votes vote down vote up
def fromFile(cls, file):
        """Load an audit from *file*, using a pickle side-cache when valid.

        The cache file (``file + ".pickle"``) starts with a key derived from
        the audit file's stat info plus BOB_INPUT_HASH; a matching key means
        the cached pickle is current. Otherwise the gzipped audit is parsed
        and the cache rewritten.
        """
        cacheName = file + ".pickle"
        cacheKey = None
        try:
            cacheKey = binStat(file) + BOB_INPUT_HASH
            with open(cacheName, "rb") as f:
                persistedCacheKey = f.read(len(cacheKey))
                if cacheKey == persistedCacheKey:
                    return pickle.load(f)
        except (EOFError, OSError, pickle.UnpicklingError):
            # Missing/stale/corrupt cache is expected; fall through to a
            # full parse. (fix: dropped the unused `as e` binding.)
            pass

        audit = cls()
        try:
            with gzip.open(file, 'rb') as gzf:
                audit.load(gzf, file)
            # fix: only rewrite the cache when the key was computed; the
            # original could hit an unbound `cacheKey` if binStat() failed
            # but the gzip parse succeeded.
            if cacheKey is not None:
                with open(cacheName, "wb") as f:
                    f.write(cacheKey)
                    pickle.dump(audit, f, -1)
        except OSError as e:
            log("Error loading audit: " + str(e), WARNING)
        return audit
Example 6
Project: kodiswift   Author: afrase   File: storage.py    GNU General Public License v3.0 6 votes vote down vote up
def load(self):
        """Load the file from disk, trying pickle first and then JSON.

        Returns:
            bool: True if successfully loaded, False if the file
                doesn't exist.

        Raises:
            UnknownFormat: When the file exists but couldn't be loaded.
        """

        if not self._loaded and os.path.exists(self.file_path):
            with open(self.file_path, 'rb') as f:
                for loader in (pickle.load, json.load):
                    try:
                        f.seek(0)
                        self._store = loader(f)
                        self._loaded = True
                        break
                    # fix: json.load reports corrupt input via ValueError
                    # (JSONDecodeError), not UnpicklingError -- catch both so
                    # an unreadable file reaches the UnknownFormat path below
                    # instead of propagating a raw decode error.
                    except (pickle.UnpicklingError, ValueError):
                        pass
            # If the file exists and wasn't able to be loaded, raise an error.
            if not self._loaded:
                raise UnknownFormat('Failed to load file')
        return self._loaded
Example 7
Project: locationsharinglib   Author: costastf   File: locationsharinglib.py    MIT License 6 votes vote down vote up
def _get_authenticated_session(self, cookies_file):
        """Build a Session authenticated from *cookies_file*.

        Tries the legacy pickled-cookie format first; on any unpickling
        failure it falls back to the text-based cookie format.

        Raises:
            InvalidCookies: when the cookies file cannot be opened.
        """
        session = Session()
        try:
            cfile = open(cookies_file, 'rb')
        except FileNotFoundError:
            message = 'Could not open cookies file, either file does not exist or no read access.'
            raise InvalidCookies(message)
        try:
            session.cookies.update(pickle.load(cfile))
            self._logger.debug('Successfully loaded pickled cookie!')
            warnings.warn('Pickled cookie format is going to be deprecated in a future version, '
                          'please start using a text base cookie file!')
        except (pickle.UnpicklingError, KeyError, AttributeError, EOFError, ValueError):
            # Not a pickle (or an incompatible one) -- assume the newer
            # text-based cookie format; the helper re-reads cfile itself.
            self._logger.debug('Trying to load text based cookies.')
            session = self._load_text_cookies(session, cfile)
        cfile.close()
        return session
Example 8
Project: latigo   Author: equinor   File: __init__.py    GNU Affero General Public License v3.0 6 votes vote down vote up
def deserialize_task(task_bytes, mode="json") -> typing.Optional[Task]:
    """
    Deserialize a task from bytes

    Returns the Task, or None when deserialization fails (the failure is
    logged and the traceback printed).
    """
    if mode == "pickle":
        try:
            return pickle.loads(task_bytes)
        except pickle.UnpicklingError as e:
            logger.error(f"Could not deserialize task from pickle of size {len(task_bytes)}bytes: {e}")
            traceback.print_exc()
        return None
    try:
        # Rely on dataclass_json
        return Task.from_json(task_bytes)
    except Exception as e:
        logger.error(f"Could not deserialize task from json of size {len(task_bytes)}bytes: '{task_bytes}', error:'{e}'")
        traceback.print_exc()
    return None
Example 9
Project: vcs_query   Author: mageta   File: vcs_query.py    MIT License 6 votes vote down vote up
def _load(self):
        """Load and validate the pickled cache from ``self.pickle_path``.

        Returns the cached tuple, or ``self._default_state`` when the cache
        file is missing, unreadable, malformed, or from another version.
        """
        try:
            with open(self.pickle_path, "rb") as cache:
                obj = pickle.load(cache)

                # prune invalid or outdated cache-files
                if not isinstance(obj, tuple) or len(obj) < 3:
                    raise RuntimeError("Invalid type")
                elif obj[0] != VcardCache._cache_version:
                    raise RuntimeError("Invalid Version ({})".format(obj[0]))

                return obj
        except (OSError, RuntimeError, AttributeError, EOFError, ImportError,
                IndexError, pickle.UnpicklingError) as error:
            # errno 2 (ENOENT): a missing cache file is normal -- stay quiet;
            # anything else is worth a warning.
            if not isinstance(error, OSError) or error.errno != 2:
                LOGGER.warning("Cache file (%s) could not be read: %s",
                               self.pickle_path, error)
            return self._default_state
Example 10
Project: Question-Answering-System   Author: AdityaAS   File: indexer.py    MIT License 6 votes vote down vote up
def __init__(self, index):
        """Initialize the TokI object from a MongoDB or load from disk."""
        self.index = index
        if pymongo:
            # MongoDB backend: reuse the existing 'toki' collection, but only
            # if it actually contains documents.
            if 'toki' in self.index.mongo_db.collection_names():
                self.mongo_toki = self.index.mongo_db['toki']
                if self.mongo_toki.count() == 0:
                    raise IndexLoadError
            else:
                raise IndexLoadError
        else:
            # Load into memory (not suitable for large corpora!)
            try:
                with open(self.index.base_fname + '.toki', mode='rb') as f:
                    self.toki = pickle.load(f)
                # An empty/falsy index is treated the same as a missing one.
                if not self.toki:
                    raise IndexLoadError
            except (IOError, pickle.UnpicklingError):
                raise IndexLoadError
Example 11
Project: cloud-regionsrv-client   Author: SUSE-Enceladus   File: registerutils.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def get_smt_from_store(smt_store_file_path):
    """Return the SMT instance unpickled from *smt_store_file_path*.

    Returns None when the file does not exist or its pickle payload is
    corrupt.
    """
    if not os.path.exists(smt_store_file_path):
        return None

    with open(smt_store_file_path, 'rb') as stored:
        try:
            return pickle.Unpickler(stored).load()
        except pickle.UnpicklingError:
            return None


# ---------------------------------------------------------------------------- 
Example 12
Project: angr   Author: angr   File: common.py    BSD 2-Clause "Simplified" License 6 votes vote down vote up
def do_trace(proj, test_name, input_data, **kwargs):
    """
    trace, magic, crash_mode, crash_addr = load_cached_trace(proj, "test_blurble")
    """
    # Cache file name encodes the test, target binary and architecture.
    fname = os.path.join(bin_location, 'tests_data', 'runner_traces', '%s_%s_%s.p' % (test_name, os.path.basename(proj.filename), proj.arch.name))

    if os.path.isfile(fname):
        try:
            with open(fname, 'rb') as f:
                r = pickle.load(f)
                # Only honour the cache when it is a (payload, version) pair
                # matching the current trace format version.
                if type(r) is tuple and len(r) == 2 and r[1] == TRACE_VERSION:
                    return r[0]
        except (pickle.UnpicklingError, UnicodeDecodeError):
            print("Can't unpickle trace - rerunning")

    if tracer is None:
        raise Exception("Tracer is not installed and cached data is not present - cannot run test")

    # No usable cache: run the target under QEMU and persist the new trace.
    runner = tracer.QEMURunner(project=proj, input=input_data, **kwargs)
    r = (runner.trace, runner.magic, runner.crash_mode, runner.crash_addr)
    with open(fname, 'wb') as f:
        pickle.dump((r, TRACE_VERSION), f, -1)
    return r
Example 13
Project: kipoiseq   Author: kipoi   File: splicing.py    MIT License 6 votes vote down vote up
def __init__(self,
                 gtf_file,
                 fasta_file,
                 intron5prime_len=100,
                 intron3prime_len=100,
                 transform=None,
                 **kwargs):
        """Load exon annotations and open the FASTA reference.

        ``gtf_file`` is first treated as a pickle of precomputed exons; on
        failure the exons are generated from the GTF with the given intron
        overhangs. Extra kwargs are forwarded to ``generate_exons``.
        """
        try:
            with open(gtf_file, 'rb') as f:
                self.exons = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError, ModuleNotFoundError):
            self.exons = generate_exons(gtf_file=gtf_file,
                                        overhang=(intron5prime_len, intron3prime_len),
                                        **kwargs)
        import six
        if isinstance(fasta_file, six.string_types):
            # A path was given: open it.
            fasta_file = Fasta(fasta_file, as_raw=False)
        # fix: the original assigned a local `fasta` that was left unbound
        # (NameError) when fasta_file was not a string; pass the object through.
        self.fasta = fasta_file
        self.transform = transform
Example 14
Project: eve-metrics   Author: ccpgames   File: utils.py    MIT License 6 votes vote down vote up
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    """Verify, decrypt, optionally decompress and unpickle *data*.

    Returns the deserialized object, or None when the payload is malformed
    or its HMAC signature does not match.

    NOTE(review): Python 2 era code -- hashlib/hmac are fed str directly;
    confirm before reusing under Python 3.
    """
    if not ':' in data:
        return None
    if not hash_key:
        # Derive a hash key from the encryption key when none is supplied.
        hash_key = hashlib.sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    actual_signature = hmac.new(hash_key, encrypted_data).hexdigest()
    if not compare(signature, actual_signature):
        return None
    key = pad(encryption_key[:32])
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    # First 16 bytes are the AES IV; the remainder is the ciphertext.
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = cipher.decrypt(encrypted_data)
        data = data.rstrip(' ')
        if compression_level:
            data = zlib.decompress(data)
        # pickle.loads is acceptable here only because the payload was
        # HMAC-authenticated above.
        return pickle.loads(data)
    except (TypeError, pickle.UnpicklingError):
        return None

### compute constant CTOKENS 
Example 15
Project: ultimate-tic-tac-toe   Author: stoimenoff   File: onlineplayer.py    MIT License 6 votes vote down vote up
def __connect(self, macroboard):
        """Send *macroboard* to the remote player and return its move.

        Returns None if the request was cancelled while waiting.

        Raises:
            BadResponseError: when the response cannot be unpickled or is
                not a valid answer for this board.
        """
        self.__socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.__socket.settimeout(DEFAULT_CLIENT_TIMEOUT)
        with self.__socket:
            self.__socket.connect((self.host, self.port))
            self.__socket.sendall(self.__get_data(macroboard))
            data = None
            # Poll until data arrives or the request is cancelled; recv
            # times out periodically so cancellation is noticed.
            while not self.__cancelled and not data:
                try:
                    # print('Receive not cancelled')
                    data = self.__socket.recv(BYTES_LENGTH)
                except socket.timeout:
                    continue
            if self.__cancelled:
                return
            print('Received', repr(data), ' Size:', len(data))
            try:
                unpickled_data = pickle.loads(data)
            except (pickle.UnpicklingError, EOFError):
                raise BadResponseError('Response unpickling failed.')
            if self.__is_not_valid(unpickled_data, macroboard):
                raise BadResponseError('Response object is not valid.')
            # fix: reuse the already-deserialized object instead of calling
            # pickle.loads(data) a second time.
            self.name, move = unpickled_data
        return move
Example 16
Project: geofire-python   Author: ininex   File: dill.py    MIT License 6 votes vote down vote up
def pickles(obj,exact=False,safe=False,**kwds):
    """quick check if object pickles with dill"""
    # safe=True treats ANY exception as "does not pickle"; otherwise only
    # the usual (un)pickling failure types count.
    if safe: exceptions = (Exception,) # RuntimeError, ValueError
    else:
        exceptions = (TypeError, AssertionError, PicklingError, UnpicklingError)
    try:
        # Round-trip the object (copy = dumps + loads in dill).
        pik = copy(obj, **kwds)
        try:
            # Array-like objects: compare element-wise via .all().
            result = bool(pik.all() == obj.all())
        except AttributeError:
            result = pik == obj
        if result: return True
        if not exact:
            # Inexact mode: a matching type is good enough.
            result = type(pik) == type(obj)
            if result: return result
            # class instances might have been dumped with byref=False
            return repr(type(pik)) == repr(type(obj)) #XXX: InstanceType?
        return False
    except exceptions:
        return False
Example 17
Project: spc   Author: whbrewer   File: utils.py    MIT License 6 votes vote down vote up
def secure_loads(data, encryption_key, hash_key=None, compression_level=None):
    """Verify, decrypt, optionally decompress and unpickle *data*.

    Returns the deserialized object, or None on a malformed payload or a
    signature mismatch.

    NOTE(review): Python 2 era code -- hashlib/hmac are fed str directly;
    confirm before reusing under Python 3.
    """
    if not ':' in data:
        return None
    if not hash_key:
        # Derive a hash key from the encryption key when none is supplied.
        hash_key = hashlib.sha1(encryption_key).hexdigest()
    signature, encrypted_data = data.split(':', 1)
    actual_signature = hmac.new(hash_key, encrypted_data).hexdigest()
    if not compare(signature, actual_signature):
        return None
    key = pad(encryption_key[:32])
    encrypted_data = base64.urlsafe_b64decode(encrypted_data)
    # First 16 bytes are the AES IV; the remainder is the ciphertext.
    IV, encrypted_data = encrypted_data[:16], encrypted_data[16:]
    cipher, _ = AES_new(key, IV=IV)
    try:
        data = cipher.decrypt(encrypted_data)
        data = data.rstrip(' ')
        if compression_level:
            data = zlib.decompress(data)
        # Safe-ish only because the payload was HMAC-authenticated above.
        return pickle.loads(data)
    except (TypeError, pickle.UnpicklingError):
        return None

### compute constant CTOKENS 
Example 18
Project: models   Author: kipoi   File: dataloader.py    MIT License 5 votes vote down vote up
def __init__(self,
                 gtf_file,
                 fasta_file,
                 split_seq=True,
                 encode=True,
                 exon_cut_l=0,
                 exon_cut_r=0,
                 acceptor_intron_cut=6,
                 donor_intron_cut=6,
                 acceptor_intron_len=50,
                 acceptor_exon_len=3,
                 donor_exon_len=5,
                 donor_intron_len=13,
                 maxExonLength=200,
                 **kwargs
                 ):
        """Load exon annotations (a pickled ``gtf_file`` when possible, else
        a generator over the raw GTF) and open the FASTA reference.

        Remaining keyword arguments are forwarded to ``GenerateExons``.
        """
        try:
            with open(gtf_file, 'rb') as f:
                self.exons = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError):
            # Not a pickle (or missing): parse the GTF lazily instead.
            self.exonGenerator = self.GenerateExons(gtf_file, **kwargs)
        import six
        if isinstance(fasta_file, six.string_types):
            # A path was given: open it.
            fasta_file = Fasta(fasta_file, as_raw=False)
        # fix: the original assigned a local `fasta` that was left unbound
        # (NameError) when fasta_file was not a string; pass the object through.
        self.fasta = fasta_file

        self.encode = encode
        self.split_seq = split_seq
        self.exon_cut_l = exon_cut_l
        self.exon_cut_r = exon_cut_r
        self.acceptor_intron_cut = acceptor_intron_cut
        self.donor_intron_cut = donor_intron_cut
        self.acceptor_intron_len = acceptor_intron_len
        self.acceptor_exon_len = acceptor_exon_len
        self.donor_exon_len = donor_exon_len
        self.donor_intron_len = donor_intron_len
        self.maxExonLength = maxExonLength
Example 19
Project: models   Author: kipoi   File: dataloader.py    MIT License 5 votes vote down vote up
def __init__(self,
                 gtf_file,
                 fasta_file,
                 split_seq=True,
                 encode=True,
                 exon_cut_l=0,
                 exon_cut_r=0,
                 acceptor_intron_cut=6,
                 donor_intron_cut=6,
                 acceptor_intron_len=50,
                 acceptor_exon_len=3,
                 donor_exon_len=5,
                 donor_intron_len=13,
                 maxExonLength=200,
                 **kwargs
                 ):
        """Load exon annotations (a pickled ``gtf_file`` when possible, else
        a generator over the raw GTF) and open the FASTA reference.

        Remaining keyword arguments are forwarded to ``GenerateExons``.
        """
        try:
            with open(gtf_file, 'rb') as f:
                self.exons = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError):
            # Not a pickle (or missing): parse the GTF lazily instead.
            self.exonGenerator = self.GenerateExons(gtf_file, **kwargs)
        import six
        if isinstance(fasta_file, six.string_types):
            # A path was given: open it.
            fasta_file = Fasta(fasta_file, as_raw=False)
        # fix: the original assigned a local `fasta` that was left unbound
        # (NameError) when fasta_file was not a string; pass the object through.
        self.fasta = fasta_file

        self.encode = encode
        self.split_seq = split_seq
        self.exon_cut_l = exon_cut_l
        self.exon_cut_r = exon_cut_r
        self.acceptor_intron_cut = acceptor_intron_cut
        self.donor_intron_cut = donor_intron_cut
        self.acceptor_intron_len = acceptor_intron_len
        self.acceptor_exon_len = acceptor_exon_len
        self.donor_exon_len = donor_exon_len
        self.donor_intron_len = donor_intron_len
        self.maxExonLength = maxExonLength
Example 20
Project: models   Author: kipoi   File: dataloader.py    MIT License 5 votes vote down vote up
def __init__(self,
                 gtf_file,
                 fasta_file,
                 split_seq=True,
                 encode=True,
                 exon_cut_l=0,
                 exon_cut_r=0,
                 acceptor_intron_cut=6,
                 donor_intron_cut=6,
                 acceptor_intron_len=50,
                 acceptor_exon_len=3,
                 donor_exon_len=5,
                 donor_intron_len=13,
                 maxExonLength=200,
                 **kwargs
                 ):
        """Load exon annotations (a pickled ``gtf_file`` when possible, else
        a generator over the raw GTF) and open the FASTA reference.

        Remaining keyword arguments are forwarded to ``GenerateExons``.
        """
        try:
            with open(gtf_file, 'rb') as f:
                self.exons = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError):
            # Not a pickle (or missing): parse the GTF lazily instead.
            self.exonGenerator = self.GenerateExons(gtf_file, **kwargs)
        import six
        if isinstance(fasta_file, six.string_types):
            # A path was given: open it.
            fasta_file = Fasta(fasta_file, as_raw=False)
        # fix: the original assigned a local `fasta` that was left unbound
        # (NameError) when fasta_file was not a string; pass the object through.
        self.fasta = fasta_file

        self.encode = encode
        self.split_seq = split_seq
        self.exon_cut_l = exon_cut_l
        self.exon_cut_r = exon_cut_r
        self.acceptor_intron_cut = acceptor_intron_cut
        self.donor_intron_cut = donor_intron_cut
        self.acceptor_intron_len = acceptor_intron_len
        self.acceptor_exon_len = acceptor_exon_len
        self.donor_exon_len = donor_exon_len
        self.donor_intron_len = donor_intron_len
        self.maxExonLength = maxExonLength
Example 21
Project: models   Author: kipoi   File: dataloader.py    MIT License 5 votes vote down vote up
def __init__(self,
                 gtf_file,
                 fasta_file,
                 split_seq=True,
                 encode=True,
                 exon_cut_l=0,
                 exon_cut_r=0,
                 acceptor_intron_cut=6,
                 donor_intron_cut=6,
                 acceptor_intron_len=50,
                 acceptor_exon_len=3,
                 donor_exon_len=5,
                 donor_intron_len=13,
                 maxExonLength=200,
                 **kwargs
                 ):
        """Load exon annotations (a pickled ``gtf_file`` when possible, else
        a generator over the raw GTF) and open the FASTA reference.

        Remaining keyword arguments are forwarded to ``GenerateExons``.
        """
        try:
            with open(gtf_file, 'rb') as f:
                self.exons = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError):
            # Not a pickle (or missing): parse the GTF lazily instead.
            self.exonGenerator = self.GenerateExons(gtf_file, **kwargs)
        import six
        if isinstance(fasta_file, six.string_types):
            # A path was given: open it.
            fasta_file = Fasta(fasta_file, as_raw=False)
        # fix: the original assigned a local `fasta` that was left unbound
        # (NameError) when fasta_file was not a string; pass the object through.
        self.fasta = fasta_file

        self.encode = encode
        self.split_seq = split_seq
        self.exon_cut_l = exon_cut_l
        self.exon_cut_r = exon_cut_r
        self.acceptor_intron_cut = acceptor_intron_cut
        self.donor_intron_cut = donor_intron_cut
        self.acceptor_intron_len = acceptor_intron_len
        self.acceptor_exon_len = acceptor_exon_len
        self.donor_exon_len = donor_exon_len
        self.donor_intron_len = donor_intron_len
        self.maxExonLength = maxExonLength
Example 22
Project: models   Author: kipoi   File: dataloader.py    MIT License 5 votes vote down vote up
def __init__(self,
                 gtf_file,
                 fasta_file,
                 split_seq=True,
                 encode=True,
                 exon_cut_l=0,
                 exon_cut_r=0,
                 acceptor_intron_cut=6,
                 donor_intron_cut=6,
                 acceptor_intron_len=50,
                 acceptor_exon_len=3,
                 donor_exon_len=5,
                 donor_intron_len=13,
                 maxExonLength=200,
                 **kwargs
                 ):
        """Load exon annotations (a pickled ``gtf_file`` when possible, else
        a generator over the raw GTF) and open the FASTA reference.

        Remaining keyword arguments are forwarded to ``GenerateExons``.
        """
        try:
            with open(gtf_file, 'rb') as f:
                self.exons = pickle.load(f)
        except (FileNotFoundError, pickle.UnpicklingError):
            # Not a pickle (or missing): parse the GTF lazily instead.
            self.exonGenerator = self.GenerateExons(gtf_file, **kwargs)
        import six
        if isinstance(fasta_file, six.string_types):
            # A path was given: open it.
            fasta_file = Fasta(fasta_file, as_raw=False)
        # fix: the original assigned a local `fasta` that was left unbound
        # (NameError) when fasta_file was not a string; pass the object through.
        self.fasta = fasta_file

        self.encode = encode
        self.split_seq = split_seq
        self.exon_cut_l = exon_cut_l
        self.exon_cut_r = exon_cut_r
        self.acceptor_intron_cut = acceptor_intron_cut
        self.donor_intron_cut = donor_intron_cut
        self.acceptor_intron_len = acceptor_intron_len
        self.acceptor_exon_len = acceptor_exon_len
        self.donor_exon_len = donor_exon_len
        self.donor_intron_len = donor_intron_len
        self.maxExonLength = maxExonLength
Example 23
Project: Pikax   Author: Redcxx   File: common.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def load_from_local(file_path):
    """Unpickle and return the object stored at ``file_path``.

    When the pickle data is corrupt, a message is written to stdout, the
    file is removed, and None is returned.
    """
    try:
        with open(file_path, 'rb') as fh:
            loaded = pickle.load(fh)
    except pickle.UnpicklingError as e:
        sys.stdout.write(texts.get('FILE_CORRUPTED').format(file=file_path, msg=str(e)))
        remove_local_file(file_path)
        return None
    return loaded
Example 24
Project: esys-pbi   Author: fsxfreak   File: file_methods.py    MIT License 5 votes vote down vote up
def load_object(file_path):
    """Read *file_path* fully, then unpickle and return its contents.

    Raises:
        ValueError: (chained from UnpicklingError) on corrupt pickle data.
    """
    full_path = os.path.expanduser(file_path)
    # reading to string and loads is 2.5x faster that using the file handle and load.
    with open(full_path, 'rb') as fh:
        raw = fh.read()
    try:
        return pickle.loads(raw, encoding='bytes')
    except pickle.UnpicklingError as err:
        raise ValueError from err
Example 25
Project: gipc   Author: jgehrcke   File: gipc.py    MIT License 5 votes vote down vote up
def get(self, timeout=None):
        """Receive, decode and return data from the pipe. Block
        gevent-cooperatively until data is available or timeout expires. The
        default decoder is ``pickle.loads``.

        :arg timeout: ``None`` (default) or a ``gevent.Timeout``
            instance. The timeout must be started to take effect and is
            canceled when the first byte of a new message arrives (i.e.
            providing a timeout does not guarantee that the method completes
            within the timeout interval).

        :returns: a Python object.

        Raises:
            - :exc:`gevent.Timeout` (if provided)
            - :exc:`GIPCError`
            - :exc:`GIPCClosed`
            - :exc:`pickle.UnpicklingError`

        Recommended usage for silent timeout control::

            with gevent.Timeout(TIME_SECONDS, False) as t:
                reader.get(timeout=t)

        .. warning::

            The timeout control is currently not available on Windows,
            because Windows can't apply select() to pipe handles.
            An ``OSError`` is expected to be raised in case you set a
            timeout.
        """
        self._validate()
        with self._lock:
            if timeout:
                # Wait for ready-to-read event.
                h = gevent.get_hub()
                h.wait(h.loop.io(self._fd, 1))
                timeout.cancel()
            # Message framing: a 4-byte big-endian length prefix, then the
            # payload of exactly that many bytes.
            msize, = struct.unpack("!i", self._recv_in_buffer(4).getvalue())
            bindata = self._recv_in_buffer(msize).getvalue()
        return self._decoder(bindata)
Example 26
Project: NiujiaoDebugger   Author: MrSrc   File: test_concurrent_futures.py    GNU General Public License v3.0 5 votes vote down vote up
def __reduce__(self):
        """Pickle hook: make unpickling of this object raise UnpicklingError.

        NOTE(review): _raise_error is defined elsewhere in the test module;
        presumably it raises the given exception type when invoked.
        """
        from pickle import UnpicklingError
        return _raise_error, (UnpicklingError, )
Example 27
Project: NiujiaoDebugger   Author: MrSrc   File: pickletester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_negative_32b_binbytes(self):
        # On 32-bit builds, a BINBYTES of 2**31 or more is refused
        # Hand-crafted protocol-3 pickle: BINBYTES opcode (B) with the
        # 4-byte length field set to 0xffffffff.
        dumped = b'\x80\x03B\xff\xff\xff\xffxyzq\x00.'
        self.check_unpickling_error((pickle.UnpicklingError, OverflowError),
                                    dumped)
Example 28
Project: NiujiaoDebugger   Author: MrSrc   File: pickletester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_negative_32b_binunicode(self):
        # On 32-bit builds, a BINUNICODE of 2**31 or more is refused
        # Hand-crafted protocol-3 pickle: BINUNICODE opcode (X) with the
        # 4-byte length field set to 0xffffffff.
        dumped = b'\x80\x03X\xff\xff\xff\xffxyzq\x00.'
        self.check_unpickling_error((pickle.UnpicklingError, OverflowError),
                                    dumped)
Example 29
Project: NiujiaoDebugger   Author: MrSrc   File: pickletester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_large_32b_binbytes8(self):
        # Protocol-4 BINBYTES8 opcode (\x8e) with an 8-byte length just over
        # 2**32; must be refused on 32-bit builds, not allocated.
        dumped = b'\x80\x04\x8e\4\0\0\0\1\0\0\0\xe2\x82\xac\x00.'
        self.check_unpickling_error((pickle.UnpicklingError, OverflowError),
                                    dumped)
Example 30
Project: NiujiaoDebugger   Author: MrSrc   File: pickletester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_large_32b_binunicode8(self):
        # Protocol-4 BINUNICODE8 opcode (\x8d) with an 8-byte length just
        # over 2**32; must be refused on 32-bit builds, not allocated.
        dumped = b'\x80\x04\x8d\4\0\0\0\1\0\0\0\xe2\x82\xac\x00.'
        self.check_unpickling_error((pickle.UnpicklingError, OverflowError),
                                    dumped)
Example 31
Project: NiujiaoDebugger   Author: MrSrc   File: pickletester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_newobj_not_class(self):
        # Issue 24552
        global SimpleNewObj
        save = SimpleNewObj
        o = SimpleNewObj.__new__(SimpleNewObj)
        b = self.dumps(o, 4)
        try:
            # Rebind the module-global name so unpickling's lookup finds a
            # non-class; loads must then fail cleanly, not crash.
            SimpleNewObj = 42
            self.assertRaises((TypeError, pickle.UnpicklingError), self.loads, b)
        finally:
            # Always restore the real class for the other tests.
            SimpleNewObj = save

    # Register a type with copyreg, with extension code extcode.  Pickle
    # an object of that type.  Check that the resulting pickle uses opcode
    # (EXT[124]) under proto 2, and not in proto 1. 
Example 32
Project: NiujiaoDebugger   Author: MrSrc   File: pickletester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_bad_init(self):
        # Test issue3664 (pickle can segfault from a badly initialized Pickler).
        # Override initialization without calling __init__() of the superclass.
        class BadPickler(self.Pickler):
            def __init__(self): pass

        class BadUnpickler(self.Unpickler):
            def __init__(self): pass

        # Despite the missing internal state, dump/load must raise the
        # documented exceptions rather than crash the interpreter.
        self.assertRaises(pickle.PicklingError, BadPickler().dump, 0)
        self.assertRaises(pickle.UnpicklingError, BadUnpickler().load)
Example 33
Project: NiujiaoDebugger   Author: MrSrc   File: pickletester.py    GNU General Public License v3.0 5 votes vote down vote up
def test_protocol0_is_ascii_only(self):
        """Protocol 0 is ASCII-only: non-ASCII strings must be rejected on
        both the pickling and the unpickling side."""
        non_ascii_str = "\N{EMPTY SET}"
        self.assertRaises(pickle.PicklingError, self.dumps, non_ascii_str, 0)
        # Hand-craft a protocol-0 PERSID record carrying UTF-8 bytes.
        pickled = pickle.PERSID + non_ascii_str.encode('utf-8') + b'\n.'
        self.assertRaises(pickle.UnpicklingError, self.loads, pickled)
Example 34
Project: NiujiaoDebugger   Author: MrSrc   File: rpc.py    GNU General Public License v3.0 5 votes vote down vote up
def pollmessage(self, wait):
        """Poll one packet from the connection and unpickle it.

        Returns None when no packet is available. On an undecodable packet
        the payload is dumped to stderr and the UnpicklingError re-raised.
        """
        packet = self.pollpacket(wait)
        if packet is None:
            return None
        try:
            message = pickle.loads(packet)
        except pickle.UnpicklingError:
            print("-----------------------", file=sys.__stderr__)
            print("cannot unpickle packet:", repr(packet), file=sys.__stderr__)
            traceback.print_stack(file=sys.__stderr__)
            print("-----------------------", file=sys.__stderr__)
            raise
        return message
Example 35
Project: diplomacy   Author: diplomacy   File: convoy_paths.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def get_convoy_paths_cache():
    """ Returns the current cache from disk """
    disk_convoy_paths = {}                  # Uses hash as key
    cache_convoy_paths = {}                 # Use map name as key

    # Load the internal cache first, then let the external cache extend it.
    # Entries are kept only when written by the same code version.
    for cache_path in (INTERNAL_CACHE_PATH, EXTERNAL_CACHE_PATH):
        if not os.path.exists(cache_path):
            continue
        try:
            # fix: use a context manager; the original leaked the file
            # handle via pickle.load(open(...)).
            with open(cache_path, 'rb') as cache_file:
                cache_data = pickle.load(cache_file)
            if cache_data.get('__version__', '') == __VERSION__:
                disk_convoy_paths.update(cache_data)
        except (pickle.UnpicklingError, EOFError):
            pass

    # Getting map name and file paths
    files_path = glob.glob(settings.PACKAGE_DIR + '/maps/*.map')
    for file_path in files_path:
        map_name = file_path.replace(settings.PACKAGE_DIR + '/maps/', '').replace('.map', '')
        map_hash = get_file_md5(file_path)
        if map_hash in disk_convoy_paths:
            # Expose the cached entry under both the map name and its path.
            cache_convoy_paths[map_name] = disk_convoy_paths[map_hash]
            cache_convoy_paths[file_path] = disk_convoy_paths[map_hash]

    # Returning
    return cache_convoy_paths
Example 36
Project: paramz   Author: sods   File: __init__.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def load(file_or_path):
    """
    Load a previously pickled model, using `m.pickle('path/to/file.pickle)'`

    :param file_or_path: path to the pickle file, or an already-open file object
    """
    from pickle import UnpicklingError
    # Probe for the Python 2 C-accelerated cPickle; its absence means we
    # are running on Python 3 and must use the stdlib pickle module.
    _python3 = True
    try:
        import cPickle as pickle
        _python3 = False
    except ImportError: #python3
        import pickle

    try:
        if _python3:
            strcl = str
            # latin1 lets Python 3 read pickles that were written by Python 2
            p3kw = dict(encoding='latin1')
            return _unpickle(file_or_path, pickle, strcl, p3kw)
        else:
            strcl = basestring
            p3kw = {}
            return _unpickle(file_or_path, pickle, strcl, p3kw)

    except UnpicklingError: # pragma: no coverage
        # Retry with the pure-Python pickle module; relevant on Python 2
        # when cPickle (selected above) chokes on this particular file.
        import pickle
        return _unpickle(file_or_path, pickle, strcl, p3kw)
Example 37
Project: pyontutils   Author: tgbugs   File: sheets.py    MIT License 5 votes vote down vote up
def get_oauth_service(api='sheets', version='v4', readonly=True, SCOPES=None):
    """Return an authenticated Google API service client.

    Credentials are unpickled from a per-user store file when present;
    otherwise (or when expired) they are refreshed or re-obtained via the
    installed-app OAuth console flow and pickled back to the store file.
    """
    if readonly:  # FIXME the division isn't so clean for drive ...
        _auth_var = 'google-api-store-file-readonly'
    else:
        _auth_var = 'google-api-store-file'

    # Resolve the credential-store path via the project's auth config object.
    store_file = auth.get_path(_auth_var)

    if store_file.exists():
        with open(store_file, 'rb') as f:
            try:
                creds = pickle.load(f)
            except pickle.UnpicklingError as e:
                # FIXME need better way to trace errors in a way
                # that won't leak secrets by default
                log.error(f'problem in file at path for {_auth_var}')
                raise e
    else:
        creds = None

    if not creds or not creds.valid:
        # the first time you run this you will need to use the --noauth_local_webserver args
        if creds and creds.expired and creds.refresh_token:
            # Expired but refreshable: renew silently, no user interaction.
            creds.refresh(Request())
        else:
            # No usable credentials: run the interactive console OAuth flow.
            creds_file = auth.get_path('google-api-creds-file')
            flow = InstalledAppFlow.from_client_secrets_file((creds_file).as_posix(), SCOPES)
            creds = flow.run_console()

        # Persist the new/refreshed credentials for the next invocation.
        with open(store_file, 'wb') as f:
            pickle.dump(creds, f)

    service = build(api, version, credentials=creds)
    return service
Example 38
Project: bob   Author: BobBuildTool   File: input.py    GNU General Public License v3.0 5 votes vote down vote up
def __generatePackages(self, nameFormatter, env, cacheKey, sandboxEnabled):
        """Calculate the package tree, with an on-disk pickle cache.

        The cache file begins with the raw ``cacheKey`` bytes that identify
        the configuration which produced the cached tree; the pickled tree
        follows.  Any key mismatch or read/unpickle failure falls through
        silently to a full recalculation.
        """
        # use separate caches with and without sandbox
        if sandboxEnabled:
            cacheName = ".bob-packages-sb.pickle"
        else:
            cacheName = ".bob-packages.pickle"

        # try to load the persisted packages
        try:
            with open(cacheName, "rb") as f:
                # Compare the stored key prefix before touching the pickle.
                persistedCacheKey = f.read(len(cacheKey))
                if cacheKey == persistedCacheKey:
                    tmp = PackageUnpickler(f, self.getRecipe, self.__plugins,
                                           nameFormatter).load()
                    return tmp.refDeref([], {}, None, nameFormatter)
        except (EOFError, OSError, pickle.UnpicklingError):
            pass

        # not cached -> calculate packages
        states = { n:s() for (n,s) in self.__states.items() }
        result = self.__rootRecipe.prepare(env, sandboxEnabled, states)[0]

        # save package tree for next invocation: write to a side file first,
        # then atomically replace the old cache so readers never see a
        # half-written pickle
        try:
            newCacheName = cacheName + ".new"
            with open(newCacheName, "wb") as f:
                f.write(cacheKey)
                PackagePickler(f, nameFormatter).dump(result)
            os.replace(newCacheName, cacheName)
        except OSError as e:
            print("Error saving internal state:", str(e), file=sys.stderr)

        return result.refDeref([], {}, None, nameFormatter)
Example 39
Project: bob   Author: BobBuildTool   File: input.py    GNU General Public License v3.0 5 votes vote down vote up
def persistent_load(self, pid):
        """Resolve a persistent id written by the matching package pickler."""
        tag, key = pid
        if tag == "recipe":
            return self.__recipeGetter(key)
        if tag == "pathfmt":
            return self.__pathFormatter
        raise pickle.UnpicklingError("unsupported object")
Example 40
Project: ironpython2   Author: IronLanguages   File: test_cPickle.py    Apache License 2.0 5 votes vote down vote up
def test_load_negative(self):
        """Feed cPickle.load malformed content; expect UnpicklingError."""
        # pickle vs. cPickle report different exceptions, even on CPython,
        # so only exercise the real cPickle implementation.
        if cPickle.__name__ != "cPickle":
            return
        filename = os.tempnam()
        for payload in ['\x02', "No"]:
            self.write_to_file(filename, content=payload)
            handle = open(filename)
            self.assertRaises(cPickle.UnpicklingError, cPickle.load, handle)
            handle.close()
Example 41
Project: Blockly-rduino-communication   Author: technologiescollege   File: _compatibility.py    GNU General Public License v3.0 5 votes vote down vote up
def load_string(self):
            """Handle the py2 STRING opcode: unquote, escape-decode, push."""
            raw = self.readline()[:-1]  # drop the trailing newline
            quoted = len(raw) >= 2 and raw[0] == raw[-1] and raw[0] in b'"\''
            if not quoted:
                raise pickle.UnpicklingError("the STRING opcode argument must be quoted")
            body = raw[1:-1]  # strip the outermost quotes
            self.append(self._decode_string(pickle.codecs.escape_decode(body)[0]))
Example 42
Project: Blockly-rduino-communication   Author: technologiescollege   File: _compatibility.py    GNU General Public License v3.0 5 votes vote down vote up
def load_binstring(self):
            """Handle the deprecated py2 BINSTRING opcode.

            BINSTRING stores a *signed* 32-bit little-endian byte count
            followed by that many raw bytes; a negative count indicates a
            corrupt or malicious pickle and is refused.
            """
            # Renamed from `len`, which shadowed the builtin and would break
            # any later use of len() in this scope.
            # NOTE(review): `pickle.struct` relies on this module's pickle
            # shim exposing a `struct` attribute -- confirm in context.
            size, = pickle.struct.unpack('<i', self.read(4))
            if size < 0:
                raise pickle.UnpicklingError("BINSTRING pickle has negative byte count")
            data = self.read(size)
            self.append(self._decode_string(data))
Example 43
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: test_concurrent_futures.py    GNU General Public License v2.0 5 votes vote down vote up
def __reduce__(self):
        """Pickle this object as a call that raises UnpicklingError on load."""
        from pickle import UnpicklingError
        return (_raise_error, (UnpicklingError,))
Example 44
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_negative_32b_binbytes(self):
        """On 32-bit builds, a BINBYTES of 2**31 or more is refused."""
        payload = b'\x80\x03B\xff\xff\xff\xffxyzq\x00.'
        expected = (pickle.UnpicklingError, OverflowError)
        self.check_unpickling_error(expected, payload)
Example 45
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_negative_32b_binunicode(self):
        """On 32-bit builds, a BINUNICODE of 2**31 or more is refused."""
        payload = b'\x80\x03X\xff\xff\xff\xffxyzq\x00.'
        expected = (pickle.UnpicklingError, OverflowError)
        self.check_unpickling_error(expected, payload)
Example 46
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_large_32b_binbytes8(self):
        """A BINBYTES8 length above 32 bits must be refused on 32-bit builds."""
        payload = b'\x80\x04\x8e\4\0\0\0\1\0\0\0\xe2\x82\xac\x00.'
        expected = (pickle.UnpicklingError, OverflowError)
        self.check_unpickling_error(expected, payload)
Example 47
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_large_32b_binunicode8(self):
        """A BINUNICODE8 length above 32 bits must be refused on 32-bit builds."""
        payload = b'\x80\x04\x8d\4\0\0\0\1\0\0\0\xe2\x82\xac\x00.'
        expected = (pickle.UnpicklingError, OverflowError)
        self.check_unpickling_error(expected, payload)
Example 48
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_newobj_not_class(self):
        """NEWOBJ must fail cleanly when the pickled class has been replaced
        by a non-class before loading (issue 24552)."""
        global SimpleNewObj
        original_cls = SimpleNewObj
        instance = SimpleNewObj.__new__(SimpleNewObj)
        payload = self.dumps(instance, 4)
        try:
            # Swap the global class for a non-class, then try to load.
            SimpleNewObj = 42
            self.assertRaises((TypeError, pickle.UnpicklingError), self.loads, payload)
        finally:
            SimpleNewObj = original_cls

    # Register a type with copyreg, with extension code extcode.  Pickle
    # an object of that type.  Check that the resulting pickle uses opcode
    # (EXT[124]) under proto 2, and not in proto 1.
Example 49
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_incomplete_input(self):
        """A truncated pickle stream must raise instead of hanging."""
        stream = io.BytesIO(b"X''.")
        expected = (EOFError, struct.error, pickle.UnpicklingError)
        self.assertRaises(expected, self.load, stream)
Example 50
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_bad_init(self):
        """Issue 3664: a (un)pickler whose __init__ never ran must raise,
        not segfault."""
        # Override initialization without calling __init__() of the
        # superclass, leaving the object in an uninitialized state.
        class UninitializedPickler(self.Pickler):
            def __init__(self): pass

        class UninitializedUnpickler(self.Unpickler):
            def __init__(self): pass

        self.assertRaises(pickle.PicklingError, UninitializedPickler().dump, 0)
        self.assertRaises(pickle.UnpicklingError, UninitializedUnpickler().load)
Example 51
Project: cqp-sdk-for-py37-native   Author: crud-boy   File: pickletester.py    GNU General Public License v2.0 5 votes vote down vote up
def test_protocol0_is_ascii_only(self):
        """Protocol 0 is an ASCII protocol: non-ASCII text can neither be
        dumped with it nor smuggled in through a PERSID opcode."""
        non_ascii = "\N{EMPTY SET}"
        self.assertRaises(pickle.PicklingError, self.dumps, non_ascii, 0)
        bogus = pickle.PERSID + non_ascii.encode('utf-8') + b'\n.'
        self.assertRaises(pickle.UnpicklingError, self.loads, bogus)
Example 52
Project: aws-lambda-runtime-pypy   Author: uscheller   File: rpc.py    Apache License 2.0 5 votes vote down vote up
def pollmessage(self, wait):
        """Poll one packet and unpickle it into a message.

        Returns None when no packet is available; logs diagnostics to the
        real stderr and re-raises when the packet cannot be unpickled.
        """
        packet = self.pollpacket(wait)
        if packet is None:
            return None
        try:
            return pickle.loads(packet)
        except pickle.UnpicklingError:
            banner = "-----------------------"
            print(banner, file=sys.__stderr__)
            print("cannot unpickle packet:", repr(packet), file=sys.__stderr__)
            traceback.print_stack(file=sys.__stderr__)
            print(banner, file=sys.__stderr__)
            raise
Example 53
Project: pylivetrader   Author: alpacahq   File: __init__.py    Apache License 2.0 5 votes vote down vote up
def load(self):
        """Read and return the pickled state stored at self.path.

        Raises ValueError when the file exists but cannot be unpickled.
        """
        with open(self.path, 'rb') as state_file:
            try:
                return pickle.load(state_file)
            except (pickle.UnpicklingError, IndexError):
                raise ValueError("Corrupt state file: {}".format(self.path))
Example 54
Project: pylivetrader   Author: alpacahq   File: __init__.py    Apache License 2.0 5 votes vote down vote up
def load(self):
        """Fetch and unpickle the persisted state blob from redis.

        Raises ValueError when the stored bytes are not a valid pickle.
        """
        raw = self.redis.get(self.REDIS_STATE_KEY)
        try:
            return pickle.loads(raw)
        except pickle.UnpicklingError:
            raise ValueError("Corrupt state file in redis")
Example 55
Project: scTE   Author: jphe   File: utils.py    MIT License 5 votes vote down vote up
def glload(filename):
    """
    **Purpose**
        Load a glbase binary file
        (Actually a Python pickle)

    **Arguments**
        filename (Required)
            the filename of the glbase binary file to load.

    **Returns**
        The glbase object previously saved as a binary file
    """
    assert os.path.exists(os.path.realpath(filename)), "File '%s' not found" % filename

    try:
        # 'with' closes the handle even when unpickling fails; the original
        # leaked the open file object on the error path.
        with open(os.path.realpath(filename), "rb") as oh:
            newl = pickle.load(oh)
    except pickle.UnpicklingError:
        raise BadBinaryFileFormatError(filename)

    # Recalculate the _optimiseData for old lists, and new features
    try:
        if newl.qkeyfind:
            pass
        if "loc" in list(newl.keys()) or "tss_loc" in list(newl.keys()): # buckets are only present if a loc key is available.
            if newl.buckets: # added in 0.381, only in objects with tss_loc or loc key.
                pass
    except Exception:
        config.log.warning("Old glb format, will rebuild buckets and/or qkeyfind, consider resaving")
        newl._optimiseData()

    # Log a summary; expression-like objects also report their conditions.
    try:
        cons = len(newl._conditions) # expression-like object
        config.log.info("Loaded '%s' binary file with %s items, %s conditions" % (filename, len(newl), cons))
    except AttributeError:
        config.log.info("Loaded '%s' binary file with %s items" % (filename, len(newl)))
    return newl
Example 56
Project: Question-Answering-System   Author: AdityaAS   File: indexer.py    MIT License 5 votes vote down vote up
def load_tokc(self):
        """Load the pickled token table from '<base_fname>.tokc'.

        Raises IndexLoadError when the file is unreadable, corrupt, or
        holds an empty table.
        """
        tokc_path = self.base_fname + '.tokc'
        try:
            with open(tokc_path, mode='rb') as tokc_file:
                self.tokc = pickle.load(tokc_file)
            if not self.tokc:
                raise IndexLoadError
        except (IOError, pickle.UnpicklingError):
            raise IndexLoadError
Example 57
Project: Connector   Author: ekorneechev   File: properties.py    GNU General Public License v2.0 5 votes vote down vote up
def loadFromFile(fileName, window = None):
    """Load saved parameters from a file.

    (Original docstring: "Загрузка сохраненных параметров из файла")

    Returns the unpickled object.  For the program's own default.conf a
    missing/corrupt file falls back to DEFAULT (regenerating the file);
    for saved connections an error dialog is shown and None is returned.
    """
    try:
        # 'with' guarantees the handle is closed even when unpickling
        # fails; the original leaked the file object on every error path.
        with open(WORKFOLDER + fileName, 'rb') as dbfile:
            return pickle.load(dbfile)
    except FileNotFoundError:
        if fileName.find('default.conf') != -1:  # loading the program's own settings
            # On failure, regenerate the file with default values.
            log.warning ("Файл с настройками по умолчанию (default.conf) не найден, сгенерирован новый!")
            saveInFile(fileName, DEFAULT)
            return DEFAULT
        else:  # loading one of the saved connection profiles
            dialog = Gtk.MessageDialog(window, 0, Gtk.MessageType.ERROR, Gtk.ButtonsType.OK,
                    "Файл " + fileName + "\nc сохраненными настройками не найден")
            dialog.run()
            dialog.destroy()
            log.exception("Файл %s c сохраненными настройками не найден! Подробнее:", fileName)
            return None
    except (pickle.UnpicklingError, EOFError):
        dialog = Gtk.MessageDialog(window, 0, Gtk.MessageType.ERROR, Gtk.ButtonsType.OK,
                 "Файл %s\nимеет неверный формат" % fileName.replace("tmp_",""))
        dialog.run()
        dialog.destroy()
        log.exception("Файл %s имеет неверный формат! Подробнее:", fileName.replace("tmp_",""))
        if fileName.find('default.conf') != -1:
            saveInFile(fileName, DEFAULT)
            return DEFAULT
        return None
Example 58
Project: Connector   Author: ekorneechev   File: properties.py    GNU General Public License v2.0 5 votes vote down vote up
def importFromFile(fileName, window = None):
    """Import connection parameters from a .ctor file.

    (Original docstring: "Импорт параметров из файла .ctor")

    Returns the unpickled object, or None (after showing an error dialog)
    when the file is not a valid pickle.
    """
    try:
        # 'with' closes the file even when unpickling fails; the original
        # leaked the handle on the error path.
        with open(fileName, 'rb') as dbfile:
            return pickle.load(dbfile)
    except (pickle.UnpicklingError, EOFError):
        dialog = Gtk.MessageDialog(window, 0, Gtk.MessageType.ERROR, Gtk.ButtonsType.OK,
                 "Файл " + fileName + "\nимеет неверный формат")
        dialog.run()
        dialog.destroy()
        log.exception("Файл %s имеет неверный формат! Подробнее:", fileName)
        return None
Example 59
Project: avendesora   Author: KenKundert   File: files.py    GNU General Public License v3.0 5 votes vote down vote up
def read_manifests(self):  # {{{2
        """Load, decrypt and unpickle the cached manifests; build name_index.

        No-op when the index is already populated.  A corrupt manifest
        cache is reported and deleted so it can be regenerated.
        """
        if self.name_index:
            return
        cache_dir = get_setting('cache_dir')
        manifests_path = cache_dir / MANIFESTS_FILENAME
        try:
            encrypted = manifests_path.read_bytes()

            # The cache is Fernet-encrypted with a key derived from the
            # SHA-256 digest of the configured user key.
            user_key = get_setting('user_key')
            if not user_key:
                raise Error('no user key.')
            key = base64.urlsafe_b64encode(sha256(user_key.encode('ascii')).digest())
            fernet = Fernet(key)
            contents = fernet.decrypt(encrypted)

            try:
                # PICKLE_ARGS is a project-level constant; presumably it pins
                # unpickling options -- confirm at its definition.
                cache = pickle.loads(contents, **PICKLE_ARGS)
                self.name_manifests = cache['names']
                self.url_manifests = cache['urls']
                self.title_manifests = cache['titles']

                # build the name_index by inverting the name_manifests
                self.name_index = {
                    h:n for n,l in self.name_manifests.items() for h in l
                }
            except (ValueError, pickle.UnpicklingError) as e:
                # Corrupt cache: warn and delete so it is rebuilt next run.
                warn('garbled manifest.', culprit=manifests_path, codicil=str(e))
                manifests_path.unlink()
            assert isinstance(self.name_index, dict)
        except OSErrors as e:
            # OSErrors is a project-defined exception tuple -- confirm scope.
            comment(os_error(e))
Example 60
Project: avendesora   Author: KenKundert   File: files.py    GNU General Public License v3.0 5 votes vote down vote up
def read_manifests(self):  # {{{2
        """Load, decrypt and unpickle the cached manifests; build name_index.

        Skips work when the index is already populated.  A garbled cache
        (failed decrypt/unpickle) is warned about and removed.
        """
        if self.name_index:
            return
        cache_dir = get_setting('cache_dir')
        manifests_path = cache_dir / MANIFESTS_FILENAME
        try:
            encrypted = manifests_path.read_bytes()

            # Derive the Fernet key from the SHA-256 of the user key.
            user_key = get_setting('user_key')
            if not user_key:
                raise Error('no user key.')
            key = base64.urlsafe_b64encode(sha256(user_key.encode('ascii')).digest())
            fernet = Fernet(key)
            contents = fernet.decrypt(encrypted)

            try:
                # PICKLE_ARGS is project-defined -- see its definition for
                # the exact unpickling options used here.
                cache = pickle.loads(contents, **PICKLE_ARGS)
                self.name_manifests = cache['names']
                self.url_manifests = cache['urls']
                self.title_manifests = cache['titles']

                # build the name_index by inverting the name_manifests
                self.name_index = {
                    h:n for n,l in self.name_manifests.items() for h in l
                }
            except (ValueError, pickle.UnpicklingError) as e:
                # Corrupt cache: warn and delete so it is regenerated.
                warn('garbled manifest.', culprit=manifests_path, codicil=str(e))
                manifests_path.unlink()
            assert isinstance(self.name_index, dict)
        except OSErrors as e:
            # OSErrors is a project-defined exception tuple -- confirm scope.
            comment(os_error(e))
Example 61
Project: ultimate-tic-tac-toe   Author: stoimenoff   File: onlineplayer.py    MIT License 5 votes vote down vote up
def listen(self, on_move_request):
        """
        Waits a connection from a remote player via socket connection.
        If the socket connection fails will NOT handle exceptions.

        Raises BadRequestError, if the client request is not valid
        or the client is not the opponent.

        Blocking, can be cancelled from another thread by calling
        stop() on the object.

        When a valid request is made, on_move_request function is called
        with the name of the opponent and the board he sent.
        The function should return a move for the board.
        The return value of the function is not checked! If it is
        NOT a valid move, it WILL BE sent to the client.
        """
        self.__socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # Allow quick rebinding of the port after a previous run.
        self.__socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # self.__socket.settimeout(DEFAULT_SERVER_TIMEOUT)
        with self.__socket:
            self.__socket.bind((self.__host, self.__port))
            self.__socket.listen(1)
            # Blocks until a client connects (or the socket is closed).
            connection, address = self.__socket.accept()
            print('Connected by', address)
            # Once an opponent host is known, only accept that same host.
            if self.opponent is not None and self.opponent != address[0]:
                raise BadRequestError('Not opponent.')
            with connection:
                data = connection.recv(BYTES_LENGTH)
                try:
                    # NOTE(review): unpickling network data is unsafe against
                    # a malicious peer -- pickle can execute arbitrary code.
                    unpickled_data = pickle.loads(data)
                except (pickle.UnpicklingError, EOFError):
                    raise BadRequestError('Request unpickling failed.')
                if self.__is_not_valid(unpickled_data):
                    raise BadRequestError('Request object is not valid.')
                name, macroboard = unpickled_data
                self.opponent = address[0]
                # Ask the caller for our move and reply with (name, move).
                move = on_move_request(name, macroboard)
                connection.sendall(pickle.dumps((self.name, move)))
Example 62
Project: ultimate-tic-tac-toe   Author: stoimenoff   File: singleplayer.py    MIT License 5 votes vote down vote up
def loadGame(self):
        """Ask the user for a saved-game file, load it and show the game.

        Silently aborts when the dialog is cancelled or the file is
        missing, corrupt, or holds an invalid configuration.
        """
        chosen = QFileDialog().getOpenFileName(self, 'Load game')
        path = chosen[0]
        if not path:
            return  # dialog cancelled
        try:
            with open(path, 'rb') as save_file:
                saved_config = pickle.load(save_file)
        except (pickle.UnpicklingError, FileNotFoundError, EOFError):
            return  # unreadable or corrupt save file
        self.game = SinglePlayerGame()
        try:
            self.game.loadConfiguration(saved_config)
        except ValueError:
            return  # configuration rejected by the game
        self.showGame()
Example 63
Project: geofire-python   Author: ininex   File: dill.py    MIT License 5 votes vote down vote up
def _create_lock(locked, *args):
    from threading import Lock
    lock = Lock()
    if locked:
        if not lock.acquire(False):
            raise UnpicklingError("Cannot acquire lock")
    return lock

# thanks to matsjoyce for adding all the different file modes 
Example 64
Project: geofire-python   Author: ininex   File: dill.py    MIT License 5 votes vote down vote up
def _extend():
    """Copy every dill-registered dispatch entry onto the stock pickler."""
    # need to have pickle not choke on _main_module?  use is_dill(pickler)
    for pickled_type, dispatch_func in Pickler.dispatch.items():
        try:
            StockPickler.dispatch[pickled_type] = dispatch_func
        except: #TypeError, PicklingError, UnpicklingError
            log.info("skip: %s" % pickled_type)
Example 65
Project: sceneKit-wrapper-for-Pythonista   Author: pulbrich   File: solver.py    MIT License 5 votes vote down vote up
def __init__(self, puzzle_variant):
    """Set up the solver: load (or regenerate) the piece-shape table and
    the pre-computed empty-board solutions for the chosen puzzle variant.
    """
    # Total cube count across all reference pieces of this variant.
    self.puzzle_cubes = sum(len(nodes) for nodes in data.reference_pieces[puzzle_variant.value].values())

    try:
      with open('resources/'+puzzle_variant.name+'-shapes.P', 'rb') as fp:
        self.shapes_dict = pickle.load(fp)
        # Sanity check: each cached shape must match the node count of its
        # reference piece; a mismatch means the cache is stale.
        for name, nodes in data.reference_pieces[puzzle_variant.value].items():
          if len(list(self.shapes_dict[name].keys())[0]) != len(nodes):
            raise pickle.UnpicklingError
    except (FileNotFoundError, pickle.UnpicklingError, KeyError):
      # Cache missing or stale: regenerate all shapes (requires pyrr).
      if Vector3 is None:
        raise ImportError('Module Pyrr is needed to recreate the shapes.P file. Ether re-download the shapes.P file or run "pip install pyrr".')
      self.shapes_dict = {}
      for name, nodes in data.reference_pieces[puzzle_variant.value].items():
        self.shapes_dict[name] = self.generate_shapes(nodes)
      with open('resources/'+puzzle_variant.name+'-shapes.P', 'wb') as fp:
        pickle.dump(self.shapes_dict, fp)

    self.pieces = [Piece(name, self.shapes_dict[name]) for name in data.reference_pieces[puzzle_variant.value].keys()]

    try:
      with open('resources/'+puzzle_variant.name+'-solution.P', 'rb') as fs:
        self.empty_solutions = pickle.load(fs)
        # Same staleness check for the cached solutions.
        if self.empty_solutions:
          if len(data.reference_pieces[puzzle_variant.value][self.empty_solutions[0][0][0][0]]) != len(self.empty_solutions[0][0][0][1]):
            raise pickle.UnpicklingError
    except (FileNotFoundError, pickle.UnpicklingError, KeyError):
      # Cache missing or stale: recompute and persist the solutions.
      self.generate_solutions()
      self.empty_solutions = self.path
      with open('resources/'+puzzle_variant.name+'-solution.P', 'wb') as fs:
        pickle.dump(self.empty_solutions, fs)
    self.path = []
    self.solution = []
Example 66
Project: DeepGTA5-V2   Author: lyzMaster   File: dataset_clean.py    MIT License 5 votes vote down vote up
def clean(num):
    """Filter dataset number `num`: drop the frames whose indices appear in
    the matching clean-list pickle, count steering directions, and write
    the surviving frames to the output dataset via Targets.
    """
    file = open("dataset_clean_file/dataset"+str(num)+".pickle", "rb") # location of the clean-list file (indices of frames to delete)
    data_list = pickle.load(file)
    raw_dataset = gzip.open("d:/no_traffic/dataset"+str(num)+".pz", "rb")  # location of the dataset to clean; files are numbered in increasing order
    new_data_path = "dataset/dataset_example.pz"   # location where the cleaned dataset will be stored
    i = 1        # 1-based frame index in the raw dataset
    d = 0        # number of deleted frames
    nd = 0       # number of kept frames
    left = 0     # kept frames steering left  (< -0.10)
    right = 0    # kept frames steering right (> 0.10)
    while True:
        try:
            data_dict = pickle.load(raw_dataset)
            if i in data_list:
                print("=========delete "+str(i)+" successfully========")
                d = d+1
                i = i+1
                continue
            if float(data_dict["steering"])>0.10:
                right = right+1
            elif float(data_dict["steering"])<-0.10:
                left = left+1
            print("run on "+str(num)+" dataset, work on "+str(i))
            target = Targets(datasetPath=new_data_path)
            target.parse(data_dict)
            i = i+1
            nd = nd+1

        except (EOFError,pickle.UnpicklingError):
            # End of the gzip stream (or a truncated record) stops the loop.
            print("error/end")
            print("total: " + str(i))
            break
Example 67
Project: dace   Author: spcl   File: symbolic.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def persistent_load(self, pid):
        """Resolve a persistent id written by the matching sympy pickler."""
        type_tag, value = pid
        if type_tag != "DaCeSympyExpression":
            raise pickle.UnpicklingError("unsupported persistent object")
        return _sunpickle(value)
Example 68
Project: uarray   Author: Quansight-Labs   File: _backend.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def unpickle_function(mod_name, qname):
    """Restore a function from its (module, qualname) pickle payload.

    Raises pickle.UnpicklingError when the module cannot be imported or
    no longer exposes the attribute.
    """
    import importlib

    try:
        return getattr(importlib.import_module(mod_name), qname)
    except (ImportError, AttributeError) as exc:
        from pickle import UnpicklingError

        raise UnpicklingError from exc
Example 69
Project: uarray   Author: Quansight-Labs   File: _backend.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def pickle_function(func):
    """__reduce__-style helper: represent func as (module, qualname).

    Refuses to pickle a function that cannot be found again at its
    declared location (lambdas, locals, renamed objects).
    """
    mod_name = getattr(func, "__module__", None)
    qname = getattr(func, "__qualname__", None)

    try:
        roundtripped = unpickle_function(mod_name, qname)
    except pickle.UnpicklingError:
        roundtripped = None

    if roundtripped is not func:
        raise pickle.PicklingError(
            "Can't pickle {}: it's not the same object as {}".format(func, roundtripped)
        )

    return unpickle_function, (mod_name, qname)
Example 70
Project: lexpredict-contraxsuite   Author: LexPredict   File: redis.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def unpickle(value):
    """Best-effort unpickle: return the loaded object, or the raw value
    itself when it is not a valid pickle."""
    try:
        result = pickle.loads(value)
    except pickle.UnpicklingError:
        result = value
    return result
Example 71
Project: everyclass-server   Author: everyclass   File: session.py    Mozilla Public License 2.0 4 votes vote down vote up
def open_session(self, app, request):
        """
        @param app: Flask app
        @param request: Flask HTTP Request
        @summary: Sets the current session from the request's session cooke. This overrides the default
        Flask implementation, adding AES decryption of the client-side session cookie.
        """

        # Get the session cookie
        session_cookie = request.cookies.get(self.session_cookie_name)
        if not session_cookie:
            return self.session_class()

        # Get the crypto key
        crypto_key = app.config['SESSION_CRYPTO_KEY'] if 'SESSION_CRYPTO_KEY' in app.config else app.crypto_key

        # Split the session cookie : <z|u>.<base64 cipher text>.<base64 mac>.<base64 nonce>
        itup = session_cookie.split(".")
        if len(itup) != 4:
            return self.session_class()  # Session cookie not in the right format

        try:

            # Compressed data?
            if itup[0] == 'z':  # session cookie for compressed data starts with "z."
                is_compressed = True
            else:
                is_compressed = False

            # Decode the cookie parts from base64
            ciphertext = base64.b64decode(bytes(itup[1], 'utf-8'))
            mac = base64.b64decode(bytes(itup[2], 'utf-8'))
            nonce = base64.b64decode(bytes(itup[3], 'utf-8'))

            # Decrypt and authenticate (EAX verifies the MAC; a tampered
            # cookie raises ValueError and yields a fresh empty session)
            cipher = AES.new(crypto_key, AES.MODE_EAX, nonce)
            data = cipher.decrypt_and_verify(ciphertext, mac)

            # Convert back to a dict and pass that onto the session
            if is_compressed:
                data = zlib.decompress(data)
            session_dict = pickle.loads(data)

            return self.session_class(session_dict)

        # Catch both the C and pure-Python UnpicklingError variants; any
        # failure degrades to an empty session rather than an error page.
        except (ValueError, _pickle.UnpicklingError, pickle.UnpicklingError):
            return self.session_class()
Example 72
Project: att   Author: Centre-Alt-Rendiment-Esportiu   File: catalog.py    GNU General Public License v3.0 4 votes vote down vote up
def add_function_persistent(self,code,function):
        """ Store the code->function relationship to disk.

            Two pieces of information are needed for loading functions
            from disk -- the function pickle (which conveniently stores
            the module name, etc.) and the path to its module's directory.
            The latter is needed so that the function can be loaded no
            matter what the user's Python path is.
        """
        # add function to data in first writable catalog
        mode = 'c'  # create if doesn't exist, otherwise, use existing
        cat_dir = self.get_writable_dir()
        cat = get_catalog(cat_dir,mode)
        if cat is None:
            # Writable dir failed: fall back to the default catalog dir.
            cat_dir = default_dir()
            cat = get_catalog(cat_dir,mode)
        if cat is None:
            # Still failing: wipe the (presumably corrupt) default catalog
            # files and retry once more.
            cat_dir = default_dir()
            cat_file = catalog_path(cat_dir)
            warnings.warn('problems with default catalog -- removing')
            import glob
            files = glob.glob(cat_file+'*')
            for f in files:
                os.remove(f)
            cat = get_catalog(cat_dir,mode)
        if cat is None:
            raise ValueError('Failed to access a catalog for storing functions')
        # Prabhu was getting some corrupt catalog errors.  I'll put a try/except
        # to protect against this, but should really try and track down the issue.
        function_list = [function]
        try:
            # Prepend the new function to any previously stored ones.
            function_list = function_list + cat.get(code,[])
        except pickle.UnpicklingError:
            pass
        cat[code] = function_list
        # now add needed path information for loading function
        module = getmodule(function)
        try:
            # built in modules don't have the __file__ extension, so this
            # will fail.  Just pass in this case since path additions aren't
            # needed for built-in modules.
            mod_path,f = os.path.split(os.path.abspath(module.__file__))
            pkey = self.path_key(code)
            cat[pkey] = [mod_path] + cat.get(pkey,[])
        except:
            pass
        cat.close()
Example 73
Project: diplomacy   Author: diplomacy   File: convoy_paths.py    GNU Affero General Public License v3.0 4 votes vote down vote up
def add_to_cache(map_name, max_convoy_length=MAX_CONVOY_LENGTH):
    """ Lazily generates convoy paths for a map and adds them to the disk cache

        :param map_name: The name of the map (or a direct path to a .map file)
        :param max_convoy_length: The maximum convoy length permitted
        :return: The convoy_paths for that map, or None if the map file cannot be found
    """
    convoy_paths = {'__version__': __VERSION__}             # Uses hash as key
    external_convoy_paths = {'__version__': __VERSION__}    # Uses hash as key

    # Loading from internal cache first
    if os.path.exists(INTERNAL_CACHE_PATH):
        try:
            # 'with' closes the handle even if unpickling fails
            # (the original leaked the file object returned by open())
            with open(INTERNAL_CACHE_PATH, 'rb') as cache_file:
                cache_data = pickle.load(cache_file)
            if cache_data.get('__version__', '') == __VERSION__:
                convoy_paths.update(cache_data)
        except (pickle.UnpicklingError, EOFError):
            # Corrupted or truncated cache -- ignore and regenerate below
            pass

    # Loading external cache
    if os.path.exists(EXTERNAL_CACHE_PATH):
        try:
            with open(EXTERNAL_CACHE_PATH, 'rb') as cache_file:
                cache_data = pickle.load(cache_file)
            if cache_data.get('__version__', '') != __VERSION__:
                print('Upgrading cache from "%s" to "%s"' % (cache_data.get('__version__', '<N/A>'), __VERSION__))
            else:
                convoy_paths.update(cache_data)
                external_convoy_paths.update(cache_data)
        except (pickle.UnpicklingError, EOFError):
            pass

    # Getting map MD5 hash
    if os.path.exists(map_name):
        map_path = map_name
    else:
        map_path = os.path.join(settings.PACKAGE_DIR, 'maps', map_name + '.map')
    if not os.path.exists(map_path):
        return None
    map_hash = get_file_md5(map_path)

    # Generating and adding to alternate cache paths
    if map_hash not in convoy_paths:
        map_object = Map(map_name, use_cache=False)
        convoy_paths[map_hash] = _build_convoy_paths_cache(map_object, max_convoy_length)
        external_convoy_paths[map_hash] = convoy_paths[map_hash]
        os.makedirs(os.path.dirname(EXTERNAL_CACHE_PATH), exist_ok=True)
        # Close the output handle deterministically so the cache file is
        # fully flushed before anyone re-reads it
        with open(EXTERNAL_CACHE_PATH, 'wb') as cache_file:
            pickle.dump(external_convoy_paths, cache_file)

    # Returning
    return convoy_paths[map_hash]
Example 74
Project: fairseq   Author: pytorch   File: distributed_utils.py    MIT License 4 votes vote down vote up
def all_gather_list(data, group=None, max_size=16384):
    """Gather arbitrary picklable data from every worker into a list.

    Works like :func:`~torch.distributed.all_gather`, but for any Python
    object that pickle can serialize.

    Args:
        data (Any): data from the local worker to be gathered on other workers
        group (optional): group of the collective
        max_size (int, optional): maximum size of the data to be gathered
            across workers
    """
    rank = get_rank()
    world_size = get_world_size()
    total_size = max_size * world_size

    # Lazily (re)allocate buffers on the function object so they persist
    # across calls and only grow when a larger max_size is requested.
    needs_alloc = (
        not hasattr(all_gather_list, '_buffer')
        or all_gather_list._buffer.numel() < total_size
    )
    if needs_alloc:
        all_gather_list._buffer = torch.cuda.ByteTensor(total_size)
        all_gather_list._cpu_buffer = torch.ByteTensor(max_size).pin_memory()
    gpu_buffer = all_gather_list._buffer
    gpu_buffer.zero_()
    staging = all_gather_list._cpu_buffer

    encoded = pickle.dumps(data)
    payload_size = len(encoded)
    header_size = 4  # leading big-endian uint32 holding the payload length
    slot_used = header_size + payload_size
    if slot_used > max_size:
        raise ValueError('encoded data size ({}) exceeds max_size ({})'.format(slot_used, max_size))

    # Stage header + payload on the pinned CPU buffer, then copy into this
    # worker's slot of the shared GPU buffer.
    staging[:slot_used] = torch.ByteTensor(list(struct.pack(">I", payload_size) + encoded))
    offset = rank * max_size
    gpu_buffer[offset:offset + slot_used].copy_(staging[:slot_used])

    # Each worker wrote a disjoint zeroed slot, so a sum-reduce merges them.
    all_reduce(gpu_buffer, group=group)

    try:
        gathered = []
        for worker in range(world_size):
            slot = gpu_buffer[worker * max_size:(worker + 1) * max_size]
            length, = struct.unpack(">I", bytes(slot[:header_size].tolist()))
            if length > 0:
                gathered.append(pickle.loads(bytes(slot[header_size:header_size + length].tolist())))
        return gathered
    except pickle.UnpicklingError:
        raise Exception(
            'Unable to unpickle data from other workers. all_gather_list requires all '
            'workers to enter the function together, so this error usually indicates '
            'that the workers have fallen out of sync somehow. Workers can fall out of '
            'sync if one of them runs out of memory, or if there are other conditions '
            'in your training script that can cause one worker to finish an epoch '
            'while other workers are still iterating over their portions of the data.'
        )
Example 75
Project: stable-baselines   Author: Stable-Baselines-Team   File: save_util.py    MIT License 4 votes vote down vote up
def json_to_data(json_string, custom_objects=None):
    """
    Turn JSON serialization of class-parameters back into dictionary.

    :param json_string: (str) JSON serialization of the class-parameters
        that should be loaded.
    :param custom_objects: (dict) Dictionary of objects to replace
        upon loading. If a variable is present in this dictionary as a
        key, it will not be deserialized and the corresponding item
        will be used instead. Similar to custom_objects in
        `keras.models.load_model`. Useful when you have an object in
        file that can not be deserialized.
    :return: (dict) Loaded class parameters.
    :raises ValueError: If custom_objects is neither None nor a dict.
    :raises RuntimeError: If a cloudpickle-serialized item cannot be
        deserialized (chained to the original unpickling error).
    """
    if custom_objects is not None and not isinstance(custom_objects, dict):
        raise ValueError("custom_objects argument must be a dict or None")

    json_dict = json.loads(json_string)
    # This will be filled with deserialized data
    return_data = {}
    for data_key, data_item in json_dict.items():
        # Membership tests go directly against the dicts (no .keys() needed).
        if custom_objects is not None and data_key in custom_objects:
            # If item is provided in custom_objects, replace
            # the one from JSON with the one in custom_objects
            return_data[data_key] = custom_objects[data_key]
        elif isinstance(data_item, dict) and ":serialized:" in data_item:
            # A ":serialized:" key marks a base64-encoded cloudpickle payload.
            serialization = data_item[":serialized:"]
            # Try-except deserialization in case we run into
            # errors. If so, we can tell bit more information to
            # user.
            try:
                deserialized_object = cloudpickle.loads(
                    base64.b64decode(serialization.encode())
                )
            except pickle.UnpicklingError as exc:
                # Chain the original error so the root cause stays visible.
                raise RuntimeError(
                    "Could not deserialize object {}. ".format(data_key) +
                    "Consider using `custom_objects` argument to replace " +
                    "this object."
                ) from exc
            return_data[data_key] = deserialized_object
        else:
            # Plain JSON value -- read as it is
            return_data[data_key] = data_item
    return return_data
Example 76
Project: Computable   Author: ktraunmueller   File: catalog.py    MIT License 4 votes vote down vote up
def add_function_persistent(self,code,function):
        """ Store the code->function relationship to disk.

            Two pieces of information are needed for loading functions
            from disk -- the function pickle (which conveniently stores
            the module name, etc.) and the path to its module's directory.
            The latter is needed so that the function can be loaded no
            matter what the user's Python path is.

            :param code: key string the function is catalogued under
            :param function: callable to persist
            :raises ValueError: if no writable catalog can be opened
        """
        # add function to data in first writable catalog
        mode = 'c'  # create if doesn't exist, otherwise, use existing
        cat_dir = self.get_writable_dir()
        cat = get_catalog(cat_dir,mode)
        if cat is None:
            # fall back to the default catalog directory
            cat_dir = default_dir()
            cat = get_catalog(cat_dir,mode)
        if cat is None:
            # default catalog is unusable -- remove its files and retry once
            cat_dir = default_dir()
            cat_file = catalog_path(cat_dir)
            print('problems with default catalog -- removing')
            import glob
            files = glob.glob(cat_file+'*')
            for f in files:
                os.remove(f)
            cat = get_catalog(cat_dir,mode)
        if cat is None:
            raise ValueError('Failed to access a catalog for storing functions')
        # Prabhu was getting some corrupt catalog errors.  I'll put a try/except
        # to protect against this, but should really try and track down the issue.
        function_list = [function]
        try:
            function_list = function_list + cat.get(code,[])
        except pickle.UnpicklingError:
            pass
        cat[code] = function_list
        # now add needed path information for loading function
        module = getmodule(function)
        try:
            # built in modules don't have the __file__ extension, so this
            # will fail.  Just pass in this case since path additions aren't
            # needed for built-in modules.
            mod_path,f = os.path.split(os.path.abspath(module.__file__))
            pkey = self.path_key(code)
            cat[pkey] = [mod_path] + cat.get(pkey,[])
        except Exception:
            # Was a bare 'except:', which also swallowed KeyboardInterrupt
            # and SystemExit; Exception keeps the best-effort behavior
            # without hiding interpreter-exit signals.
            pass
        cat.close()
Example 77
Project: poker   Author: surgebiswas   File: catalog.py    MIT License 4 votes vote down vote up
def add_function_persistent(self,code,function):
        """ Store the code->function relationship to disk.

            Two pieces of information are needed for loading functions
            from disk -- the function pickle (which conveniently stores
            the module name, etc.) and the path to its module's directory.
            The latter is needed so that the function can be loaded no
            matter what the user's Python path is.

            :param code: key string the function is catalogued under
            :param function: callable to persist
            :raises ValueError: if no writable catalog can be opened
        """
        # add function to data in first writable catalog
        mode = 'c'  # create if doesn't exist, otherwise, use existing
        cat_dir = self.get_writable_dir()
        cat = get_catalog(cat_dir,mode)
        if cat is None:
            # fall back to the default catalog directory
            cat_dir = default_dir()
            cat = get_catalog(cat_dir,mode)
        if cat is None:
            # default catalog is unusable -- remove its files and retry once
            cat_dir = default_dir()
            cat_file = catalog_path(cat_dir)
            warnings.warn('problems with default catalog -- removing')
            import glob
            files = glob.glob(cat_file+'*')
            for f in files:
                os.remove(f)
            cat = get_catalog(cat_dir,mode)
        if cat is None:
            raise ValueError('Failed to access a catalog for storing functions')
        # Prabhu was getting some corrupt catalog errors.  I'll put a try/except
        # to protect against this, but should really try and track down the issue.
        function_list = [function]
        try:
            function_list = function_list + cat.get(code,[])
        except pickle.UnpicklingError:
            pass
        cat[code] = function_list
        # now add needed path information for loading function
        module = getmodule(function)
        try:
            # built in modules don't have the __file__ extension, so this
            # will fail.  Just pass in this case since path additions aren't
            # needed for built-in modules.
            mod_path,f = os.path.split(os.path.abspath(module.__file__))
            pkey = self.path_key(code)
            cat[pkey] = [mod_path] + cat.get(pkey,[])
        except Exception:
            # Was a bare 'except:', which also swallowed KeyboardInterrupt
            # and SystemExit; Exception keeps the best-effort behavior
            # without hiding interpreter-exit signals.
            pass
        cat.close()
Example 78
Project: keyrings.alt   Author: jaraco   File: Google.py    MIT License 4 votes vote down vote up
def _read(self):
        # Fetch the keyring document from Google Docs and deserialize it.
        #
        # Returns a (docs_entry, keyring_dict) tuple: docs_entry is the
        # matching document-feed entry (None when no document exists yet
        # but creation is permitted) and keyring_dict is the unpickled
        # keyring contents ({} when starting fresh).
        #
        # Raises errors.InitError when the document is missing and
        # self.can_create is false, when the download fails, or when the
        # downloaded payload cannot be unpickled/decoded.
        from gdata.docs.service import DocumentQuery

        # Look up the document by exact title inside the configured collection.
        title_query = DocumentQuery(categories=[self.collection])
        title_query['title'] = self._get_doc_title()
        title_query['title-exact'] = 'true'
        docs = self.client.QueryDocumentListFeed(title_query.ToUri())

        if not docs.entry:
            if self.can_create:
                # No document yet: start with an empty keyring; the entry
                # is presumably created later on first write -- confirm.
                docs_entry = None
                keyring_dict = {}
            else:
                raise errors.InitError(
                    '%s not found in %s and create not permitted'
                    % (self._get_doc_title(), self.collection)
                )
        else:
            docs_entry = docs.entry[0]
            file_contents = ''
            try:
                # Download the document body as plain text.
                url = docs_entry.content.src
                url += '&exportFormat=txt'
                server_response = self.client.request('GET', url)
                if server_response.status != 200:
                    raise errors.InitError(
                        'Could not read existing Google Docs keyring'
                    )
                file_contents = server_response.read()
                # Strip a UTF-8 BOM that the text export may prepend.
                if file_contents.startswith(codecs.BOM_UTF8):
                    file_contents = file_contents[len(codecs.BOM_UTF8) :]
                # NOTE(review): 'string-escape' is a Python 2-only codec, so
                # this path appears to target Python 2 bytes-as-str -- confirm.
                keyring_dict = pickle.loads(
                    base64.urlsafe_b64decode(file_contents.decode('string-escape'))
                )
            except pickle.UnpicklingError as ex:
                raise errors.InitError(
                    'Could not unpickle existing Google Docs keyring', ex
                )
            except TypeError as ex:
                # b64decode raises TypeError on malformed input in Python 2.
                raise errors.InitError(
                    'Could not decode existing Google Docs keyring', ex
                )

        return docs_entry, keyring_dict
Example 79
Project: adbepy   Author: tomzig16   File: deviceCacher.py    MIT License 4 votes vote down vote up
def GetCachedDeviceData(fname):
    """
    Gets cached device and returns as a deserialized dictionary with isFullData and actual device data (example is given below). 
    All serialized data is expected to be saved inside /src/res/dcache/ folder (folder is created when saving first object). 
    If object does not exist, this function returns `None`
    fname - file name. by default, device names are device serial numbers (without any extension)

    Example of returned device data:
    ```
    {
        "isFullData": True,
        "deviceData": {
            "serial": "someSerialNumber",
            "manufa": "dummyManufacturer",
            ...
        }
    }
    ```
    """
    cache_file_path = path.join(dcachePath, fname)
    # Missing files and corrupted pickles both mean "no usable cache entry".
    try:
        with open(cache_file_path, mode="rb") as cached:
            device = pickle.load(cached)
    except (IOError, pickle.UnpicklingError):
        return None
    return {
        "isFullData": device.isFullData,
        "deviceData": {
            "serial": device.serial,
            "manufa": device.manuf,
            "model_code": device.model_code,
            "market_name": device.market_name,
            "os": device.os_ver,
            "fingerprint": device.fingerprint,
            "gpu_renderer": device.gpu_renderer,
            "gpu_manufa": device.gpu_manufacturer,
            "gpu_gles": device.gpu_gles,
            "cpu_abi": device.cpu_abi,
            "cpu_soc": device.cpu_soc,
            "cpu_hardware": device.cpu_hardware
        }
    }
Example 80
Project: ParlAI   Author: facebookresearch   File: distributed.py    MIT License 4 votes vote down vote up
def sync_object(data, max_size=16384):
    """
    Sync an object among all workers.

    Every worker leaves this call holding the primary worker's version of
    `data`, which is useful for keeping control-flow decisions identical
    across workers.

    :param object data:
        The object to synchronize. Must be pickleable.
    :param int max_size:
        The maximum size of this object in bytes. Values larger than 255^2
        are not supported.

    :return: the synchronized data
    """
    if not is_distributed():
        return data

    # Lazily allocate (or grow) a byte buffer cached on the function object.
    # CUDA is safe here because distributed mode is only okay with CUDA.
    needs_alloc = (
        not hasattr(sync_object, '_buffer')
        or sync_object._buffer.numel() < max_size
    )
    if needs_alloc:
        sync_object._buffer = torch.cuda.ByteTensor(max_size)
    shared = sync_object._buffer

    if is_primary_worker():
        payload = pickle.dumps(data)
        length = len(payload)
        if (length + 2 > max_size) or (length > 255 * 255):
            # can't store the size in the first 2 bytes
            raise ValueError('encoded data exceeds max_size')

        # The first two bytes carry the payload length in base 255.
        shared[0] = length // 255
        shared[1] = length % 255
        shared[2:length + 2] = torch.ByteTensor(list(payload))

    dist.broadcast(shared, 0)

    if not is_primary_worker():
        # Decode the length header, then unpickle the broadcast payload.
        length = shared[0].item() * 255 + shared[1].item()
        try:
            data = pickle.loads(bytes(shared[2:length + 2].tolist()))
        except pickle.UnpicklingError:
            raise RuntimeError(
                'There was an unpickling error in sync_object. This likely '
                'means your workers got out of syncronization (e.g. one is '
                'expecting to sync and another is not.)'
            )

    return data