Python pickle.load() Examples

The following are 30 code examples of pickle.load(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out all available functions/classes of the module pickle, or try the search function.
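Before the project-specific examples, here is a minimal, self-contained sketch of the round-trip pattern most of them follow. The file name example.pkl is only a placeholder for this illustration.

import pickle

# Serialize an object to a file, then load it back.
data = {"name": "example", "values": [1, 2, 3]}

with open("example.pkl", "wb") as f:
    pickle.dump(data, f, protocol=pickle.HIGHEST_PROTOCOL)

with open("example.pkl", "rb") as f:
    restored = pickle.load(f)

assert restored == data

Keep in mind that pickle.load() can execute arbitrary code embedded in the stream, so it should only be called on data from a trusted source; that caveat applies to every example below.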
Example #1
Source File: cache.py    From vergeml with MIT License
def _deserialize(self, data, type_):

        if self.compress:
            # decompress the data if needed
            data = lz4.frame.decompress(data)

        if type_ == _NUMPY:
            # deserialize numpy arrays
            buf = io.BytesIO(data)
            data = np.load(buf)

        elif type_ == _PICKLE:
            # deserialize other python objects
            data = pickle.loads(data)

        else:
            # Otherwise we just return data as it is (bytes)
            pass

        return data 
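Example #1 pairs optional LZ4 decompression with pickle.loads(). A minimal round-trip sketch of the same idea, assuming the third-party lz4 package is installed:

import pickle
import lz4.frame

obj = {"a": 1, "b": [2, 3]}
# compress the pickled bytes, then reverse both steps
blob = lz4.frame.compress(pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL))
restored = pickle.loads(lz4.frame.decompress(blob))
assert restored == obj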
Example #2
Source File: server.py    From BASS with GNU General Public License v2.0
def function_raw_hash_get():
    global Session
    session = Session()
    filename, file_ = request.files.items()[0]
    db = Database(pickle.load(file_))

    arch_name = db.architecture_name
    if arch_name == "metapc":
        arch_name = "x86"
    try:
        arch = session.query(Architecture).filter(Architecture.name == arch_name and \
                Architecture.bits == db.architecture_bits and \
                Architecture.little_endian == db.architecture_endianness == "little").one()
    except NoResultFound:
        return make_response(jsonify(message = "Architecture not found"), 404)
    
    try:
        func = next(db.functions)
    except StopIteration:
        return make_response(jsonify(message = "No function found in database"), 500)

    raw_hash = _function_calculate_raw_sha256(func)
    size = _function_get_size(func)

    try:
        function = session.query(Function).filter(Function.raw_sha256 == raw_hash and \
                Function.size == size and \
                Function.arch == arch.id).one()
        return make_response(jsonify(**json.loads(function.data)), 200)
    except NoResultFound:
        return make_response(jsonify(message = "Function not found"), 404) 
Example #3
Source File: sessions.py    From cherrypy with BSD 3-Clause "New" or "Revised" License
def _load(self, path=None):
        assert self.locked, ('The session load without being locked.  '
                             "Check your tools' priority levels.")
        if path is None:
            path = self._get_file_path()
        try:
            f = open(path, 'rb')
            try:
                return pickle.load(f)
            finally:
                f.close()
        except (IOError, EOFError):
            e = sys.exc_info()[1]
            if self.debug:
                cherrypy.log('Error loading the session pickle: %s' %
                             e, 'TOOLS.SESSIONS')
            return None 
Example #4
Source File: workflow.py    From wechat-alfred-workflow with MIT License
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer 
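The docstring above only requires that the registered object expose load() and dump() methods. A hedged sketch of one such serializer; the JSONSerializer class and the manager variable are hypothetical stand-ins for illustration, not part of the project's API:

import json

class JSONSerializer(object):
    """Illustrative serializer satisfying the load()/dump() contract."""

    @classmethod
    def load(cls, file_obj):
        return json.load(file_obj)

    @classmethod
    def dump(cls, obj, file_obj):
        return json.dump(obj, file_obj, indent=2)

# manager.register('json', JSONSerializer)  # 'json' also becomes the file extension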
Example #5
Source File: train_val.py    From Collaborative-Learning-for-Weakly-Supervised-Object-Detection with MIT License
def from_snapshot(self, sfile, nfile):
    print('Restoring model snapshots from {:s}'.format(sfile))
    self.net.load_state_dict(torch.load(str(sfile)))
    print('Restored.')
    # Needs to restore the other hyper-parameters/states for training, (TODO xinlei) I have
    # tried my best to find the random states so that it can be recovered exactly
    # However the Tensorflow state is currently not available
    with open(nfile, 'rb') as fid:
      st0 = pickle.load(fid)
      cur = pickle.load(fid)
      perm = pickle.load(fid)
      cur_val = pickle.load(fid)
      perm_val = pickle.load(fid)
      last_snapshot_iter = pickle.load(fid)

      np.random.set_state(st0)
      self.data_layer._cur = cur
      self.data_layer._perm = perm
      self.data_layer_val._cur = cur_val
      self.data_layer_val._perm = perm_val

    return last_snapshot_iter 
Example #6
Source File: pascal_voc.py    From Collaborative-Learning-for-Weakly-Supervised-Object-Detection with MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.

    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if os.path.exists(cache_file):
      with open(cache_file, 'rb') as fid:
        try:
          roidb = pickle.load(fid)
        except:
          roidb = pickle.load(fid, encoding='bytes')
      print('{} gt roidb loaded from {}'.format(self.name, cache_file))
      return roidb

    gt_roidb = [self._load_pascal_labels(index)
                for index in self.image_index]
    with open(cache_file, 'wb') as fid:
      pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))

    return gt_roidb 
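The bare try/except in the example above is there mainly to handle cache files pickled by Python 2: under Python 3, pickle.load() decodes old 8-bit strings as ASCII by default and raises UnicodeDecodeError on non-ASCII bytes, so the second attempt passes an explicit encoding. A minimal sketch of the same fallback, with legacy.pkl as a placeholder path:

import pickle

with open("legacy.pkl", "rb") as fid:
    try:
        roidb = pickle.load(fid)
    except UnicodeDecodeError:
        fid.seek(0)  # rewind before retrying with a permissive encoding
        roidb = pickle.load(fid, encoding="bytes")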
Example #7
Source File: coco.py    From Collaborative-Learning-for-Weakly-Supervised-Object-Detection with MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.
    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if osp.exists(cache_file):
      with open(cache_file, 'rb') as fid:
        roidb = pickle.load(fid)
      print('{} gt roidb loaded from {}'.format(self.name, cache_file))
      return roidb

    gt_roidb = [self._load_coco_annotation(index)
                for index in self._image_index]

    with open(cache_file, 'wb') as fid:
      pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb 
Example #8
Source File: model.py    From VSE-C with MIT License
def __init__(self, vocab_size, word_dim, embed_size, use_abs=False,
                 glove_path='data/glove.pkl'):
        super(EncoderTextDeepCNN, self).__init__()
        self.use_abs = use_abs
        self.embed_size = embed_size

        # word embedding
        self.embed = nn.Embedding(vocab_size, word_dim-300, padding_idx=0)
        _, embed_weight = pickle.load(open(glove_path, 'rb'))
        self.glove = Variable(torch.cuda.FloatTensor(embed_weight), requires_grad=False)

        channel_num = embed_size

        self.conv1 = nn.Conv1d(word_dim, embed_size, 2, stride=2)   # [batch_size, dim, 30]
        self.conv2 = nn.Conv1d(embed_size, embed_size, 4, stride=2)  # [batch_size, dim, 14]
        self.conv3 = nn.Conv1d(embed_size, embed_size, 5, stride=2)  # [batch_size, dim, 5]
        self.conv4 = nn.Conv1d(embed_size, channel_num, 5)
        self.drop = nn.Dropout(p=0.5)
        self.relu = nn.ReLU()

#        self.mlp = nn.Linear(embed_size, embed_size)

        self.init_weights() 
Example #9
Source File: update_cache_compatibility.py    From gated-graph-transformer-network with MIT License
def main(cache_dir):
    files_list = list(os.listdir(cache_dir))
    for file in files_list:
        full_filename = os.path.join(cache_dir, file)
        if os.path.isfile(full_filename):
            print("Processing {}".format(full_filename))
            m, stored_kwargs = pickle.load(open(full_filename, 'rb'))
            updated_kwargs = util.get_compatible_kwargs(model.Model, stored_kwargs)

            model_hash = util.object_hash(updated_kwargs)
            print("New hash -> " + model_hash)
            model_filename = os.path.join(cache_dir, "model_{}.p".format(model_hash))
            sys.setrecursionlimit(100000)
            pickle.dump((m,updated_kwargs), open(model_filename,'wb'), protocol=pickle.HIGHEST_PROTOCOL)

            os.remove(full_filename) 
Example #10
Source File: ggtnn_train.py    From gated-graph-transformer-network with MIT License
def assemble_batch(story_fns, num_answer_words, format_spec):
    stories = []
    for sfn in story_fns:
        with gzip.open(sfn,'rb') as f:
            cvtd_story, _, _, _ = pickle.load(f)
        stories.append(cvtd_story)
    sents, graphs, queries, answers = zip(*stories)
    cvtd_sents = np.array(sents, np.int32)
    cvtd_queries = np.array(queries, np.int32)
    max_ans_len = max(len(a) for a in answers)
    cvtd_answers = np.stack([convert_answer(answer, num_answer_words, format_spec, max_ans_len) for answer in answers])
    num_new_nodes, new_node_strengths, new_node_ids, next_edges = zip(*graphs)
    num_new_nodes = np.stack(num_new_nodes)
    new_node_strengths = np.stack(new_node_strengths)
    new_node_ids = np.stack(new_node_ids)
    next_edges = np.stack(next_edges)
    return cvtd_sents, cvtd_queries, cvtd_answers, num_new_nodes, new_node_strengths, new_node_ids, next_edges 
Example #11
Source File: learn.py    From flappybird-qlearning-bot with MIT License
def main():
    global HITMASKS, ITERATIONS, VERBOSE, bot

    parser = argparse.ArgumentParser("learn.py")
    parser.add_argument("--iter", type=int, default=1000, help="number of iterations to run")
    parser.add_argument(
        "--verbose", action="store_true", help="output [iteration | score] to stdout"
    )
    args = parser.parse_args()
    ITERATIONS = args.iter
    VERBOSE = args.verbose

    # load dumped HITMASKS
    with open("data/hitmasks_data.pkl", "rb") as input:
        HITMASKS = pickle.load(input)

    while True:
        movementInfo = showWelcomeAnimation()
        crashInfo = mainGame(movementInfo)
        showGameOverScreen(crashInfo) 
Example #12
Source File: custom_datasets.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def __init__(self, transform=None, target_transform=None, filename="adv_set_e_2.p", transp = False):
        """

        :param transform:
        :param target_transform:
        :param filename:
        :param transp: Set shuff= False for PGD based attacks
        :return:
        """
        self.transform = transform
        self.target_transform = target_transform
        self.adv_dict=pickle.load(open(filename,"rb"))
        self.adv_flat=self.adv_dict["adv_input"]
        self.num_adv=np.shape(self.adv_flat)[0]
        self.shuff = transp
        self.sample_num = 0 
Example #13
Source File: custom_datasets.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def __init__(self, transform=None, target_transform=None, filename="adv_set_e_2.p", transp = False):
        """

        :param transform:
        :param target_transform:
        :param filename:
        :param transp: Set shuff= False for PGD based attacks
        :return:
        """
        self.transform = transform
        self.target_transform = target_transform
        self.adv_dict=pickle.load(open(filename,"rb"))
        self.adv_flat=self.adv_dict["adv_input"]
        self.num_adv=np.shape(self.adv_flat)[0]
        self.shuff = transp
        self.sample_num = 0 
Example #14
Source File: custom_datasets.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def __init__(self, transform=None, target_transform=None, filename="adv_set_e_2.p", transp = False):
        """

        :param transform:
        :param target_transform:
        :param filename:
        :param transp: Set shuff= False for PGD based attacks
        :return:
        """
        self.transform = transform
        self.target_transform = target_transform
        self.adv_dict=pickle.load(open(filename,"rb"))
        self.adv_flat=self.adv_dict["adv_input"]
        self.num_adv=np.shape(self.adv_flat)[0]
        self.transp = transp
        self.sample_num = 0 
Example #15
Source File: training.py    From mlimages with MIT License
def __load_mean(self):
        mean = None
        if self.mean_image_file:
            if os.path.isfile(self.mean_image_file):
                _, ext = os.path.splitext(os.path.basename(self.mean_image_file))
                if ext.lower() == ".npy":
                    mean = pickle.load(open(self.mean_image_file, "rb"))
                else:
                    m_image = LabeledImage(self.mean_image_file)  # mean image is already `converted` when calculation.
                    m_image.load()
                    mean = m_image.to_array(np, self.color)
            else:
                raise Exception("Mean image is not exist at {0}.".format(self.mean_image_file))
        else:
            self.label_file._logger.warning("Mean image is not set. So if you train the model, it will be difficult to converge.")
        return mean 
Example #16
Source File: dataloader.py    From models with MIT License
def __init__(self, pos_features, pipeline_obj_path):
        """
        Args:
          pos_features: list of positional features to use
          pipeline_obj_path: path to the serialized pipeline obj_path
        """
        self.pos_features = pos_features
        self.pipeline_obj_path = pipeline_obj_path

        # deserialize the pickle file
        with open(self.pipeline_obj_path, "rb") as f:
            pipeline_obj = pickle.load(f)
        self.POS_FEATURES = pipeline_obj[0]
        self.minmax_scaler = pipeline_obj[1]
        self.imp = pipeline_obj[2]

        self.funct_transform = FunctionTransformer(func=sign_log_func,
                                                   inverse_func=sign_log_func_inverse)
        # for simplicity, assume all current pos_features are the
        # same as from before
        assert self.POS_FEATURES == self.pos_features 
Example #17
Source File: dump_dataloader_files.py    From models with MIT License
def __init__(self, pos_features, pipeline_obj_path):
        """
        Args:
          pos_features: list of positional features to use
          pipeline_obj_path: path to the serialized pipeline obj_path
        """
        self.pos_features = pos_features
        self.pipeline_obj_path = pipeline_obj_path

        # deserialize the pickle file
        with open(self.pipeline_obj_path, "rb") as f:
            pipeline_obj = pickle.load(f)
        self.POS_FEATURES = pipeline_obj[0]
        self.preproc_pipeline = pipeline_obj[1]
        self.imp = pipeline_obj[2]

        # for simplicity, assume all current pos_features are the
        # same as from before
        assert self.POS_FEATURES == self.pos_features 
Example #18
Source File: dataset_tool.py    From disentangling_conditional_gans with MIT License
def create_cifar100(tfrecord_dir, cifar100_dir):
    print('Loading CIFAR-100 from "%s"' % cifar100_dir)
    import pickle
    with open(os.path.join(cifar100_dir, 'train'), 'rb') as file:
        data = pickle.load(file, encoding='latin1')
    images = data['data'].reshape(-1, 3, 32, 32)
    labels = np.array(data['fine_labels'])
    assert images.shape == (50000, 3, 32, 32) and images.dtype == np.uint8
    assert labels.shape == (50000,) and labels.dtype == np.int32
    assert np.min(images) == 0 and np.max(images) == 255
    assert np.min(labels) == 0 and np.max(labels) == 99
    onehot = np.zeros((labels.size, np.max(labels) + 1), dtype=np.float32)
    onehot[np.arange(labels.size), labels] = 1.0

    with TFRecordExporter(tfrecord_dir, images.shape[0]) as tfr:
        order = tfr.choose_shuffled_order()
        for idx in range(order.size):
            tfr.add_image(images[order[idx]])
        tfr.add_labels(onehot[order])

#---------------------------------------------------------------------------- 
Example #19
Source File: model.py    From VSE-C with MIT License
def __init__(self, vocab_size, word_dim, embed_size, num_layers, pooling='last',
                 use_abs=False, bid=False, glove_path='data/glove.pkl'):
        super(EncoderTextGRU, self).__init__()
        self.use_abs = use_abs
        self.embed_size = embed_size
        self.combiner = Combiner(pooling, embed_size)

        # word embedding
        self.word_dim = word_dim
        if word_dim > 300:
            self.embed = nn.Embedding(vocab_size, word_dim-300)
        _, embed_weight = pickle.load(open(glove_path, 'rb'))
        self.glove = Variable(torch.cuda.FloatTensor(embed_weight), requires_grad=False)

        # caption embedding
        self.rnn = nn.GRU(word_dim, embed_size//(2 if bid else 1), num_layers, batch_first=True, bidirectional=bid)

        self.init_weights() 
Example #20
Source File: model.py    From VSE-C with MIT License
def __init__(self, vocab_size, word_dim, embed_size, use_abs=False, glove_path='data/glove.pkl'):
        super(EncoderTextCNN, self).__init__()
        self.use_abs = use_abs
        self.embed_size = embed_size

        # word embedding
        self.embed = nn.Embedding(vocab_size, word_dim-300, padding_idx=0)  # 0 for <pad>
        _, embed_weight = pickle.load(open(glove_path, 'rb'))
        self.glove = Variable(torch.cuda.FloatTensor(embed_weight), requires_grad=False)

        channel_num = embed_size // 4
        self.conv2 = nn.Conv1d(word_dim, channel_num, 2)
        self.conv3 = nn.Conv1d(word_dim, channel_num, 3)
        self.conv4 = nn.Conv1d(word_dim, channel_num, 4)
        self.conv5 = nn.Conv1d(word_dim, channel_num, 5)
        self.drop = nn.Dropout(p=0.5)
        self.relu = nn.ReLU()

#        self.mlp = nn.Linear(embed_size, embed_size)

        self.init_weights() 
Example #21
Source File: cache.py    From vergeml with MIT License
def read(self, file, path):
        """Read the content index from file.
        """
        pos, = struct.unpack('<Q', file.read(8))
        if pos == 0:
            raise VergeMLError("Invalid cache file: {}".format(path))
        file.seek(pos)
        self.index, self.meta, self.info = pickle.load(file) 
Example #22
Source File: dataset.py    From hgraph2graph with MIT License
def __iter__(self):
        for fn in self.data_files:
            fn = os.path.join(self.data_folder, fn)
            with open(fn, 'rb') as f:
                batches = pickle.load(f)

            if self.shuffle: random.shuffle(batches) #shuffle data before batch
            for batch in batches:
                yield batch

            del batches
            gc.collect() 
Example #23
Source File: sascorer.py    From hgraph2graph with MIT License
def readFragmentScores(name='fpscores'):
    import gzip
    global _fscores
    # generate the full path filename:
    if name == "fpscores":
        name = op.join(op.dirname(__file__), name)
    _fscores = pickle.load(gzip.open('%s.pkl.gz' % name))
    outDict = {}
    for i in _fscores:
        for j in range(1, len(i)):
            outDict[i[j]] = float(i[0])
    _fscores = outDict 
Example #24
Source File: drd2_scorer.py    From hgraph2graph with MIT License
def load_model():
    global clf_model
    name = op.join(op.dirname(__file__), 'clf_py36.pkl')
    with open(name, "rb") as f:
        clf_model = pickle.load(f) 
Example #25
Source File: dataset_tool.py    From disentangling_conditional_gans with MIT License
def create_svhn(tfrecord_dir, svhn_dir):
    print('Loading SVHN from "%s"' % svhn_dir)
    import pickle
    images = []
    labels = []
    for batch in range(1, 4):
        with open(os.path.join(svhn_dir, 'train_%d.pkl' % batch), 'rb') as file:
            data = pickle.load(file, encoding='latin1')
        images.append(data[0])
        labels.append(data[1])
    images = np.concatenate(images)
    labels = np.concatenate(labels)
    assert images.shape == (73257, 3, 32, 32) and images.dtype == np.uint8
    assert labels.shape == (73257,) and labels.dtype == np.uint8
    assert np.min(images) == 0 and np.max(images) == 255
    assert np.min(labels) == 0 and np.max(labels) == 9
    onehot = np.zeros((labels.size, np.max(labels) + 1), dtype=np.float32)
    onehot[np.arange(labels.size), labels] = 1.0

    with TFRecordExporter(tfrecord_dir, images.shape[0]) as tfr:
        order = tfr.choose_shuffled_order()
        for idx in range(order.size):
            tfr.add_image(images[order[idx]])
        tfr.add_labels(onehot[order])

#---------------------------------------------------------------------------- 
Example #26
Source File: dataset_tool.py    From disentangling_conditional_gans with MIT License
def create_from_images(tfrecord_dir, image_dir, label_dir, shuffle):
    print('Loading images from "%s"' % image_dir)
    image_filenames = sorted(glob.glob(os.path.join(image_dir, '*')))
    if len(image_filenames) == 0:
        error('No input images found')
        
    img = np.asarray(PIL.Image.open(image_filenames[0]))
    resolution = img.shape[0]
    channels = img.shape[2] if img.ndim == 3 else 1
    if img.shape[1] != resolution:
        error('Input images must have the same width and height')
    if resolution != 2 ** int(np.floor(np.log2(resolution))):
        error('Input image resolution must be a power-of-two')
    if channels not in [1, 3]:
        error('Input images must be stored as RGB or grayscale')

    try:
        with open(label_dir, 'rb') as file:
            labels = pickle.load(file)
    except:
        error('Label file was not found')
    
    with TFRecordExporter(tfrecord_dir, len(image_filenames)) as tfr:
        order = tfr.choose_shuffled_order() if shuffle else np.arange(len(image_filenames))
        reordered_names = []
        for idx in range(order.size):
            image_filename = image_filenames[order[idx]]
            img = np.asarray(PIL.Image.open(image_filename))
            if channels == 1:
                img = img[np.newaxis, :, :] # HW => CHW
            else:
                img = img.transpose(2, 0, 1) # HWC => CHW
            tfr.add_image(img)
            reordered_names.append(os.path.basename(image_filename))
        reordered_labels = []
        for key in reordered_names:
            reordered_labels += [labels[key]]
        reordered_labels = np.stack(reordered_labels, 0)
        tfr.add_labels(reordered_labels)

#---------------------------------------------------------------------------- 
Example #27
Source File: dataset_tool.py    From disentangling_conditional_gans with MIT License
def create_from_hdf5(tfrecord_dir, hdf5_filename, shuffle):
    print('Loading HDF5 archive from "%s"' % hdf5_filename)
    import h5py # conda install h5py
    with h5py.File(hdf5_filename, 'r') as hdf5_file:
        hdf5_data = max([value for key, value in hdf5_file.items() if key.startswith('data')], key=lambda lod: lod.shape[3])
        with TFRecordExporter(tfrecord_dir, hdf5_data.shape[0]) as tfr:
            order = tfr.choose_shuffled_order() if shuffle else np.arange(hdf5_data.shape[0])
            for idx in range(order.size):
                tfr.add_image(hdf5_data[order[idx]])
            npy_filename = os.path.splitext(hdf5_filename)[0] + '-labels.npy'
            if os.path.isfile(npy_filename):
                tfr.add_labels(np.load(npy_filename)[order])

#---------------------------------------------------------------------------- 
Example #28
Source File: tokenizer_mod.py    From Turku-neural-parser-pipeline with Apache License 2.0
def __init__(self, args):
        """
        Tokenizer model loading etc goes here
        """
        from keras.models import load_model
        self.model = load_model(args.model)
        with open(args.vocab,'rb') as inf:
            self.vocab = pickle.load(inf)
        self.args=args 
Example #29
Source File: dataset.py    From hgraph2graph with MIT License
def __iter__(self):
        for fn in self.data_files:
            fn = os.path.join(self.data_folder, fn)
            with open(fn, 'rb') as f:
                batches = pickle.load(f)

            if self.shuffle: random.shuffle(batches) #shuffle data before batch
            for batch in batches:
                yield batch

            del batches
            gc.collect() 
Example #30
Source File: g2pc.py    From g2pC with Apache License 2.0
def __init__(self):
        '''
        self.cedict looks like:
        {行: {pron: [hang2, xing2],
        meaning: [/row/line, /to walk/to go],
        trad: [行, 行]}
        '''
        self.seg = pkuseg.pkuseg(postag=True)
        self.cedict = pickle.load(open(os.path.dirname(os.path.abspath(__file__)) + '/cedict.pkl', 'rb'))
        self.crf = pickle.load(open(os.path.dirname(os.path.abspath(__file__)) + '/crf100.bin', 'rb'))