Python pickle.load() Examples
The following are 28 code examples showing how to use pickle.load(). The examples are extracted from open source projects; you can go to the original project or source file by following the links above each example. You may also want to check out all available functions and classes of the pickle module, or try the search function.
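Before the project examples, here is a minimal, self-contained sketch of the basic dump/load round trip (the file name and sample dict are illustrative only). Note that pickle files must always be opened in binary mode, and pickle.load() should only be used on data you trust, since unpickling can execute arbitrary code.

import pickle

record = {"name": "example", "values": [1, 2, 3]}

# Serialize to a file opened in binary mode ('wb').
with open("record.pkl", "wb") as f:
    pickle.dump(record, f, protocol=pickle.HIGHEST_PROTOCOL)

# Deserialize from a file opened in binary mode ('rb').
with open("record.pkl", "rb") as f:
    restored = pickle.load(f)

assert restored == record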
Example 1
Project: vergeml Author: mme File: cache.py License: MIT License
def _deserialize(self, data, type_):
    if self.compress:
        # decompress the data if needed
        data = lz4.frame.decompress(data)

    if type_ == _NUMPY:
        # deserialize numpy arrays
        buf = io.BytesIO(data)
        data = np.load(buf)
    elif type_ == _PICKLE:
        # deserialize other python objects
        data = pickle.loads(data)
    else:
        # otherwise we just return data as it is (bytes)
        pass

    return data
Example 2
Project: wechat-alfred-workflow Author: TKkk-iOSer File: workflow.py License: MIT License
def register(self, name, serializer):
    """Register ``serializer`` object under ``name``.

    Raises :class:`AttributeError` if ``serializer`` is invalid.

    .. note::

        ``name`` will be used as the file extension of the saved files.

    :param name: Name to register ``serializer`` under
    :type name: ``unicode`` or ``str``
    :param serializer: object with ``load()`` and ``dump()`` methods

    """
    # Basic validation
    getattr(serializer, 'load')
    getattr(serializer, 'dump')

    self._serializers[name] = serializer
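For context, here is a hypothetical serializer that would pass the load()/dump() duck-typing check above; the class name, the 'json2' extension, and the ``manager`` instance are illustrative assumptions, not from the project:

import json

class IndentedJSONSerializer(object):
    """Hypothetical serializer with the load()/dump() interface register() expects."""

    @classmethod
    def load(cls, file_obj):
        # Read a serialized object from an open file handle.
        return json.load(file_obj)

    @classmethod
    def dump(cls, obj, file_obj):
        # Write ``obj`` to an open file handle.
        json.dump(obj, file_obj, indent=2)

# Assumes a serializer manager instance named ``manager``.
manager.register('json2', IndentedJSONSerializer)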
Example 3
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection Author: Sunarker File: train_val.py License: MIT License
def from_snapshot(self, sfile, nfile):
    print('Restoring model snapshots from {:s}'.format(sfile))
    self.net.load_state_dict(torch.load(str(sfile)))
    print('Restored.')
    # Needs to restore the other hyper-parameters/states for training,
    # (TODO xinlei) I have tried my best to find the random states so that
    # it can be recovered exactly. However the Tensorflow state is
    # currently not available.
    with open(nfile, 'rb') as fid:
        st0 = pickle.load(fid)
        cur = pickle.load(fid)
        perm = pickle.load(fid)
        cur_val = pickle.load(fid)
        perm_val = pickle.load(fid)
        last_snapshot_iter = pickle.load(fid)

        np.random.set_state(st0)
        self.data_layer._cur = cur
        self.data_layer._perm = perm
        self.data_layer_val._cur = cur_val
        self.data_layer_val._perm = perm_val

    return last_snapshot_iter
Example 4
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection Author: Sunarker File: pascal_voc.py License: MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.

    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as fid:
            try:
                roidb = pickle.load(fid)
            except:
                # fall back for pickles written by Python 2
                roidb = pickle.load(fid, encoding='bytes')
        print('{} gt roidb loaded from {}'.format(self.name, cache_file))
        return roidb

    gt_roidb = [self._load_pascal_labels(index)
                for index in self.image_index]
    with open(cache_file, 'wb') as fid:
        pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb
Example 5
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection Author: Sunarker File: coco.py License: MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.

    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if osp.exists(cache_file):
        with open(cache_file, 'rb') as fid:
            roidb = pickle.load(fid)
        print('{} gt roidb loaded from {}'.format(self.name, cache_file))
        return roidb

    gt_roidb = [self._load_coco_annotation(index)
                for index in self._image_index]

    with open(cache_file, 'wb') as fid:
        pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb
Example 6
Project: cherrypy Author: cherrypy File: sessions.py License: BSD 3-Clause "New" or "Revised" License
def _load(self, path=None):
    assert self.locked, ('The session was loaded without being locked. '
                         "Check your tools' priority levels.")
    if path is None:
        path = self._get_file_path()
    try:
        f = open(path, 'rb')
        try:
            return pickle.load(f)
        finally:
            f.close()
    except (IOError, EOFError):
        e = sys.exc_info()[1]
        if self.debug:
            cherrypy.log('Error loading the session pickle: %s' % e,
                         'TOOLS.SESSIONS')
        return None
Example 7
Project: gated-graph-transformer-network Author: hexahedria File: update_cache_compatibility.py License: MIT License
def main(cache_dir):
    files_list = list(os.listdir(cache_dir))
    for file in files_list:
        full_filename = os.path.join(cache_dir, file)
        if os.path.isfile(full_filename):
            print("Processing {}".format(full_filename))
            m, stored_kwargs = pickle.load(open(full_filename, 'rb'))
            updated_kwargs = util.get_compatible_kwargs(model.Model, stored_kwargs)
            model_hash = util.object_hash(updated_kwargs)
            print("New hash -> " + model_hash)
            model_filename = os.path.join(cache_dir, "model_{}.p".format(model_hash))
            sys.setrecursionlimit(100000)
            pickle.dump((m, updated_kwargs), open(model_filename, 'wb'),
                        protocol=pickle.HIGHEST_PROTOCOL)
            os.remove(full_filename)
Example 8
Project: gated-graph-transformer-network Author: hexahedria File: ggtnn_train.py License: MIT License
def assemble_batch(story_fns, num_answer_words, format_spec):
    stories = []
    for sfn in story_fns:
        with gzip.open(sfn, 'rb') as f:
            cvtd_story, _, _, _ = pickle.load(f)
        stories.append(cvtd_story)
    sents, graphs, queries, answers = zip(*stories)
    cvtd_sents = np.array(sents, np.int32)
    cvtd_queries = np.array(queries, np.int32)
    max_ans_len = max(len(a) for a in answers)
    cvtd_answers = np.stack([convert_answer(answer, num_answer_words, format_spec, max_ans_len)
                             for answer in answers])
    num_new_nodes, new_node_strengths, new_node_ids, next_edges = zip(*graphs)
    num_new_nodes = np.stack(num_new_nodes)
    new_node_strengths = np.stack(new_node_strengths)
    new_node_ids = np.stack(new_node_ids)
    next_edges = np.stack(next_edges)
    return cvtd_sents, cvtd_queries, cvtd_answers, num_new_nodes, new_node_strengths, new_node_ids, next_edges
Example 9
Project: flappybird-qlearning-bot Author: chncyhn File: learn.py License: MIT License
def main():
    global HITMASKS, ITERATIONS, VERBOSE, bot

    parser = argparse.ArgumentParser("learn.py")
    parser.add_argument("--iter", type=int, default=1000,
                        help="number of iterations to run")
    parser.add_argument(
        "--verbose", action="store_true", help="output [iteration | score] to stdout"
    )
    args = parser.parse_args()
    ITERATIONS = args.iter
    VERBOSE = args.verbose

    # load dumped HITMASKS
    with open("data/hitmasks_data.pkl", "rb") as input:
        HITMASKS = pickle.load(input)

    while True:
        movementInfo = showWelcomeAnimation()
        crashInfo = mainGame(movementInfo)
        showGameOverScreen(crashInfo)
Example 10
Project: disentangling_conditional_gans Author: zalandoresearch File: dataset_tool.py License: MIT License
def create_cifar100(tfrecord_dir, cifar100_dir):
    print('Loading CIFAR-100 from "%s"' % cifar100_dir)
    import pickle
    with open(os.path.join(cifar100_dir, 'train'), 'rb') as file:
        data = pickle.load(file, encoding='latin1')
    images = data['data'].reshape(-1, 3, 32, 32)
    labels = np.array(data['fine_labels'])

    assert images.shape == (50000, 3, 32, 32) and images.dtype == np.uint8
    assert labels.shape == (50000,) and labels.dtype == np.int32
    assert np.min(images) == 0 and np.max(images) == 255
    assert np.min(labels) == 0 and np.max(labels) == 99
    onehot = np.zeros((labels.size, np.max(labels) + 1), dtype=np.float32)
    onehot[np.arange(labels.size), labels] = 1.0

    with TFRecordExporter(tfrecord_dir, images.shape[0]) as tfr:
        order = tfr.choose_shuffled_order()
        for idx in range(order.size):
            tfr.add_image(images[order[idx]])
        tfr.add_labels(onehot[order])

#----------------------------------------------------------------------------
Example 11
Project: neural-fingerprinting Author: StephanZheng File: custom_datasets.py License: BSD 3-Clause "New" or "Revised" License
def __init__(self, transform=None, target_transform=None,
             filename="adv_set_e_2.p", transp=False):
    """
    :param transform:
    :param target_transform:
    :param filename:
    :param transp: Set shuff=False for PGD based attacks
    :return:
    """
    self.transform = transform
    self.target_transform = target_transform
    self.adv_dict = pickle.load(open(filename, "rb"))
    self.adv_flat = self.adv_dict["adv_input"]
    self.num_adv = np.shape(self.adv_flat)[0]
    self.shuff = transp
    self.sample_num = 0
Example 12
Project: neural-fingerprinting Author: StephanZheng File: custom_datasets.py License: BSD 3-Clause "New" or "Revised" License
def __init__(self, transform=None, target_transform=None,
             filename="adv_set_e_2.p", transp=False):
    """
    :param transform:
    :param target_transform:
    :param filename:
    :param transp: Set shuff=False for PGD based attacks
    :return:
    """
    self.transform = transform
    self.target_transform = target_transform
    self.adv_dict = pickle.load(open(filename, "rb"))
    self.adv_flat = self.adv_dict["adv_input"]
    self.num_adv = np.shape(self.adv_flat)[0]
    self.transp = transp
    self.sample_num = 0
Example 13
Project: mlimages Author: icoxfog417 File: training.py License: MIT License
def __load_mean(self):
    mean = None
    if self.mean_image_file:
        if os.path.isfile(self.mean_image_file):
            _, ext = os.path.splitext(os.path.basename(self.mean_image_file))
            if ext.lower() == ".npy":
                mean = pickle.load(open(self.mean_image_file, "rb"))
            else:
                m_image = LabeledImage(self.mean_image_file)
                # the mean image is already `converted` at calculation time
                m_image.load()
                mean = m_image.to_array(np, self.color)
        else:
            raise Exception("Mean image does not exist at {0}.".format(self.mean_image_file))
    else:
        self.label_file._logger.warning(
            "Mean image is not set. If you train the model, it will be difficult to converge.")
    return mean
Example 14
Project: models Author: kipoi File: dataloader.py License: MIT License
def __init__(self, pos_features, pipeline_obj_path):
    """
    Args:
        pos_features: list of positional features to use
        pipeline_obj_path: path to the serialized pipeline obj_path
    """
    self.pos_features = pos_features
    self.pipeline_obj_path = pipeline_obj_path

    # deserialize the pickle file
    with open(self.pipeline_obj_path, "rb") as f:
        pipeline_obj = pickle.load(f)
    self.POS_FEATURES = pipeline_obj[0]
    self.minmax_scaler = pipeline_obj[1]
    self.imp = pipeline_obj[2]
    self.funct_transform = FunctionTransformer(func=sign_log_func,
                                               inverse_func=sign_log_func_inverse)

    # for simplicity, assume all current pos_features are the
    # same as from before
    assert self.POS_FEATURES == self.pos_features
Example 15
Project: models Author: kipoi File: dump_dataloader_files.py License: MIT License
def __init__(self, pos_features, pipeline_obj_path):
    """
    Args:
        pos_features: list of positional features to use
        pipeline_obj_path: path to the serialized pipeline obj_path
    """
    self.pos_features = pos_features
    self.pipeline_obj_path = pipeline_obj_path

    # deserialize the pickle file
    with open(self.pipeline_obj_path, "rb") as f:
        pipeline_obj = pickle.load(f)
    self.POS_FEATURES = pipeline_obj[0]
    self.preproc_pipeline = pipeline_obj[1]
    self.imp = pipeline_obj[2]

    # for simplicity, assume all current pos_features are the
    # same as from before
    assert self.POS_FEATURES == self.pos_features
Example 16
Project: VSE-C Author: ExplorerFreda File: model.py License: MIT License
def __init__(self, vocab_size, word_dim, embed_size, num_layers,
             pooling='last', use_abs=False, bid=False, glove_path='data/glove.pkl'):
    super(EncoderTextGRU, self).__init__()
    self.use_abs = use_abs
    self.embed_size = embed_size
    self.combiner = Combiner(pooling, embed_size)

    # word embedding
    self.word_dim = word_dim
    if word_dim > 300:
        self.embed = nn.Embedding(vocab_size, word_dim - 300)
    _, embed_weight = pickle.load(open(glove_path, 'rb'))
    self.glove = Variable(torch.cuda.FloatTensor(embed_weight), requires_grad=False)

    # caption embedding
    self.rnn = nn.GRU(word_dim, embed_size // (2 if bid else 1), num_layers,
                      batch_first=True, bidirectional=bid)

    self.init_weights()
Example 17
Project: VSE-C Author: ExplorerFreda File: model.py License: MIT License
def __init__(self, vocab_size, word_dim, embed_size, use_abs=False,
             glove_path='data/glove.pkl'):
    super(EncoderTextCNN, self).__init__()
    self.use_abs = use_abs
    self.embed_size = embed_size

    # word embedding
    self.embed = nn.Embedding(vocab_size, word_dim - 300, padding_idx=0)  # 0 for <pad>
    _, embed_weight = pickle.load(open(glove_path, 'rb'))
    self.glove = Variable(torch.cuda.FloatTensor(embed_weight), requires_grad=False)

    channel_num = embed_size // 4
    self.conv2 = nn.Conv1d(word_dim, channel_num, 2)
    self.conv3 = nn.Conv1d(word_dim, channel_num, 3)
    self.conv4 = nn.Conv1d(word_dim, channel_num, 4)
    self.conv5 = nn.Conv1d(word_dim, channel_num, 5)
    self.drop = nn.Dropout(p=0.5)
    self.relu = nn.ReLU()

    # self.mlp = nn.Linear(embed_size, embed_size)

    self.init_weights()
Example 18
Project: VSE-C Author: ExplorerFreda File: model.py License: MIT License
def __init__(self, vocab_size, word_dim, embed_size, use_abs=False,
             glove_path='data/glove.pkl'):
    super(EncoderTextDeepCNN, self).__init__()
    self.use_abs = use_abs
    self.embed_size = embed_size

    # word embedding
    self.embed = nn.Embedding(vocab_size, word_dim - 300, padding_idx=0)
    _, embed_weight = pickle.load(open(glove_path, 'rb'))
    self.glove = Variable(torch.cuda.FloatTensor(embed_weight), requires_grad=False)

    channel_num = embed_size
    self.conv1 = nn.Conv1d(word_dim, embed_size, 2, stride=2)    # [batch_size, dim, 30]
    self.conv2 = nn.Conv1d(embed_size, embed_size, 4, stride=2)  # [batch_size, dim, 14]
    self.conv3 = nn.Conv1d(embed_size, embed_size, 5, stride=2)  # [batch_size, dim, 5]
    self.conv4 = nn.Conv1d(embed_size, channel_num, 5)
    self.drop = nn.Dropout(p=0.5)
    self.relu = nn.ReLU()

    # self.mlp = nn.Linear(embed_size, embed_size)

    self.init_weights()
Example 19
Project: vergeml Author: mme File: cache.py License: MIT License
def read(self, file, path):
    """Read the content index from file."""
    pos, = struct.unpack('<Q', file.read(8))
    if pos == 0:
        raise VergeMLError("Invalid cache file: {}".format(path))
    file.seek(pos)
    self.index, self.meta, self.info = pickle.load(file)
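For context, here is a minimal sketch of a writer producing the layout read() above expects: an 8-byte little-endian offset header pointing at a trailing pickled (index, meta, info) tuple. The function name and the zero-placeholder convention are assumptions for illustration, not the project's actual writer:

import pickle
import struct

def write_index(path, index, meta, info, content=b""):
    # Hypothetical counterpart to read() above.
    with open(path, "wb") as f:
        f.write(struct.pack("<Q", 0))    # placeholder offset (0 = invalid)
        f.write(content)                 # cached content blocks would go here
        pos = f.tell()
        pickle.dump((index, meta, info), f)
        f.seek(0)
        f.write(struct.pack("<Q", pos))  # patch in the real offset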
Example 20
Project: LipNet-PyTorch Author: sailordiary File: dataloader.py License: BSD 3-Clause "New" or "Revised" License
def __getitem__(self, index):
    # images: bs x chan x T x H x W
    x = torch.zeros(3, self.opt.max_timesteps, 50, 100)

    # load video using read_data() and shove into x
    d = self.dataset[index]
    # targets: bs-length tensor of targets (each one is the length of the target seq)
    frames, y, sub = read_data(d, self.opt, self.vocab_mapping)
    x[:, : frames.size(1), :, :] = frames
    # input lengths: bs-length tensor of integers, representing
    # the number of input timesteps/frames for the given batch element
    length = frames.size(1)

    return x, y, length, index
Example 21
Project: natural-questions Author: google-research-datasets File: nq_eval.py License: Apache License 2.0
def main(_):
    cache_path = os.path.join(os.path.dirname(FLAGS.gold_path), 'cache')
    if FLAGS.cache_gold_data and os.path.exists(cache_path):
        logging.info('Reading from cache: %s', format(cache_path))
        # pickle files must be opened in binary mode
        nq_gold_dict = pickle.load(open(cache_path, 'rb'))
    else:
        nq_gold_dict = util.read_annotation(
            FLAGS.gold_path, n_threads=FLAGS.num_threads)
        if FLAGS.cache_gold_data:
            logging.info('Caching gold data for next time to: %s', format(cache_path))
            pickle.dump(nq_gold_dict, open(cache_path, 'wb'))

    nq_pred_dict = util.read_prediction_json(FLAGS.predictions_path)

    long_answer_stats, short_answer_stats = score_answers(nq_gold_dict, nq_pred_dict)

    if FLAGS.pretty_print:
        print('*' * 20)
        print('LONG ANSWER R@P TABLE:')
        print_r_at_p_table(long_answer_stats)
        print('*' * 20)
        print('SHORT ANSWER R@P TABLE:')
        print_r_at_p_table(short_answer_stats)

        scores = compute_final_f1(long_answer_stats, short_answer_stats)
        print('*' * 20)
        print('METRICS IGNORING SCORES (n={}):'.format(scores['long-answer-n']))
        print('              F1     /  P      /  R')
        print('Long answer  {: >7.2%} / {: >7.2%} / {: >7.2%}'.format(
            scores['long-answer-f1'], scores['long-answer-precision'],
            scores['long-answer-recall']))
        print('Short answer {: >7.2%} / {: >7.2%} / {: >7.2%}'.format(
            scores['short-answer-f1'], scores['short-answer-precision'],
            scores['short-answer-recall']))
    else:
        metrics = get_metrics_with_answer_stats(long_answer_stats, short_answer_stats)
        print(json.dumps(metrics))
Example 22
Project: BASS Author: Cisco-Talos File: binary_database.py License: GNU General Public License v2.0
def load(clazz, path):
    with open(path, "rb") as f:
        return clazz(pickle.load(f))
Example 23
Project: BASS Author: Cisco-Talos File: server.py License: GNU General Public License v2.0
def function_raw_hash_get():
    global Session
    session = Session()

    filename, file_ = request.files.items()[0]
    db = Database(pickle.load(file_))

    arch_name = db.architecture_name
    if arch_name == "metapc":
        arch_name = "x86"
    try:
        arch = session.query(Architecture).filter(Architecture.name == arch_name and \
                                                  Architecture.bits == db.architecture_bits and \
                                                  Architecture.little_endian == db.architecture_endianness == "little").one()
    except NoResultFound:
        return make_response(jsonify(message="Architecture not found"), 404)

    try:
        func = next(db.functions)
    except StopIteration:
        return make_response(jsonify(message="No function found in database"), 500)

    raw_hash = _function_calculate_raw_sha256(func)
    size = _function_get_size(func)

    try:
        function = session.query(Function).filter(Function.raw_sha256 == raw_hash and \
                                                  Function.size == size and \
                                                  Function.arch == arch.id).one()
        return make_response(jsonify(**json.loads(function.data)), 200)
    except NoResultFound:
        return make_response(jsonify(message="Function not found"), 404)
Example 24
Project: BASS Author: Cisco-Talos File: server.py License: GNU General Public License v2.0
def function_mnem_hash_get():
    global Session
    session = Session()

    filename, file_ = request.files.items()[0]
    db = Database(pickle.load(file_))

    arch_name = db.architecture_name
    if arch_name == "metapc":
        arch_name = "x86"
    try:
        arch = session.query(Architecture).filter(Architecture.name == arch_name and \
                                                  Architecture.bits == db.architecture_bits and \
                                                  Architecture.little_endian == db.architecture_endianness == "little").one()
    except NoResultFound:
        return make_response(jsonify(message="Architecture not found"), 404)

    try:
        func = next(db.functions)
    except StopIteration:
        return make_response(jsonify(message="No function found in database"), 500)

    mnem_hash = _function_calculate_mnem_sha256(func)

    try:
        function = session.query(Function).filter(Function.mnem_sha256 == mnem_hash and \
                                                  Function.arch == arch.id).one()
        return make_response(jsonify(**json.loads(function.data)), 200)
    except NoResultFound:
        return make_response(jsonify(message="Function not found"), 404)
Example 25
Project: wechat-alfred-workflow Author: TKkk-iOSer File: workflow.py License: MIT License
def load(cls, file_obj):
    """Load serialized object from open JSON file.

    .. versionadded:: 1.8

    :param file_obj: file handle
    :type file_obj: ``file`` object
    :returns: object loaded from JSON file
    :rtype: object

    """
    return json.load(file_obj)
Example 26
Project: wechat-alfred-workflow Author: TKkk-iOSer File: workflow.py License: MIT License
def load(cls, file_obj):
    """Load serialized object from open pickle file.

    .. versionadded:: 1.8

    :param file_obj: file handle
    :type file_obj: ``file`` object
    :returns: object loaded from pickle file
    :rtype: object

    """
    return cPickle.load(file_obj)
Example 27
Project: wechat-alfred-workflow Author: TKkk-iOSer File: workflow.py License: MIT License
def _load(self):
    """Load cached settings from JSON file `self._filepath`."""
    data = {}
    with LockFile(self._filepath, 0.5):
        with open(self._filepath, 'rb') as fp:
            data.update(json.load(fp))

    self._original = deepcopy(data)

    self._nosave = True
    self.update(data)
    self._nosave = False
Example 28
Project: wechat-alfred-workflow Author: TKkk-iOSer File: workflow.py License: MIT License
def cached_data(self, name, data_func=None, max_age=60):
    """Return cached data if younger than ``max_age`` seconds.

    Retrieve data from cache or re-generate and re-cache data if
    stale/non-existent. If ``max_age`` is 0, return cached data no
    matter how old.

    :param name: name of datastore
    :param data_func: function to (re-)generate data.
    :type data_func: ``callable``
    :param max_age: maximum age of cached data in seconds
    :type max_age: ``int``
    :returns: cached data, return value of ``data_func`` or ``None``
        if ``data_func`` is not set

    """
    serializer = manager.serializer(self.cache_serializer)

    cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
    age = self.cached_data_age(name)

    if (age < max_age or max_age == 0) and os.path.exists(cache_path):
        with open(cache_path, 'rb') as file_obj:
            self.logger.debug('loading cached data: %s', cache_path)
            return serializer.load(file_obj)

    if not data_func:
        return None

    data = data_func()
    self.cache_data(name, data)

    return data
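A hypothetical call pattern for cached_data() above; the fetch_repos function and the workflow instance ``wf`` are illustrative assumptions, not from the project:

def fetch_repos():
    # Expensive work whose result is worth caching.
    return ['repo-a', 'repo-b', 'repo-c']

# Returns the cached list if it is younger than 600 seconds,
# otherwise calls fetch_repos() and re-caches the result.
repos = wf.cached_data('repos', fetch_repos, max_age=600)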