Python cPickle.load() Examples

The following are code examples of cPickle.load(), collected from open-source projects. Each example is headed by the name of the project and source file it was taken from, together with the project's license.
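
In Python 2, cPickle is the C implementation of the pickle module; Python 3 dropped the separate name and transparently uses the C accelerator behind the plain pickle module. The usual compatibility idiom, which also appears verbatim in Example #20 below, is:

try:
    import cPickle as pickle  # Python 2: C implementation of pickle
except ImportError:
    import pickle  # Python 3: the C accelerator is used automatically

Many of the examples below were written this way, which is why several of them call pickle.load() rather than cPickle.load().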
Example #1
Source File: data_loader.py    From nn_physical_concepts with Apache License 2.0
def load(validation_size_p, file_name):
    """
    Params:
    validation_size_p: percentage of data to be used for validation
    file_name (str): File containing the data
    """
    f = gzip.open(io.data_path + file_name + ".plk.gz", 'rb')
    data, states, projectors = cPickle.load(f)
    data = np.array(data)
    states = np.array(states)
    train_val_separation = int(len(data) * (1 - validation_size_p / 100.))
    training_data = data[:train_val_separation]
    training_states = states[:train_val_separation]
    validation_data = data[train_val_separation:]
    validation_states = states[train_val_separation:]
    f.close()
    return (training_data, validation_data, training_states, validation_states, projectors) 
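
A minimal usage sketch for the function above (the file name "experiment_data" is hypothetical; the function would read io.data_path + "experiment_data.plk.gz"):

train_data, val_data, train_states, val_states, projectors = load(10, "experiment_data")
# validation_size_p=10 reserves the last 10% of the samples for validation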
Example #2
Source File: train_val.py    From Collaborative-Learning-for-Weakly-Supervised-Object-Detection with MIT License
def from_snapshot(self, sfile, nfile):
    print('Restoring model snapshots from {:s}'.format(sfile))
    self.net.load_state_dict(torch.load(str(sfile)))
    print('Restored.')
    # Restore the other hyper-parameters/states needed for training. (TODO xinlei) I have
    # tried my best to find the random states so that training can be recovered exactly;
    # however, the TensorFlow state is currently not available.
    with open(nfile, 'rb') as fid:
      st0 = pickle.load(fid)
      cur = pickle.load(fid)
      perm = pickle.load(fid)
      cur_val = pickle.load(fid)
      perm_val = pickle.load(fid)
      last_snapshot_iter = pickle.load(fid)

      np.random.set_state(st0)
      self.data_layer._cur = cur
      self.data_layer._perm = perm
      self.data_layer_val._cur = cur_val
      self.data_layer_val._perm = perm_val

    return last_snapshot_iter 
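
Consecutive pickle.load() calls on the same file handle return objects in the order they were dumped, which is why the reader above can unpack six values from a single snapshot file. A sketch of the matching write side, assuming the snapshot writer simply dumps the same six objects in order (the file name and placeholder values are hypothetical):

import pickle
import numpy as np

st0 = np.random.get_state()
cur, perm = 0, np.arange(100)          # placeholder data-layer cursors
cur_val, perm_val = 0, np.arange(10)
last_snapshot_iter = 5000

# Each pickle.dump() appends one complete pickle stream to the file;
# repeated pickle.load() calls read the objects back in the same order.
with open('snapshot_meta.pkl', 'wb') as fid:
    for obj in (st0, cur, perm, cur_val, perm_val, last_snapshot_iter):
        pickle.dump(obj, fid)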
Example #3
Source File: data.py    From razzy-spinner with GNU General Public License v3.0
def show_cfg(resource_url, escape='##'):
    """
    Write out a grammar file, ignoring escaped and empty lines.

    :type resource_url: str
    :param resource_url: A URL specifying where the resource should be
        loaded from.  The default protocol is "nltk:", which searches
        for the file in the NLTK data package.
    :type escape: str
    :param escape: Prepended string that signals lines to be ignored
    """
    resource_url = normalize_resource_url(resource_url)
    resource_val = load(resource_url, format='text', cache=False)
    lines = resource_val.splitlines()
    for l in lines:
        if l.startswith(escape):
            continue
        if re.match('^$', l):
            continue
        print(l) 
Example #4
Source File: workflow.py    From wechat-alfred-workflow with MIT License
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer 
Example #5
Source File: workflow.py    From gist-alfred with MIT License
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer 
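
Any object exposing load() and dump() methods passes the validation above. A minimal sketch with a hypothetical JSON-backed serializer (the registry instance name is also hypothetical):

import json

class JSONSerializer(object):
    """Minimal serializer exposing the load()/dump() interface."""

    @staticmethod
    def load(file_obj):
        return json.load(file_obj)

    @staticmethod
    def dump(obj, file_obj):
        json.dump(obj, file_obj)

# registry.register('json', JSONSerializer())  # 'json' also becomes the file extension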
Example #6
Source File: train.py    From cat-bbs with MIT License
def _augment_images_worker(self, augseq, queue_source, queue_result):
        """Worker function that endlessly queries the source queue (input
        batches), augments batches in it and sends the result to the output
        queue."""
        while True:
            # wait for a new batch in the source queue and load it
            batch_str = queue_source.get()
            batch = pickle.loads(batch_str)

            # augment the batch
            if batch.images is not None and batch.keypoints is not None:
                augseq_det = augseq.to_deterministic()
                batch.images_aug = augseq_det.augment_images(batch.images)
                batch.keypoints_aug = augseq_det.augment_keypoints(batch.keypoints)
            elif batch.images is not None:
                batch.images_aug = augseq.augment_images(batch.images)
            elif batch.keypoints is not None:
                batch.keypoints_aug = augseq.augment_keypoints(batch.keypoints)

            # send augmented batch to output queue
            queue_result.put(pickle.dumps(batch, protocol=-1)) 
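
The worker relies on pickle to move batches between processes; protocol=-1 selects the highest protocol available, the most compact and fastest choice. A minimal round-trip sketch that is independent of the project's batch class:

import pickle
from multiprocessing import Queue

queue = Queue()
payload = {"images": [[0, 1], [2, 3]], "keypoints": None}  # stand-in for a batch
queue.put(pickle.dumps(payload, protocol=-1))
restored = pickle.loads(queue.get())
assert restored == payload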
Example #7
Source File: input.py    From DOTA_models with Apache License 2.0
def extract_mnist_data(filename, num_images, image_size, pixel_depth):
  """
  Extract the images into a 4D tensor [image index, y, x, channels].

  Values are rescaled from [0, 255] down to [-0.5, 0.5].
  """
  # if not os.path.exists(file):
  if not tf.gfile.Exists(filename+".npy"):
    with gzip.open(filename) as bytestream:
      bytestream.read(16)
      buf = bytestream.read(image_size * image_size * num_images)
      data = np.frombuffer(buf, dtype=np.uint8).astype(np.float32)
      data = (data - (pixel_depth / 2.0)) / pixel_depth
      data = data.reshape(num_images, image_size, image_size, 1)
      np.save(filename, data)
      return data
  else:
    with tf.gfile.Open(filename+".npy", mode='r') as file_obj:
      return np.load(file_obj) 
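
The bytestream.read(16) call above skips the IDX image-file header: a 4-byte magic number followed by three big-endian 32-bit integers giving the image count, row count, and column count. A sketch that parses the header instead of discarding it (the file name is the standard MNIST archive name):

import gzip
import struct

with gzip.open('train-images-idx3-ubyte.gz') as bs:
    magic, n_images, n_rows, n_cols = struct.unpack('>IIII', bs.read(16))
    assert magic == 2051  # IDX magic number for unsigned-byte image data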
Example #8
Source File: data_utils.py    From DOTA_models with Apache License 2.0
def preprocess_omniglot():
  """Download and prepare raw Omniglot data.

  Downloads the data from GitHub if it does not exist.
  Then loads the images, augments them with rotations if desired,
  resizes the images, and writes them to a pickle file.
  """

  maybe_download_data()

  directory = TRAIN_DIR
  write_file = DATA_FILE_FORMAT % 'train'
  num_labels = write_datafiles(
      directory, write_file, resize=True, rotate=TRAIN_ROTATIONS,
      new_width=IMAGE_NEW_SIZE, new_height=IMAGE_NEW_SIZE)

  directory = TEST_DIR
  write_file = DATA_FILE_FORMAT % 'test'
  write_datafiles(directory, write_file, resize=True, rotate=TEST_ROTATIONS,
                  new_width=IMAGE_NEW_SIZE, new_height=IMAGE_NEW_SIZE,
                  first_label=num_labels) 
Example #9
Source File: variable_store.py    From spinn with MIT License
def load_checkpoint(self, filename="vs_ckpt", keys=None, num_extra_vars=0, skip_saved_unsavables=False):
        if skip_saved_unsavables:
            keys = self.vars
        else:
            if not keys:
                keys = self.savable_vars
        save_file = open(filename)
        for key in keys:
            if skip_saved_unsavables and key not in self.savable_vars:
                if self.logger:
                    full_name = "%s/%s" % (self.prefix, key)
                    self.logger.Log(
                        "Not restoring variable " + full_name, level=self.logger.DEBUG)
                _ = cPickle.load(save_file) # Discard
            else:
                if self.logger:
                    full_name = "%s/%s" % (self.prefix, key)
                    self.logger.Log(
                        "Restoring variable " + full_name, level=self.logger.DEBUG)
                self.vars[key].set_value(cPickle.load(save_file), borrow=True)

        extra_vars = []
        for _ in range(num_extra_vars):
            extra_vars.append(cPickle.load(save_file))
        return extra_vars 
Example #10
Source File: dynamic_contour_embedding.py    From RingNet with MIT License
def load_dynamic_contour(template_flame_path='None', contour_embeddings_path='None', static_embedding_path='None', angle=0):
    template_mesh = Mesh(filename=template_flame_path)
    dynamic_lmks_embeddings = np.load(contour_embeddings_path, allow_pickle=True).item()
    lmk_face_idx_static, lmk_b_coords_static = load_static_embedding(static_embedding_path)
    lmk_face_idx_dynamic = dynamic_lmks_embeddings['lmk_face_idx'][angle]
    lmk_b_coords_dynamic = dynamic_lmks_embeddings['lmk_b_coords'][angle]
    dynamic_lmks = mesh_points_by_barycentric_coordinates(template_mesh.v, template_mesh.f, lmk_face_idx_dynamic, lmk_b_coords_dynamic)
    static_lmks = mesh_points_by_barycentric_coordinates(template_mesh.v, template_mesh.f, lmk_face_idx_static, lmk_b_coords_static)
    total_lmks = np.vstack([dynamic_lmks, static_lmks])

    # Visualization of the pose dependent contour on the template mesh
    vertex_colors = np.ones([template_mesh.v.shape[0], 4]) * [0.3, 0.3, 0.3, 0.8]
    tri_mesh = trimesh.Trimesh(template_mesh.v, template_mesh.f,
                               vertex_colors=vertex_colors)
    mesh = pyrender.Mesh.from_trimesh(tri_mesh)
    scene = pyrender.Scene()
    scene.add(mesh)
    sm = trimesh.creation.uv_sphere(radius=0.005)
    sm.visual.vertex_colors = [0.9, 0.1, 0.1, 1.0]
    tfs = np.tile(np.eye(4), (len(total_lmks), 1, 1))
    tfs[:, :3, 3] = total_lmks
    joints_pcl = pyrender.Mesh.from_trimesh(sm, poses=tfs)
    scene.add(joints_pcl)
    pyrender.Viewer(scene, use_raymond_lighting=True) 
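
The .item() call above unwraps the 0-d object array that np.load() returns when a plain Python object such as a dict was saved; allow_pickle=True is required because NumPy pickles such objects internally. A minimal round-trip sketch:

import numpy as np

embeddings = {'lmk_face_idx': np.zeros(3), 'lmk_b_coords': np.ones(3)}
np.save('embeddings.npy', embeddings)  # the dict is pickled into a 0-d object array
loaded = np.load('embeddings.npy', allow_pickle=True).item()  # .item() unwraps the dict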
Example #11
Source File: bidirectional.py    From deep-summarization with MIT License
def _load_data(self):
        """
        Load the data only if it has not already been checkpointed; otherwise just load the checkpointed data

        :return: None
        """
        self.mapper = Mapper()
        self.mapper.generate_vocabulary(self.review_summary_file)
        self.X_fwd, self.X_bwd, self.Y = self.mapper.get_tensor(reverseflag=True)
        # Store all the mapper values in a dict for later recovery
        self.mapper_dict = dict()
        self.mapper_dict['seq_length'] = self.mapper.get_seq_length()
        self.mapper_dict['vocab_size'] = self.mapper.get_vocabulary_size()
        self.mapper_dict['rev_map'] = self.mapper.get_reverse_map()
        # Split into test and train data
        self._split_train_tst() 
Example #12
Source File: stacked_bidirectional.py    From deep-summarization with MIT License
def _load_data(self):
        """
        Load the data only if it has not already been checkpointed; otherwise just load the checkpointed data

        :return: None
        """
        self.mapper = Mapper()
        self.mapper.generate_vocabulary(self.review_summary_file)
        self.X_fwd, self.X_bwd, self.Y = self.mapper.get_tensor(reverseflag=True)
        # Store all the mapper values in a dict for later recovery
        self.mapper_dict = dict()
        self.mapper_dict['seq_length'] = self.mapper.get_seq_length()
        self.mapper_dict['vocab_size'] = self.mapper.get_vocabulary_size()
        self.mapper_dict['rev_map'] = self.mapper.get_reverse_map()
        # Split into test and train data
        self._split_train_tst() 
Example #13
Source File: simple.py    From deep-summarization with MIT License
def _load_data(self):
        """
        Load the data only if it has not already been checkpointed; otherwise just load the checkpointed data

        :return: None
        """
        self.mapper = Mapper()
        self.mapper.generate_vocabulary(self.review_summary_file)
        self.X, self.Y = self.mapper.get_tensor()
        # Store all the mapper values in a dict for later recovery
        self.mapper_dict = dict()
        self.mapper_dict['seq_length'] = self.mapper.get_seq_length()
        self.mapper_dict['vocab_size'] = self.mapper.get_vocabulary_size()
        self.mapper_dict['rev_map'] = self.mapper.get_reverse_map()
        # Split into test and train data
        self._split_train_tst() 
Example #14
Source File: stacked_simple.py    From deep-summarization with MIT License
def _load_data(self):
        """
        Load the data only if it has not already been checkpointed; otherwise just load the checkpointed data

        :return: None
        """
        self.mapper = Mapper()
        self.mapper.generate_vocabulary(self.review_summary_file)
        self.X, self.Y = self.mapper.get_tensor()
        # Store all the mapper values in a dict for later recovery
        self.mapper_dict = dict()
        self.mapper_dict['seq_length'] = self.mapper.get_seq_length()
        self.mapper_dict['vocab_size'] = self.mapper.get_vocabulary_size()
        self.mapper_dict['rev_map'] = self.mapper.get_reverse_map()
        # Split into test and train data
        self._split_train_tst() 
Example #15
Source File: utils.py    From TVQAplus with MIT License
def read_json_lines(file_path):
    print("reading data...")
    with open(file_path, "r") as f:
        lines = []
        value_err_cnt = 0
        for l in tqdm(f.readlines()):
            try:
                loaded_l = json.loads(l.strip("\n"))
                lines.append(loaded_l)
            except ValueError as e:
                value_err_cnt += 1
                continue
    return lines


# def load_pickle(file_path):
#     with open(file_path, "r") as f:
#         return pickle.load(f) 
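
Each line of a JSON Lines file is an independent JSON document, which is why the reader above parses line by line and can skip malformed lines individually. A matching writer sketch:

import json

def write_json_lines(objects, file_path):
    """Write one JSON document per line."""
    with open(file_path, "w") as f:
        for obj in objects:
            f.write(json.dumps(obj) + "\n")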
Example #16
Source File: read_ipcluster_ensemble.py    From EXOSIMS with BSD 3-Clause "New" or "Revised" License
def read_all(run_dir):
    """
    Helper function that reads in all pkl files from an ensemble directory
    generated by run_ipcluster_ensemble
    
    Args:
        run_dir (string):
            Absolute path to run directory 
    
    Returns:
        allres (list):
            List of all pkl file contents in run_dir
    """
    
    pklfiles = glob.glob(os.path.join(run_dir,'*.pkl'))

    allres = []

    for counter,f in enumerate(pklfiles):
        print("%d/%d"%(counter,len(pklfiles)))
        with open(f, 'rb') as g:
            res = pickle.load(g, encoding='latin1')
        allres.append(res)
        del res # this avoids memory leaks when loading many pickle files
    return allres 
Example #17
Source File: plotTimeline.py    From EXOSIMS with BSD 3-Clause "New" or "Revised" License
def loadFiles(self,pklfile,outspecfile):
        """ loads pkl and outspec files
        Args:
            pklfile (string) - full filepath to pkl file to load
            outspecfile (string) - full filepath to outspec.json file
        Return:
            DRM (dict) - a dict containing seed, DRM, system
            outspec (dict) - a dict containing input instructions
        """
        try:
            with open(pklfile, 'rb') as f:  # load from cache
                DRM = pickle.load(f)
        except:
            print('Failed to open pklfile %s'%pklfile)
            pass
        try:
            with open(outspecfile, 'rb') as g:
                outspec = json.load(g)
        except:
            print('Failed to open outspecfile %s'%outspecfile)
            pass
        return DRM, outspec 
Example #18
Source File: filecache.py    From cutout with MIT License
def _prune(self):
        entries = self._list_dir()
        if len(entries) > self._threshold:
            now = time()
            for idx, fname in enumerate(entries):
                remove = False
                f = None
                try:
                    try:
                        f = open(fname, 'rb')
                        expires = pickle.load(f)
                        remove = expires <= now or idx % 3 == 0
                    finally:
                        if f is not None:
                            f.close()
                except Exception:
                    pass
                if remove:
                    try:
                        os.remove(fname)
                    except (IOError, OSError):
                        pass 
Example #19
Source File: model.py    From nn_physical_concepts with Apache License 2.0
def from_saved(cls, file_name, change_params={}):
        """
        Initializes a new network from saved data.
        file_name (str): model is loaded from tf_save/file_name.ckpt
        """
        with open(io.tf_save_path + file_name + '.pkl', 'rb') as f:
            params = pickle.load(f)
        params['load_file'] = file_name
        for p in change_params:
            params[p] = change_params[p]
        print(params)
        return cls(**params)

    #########################################
    #        Private helper functions       #
    ######################################### 
Example #20
Source File: yacc.py    From SublimeKSP with GNU General Public License v3.0
def read_pickle(self,filename):
        try:
            import cPickle as pickle
        except ImportError:
            import pickle

        in_f = open(filename,"rb")

        tabversion = pickle.load(in_f)
        if tabversion != __tabversion__:
            raise VersionError("yacc table file version is out of date")
        self.lr_method = pickle.load(in_f)
        signature      = pickle.load(in_f)
        self.lr_action = pickle.load(in_f)
        self.lr_goto   = pickle.load(in_f)
        productions    = pickle.load(in_f)

        self.lr_productions = []
        for p in productions:
            self.lr_productions.append(MiniProduction(*p))

        in_f.close()
        return signature

    # Bind all production function names to callable objects in pdict 
Example #21
Source File: utils.py    From TVQAplus with MIT License
def load_json(file_path):
    with open(file_path, "r") as f:
        return json.load(f) 
Example #22
Source File: filecache.py    From cutout with MIT License
def get(self, key):
        filename = self._get_filename(key)
        try:
            f = open(filename, 'rb')
            try:
                if pickle.load(f) >= time():
                    return pickle.load(f)
            finally:
                f.close()
            os.remove(filename)
        except Exception:
            return None 
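
get() expects two consecutive pickles in each cache file: an expiry timestamp first, then the cached value (the same layout _prune reads in Example #18). A sketch of the matching write side (the function name is hypothetical):

import pickle
from time import time

def cache_set(filename, value, timeout=300):
    """Write the two-pickle layout that get() above expects."""
    with open(filename, 'wb') as f:
        pickle.dump(time() + timeout, f)  # expiry timestamp, read first by get()
        pickle.dump(value, f)             # cached value, read second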
Example #23
Source File: plotting.py    From cat-bbs with MIT License
def load_from_filepath(fp):
        with open(fp, "rb") as f:  # binary mode, required for pickle files
            history = pickle.load(f)
        return history 
Example #24
Source File: read_write.py    From visual_turing_test-tutorial with MIT License
def unpickle_data_provider(path):
    import cPickle as pickle
    with open(path, 'rb') as f:
        dp = pickle.load(f)['data_provider']
    return dp 
Example #25
Source File: read_write.py    From visual_turing_test-tutorial with MIT License
def json_to_model(path):
    """
    Loads a model from the json file.
    """
    import json
    from keras.models import model_from_json
    with open(path, 'r') as f:
        json_model = json.load(f)
    model = model_from_json(json_model)
    return model 
Example #26
Source File: read_write.py    From visual_turing_test-tutorial with MIT License
def unpickle_vocabulary(path):
    import cPickle as pickle
    p_dict = {}
    with open(path, 'rb') as f:
        pickle_load = pickle.load(f)
        p_dict['word2index_x'] = pickle_load['word2index_x']
        p_dict['word2index_y'] = pickle_load['word2index_y']
        p_dict['index2word_x'] = pickle_load['index2word_x']
        p_dict['index2word_y'] = pickle_load['index2word_y']
    return p_dict 
Example #27
Source File: utils.py    From TVQAplus with MIT License
def load_pickle(pickle_file):
    try:
        with open(pickle_file, 'rb') as f:
            pickle_data = pickle.load(f)
    except UnicodeDecodeError as e:
        with open(pickle_file, 'rb') as f:
            pickle_data = pickle.load(f, encoding='latin1')
    except Exception as e:
        print('Unable to load data ', pickle_file, ':', e)
        raise
    return pickle_data 
Example #28
Source File: read_write.py    From visual_turing_test-tutorial with MIT License
def unpickle_model(path):
    import cPickle as pickle
    with open(path, 'rb') as f:
        model = pickle.load(f)['model']
    return model 
Example #29
Source File: util.py    From razzy-spinner with GNU General Public License v3.0
def read_block(self, stream):
        result = []
        for i in range(self.BLOCK_SIZE):
            try: result.append(pickle.load(stream))
            except EOFError: break
        return result 
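
pickle.load() raises EOFError once the stream is exhausted, which is the standard way to read back a sequence of pickled objects of unknown length. The same pattern written as a generator:

import pickle

def iter_pickles(stream):
    """Yield successive pickled objects until the stream is exhausted."""
    while True:
        try:
            yield pickle.load(stream)
        except EOFError:
            return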