Python pickle._Unpickler() Examples
The following are 21 code examples showing how to use pickle._Unpickler(). The examples are extracted from open source projects; the project, author, source file, and license are listed above each example. You may also want to check out the other available functions and classes of the pickle module.
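Nearly all of the examples below use the same Python 2 to Python 3 compatibility pattern: instead of calling pickle.load() directly, they instantiate the pure-Python pickle._Unpickler, set its encoding attribute to 'latin1', and then call load(), so that byte strings written by Python 2 can be decoded without errors. The minimal sketch below is not taken from any of the projects listed here; the function name and file name are placeholders for illustration only.

import pickle

def load_legacy_pickle(path):
    # Load a pickle written by Python 2 under Python 3.
    # pickle._Unpickler is the pure-Python unpickler; it is a private API,
    # but it allows the encoding attribute to be set after construction.
    with open(path, 'rb') as f:
        u = pickle._Unpickler(f)
        u.encoding = 'latin1'   # decode Python 2 str objects as latin-1
        return u.load()

data = load_legacy_pickle('legacy_data.pkl')  # 'legacy_data.pkl' is a placeholder path

The documented equivalent in Python 3 is pickle.load(f, encoding='latin1'); the examples that follow reach for the private unpickler instead, typically inside Python 2/3 branching code.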
Example 1
Project: ebonite Author: zyfra File: wrapper.py License: Apache License 2.0 | 6 votes |
def _get_non_pickle_io(self, obj):
    """
    Checks if obj has non-Pickle IO and returns it

    :param obj: object to check
    :return: non-Pickle :class:`ModelIO` instance or None
    """
    # avoid calling heavy analyzer machinery for "unknown" objects:
    # they are either non-models or callables
    if not isinstance(obj, self.known_types):
        return None

    # we couldn't import analyzer at top as it leads to circular import failure
    from ebonite.core.analyzer.model import ModelAnalyzer
    try:
        io = ModelAnalyzer._find_hook(obj)._wrapper_factory().io
        return None if isinstance(io, PickleModelIO) else io
    except ValueError:
        # non-model object
        return None

# We couldn't use `EboniteUnpickler` here as it (in fact `dill`) subclasses `Unpickler`
# `Unpickler`, unlike `_Unpickler`, doesn't support `load_build` overriding
Example 2
Project: self-ensemble-visual-domain-adapt-photo Author: Britefury File: network_architectures.py License: MIT License | 6 votes |
def _unpickle_from_path(path):
    # Oh... the joys of Py2 vs Py3
    with open(path, 'rb') as f:
        if sys.version_info[0] == 2:
            return pickle.load(f)
        else:
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()

#
#
# CUSTOM RESNET CLASS
#
#
Example 3
Project: vmf_vae_nlp Author: jiacheng-xu File: helper.py License: MIT License | 5 votes |
def read_bin_file(fname):
    with open(fname, 'rb') as f:
        u = pkl._Unpickler(f)
        u.encoding = 'latin1'
        return u.load()
Example 4
Project: MetaOptNet Author: kjunelee File: tiered_imagenet.py License: Apache License 2.0 | 5 votes |
def load_data(file):
    try:
        with open(file, 'rb') as fo:
            data = pickle.load(fo)
        return data
    except:
        with open(file, 'rb') as f:
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            data = u.load()
        return data
Example 5
Project: MetaOptNet Author: kjunelee File: CIFAR_FS.py License: Apache License 2.0 | 5 votes |
def load_data(file):
    try:
        with open(file, 'rb') as fo:
            data = pickle.load(fo)
        return data
    except:
        with open(file, 'rb') as f:
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            data = u.load()
        return data
Example 6
Project: MetaOptNet Author: kjunelee File: mini_imagenet.py License: Apache License 2.0 | 5 votes |
def load_data(file):
    try:
        with open(file, 'rb') as fo:
            data = pickle.load(fo)
        return data
    except:
        with open(file, 'rb') as f:
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            data = u.load()
        return data
Example 7
Project: MetaOptNet Author: kjunelee File: FC100.py License: Apache License 2.0 | 5 votes |
def load_data(file):
    try:
        with open(file, 'rb') as fo:
            data = pickle.load(fo)
        return data
    except:
        with open(file, 'rb') as f:
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            data = u.load()
        return data
Example 8
Project: UCB Author: SaynaEbrahimi File: mixture.py License: MIT License | 5 votes |
def __init__(self, root, train=True, transform=None, download=False):
    self.root = os.path.expanduser(root)
    self.transform = transform
    self.filename = "facescrub_100.zip"
    self.url = "https://github.com/nkundiushuti/facescrub_subset/blob/master/data/facescrub_100.zip?raw=true"

    fpath = os.path.join(root, self.filename)
    if not os.path.isfile(fpath):
        if not download:
            raise RuntimeError('Dataset not found. You can use download=True to download it')
        else:
            print('Downloading from ' + self.url)
            self.download()

    training_file = 'facescrub_train_100.pkl'
    testing_file = 'facescrub_test_100.pkl'

    if train:
        with open(os.path.join(root, training_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # train = u.load()
            train = pickle.load(f)
        self.data = train['features'].astype(np.uint8)
        self.labels = train['labels'].astype(np.uint8)
        """
        print(self.data.shape)
        print(self.data.mean())
        print(self.data.std())
        print(self.labels.max())
        #"""
    else:
        with open(os.path.join(root, testing_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # test = u.load()
            test = pickle.load(f)
        self.data = test['features'].astype(np.uint8)
        self.labels = test['labels'].astype(np.uint8)
Example 9
Project: UCB Author: SaynaEbrahimi File: mixture.py License: MIT License | 5 votes |
def __init__(self, root, train=True, transform=None, download=False):
    self.root = os.path.expanduser(root)
    self.transform = transform
    self.filename = "notmnist.zip"
    self.url = "https://github.com/nkundiushuti/notmnist_convert/blob/master/notmnist.zip?raw=true"

    fpath = os.path.join(root, self.filename)
    if not os.path.isfile(fpath):
        if not download:
            raise RuntimeError('Dataset not found. You can use download=True to download it')
        else:
            print('Downloading from ' + self.url)
            self.download()

    training_file = 'notmnist_train.pkl'
    testing_file = 'notmnist_test.pkl'

    if train:
        with open(os.path.join(root, training_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # train = u.load()
            train = pickle.load(f)
        self.data = train['features'].astype(np.uint8)
        self.labels = train['labels'].astype(np.uint8)
    else:
        with open(os.path.join(root, testing_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # test = u.load()
            test = pickle.load(f)
        self.data = test['features'].astype(np.uint8)
        self.labels = test['labels'].astype(np.uint8)
Example 10
Project: FEAT Author: Sha-Lab File: tiered_imagenet.py License: MIT License | 5 votes |
def load_data(file):
    try:
        with open(file, 'rb') as fo:
            data = pickle.load(fo)
        return data
    except:
        with open(file, 'rb') as f:
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            data = u.load()
        return data
Example 11
Project: Conditional-Batch-Norm Author: ap229997 File: file_handlers.py License: MIT License | 5 votes |
def pickle_loader(file_path, gz=False):
    open_fct = open
    if gz:
        open_fct = gzip.open

    with open_fct(file_path, "rb") as f:
        if sys.version_info > (3, 0):
            # Workaround to load pickle data python2 -> python3
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
        else:
            return pickle.load(f)
Example 12
Project: MHE Author: wy1iu File: train.py License: MIT License | 5 votes |
def unpickle(file):
    with open(file, 'rb') as fo:
        u = pickle._Unpickler(fo)
        u.encoding = 'latin1'
        dict = u.load()
    return dict
Example 13
Project: SSL-FEW-SHOT Author: phecy File: tiered_imagenet.py License: MIT License | 5 votes |
def load_data(file):
    try:
        with open(file, 'rb') as fo:
            data = pickle.load(fo)
        return data
    except:
        with open(file, 'rb') as f:
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            data = u.load()
        return data
Example 14
Project: DSD-SATN Author: JDAI-CV File: util.py License: Apache License 2.0 | 5 votes |
def read_pkl_coding(name='../data/info.pkl'):
    with open(name, 'rb') as f:
        u = pickle._Unpickler(f)
        u.encoding = 'latin1'
        p = u.load()
    return p
Example 15
Project: hat Author: joansj File: mixture.py License: MIT License | 5 votes |
def __init__(self, root, train=True, transform=None, download=False):
    self.root = os.path.expanduser(root)
    self.transform = transform
    self.filename = "facescrub_100.zip"
    self.url = "https://github.com/nkundiushuti/facescrub_subset/blob/master/data/facescrub_100.zip?raw=true"

    fpath = os.path.join(root, self.filename)
    if not os.path.isfile(fpath):
        if not download:
            raise RuntimeError('Dataset not found. You can use download=True to download it')
        else:
            print('Downloading from ' + self.url)
            self.download()

    training_file = 'facescrub_train_100.pkl'
    testing_file = 'facescrub_test_100.pkl'

    if train:
        with open(os.path.join(root, training_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # train = u.load()
            train = pickle.load(f)
        self.data = train['features'].astype(np.uint8)
        self.labels = train['labels'].astype(np.uint8)
        """
        print(self.data.shape)
        print(self.data.mean())
        print(self.data.std())
        print(self.labels.max())
        #"""
    else:
        with open(os.path.join(root, testing_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # test = u.load()
            test = pickle.load(f)
        self.data = test['features'].astype(np.uint8)
        self.labels = test['labels'].astype(np.uint8)
Example 16
Project: hat Author: joansj File: mixture.py License: MIT License | 5 votes |
def __init__(self, root, train=True, transform=None, download=False):
    self.root = os.path.expanduser(root)
    self.transform = transform
    self.filename = "notmnist.zip"
    self.url = "https://github.com/nkundiushuti/notmnist_convert/blob/master/notmnist.zip?raw=true"

    fpath = os.path.join(root, self.filename)
    if not os.path.isfile(fpath):
        if not download:
            raise RuntimeError('Dataset not found. You can use download=True to download it')
        else:
            print('Downloading from ' + self.url)
            self.download()

    training_file = 'notmnist_train.pkl'
    testing_file = 'notmnist_test.pkl'

    if train:
        with open(os.path.join(root, training_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # train = u.load()
            train = pickle.load(f)
        self.data = train['features'].astype(np.uint8)
        self.labels = train['labels'].astype(np.uint8)
    else:
        with open(os.path.join(root, testing_file), 'rb') as f:
            # u = pickle._Unpickler(f)
            # u.encoding = 'latin1'
            # test = u.load()
            test = pickle.load(f)
        self.data = test['features'].astype(np.uint8)
        self.labels = test['labels'].astype(np.uint8)
Example 17
Project: Aurora Author: upul File: mnist.py License: Apache License 2.0 | 5 votes |
def _load_data(self):
    script_dir = os.path.dirname(__file__)
    mnist_file = os.path.join(os.path.join(script_dir, 'data'), 'mnist.pkl.gz')
    with gzip.open(mnist_file, 'rb') as mnist_file:
        u = pickle._Unpickler(mnist_file)
        u.encoding = 'latin1'
        train, val, test = u.load()
    return train, val, test
Example 18
Project: learn2learn Author: learnables File: fc100.py License: MIT License | 5 votes |
def __init__(self, root, mode='train', transform=None, target_transform=None, download=False):
    super(FC100, self).__init__()
    self.root = os.path.expanduser(root)
    os.makedirs(self.root, exist_ok=True)
    self.transform = transform
    self.target_transform = target_transform
    if mode not in ['train', 'validation', 'test']:
        raise ValueError('mode must be train, validation, or test.')
    self.mode = mode
    self._bookkeeping_path = os.path.join(self.root, 'fc100-bookkeeping-' + mode + '.pkl')

    if not self._check_exists() and download:
        self.download()

    short_mode = 'val' if mode == 'validation' else mode
    fc100_path = os.path.join(self.root, 'FC100_' + short_mode + '.pickle')
    with open(fc100_path, 'rb') as f:
        u = pickle._Unpickler(f)
        u.encoding = 'latin1'
        archive = u.load()
    self.images = archive['data']
    self.labels = archive['labels']
Example 19
Project: Searching-for-activation-functions Author: Neoanarika File: dataset.py License: MIT License | 5 votes |
def load_data(self, file_name):
    with open(file_name, 'rb') as file:
        unpickler = pickle._Unpickler(file)
        unpickler.encoding = 'latin1'
        contents = unpickler.load()
        X, Y = np.asarray(contents['data'], dtype=np.float32), np.asarray(contents['labels'])
    one_hot = np.zeros((Y.size, Y.max() + 1))
    one_hot[np.arange(Y.size), Y] = 1
    return X, one_hot
Example 20
Project: gae-pytorch Author: zfjsail File: utils.py License: MIT License | 5 votes |
def load_data(dataset):
    # load the data: x, tx, allx, graph
    names = ['x', 'tx', 'allx', 'graph']
    objects = []
    for i in range(len(names)):
        '''
        fix Pickle incompatibility of numpy arrays between Python 2 and 3
        https://stackoverflow.com/questions/11305790/pickle-incompatibility-of-numpy-arrays-between-python-2-and-3
        '''
        with open("data/ind.{}.{}".format(dataset, names[i]), 'rb') as rf:
            u = pkl._Unpickler(rf)
            u.encoding = 'latin1'
            cur_data = u.load()
            objects.append(cur_data)
        # objects.append(
        #     pkl.load(open("data/ind.{}.{}".format(dataset, names[i]), 'rb')))
    x, tx, allx, graph = tuple(objects)
    test_idx_reorder = parse_index_file(
        "data/ind.{}.test.index".format(dataset))
    test_idx_range = np.sort(test_idx_reorder)

    if dataset == 'citeseer':
        # Fix citeseer dataset (there are some isolated nodes in the graph)
        # Find isolated nodes, add them as zero-vecs into the right position
        test_idx_range_full = range(
            min(test_idx_reorder), max(test_idx_reorder) + 1)
        tx_extended = sp.lil_matrix((len(test_idx_range_full), x.shape[1]))
        tx_extended[test_idx_range - min(test_idx_range), :] = tx
        tx = tx_extended

    features = sp.vstack((allx, tx)).tolil()
    features[test_idx_reorder, :] = features[test_idx_range, :]
    features = torch.FloatTensor(np.array(features.todense()))
    adj = nx.adjacency_matrix(nx.from_dict_of_lists(graph))

    return adj, features
Example 21
Project: theanet Author: rakeshvar File: mnist.py License: Apache License 2.0 | 5 votes |
def _load_mnist():
    data_dir = os.path.dirname(os.path.abspath(__file__))
    data_file = os.path.join(data_dir, "mnist.pkl.gz")
    print("Looking for data file: ", data_file)

    if not os.path.isfile(data_file):
        import urllib.request as url
        origin = 'http://www.iro.umontreal.ca/~lisa/deep/data/mnist/mnist.pkl.gz'
        print('Downloading data from: ', origin)
        url.urlretrieve(origin, data_file)

    print('Loading MNIST data')
    f = gzip.open(data_file, 'rb')
    u = pickle._Unpickler(f)
    u.encoding = 'latin1'
    train_set, valid_set, test_set = u.load()
    f.close()

    train_x, train_y = train_set
    valid_x, valid_y = valid_set
    testing_x, testing_y = test_set

    training_x = np.vstack((train_x, valid_x))
    training_y = np.concatenate((train_y, valid_y))

    training_x = training_x.reshape((training_x.shape[0], 1, 28, 28))
    testing_x = testing_x.reshape((testing_x.shape[0], 1, 28, 28))

    return training_x, training_y, testing_x, testing_y