Python pickle._Unpickler() Examples

The following are code examples for showing how to use pickle._Unpickler(). They are from open source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: wordnet-randomwalk-python   Author: GreenParachute   File: predict_similarity.py    BSD 2-Clause "Simplified" License 6 votes vote down vote up
def predict(test, out, word2idx1, vecs1, word2idx2=None, vecs2=None, sum_mwes=False, default=False):
    """Write a cosine-similarity score for every tab-separated word pair in *test*.

    Parameters
    ----------
    test : str
        Path to a file with one tab-separated word pair per line (lower-cased here).
    out : str
        Output path; one similarity score is written per input line.
    word2idx1 : dict or None
        Word-to-index map for the first embedding; when None, *vecs1* is assumed
        to be in faruqui-retrofit format and both map and vectors come from
        read_from_retrofit.
    vecs1, vecs2 : str
        Paths to pickled embedding matrices (*vecs2* optional).
    sum_mwes : bool
        Forwarded to get_vector (presumably multi-word-expression handling —
        confirm against get_vector).
    default : bool
        When True, the embedding's mean vector is used as an OOV fallback.
    """
    idx2vec1 = None
    if word2idx1 is not None:
        try:
            with open(vecs1, 'rb') as f:
                idx2vec1 = pickle.load(f)
        except UnicodeDecodeError:  # assume it's a polyglot pretrained embedding
            # Python-2 pickle: re-read with the latin-1 unpickler
            # (standard py2 -> py3 compatibility workaround).
            with open(vecs1, 'rb') as f:
                u = pickle._Unpickler(f)
                u.encoding = 'latin1'
                # bug fix: the loaded result was previously assigned to an
                # unused local `p` and discarded, leaving idx2vec1 = None.
                idx2vec1 = u.load()
    else:  # assume it's from faruqui-retrofit
        word2idx1, idx2vec1 = read_from_retrofit(vecs1)
    if vecs2 is not None:
        with open(vecs2, 'rb') as f:  # bug fix: file handle was leaked before
            idx2vec2 = pickle.load(f)
    else:
        idx2vec2 = None
    def_vec1 = idx2vec1.mean(axis=0) if default else None
    def_vec2 = idx2vec2.mean(axis=0) if default and word2idx2 is not None else None
    with open(test, 'r', encoding='utf-8') as f_in, open(out, 'w', encoding='utf-8') as f_out:
        for line in f_in:
            word_pair = line.strip().lower().split("\t")
            wv1 = get_vector(word_pair[0], word2idx1, idx2vec1, word2idx2, idx2vec2, sum_mwes, def_vec1, def_vec2)
            # NOTE(review): the second call omits the default vectors, so the
            # second word never gets the OOV fallback — confirm this is intended.
            wv2 = get_vector(word_pair[1], word2idx1, idx2vec1, word2idx2, idx2vec2, sum_mwes)
            score = cosine_similarity(wv1.reshape(1, -1), wv2.reshape(1, -1))[0][0]
            f_out.write("{}\n".format(score))
Example 2
Project: py-openmath   Author: OpenMath   File: convert_pickle.py    MIT License 5 votes vote down vote up
def __init__(self, file, converter):
        """Initialize the unpickler over *file* and remember *converter*.

        NOTE(review): `_Unpickler` is imported elsewhere in the original
        file; this appears to be a method of an _Unpickler subclass whose
        class header is not visible here.
        """
        _Unpickler.__init__(self, file)
        self._converter = converter
    
    # NOTE(review): original comment read "we need to do this twice to
    # enable stuff" — unclear; confirm against the enclosing class.
Example 3
Project: vmf_vae_nlp   Author: jiacheng-xu   File: helper.py    MIT License 5 votes vote down vote up
def read_bin_file(fname):
    """Unpickle *fname*, decoding Python-2 byte strings as latin-1."""
    with open(fname, 'rb') as handle:
        unpickler = pkl._Unpickler(handle)
        unpickler.encoding = 'latin1'
        return unpickler.load()
Example 4
Project: MetaOptNet   Author: kjunelee   File: tiered_imagenet.py    Apache License 2.0 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 5
Project: MetaOptNet   Author: kjunelee   File: CIFAR_FS.py    Apache License 2.0 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 6
Project: MetaOptNet   Author: kjunelee   File: mini_imagenet.py    Apache License 2.0 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 7
Project: MetaOptNet   Author: kjunelee   File: FC100.py    Apache License 2.0 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 8
Project: FEAT   Author: Sha-Lab   File: tiered_imagenet.py    MIT License 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 9
Project: Conditional-Batch-Norm   Author: ap229997   File: file_handlers.py    MIT License 5 votes vote down vote up
def pickle_loader(file_path, gz=False):
    """Unpickle *file_path*, transparently handling gzip-compressed files.

    On Python 3 a latin-1 `_Unpickler` is used so that pickles written
    under Python 2 can still be read; on Python 2 a plain load is done.
    """
    open_fct = gzip.open if gz else open

    with open_fct(file_path, "rb") as f:
        if sys.version_info > (3, 0):
            # Workaround to load pickle data python2 -> python3.
            unpickler = pickle._Unpickler(f)
            unpickler.encoding = 'latin1'
            return unpickler.load()
        return pickle.load(f)
Example 10
Project: DSD-SATN   Author: Arthur151   File: util.py    Apache License 2.0 5 votes vote down vote up
def read_pkl_coding(name = '../data/info.pkl'):
    """Load a (possibly Python-2) pickle from *name* via latin-1 decoding."""
    with open(name, 'rb') as fh:
        unpickler = pickle._Unpickler(fh)
        unpickler.encoding = 'latin1'
        loaded = unpickler.load()
    return loaded
Example 11
Project: AdversarialQuerying   Author: goldblum   File: tiered_imagenet.py    MIT License 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 12
Project: AdversarialQuerying   Author: goldblum   File: CIFAR_FS.py    MIT License 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 13
Project: AdversarialQuerying   Author: goldblum   File: mini_imagenet.py    MIT License 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 14
Project: AdversarialQuerying   Author: goldblum   File: FC100.py    MIT License 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 15
Project: MHE   Author: wy1iu   File: train.py    MIT License 5 votes vote down vote up
def unpickle(file):
    """Unpickle *file* using latin-1 decoding (Python-2 pickle compatibility).

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    with open(file, 'rb') as fo:
        u = pickle._Unpickler(fo)
        u.encoding = 'latin1'
        # fix: local was named `dict`, shadowing the builtin
        data = u.load()
    return data
Example 16
Project: SSL-FEW-SHOT   Author: phecy   File: tiered_imagenet.py    MIT License 5 votes vote down vote up
def load_data(file):
    """Load a pickle file, retrying with latin-1 decoding for Python-2 pickles.

    Parameters
    ----------
    file : str
        Path to the pickle file.

    Returns
    -------
    The unpickled object.
    """
    try:
        with open(file, 'rb') as fo:
            return pickle.load(fo)
    # bug fix: was a bare `except:` that swallowed every error (including
    # KeyboardInterrupt); only the py2-pickle decode failure is retried.
    except UnicodeDecodeError:
        with open(file, 'rb') as f:
            # latin-1 unpickler recovers 8-bit str data from Python-2 pickles.
            u = pickle._Unpickler(f)
            u.encoding = 'latin1'
            return u.load()
Example 17
Project: spatial_plots   Author: rhyswhitley   File: grid_spatial_plot.py    Creative Commons Zero v1.0 Universal 5 votes vote down vote up
def pickle3_load(bin_file):
    """
    Load *bin_file* with a latin-1 `_Unpickler` — workaround for reading
    Python-2 binary pickles under Python 3.
    """
    with open(bin_file, 'rb') as stream:
        loader = pickle._Unpickler(stream)
        loader.encoding = 'latin1'
        return loader.load()

# -------------------------------------------------------------------------------- 
Example 18
Project: spatial_plots   Author: rhyswhitley   File: climate_space.py    Creative Commons Zero v1.0 Universal 5 votes vote down vote up
def pickle3_load(bin_file):
    """
    Read the pickle at *bin_file*, forcing latin-1 decoding so binary
    values written by Python 2 unpack correctly under Python 3.
    """
    with open(bin_file, 'rb') as source:
        reader = pickle._Unpickler(source)
        reader.encoding = 'latin1'
        result = reader.load()
    return result

# -------------------------------------------------------------------------------- 
Example 19
Project: CheML   Author: CheML   File: datasets.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def _open_pickle(filename):
    """Unpickle *filename* and wrap the resulting mapping in a Bunch.

    Tries the Python-3 latin-1 `_Unpickler` first (py2/py3 numpy-array
    compatibility hack, see
    http://stackoverflow.com/questions/11305790/pickle-incompatability-of-numpy-arrays-between-python-2-and-3).
    Presumably `pickle._Unpickler` is missing under Python 2, so the
    AttributeError fallback does a plain load — confirm; needs to be
    extensively tested between versions.
    """
    with open(filename, 'rb') as f:
        try:
            unpickler = pickle._Unpickler(f)
            unpickler.encoding = 'latin1'
            payload = unpickler.load()
        except AttributeError:
            payload = pickle.load(f)
    return Bunch(**payload)
Example 20
Project: hat   Author: joansj   File: mixture.py    MIT License 5 votes vote down vote up
def __init__(self, root, train=True,transform=None, download=False):
        """Load the facescrub_100 train or test split from pickled files under *root*.

        Downloads and unpacks the zip first when *download* is True and the
        archive is missing; raises RuntimeError when it is missing and
        download is False. Sets self.data (uint8 features) and self.labels
        (uint8 labels).
        """
        self.root = os.path.expanduser(root)
        self.transform = transform
        self.filename = "facescrub_100.zip"
        self.url = "https://github.com/nkundiushuti/facescrub_subset/blob/master/data/facescrub_100.zip?raw=true"

        fpath=os.path.join(root,self.filename)
        if not os.path.isfile(fpath):
            if not download:
               raise RuntimeError('Dataset not found. You can use download=True to download it')
            else:
                print('Downloading from '+self.url)
                # NOTE(review): self.download() is defined elsewhere in the class.
                self.download()

        training_file = 'facescrub_train_100.pkl'
        testing_file = 'facescrub_test_100.pkl'
        if train:
            with open(os.path.join(root,training_file),'rb') as f:
                # Earlier latin-1 _Unpickler workaround, kept for reference:
                # u = pickle._Unpickler(f)
                # u.encoding = 'latin1'
                # train  = u.load()
                train = pickle.load(f)
            # presumably 'features' is an image array and 'labels' class ids
            # — confirm against the pickle's producer.
            self.data = train['features'].astype(np.uint8)
            self.labels = train['labels'].astype(np.uint8)
            """
            print(self.data.shape)
            print(self.data.mean())
            print(self.data.std())
            print(self.labels.max())
            #"""
        else:
            with open(os.path.join(root,testing_file),'rb') as f:
                # Earlier latin-1 _Unpickler workaround, kept for reference:
                # u = pickle._Unpickler(f)
                # u.encoding = 'latin1'
                # test  = u.load()
                test = pickle.load(f)

            self.data = test['features'].astype(np.uint8)
            self.labels = test['labels'].astype(np.uint8) 
Example 21
Project: hat   Author: joansj   File: mixture.py    MIT License 5 votes vote down vote up
def __init__(self, root, train=True,transform=None, download=False):
        """Load the notMNIST train or test split from pickled files under *root*.

        Downloads and unpacks the zip first when *download* is True and the
        archive is missing; raises RuntimeError when it is missing and
        download is False. Sets self.data (uint8 features) and self.labels
        (uint8 labels).
        """
        self.root = os.path.expanduser(root)
        self.transform = transform
        self.filename = "notmnist.zip"
        self.url = "https://github.com/nkundiushuti/notmnist_convert/blob/master/notmnist.zip?raw=true"

        fpath = os.path.join(root, self.filename)
        if not os.path.isfile(fpath):
            if not download:
               raise RuntimeError('Dataset not found. You can use download=True to download it')
            else:
                print('Downloading from '+self.url)
                # NOTE(review): self.download() is defined elsewhere in the class.
                self.download()

        training_file = 'notmnist_train.pkl'
        testing_file = 'notmnist_test.pkl'
        if train:
            with open(os.path.join(root,training_file),'rb') as f:
                # Earlier latin-1 _Unpickler workaround, kept for reference:
                # u = pickle._Unpickler(f)
                # u.encoding = 'latin1'
                # train  = u.load()
                train = pickle.load(f)
            # presumably 'features' is an image array and 'labels' class ids
            # — confirm against the pickle's producer.
            self.data = train['features'].astype(np.uint8)
            self.labels = train['labels'].astype(np.uint8)
        else:
            with open(os.path.join(root,testing_file),'rb') as f:
                # Earlier latin-1 _Unpickler workaround, kept for reference:
                # u = pickle._Unpickler(f)
                # u.encoding = 'latin1'
                # test  = u.load()
                test = pickle.load(f)

            self.data = test['features'].astype(np.uint8)
            self.labels = test['labels'].astype(np.uint8) 
Example 22
Project: RWR-GAE   Author: MysteryVaibhav   File: utils.py    MIT License 4 votes vote down vote up
def load_data(dataset):
    """Load a planetoid-style citation dataset (files data/ind.<dataset>.*).

    Reads the pickled pieces x, y, tx, ty, allx, ally, graph, stitches the
    test rows back into their original order, and returns
    (adj, features, y_test, tx, ty, test_mask, labels_argmax).
    NOTE(review): y_train/y_val/train_mask/val_mask are computed but not
    returned — presumably leftover from a fuller variant; confirm callers.
    """
    # load the data: x, tx, allx, graph
    names = ['x', 'y', 'tx', 'ty', 'allx', 'ally', 'graph']
    objects = []
    for i in range(len(names)):
        '''
        fix Pickle incompatibility of numpy arrays between Python 2 and 3
        https://stackoverflow.com/questions/11305790/pickle-incompatibility-of-numpy-arrays-between-python-2-and-3
        '''
        with open("data/ind.{}.{}".format(dataset, names[i]), 'rb') as rf:
            # latin-1 unpickler: py2 -> py3 pickle compatibility workaround
            u = pkl._Unpickler(rf)
            u.encoding = 'latin1'
            cur_data = u.load()
            objects.append(cur_data)
        # objects.append(
        #     pkl.load(open("data/ind.{}.{}".format(dataset, names[i]), 'rb')))
    x, y, tx, ty, allx, ally, graph = tuple(objects)
    # Test indices are stored shuffled on disk; recover the sorted order.
    test_idx_reorder = parse_index_file(
        "data/ind.{}.test.index".format(dataset))
    test_idx_range = np.sort(test_idx_reorder)

    if dataset == 'citeseer':
        # Fix citeseer dataset (there are some isolated nodes in the graph)
        # Find isolated nodes, add them as zero-vecs into the right position
        test_idx_range_full = range(
            min(test_idx_reorder), max(test_idx_reorder) + 1)
        tx_extended = sp.lil_matrix((len(test_idx_range_full), x.shape[1]))
        tx_extended[test_idx_range - min(test_idx_range), :] = tx
        tx = tx_extended
        ty_extended = np.zeros((len(test_idx_range_full), y.shape[1]))
        ty_extended[test_idx_range - min(test_idx_range), :] = ty
        ty = ty_extended

    # Reassemble the full feature matrix and restore original row order
    # for the test nodes.
    features = sp.vstack((allx, tx)).tolil()
    features[test_idx_reorder, :] = features[test_idx_range, :]
    features = torch.FloatTensor(np.array(features.todense()))
    adj = nx.adjacency_matrix(nx.from_dict_of_lists(graph))

    labels = np.vstack((ally, ty))
    labels[test_idx_reorder, :] = labels[test_idx_range, :]

    # Train = first len(y) nodes, val = next 500, test = the reordered range.
    idx_test = test_idx_range.tolist()
    idx_train = range(len(y))
    idx_val = range(len(y), len(y) + 500)

    train_mask = sample_mask(idx_train, labels.shape[0])
    val_mask = sample_mask(idx_val, labels.shape[0])
    test_mask = sample_mask(idx_test, labels.shape[0])

    # One-hot label matrices restricted to each split's rows.
    y_train = np.zeros(labels.shape)
    y_val = np.zeros(labels.shape)
    y_test = np.zeros(labels.shape)
    y_train[train_mask, :] = labels[train_mask, :]
    y_val[val_mask, :] = labels[val_mask, :]
    y_test[test_mask, :] = labels[test_mask, :]

    return adj, features, y_test, tx, ty, test_mask, np.argmax(labels,1) 
Example 23
Project: geo-scattering-graph-data   Author: matthew-hirn   File: utilities.py    Apache License 2.0 4 votes vote down vote up
def generate_graph(graph_name='dataset/imdb_comedy_romance_scifi.graph'):
    """Load the IMDB graph dataset and compute per-node structural features.

    Returns (A, rX, Y): dense adjacency matrices, per-graph node-feature
    arrays (eccentricity, degree, clustering coefficient), and integer
    class labels.
    NOTE(review): only the IMDB path sets maxval; any other *graph_name*
    would hit an undefined `maxval` below, and the open() below hard-codes
    the IMDB path regardless of *graph_name* — confirm intended use.
    """
    if graph_name == 'dataset/imdb_comedy_romance_scifi.graph':
        maxval = 3
        n_classes = 3  # NOTE(review): unused in this function
    with open('dataset/imdb_comedy_romance_scifi.graph','rb') as f:
        # latin-1 unpickler: py2 -> py3 pickle compatibility workaround
        new_f = pk._Unpickler(f)
        new_f.encoding = 'latin1'
        raw = new_f.load()
        
        n_graphs = len(raw['graph'])
        
        graph_list = []
        
        A = []
        rX = []
        Y = []
        
        
        for i in range(n_graphs):
            if i%200 == 0:
                print(i)  # progress indicator
            class_label = int(raw['labels'][i])
            Y.append(class_label)
            
            # create graph
            g = raw['graph'][i]
            n_nodes = len(g)
            
            # feature matrix: one row per node, maxval structural features
            x = np.zeros((n_nodes, maxval), dtype='float32')
            
            G = nx.Graph()
            
            # presumably each node's metadata carries a 'neighbors' list —
            # confirm against the dataset format.
            for node, meta in g.items():
                G.add_node(node)
                for neighbor in meta['neighbors']:
                    G.add_edge(node,neighbor)
                    
            for j in range(n_nodes):
                x[j,0] = nx.eccentricity(G,j)
                x[j,1] = nx.degree(G,j)
                x[j,2] = nx.clustering(G,j)
                
            graph_list.append(G)
            
            A.append(nx.adjacency_matrix(G,np.arange(n_nodes)).todense())
            rX.append(x)
    return A,rX,Y 
Example 24
Project: geo-scattering-graph-data   Author: matthew-hirn   File: utilities.py    Apache License 2.0 4 votes vote down vote up
def parse_graph_data(graph_name='dataset/enzymes.graph'):
    """Load a pickled graph-classification dataset and one-hot node labels.

    Returns (A, rX, Y): dense adjacency matrices, one-hot node-label
    feature arrays (maxval columns, set per known dataset), and class
    labels.
    NOTE(review): an unrecognized *graph_name* leaves `maxval` undefined
    and raises NameError at the x allocation below — confirm intended use.
    """
    if graph_name == 'nci1.graph':
        maxval = 37
        n_classes = 2
    elif graph_name == 'nci109.graph':
        maxval = 38
        n_classes = 2
    elif graph_name == 'mutag.graph':
        maxval = 7
        n_classes = 2
    elif graph_name == 'ptc.graph':
        maxval = 22
        n_classes = 2
    elif graph_name == 'dataset/enzymes.graph':
        maxval = 3
        n_classes = 6
    # NOTE(review): n_classes is unused in this function.
    
    with open(graph_name,'rb') as f:
        # latin-1 unpickler: py2 -> py3 pickle compatibility workaround
        new_f = pk._Unpickler(f)
        new_f.encoding = 'latin1'
        raw = new_f.load()
        
        n_graphs = len(raw['graph'])
        
        A = []
        rX = []
        Y = []
        
        for i in range(n_graphs):
            # Set label
            class_label = raw['labels'][i]
            
            Y.append(class_label)
            
            # Parse graph
            G = raw['graph'][i]
            
            n_nodes = len(G)
            
            a = np.zeros((n_nodes, n_nodes), dtype='float32')
            x = np.zeros((n_nodes, maxval), dtype='float32')
            
            # presumably meta['label'] holds 1-based node-label ids and
            # meta['neighbors'] an adjacency list — confirm dataset format.
            for node, meta in G.items():
                label = meta['label'][0] - 1
                x[node, label] = 1
                for neighbor in meta['neighbors']:
                    a[node, neighbor] = 1
            
            A.append(a)
            rX.append(x)

    return A, rX, Y