Python pickle.load() Examples

The following code examples show how to use pickle.load(). They are extracted from open source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 7 votes vote down vote up
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation: ``getattr`` raises AttributeError if either
        # required method is missing.
        for required in ('load', 'dump'):
            getattr(serializer, required)

        self._serializers[name] = serializer
Example 2
Project: model-api-sequence   Author: evandowning   File: evaluation.py    GNU General Public License v3.0 6 votes vote down vote up
def sequence_generator(fn,n):
    """Read ``n`` pickled ``(x, y)`` records from file ``fn`` and stack them.

    Each record holds a feature array ``x`` and target(s) ``y``. Features
    are stacked row-wise with ``np.vstack``; targets are appended.

    :param fn: path to a file containing ``n`` consecutive pickles
    :param n: number of records to read
    :returns: ``(xSet, ySet)`` numpy arrays (empty arrays when ``n == 0``)
    """
    xSet = np.array([])
    ySet = np.array([])

    # Read in sample's sequences.
    # (Original looped `for e in enumerate(range(n))`, creating useless
    # (index, index) tuples; a plain range suffices. The unused `num`,
    # `x` and `y` pre-initialisations were removed.)
    with open(fn, 'rb') as fr:
        for _ in range(n):
            x, y = pkl.load(fr)

            if len(xSet) == 0:
                # First record: adopt arrays as-is.
                xSet = x
                ySet = y
            else:
                xSet = np.vstack([xSet, x])
                ySet = np.append(ySet, y)

    return xSet, ySet
Example 3
Project: model-api-sequence   Author: evandowning   File: color.py    GNU General Public License v3.0 6 votes vote down vote up
def extract(folder,fn,num,width):
    """Read `num` pickled (sequence, label) records for sample `fn` and
    render its API-call sequence as a `width`-pixel-wide RGB image array.

    Returns ``(fn, data, label)`` where ``data`` is an int8 array with
    ``width * 3`` columns (3 channels per pixel).
    """
    label = None
    seq = list()

    # Read in the sample's sequence of records.
    path = os.path.join(folder, fn + '.pkl')
    with open(path, 'rb') as fr:
        for _ in range(num):
            record = pkl.load(fr)
            label = record[1]
            # Replace API call integers with pixel values.
            seq += [api_md5(str(api)) for api in record[0]]

    # Pad so the sequence fills whole rows of `width` RGB pixels.
    row = width * 3
    remainder = len(seq) % row
    if remainder != 0:
        seq += [api_md5('0')] * (row - remainder)

    # Reshape into (rows, width*3) and narrow to int8.
    data = np.array(seq).reshape(-1, row).astype(np.int8)

    return fn, data, label
Example 4
Project: pyblish-win   Author: pyblish   File: trace.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def __init__(self, counts=None, calledfuncs=None, infile=None,
                 callers=None, outfile=None):
        # NOTE: Python 2 code (`except ..., err` tuple syntax and the
        # `print >>` statement below); this cannot run under Python 3.
        #
        # Aggregates trace results. Each mapping argument defaults to an
        # empty dict and is copied so the caller's dicts are not shared.
        self.counts = counts
        if self.counts is None:
            self.counts = {}
        self.counter = self.counts.copy() # map (filename, lineno) to count
        self.calledfuncs = calledfuncs
        if self.calledfuncs is None:
            self.calledfuncs = {}
        self.calledfuncs = self.calledfuncs.copy()
        self.callers = callers
        if self.callers is None:
            self.callers = {}
        self.callers = self.callers.copy()
        self.infile = infile
        self.outfile = outfile
        if self.infile:
            # Try to merge existing counts file.
            # The pickle is expected to hold a (counts, calledfuncs,
            # callers) triple written by a previous run.
            try:
                counts, calledfuncs, callers = \
                        pickle.load(open(self.infile, 'rb'))
                self.update(self.__class__(counts, calledfuncs, callers))
            except (IOError, EOFError, ValueError), err:
                # A missing/corrupt counts file is not fatal: warn and start fresh.
                print >> sys.stderr, ("Skipping counts file %r: %s"
                                      % (self.infile, err))
Example 5
Project: pyblish-win   Author: pyblish   File: test_signal.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def test_itimer_virtual(self):
        # Verify that ITIMER_VIRTUAL fires SIGVTALRM after ~0.3s of
        # *virtual* (CPU) time and that the handler eventually disarms it.
        self.itimer = signal.ITIMER_VIRTUAL
        signal.signal(signal.SIGVTALRM, self.sig_vtalrm)
        signal.setitimer(self.itimer, 0.3, 0.2)

        # Wall-clock guard: give up after 60s of real time.
        start_time = time.time()
        while time.time() - start_time < 60.0:
            # use up some virtual time by doing real work
            _ = pow(12345, 67890, 10000019)
            if signal.getitimer(self.itimer) == (0.0, 0.0):
                break # sig_vtalrm handler stopped this itimer
        else: # Issue 8424
            self.skipTest("timeout: likely cause: machine too slow or load too "
                          "high")

        # virtual itimer should be (0.0, 0.0) now
        self.assertEqual(signal.getitimer(self.itimer), (0.0, 0.0))
        # and the handler should have been called
        self.assertEqual(self.hndl_called, True)

    # Issue 3864. Unknown if this affects earlier versions of freebsd also.
Example 6
Project: pyblish-win   Author: pyblish   File: forking.py    GNU Lesser General Public License v3.0 6 votes vote down vote up
def main():
        '''
        Run code specified by data received over pipe
        '''
        # Windows-only child-process bootstrap: the parent passes an
        # inheritable pipe handle as the last argv entry.
        assert is_forking(sys.argv)

        handle = int(sys.argv[-1])
        # Convert the OS handle into a CRT file descriptor, then a file object.
        fd = msvcrt.open_osfhandle(handle, os.O_RDONLY)
        from_parent = os.fdopen(fd, 'rb')

        # While _inheriting is True, unpickling process state is permitted.
        process.current_process()._inheriting = True
        preparation_data = load(from_parent)
        prepare(preparation_data)
        # Second pickle on the pipe is the Process object itself.
        self = load(from_parent)
        process.current_process()._inheriting = False

        from_parent.close()

        # Run the process body and propagate its exit code.
        exitcode = self._bootstrap()
        exit(exitcode)
Example 7
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License 6 votes vote down vote up
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation: a serializer must expose both methods.
        for attr in ('load', 'dump'):
            if not hasattr(serializer, attr):
                raise AttributeError(attr)

        self._serializers[name] = serializer
Example 8
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: train_val.py    MIT License 6 votes vote down vote up
def from_snapshot(self, sfile, nfile):
    """Restore network weights from ``sfile`` and the pickled training
    state (RNG state + data-layer cursors) from ``nfile``.

    Returns the iteration number of the restored snapshot.
    """
    print('Restoring model snapshots from {:s}'.format(sfile))
    self.net.load_state_dict(torch.load(str(sfile)))
    print('Restored.')
    # The snapshot pickle holds six objects in a fixed order; read them
    # back in exactly that order. (The Tensorflow RNG state mentioned in
    # the original notes is not recoverable.)
    with open(nfile, 'rb') as fid:
      st0, cur, perm, cur_val, perm_val, last_snapshot_iter = \
          [pickle.load(fid) for _ in range(6)]

      np.random.set_state(st0)
      self.data_layer._cur = cur
      self.data_layer._perm = perm
      self.data_layer_val._cur = cur_val
      self.data_layer_val._perm = perm_val

    return last_snapshot_iter
Example 9
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: pascal_voc.py    MIT License 6 votes vote down vote up
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.

    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = os.path.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if os.path.exists(cache_file):
      with open(cache_file, 'rb') as fid:
        try:
          roidb = pickle.load(fid)
        except UnicodeDecodeError:
          # Cache written under Python 2: the failed load consumed part of
          # the stream, so rewind before re-reading with bytes encoding.
          # (The original used a bare `except:` and skipped the seek.)
          fid.seek(0)
          roidb = pickle.load(fid, encoding='bytes')
      print('{} gt roidb loaded from {}'.format(self.name, cache_file))
      return roidb

    # Cache miss: build the roidb from per-image annotations and persist it.
    gt_roidb = [self._load_pascal_labels(index)
                for index in self.image_index]
    with open(cache_file, 'wb') as fid:
      pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))

    return gt_roidb
Example 10
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: coco.py    MIT License 6 votes vote down vote up
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.
    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')

    # Cache miss: build the roidb, write it out, and return it.
    if not osp.exists(cache_file):
      gt_roidb = [self._load_coco_annotation(index)
                  for index in self._image_index]

      with open(cache_file, 'wb') as fid:
        pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
      print('wrote gt roidb to {}'.format(cache_file))
      return gt_roidb

    # Cache hit: deserialize and return.
    with open(cache_file, 'rb') as fid:
      roidb = pickle.load(fid)
    print('{} gt roidb loaded from {}'.format(self.name, cache_file))
    return roidb
Example 11
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: sessions.py    Apache License 2.0 6 votes vote down vote up
def get(self, sid):
        """Return the session stored under ``sid``.

        Invalid keys yield a fresh session. A missing session file yields
        a fresh session when ``renew_missing`` is set, otherwise an empty
        one; an unreadable/corrupt file also yields an empty session.
        """
        if not self.is_valid_key(sid):
            return self.new()
        try:
            fh = open(self.get_session_filename(sid), 'rb')
        except IOError:
            # No session file on disk.
            if self.renew_missing:
                return self.new()
            data = {}
        else:
            # py2.5+ combined form of the original nested try blocks.
            try:
                data = load(fh)
            except Exception:
                # Corrupt pickle: fall back to an empty session.
                data = {}
            finally:
                fh.close()
        return self.session_class(data, sid, False)
Example 12
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: sessions.py    Apache License 2.0 6 votes vote down vote up
def get(self, sid):
        """Fetch the session for ``sid``, creating or emptying it as needed.

        The pickled session file is read best-effort: any failure to load
        results in an empty session rather than an error.
        """
        if not self.is_valid_key(sid):
            return self.new()

        # Default payload when the file is missing or unreadable.
        data = {}
        try:
            f = open(self.get_session_filename(sid), 'rb')
        except IOError:
            if self.renew_missing:
                return self.new()
        else:
            try:
                try:
                    data = load(f)
                except Exception:
                    data = {}
            finally:
                f.close()
        return self.session_class(data, sid, False)
Example 13
Project: wikilinks   Author: trovdimi   File: decorators.py    MIT License 6 votes vote down vote up
def __call__(self, *args, **kwargs):
        """Memoising wrapper: serve the call from a pickle-backed cache.

        The cache file is keyed by owner class + wrapped function name and
        hydrated lazily on first call; entries are keyed by a SHA1 of the
        pickled arguments (excluding the owner instance).
        """
        fkey = '{}_{}'.format(args[0].__class__.__name__, self.func.__name__)
        self.filepath = os.path.join(CACHE_FOLDER, fkey + '.obj')

        if self.cache is None:
            # First call: load the persisted cache if one exists.
            if os.path.exists(self.filepath):
                print('loading', self.filepath, '...')
                with open(self.filepath, 'rb') as infile:
                    self.cache = pickle.load(infile)
            else:
                self.cache = {}

        pickled = pickle.dumps(args[1:], -1) + pickle.dumps(kwargs, -1)
        key = hashlib.sha1(pickled).hexdigest()
        if key in self.cache:
            return self.cache[key]
        # Cache miss: compute, store, and mark the cache dirty.
        self.modified = True
        value = self.func(*args, **kwargs)
        self.cache[key] = value
        return value
Example 14
Project: wikilinks   Author: trovdimi   File: redirectscandidatespostioninserter.py    MIT License 6 votes vote down vote up
def manageWork(self):
        # NOTE: Python 2 code (`long(...)` and `e.message` below).
        # Loads the set of redirect article ids once, then walks the HTML
        # dump directory and parses every zipped article whose id is in
        # that set.
        #file = open("/home/ddimitrov/20160305_en_wikilinks/tmp/missing_article_ids.p",'r')
        file = open(SSD_HOME+"pickle/redirects_ids.obj",'r')
        object_file = pickle.load(file)
        #print object_file
        #print type(object_file)
        for root, dirs, files in os.walk(STATIC_HTML_DUMP_ARTICLES_DIR+self.path):
            for i, file_name in enumerate(files):
                if file_name.endswith(".zip"):
                    # File names look like "<prefix>_<article id>_....zip";
                    # the id sits in the second underscore-separated field.
                    parts = file_name.split('_')
                    if long(parts[1]) in object_file:

                        try:
                            self.parse_article(file_name,root)
                        except  Exception as e:
                            # Best-effort: log the failing file and continue.
                            print("FILENAME_FAIL:"+file_name)
                            print(type(e))    # the exception instance
                            print(e)
                            print (e.message)
Example 15
Project: esjspy   Author: Dothion   File: base.py    GNU General Public License v3.0 5 votes vote down vote up
def _scan(identifier: str, cache_dir: Path = Path(CACHE_DIR)) -> Any:
    """Return the cached object stored for *identifier*.

    The cache file lives two fan-out levels deep, addressed by the MD5 of
    the identifier: ``<cache_dir>/<md5[:2]>/<md5[2:4]>/<md5>``.
    """
    digest = _gen_md5(identifier)
    cache_path = cache_dir / digest[0:2] / digest[2:4] / digest
    logger.debug('从缓存中读取了 %s 的内容。' % identifier if LANGUAGE in _simplified else
                 '從 cache 中讀取了 %s 的內容。' % identifier)
    with cache_path.open('rb') as f:
        return pickle.load(f)
Example 16
Project: fs_image   Author: facebookincubator   File: demo_sendstreams.py    MIT License 5 votes vote down vote up
def gold_demo_sendstreams():
    """Deserialize and return the gold sendstream fixture that ships
    alongside this module."""
    # We are part of the library interface, but __file__ works because:
    #   (a) we never use par_style = "fastzip", and
    #   (b) the gold data is baked into the PAR for reading.
    gold_path = os.path.join(
        os.path.dirname(os.path.abspath(__file__)),
        'gold_demo_sendstreams.pickle',
    )
    with open(gold_path, 'rb') as f:
        return pickle.load(f)
Example 17
Project: fs_image   Author: facebookincubator   File: print_gold_demo_sendstreams.py    MIT License 5 votes vote down vote up
def main(argv):
    """Write the raw sendstream named by ``argv[1]`` to stdout.

    Returns 0 on success, 1 (after printing usage) on bad arguments.
    """
    # Exactly one argument (the sendstream name) is required.
    if len(argv) != 2:
        print(__doc__, file=sys.stderr)
        return 1

    gold_path = os.path.join(
        os.path.dirname(__file__), 'gold_demo_sendstreams.pickle',
    )
    with open(gold_path, "rb") as infile:
        gold = pickle.load(infile)
    sys.stdout.buffer.write(gold[argv[1]]["sendstream"])
    return 0
Example 18
Project: f5go   Author: f5devcentral   File: go.py    MIT License 5 votes vote down vote up
def load(db=cfg_fnDatabase):
        """Attempt to load the database defined at cfg_fnDatabase. Create a
        new one if the database doesn't already exist.

        :param db: path to the pickled database file
        :returns: the unpickled database, or a fresh ``LinkDatabase``
            when the file cannot be opened
        """
        try:
            print("Loading DB from %s" % db)
            # Use a context manager so the handle is closed promptly
            # (the original `pickle.load(open(...))` leaked it).
            with open(db, 'rb') as f:
                return pickle.load(f)
        except IOError:
            print(sys.exc_info()[1])
            print("Creating new database...")
            return LinkDatabase()
Example 19
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def load(cls, file_obj):
        """Load serialized object from open JSON file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from JSON file
        :rtype: object

        """
        # json.load(f) is equivalent to parsing the file's full contents.
        return json.loads(file_obj.read())
Example 20
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def load(cls, file_obj):
        """Load serialized object from open pickle file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from pickle file
        :rtype: object

        """
        # NOTE: ``cPickle`` is Python 2 only (merged into ``pickle`` in
        # Python 3). Unpickling is unsafe on untrusted input.
        return cPickle.load(file_obj)
Example 21
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def _load(self):
        """Load cached settings from JSON file `self._filepath`."""
        data = {}
        # Hold the lock file only for the duration of the read.
        with LockFile(self._filepath, 0.5), open(self._filepath, 'rb') as fp:
            data.update(json.load(fp))

        # Keep a pristine copy so later saves can detect changes.
        self._original = deepcopy(data)

        # Apply the loaded values without triggering a save.
        self._nosave = True
        self.update(data)
        self._nosave = False
Example 22
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License 5 votes vote down vote up
def cached_data(self, name, data_func=None, max_age=60):
        """Return cached data if younger than ``max_age`` seconds.

        Retrieve data from cache, or re-generate and re-cache it when it
        is stale or missing. A ``max_age`` of 0 accepts cached data of
        any age.

        :param name: name of datastore
        :param data_func: function to (re-)generate data.
        :type data_func: ``callable``
        :param max_age: maximum age of cached data in seconds
        :type max_age: ``int``
        :returns: cached data, return value of ``data_func`` or ``None``
            if ``data_func`` is not set

        """
        serializer = manager.serializer(self.cache_serializer)

        cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
        age = self.cached_data_age(name)

        # Serve from cache when fresh enough (or when staleness is
        # explicitly ignored via max_age == 0).
        fresh = age < max_age or max_age == 0
        if fresh and os.path.exists(cache_path):
            with open(cache_path, 'rb') as file_obj:
                self.logger.debug('loading cached data: %s', cache_path)
                return serializer.load(file_obj)

        # Nothing cached and no way to regenerate.
        if not data_func:
            return None

        # Regenerate and persist for next time.
        data = data_func()
        self.cache_data(name, data)

        return data
Example 23
Project: explirefit   Author: codogogo   File: io_helper.py    Apache License 2.0 5 votes vote down vote up
def deserialize(path):
	"""Load and return the pickled object stored at *path*.

	The file is opened in a ``with`` block so the handle is closed
	deterministically (the original left it to the garbage collector).
	"""
	with open(path, "rb") as f:
		return pickle.load(f)
Example 24
Project: rhodonite   Author: nestauk   File: build_phylomemetic_graph.py    MIT License 5 votes vote down vote up
def build(input, output, min_clique_size, parent_limit,
        workers, chunksize):
    '''from_communities
    Creates and saves a phylomemetic graph from an input of temporal communities.

    Args:
        input (:obj:`str`): Path to input pickled dictionary of communities.
        output (:obj:`str`): Output directory for results (.gt format).
        min_clique_size (:obj:`int`): Minimum community size to consider.
        parent_limit (:obj:`int`): Maximum number of parents to consider.
        workers (:obj:`str` or :obj:`int`): Number of processes to use. Either
            provide integer, or "auto". Default is "auto".
        chunksize (:obj:`str` or :obj:`int`): Number of communities for each
            worker to process at a time. Either provide integer or "auto".
            Default is "auto".
    '''
    with open(input, 'rb') as f:
        communities = pickle.load(f)

    # Numeric strings become ints; "auto" passes through unchanged.
    if chunksize.isnumeric():
        chunksize = int(chunksize)
    if workers.isnumeric():
        workers = int(workers)

    # Guard clause: bail out early when the target directory is missing.
    save_dir = os.path.join(*input.split(os.sep)[:-1])
    if not os.path.isdir(save_dir):
        click.echo('Output directory does not exist.')
        return

    pg = phylomemetic_graph(
            community_sets=list(communities.values()),
            labels=list(communities.keys()),
            min_clique_size=min_clique_size,
            parent_limit=parent_limit,
            workers=workers,
            chunksize=chunksize,
            )
    pg.save(output)
Example 25
Project: pyblish-win   Author: pyblish   File: webchecker.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def load_pickle(dumpfile=DUMPFILE, verbose=VERBOSE):
    # NOTE: Python 2 code (print statements).
    # Restore a previously checkpointed webchecker state object from
    # `dumpfile` and return it; `verbose` > 0 enables progress output.
    if verbose > 0:
        print "Loading checkpoint from %s ..." % dumpfile
    f = open(dumpfile, "rb")
    c = pickle.load(f)
    f.close()
    if verbose > 0:
        print "Done."
        print "Root:", "\n      ".join(c.roots)
    return c
Example 26
Project: pyblish-win   Author: pyblish   File: grammar.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def load(self, filename):
        """Load the grammar tables from a pickle file.

        The file is read inside a ``with`` block so the handle is closed
        even if unpickling raises (the original could leak it).

        :param filename: path to the pickled grammar tables (a dict of
            attributes applied onto this instance)
        """
        with open(filename, "rb") as f:
            d = pickle.load(f)
        self.__dict__.update(d)
Example 27
Project: pyblish-win   Author: pyblish   File: test_signal.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_main(self):
        # This function spawns a child process to insulate the main
        # test-running process from all the signals. It then
        # communicates with that child process over a pipe and
        # re-raises information about any exceptions the child
        # raises. The real work happens in self.run_test().
        # (NOTE: Python 2 code -- see the print statement in the
        # except block below.)
        os_done_r, os_done_w = os.pipe()
        with closing(os.fdopen(os_done_r)) as done_r, \
             closing(os.fdopen(os_done_w, 'w')) as done_w:
            child = os.fork()
            if child == 0:
                # In the child process; run the test and report results
                # through the pipe.
                try:
                    done_r.close()
                    # Have to close done_w again here because
                    # exit_subprocess() will skip the enclosing with block.
                    with closing(done_w):
                        try:
                            self.run_test()
                        except:
                            # Ship the traceback text to the parent.
                            pickle.dump(traceback.format_exc(), done_w)
                        else:
                            # None signals success.
                            pickle.dump(None, done_w)
                except:
                    print 'Uh oh, raised from pickle.'
                    traceback.print_exc()
                finally:
                    exit_subprocess()

            done_w.close()
            # Block for up to MAX_DURATION seconds for the test to finish.
            r, w, x = select.select([done_r], [], [], self.MAX_DURATION)
            if done_r in r:
                # Child reported back: re-raise its failure, if any.
                tb = pickle.load(done_r)
                if tb:
                    self.fail(tb)
            else:
                os.kill(child, signal.SIGKILL)
                self.fail('Test deadlocked after %d seconds.' %
                          self.MAX_DURATION)
Example 28
Project: pyblish-win   Author: pyblish   File: test_random.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def test_bug_1727780(self):
        # verify that version-2-pickles can be loaded
        # fine, whether they are created on 32-bit or 64-bit
        # platforms, and that version-3-pickles load fine.
        for file, value in [("randv2_32.pck", 780),
                            ("randv2_64.pck", 866),
                            ("randv3.pck", 343)]:
            with open(test_support.findfile(file), "rb") as f:
                r = pickle.load(f)
            # The restored generator must reproduce the recorded value.
            self.assertEqual(r.randrange(1000), value)
Example 29
Project: LipNet-PyTorch   Author: sailordiary   File: dataloader.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __getitem__(self, index):
        """Return one padded sample: ``(video, target, input_length, index)``."""
        d = self.dataset[index]
        # targets: bs-length tensor of targets (each one is the length of the target seq)
        frames, y, sub = read_data(d, self.opt, self.vocab_mapping)

        # images: bs x chan x T x H x W, zero-padded out to max_timesteps
        x = torch.zeros(3, self.opt.max_timesteps, 50, 100)
        # number of real (unpadded) timesteps for this batch element
        length = frames.size(1)
        x[:, :length, :, :] = frames

        return x, y, length, index
Example 30
Project: natural-questions   Author: google-research-datasets   File: nq_eval.py    Apache License 2.0 5 votes vote down vote up
def main(_):
  """Score NQ predictions against gold annotations and print metrics.

  Reads gold data (optionally through a pickle cache stored next to the
  gold file) plus the prediction JSON, then either pretty-prints tables
  or emits a JSON metrics blob depending on ``FLAGS.pretty_print``.
  """
  cache_path = os.path.join(os.path.dirname(FLAGS.gold_path), 'cache')
  if FLAGS.cache_gold_data and os.path.exists(cache_path):
    logging.info('Reading from cache: %s', format(cache_path))
    # BUG FIX: pickle files must be opened in binary mode ('rb'/'wb');
    # the original used text mode, which fails under Python 3. The
    # handles are also now closed deterministically via `with`.
    with open(cache_path, 'rb') as f:
      nq_gold_dict = pickle.load(f)
  else:
    nq_gold_dict = util.read_annotation(
        FLAGS.gold_path, n_threads=FLAGS.num_threads)
    if FLAGS.cache_gold_data:
      logging.info('Caching gold data for next time to: %s', format(cache_path))
      with open(cache_path, 'wb') as f:
        pickle.dump(nq_gold_dict, f)

  nq_pred_dict = util.read_prediction_json(FLAGS.predictions_path)

  long_answer_stats, short_answer_stats = score_answers(nq_gold_dict,
                                                        nq_pred_dict)

  if FLAGS.pretty_print:
    print('*' * 20)
    # 'R@P' restored: the scraped source had it mangled by an
    # email-protection filter.
    print('LONG ANSWER R@P TABLE:')
    print_r_at_p_table(long_answer_stats)
    print('*' * 20)
    print('SHORT ANSWER R@P TABLE:')
    print_r_at_p_table(short_answer_stats)

    scores = compute_final_f1(long_answer_stats, short_answer_stats)
    print('*' * 20)
    print('METRICS IGNORING SCORES (n={}):'.format(scores['long-answer-n']))
    print('              F1     /  P      /  R')
    print('Long answer  {: >7.2%} / {: >7.2%} / {: >7.2%}'.format(
        scores['long-answer-f1'], scores['long-answer-precision'],
        scores['long-answer-recall']))
    print('Short answer {: >7.2%} / {: >7.2%} / {: >7.2%}'.format(
        scores['short-answer-f1'], scores['short-answer-precision'],
        scores['short-answer-recall']))
  else:
    metrics = get_metrics_with_answer_stats(long_answer_stats,
                                            short_answer_stats)
    print(json.dumps(metrics))
Example 31
Project: BASS   Author: Cisco-Talos   File: binary_database.py    GNU General Public License v2.0 5 votes vote down vote up
def load(clazz, path):
        """Read the pickled payload at *path* and wrap it in *clazz*."""
        with open(path, "rb") as f:
            payload = pickle.load(f)
        return clazz(payload)
Example 32
Project: BASS   Author: Cisco-Talos   File: server.py    GNU General Public License v2.0 5 votes vote down vote up
def function_raw_hash_get():
    # Look up a stored function by the raw SHA-256 of its bytes.
    # Expects exactly one uploaded database file in the request;
    # responds with the function's metadata or a JSON error.
    #
    # NOTE: Python 2 idiom below -- `.items()[0]` (dict views are not
    # indexable on Python 3).
    global Session
    session = Session()
    filename, file_ = request.files.items()[0]
    # SECURITY NOTE: unpickling a client-supplied upload allows arbitrary
    # code execution; this endpoint must only serve trusted clients.
    db = Database(pickle.load(file_))

    # IDA reports x86 as "metapc"; normalize for the DB lookup.
    arch_name = db.architecture_name
    if arch_name == "metapc":
        arch_name = "x86"
    try:
        arch = session.query(Architecture).filter(Architecture.name == arch_name and \
                Architecture.bits == db.architecture_bits and \
                Architecture.little_endian == db.architecture_endianness == "little").one()
    except NoResultFound:
        return make_response(jsonify(message = "Architecture not found"), 404)

    # The uploaded database is expected to contain exactly one function.
    try:
        func = next(db.functions)
    except StopIteration:
        return make_response(jsonify(message = "No function found in database"), 500)

    raw_hash = _function_calculate_raw_sha256(func)
    size = _function_get_size(func)

    try:
        function = session.query(Function).filter(Function.raw_sha256 == raw_hash and \
                Function.size == size and \
                Function.arch == arch.id).one()
        return make_response(jsonify(**json.loads(function.data)), 200)
    except NoResultFound:
        return make_response(jsonify(message = "Function not found"), 404)
Example 33
Project: BASS   Author: Cisco-Talos   File: server.py    GNU General Public License v2.0 5 votes vote down vote up
def function_mnem_hash_get():
    # Look up a stored function by the SHA-256 of its mnemonic sequence
    # (instruction opcodes only, ignoring operands). Mirrors
    # function_raw_hash_get but without the size constraint.
    #
    # NOTE: Python 2 idiom below -- `.items()[0]`.
    global Session
    session = Session()
    filename, file_ = request.files.items()[0]
    # SECURITY NOTE: unpickling a client-supplied upload allows arbitrary
    # code execution; this endpoint must only serve trusted clients.
    db = Database(pickle.load(file_))

    # IDA reports x86 as "metapc"; normalize for the DB lookup.
    arch_name = db.architecture_name
    if arch_name == "metapc":
        arch_name = "x86"
    try:
        arch = session.query(Architecture).filter(Architecture.name == arch_name and \
                Architecture.bits == db.architecture_bits and \
                Architecture.little_endian == db.architecture_endianness == "little").one()
    except NoResultFound:
        return make_response(jsonify(message = "Architecture not found"), 404)

    # The uploaded database is expected to contain exactly one function.
    try:
        func = next(db.functions)
    except StopIteration:
        return make_response(jsonify(message = "No function found in database"), 500)

    mnem_hash = _function_calculate_mnem_sha256(func)

    try:
        function = session.query(Function).filter(Function.mnem_sha256 == mnem_hash and \
                Function.arch == arch.id).one()
        return make_response(jsonify(**json.loads(function.data)), 200)
    except NoResultFound:
        return make_response(jsonify(message = "Function not found"), 404)
Example 34
Project: BASS   Author: Cisco-Talos   File: binary_database.py    GNU General Public License v2.0 5 votes vote down vote up
def load(clazz, path):
        """Construct a *clazz* instance from the pickle stored at *path*."""
        fh = open(path, "rb")
        try:
            data = pickle.load(fh)
        finally:
            fh.close()
        return clazz(data)
Example 35
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License 5 votes vote down vote up
def load(cls, file_obj):
        """Load serialized object from open JSON file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from JSON file
        :rtype: object

        """
        obj = json.load(file_obj)
        return obj
Example 36
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License 5 votes vote down vote up
def load(cls, file_obj):
        """Load serialized object from open pickle file.

        .. versionadded:: 1.8

        :param file_obj: file handle
        :type file_obj: ``file`` object
        :returns: object loaded from pickle file
        :rtype: object

        """
        # NOTE: ``cPickle`` exists only on Python 2; on Python 3 use
        # ``pickle``. Never unpickle untrusted data.
        return cPickle.load(file_obj)
Example 37
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License 5 votes vote down vote up
def _load(self):
        """Load cached settings from JSON file `self._filepath`."""
        # Read under the lock file to avoid racing a concurrent writer.
        with LockFile(self._filepath, 0.5):
            with open(self._filepath, 'rb') as fp:
                loaded = json.load(fp)
        data = {}
        data.update(loaded)

        # Pristine copy used later to detect modifications.
        self._original = deepcopy(data)

        # Apply the values onto ourselves without triggering a save.
        self._nosave = True
        self.update(data)
        self._nosave = False
Example 38
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License 5 votes vote down vote up
def cached_data(self, name, data_func=None, max_age=60):
        """Return cached data if younger than ``max_age`` seconds.

        Retrieve data from cache or re-generate and re-cache data if
        stale/non-existant. If ``max_age`` is 0, return cached data no
        matter how old.

        :param name: name of datastore
        :param data_func: function to (re-)generate data.
        :type data_func: ``callable``
        :param max_age: maximum age of cached data in seconds
        :type max_age: ``int``
        :returns: cached data, return value of ``data_func`` or ``None``
            if ``data_func`` is not set

        """
        serializer = manager.serializer(self.cache_serializer)

        cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))
        age = self.cached_data_age(name)

        # max_age == 0 means "any age is acceptable".
        if (max_age == 0 or age < max_age) and os.path.exists(cache_path):
            with open(cache_path, 'rb') as file_obj:
                self.logger.debug('loading cached data: %s', cache_path)
                return serializer.load(file_obj)

        # Cache unusable; regenerate when a generator was supplied.
        if not data_func:
            return None

        data = data_func()
        self.cache_data(name, data)

        return data
Example 39
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: train_val.py    MIT License 5 votes vote down vote up
def initialize(self):
    """Load ImageNet-pretrained CNN weights (and optionally a pretrained
    WSDDN checkpoint) and return the initial training state:
    ``(lr, last_snapshot_iter, stepsizes, np_paths, ss_paths)``."""
    # Initial snapshot file lists are empty.
    np_paths = []
    ss_paths = []

    # Fresh training starts directly from ImageNet weights.
    print('Loading initial model weights from {:s}'.format(self.pretrained_model))
    self.net.load_pretrained_cnn(torch.load(self.pretrained_model))
    print('Loaded.')

    if self.wsddn_premodel is not None:
      # Merge the pretrained WSDDN weights over the current state dict.
      wsddn_pre = torch.load(self.wsddn_premodel)
      model_dict = self.net.state_dict()
      model_dict.update(wsddn_pre)
      self.net.load_state_dict(model_dict)
      print('Loading pretrained WSDDN model weights from {:s}'.format(self.wsddn_premodel))
      print('Loaded.')

    # NOTE: loading fixes variables so RGB weights become BGR; for VGG16
    # it also converts the fc6/fc7 convolutional weights to fully
    # connected weights.
    last_snapshot_iter = 0
    lr = cfg.TRAIN.LEARNING_RATE
    stepsizes = list(cfg.TRAIN.STEPSIZE)

    return lr, last_snapshot_iter, stepsizes, np_paths, ss_paths
Example 40
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: pascal_voc.py    MIT License 5 votes vote down vote up
def _load_rpn_roidb(self, gt_roidb):
    filename = self.config['rpn_file']
    print('loading {}'.format(filename))
    assert os.path.exists(filename), \
      'rpn data not found at: {}'.format(filename)
    with open(filename, 'rb') as f:
      box_list = pickle.load(f)
    return self.create_roidb_from_box_list(box_list, gt_roidb) 
Example 41
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection — Author: Sunarker — File: pascal_voc.py (MIT License)
def selective_search_roidb(self):
    """
    Return the database of selective search regions of interest.
    Ground-truth ROIs are also included.

    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = os.path.join(self.cache_path,
                              self.name + '_selective_search_roidb.pkl')

    # Fast path: a previous run already serialized the roidb.
    if os.path.exists(cache_file):
        with open(cache_file, 'rb') as fid:
            cached = pickle.load(fid)
        print('{} ss roidb loaded from {}'.format(self.name, cache_file))
        return cached

    # Ground truth is only usable for VOC 2007 or non-test splits.
    has_gt = int(self._year) == 2007 or self._image_set != 'test'
    gt = self.gt_roidb() if has_gt else None
    roidb = self._load_selective_search_roidb(gt)

    with open(cache_file, 'wb') as fid:
        pickle.dump(roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote ss roidb to {}'.format(cache_file))
    return roidb
Example 42
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection — Author: Sunarker — File: reval.py (MIT License)
def from_dets(imdb_name, output_dir, args):
  """Re-evaluate previously saved detections for *imdb_name*.

  Loads ``detections.pkl`` from *output_dir*, optionally applies NMS
  (controlled by ``args.apply_nms``), and runs the dataset's
  evaluation routine.
  """
  imdb = get_imdb(imdb_name)
  imdb.competition_mode(args.comp_mode)
  imdb.config['matlab_eval'] = args.matlab_eval

  dets_path = os.path.join(output_dir, 'detections.pkl')
  with open(dets_path, 'rb') as f:
    dets = pickle.load(f)

  if args.apply_nms:
    print('Applying NMS to all detections')
    dets = apply_nms(dets, cfg.TEST.NMS)

  print('Evaluating detections')
  imdb.evaluate_detections(dets, output_dir)
Example 43
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection — Author: Sunarker — File: reval_discovery.py (MIT License)
def from_dets(imdb_name, output_dir, args):
  """Evaluate previously saved discovery results for *imdb_name*.

  Loads ``discovery.pkl`` from *output_dir* and hands it to the
  dataset's discovery-evaluation routine.
  """
  imdb = get_imdb(imdb_name)
  imdb.competition_mode(args.comp_mode)

  discovery_path = os.path.join(output_dir, 'discovery.pkl')
  with open(discovery_path, 'rb') as f:
    dets = pickle.load(f)

  print('Evaluating detections')
  imdb.evaluate_discovery(dets, output_dir)
Example 44
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection — Author: Sunarker — File: show_boxes_results.py (MIT License)
def parse_args():
  """
  Parse input arguments
  """
  parser = argparse.ArgumentParser(
      description='show the imgs and the resulted boxes')
  parser.add_argument(
      '--box',
      default='/DATA3_DB7/data/jjwang/workspace/wsFaster-rcnn/output/vgg16/voc_2007_test/WSDDN_PRE_50000/vgg16_faster_rcnn_iter_90000/wsddn/detections.pkl',
      help='boxes pkl file to load')
  parser.add_argument('--thr', type=float, default=0.1, help='idx of test img')

  # With no CLI arguments at all, show usage instead of silently
  # running on the defaults.
  if len(sys.argv) == 1:
    parser.print_help()
    sys.exit(1)

  return parser.parse_args()
Example 45
Project: Flask-Python-GAE-Login-Registration — Author: orymeyer — File: cache.py (Apache License 2.0)
def _prune(self):
        entries = self._list_dir()
        if len(entries) > self._threshold:
            now = time()
            try:
                for idx, fname in enumerate(entries):
                    remove = False
                    with open(fname, 'rb') as f:
                        expires = pickle.load(f)
                    remove = expires <= now or idx % 3 == 0

                    if remove:
                        os.remove(fname)
            except (IOError, OSError):
                pass 
Example 46
Project: Flask-Python-GAE-Login-Registration — Author: orymeyer — File: cache.py (Apache License 2.0)
def get(self, key):
    """Return the cached value for *key*, or None on a miss.

    A cache file holds two pickles back to back: the expiry timestamp
    first, then the value. Expired entries are deleted on access; a
    missing or unreadable file counts as a miss.
    """
    path = self._get_filename(key)
    try:
        with open(path, 'rb') as fh:
            expiry = pickle.load(fh)
            if expiry < time():
                # Stale entry: drop the file and report a miss.
                os.remove(path)
                return None
            return pickle.load(fh)
    except (IOError, OSError, pickle.PickleError):
        return None
Example 47
Project: Flask-Python-GAE-Login-Registration — Author: orymeyer — File: cache.py (Apache License 2.0)
def _prune(self):
        entries = self._list_dir()
        if len(entries) > self._threshold:
            now = time()
            try:
                for idx, fname in enumerate(entries):
                    remove = False
                    with open(fname, 'rb') as f:
                        expires = pickle.load(f)
                    remove = expires <= now or idx % 3 == 0

                    if remove:
                        os.remove(fname)
            except (IOError, OSError):
                pass 
Example 48
Project: comet-commonsense — Author: atcbosselut — File: demo_bilinear.py (Apache License 2.0)
def run(gens_file, theshold=None, flip_r_e1=False):
    """Score generated (subject, relation, object) tuples with the
    pretrained bilinear CKBC model.

    :param gens_file: path to a tab-separated generations file, or a
        list of such paths
    :param theshold: unused; kept (typo and all) for interface
        compatibility with existing callers
    :param flip_r_e1: if True, TSV columns 0/1 are read as
        (subject, relation); by default they are read as
        (relation, subject)
    :return: list of ``(gen_tuple, score)`` pairs, skipping the
        ``('s', 'r', 'o', 'minED')`` header row
    """
    model_path = ("ckbc-demo/Bilinear_cetrainSize300frac1.0dSize200relSize150"
                  "acti0.001.1e-05.800.RAND.tanh.txt19.pickle")
    # BUGFIX: pickle files are binary; text mode ("r") breaks under
    # Python 3. Also close the file instead of leaking the handle.
    # NOTE: pickle.load executes arbitrary code — only load trusted
    # model files.
    with open(model_path, "rb") as model_file:
        model = pickle.load(model_file)

    Rel = model['rel']
    We = model['embeddings']
    Weight = model['weight']
    Offset = model['bias']
    words = model['words_name']
    rel = model['rel_name']

    # Collect generation lines, closing each file promptly.
    if isinstance(gens_file, list):
        gens = []
        for file_name in gens_file:
            with open(file_name, "r") as fh:
                gens += fh.read().split("\n")
    else:
        with open(gens_file, "r") as fh:
            gens = fh.read().split("\n")

    # Keep the first four tab-separated fields of each non-empty line.
    formatted_gens = [tuple(line.split("\t")[:4]) for line in gens if line]

    results = []
    for gen in formatted_gens:
        if gen == ('s', 'r', 'o', 'minED'):
            continue  # skip the TSV header row
        if flip_r_e1:
            relation = "_".join(gen[1].split(" "))
            subject_ = "_".join(gen[0].split(" "))
        else:
            relation = "_".join(gen[0].split(" "))
            subject_ = "_".join(gen[1].split(" "))
        object_ = "_".join(gen[2].split(" "))
        result = score(subject_, object_, words, We, rel, Rel, Weight,
                       Offset, relation)
        results.append((gen, result))

    return results
Example 49
Project: wikilinks — Author: trovdimi — File: redirectscandidatespostioninserter.py (MIT License)
def modify_html(self, html, source_article_id):
    """Prepare an article's HTML for heatmap rendering.

    Removes the <base> tag, inlines the external stylesheet (fetched
    over HTTPS with the configured user-agent), and appends a marker
    <div> as the last element of <body>.

    :param html: raw HTML document text
    :param source_article_id: id used only in the failure log message
    :return: the modified document as prettified UTF-8 bytes
    """
    # we need this in order to plot the heatmap
    soup = Soup(html, 'html.parser')
    base_tag = soup.find('base')
    # BUGFIX: Python-2 print statement -> print() function call.
    print(soup.find("title"))
    if base_tag is not None:
        base_tag.decompose()

    css = soup.find("link", {"rel": "stylesheet"})
    if css is not None:
        # Protocol-relative href -> explicit https, then fetch it.
        css['href'] = 'https:' + css['href']
        headers = {'user-agent': EMAIL}
        r = requests.get(css['href'], headers=headers, stream=True)
        css['href'] = ""
        if r.status_code == 200:
            # Inline the stylesheet so the saved page is self-contained.
            style = soup.new_tag('style')
            style.string = r.text
            css.insert_after(style)
        else:
            print('FAIL: Cannot load css  for id: "%s" ' % source_article_id)

        css.decompose()

    # NOTE(review): this meta tag is built but never inserted into the
    # document — looks like a latent bug; preserved as-is to avoid a
    # behavior change.
    last_element_on_page_meta = soup.new_tag('meta')
    last_element_on_page_meta['http-equiv'] = "content-type"
    last_element_on_page_meta['content'] = "text/html; charset=utf-8"

    body = soup.find('body')
    # Marker element appended last; used downstream to detect page end.
    last_element_on_page = soup.new_tag('div')
    last_element_on_page['class'] = "pyqt_is_shit"
    body.append(last_element_on_page)
    return soup.prettify(encoding='utf-8')
Example 50
Project: mietmap-scraper — Author: CodeforKarlsruhe — File: scrape.py (MIT License)
def memoize_persistently(filename):
    """
    Persistently memoize a function's return values.

    Return values are cached in ``filename`` via ``pickle`` and survive
    across program runs: on first use the cache file (if any) is loaded,
    and every new result is written back to disk immediately. Calls with
    previously seen arguments return the cached value without invoking
    the wrapped function.

    Both arguments and return values must be picklable, and arguments
    must be usable as dictionary keys.
    """
    # Load any previously persisted cache; start empty if the file
    # does not exist yet. Other I/O errors still propagate.
    try:
        with open(filename, 'rb') as cache_file:
            cache = pickle.load(cache_file)
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        cache = {}

    def decorator(func):

        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Positional args plus sorted kwargs form the cache key.
            key = args + tuple(sorted(kwargs.items()))
            if key in cache:
                return cache[key]
            result = cache[key] = func(*args, **kwargs)
            # Persist the whole cache so future runs can reuse it.
            with open(filename, 'wb') as cache_file:
                pickle.dump(cache, cache_file)
            return result

        return wrapper

    return decorator