Python json.dump() Examples

The following are 30 code examples of json.dump(). Each example is taken from an open-source project; the source file, project name, and license are noted above each example. You may also want to check out the other available functions and classes of the json module.
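Before the project examples, here is a minimal sketch of the call itself: json.dump(obj, fp) serializes obj as JSON and writes it to the open file object fp, and keyword arguments such as indent and sort_keys (both used in the examples below) control the output format.

import json

data = {"name": "example", "values": [1, 2, 3]}
with open("data.json", "w") as f:
    json.dump(data, f, indent=4, sort_keys=True)  # pretty-printed, keys sorted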
Example #1
Source File: prepare_model_yaml.py    From models with MIT License
def make_secondary_dl_yaml(template_yaml, model_json, output_yaml_path):
    with open(template_yaml, 'r') as f:
        model_yaml = yaml.safe_load(f)
    #
    # get the model config:
    with open(model_json, 'r') as json_file:
        loaded_model_json = json_file.read()
    loaded_model = keras.models.model_from_json(loaded_model_json)
    #
    model_yaml["output_schema"]["targets"] = []
    for oname, oshape in zip(loaded_model.output_names, loaded_model.output_shape):
        append_el ={"name":oname , "shape":str(oshape)#replace("None,", "")
        , "doc":"Methylation probability for %s"%oname}
        model_yaml["output_schema"]["targets"].append(append_el)
    #
    with open(output_yaml_path, 'w') as f:
        yaml.dump(model_yaml, f, default_flow_style=False) 
Example #2
Source File: log-parser.py    From aws-waf-security-automations with Apache License 2.0
def write_output(bucket_name, key_name, output_key_name, outstanding_requesters):
    logging.getLogger().debug('[write_output] Start')

    try:
        current_data = '/tmp/' + key_name.split('/')[-1] + '_LOCAL.json'
        with open(current_data, 'w') as outfile:
            json.dump(outstanding_requesters, outfile)

        s3 = boto3.client('s3')
        s3.upload_file(current_data, bucket_name, output_key_name, ExtraArgs={'ContentType': "application/json"})
        remove(current_data)

    except Exception as e:
        logging.getLogger().error("[write_output] \tError to write output file")
        logging.getLogger().error(e)

    logging.getLogger().debug('[write_output] End') 
Example #3
Source File: data.py    From comet-commonsense with Apache License 2.0
def save_eval_file(opt, stats, eval_type="losses", split="dev", ext="pickle"):
    if cfg.test_save:
        name = "{}/{}.{}".format(utils.make_name(
            opt, prefix="garbage/{}/".format(eval_type),
            is_dir=True, eval_=True), split, ext)
    else:
        name = "{}/{}.{}".format(utils.make_name(
            opt, prefix="results/{}/".format(eval_type),
            is_dir=True, eval_=True), split, ext)
    print("Saving {} {} to {}".format(split, eval_type, name))

    if ext == "pickle":
        with open(name, "wb") as f:
            pickle.dump(stats, f)
    elif ext == "txt":
        with open(name, "w") as f:
            f.write(stats)
    elif ext == "json":
        with open(name, "w") as f:
            json.dump(stats, f)
    else:
        raise ValueError("Unsupported extension: {}".format(ext))
Example #4
Source File: coco.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def _write_coco_results(self, _coco, detections):
        """ example results
        [{"image_id": 42,
          "category_id": 18,
          "bbox": [258.15,41.29,348.26,243.78],
          "score": 0.236}, ...]
        """
        cats = [cat['name'] for cat in _coco.loadCats(_coco.getCatIds())]
        class_to_coco_ind = dict(zip(cats, _coco.getCatIds()))
        results = []
        for cls_ind, cls in enumerate(self.classes):
            if cls == '__background__':
                continue
            logger.info('collecting %s results (%d/%d)' % (cls, cls_ind, self.num_classes - 1))
            coco_cat_id = class_to_coco_ind[cls]
            results.extend(self._coco_results_one_category(detections[cls_ind], coco_cat_id))
        logger.info('writing results json to %s' % self._result_file)
        with open(self._result_file, 'w') as f:
            json.dump(results, f, sort_keys=True, indent=4) 
Example #5
Source File: coco.py    From Collaborative-Learning-for-Weakly-Supervised-Object-Detection with MIT License
def _write_coco_results_file(self, all_boxes, res_file):
    # [{"image_id": 42,
    #   "category_id": 18,
    #   "bbox": [258.15,41.29,348.26,243.78],
    #   "score": 0.236}, ...]
    results = []
    for cls_ind, cls in enumerate(self.classes):
      if cls == '__background__':
        continue
      print('Collecting {} results ({:d}/{:d})'.format(cls, cls_ind,
                                                       self.num_classes - 1))
      coco_cat_id = self._class_to_coco_cat_id[cls]
      results.extend(self._coco_results_one_category(all_boxes[cls_ind],
                                                     coco_cat_id))
    print('Writing results json to {}'.format(res_file))
    with open(res_file, 'w') as fid:
      json.dump(results, fid) 
Example #6
Source File: build.py    From Traffic_sign_detection_YOLO with MIT License
def savepb(self):
		"""
		Create a standalone const graph def that 
		C++ can load and run.
		"""
		darknet_pb = self.to_darknet()
		flags_pb = self.FLAGS
		flags_pb.verbalise = False
		
		flags_pb.train = False
		# rebuild another tfnet. all const.
		tfnet_pb = TFNet(flags_pb, darknet_pb)		
		tfnet_pb.sess = tf.Session(graph = tfnet_pb.graph)
		# tfnet_pb.predict() # uncomment for unit testing
		name = 'built_graph/{}.pb'.format(self.meta['name'])
		os.makedirs(os.path.dirname(name), exist_ok=True)
		# Save a dump of everything in meta
		with open('built_graph/{}.meta'.format(self.meta['name']), 'w') as fp:
			json.dump(self.meta, fp)
		self.say('Saving const graph def to {}'.format(name))
		graph_def = tfnet_pb.sess.graph_def
		tf.train.write_graph(graph_def,'./', name, False) 
Example #7
Source File: coco.py    From Collaborative-Learning-for-Weakly-Supervised-Object-Detection with MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.
    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if osp.exists(cache_file):
      with open(cache_file, 'rb') as fid:
        roidb = pickle.load(fid)
      print('{} gt roidb loaded from {}'.format(self.name, cache_file))
      return roidb

    gt_roidb = [self._load_coco_annotation(index)
                for index in self._image_index]

    with open(cache_file, 'wb') as fid:
      pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb 
Example #8
Source File: workflow.py    From wechat-alfred-workflow with MIT License
def cache_data(self, name, data):
        """Save ``data`` to cache under ``name``.

        If ``data`` is ``None``, the corresponding cache file will be
        deleted.

        :param name: name of datastore
        :param data: data to store. This may be any object supported by
                the cache serializer

        """
        serializer = manager.serializer(self.cache_serializer)

        cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))

        if data is None:
            if os.path.exists(cache_path):
                os.unlink(cache_path)
                self.logger.debug('deleted cache file: %s', cache_path)
            return

        with atomic_writer(cache_path, 'wb') as file_obj:
            serializer.dump(data, file_obj)

        self.logger.debug('cached data: %s', cache_path) 
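A hypothetical call site for this method, assuming wf is an instance of the Workflow class it belongs to (the names below are illustrative, not from the project):

# hypothetical usage; `wf` is assumed to be a Workflow instance
wf.cache_data('search-results', results)  # serialize `results` to the cache
wf.cache_data('search-results', None)     # delete the cached file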
Example #9
Source File: prepare_model_yaml.py    From models with MIT License
def make_model_yaml(template_yaml, model_json, output_yaml_path):
    #
    with open(template_yaml, 'r') as f:
        model_yaml = yaml.safe_load(f)
    #
    # get the model config:
    with open(model_json, 'r') as json_file:
        loaded_model_json = json_file.read()
    loaded_model = keras.models.model_from_json(loaded_model_json)
    #
    model_yaml["schema"]["targets"] = []
    for oname, oshape in zip(loaded_model.output_names, loaded_model.output_shape):
        append_el ={"name":oname , "shape":str(oshape)#replace("None,", "")
        , "doc":"Methylation probability for %s"%oname}
        model_yaml["schema"]["targets"].append(append_el)
    #
    with open(output_yaml_path, 'w') as f:
        yaml.dump(model_yaml, f, default_flow_style=False) 
Example #10
Source File: main.py    From cs294-112_hws with MIT License
def train(session, model, curr_dir, data_train, data_val):
    curr_dir = os.path.join(curr_dir, model.algorithm)
    bestmodel_dir = os.path.join(curr_dir, 'best_checkpoint')
    
    if not os.path.exists(curr_dir):
        os.makedirs(curr_dir)
    
    file_handler = logging.FileHandler(os.path.join(curr_dir, 'log.txt'))
    logging.getLogger().addHandler(file_handler)
    
    with open(os.path.join(curr_dir, FLAGS['save_name'] + '.json'), 'w') as f:
        json.dump(FLAGS, f)
    
    if not os.path.exists(bestmodel_dir):
        os.makedirs(bestmodel_dir)
    
    initialize_model(session, model, curr_dir, expect_exists=False)
    model.train(session, curr_dir, bestmodel_dir, data_train, data_val) 
Example #11
Source File: workflow.py    From wechat-alfred-workflow with MIT License
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer 
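Any object with load() and dump() methods qualifies. As a minimal sketch (not taken from the project, and assuming the module-level manager instance seen in Example #8), a JSON-backed serializer could be registered like this:

import json

class JSONSerializer(object):
    """Wrap the stdlib json module behind the load()/dump() interface."""

    def load(self, file_obj):
        return json.load(file_obj)

    def dump(self, obj, file_obj):
        return json.dump(obj, file_obj, indent=2)

manager.register('json', JSONSerializer())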
Example #12
Source File: core.py    From neuropythy with GNU Affero General Public License v3.0
def save_json(filename, obj, normalize=True):
    '''
    save_json(filename, obj) writes the given object to the given filename (or stream) in a
      normalized JSON format.

    The optional argument normalize (default True) may be set to False to prevent the object from
    being run through neuropythy's normalize system.
    '''
    from neuropythy.util import normalize as norm
    dat = norm(obj) if normalize else obj
    if pimms.is_str(filename):
        jsonstr = json.dumps(dat)
        # pick an opener matching the extension (gzip.open alone cannot write
        # .bz2/.lzma files; this assumes gzip, bz2, and lzma are imported)
        if   filename.endswith('.gz'):   opener = gzip.open
        elif filename.endswith('.bz2'):  opener = bz2.open
        elif filename.endswith('.lzma'): opener = lzma.open
        else:                            opener = open
        with opener(filename, 'wt') as fl: fl.write(jsonstr)
    else: json.dump(dat, filename)
    return filename 
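Usage is then uniform across plain and compressed targets (a sketch; the filenames are hypothetical):

save_json('model.json', obj)     # plain JSON file
save_json('model.json.gz', obj)  # gzip-compressed JSON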
Example #13
Source File: conf.py    From neuropythy with GNU Affero General Public License v3.0
def saverc(filename, dat, overwrite=False):
    '''
    saverc(filename, d) saves the given configuration dictionary d to the given filename in JSON
      format. If d is not a dictionary or if filename already exists or cannot be created, an error
      is raised. This function does not create directories.

    The optional argument overwrite (default: False) may be passed as True to overwrite files that
    already exist.
    '''
    filename = os.path.expanduser(os.path.expandvars(filename))
    if not overwrite and os.path.isfile(filename):
        raise ValueError('Given filename %s already exists' % filename)
    if not pimms.is_map(dat):
        try: dat = dict(dat)
        except Exception: raise ValueError('Given config data must be a dictionary')
    with open(filename, 'w') as fl:
        json.dump(dat, fl, sort_keys=True)
    return filename

# the private class that handles all the details... 
Example #14
Source File: proxyLoader.py    From premeStock with MIT License
def loadProxies():
	proxiesList = []
	cprint("Loading proxies...","green")

	site2(proxiesList) # load proxies

	# proxiesList = ["13.85.80.251:443"]
	# proxiesList = ["13.85.80.251:443"]
	# proxiesList = ["144.217.16.78:3128"]
	proxiesList = proxiesList[::-1]
	proxiesList = proxiesList[:10]
	proxiesList = filterConnections(proxiesList) # filter for working connections

	# Write to file
	with open("proxies.txt", 'w') as outfile:
		json.dump(proxiesList, outfile)
	cprint("Proxies saved to proxies.txt!","magenta","on_grey", attrs=['bold']) 
Example #15
Source File: getmetrics_docker_remote_api.py    From InsightAgent with Apache License 2.0
def writeInsatanceFile(filename, instanceList):
    global hostname
    jsonData = {}
    print "In Function writeInsatanceFile()"
    print instanceList
    newInstanceList = []
    for index in range(len(instanceList)):
        newInstanceList.append(instanceList[index] + "_" + hostname)
    jsonData["instanceList"] = newInstanceList
    with open(os.path.join(homepath, datadir + filename + ".json"), 'w') as f:
        json.dump(jsonData, f) 
Example #16
Source File: config.py    From SecPi with GNU General Public License v3.0
def save():
	with open(config_file, 'w') as outfile:
		json.dump(conf, outfile) 
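For symmetry, a matching loader might look like the sketch below; it is not part of the source project and assumes the same module-level config_file and conf names:

def load():
	global conf
	with open(config_file) as infile:
		conf = json.load(infile)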
Example #17
Source File: monitor.py    From premeStock with MIT License
def main(argv):
    global IDs
    global stock

    if len(sys.argv) > 1:
        cprint("First run, saving stock.txt","green")
        for ID in IDs.keys():
            restockCheck(ID, 1)
            time.sleep(.5)

        if stock:
            with open("stock.txt", 'w') as outfile:
                json.dump(stock, outfile, indent=4, sort_keys=True)
        cprint("stock.txt saved!","green")
        exit()


    start_time = time.time()
    cprint(str(datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]), "magenta")
    try:
        # If you don't want threading: 
        # for ID in IDs.keys():
        #     print("Checking: {}".format(ID))
        #     restockCheck(ID, 1)
        #     time.sleep(.5)

        # Use threading:
        multiCheck(list(IDs.keys()))
        # print(stock)
        if stock:
            compareStock()
    except Exception:
        cprint("ERROR","red", attrs=['bold'])

    cprint(str(time.time() - start_time)+" seconds", "magenta", attrs=['bold']) 
Example #18
Source File: getmetrics_cgroup.py    From InsightAgent with Apache License 2.0
def update_results(lists):
    with open(os.path.join(homepath, datadir + "previous_results.json"), 'w') as f:
        json.dump(lists, f)
Example #19
Source File: getmetrics_cgroup.py    From InsightAgent with Apache License 2.0
def update_docker():
    global dockers
    global newInstanceAvailable
    global dockerInstances


    proc = subprocess.Popen(["docker ps --no-trunc | awk '{if(NR>1) print $NF}'"], stdout=subprocess.PIPE, shell=True)
    (out, err) = proc.communicate()
    dockers = out.split("\n")
    if os.path.isfile(os.path.join(homepath,datadir+"totalInstances.json")) == False:
        towritePreviousInstances = {}
        for containers in dockers:
            if containers != "":
                dockerInstances.append(containers)
        towritePreviousInstances["overallDockerInstances"] = dockerInstances
        with open(os.path.join(homepath,datadir+"totalInstances.json"),'w') as f:
            json.dump(towritePreviousInstances,f)
    else:
        with open(os.path.join(homepath,datadir+"totalInstances.json"),'r') as f:
            dockerInstances = json.load(f)["overallDockerInstances"]
    newInstances = []
    for eachDocker in dockers:
        if eachDocker == "":
            continue
        newInstances.append(eachDocker)
    if newInstances != dockerInstances:  # cmp() was removed in Python 3; direct comparison is equivalent
        try:
            writeInsatanceFile("currentInstances", newInstances)
            writeInsatanceFile("previousInstances", dockerInstances)
        except Exception as e:
            print(e)
        towritePreviousInstances = {}
        towritePreviousInstances["overallDockerInstances"] = newInstances
        with open(os.path.join(homepath,datadir+"totalInstances.json"),'w') as f:
            json.dump(towritePreviousInstances,f)
        newInstanceAvailable = True
        dockerInstances = newInstances 
Example #20
Source File: getmetrics_cgroup.py    From InsightAgent with Apache License 2.0
def writeInsatanceFile(filename, instanceList):
    global hostname
    jsonData = {}
    print "In Function writeInsatanceFile()"
    print instanceList
    print os.path.join(homepath, datadir + filename + ".json")
    newInstanceList = []
    for index in range(len(instanceList)):
        dockerID = instanceList[index]
        if len(instanceList[index]) > 12:
            dockerID = instanceList[index][:12]
        newInstanceList.append(dockerID + "_" + hostname)
    jsonData["instanceList"] = newInstanceList
    with open(os.path.join(homepath, datadir + filename + ".json"), 'w') as f:
        json.dump(jsonData, f) 
Example #21
Source File: dumpload.py    From fishroom with GNU General Public License v3.0
def dump_meta(r, tofilename):
    backup = {}

    rkeys = [
        APIClientManager.clients_name_key,
        RedisNickStore.NICKNAME_KEY, RedisNickStore.USERNAME_KEY,
        RedisStickerURLStore.STICKER_KEY,
    ]

    for rk in rkeys:
        b = {}
        for k, v in r.hgetall(rk).items():
            try:
                k, v = k.decode('utf-8'), v.decode('utf-8')
            except Exception:
                continue
            b[k] = v
        backup[rk] = b

    backup[APIClientManager.clients_key] = {
        k.decode('utf-8'): base64.b64encode(v).decode('utf-8')
        for k, v in r.hgetall(APIClientManager.clients_key).items()
    }

    counters = [Counter(r, name) for name in ('qiniu', )]
    for c in counters:
        backup[c.key] = c.incr()

    with open(tofilename, 'w') as f:
        json.dump(backup, f, indent=4) 
Example #22
Source File: mxdoc.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def _get_src_download_btn(out_prefix, langs, lines):
    btn = '<div class="btn-group" role="group">\n'
    for lang in langs:
        ipynb = out_prefix
        if lang == 'python':
            ipynb += '.ipynb'
        else:
            ipynb += '_' + lang + '.ipynb'
        with open(ipynb, 'w') as f:
            json.dump(_get_jupyter_notebook(lang, lines), f)
        f = ipynb.split('/')[-1]
        btn += '<div class="download-btn"><a href="%s" download="%s">' \
               '<span class="glyphicon glyphicon-download-alt"></span> %s</a></div>' % (f, f, f)
    btn += '</div>\n'
    return btn 
Example #23
Source File: utils.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def quick_save_json(dir_path=os.curdir, file_name="", content=None):
    file_path = os.path.join(dir_path, file_name)
    if not os.path.isdir(dir_path):
        os.makedirs(dir_path)
    with open(file_path, 'w') as fp:
        json.dump(content, fp)
    logging.info('Save json into %s' % file_path) 
Example #24
Source File: utils.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def save_misc(dir_path=os.curdir, epoch=None, name="", content=None):
    prefix = os.path.join(dir_path, name)
    _, _, misc_saving_path = get_saving_path(prefix, epoch)
    with open(misc_saving_path, 'w') as fp:
        json.dump(content, fp)
    return misc_saving_path 
Example #25
Source File: api.py    From google_streetview with MIT License
def save_metadata(self, file_path):
    """Save Google Street View metadata from parameter queries.
    
    Args:
      file_path (str):
        Path of the file with extension to save the :class:`api.results`.metadata
    """
    with open(file_path, 'w+') as out_file:
      json.dump(self.metadata, out_file) 
Example #26
Source File: api.py    From google_streetview with MIT License
def download_links(self, dir_path, metadata_file='metadata.json', metadata_status='status', status_ok='OK'):
    """Download Google Street View images from parameter queries if they are available.
    
    Args:
      dir_path (str):
        Path of directory to save downloads of images from :class:`api.results`.links
      metadata_file (str):
         Name of the file with extension to save the :class:`api.results`.metadata
      metadata_status (str):
        Key name of the status value from :class:`api.results`.metadata response from the metadata API request.
      status_ok (str):
        Value from the metadata API response status indicating that an image is available.
    """
    metadata = self.metadata
    if not path.isdir(dir_path):
      makedirs(dir_path)
    
    # (download) Download images if status from metadata is ok
    for i, url in enumerate(self.links):
      if metadata[i][metadata_status] == status_ok:
        file_path = path.join(dir_path, 'gsv_' + str(i) + '.jpg')
        metadata[i]['_file'] = path.basename(file_path) # add file reference
        helpers.download(url, file_path)
    
    # (metadata) Save metadata with file reference
    metadata_path = path.join(dir_path, metadata_file)
    with open(metadata_path, 'w') as out_file:
      json.dump(metadata, out_file) 
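A typical call sequence tying these two methods together, following the package's documented usage (the parameter values here are hypothetical):

import google_streetview.api

params = [{'size': '600x300', 'location': '46.414382,10.013988', 'key': 'YOUR_API_KEY'}]
results = google_streetview.api.results(params)
results.save_metadata('metadata.json')  # Example #25
results.download_links('downloads')     # Example #26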
Example #27
Source File: train.py    From neural-pipeline with MIT License
def _save_state(self, ckpts_manager: CheckpointsManager, best_ckpts_manager: CheckpointsManager or None,
                    cur_best_state: float or None, epoch_idx: int) -> float or None:
        """
        Internal method used for save states after epoch end

        :param ckpts_manager: ordinal checkpoints manager
        :param best_ckpts_manager: checkpoints manager, used for store best stages
        :param cur_best_state: current best stage metric value
        :return: new best stage metric value or None if it not update
        """
        def save_trainer(ckp_manager):
            with open(ckp_manager.trainer_file(), 'w') as out:
                json.dump({'last_epoch': epoch_idx}, out)

        if self._best_state_rule is not None:
            new_best_state = self._best_state_rule()
            if cur_best_state is None:
                self._data_processor.save_state()
                save_trainer(ckpts_manager)
                ckpts_manager.pack()
                return new_best_state
            else:
                if new_best_state <= cur_best_state:
                    self._data_processor.set_checkpoints_manager(best_ckpts_manager)
                    self._data_processor.save_state()
                    save_trainer(best_ckpts_manager)
                    best_ckpts_manager.pack()
                    self._data_processor.set_checkpoints_manager(ckpts_manager)
                    return new_best_state

        self._data_processor.save_state()
        save_trainer(ckpts_manager)
        ckpts_manager.pack()
        return None 
Example #28
Source File: monitoring.py    From neural-pipeline with MIT License
def _flush_metrics(self) -> None:
        """
        Flush metrics files
        """
        with open(self._get_file_name(True), 'w') as out:
            json.dump(self._storage, out)

        if self._final_metrics_file is not None:
            res = dict_recursive_bypass(self._storage, lambda v: v[-1])
            with open(self._final_metrics_file, 'w') as out:
                json.dump(res, out) 
Example #29
Source File: conftest.py    From NiBetaSeries with MIT License
def sub_rest_metadata(bids_dir, bids_json_fname=bids_rest_json_fname):
    sub_json = bids_dir.ensure(bids_json_fname)  # use the parameter rather than the module-level constant
    tr = 2
    bold_metadata = {"RepetitionTime": tr, "TaskName": "rest"}

    with open(str(sub_json), 'w') as md:
        json.dump(bold_metadata, md)

    return sub_json 
Example #30
Source File: conftest.py    From NiBetaSeries with MIT License
def sub_top_metadata(bids_dir, bids_json_fname='task-waffles_bold.json'):
    sub_json = bids_dir.ensure(bids_json_fname)
    tr = 2
    bold_metadata = {"RepetitionTime": tr, "TaskName": "waffles"}

    with open(str(sub_json), 'w') as md:
        json.dump(bold_metadata, md)

    return sub_json