Python json.dump() Examples

The following code examples show how to use json.dump(). They are drawn from open source Python projects.
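
As a quick orientation before the project examples, here is a minimal sketch of the call itself; the data and file name below are made up purely for illustration:

import json

# Hypothetical data and path, chosen only to illustrate common keyword arguments:
# indent pretty-prints, sort_keys orders the keys, ensure_ascii=False keeps
# non-ASCII characters such as "café" readable in the output file.
settings = {"name": "café", "retries": 3}
with open("settings.json", "w", encoding="utf-8") as f:
    json.dump(settings, f, indent=4, sort_keys=True, ensure_ascii=False)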

Example 1
Project: navitia_client   Author: leonardbinet   File: parser.py    MIT License
def write_all(self, directory):
        # Get results
        unnested = pd.DataFrame(self.unnested_items)  # df
        nested = self.nested_items  # dict
        # Write item csv
        unnested.to_csv(os.path.join(directory, self.item_name + ".csv"))
        # Write item json
        with open(os.path.join(directory, self.item_name + ".json"), 'w') as f:
            json.dump(nested, f, ensure_ascii=False)
        # Write links (of first page)
        with open(os.path.join(directory, "links.json"), 'w') as f:
            json.dump(self.links, f, ensure_ascii=False)
        # Write disruptions (if item different)
        if self.item_name != "disruptions":
            unnested_dis = pd.DataFrame(self.disruptions)  # df
            unnested_dis.to_csv(os.path.join(directory, "disruptions.csv"))
        # Write logs
        with open(os.path.join(directory, "parse_log.json"), 'w') as f:
            json.dump(self.log, f, ensure_ascii=False) 
Example 2
Project: fs_image   Author: facebookincubator   File: layer_mount_config.py    MIT License
def main(stdin, stdout, layer_target):
    mount_config = json.load(stdin)

    for key in ('build_source', 'is_directory'):
        if key in mount_config:
            raise RuntimeError(
                f'`{key}` must not be set in `mount_config = {mount_config}`'
            )

    mount_config['is_directory'] = True
    mount_config['build_source'] = {
        # Don't attempt to target-tag this because this would complicate
        # MountItem, which would have to contain `Subvol` and know how to
        # serialize it (P106589820).  This is much messier than the current
        # approach of explicit target & layer lookups in `_BuildSource`.
        'source': layer_target,
        # The compiler knows how to resolve the above target to a layer path.
        # For now, we don't support mounting a subdirectory of a layer because
        # that might make packaging more complicated, but it could be done.
        'type': 'layer',
    }

    json.dump(mount_config, stdout) 
Example 3
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer 
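
A hypothetical usage sketch of the register() method above: the standard json module already exposes load() and dump(), so it passes the basic validation (the serializer manager instance, here called manager, is an assumption):

import json

manager.register('json', json)  # files saved via this serializer get a .json extension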
Example 4
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License
def cache_data(self, name, data):
        """Save ``data`` to cache under ``name``.

        If ``data`` is ``None``, the corresponding cache file will be
        deleted.

        :param name: name of datastore
        :param data: data to store. This may be any object supported by
                the cache serializer

        """
        serializer = manager.serializer(self.cache_serializer)

        cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))

        if data is None:
            if os.path.exists(cache_path):
                os.unlink(cache_path)
                self.logger.debug('deleted cache file: %s', cache_path)
            return

        with atomic_writer(cache_path, 'wb') as file_obj:
            serializer.dump(data, file_obj)

        self.logger.debug('cached data: %s', cache_path) 
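
A hedged usage sketch for cache_data() above, assuming a Workflow instance named wf: passing data writes it through the configured cache serializer, while passing None removes the cache file:

wf.cache_data('results', {'items': [1, 2, 3]})  # serialize and store under 'results'
wf.cache_data('results', None)                  # delete the corresponding cache file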
Example 5
Project: python-samples   Author: dek-odoo   File: WerkZeug.py    Apache License 2.0
def removeIdFromFile(self, sid):
        Debugg.info("Removing SID : " + str(sid))
        jsondata = App.getJsonFromFile(App.jsonFile)
        i = 0
        try:
            with open(App.jsonFile, 'w') as outfile:
                outfile.write('[')
                for student in jsondata:  # use yield
                    Debugg.info("STR(I) " + str(i))
                    if sid != i:
                        json.dump(jsondata[i], outfile)
                        try:
                            if jsondata[i + 1]:
                                outfile.write(',')
                        except IndexError as ie:
                            Debugg.info('IndexOutOfBounds %s', str(ie))
                    else:
                        Debugg.info(type(student))
                        Debugg.info(student)
                    i += 1
                outfile.write(']')
            self.sid -= 1 
Example 6
Project: python-samples   Author: dek-odoo   File: WerkZeug.py    Apache License 2.0
def removeCurIdFromFile(self):
        # acquire lock
        # Debugg.info(jsondata)
        # Debugg.info("i is %d" % i)
        Debugg.info(self.sid)
        jsondata = App.getJsonFromFile(App.jsonFile)
        i = 0
        try:
            with open(App.jsonFile, 'w') as outfile:
                outfile.write('[')
                for student in jsondata:  # use yield
                    if self.sid != i:
                        json.dump(jsondata[i], outfile)
                        try:
                            if jsondata[i + 1]:
                                outfile.write(',')
                        except IndexError as ie:
                            Debugg.info('IndexOutOfBounds %s', str(ie))
                    else:
                        Debugg.info(type(student))
                        Debugg.info(student)
                    i += 1
                outfile.write(']')
Example 7
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer 
Example 8
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License
def cache_data(self, name, data):
        """Save ``data`` to cache under ``name``.

        If ``data`` is ``None``, the corresponding cache file will be
        deleted.

        :param name: name of datastore
        :param data: data to store. This may be any object supported by
                the cache serializer

        """
        serializer = manager.serializer(self.cache_serializer)

        cache_path = self.cachefile('%s.%s' % (name, self.cache_serializer))

        if data is None:
            if os.path.exists(cache_path):
                os.unlink(cache_path)
                self.logger.debug('deleted cache file: %s', cache_path)
            return

        with atomic_writer(cache_path, 'wb') as file_obj:
            serializer.dump(data, file_obj)

        self.logger.debug('cached data: %s', cache_path) 
Example 9
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: coco.py    MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.
    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if osp.exists(cache_file):
      with open(cache_file, 'rb') as fid:
        roidb = pickle.load(fid)
      print('{} gt roidb loaded from {}'.format(self.name, cache_file))
      return roidb

    gt_roidb = [self._load_coco_annotation(index)
                for index in self._image_index]

    with open(cache_file, 'wb') as fid:
      pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb 
Example 10
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: coco.py    MIT License
def _write_coco_results_file(self, all_boxes, res_file):
    # [{"image_id": 42,
    #   "category_id": 18,
    #   "bbox": [258.15,41.29,348.26,243.78],
    #   "score": 0.236}, ...]
    results = []
    for cls_ind, cls in enumerate(self.classes):
      if cls == '__background__':
        continue
      print('Collecting {} results ({:d}/{:d})'.format(cls, cls_ind,
                                                       self.num_classes - 1))
      coco_cat_id = self._class_to_coco_cat_id[cls]
      results.extend(self._coco_results_one_category(all_boxes[cls_ind],
                                                     coco_cat_id))
    print('Writing results json to {}'.format(res_file))
    with open(res_file, 'w') as fid:
      json.dump(results, fid) 
Example 11
Project: comet-commonsense   Author: atcbosselut   File: data.py    Apache License 2.0
def save_eval_file(opt, stats, eval_type="losses", split="dev", ext="pickle"):
    if cfg.test_save:
        name = "{}/{}.{}".format(utils.make_name(
            opt, prefix="garbage/{}/".format(eval_type),
            is_dir=True, eval_=True), split, ext)
    else:
        name = "{}/{}.{}".format(utils.make_name(
            opt, prefix="results/{}/".format(eval_type),
            is_dir=True, eval_=True), split, ext)
    print("Saving {} {} to {}".format(split, eval_type, name))

    if ext == "pickle":
        with open(name, "wb") as f:
            pickle.dump(stats, f)
    elif ext == "txt":
        with open(name, "w") as f:
            f.write(stats)
    elif ext == "json":
        with open(name, "w") as f:
            json.dump(stats, f)
    else:
        raise 
Example 12
Project: Traffic_sign_detection_YOLO   Author: AmeyaWagh   File: build.py    MIT License
def savepb(self):
		"""
		Create a standalone const graph def that
		C++ can load and run.
		"""
		darknet_pb = self.to_darknet()
		flags_pb = self.FLAGS
		flags_pb.verbalise = False
		
		flags_pb.train = False
		# rebuild another tfnet. all const.
		tfnet_pb = TFNet(flags_pb, darknet_pb)		
		tfnet_pb.sess = tf.Session(graph = tfnet_pb.graph)
		# tfnet_pb.predict() # uncomment for unit testing
		name = 'built_graph/{}.pb'.format(self.meta['name'])
		os.makedirs(os.path.dirname(name), exist_ok=True)
		# Save dump of everything in meta
		with open('built_graph/{}.meta'.format(self.meta['name']), 'w') as fp:
			json.dump(self.meta, fp)
		self.say('Saving const graph def to {}'.format(name))
		graph_def = tfnet_pb.sess.graph_def
		tf.train.write_graph(graph_def,'./', name, False) 
Example 13
Project: pytuber   Author: tefra   File: test_storage.py    MIT License
def test_from_file(self):
        try:
            tmp = tempfile.mkdtemp()
            file_path = os.path.join(tmp, "foo.json")
            with open(file_path, "w") as fp:
                json.dump(dict(a=True), fp)

            Registry.from_file(file_path)

            self.assertEqual(dict(a=True), Registry())

            Registry.set("a", False)

            self.assertFalse(Registry.get("a"))

            Registry.from_file(file_path)
            self.assertFalse(Registry.get("a"))

        finally:
            shutil.rmtree(tmp) 
Example 14
Project: FasterRCNN_TF_Py3   Author: upojzsb   File: coco.py    MIT License
def gt_roidb(self):
        """
        Return the database of ground-truth regions of interest.
        This function loads/saves from/to a cache file to speed up future calls.
        """
        cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
        if osp.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = pickle.load(fid)
            print('{} gt roidb loaded from {}'.format(self.name, cache_file))
            return roidb

        gt_roidb = [self._load_coco_annotation(index)
                    for index in self._image_index]

        with open(cache_file, 'wb') as fid:
            pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
        print('wrote gt roidb to {}'.format(cache_file))
        return gt_roidb 
Example 15
Project: FasterRCNN_TF_Py3   Author: upojzsb   File: coco.py    MIT License
def _write_coco_results_file(self, all_boxes, res_file):
        # [{"image_id": 42,
        #   "category_id": 18,
        #   "bbox": [258.15,41.29,348.26,243.78],
        #   "score": 0.236}, ...]
        results = []
        for cls_ind, cls in enumerate(self.classes):
            if cls == '__background__':
                continue
            print('Collecting {} results ({:d}/{:d})'.format(cls, cls_ind,
                                                             self.num_classes - 1))
            coco_cat_id = self._class_to_coco_cat_id[cls]
            results.extend(self._coco_results_one_category(all_boxes[cls_ind],
                                                           coco_cat_id))
        print('Writing results json to {}'.format(res_file))
        with open(res_file, 'w') as fid:
            json.dump(results, fid) 
Example 16
Project: VisualNN   Author: angelhunt   File: test_views.py    GNU General Public License v3.0
def test_keras_import(self):
        model = Sequential()
        model.add(LSTM(64, return_sequences=True, input_shape=(10, 64)))
        model.add(SimpleRNN(32, return_sequences=True))
        model.add(GRU(10, kernel_regularizer=regularizers.l2(0.01),
                      bias_regularizer=regularizers.l2(0.01), recurrent_regularizer=regularizers.l2(0.01),
                      activity_regularizer=regularizers.l2(0.01), kernel_constraint='max_norm',
                      bias_constraint='max_norm', recurrent_constraint='max_norm'))
        model.build()
        json_string = Model.to_json(model)
        with open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'w') as out:
            json.dump(json.loads(json_string), out, indent=4)
        sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'r')
        response = self.client.post(reverse('keras-import'), {'file': sample_file})
        response = json.loads(response.content)
        layerId = sorted(response['net'].keys())
        self.assertEqual(response['result'], 'success')
        self.assertGreaterEqual(len(response['net'][layerId[1]]['params']), 7)
        self.assertGreaterEqual(len(response['net'][layerId[3]]['params']), 7)
        self.assertGreaterEqual(len(response['net'][layerId[6]]['params']), 7)


# ********** Embedding Layers ********** 
Example 17
Project: VisualNN   Author: angelhunt   File: test_views.py    GNU General Public License v3.0
def test_keras_import(self):
        model = Sequential()
        model.add(BatchNormalization(center=True, scale=True, beta_regularizer=regularizers.l2(0.01),
                                     gamma_regularizer=regularizers.l2(0.01),
                                     beta_constraint='max_norm', gamma_constraint='max_norm',
                                     input_shape=(16, 10)))
        model.build()
        json_string = Model.to_json(model)
        with open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'w') as out:
            json.dump(json.loads(json_string), out, indent=4)
        sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'r')
        response = self.client.post(reverse('keras-import'), {'file': sample_file})
        response = json.loads(response.content)
        layerId = sorted(response['net'].keys())
        self.assertEqual(response['result'], 'success')
        self.assertEqual(response['net'][layerId[0]]['info']['type'], 'Scale')
        self.assertEqual(response['net'][layerId[1]]['info']['type'], 'BatchNorm')


# ********** Noise Layers ********** 
Example 18
Project: models   Author: kipoi   File: prepare_model_yaml.py    MIT License
def make_model_yaml(template_yaml, model_json, output_yaml_path):
    #
    with open(template_yaml, 'r') as f:
        model_yaml = yaml.load(f)
    #
    # get the model config:
    json_file = open(model_json, 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    loaded_model = keras.models.model_from_json(loaded_model_json)
    #
    model_yaml["schema"]["targets"] = []
    for oname, oshape in zip(loaded_model.output_names, loaded_model.output_shape):
        append_el = {"name": oname,
                     "shape": str(oshape),  # replace("None,", "")
                     "doc": "Methylation probability for %s" % oname}
        model_yaml["schema"]["targets"].append(append_el)
    #
    with open(output_yaml_path, 'w') as f:
        yaml.dump(model_yaml, f, default_flow_style=False) 
Example 19
Project: models   Author: kipoi   File: prepare_model_yaml.py    MIT License
def make_secondary_dl_yaml(template_yaml, model_json, output_yaml_path):
    with open(template_yaml, 'r') as f:
        model_yaml = yaml.load(f)
    #
    # get the model config:
    json_file = open(model_json, 'r')
    loaded_model_json = json_file.read()
    json_file.close()
    loaded_model = keras.models.model_from_json(loaded_model_json)
    #
    model_yaml["output_schema"]["targets"] = []
    for oname, oshape in zip(loaded_model.output_names, loaded_model.output_shape):
        append_el = {"name": oname,
                     "shape": str(oshape),  # replace("None,", "")
                     "doc": "Methylation probability for %s" % oname}
        model_yaml["output_schema"]["targets"].append(append_el)
    #
    with open(output_yaml_path, 'w') as f:
        yaml.dump(model_yaml, f, default_flow_style=False) 
Example 20
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: metadata.py    MIT License
def write(self, path=None, fileobj=None, legacy=False, skip_unknown=True):
        if [path, fileobj].count(None) != 1:
            raise ValueError('Exactly one of path and fileobj is needed')
        self.validate()
        if legacy:
            if self._legacy:
                legacy_md = self._legacy
            else:
                legacy_md = self._to_legacy()
            if path:
                legacy_md.write(path, skip_unknown=skip_unknown)
            else:
                legacy_md.write_file(fileobj, skip_unknown=skip_unknown)
        else:
            if self._legacy:
                d = self._from_legacy()
            else:
                d = self._data
            if fileobj:
                json.dump(d, fileobj, ensure_ascii=True, indent=2,
                          sort_keys=True)
            else:
                with codecs.open(path, 'w', 'utf-8') as f:
                    json.dump(d, f, ensure_ascii=True, indent=2,
                              sort_keys=True) 
Example 21
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: outdated.py    MIT License
def save(self, pypi_version, current_time):
        # Check to make sure that we own the directory
        if not check_path_owner(os.path.dirname(self.statefile_path)):
            return

        # Now that we've ensured the directory is owned by this user, we'll go
        # ahead and make sure that all our directories are created.
        ensure_dir(os.path.dirname(self.statefile_path))

        # Attempt to write out our version check file
        with lockfile.LockFile(self.statefile_path):
            if os.path.exists(self.statefile_path):
                with open(self.statefile_path) as statefile:
                    state = json.load(statefile)
            else:
                state = {}

            state[sys.prefix] = {
                "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                "pypi_version": pypi_version,
            }

            with open(self.statefile_path, "w") as statefile:
                json.dump(state, statefile, sort_keys=True,
                          separators=(",", ":")) 
Example 22
Project: factotum   Author: Denubis   File: newmap.py    GNU General Public License v3.0
def newFactorioMap():
	FACTORIOPATH = getFactorioPath()

	mapFileExamplePath="%s/data/map-gen-settings.example.json" % (FACTORIOPATH)
	mapFilePath="%s/config/mapsettings.json" % (FACTORIOPATH)

	if not os.path.isfile(mapFilePath):		
		with codecs.open(mapFileExamplePath, 'r', encoding='utf-8') as map_file:
			mapJson = json.load(map_file)
	
			mapJson['starting_area'] = "very-high"

			for control in mapJson['autoplace_controls']:
				mapJson['autoplace_controls'][control]['size'] = "high"
				mapJson['autoplace_controls'][control]['richness'] = "very-high"
				mapJson['autoplace_controls'][control]['frequency'] = "low"

		with codecs.open(mapFilePath, 'w', encoding='utf-8') as map_file:
			json.dump(mapJson, map_file, indent=4)


	print(subprocess.check_output(
					["%s/bin/x64/factorio" % (FACTORIOPATH), 
					 "--create", "%s/saves/%s" % (FACTORIOPATH, 'Headless-{:%Y%m%d-%H%M%S}'.format(datetime.datetime.now())),
					 "--map-gen-settings", "%s/config/mapsettings.json" % (FACTORIOPATH)	]
					 ).decode("unicode_escape")) 
Example 23
Project: factotum   Author: Denubis   File: settings.py    GNU General Public License v3.0
def configAuthenticate(username, password):
	FACTORIOPATH = getFactorioPath()

	url = "https://auth.factorio.com/api-login"
	params = {'username': username, 'password': password, 'apiVersion': 2}


	if not os.path.isfile("%s/bin/x64/factorio" % (FACTORIOPATH) ):
		print("Could not find factorio at %s" % (FACTORIOPATH))
		sys.exit(1)


	print("Fetching token for %s" %  (username))
	myResponse = requests.post(url,data=params, verify=True)
	if myResponse.ok:
		jData = json.loads(myResponse.text)
		print("Writing %s to settings.json" % (jData[0]))
	else:
		# If the response code is not ok (200), print the resulting HTTP error code with its description
		myResponse.raise_for_status()
		sys.exit(1)

	try:
		with codecs.open(getSettingsFile(), 'r', encoding='utf-8') as settings_file:
			settingsJson = json.load(settings_file)
			settingsJson['token'] = jData[0]
			settingsJson['username'] = username
				


		with codecs.open("%s/config/settings.json" % (FACTORIOPATH), 'w', encoding='utf-8') as settings_file:
			json.dump(settingsJson, settings_file, indent=4)
	except Exception as e:
		print(e)
		print("Help! Can't deal with the settings file!") 
Example 24
Project: fs_image   Author: facebookincubator   File: test_update_package_db.py    MIT License
def _write_bzl_db(self, db_path, dct):
        with open(db_path, 'w') as outfile:
            # Not using `_with_generated_header` to ensure that we are
            # resilient to changes in the header.
            outfile.write(f'# A {_GENERATED} file\n# second header line\n')
            outfile.write(updb._BZL_DB_PREFIX)
            json.dump(dct, outfile)
        # Make sure our write implementation is sane.
        self.assertEqual(dct, updb._read_bzl_db(db_path)) 
Example 25
Project: fs_image   Author: facebookincubator   File: test_update_package_db.py    MIT License
def test_json_db(self):
        with temp_dir() as td:
            os.makedirs(td / 'idb/pkg')
            with open(td / 'idb/pkg/tag.json', 'w') as outfile:
                # Not using `_with_generated_header` to ensure that we are
                # resilient to changes in the header.
                outfile.write(f'# A {_GENERATED} file\n# 2nd header line\n')
                json.dump({'foo': 'bar'}, outfile)
            self.assertEqual(
                {'pkg': {'tag': {'foo': 'bar'}}},
                updb._read_json_dir_db(td / 'idb'),
            )
            self._main([
                '--db', (td / 'idb').decode(),
                '--out-db', (td / 'odb').decode(),
            ])
            self.assertEqual([b'pkg'], os.listdir(td / 'odb'))
            self.assertEqual([b'tag.json'], os.listdir(td / 'odb/pkg'))
            self._check_file(
                td / 'odb/pkg/tag.json',
                '# ' + _GENERATED + textwrap.dedent(''' \
                SignedSource<<e8b8ab0d998b5fe5429777af98579c12>>
                # Update via `how`
                {
                    "x": "z"
                }
                ''')) 
Example 26
Project: fs_image   Author: facebookincubator   File: test_mount.py    MIT License
def test_mount_item_default_mountpoint(self):
        with tempfile.TemporaryDirectory() as mnt_target:
            mount_config = {
                'is_directory': True,
                'build_source': {'type': 'layer', 'source': '//fake:path'},
            }
            with open(os.path.join(mnt_target, 'mountconfig.json'), 'w') as f:
                json.dump(mount_config, f)
            # Since our initial mountconfig lacks `default_mountpoint`, the
            # item requires its `mountpoint` to be set.
            with self.assertRaisesRegex(AssertionError, 'lacks mountpoint'):
                MountItem(
                    from_target='t',
                    mountpoint=None,
                    target=mnt_target,
                    mount_config=None,
                )

            # Now, check that the default gets used.
            mount_config['default_mountpoint'] = 'potato'
            with open(os.path.join(mnt_target, 'mountconfig.json'), 'w') as f:
                json.dump(mount_config, f)
            self.assertEqual(self._make_mount_item(
                mountpoint=None,
                target=mnt_target,
                mount_config=mount_config,
            ).mountpoint, 'potato') 
Example 27
Project: odorik   Author: nijel   File: main.py    GNU General Public License v3.0
def print_json(self, value):
        """JSON print."""
        json.dump(value, self.stdout, indent=2) 
Example 28
Project: leapp-repository   Author: oamg   File: dnfplugin.py    Apache License 2.0
def create_config(context, target_repoids, debug, test, tasks):
    """
    Creates the configuration data file for our DNF plugin.
    """
    context.makedirs(os.path.dirname(DNF_PLUGIN_DATA_PATH), exists_ok=True)
    with context.open(DNF_PLUGIN_DATA_PATH, 'w+') as f:
        config_data = build_plugin_data(target_repoids=target_repoids, debug=debug, test=test, tasks=tasks)
        json.dump(config_data, f, sort_keys=True, indent=2) 
Example 29
Project: PEAKachu   Author: tbischler   File: window.py    ISC License
def _write_parameters(self):
        parameter_dict = {"max_insert_size": self._max_insert_size,
                          "paired_end": self._paired_end,
                          "libraries": {}}
        for lib, size_factor in zip(
                self._lib_dict.values(), self._size_factors):
            parameter_dict["libraries"][lib.lib_name] = {
                "bam_file": lib.bam_file,
                "size_factor": size_factor}
        with open("{}/parameters.json".format(self._output_folder),
                  'w') as parameter_fh:
            json.dump(parameter_dict, parameter_fh, indent=4, sort_keys=True) 
Example 30
Project: PEAKachu   Author: tbischler   File: adaptive.py    ISC License
def _write_parameters(self):
        parameter_dict = {"max_insert_size": self._max_insert_size,
                          "paired_end": self._paired_end,
                          "libraries": {}}
        for lib, size_factor in zip(
                self._lib_dict.values(), self._size_factors):
            parameter_dict["libraries"][lib.lib_name] = {
                "bam_file": lib.bam_file,
                "size_factor": size_factor}
        with open("{}/parameters.json".format(self._output_folder),
                  'w') as parameter_fh:
            json.dump(parameter_dict, parameter_fh, indent=4, sort_keys=True) 
Example 31
Project: kicker-module   Author: EvanTheB   File: backend.py    GNU General Public License v3.0
def init_data_file(filename):
    to_save = {}
    to_save['events'] = []
    with open(filename, 'w') as fd:
        json.dump(
            to_save,
            fd,
            sort_keys=True,
            indent=4,
            separators=(',', ': ')) 
Example 32
Project: kicker-module   Author: EvanTheB   File: backend.py    GNU General Public License v3.0
def _save_to_log(self, log_fp):
        to_save = {}
        to_save['events'] = []
        for e in self.events:
            to_save['events'].append(e.to_json())
        json.dump(
            to_save,
            log_fp,
            sort_keys=True,
            indent=4,
            separators=(',', ': ')) 
Example 33
Project: mlbv   Author: kmac   File: session.py    GNU General Public License v3.0
def save(self):
        with open(SESSION_FILE, 'w') as outfile:
            json.dump(self._state, outfile)
        self.session.cookies.save(COOKIE_FILE) 
Example 34
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow3.py    MIT License
def send_feedback(self):
        """Print stored items to console/Alfred as JSON."""
        json.dump(self.obj, sys.stdout)
        sys.stdout.flush() 
Example 35
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License
def dump(cls, obj, file_obj):
        """Serialize object ``obj`` to open JSON file.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: JSON-serializable data structure
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """
        return json.dump(obj, file_obj, indent=2, encoding='utf-8') 
Example 36
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License
def dump(cls, obj, file_obj):
        """Serialize object ``obj`` to open pickle file.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: Python object
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """
        return cPickle.dump(obj, file_obj, protocol=-1) 
Example 37
Project: alfred-yubikey-otp   Author: robertoriv   File: workflow.py    MIT License
def dump(cls, obj, file_obj):
        """Serialize object ``obj`` to open pickle file.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: Python object
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """
        return pickle.dump(obj, file_obj, protocol=-1)


# Set up default manager and register built-in serializers 
Example 38
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow3.py    MIT License
def send_feedback(self):
        """Print stored items to console/Alfred as JSON."""
        json.dump(self.obj, sys.stdout)
        sys.stdout.flush() 
Example 39
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License
def dump(cls, obj, file_obj):
        """Serialize object ``obj`` to open JSON file.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: JSON-serializable data structure
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """
        return json.dump(obj, file_obj, indent=2, encoding='utf-8') 
Example 40
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License
def dump(cls, obj, file_obj):
        """Serialize object ``obj`` to open pickle file.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: Python object
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """
        return cPickle.dump(obj, file_obj, protocol=-1) 
Example 41
Project: wechat-alfred-workflow   Author: TKkk-iOSer   File: workflow.py    MIT License
def dump(cls, obj, file_obj):
        """Serialize object ``obj`` to open pickle file.

        .. versionadded:: 1.8

        :param obj: Python object to serialize
        :type obj: Python object
        :param file_obj: file handle
        :type file_obj: ``file`` object

        """
        return pickle.dump(obj, file_obj, protocol=-1)


# Set up default manager and register built-in serializers 
Example 42
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: coco.py    MIT License
def _do_detection_eval(self, res_file, output_dir):
    ann_type = 'bbox'
    coco_dt = self._COCO.loadRes(res_file)
    coco_eval = COCOeval(self._COCO, coco_dt)
    coco_eval.params.useSegm = (ann_type == 'segm')
    coco_eval.evaluate()
    coco_eval.accumulate()
    self._print_detection_eval_metrics(coco_eval)
    eval_file = osp.join(output_dir, 'detection_results.pkl')
    with open(eval_file, 'wb') as fid:
      pickle.dump(coco_eval, fid, pickle.HIGHEST_PROTOCOL)
    print('Wrote COCO eval results to: {}'.format(eval_file)) 
Example 43
Project: comet-commonsense   Author: atcbosselut   File: utils.py    Apache License 2.0
def generate_config_files(type_, key, name="base", eval_mode=False):
    with open("config/default.json".format(type_), "r") as f:
        base_config = json.load(f)
    with open("config/{}/default.json".format(type_), "r") as f:
        base_config_2 = json.load(f)
    if eval_mode:
        with open("config/{}/eval_changes.json".format(type_), "r") as f:
            changes_by_machine = json.load(f)
    else:
        with open("config/{}/changes.json".format(type_), "r") as f:
            changes_by_machine = json.load(f)

    base_config.update(base_config_2)

    if name in changes_by_machine:
        changes = changes_by_machine[name]
    else:
        changes = changes_by_machine["base"]

    # for param in changes[key]:
    #     base_config[param] = changes[key][param]

    replace_params(base_config, changes[key])

    mkpath("config/{}".format(type_))

    with open("config/{}/config_{}.json".format(type_, key), "w") as f:
        json.dump(base_config, f, indent=4) 
Example 44
Project: twitter-export-image-fill   Author: mwichary   File: twitter-export-image-fill.py    The Unlicense
def resave_data(data, data_filename, first_data_line, year_str, month_str):
  # Writing to a separate file so that we can only copy over the
  # main file when done
  data_filename_temp = 'data/js/tweets/%s_%s.js.tmp' % (year_str, month_str)
  with open(data_filename_temp, 'w') as f:
    f.write(first_data_line)
    json.dump(data, f, indent=2)
  os.remove(data_filename)
  os.rename(data_filename_temp, data_filename)


# Download a given image directly from the URL 
Example 45
Project: mietmap-scraper   Author: CodeforKarlsruhe   File: scrape.py    MIT License
def memoize_persistently(filename):
    """
    Persistently memoize a function's return values.

    This decorator memoizes a function's return values persistently
    over multiple runs of the program. The return values are stored
    in the given file using ``pickle``. If the decorated function is
    called again with arguments that it has already been called with
    then the return value is retrieved from the cache and returned
    without calling the function. If the function is called with
    previously unseen arguments then its return value is added to the
    cache and the cache file is updated.

    Both return values and arguments of the function must support the
    pickle protocol. The arguments must also be usable as dictionary
    keys.
    """
    try:
        with open(filename, 'rb') as cache_file:
            cache = pickle.load(cache_file)
    except IOError as e:
        if e.errno != errno.ENOENT:
            raise
        cache = {}

    def decorator(f):

        @functools.wraps(f)
        def wrapper(*args, **kwargs):
            key = args + tuple(sorted(kwargs.items()))
            try:
                return cache[key]
            except KeyError:
                value = cache[key] = f(*args, **kwargs)
                with open(filename, 'wb') as cache_file:
                    pickle.dump(cache, cache_file)
                return value

        return wrapper
    return decorator 
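
A usage sketch for the decorator above; the decorated function and cache filename are hypothetical:

import time

@memoize_persistently('squares.pickle')
def slow_square(n):
    time.sleep(1)  # stand-in for expensive work; runs once per distinct n
    return n * n

slow_square(3)  # computed, then written to squares.pickle
slow_square(3)  # returned from the cache without calling the function again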
Example 46
Project: mietmap-scraper   Author: CodeforKarlsruhe   File: scrape.py    MIT License
def dump_json(data, filename):
    """
    Dump data as JSON to file.
    """
    with codecs.open(filename, 'w', encoding='utf8') as f:
        json.dump(data, f, separators=(',', ':')) 
Example 47
Project: pytuber   Author: tefra   File: storage.py    MIT License
def persist(cls, path):
        with suppress(FileNotFoundError):
            with open(path, "w") as fp:
                json.dump(cls(), fp) 
Example 48
Project: FasterRCNN_TF_Py3   Author: upojzsb   File: coco.py    MIT License
def _do_detection_eval(self, res_file, output_dir):
        ann_type = 'bbox'
        coco_dt = self._COCO.loadRes(res_file)
        coco_eval = COCOeval(self._COCO, coco_dt)
        coco_eval.params.useSegm = (ann_type == 'segm')
        coco_eval.evaluate()
        coco_eval.accumulate()
        self._print_detection_eval_metrics(coco_eval)
        eval_file = osp.join(output_dir, 'detection_results.pkl')
        with open(eval_file, 'wb') as fid:
            pickle.dump(coco_eval, fid, pickle.HIGHEST_PROTOCOL)
        print('Wrote COCO eval results to: {}'.format(eval_file)) 
Example 49
Project: yts_torrents   Author: makkoncept   File: yts_am_api.py    MIT License
def create_json_file(json_file_number):
    name = 'torrents' + str(json_file_number) + '.json'
    with open(name, 'w') as f:
        content = {}
        json.dump(content, f) 
Example 50
Project: yts_torrents   Author: makkoncept   File: yts_am_api.py    MIT License
def add_torrent_to_json_file(json_file_number):
    name = 'torrents' + str(json_file_number) + '.json'
    with open(name, 'r') as f:
        content = json.load(f)
    content[title_long] = {'720_bluray': torrent_720_bluray, '1080_bluray': torrent_1080_bluray,
                           '720_web': torrent_720_web, '1080_web': torrent_1080_web}
    with open(name, 'w') as f:
        json.dump(content, f) 
Example 51
Project: flappybird-qlearning-bot   Author: chncyhn   File: bot.py    MIT License
def __init__(self):
        self.gameCNT = 0  # Game count of current run, incremented after every death
        self.DUMPING_N = 25  # Number of iterations to dump Q values to JSON after
        self.discount = 1.0
        self.r = {0: 1, 1: -1000}  # Reward function
        self.lr = 0.7
        self.load_qvalues()
        self.last_state = "420_240_0"
        self.last_action = 0
        self.moves = [] 
Example 52
Project: flappybird-qlearning-bot   Author: chncyhn   File: bot.py    MIT License
def dump_qvalues(self, force=False):
        """
        Dump the qvalues to the JSON file
        """
        if self.gameCNT % self.DUMPING_N == 0 or force:
            fil = open("data/qvalues.json", "w")
            json.dump(self.qvalues, fil)
            fil.close()
            print("Q-values updated on local file.") 
Example 53
Project: CSL_Hamburg_Noise   Author: CityScope   File: parse_city_scope_table.py    GNU General Public License v3.0
def save_buildings_from_city_scope(city_scope_address):
    config = get_config()

    # if the table origin is flipped to the southeast, instead of the regular northwest
    table_flipped = config['CITY_SCOPE'].getboolean('TABLE_FLIPPED')

    # dynamic input data from designer
    table = CityScopeTable.CityScopeTable(city_scope_address, table_flipped)
    grid_of_cells = create_grid_of_cells(table)
    geo_json = create_buildings_json(table, grid_of_cells)
    geo_json_merged = merge_adjacent_buildings(geo_json)

    # save geojson
    with open(config['NOISE_SETTINGS']['INPUT_JSON_BUILDINGS'], 'w') as f:  # text mode so json.dump can write str
        json.dump(geo_json_merged, f) 
Example 54
Project: VisualNN   Author: angelhunt   File: test_views.py    GNU General Public License v3.0
def test_keras_export(self):
        # Test 1
        img_input = Input((224, 224, 3))
        model = Conv2D(64, (3, 3), padding='same', dilation_rate=1, use_bias=True,
                       kernel_regularizer=regularizers.l1(), bias_regularizer='l1',
                       activity_regularizer='l1', kernel_constraint='max_norm',
                       bias_constraint='max_norm')(img_input)
        model = BatchNormalization(center=True, scale=True, beta_regularizer=regularizers.l2(0.01),
                                   gamma_regularizer=regularizers.l2(0.01),
                                   beta_constraint='max_norm', gamma_constraint='max_norm',)(model)
        model = Model(img_input, model)
        json_string = Model.to_json(model)
        with open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'w') as out:
            json.dump(json.loads(json_string), out, indent=4)
        sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'r')
        response = self.client.post(reverse('keras-import'), {'file': sample_file})
        response = json.loads(response.content)
        net = get_shapes(response['net'])
        response = self.client.post(reverse('keras-export'), {'net': json.dumps(net),
                                                              'net_name': ''})
        response = json.loads(response.content)
        self.assertEqual(response['result'], 'success')
        # Test 2
        tests = open(os.path.join(settings.BASE_DIR, 'tests', 'unit', 'ide',
                                  'caffe_export_test.json'), 'r')
        response = json.load(tests)
        tests.close()
        net = yaml.safe_load(json.dumps(response['net']))
        net = {'l0': net['HDF5Data']}
        # Currently we can't determine shape of HDF5Data Layer
        response = self.client.post(reverse('keras-export'), {'net': json.dumps(net),
                                                              'net_name': ''})
        response = json.loads(response.content)
        self.assertEqual(response['result'], 'error')


# *********** Keras Backend Test ********** 
Example 55
Project: VisualNN   Author: angelhunt   File: test_views.py    GNU General Public License v3.0
def keras_type_test(self, model, id, type):
        json_string = Model.to_json(model)
        with open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'w') as out:
            json.dump(json.loads(json_string), out, indent=4)
        sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'r')
        response = self.client.post(reverse('keras-import'), {'file': sample_file})
        response = json.loads(response.content)
        layerId = sorted(response['net'].keys())
        self.assertEqual(response['result'], 'success')
        self.assertEqual(response['net'][layerId[id]]['info']['type'], type) 
Example 56
Project: VisualNN   Author: angelhunt   File: test_views.py    GNU General Public License v3.0
def keras_param_test(self, model, id, params):
        json_string = Model.to_json(model)
        with open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'w') as out:
            json.dump(json.loads(json_string), out, indent=4)
        sample_file = open(os.path.join(settings.BASE_DIR, 'media', 'test.json'), 'r')
        response = self.client.post(reverse('keras-import'), {'file': sample_file})
        response = json.loads(response.content)
        layerId = sorted(response['net'].keys())
        self.assertEqual(response['result'], 'success')
        self.assertGreaterEqual(len(response['net'][layerId[id]]['params']), params)


# ********** Data Layers ********** 
Example 57
Project: neural-fingerprinting   Author: StephanZheng   File: loss.py    BSD 3-Clause "New" or "Revised" License
def save(self, path):
        # Open in text mode ('w') so json.dump can write str, and use a
        # context manager so the file is reliably closed.
        with open(os.path.join(path, 'loss.json'), 'w') as f:
            json.dump(dict(loss=self.__class__.__name__,
                           params=self.hparams), f)
Example 58
Project: openplotter   Author: openplotter   File: unit-private.py    GNU General Public License v2.0
def on_change_selected(self, e):
		orig_unit = self.select_Unit.GetValue().split(' ')
		if len(orig_unit) == 1:
			orig_unit = [orig_unit[0], '']
		if orig_unit[0] != '':
			list_select = []
			item = self.list.GetFirstSelected()
			while item != -1:
				# do something with the item
				self.list_SK[item][2] = orig_unit[1]

				list_select.append(self.get_by_index(item))
				item = self.list.GetNextSelected(item)

			self.data_SK_unit_private = []
			for i in self.list_SK:
				if i[2] != '':
					self.data_SK_unit_private.append([i[0], i[1], i[2]])
			with open(self.home+'/.openplotter/private_unit.json', 'w') as data_file:
				json.dump(self.data_SK_unit_private, data_file)

			self.data_SK_unit_private = []
			with open(self.home+'/.openplotter/private_unit.json') as data_file:
				self.data_SK_unit_private = json.load(data_file)
			self.read()
			self.sorting() 
Example 59
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: outdated.py    MIT License
def save(self, pypi_version, current_time):
        # Attempt to write out our version check file
        with open(self.statefile_path, "w") as statefile:
            json.dump(
                {
                    "last_check": current_time.strftime(SELFCHECK_DATE_FMT),
                    "pypi_version": pypi_version,
                },
                statefile,
                sort_keys=True,
                separators=(",", ":")
            ) 
Example 60
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: docker_login.py    MIT License
def write_config(self, path, config):
        try:
            json.dump(config, open(path, "w"), indent=5, sort_keys=True)
        except Exception as exc:
            self.fail("Error: failed to write config to %s - %s" % (path, str(exc))) 
Example 61
Project: microservice-junos-deployer   Author: network-automate   File: _junos_get_facts.py    GNU General Public License v3.0
def save_facts(junos_module, facts):
    """If the savedir option was specified, save the facts into a JSON file.

    If the savedir option was specified, save the facts into a JSON file named
    savedir/hostname-facts.json. The filename begins with the value of the
    hostname fact returned from the Junos device, which might be different than
    the value of the host option passed to the module.

    Args:
        junos_module: An instance of a JuniperJunosModule.
        facts: The facts dict returned by get_facts_dict().

    Raises:
        IOError: Calls junos_module.fail_json if unable to open the facts
                 file for writing.
    """
    if junos_module.params.get('savedir') is not None:
        save_dir = junos_module.params.get('savedir')
        file_name = '%s-facts.json' % (facts['hostname'])
        file_path = os.path.normpath(os.path.join(save_dir, file_name))
        junos_module.logger.debug("Saving facts to: %s.", file_path)
        try:
            with open(file_path, 'w') as fact_file:
                json.dump(facts, fact_file)
            junos_module.logger.debug("Facts saved to: %s.", file_path)
        except IOError:
            junos_module.fail_json(msg="Unable to save facts. Failed to open "
                                       "the %s file." % (file_path)) 
Example 62
Project: microservice-junos-deployer   Author: network-automate   File: juniper_junos_facts.py    GNU General Public License v3.0
def save_facts(junos_module, facts):
    """If the savedir option was specified, save the facts into a JSON file.

    If the savedir option was specified, save the facts into a JSON file named
    savedir/hostname-facts.json. The filename begins with the value of the
    hostname fact returned from the Junos device, which might be different than
    the value of the host option passed to the module.

    Args:
        junos_module: An instance of a JuniperJunosModule.
        facts: The facts dict returned by get_facts_dict().

    Raises:
        IOError: Calls junos_module.fail_json if unable to open the facts
                 file for writing.
    """
    if junos_module.params.get('savedir') is not None:
        save_dir = junos_module.params.get('savedir')
        file_name = '%s-facts.json' % (facts['hostname'])
        file_path = os.path.normpath(os.path.join(save_dir, file_name))
        junos_module.logger.debug("Saving facts to: %s.", file_path)
        try:
            with open(file_path, 'w') as fact_file:
                json.dump(facts, fact_file)
            junos_module.logger.debug("Facts saved to: %s.", file_path)
        except IOError:
            junos_module.fail_json(msg="Unable to save facts. Failed to open "
                                       "the %s file." % (file_path)) 
Example 63
Project: friendly-telegram   Author: friendly-telegram   File: local_backend.py    GNU Affero General Public License v3.0
def init(self, trigger_refresh):
        self._id = (await self._client.get_me(True)).user_id
        self._filename = "database-{}.json".format(self._id)
        try:
            self._file = open(self._filename, "r+")
        except FileNotFoundError:
            self._file = open(self._filename, "w+")
            json.dump({}, self._file) 
Example 64
Project: rcbup   Author: NGnius   File: main.py    MIT License
def main(args):
    token = ''
    credentials = do_login(args)
    if credentials is None:
        return
    token = credentials['Token']
    makedirs(args.out, exist_ok=True)
    search_params = factory.make_search_body(search=args.search, player=args.player)
    robots = factory.factory_list(token, search_params)

    calls = 1
    threads = list()
    already_seen = list()
    for bot in robots:
        if bot['itemId'] not in already_seen:  # ignore duplicates returned by API
            already_seen.append(bot['itemId'])
            if calls % CALLS_BEFORE_REFRESH == 0 and args.batch:
                print('Refreshing token...')
                token = do_login(args)['Token']
            save_path = join(args.out, remove_bad_chars(bot['itemName']+'-'+str(bot['itemId']))+'.'+args.extension.lstrip('.'))
            print('Downloading %s to %s...' % (bot['itemName'], save_path))
            bot_info = factory.factory_bot(token, bot['itemId'])
            with open(save_path, 'w') as f:
                json.dump(bot_info, f, indent=4)
            if args.thumbnail is True:
                # this is an AWS CDN, idc about spamming it
                threads.append(Thread(target=save_thumbnail, args=(bot_info['name']+'-'+str(bot['itemId']),bot_info['thumbnail'], args)))
                threads[-1].start()
            if calls == args.max and args.max >= 0:
                break
            calls += 1
            if args.batch:
                time.sleep(SLOWDOWN)
        else:
            pass
            # print('Ignoring %s duplicate (already downloaded)' % (bot['itemName']))
    for t in threads:
        t.join() 
Example 65
Project: facebook-discussion-tk   Author: internaut   File: parse_fb_html_files.py    MIT License
def main():
    num_args = len(sys.argv)
    if num_args < 3:
        print('usage: %s <html-file1> [html-file2 ...] <output-json-file>' % sys.argv[0], file=sys.stderr)
        exit(1)

    html_files = sys.argv[1:num_args - 1]
    output_file = sys.argv[num_args - 1]

    output_json_data = {}
    for html_file in html_files:
        file_basename = os.path.basename(html_file)
        try:
            rdot_idx = file_basename.rindex('.')
            label = file_basename[:rdot_idx]
        except ValueError:
            label = file_basename

        json_data = parse_html_file(html_file)

        if label not in output_json_data:
            output_json_data[label] = json_data
        else:
            output_json_data[label]['data'].append(json_data['data'])

    print("> writing result JSON file '%s'..." % output_file)
    with codecs.open(output_file, 'w', 'utf-8') as f:
        json.dump(output_json_data, f, indent=2) 
Example 66
Project: VSE-C   Author: ExplorerFreda   File: completion_datamaker.py    MIT License
def main():
    with open(args.input) as f:
        lines = f.readlines()
    results = Pool().map(process, lines)
    with open(args.output, 'w') as f:
        json.dump(results, f)
    from IPython import embed; embed() 
Example 67
Project: Graphlib   Author: HamletWantToCode   File: util.py    MIT License
def write_json(content, fname):
    with fname.open('wt') as handle:
        json.dump(content, handle, indent=4, sort_keys=False) 
Example 68
Project: video2commons   Author: toolforge   File: extracti18n.py    GNU General Public License v3.0
def _write(key):
    dest_file = dest + "/" + key + ".json"
    with open(dest_file, 'w') as openfile:
        json.dump(data[key], openfile, sort_keys=True,
                  indent=4, separators=(',', ': ')) 
Example 69
Project: NiBetaSeries   Author: HBClab   File: conftest.py    MIT License
def bids_dir(tmpdir_factory):
    bids_dir = tmpdir_factory.mktemp('bids')

    dataset_json = bids_dir.ensure("dataset_description.json")

    dataset_dict = {
        "Name": "waffles and fries",
        "BIDSVersion": "1.1.1",
    }

    with open(str(dataset_json), 'w') as dj:
        json.dump(dataset_dict, dj)

    return bids_dir 
Example 70
Project: NiBetaSeries   Author: HBClab   File: conftest.py    MIT License
def deriv_dir(bids_dir):
    deriv_dir = bids_dir.ensure('derivatives',
                                'fmriprep',
                                dir=True)

    dataset_json = deriv_dir.ensure("dataset_description.json")

    dataset_dict = {
        "Name": "fMRIPrep - fMRI PREProcessing workflow",
        "BIDSVersion": "1.1.1",
        "PipelineDescription": {
            "Name": "fMRIPrep",
            "Version": "1.5.0rc2+14.gf673eaf5",
            "CodeURL": "https://github.com/poldracklab/fmriprep/archive/1.5.0.tar.gz"
        },
        "CodeURL": "https://github.com/poldracklab/fmriprep",
        "HowToAcknowledge": "Please cite our paper (https://doi.org/10.1038/s41592-018-0235-4)",
        "SourceDatasetsURLs": [
            "https://doi.org/"
        ],
        "License": ""
    }

    with open(str(dataset_json), 'w') as dj:
        json.dump(dataset_dict, dj)

    return deriv_dir 
Example 71
Project: NiBetaSeries   Author: HBClab   File: conftest.py    MIT License
def sub_metadata(bids_dir, bids_json_fname=bids_json_fname):
    sub_json = bids_dir.ensure(bids_json_fname)
    tr = 2
    bold_metadata = {"RepetitionTime": tr, "TaskName": "waffles"}

    with open(str(sub_json), 'w') as md:
        json.dump(bold_metadata, md)

    return sub_json 
Example 72
Project: NiBetaSeries   Author: HBClab   File: conftest.py    MIT License
def sub_rest_metadata(bids_dir, bids_json_fname=bids_rest_json_fname):
    sub_json = bids_dir.ensure(bids_rest_json_fname)
    tr = 2
    bold_metadata = {"RepetitionTime": tr, "TaskName": "rest"}

    with open(str(sub_json), 'w') as md:
        json.dump(bold_metadata, md)

    return sub_json 
Example 73
Project: haveibeenpwned_api   Author: Critical-Start   File: pwned_api.py    MIT License 5 votes vote down vote up
def allBreaches():
    req = requests.get("https://" + endpoint + "/api/v2/breaches", headers = useragent, cookies = cookies, verify = sslVerify)
    if str(req.status_code) == "200":
        print OKGREEN + "[!] Obtained breaches and saving to file!" + ENDC
        with open('Output/breaches.txt', 'w+') as outfile:
            json.dump(req.content, outfile)
    else:
        print WARNING + "[!] Something went wrong while obtaining breaches" + ENDC
        sys.exit()



# Obtaining breaches by name 
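Note that req.content is already JSON text, so json.dump() above serializes it as one long quoted string rather than as structured JSON. A sketch of the usual pattern, parsing the body first (endpoint and headers elided):

import json
import requests

resp = requests.get("https://haveibeenpwned.com/api/v2/breaches")
with open("breaches.json", "w") as outfile:
    # resp.json() parses the body into Python objects, so the file
    # contains real JSON structure instead of an escaped string.
    json.dump(resp.json(), outfile, indent=2)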
Example 74
Project: telegram-repeater   Author: googlehosts   File: main.py    GNU Affero General Public License v3.0 5 votes vote down vote up
def import_from_csv():
	import csv
	with open(sys.argv[2], encoding = 'utf8') as fin:
		s = csv.reader(fin, delimiter = ',')
		problems = []
		for row in s:
			problems.append({'Q': row[0], 'A': row[1]})
	problem_set = extern_load_problem_set()
	problem_set['problem_set'] = problems
	with open('problem_set.json', 'w', encoding='utf8') as fout:
		json.dump(problem_set, fout, indent='\t', separators=(',', ': '), ensure_ascii=False) 
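Since Python 3.2, indent may be a string, so indent='\t' indents each level with a tab; ensure_ascii=False keeps non-ASCII questions and answers readable in the file. A quick demonstration:

import json

print(json.dumps({'Q': '2+2=?', 'A': '4'}, indent='\t',
                 separators=(',', ': '), ensure_ascii=False))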
Example 75
Project: neural-pipeline   Author: toodef   File: monitoring.py    MIT License 5 votes vote down vote up
def _flush_metrics(self) -> None:
        """
        Flush metrics files
        """
        with open(self._get_file_name(True), 'w') as out:
            json.dump(self._storage, out)

        if self._final_metrics_file is not None:
            res = dict_recursive_bypass(self._storage, lambda v: v[-1])
            with open(self._final_metrics_file, 'w') as out:
                json.dump(res, out) 
Example 76
Project: neural-pipeline   Author: toodef   File: train.py    MIT License 5 votes vote down vote up
def _save_state(self, ckpts_manager: CheckpointsManager, best_ckpts_manager: CheckpointsManager or None,
                    cur_best_state: float or None, epoch_idx: int) -> float or None:
        """
        Internal method used for save states after epoch end

        :param ckpts_manager: ordinal checkpoints manager
        :param best_ckpts_manager: checkpoints manager, used for store best stages
        :param cur_best_state: current best stage metric value
        :param epoch_idx: index of the epoch that just finished
        :return: new best stage metric value, or None if it was not updated
        """
        def save_trainer(ckp_manager):
            with open(ckp_manager.trainer_file(), 'w') as out:
                json.dump({'last_epoch': epoch_idx}, out)

        if self._best_state_rule is not None:
            new_best_state = self._best_state_rule()
            if cur_best_state is None:
                self._data_processor.save_state()
                save_trainer(ckpts_manager)
                ckpts_manager.pack()
                return new_best_state
            else:
                if new_best_state <= cur_best_state:
                    self._data_processor.set_checkpoints_manager(best_ckpts_manager)
                    self._data_processor.save_state()
                    save_trainer(best_ckpts_manager)
                    best_ckpts_manager.pack()
                    self._data_processor.set_checkpoints_manager(ckpts_manager)
                    return new_best_state

        self._data_processor.save_state()
        save_trainer(ckpts_manager)
        ckpts_manager.pack()
        return None 
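The checkpoint files above are written in place, so an interruption mid-write can leave a truncated JSON file behind. A hedged hardening sketch (not part of the project) that writes atomically via a temporary file:

import json
import os
import tempfile

def dump_json_atomic(obj, path):
    # Write to a temp file in the target directory, then rename over
    # the destination; os.replace() is atomic, so readers never see a
    # half-written file.
    fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
    try:
        with os.fdopen(fd, 'w') as out:
            json.dump(obj, out)
        os.replace(tmp, path)
    except BaseException:
        os.remove(tmp)
        raise

dump_json_atomic({'last_epoch': 3}, 'trainer.json')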
Example 77
Project: factotum   Author: Denubis   File: settings.py    GNU General Public License v3.0 4 votes vote down vote up
def configSetup(servername, description, tag, visibility, serverpassword, genserverpasswordwords, admins, ignoreplayerlimit, afk, uploadrate, updatepassword):
	FACTORIOPATH = getFactorioPath()

	if updatepassword:
		if serverpassword:
			password=serverpassword
		elif genserverpasswordwords > 0:
			password=generatePhrase(genserverpasswordwords)
		else:
			password=""

		

	try:
		with codecs.open(getSettingsFile(), 'r', encoding='utf-8') as settings_file:

			settingsJson = json.load(settings_file)
			if servername:
				settingsJson['name'] = servername
			if description:
				settingsJson['description'] = description
			if tag:
				settingsJson['tags'] = tag
			if admins:
				settingsJson['admins'] = admins
			if ignoreplayerlimit:
				settingsJson['ignore_player_limit_for_returning_players'] = ignoreplayerlimit
			if afk:
				settingsJson['afk_autokick_interval'] = afk
			if uploadrate:
				settingsJson['max_upload_in_kilobytes_per_second'] = uploadrate
			if updatepassword:
				settingsJson["game_password"] = password
			if visibility:
				settingsJson['visibility'] = {"public": visibility, "lan": True}

		with codecs.open("%s/config/settings.json" % (FACTORIOPATH), 'w', encoding='utf-8') as settingsFile:
			json.dump(settingsJson, settingsFile, indent=4)

		print("The server password is: \"%s\" " % getPassword())

	except Exception as e:
		print("Cannot write settings file: %s" % e) 
Example 78
Project: models   Author: kipoi   File: prepare_model_yaml.py    MIT License 4 votes vote down vote up
def data_reader_config_from_model(model, config_out_fpath = None, replicate_names=None):
    """Return :class:`DataReader` from `model`.
    Builds a :class:`DataReader` for reading data for `model`.
    Parameters
    ----------
    model: :class:`Model`.
        :class:`Model`.
    outputs: bool
        If `True`, return output labels.
    replicate_names: list
        Name of input cells of `model`.
    Returns
    -------
    :class:`DataReader`
        Instance of :class:`DataReader`.
    """
    use_dna = False
    dna_wlen = None
    cpg_wlen = None
    output_names = None
    encode_replicates = False
    #
    input_shapes = to_list(model.input_shape)
    for input_name, input_shape in zip(model.input_names, input_shapes):
        if input_name == 'dna':
            # Read DNA sequences.
            use_dna = True
            dna_wlen = input_shape[1]
        elif input_name.startswith('cpg/state/'):
            # DEPRECATED: legacy model. Decode replicate names from input name.
            replicate_names = decode_replicate_names(input_name.replace('cpg/state/', ''))
            assert len(replicate_names) == input_shape[1]
            cpg_wlen = input_shape[2]
            encode_replicates = True
        elif input_name == 'cpg/state':
            # Read neighboring CpG sites.
            if not replicate_names:
                raise ValueError('Replicate names required!')
            if len(replicate_names) != input_shape[1]:
                tmp = '{r} replicates found but CpG model was trained with' \
                    ' {s} replicates. Use `--nb_replicate {s}` or ' \
                    ' `--replicate_names` option to select {s} replicates!'
                tmp = tmp.format(r=len(replicate_names), s=input_shape[1])
                raise ValueError(tmp)
            cpg_wlen = input_shape[2]
    output_names = model.output_names
    config = {"output_names":output_names,
                      "use_dna":use_dna,
                      "dna_wlen":dna_wlen,
                      "cpg_wlen":cpg_wlen,
                      "replicate_names":replicate_names,
                      "encode_replicates":encode_replicates}
    if config_out_fpath is not None:
        with open(config_out_fpath, "w") as ofh:
            json.dump(config, ofh)
    return config 
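Values such as dna_wlen may be None and use_dna is a boolean; json.dump() maps these to JSON null and true/false, so the config round-trips losslessly:

import json

config = {"use_dna": False, "dna_wlen": None, "output_names": ["cpg"]}
s = json.dumps(config)
print(s)                          # {"use_dna": false, "dna_wlen": null, ...}
print(json.loads(s) == config)    # True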
Example 79
Project: models   Author: kipoi   File: dataloader_m.py    MIT License 4 votes vote down vote up
def data_reader_config_from_model(model, config_out_fpath = None, replicate_names=None):
    """Return :class:`DataReader` from `model`.
    Builds a :class:`DataReader` for reading data for `model`.
    Parameters
    ----------
    model: :class:`Model`.
        :class:`Model`.
    outputs: bool
        If `True`, return output labels.
    replicate_names: list
        Name of input cells of `model`.
    Returns
    -------
    :class:`DataReader`
        Instance of :class:`DataReader`.
    """
    use_dna = False
    dna_wlen = None
    cpg_wlen = None
    output_names = None
    encode_replicates = False
    #
    input_shapes = to_list(model.input_shape)
    for input_name, input_shape in zip(model.input_names, input_shapes):
        if input_name == 'dna':
            # Read DNA sequences.
            use_dna = True
            dna_wlen = input_shape[1]
        elif input_name.startswith('cpg/state/'):
            # DEPRECATED: legacy model. Decode replicate names from input name.
            replicate_names = decode_replicate_names(input_name.replace('cpg/state/', ''))
            assert len(replicate_names) == input_shape[1]
            cpg_wlen = input_shape[2]
            encode_replicates = True
        elif input_name == 'cpg/state':
            # Read neighboring CpG sites.
            if not replicate_names:
                raise ValueError('Replicate names required!')
            if len(replicate_names) != input_shape[1]:
                tmp = '{r} replicates found but CpG model was trained with' \
                    ' {s} replicates. Use `--nb_replicate {s}` or ' \
                    ' `--replicate_names` option to select {s} replicates!'
                tmp = tmp.format(r=len(replicate_names), s=input_shape[1])
                raise ValueError(tmp)
            cpg_wlen = input_shape[2]
    output_names = model.output_names
    config = {"output_names":output_names,
                      "use_dna":use_dna,
                      "dna_wlen":dna_wlen,
                      "cpg_wlen":cpg_wlen,
                      "replicate_names":replicate_names,
                      "encode_replicates":encode_replicates}
    if config_out_fpath is not None:
        with open(config_out_fpath, "w") as ofh:
            json.dump(config, ofh)
    return config 
Example 80
Project: haveibeenpwned_api   Author: Critical-Start   File: pwned_api.py    MIT License 4 votes vote down vote up
def obtainPastes(account):
    # Directory to store all pastes in
    directory = "Output/pastes/"
    if not os.path.exists(directory):
        os.makedirs(directory)
        print OKBLUE + "[+] Making directory for pastes to be placed in!" + ENDC
    
    sleep = rate
    req = requests.get("https://" + endpoint + "/api/v2/pasteaccount/" + account, headers = useragent, cookies = cookies, verify = sslVerify)
    # The account has no pastes
    if str(req.status_code) == "404":
        print OKGREEN + "[!] " + account + " has no pastes." + ENDC
        time.sleep(sleep) # sleep so that we don't trigger the rate limit
        return False
    # The account has pastes
    elif str(req.status_code) == "200":
        # Files to be written to
        with open(directory + account + ".txt", "w+") as outfile:
            json.dump(req.content, outfile)

        print OKRED + "[!] " + account + " has pastes!" + ENDC
        time.sleep(sleep) # sleep so that we don't trigger the rate limit
        return True
    # Rate limit triggered
    elif str(req.status_code) == "429":
        print WARNING + "[!] Rate limit exceeded, server instructed us to retry after " + req.headers['Retry-After'] + " seconds" + ENDC
        # Checking to see if the server has limited us for a long time or possibly banned us
        if float(req.headers['Retry-After']) > 300:
            print FAIL + "[!] Server has rate limited us for longer than 5 minutes!" + ENDC
            print FAIL + "[!] Do one of the following: Be patient you crazy person, change your IP, change the URL (remove or add v2 after /api/), or just rerun the script and pray!" + ENDC
            f.close()
            sys.exit()
        else:
            sleep = float(req.headers['Retry-After']) # Read rate limit from HTTP response headers and set local sleep rate
            time.sleep(sleep) # Sleeping a little longer as the server instructed us to do
            obtainPastes(account) # Reissue request
    # CloudFlare has stopped us
    elif str(req.status_code) == "503":
        print FAIL + "[!] CloudFlare has stopped our request! Ensure you are using a valid cookie with the user-agent that obtained that cookie!" + ENDC
        f.close()
        sys.exit()
    else:
        print WARNING + "[!] Something went wrong while checking " + account + ENDC
        time.sleep(sleep) # sleep so that we don't trigger the rate limit
        return True



# Obtaining all breaches
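The 429 branch above honours the server's Retry-After header. A hedged, self-contained sketch of that retry pattern (a hypothetical helper, not the project's code; it assumes Retry-After carries seconds rather than an HTTP date):

import time
import requests

def get_with_retry(url, max_wait=300, **kwargs):
    while True:
        resp = requests.get(url, **kwargs)
        if resp.status_code != 429:
            return resp
        # Sleep as instructed, but give up rather than wait longer
        # than max_wait seconds.
        wait = float(resp.headers.get('Retry-After', max_wait + 1))
        if wait > max_wait:
            raise RuntimeError('rate limited for %.0f seconds; giving up' % wait)
        time.sleep(wait)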