Python json.load() Examples

The following are 30 code examples of json.load(), drawn from open-source projects. You can go to the original project or source file by following the links above each example. You may also want to check out the other available functions and classes of the json module.
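
Before the project examples, here is a minimal, self-contained sketch of the pattern most of them follow: open a file, hand the file object to json.load(), and handle a decoding failure. The settings.json file name and the empty-dict fallback are illustrative only, not taken from any project below.

import json

def read_settings(path="settings.json"):
    # json.load() accepts any file-like object and returns the parsed Python
    # value (dict, list, str, int, float, bool or None).
    try:
        with open(path, "r") as f:
            return json.load(f)
    except FileNotFoundError:
        return {}  # no settings file yet: fall back to an empty configuration
    except ValueError as error:
        # json.JSONDecodeError subclasses ValueError, so this also catches
        # malformed JSON; several examples below use exactly this pattern.
        print("Invalid JSON in {}: {}".format(path, error))
        return {}
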
Example #1
Source File: test_party.py    From indras_net with GNU General Public License v3.0
def test_props_write(self):
        announce('test_props_write')
        report = True
        self.env.pwrite(self.env.model_nm + ".props")
        with open(self.env.model_nm + ".props", "r") as f:
            props_written = json.load(f)
        if len(props_written) != len(self.env.props.props):
            report = False
        if report:
            for key in props_written:
                if key not in self.env.props.props:
                    report = False
                    break
                else:
                    if props_written[key]["val"] != self.env.props.props[key].val:
                        report = False
                        break
        f.close()
        os.remove(self.env.model_nm + ".props")
        self.assertEqual(report, True) 
Example #2
Source File: file_utils.py    From cmrc2019 with Creative Commons Attribution Share Alike 4.0 International
def filename_to_url(filename: str, cache_dir: Union[str, Path] = None) -> Tuple[str, str]:
    """
    Return the url and etag (which may be ``None``) stored for `filename`.
    Raise ``FileNotFoundError`` if `filename` or its stored metadata do not exist.
    """
    if cache_dir is None:
        cache_dir = PYTORCH_PRETRAINED_BERT_CACHE
    if isinstance(cache_dir, Path):
        cache_dir = str(cache_dir)

    cache_path = os.path.join(cache_dir, filename)
    if not os.path.exists(cache_path):
        raise FileNotFoundError("file {} not found".format(cache_path))

    meta_path = cache_path + '.json'
    if not os.path.exists(meta_path):
        raise FileNotFoundError("file {} not found".format(meta_path))

    with open(meta_path) as meta_file:
        metadata = json.load(meta_file)
    url = metadata['url']
    etag = metadata['etag']

    return url, etag 
Example #3
Source File: test_forestfire.py    From indras_net with GNU General Public License v3.0
(The test_props_write body here is identical to Example #1.)
Example #4
Source File: test_basic.py    From indras_net with GNU General Public License v3.0
(The test_props_write body here is identical to Example #1.)
Example #5
Source File: test_coop.py    From indras_net with GNU General Public License v3.0
(The test_props_write body here is identical to Example #1.)
Example #6
Source File: test_shamir.py    From python-shamir-mnemonic with MIT License
def test_vectors():
    with open("vectors.json", "r") as f:
        vectors = json.load(f)
    for description, mnemonics, secret in vectors:
        if secret:
            assert bytes.fromhex(secret) == shamir.combine_mnemonics(
                mnemonics, b"TREZOR"
            ), 'Incorrect secret for test vector "{}".'.format(description)
        else:
            with pytest.raises(MnemonicError):
                shamir.combine_mnemonics(mnemonics)
                pytest.fail(
                    'Failed to raise exception for test vector "{}".'.format(
                        description
                    )
                ) 
Example #7
Source File: session.py    From mlbv with GNU General Public License v3.0
def __init__(self, user_agent, token_url_template, platform):
        self.user_agent = user_agent
        self.token_url_template = token_url_template
        self.platform = platform

        self.session = requests.Session()
        self.session.cookies = http.cookiejar.LWPCookieJar()
        if not os.path.exists(COOKIE_FILE):
            self.session.cookies.save(COOKIE_FILE)
        self.session.cookies.load(COOKIE_FILE, ignore_discard=True)
        self.session.headers = {"User-agent": user_agent}
        if os.path.exists(SESSION_FILE):
            self.load()
        else:
            self._state = {
                'api_key': None,
                'client_api_key': None,
                'token': None,
                'access_token': None,
                'access_token_expiry': None
            }
        self.login() 
Example #8
Source File: json_combiner.py    From indras_net with GNU General Public License v3.0
def get_models(model_files):
    """
        return all the models from list of model files (.json) for processing
    """
    model = []
    for file in model_files:
        if file.endswith("_model.json"):
            with open(file, 'r') as input_stream:
                try:
                    loadedData = json.load(input_stream)
                    if(len(loadedData) > 0):
                        model.append(loadedData)
                except ValueError:
                    script_output("Invalid JSON in " + file)
                    exit(1)
        else:
            script_output("File does not end with _model.json found")
            script_output(file, False)
            exit(1)

    return model 
Example #9
Source File: test_wolfsheep.py    From indras_net with GNU General Public License v3.0
(The test_props_write body here is identical to Example #1.)
Example #10
Source File: json_combiner.py    From indras_net with GNU General Public License v3.0
def get_prev_models(filepath):
    """
        Reads from models.json, which also happens to be our DEST_FOLDER
        If our DEST_FOLDER doesn't exist, it will just create the new file
        Otherwise, it will read in the json,
        so the script knows the model ID for existing models
    """
    try:
        with open(filepath, 'r') as input_stream:
            try:
                # Assumes models.json has DB_NAME that matches
                # what the script expects
                return json.load(input_stream)[DB_NAME]
            except ValueError:
                script_output("Invalid JSON in " + filepath)
                exit(1)

    except OSError:
        script_output("Could not open " + filepath)
        exit(1) 
Example #11
Source File: test_grid.py    From indras_net with GNU General Public License v3.0
(The test_props_write body here is identical to Example #1.)
Example #12
Source File: configuration.py    From spleeter with MIT License
def load_configuration(descriptor):
    """ Load configuration from the given descriptor. Could be
    either a `spleeter:` prefixed embedded configuration name
    or a file system path to read configuration from.

    :param descriptor: Configuration descriptor to use for lookup.
    :returns: Loaded description as dict.
    :raise SpleeterError: If required embedded configuration does not exist.
    :raise SpleeterError: If required configuration file does not exist.
    """
    # Embedded configuration reading.
    if descriptor.startswith(_EMBEDDED_CONFIGURATION_PREFIX):
        name = descriptor[len(_EMBEDDED_CONFIGURATION_PREFIX):]
        if not loader.is_resource(resources, f'{name}.json'):
            raise SpleeterError(f'No embedded configuration {name} found')
        with loader.open_text(resources, f'{name}.json') as stream:
            return json.load(stream)
    # Standard file reading.
    if not exists(descriptor):
        raise SpleeterError(f'Configuration file {descriptor} not found')
    with open(descriptor, 'r') as stream:
        return json.load(stream) 
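
As a usage note for the function above: the descriptor is either a name carrying the embedded prefix or a plain file system path. A hedged sketch of both call forms (the 'spleeter:4stems' name is only illustrative of the embedded form):

config = load_configuration('spleeter:4stems')            # embedded configuration bundled with the package
config = load_configuration('/path/to/my_config.json')    # configuration read from a JSON file on disk
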
Example #13
Source File: test_fmarket.py    From indras_net with GNU General Public License v3.0
(The test_props_write body here is identical to Example #1.)
Example #14
Source File: __main__.py    From indras_net with GNU General Public License v3.0
def main(args=None):
    welcome = "Welcome to Indra! Please choose a model:"
    stars = "*" * len(welcome)
    model_db = None
    model_list = None
    with open("models.json", 'r') as f:
        model_db = json.load(f)
    model_list = model_db["models_database"]
    while True:
        print("\n",
              stars, "\n",
              welcome, "\n",
              stars)
        for choice, model in enumerate(model_list):
            print(str(choice) + ". ", model["name"])
        choice = int(input())
        if 0 <= choice < len(model_list):
            rdict[model_list[choice]["run"]]()
        else:
            return 0 
Example #15
Source File: test_fashion.py    From indras_net with GNU General Public License v3.0
(The test_props_write body here is identical to Example #1.)
Example #16
Source File: image_tags.py    From drydock with Apache License 2.0
def read_config(stream, env):
    config = {}
    try:
        config['tags'] = json.load(stream)
    except ValueError:
        LOG.exception('Failed to decode JSON from input stream')
        config['tags'] = {}

    LOG.debug('Configuration after reading stream: %s', config)

    config['context'] = {
        'branch': env.get('BRANCH'),
        'change': env.get('CHANGE'),
        'commit': env.get('COMMIT'),
        'ps': env.get('PATCHSET'),
    }

    LOG.info('Final configuration: %s', config)

    return config 
Example #17
Source File: labeled_image.py    From vergeml with MIT License
def _get_classes_from_json(self):

        for filename in ("labels.txt", "classes.json"):
            path = os.path.join(self.samples_dir, filename)
            if not os.path.exists(path):
                raise VergeMLError("{} is missing".format(filename))

            with open(path) as f:
                if filename == "labels.txt":
                    items = filter(None, map(methodcaller("strip"), f.read().splitlines()))
                    labels = Labels(items)
                else:
                    self.classes = json.load(f)
        files = {}
        # prefix the sample with input_dir
        for k, v in self.classes['files'].items():

            # on windows and linux, separator is /
            path = k.split("/")
            path.insert(0, self.samples_dir)
            fname = os.path.join(*path)
            files[fname] = v

        self.classes['files'] = files
        self.meta['labels'] = labels 
Example #18
Source File: config.py    From dustmaps with GNU General Public License v2.0
def load(self):
        if os.path.isfile(self.fname):
            with open(self.fname, 'r') as f:
                try:
                    self._options = json.load(f)
                    self._success = True
                except ValueError as error:
                    print(('The config file appears to be corrupted:\n\n'
                           '    {fname}\n\n'
                           'Either fix the config file manually, or overwrite '
                           'it with a blank configuration as follows:\n\n'
                           '    from dustmaps.config import config\n'
                           '    config.reset()\n\n'
                           'Note that this will delete your configuration! For '
                           'example, if you have specified a data directory, '
                           'then dustmaps will forget about its location.'
                          ).format(fname=self.fname))
                    self._options = {}
        else:
            self._options = {}
            self._success = True 
Example #19
Source File: workflow.py    From wechat-alfred-workflow with MIT License
def register(self, name, serializer):
        """Register ``serializer`` object under ``name``.

        Raises :class:`AttributeError` if ``serializer`` is invalid.

        .. note::

            ``name`` will be used as the file extension of the saved files.

        :param name: Name to register ``serializer`` under
        :type name: ``unicode`` or ``str``
        :param serializer: object with ``load()`` and ``dump()``
            methods

        """
        # Basic validation
        getattr(serializer, 'load')
        getattr(serializer, 'dump')

        self._serializers[name] = serializer 
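
For context on register() above: a serializer only needs load() and dump() methods. A minimal JSON-backed sketch (the class name and the final register() call are illustrative, not part of the library shown here):

import json

class JSONSerializer:
    @classmethod
    def load(cls, file_obj):
        # Read a previously stored object back from an open file handle.
        return json.load(file_obj)

    @classmethod
    def dump(cls, obj, file_obj):
        # Write the object out as JSON.
        json.dump(obj, file_obj, indent=2)

# hypothetical usage, assuming `manager` exposes the register() method above:
# manager.register('json', JSONSerializer)
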
Example #20
Source File: lib.py    From iSDX with Apache License 2.0
def __init__(self, config_file):
        self.mode = None

        self.vmac_mode = None
        self.vmac_options = None

        self.vnhs = None

        self.refmon = None

        self.flanc_auth = None

        self.route_server = None

        self.arp_proxy = None

        self.peers = {}

        # loading config file
        config = json.load(open(config_file, 'r'))

        # parse config
        self.parse_config(config) 
Example #21
Source File: build.py    From Traffic_sign_detection_YOLO with MIT License
def build_from_pb(self):
		with tf.gfile.FastGFile(self.FLAGS.pbLoad, "rb") as f:
			graph_def = tf.GraphDef()
			graph_def.ParseFromString(f.read())
		
		tf.import_graph_def(
			graph_def,
			name=""
		)
		with open(self.FLAGS.metaLoad, 'r') as fp:
			self.meta = json.load(fp)
		self.framework = create_framework(self.meta, self.FLAGS)

		# Placeholders
		self.inp = tf.get_default_graph().get_tensor_by_name('input:0')
		self.feed = dict() # other placeholders
		self.out = tf.get_default_graph().get_tensor_by_name('output:0')
		
		self.setup_meta_ops() 
Example #22
Source File: test_darkflow.py    From Traffic_sign_detection_YOLO with MIT License
def test_CLI_JSON_YOLOv1():
    #Test predictions outputted to a JSON file using the YOLOv1 model through CLI
    #NOTE: This test verifies that the code executes properly, the JSON file is created properly and the predictions generated are within a certain
    #      margin of error when compared to the expected predictions.

    testString = "flow --imgdir {0} --model {1} --load {2} --config {3} --threshold 0.4 --json".format(os.path.dirname(testImg["path"]), yolo_small_CfgPath, yolo_small_WeightPath, generalConfigPath)
    executeCLI(testString)

    outputJSONPath = os.path.join(os.path.dirname(testImg["path"]), "out", os.path.splitext(os.path.basename(testImg["path"]))[0] + ".json")
    assert os.path.exists(outputJSONPath), "Expected output JSON file: {0} was not found.".format(outputJSONPath)

    with open(outputJSONPath) as json_file:
        loadedPredictions = json.load(json_file)

    assert compareObjectData(testImg["expected-objects"]["yolo-small"], loadedPredictions, testImg["width"], testImg["height"], threshCompareThreshold, posCompareThreshold), "Generated object predictions from JSON were not within margin of error compared to expected values."
    os.remove(outputJSONPath) #Remove the JSON file so that it does not affect subsequent tests 
Example #23
Source File: build.py    From Traffic_sign_detection_YOLO with MIT License
def savepb(self):
		"""
		Create a standalone const graph def that 
		C++	can load and run.
		"""
		darknet_pb = self.to_darknet()
		flags_pb = self.FLAGS
		flags_pb.verbalise = False
		
		flags_pb.train = False
		# rebuild another tfnet. all const.
		tfnet_pb = TFNet(flags_pb, darknet_pb)		
		tfnet_pb.sess = tf.Session(graph = tfnet_pb.graph)
		# tfnet_pb.predict() # uncomment for unit testing
		name = 'built_graph/{}.pb'.format(self.meta['name'])
		os.makedirs(os.path.dirname(name), exist_ok=True)
		#Save dump of everything in meta
		with open('built_graph/{}.meta'.format(self.meta['name']), 'w') as fp:
			json.dump(self.meta, fp)
		self.say('Saving const graph def to {}'.format(name))
		graph_def = tfnet_pb.sess.graph_def
		tf.train.write_graph(graph_def,'./', name, False) 
Example #24
Source File: test_darkflow.py    From Traffic_sign_detection_YOLO with MIT License
def test_CLI_SAVEPB_YOLOv2():
    #Save .pb and .meta as generated from the YOLOv2 model through CLI
    #NOTE: This test verifies that the code executes properly, and the .pb and .meta files are successfully created. The subsequent test will verify the
    #      contents of those files.

    testString = "flow --model {0} --load {1} --config {2} --threshold 0.4 --savepb".format(yolo_CfgPath, yolo_WeightPath, generalConfigPath)
    
    with pytest.raises(SystemExit):
            executeCLI(testString)

    assert os.path.exists(pbPath), "Expected output .pb file: {0} was not found.".format(pbPath)
    assert os.path.exists(metaPath), "Expected output .meta file: {0} was not found.".format(metaPath) 
Example #25
Source File: analyze_noun_counts.py    From facebook-discussion-tk with MIT License
def main():
    global output_file
    num_args = len(sys.argv)
    if num_args < 3:
        print("usage: %s <json-file-1> [json-file-2 ...] <output-csv-file>" % sys.argv[0], file=sys.stderr)
        exit(1)

    json_files = sys.argv[1:num_args - 1]
    output_file = sys.argv[num_args - 1]

    merged_json_data = {}
    for json_file in json_files:
        print("> reading JSON file '%s'..." % json_file)
        with open(json_file) as f:
            json_data = json.load(f)
            for label, data in json_data.items():
                if label not in merged_json_data:
                    merged_json_data[label] = data
                else:
                    merged_json_data[label]['data'].extend(data['data'])

    # pprint(merged_json_data)
    analyse(merged_json_data) 
Example #26
Source File: test_darkflow.py    From Traffic_sign_detection_YOLO with MIT License
def test_TRAIN_FROM_WEIGHTS_CLI__LOAD_CHECKPOINT_RETURNPREDICT_YOLOv2():
    #Test training using pre-generated weights for tiny-yolo-voc
    #NOTE: This test verifies that the code executes properly, and that the expected checkpoint file (tiny-yolo-voc-20.meta in this case) is generated.
    #      In addition, predictions are generated using the checkpoint file to verify that training completed successfully.

    testString = "flow --model {0} --load {1} --train --dataset {2} --annotation {3} --epoch 20".format(tiny_yolo_voc_CfgPath, tiny_yolo_voc_WeightPath, os.path.join(buildPath, "test", "training", "images"), os.path.join(buildPath, "test", "training", "annotations"))
    with pytest.raises(SystemExit):
        executeCLI(testString)

    checkpointPath = os.path.join(buildPath, "ckpt", "tiny-yolo-voc-20.meta")
    assert os.path.exists(checkpointPath), "Expected output checkpoint file: {0} was not found.".format(checkpointPath)

    #Using trained weights
    options = {"model": tiny_yolo_voc_CfgPath, "load": 20, "config": generalConfigPath, "threshold": 0.1}
    tfnet = TFNet(options)

    #Make sure predictions very roughly match the expected values for image with bike and person
    imgcv = cv2.imread(trainImgBikePerson["path"])
    loadedPredictions = tfnet.return_predict(imgcv)
    assert compareObjectData(trainImgBikePerson["expected-objects"]["tiny-yolo-voc"], loadedPredictions, trainImgBikePerson["width"], trainImgBikePerson["height"], 0.7, 0.25), "Generated object predictions from training (for image with person on the bike) were not anywhere close to what they are expected to be.\nTraining may not have completed successfully."
    differentThanExpectedBike = compareObjectData(trainImgBikePerson["expected-objects"]["tiny-yolo-voc"], loadedPredictions, trainImgBikePerson["width"], trainImgBikePerson["height"], 0.01, 0.001)

    #Make sure predictions very roughly match the expected values for image with horse and person
    imgcv = cv2.imread(trainImgHorsePerson["path"])
    loadedPredictions = tfnet.return_predict(imgcv)
    assert compareObjectData(trainImgHorsePerson["expected-objects"]["tiny-yolo-voc"], loadedPredictions, trainImgHorsePerson["width"], trainImgHorsePerson["height"], 0.7, 0.25), "Generated object predictions from training (for image with person on the horse) were not anywhere close to what they are expected to be.\nTraining may not have completed successfully."
    differentThanExpectedHorse = compareObjectData(trainImgHorsePerson["expected-objects"]["tiny-yolo-voc"], loadedPredictions, trainImgHorsePerson["width"], trainImgHorsePerson["height"], 0.01, 0.001)

    assert not (differentThanExpectedBike and differentThanExpectedHorse), "The generated object predictions for both images appear to be exactly the same as the ones generated with the original weights.\nTraining may not have completed successfully.\n\nNOTE: It is possible this is a fluke error and training did complete properly (try running this build again to confirm) - but most likely something is wrong." 
Example #27
Source File: arproxy.py    From iSDX with Apache License 2.0
def parse_config(config_file):
    "Parse the config file"

    with open(config_file, 'r') as f:
        config = json.load(f)

    vnhs = IPNetwork(config["VNHs"])

    host, port = config["ARP Proxy"]["GARP_SOCKET"]
    garp_socket = (host, int(port))

    interface = config["ARP Proxy"]["Interface"]

    return Config(vnhs, garp_socket, interface) 
Example #28
Source File: participant_controller.py    From iSDX with Apache License 2.0 5 votes vote down vote up
def load_policies(self, policy_file):
        # Load policies from file

        with open(policy_file, 'r') as f:
            policies = json.load(f)

        return self.sanitize_policies(policies) 
Example #29
Source File: test_sfd.py    From dustmaps with GNU General Public License v2.0
def setUpClass(self):
        t0 = time.time()

        # Test data comes from NED
        with open(os.path.join(test_dir, 'ned_output.json'), 'r') as f:
            self._test_data = json.load(f)

        # Set up SFD query object
        self._sfd = sfd.SFDQuery()

        t1 = time.time()
        print('Loaded SFD test data in {:.5f} s.'.format(t1-t0)) 
Example #30
Source File: lib.py    From iSDX with Apache License 2.0
def __init__(self, config_file, id):
        self.id = str(id)

        with open(config_file, 'r') as f:
            self.config = json.load(f)

        self.parse_modes()

        self.parse_various()