Python json.loads() Examples
The following are 30 code examples of json.loads().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions/classes of the module json, or try the search function.

Example #1
Source File: proxy_oa.py From incubator-spot with Apache License 2.0 | 7 votes |
def _get_suspicious_details(self):
    """Fetch per-connection proxy details for every suspicious score entry.

    Loads the IANA code mappings (when the config file exists) and, for each
    scored proxy connection, extracts the client IP, full URI and date/hour,
    then delegates to _get_proxy_details for the actual lookup.
    """
    iana_conf_file = "{0}/components/iana/iana_config.json".format(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    if os.path.isfile(iana_conf_file):
        # FIX: context manager closes the handle; the original
        # `json.loads(open(...).read())` leaked an open file.
        with open(iana_conf_file) as conf_fp:
            iana_config = json.loads(conf_fp.read())
        proxy_iana = IanaTransform(iana_config["IANA"])
        for conn in self._proxy_scores:
            clientip = conn[self._conf["proxy_score_fields"]["clientip"]]
            fulluri = conn[self._conf["proxy_score_fields"]["fulluri"]]
            date = conn[self._conf["proxy_score_fields"]["p_date"]].split('-')
            if len(date) == 3:
                year = date[0]
                month = date[1].zfill(2)
                day = date[2].zfill(2)
                # Hour component of the HH:MM:SS time field.
                hh = conn[self._conf["proxy_score_fields"]["p_time"]].split(":")[0]
                self._get_proxy_details(fulluri, clientip, year, month, day,
                                        hh, proxy_iana)
Example #2
Source File: test_app.py From hydrus with MIT License | 7 votes |
def test_IriTemplate(self):
    """Test structure of IriTemplates attached to collections"""
    index = self.client.get("/{}".format(self.API_NAME))
    assert index.status_code == 200
    endpoints = json.loads(index.data.decode('utf-8'))
    api_prefix = "/{}/".format(self.API_NAME)
    paging_params = ("limit", "offset", "pageIndex")
    for endpoint in endpoints:
        collection_name = "/".join(endpoints[endpoint].split(api_prefix)[1:])
        if collection_name not in self.doc.collections:
            continue
        response_get = self.client.get(endpoints[endpoint])
        assert response_get.status_code == 200
        response_get_data = json.loads(response_get.data.decode('utf-8'))
        assert "search" in response_get_data
        assert "mapping" in response_get_data["search"]
        collection = self.doc.collections[collection_name]["collection"]
        class_ = self.doc.parsed_classes[collection.class_.title]["class"]
        class_props = [prop.prop for prop in class_.supportedProperty]
        # Every searchable property beyond the paging controls must be a
        # supported property of the collection's class.
        for mapping in response_get_data["search"]["mapping"]:
            if mapping["property"] not in paging_params:
                assert mapping["property"] in class_props
Example #3
Source File: worker.py From incubator-spot with Apache License 2.0 | 6 votes |
def _initialize_members(self, db_name, hdfs_app_path, kafka_consumer, conf_type):
    """Wire up logger, paths and pipeline configuration for the flow worker.

    :param db_name: target database name.
    :param hdfs_app_path: HDFS application root path.
    :param kafka_consumer: consumer instance feeding this worker.
    :param conf_type: key into the "pipelines" section of ingest_conf.json.
    """
    # get logger instance.
    self._logger = Util.get_logger('SPOT.INGEST.WRK.FLOW')

    self._db_name = db_name
    self._hdfs_app_path = hdfs_app_path

    # read proxy configuration.
    self._script_path = os.path.dirname(os.path.abspath(__file__))
    conf_file = "{0}/ingest_conf.json".format(
        os.path.dirname(os.path.dirname(self._script_path)))
    # FIX: context manager closes the handle; the original
    # `json.loads(open(...).read())` leaked an open file.
    with open(conf_file) as conf_fp:
        conf = json.loads(conf_fp.read())
    self._conf = conf["pipelines"][conf_type]
    self._id = "spot-{0}-worker".format(conf_type)

    self._process_opt = self._conf['process_opt']
    self._local_staging = self._conf['local_staging']
    self.kafka_consumer = kafka_consumer

    # self._cursor = hive_engine.create_connection()
    self._cursor = hive_engine
Example #4
Source File: worker.py From incubator-spot with Apache License 2.0 | 6 votes |
def _initialize_members(self, db_name, hdfs_app_path, kafka_consumer, conf_type, processes):
    """Wire up logger, paths and pipeline/Spark configuration for the proxy worker.

    :param db_name: target database name.
    :param hdfs_app_path: HDFS application root path.
    :param kafka_consumer: consumer instance feeding this worker.
    :param conf_type: key into the "pipelines" section of ingest_conf.json.
    :param processes: number of worker processes to use.
    """
    # get logger instance.
    self._logger = Util.get_logger('SPOT.INGEST.WRK.PROXY')

    self._db_name = db_name
    self._hdfs_app_path = hdfs_app_path
    self._kafka_consumer = kafka_consumer

    # read proxy configuration.
    self._script_path = os.path.dirname(os.path.abspath(__file__))
    conf_file = "{0}/ingest_conf.json".format(
        os.path.dirname(os.path.dirname(self._script_path)))
    # FIX: context manager closes the handle; the original
    # `json.loads(open(...).read())` leaked an open file.
    with open(conf_file) as conf_fp:
        conf = json.loads(conf_fp.read())
    self._spark_conf = conf["spark-streaming"]
    self._conf = conf["pipelines"][conf_type]
    self._processes = processes
Example #5
Source File: dns_oa.py From incubator-spot with Apache License 2.0 | 6 votes |
def _get_suspicious_details(self):
    """Fetch DNS details for every suspicious score entry.

    Loads the IANA code mappings (when the config file exists) and, for each
    scored DNS connection, derives the query name and the UTC date/hour from
    the unix timestamp, then delegates to _get_dns_details.
    """
    iana_conf_file = "{0}/components/iana/iana_config.json".format(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    if os.path.isfile(iana_conf_file):
        # FIX: context manager closes the handle; the original
        # `json.loads(open(...).read())` leaked an open file.
        with open(iana_conf_file) as conf_fp:
            iana_config = json.loads(conf_fp.read())
        dns_iana = IanaTransform(iana_config["IANA"])
        for conn in self._dns_scores:
            timestamp = conn[self._conf["dns_score_fields"]["unix_tstamp"]]
            full_date = datetime.datetime.utcfromtimestamp(
                int(timestamp)).strftime('%Y-%m-%d %H:%M:%S')
            date = full_date.split(" ")[0].split("-")
            # get date parameters.
            yr = date[0]
            mn = date[1]
            dy = date[2]
            time = full_date.split(" ")[1].split(":")
            hh = int(time[0])
            dns_qry_name = conn[self._conf["dns_score_fields"]["dns_qry_name"]]
            self._get_dns_details(dns_qry_name, yr, mn, dy, hh, dns_iana)
Example #6
Source File: gti.py From incubator-spot with Apache License 2.0 | 6 votes |
def _call_gti(self, command, num_values):
    """Run the McAfee GTI client command and return its parsed responses.

    :param command: shell command line for the GTI client.
    :param num_values: number of reputation entries expected; used to build
        the default-reputation response list on failure.
    :returns: list of response dicts from the client, or a list of
        default-reputation dicts when the call or parsing fails.
    """
    try:
        # NOTE(review): shell=True with an interpolated command string is a
        # shell-injection risk if `command` ever contains untrusted input.
        response_json = check_output(command, shell=True)
        # The client output carries a trailing newline; drop the final
        # character before parsing.
        result_dict = json.loads(response_json[0:len(response_json) - 1])
        responses = result_dict['a']
        return responses
    except CalledProcessError as e:
        self._logger.error(
            "Error calling McAfee GTI client in gti module: " + e.output)
        error_resp = [{self.REP_KEY: self.DEFAULT_REP}] * num_values
        return error_resp
    except ValueError as e:
        # FIX: ValueError has no `.message` attribute on Python 3 (it raised
        # AttributeError here); str(e) works on both Python 2 and 3.
        self._logger.error(
            "Error reading JSON response in gti module: " + str(e))
        error_resp = [{self.REP_KEY: self.DEFAULT_REP}] * num_values
        return error_resp
Example #7
Source File: test_app.py From hydrus with MIT License | 6 votes |
def test_Index(self):
    """Test for the index."""
    root = "/{}".format(self.API_NAME)
    response_get = self.client.get(root)
    endpoints = json.loads(response_get.data.decode('utf-8'))
    response_post = self.client.post(root, data=dict(foo="bar"))
    response_put = self.client.put(root, data=dict(foo="bar"))
    response_delete = self.client.delete(root)
    # The entry point must describe itself correctly...
    assert "@context" in endpoints
    assert endpoints["@id"] == root
    assert endpoints["@type"] == "EntryPoint"
    assert response_get.status_code == 200
    # ...and reject every mutating verb.
    for response in (response_post, response_put, response_delete):
        assert response.status_code == 405
Example #8
Source File: test_app.py From hydrus with MIT License | 6 votes |
def test_Vocab(self):
    """Test the vocab."""
    vocab_route = "/{}/vocab#".format(self.API_NAME)
    response_get = self.client.get(vocab_route)
    response_get_data = json.loads(response_get.data.decode('utf-8'))
    assert "@context" in response_get_data
    assert response_get_data["@type"] == "ApiDocumentation"
    assert response_get_data["@id"] == "{}{}/vocab".format(
        self.HYDRUS_SERVER_URL, self.API_NAME)
    assert response_get.status_code == 200
    # The vocab endpoint is read-only: mutating verbs must return 405.
    payload = json.dumps(dict(foo='bar'))
    response_delete = self.client.delete(vocab_route)
    assert response_delete.status_code == 405
    response_put = self.client.put(vocab_route, data=payload)
    assert response_put.status_code == 405
    response_post = self.client.post(vocab_route, data=payload)
    assert response_post.status_code == 405
Example #9
Source File: test_app.py From hydrus with MIT License | 6 votes |
def test_object_PUT_at_id(self):
    """Create object in collection using PUT at specific ID."""
    index = self.client.get("/{}".format(self.API_NAME))
    assert index.status_code == 200
    endpoints = json.loads(index.data.decode('utf-8'))
    for endpoint in endpoints:
        collection_name = "/".join(endpoints[endpoint].split(
            "/{}/".format(self.API_NAME))[1:])
        if collection_name in self.doc.collections:
            collection = self.doc.collections[collection_name]["collection"]
            class_ = self.doc.parsed_classes[collection.class_.title]["class"]
            class_methods = [x.method for x in class_.supportedOperation]
            if "PUT" in class_methods:
                # FIX: the dummy object was generated twice (once
                # unconditionally, once inside this branch); generating it
                # once inside the PUT check is sufficient.
                dummy_object = gen_dummy_object(
                    collection.class_.title, self.doc)
                put_response = self.client.put(
                    '{}/{}'.format(endpoints[endpoint], uuid.uuid4()),
                    data=json.dumps(dummy_object))
                assert put_response.status_code == 201
Example #10
Source File: test_app.py From hydrus with MIT License | 6 votes |
def test_object_PUT_at_ids(self):
    """PUT multiple dummy objects to a collection's /add/<ids> endpoint."""
    index = self.client.get("/{}".format(self.API_NAME))
    assert index.status_code == 200
    endpoints = json.loads(index.data.decode('utf-8'))
    for endpoint in endpoints:
        collection_name = "/".join(endpoints[endpoint].split(
            "/{}/".format(self.API_NAME))[1:])
        if collection_name in self.doc.collections:
            collection = self.doc.collections[collection_name]["collection"]
            class_ = self.doc.parsed_classes[collection.class_.title]["class"]
            class_methods = [x.method for x in class_.supportedOperation]
            data_ = {"data": list()}
            objects = list()
            ids = ""
            # FIX: accumulate the comma-separated ids (the original
            # reassigned `ids` each pass, keeping only the last UUID) and
            # use a throwaway loop variable (the original shadowed the
            # `index` response bound above).
            for _ in range(3):
                objects.append(gen_dummy_object(
                    collection.class_.title, self.doc))
                ids += "{},".format(uuid.uuid4())
            data_["data"] = objects
            if "PUT" in class_methods:
                put_response = self.client.put(
                    '{}/add/{}'.format(endpoints[endpoint], ids),
                    data=json.dumps(data_))
                assert put_response.status_code == 201
Example #11
Source File: test_app.py From hydrus with MIT License | 6 votes |
def test_endpointClass_POST(self):
    """Check non collection Class POST."""
    index = self.client.get("/{}".format(self.API_NAME))
    assert index.status_code == 200
    endpoints = json.loads(index.data.decode('utf-8'))
    api_prefix = "/{}/".format(self.API_NAME)
    for endpoint in endpoints:
        # Skip JSON-LD metadata keys; they are not endpoints.
        if endpoint in ["@context", "@id", "@type"]:
            continue
        class_name = "/".join(endpoints[endpoint].split(api_prefix)[1:])
        if class_name in self.doc.collections:
            continue
        class_ = self.doc.parsed_classes[class_name]["class"]
        class_methods = [op.method for op in class_.supportedOperation]
        if "POST" in class_methods:
            dummy_object = gen_dummy_object(class_.title, self.doc)
            post_response = self.client.post(
                endpoints[endpoint], data=json.dumps(dummy_object))
            assert post_response.status_code == 200
Example #12
Source File: test_app.py From hydrus with MIT License | 6 votes |
def test_endpointClass_DELETE(self):
    """Check non collection Class DELETE."""
    index = self.client.get("/{}".format(self.API_NAME))
    assert index.status_code == 200
    endpoints = json.loads(index.data.decode('utf-8'))
    api_prefix = "/{}/".format(self.API_NAME)
    for endpoint in endpoints:
        # Skip JSON-LD metadata keys; they are not endpoints.
        if endpoint in ["@context", "@id", "@type"]:
            continue
        class_name = "/".join(endpoints[endpoint].split(api_prefix)[1:])
        if class_name in self.doc.collections:
            continue
        class_ = self.doc.parsed_classes[class_name]["class"]
        class_methods = [op.method for op in class_.supportedOperation]
        if "DELETE" in class_methods:
            delete_response = self.client.delete(endpoints[endpoint])
            assert delete_response.status_code == 200
Example #13
Source File: test_app.py From hydrus with MIT License | 6 votes |
def test_endpointClass_GET(self):
    """Check non collection Class GET."""
    index = self.client.get("/{}".format(self.API_NAME))
    assert index.status_code == 200
    endpoints = json.loads(index.data.decode('utf-8'))
    api_prefix = "/{}/".format(self.API_NAME)
    for endpoint in endpoints:
        # Skip JSON-LD metadata keys; they are not endpoints.
        if endpoint in ["@context", "@id", "@type"]:
            continue
        class_name = "/".join(endpoints[endpoint].split(api_prefix)[1:])
        if class_name in self.doc.collections:
            continue
        class_ = self.doc.parsed_classes[class_name]["class"]
        class_methods = [op.method for op in class_.supportedOperation]
        if "GET" in class_methods:
            response_get = self.client.get(endpoints[endpoint])
            assert response_get.status_code == 200
            response_get_data = json.loads(
                response_get.data.decode('utf-8'))
            # Every resource body must carry the JSON-LD metadata keys.
            assert "@context" in response_get_data
            assert "@id" in response_get_data
            assert "@type" in response_get_data
Example #14
Source File: modeling_test.py From BERT-Classification-Tutorial with Apache License 2.0 | 5 votes |
def test_config_to_json_string(self):
    """Round-trip a BertConfig through its JSON string serialization."""
    config = modeling.BertConfig(vocab_size=99, hidden_size=37)
    parsed = json.loads(config.to_json_string())
    self.assertEqual(parsed["vocab_size"], 99)
    self.assertEqual(parsed["hidden_size"], 37)
Example #15
Source File: modeling.py From BERT-Classification-Tutorial with Apache License 2.0 | 5 votes |
def from_json_file(cls, json_file):
    """Constructs a `BertConfig` from a json file of parameters."""
    with tf.gfile.GFile(json_file, "r") as reader:
        contents = reader.read()
    parsed = json.loads(contents)
    return cls.from_dict(parsed)
Example #16
Source File: env.py From indras_net with GNU General Public License v3.0 | 5 votes |
def restore_session(self, session_id=None):
    """
    Restore a previous session from a json file.

    :param session_id: identifier of the saved session; when None the user
        is prompted for it interactively.
    """
    logging.info("-------------------------Start Restoration of states-------------------------------")
    # FIX: catch the specific missing-key error instead of a bare except,
    # which also swallowed KeyboardInterrupt/SystemExit.
    try:
        base_dir = self.props["base_dir"]
    except (KeyError, TypeError):
        base_dir = ""
    if session_id is None:
        session_id = str(self.user.ask("Enter session id: "))
    session_id = str(session_id)
    path = os.path.join(base_dir,
                        "json/" + self.model_nm + session_id + ".json")
    with open(path, "r") as f:
        json_input = f.readline()
    json_input = json.loads(json_input)
    self.from_json(json_input)
    self.restore_agents(json_input)
    self.restore_womb(json_input)
    self.agents.restore_hist_from(json_input["pop_hist"])
Example #17
Source File: test_basic.py From indras_net with GNU General Public License v3.0 | 5 votes |
def test_save_session(self):
    """Save a session and verify every serialized field matches the env."""
    announce('test_save_session')
    report = True
    rand_sess_id = random.randint(1, 10)
    # FIX: catch KeyError specifically rather than a bare except.
    try:
        base_dir = self.env.props["base_dir"]
    except KeyError:
        base_dir = ""
    self.env.save_session(rand_sess_id)
    path = base_dir + "json/" + self.env.model_nm + str(rand_sess_id) + ".json"
    with open(path, "r") as f:
        json_input = f.readline()
    json_input_dic = json.loads(json_input)
    if json_input_dic["period"] != self.env.period:
        report = False
    if json_input_dic["model_nm"] != self.env.model_nm:
        report = False
    if json_input_dic["preact"] != self.env.preact:
        report = False
    if json_input_dic["postact"] != self.env.postact:
        report = False
    if json_input_dic["props"] != self.env.props.to_json():
        report = False
    # Here is why test_save_session failed before: the env generates a new
    # prop_args-2-type prop when restoring a session (see env.from_json),
    # but this test file was using the old prop_arg.
    if json_input_dic["user"] != self.env.user.to_json():
        report = False
    agents = [agent.to_json() for agent in self.env.agents]
    if json_input_dic["agents"] != agents:
        report = False
    # FIX: removed the redundant f.close() — the with-statement already
    # closed the file.
    os.remove(path)
    self.assertEqual(report, True)
Example #18
Source File: test_hiv.py From indras_net with GNU General Public License v3.0 | 5 votes |
def test_save_session(self):
    """Save a session and verify the serialized fields match the env."""
    announce('test_save_session')
    report = True
    rand_sess_id = random.randint(1, 10)
    # FIX: catch KeyError specifically rather than a bare except.
    try:
        base_dir = self.env.props["base_dir"]
    except KeyError:
        base_dir = ""
    self.env.save_session(rand_sess_id)
    path = (base_dir + "json/" + self.env.model_nm
            + str(rand_sess_id) + ".json")
    with open(path, "r") as f:
        json_input = f.readline()
    json_input_dic = json.loads(json_input)
    if json_input_dic["period"] != self.env.period:
        report = False
    if json_input_dic["model_nm"] != self.env.model_nm:
        report = False
    if json_input_dic["preact"] != self.env.preact:
        report = False
    if json_input_dic["postact"] != self.env.postact:
        report = False
    if json_input_dic["props"] != self.env.props.to_json():
        report = False
    if json_input_dic["user"] != self.env.user.to_json():
        report = False
    agents = [agent.to_json() for agent in self.env.agents]
    # NOTE(review): unlike the basic-model version of this test, `agents`
    # is built but never compared against json_input_dic["agents"] —
    # confirm whether that check was dropped intentionally.
    # FIX: removed the redundant f.close() — the with-statement already
    # closed the file.
    os.remove(path)
    self.assertEqual(report, True)
Example #19
Source File: test_hiv.py From indras_net with GNU General Public License v3.0 | 5 votes |
def test_restore_session(self):
    """Save, step, restore a session and verify the saved fields match."""
    announce('test_restore_session')
    report = True
    rand_sess_id = random.randint(1, 10)
    # FIX: catch KeyError specifically rather than a bare except.
    try:
        base_dir = self.env.props["base_dir"]
    except KeyError:
        base_dir = ""
    self.env.save_session(rand_sess_id)
    # Advance the model, then roll it back to the saved state.
    self.env.n_steps(random.randint(1, 10))
    self.env.restore_session(rand_sess_id)
    path = (base_dir + "json/" + self.env.model_nm
            + str(rand_sess_id) + ".json")
    with open(path, "r") as f:
        json_input = f.readline()
    json_input_dic = json.loads(json_input)
    if json_input_dic["period"] != self.env.period:
        report = False
    if json_input_dic["model_nm"] != self.env.model_nm:
        report = False
    if json_input_dic["preact"] != self.env.preact:
        report = False
    if json_input_dic["postact"] != self.env.postact:
        report = False
    if json_input_dic["props"] != self.env.props.to_json():
        report = False
    if json_input_dic["user"] != self.env.user.to_json():
        report = False
    agents = [agent.to_json() for agent in self.env.agents]
    # NOTE(review): `agents` is built but never compared against
    # json_input_dic["agents"] — confirm whether that check was dropped
    # intentionally.
    # FIX: removed the redundant f.close(); os.remove now runs after the
    # with-block has closed the file.
    os.remove(path)
    self.assertEqual(report, True)
Example #20
Source File: test_props.py From indras_net with GNU General Public License v3.0 | 5 votes |
def test_props_overwriting_through_prop_file(prop_args):
    """A value loaded from a prop-file dict must overwrite one set directly."""
    prop_json = "{{ \"{prop_name}\": {{\"val\": 7}} }}".format(prop_name=NUM_AGENTS)
    overrides = json.loads(prop_json)
    prop_args[NUM_AGENTS].val = 100
    prop_args.overwrite_props_from_dict(overrides)
    assert prop_args[NUM_AGENTS].val == 7
Example #21
Source File: models_api.py From indras_net with GNU General Public License v3.0 | 5 votes |
def load_models(indra_dir):
    """Read the models database out of the models JSON file under indra_dir."""
    model_file = indra_dir + MODEL_FILE
    with open(model_file) as fp:
        models_db = json.loads(fp.read())["models_database"]
    return models_db
Example #22
Source File: props_api.py From indras_net with GNU General Public License v3.0 | 5 votes |
def get_props(model_id, indra_dir):
    """Load the props JSON for the model identified by `model_id`.

    Returns an error payload (via err_return) for an invalid id or a
    missing models/props file.
    """
    try:
        model = get_model(model_id, indra_dir=indra_dir)
        props_path = indra_dir + "/" + model["props"]
        with open(props_path) as fp:
            return json.loads(fp.read())
    except (IndexError, KeyError, ValueError):
        return err_return("Invalid model id " + str(model_id))
    except FileNotFoundError:  # noqa: F821
        return err_return("Models or props file not found")
Example #23
Source File: test_api_endpoints.py From indras_net with GNU General Public License v3.0 | 5 votes |
def test_load_models(self):
    """ See if models can be loaded. """
    actual = self.models
    test_model_file = indra_dir + MODEL_FILE
    with open(test_model_file) as fp:
        expected = json.loads(fp.read())["models_database"]
    self.assertEqual(actual, expected)
Example #24
Source File: test_api_endpoints.py From indras_net with GNU General Public License v3.0 | 5 votes |
def test_get_props(self):
    """ See if we can get props. """
    model_id = random.randint(0, 10)
    actual = self.props.get(model_id)
    test_model_file = indra_dir + MODEL_FILE
    with open(test_model_file) as fp:
        models_db = json.loads(fp.read())["models_database"]
    # Read the expected props straight from the model's own props file.
    with open(indra_dir + "/" + models_db[model_id]["props"]) as fp:
        expected = json.loads(fp.read())
    self.assertEqual(actual, expected)
Example #25
Source File: test_api_endpoints.py From indras_net with GNU General Public License v3.0 | 5 votes |
def test_get_ModelMenu(self):
    """ Testing whether we are getting the menu. """
    actual = self.model_menu.get()
    test_menu_file = indra_dir + "/indra/menu.json"
    with open(test_menu_file) as fp:
        expected = json.loads(fp.read())["menu_database"]
    self.assertEqual(actual, expected)
Example #26
Source File: study_robot.py From 21tb_robot with MIT License | 5 votes |
def select_score_item(self, course_id, score_id):
    """select one scoreitem and do check"""
    params = {'courseId': course_id, 'scoId': score_id, 'firstLoad': 'true'}
    r = self.http.post(self.apis['select_resourse'], params, json_ret=False)
    # FIX: catch the specific parse/lookup failures instead of a bare
    # except, which also swallowed KeyboardInterrupt/SystemExit.
    try:
        location = float(json.loads(r)['location'])
    except (ValueError, KeyError, TypeError):
        # Fall back to a minimal playback location when the response is
        # not valid JSON or lacks a 'location' field.
        location = 0.1
    select_check_api = self.apis['select_check']
    api = select_check_api % (course_id, score_id)
    # FIX: the check response was assigned to `r` but never used.
    self.http.post(api, json_ret=False)
    return location
Example #27
Source File: collector.py From incubator-spot with Apache License 2.0 | 5 votes |
def _initialize_members(self, hdfs_app_path, kafkaproducer, conf_type):
    """Set up logging, configuration, file watcher and worker pool for the
    flow collector.

    :param hdfs_app_path: HDFS application root path.
    :param kafkaproducer: producer used to publish collected files.
    :param conf_type: key into the "pipelines" section of ingest_conf.json.
    """
    # getting parameters.
    self._logger = logging.getLogger('SPOT.INGEST.FLOW')
    self._hdfs_app_path = hdfs_app_path
    self._producer = kafkaproducer

    # get script path
    self._script_path = os.path.dirname(os.path.abspath(__file__))

    # read flow configuration.
    conf_file = "{0}/ingest_conf.json".format(
        os.path.dirname(os.path.dirname(self._script_path)))
    # FIX: context manager closes the handle; the original
    # `json.loads(open(...).read())` leaked an open file.
    with open(conf_file) as conf_fp:
        conf = json.loads(conf_fp.read())
    self._conf = conf["pipelines"][conf_type]

    # set configuration.
    self._collector_path = self._conf['collector_path']
    self._dsource = 'flow'
    self._hdfs_root_path = "{0}/{1}".format(hdfs_app_path, self._dsource)
    self._supported_files = self._conf['supported_files']

    # create collector watcher
    self._watcher = FileWatcher(self._collector_path, self._supported_files)

    # Multiprocessing.
    self._processes = conf["collector_processes"]
    self._ingestion_interval = conf["ingestion_interval"]
    self._pool = Pool(processes=self._processes)
    # TODO: review re-use of hdfs.client
    self._hdfs_client = hdfs.get_client()
Example #28
Source File: collector.py From incubator-spot with Apache License 2.0 | 5 votes |
def _initialize_members(self, hdfs_app_path, kafka_topic, conf_type):
    """Set up logging, configuration, file watcher and worker pool for the
    proxy collector.

    :param hdfs_app_path: HDFS application root path.
    :param kafka_topic: topic to which collected files are published.
    :param conf_type: key into the "pipelines" section of ingest_conf.json.
    """
    # getting parameters.
    self._logger = logging.getLogger('SPOT.INGEST.PROXY')
    self._hdfs_app_path = hdfs_app_path
    self._kafka_topic = kafka_topic

    # get script path
    self._script_path = os.path.dirname(os.path.abspath(__file__))

    # read proxy configuration.
    conf_file = "{0}/ingest_conf.json".format(
        os.path.dirname(os.path.dirname(self._script_path)))
    # FIX: context manager closes the handle; the original
    # `json.loads(open(...).read())` leaked an open file.
    with open(conf_file) as conf_fp:
        conf = json.loads(conf_fp.read())
    self._message_size = conf["kafka"]["message_size"]
    self._conf = conf["pipelines"][conf_type]

    # get collector path.
    self._collector_path = self._conf['collector_path']
    # get supported files
    self._supported_files = self._conf['supported_files']

    # create collector watcher
    self._watcher = FileWatcher(self._collector_path, self._supported_files)

    # Multiprocessing.
    self._processes = conf["collector_processes"]
    self._ingestion_interval = conf["ingestion_interval"]
    self._pool = Pool(processes=self._processes)
Example #29
Source File: collector.py From incubator-spot with Apache License 2.0 | 5 votes |
def _initialize_members(self, hdfs_app_path, kafkaproducer, conf_type):
    """Set up logging, configuration, file watcher and worker pool for the
    DNS collector.

    :param hdfs_app_path: HDFS application root path.
    :param kafkaproducer: producer used to publish collected files.
    :param conf_type: key into the "pipelines" section of ingest_conf.json.
    """
    # getting parameters.
    self._logger = logging.getLogger('SPOT.INGEST.DNS')
    self._hdfs_app_path = hdfs_app_path
    self._producer = kafkaproducer

    # get script path
    self._script_path = os.path.dirname(os.path.abspath(__file__))

    # read dns configuration.
    conf_file = "{0}/ingest_conf.json".format(
        os.path.dirname(os.path.dirname(self._script_path)))
    # FIX: context manager closes the handle; the original
    # `json.loads(open(...).read())` leaked an open file.
    with open(conf_file) as conf_fp:
        conf = json.loads(conf_fp.read())
    self._conf = conf["pipelines"][conf_type]

    # set configuration.
    self._collector_path = self._conf['collector_path']
    self._dsource = 'dns'
    self._hdfs_root_path = "{0}/{1}".format(hdfs_app_path, self._dsource)

    # set configuration.
    self._pkt_num = self._conf['pkt_num']
    self._pcap_split_staging = self._conf['pcap_split_staging']
    self._supported_files = self._conf['supported_files']

    # create collector watcher
    self._watcher = FileWatcher(self._collector_path, self._supported_files)

    # Multiprocessing.
    self._processes = conf["collector_processes"]
    self._ingestion_interval = conf["ingestion_interval"]
    self._pool = Pool(processes=self._processes)
    # TODO: review re-use of hdfs.client
    self._hdfs_client = hdfs.get_client()
Example #30
Source File: graphql.py From incubator-spot with Apache License 2.0 | 5 votes |
def send_query(self):
    """POST self.query (with optional self.variables) to the GraphQL
    endpoint at self.url and return the decoded JSON reply.

    :returns: the parsed JSON response, or a GraphQL-style
        {'errors': [...]} payload when the body cannot be decoded.
    """
    # NOTE(review): `assert` is stripped under `python -O`; consider
    # raising ValueError for these precondition checks instead.
    assert(self.url is not None)
    assert(type(self.url) is str)
    assert(self.query is not None)
    assert(type(self.query) is str)

    data = {
        'query': self.query
    }
    if self.variables is not None and type(self.variables) is dict:
        data['variables'] = self.variables

    encoded_data = json.dumps(data).encode('utf-8')

    http = urllib3.PoolManager()
    response = http.request(
        'POST',
        self.url,
        body=encoded_data,
        headers={
            'Accept': 'application/json',
            'Content-type': 'application/json'
        }
    )
    # FIX: catch only JSON/decoding failures; the original bare except hid
    # every error, including KeyboardInterrupt/SystemExit.
    try:
        return json.loads(response.data.decode('utf-8'))
    except (ValueError, UnicodeDecodeError):
        return {
            'errors': [
                {
                    'status': response.status,
                    'message': 'Failed to contact GraphQL endpoint. Is "{}" the correct URL?'.format(self.url)
                }
            ]
        }