Python json.loads() Examples

The following code examples show how to use json.loads(). They are taken from open source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: incubator-spot   Author: apache   File: proxy_oa.py    Apache License 2.0 9 votes vote down vote up
def _get_suspicious_details(self):
        """Collect per-connection details for each suspicious proxy score.

        Loads the IANA translation config when available; otherwise the
        IANA transform is passed as None to the detail lookup.
        """
        iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        proxy_iana = None  # fix: was undefined (NameError) when the config file was missing
        if os.path.isfile(iana_conf_file):
            with open(iana_conf_file) as f:  # fix: close the config file handle
                iana_config = json.loads(f.read())
            proxy_iana = IanaTransform(iana_config["IANA"])

        for conn in self._proxy_scores:
            clientip = conn[self._conf["proxy_score_fields"]["clientip"]]
            fulluri = conn[self._conf["proxy_score_fields"]["fulluri"]]
            date = conn[self._conf["proxy_score_fields"]["p_date"]].split('-')
            if len(date) == 3:
                year = date[0]
                month = date[1].zfill(2)
                day = date[2].zfill(2)
                hh = (conn[self._conf["proxy_score_fields"]["p_time"]].split(":"))[0]
                self._get_proxy_details(fulluri, clientip, year, month, day, hh, proxy_iana)
Example 2
Project: incubator-spot   Author: apache   File: dns_oa.py    Apache License 2.0 8 votes vote down vote up
def _get_suspicious_details(self):
        """Collect per-query details for each suspicious DNS score entry.

        Loads the IANA translation config when available; otherwise the
        IANA transform is passed as None to the detail lookup.
        """
        iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        dns_iana = None  # fix: was undefined (NameError) when the config file was missing
        if os.path.isfile(iana_conf_file):
            with open(iana_conf_file) as f:  # fix: close the config file handle
                iana_config = json.loads(f.read())
            dns_iana = IanaTransform(iana_config["IANA"])

        for conn in self._dns_scores:
            # Unix timestamp -> "YYYY-MM-DD HH:MM:SS" (UTC).
            timestamp = conn[self._conf["dns_score_fields"]["unix_tstamp"]]
            full_date = datetime.datetime.utcfromtimestamp(int(timestamp)).strftime('%Y-%m-%d %H:%M:%S')

            # get date parameters.
            yr, mn, dy = full_date.split(" ")[0].split("-")
            hh = int(full_date.split(" ")[1].split(":")[0])

            dns_qry_name = conn[self._conf["dns_score_fields"]["dns_qry_name"]]
            self._get_dns_details(dns_qry_name, yr, mn, dy, hh, dns_iana)
Example 3
Project: incubator-spot   Author: apache   File: gti.py    Apache License 2.0 8 votes vote down vote up
def _call_gti(self, command, num_values):
        """Run the McAfee GTI client command and return its parsed responses.

        On any failure, logs the error and returns a list of *num_values*
        default-reputation entries so callers always get one entry per input.
        """
        try:
            response_json = check_output(command, shell=True)
            # Strip the trailing newline the CLI appends before parsing.
            result_dict = json.loads(response_json[:-1])
            responses = result_dict['a']
            return responses

        except CalledProcessError as e:
            # fix: e.output is bytes in Python 3; str-concatenation raised TypeError
            self._logger.error("Error calling McAfee GTI client in gti module: " + str(e.output))
            error_resp = [{self.REP_KEY: self.DEFAULT_REP}] * num_values
            return error_resp

        except ValueError as e:
            # fix: exceptions have no .message attribute in Python 3
            self._logger.error("Error reading JSON response in gti module: " + str(e))
            error_resp = [{self.REP_KEY: self.DEFAULT_REP}] * num_values
            return error_resp
Example 4
Project: Coulomb   Author: DynamoDS   File: data_files_to_sessions.py    MIT License 8 votes vote down vote up
def flush(session_maps):
    """Append each session's buffered lines to its on-disk gzip archive.

    session_maps maps session_id -> list of raw JSON lines; every line must
    carry a matching "SessionID" field (asserted before writing).
    """
    lns_count = sum(len(lines) for lines in session_maps.values())

    log ("Flushing lines/sessions: " + str(lns_count) + " / " + str(len(session_maps.keys())))

    # Verify that the folders exist
    for session_id in sorted(session_maps.keys()):
        log ("Flushing session: " + session_id)
        sessionPath = ensure_have_session(session_id)
        log ("Session path: " + sessionPath)

        # fix: close the gzip handle deterministically, even if a write fails
        with gzip.open(sessionPath, 'ab') as o:
            for ln in session_maps[session_id]:
                assert (json.loads(ln)["SessionID"] == session_id)
                o.write(ln)
            o.flush()
        log ("Flushing complete for: " + session_id)

    log ("Flushing complete. Total sessions:\t" + str(len(sessionIDSet)) + "\tTotal new sessions:\t" + str(len(newSessionIDSet)))
Example 5
Project: god-eye   Author: PiScale   File: test_ping_plugin.py    BSD 3-Clause "New" or "Revised" License 7 votes vote down vote up
def test_get_result(self):
        """End-to-end check that Ping fetches a URL and queues a JSON result.

        NOTE(review): hits the live httpbin.org service, so this test
        requires network access.
        """

        @asyncio.coroutine
        def go():
            # Queue onto which Ping pushes its (awaitable) result.
            queue = asyncio.Queue(loop=self.loop)
            ping = Ping(self.loop, queue)
            with aiohttp.ClientSession(loop=self.loop) as client:
                yield from ping(client, 'http://httpbin.org/get')
            cor_result = yield from queue.get()
            # The queued item is itself awaitable; await it for the raw body.
            result = yield from cor_result
            result = json.loads(result)
            self.assertIsInstance(result, dict)
            self.assertEqual(result['url'], 'http://httpbin.org/get')

        self.loop.run_until_complete(go())
Example 6
Project: AboveTustin   Author: kevinabrandon   File: flightdata.py    MIT License 7 votes vote down vote up
def refresh(self):
        """Re-fetch the aircraft JSON feed and update cached state.

        Best-effort: any failure is printed with a traceback, never raised,
        so a polling loop keeps running across transient errors.
        """
        try:
            #open the data url
            self.req = urlopen(self.data_url)
            try:
                #read data from the url
                self.raw_data = self.req.read()
            finally:
                self.req.close()  # fix: avoid leaking the HTTP connection

            #load in the json
            self.json_data = json.loads(self.raw_data.decode())

            #get time from json
            self.time = datetime.fromtimestamp(self.parser.time(self.json_data))

            #load all the aircarft
            self.aircraft = self.parser.aircraft_data(self.json_data, self.time)

        except Exception:
            print("exception in FlightData.refresh():")
            traceback.print_exc()
Example 7
Project: mycode   Author: gmraabe   File: inventory.py    GNU General Public License v3.0 7 votes vote down vote up
def search_entry(search_str):    # function for searching inventory
    """Look up *search_str* in the inventory file and offer to delete it."""
    with open(filename, 'r') as myfile:            # open file with implied close (with)
        myDict = json.loads(myfile.read())           # read file into a dictionary
    print('The Location for ' + search_str + ' is: ' + myDict.get(search_str, "not found"))
    # fix: test the value directly; the old `type(myDict.get(...))() is None`
    # built a throwaway instance of the value's type just to test for absence.
    if myDict.get(search_str) is None:             # check to see if no entry is found
        input('Press enter to continue')             # if no entry then only option is to return to Main Menu
    else:                                          # if an entry is found
        while True:                                  # loop until a valid selection is made
            print('Enter 1 to delete ' + search_str + ' or 2 to return to Main Menu')
            selection = input()                        # get input from user
            if selection == '1':                         # check if user wishes to delete the found search item
                myDict.pop(search_str)                     # remove search string from dictionary
                with open(filename, 'w') as myfile:        # open file with implied close (with)
                    myfile.write(json.dumps(myDict))         # write dictionary to file
                print(search_str + ' is deleted')          # print confirmation message
                input('Press enter to continue')           # wait for user before continuing
                break
            elif selection == '2':                     # if user wishes to exit to main menu
                break
            else:                                      # if anything else then loop
                print('Invalid selection')
Example 8
Project: mycode   Author: gmraabe   File: iss_tracking.py    GNU General Public License v3.0 7 votes vote down vote up
def isspass(userlat, userlon):
    """Fetch the next ISS passes over (userlat, userlon) and display them.

    NOTE(review): the summary print and turtle write use the globals
    `mylocation`, `yellowlat` and `yellowlon` rather than the parameters —
    presumably set up by the caller; confirm this is intentional.
    """
    passiss = 'http://api.open-notify.org/iss-pass.json'
    passiss = passiss + '?lat=' + str(userlat) + '&lon=' + str(userlon)
    response = urllib.request.urlopen(passiss)
    result = json.loads(response.read())
    # print(result) ## uncomment to see the downloaded result

    # Unix timestamp of the first upcoming pass.
    over = result['response'][0]['risetime']

    style = ('Arial', 6, 'bold')
    mylocation.write(time.ctime(over), font=style)

    print('The next five passes over ' + str(yellowlat) + ' ' + str(yellowlon))
    print('Pass 1 = ' + time.ctime(result['response'][0]['risetime']))
    print('Pass 2 = ' + time.ctime(result['response'][1]['risetime']))
    print('Pass 3 = ' + time.ctime(result['response'][2]['risetime']))
    print('Pass 4 = ' + time.ctime(result['response'][3]['risetime']))
    print('Pass 5 = ' + time.ctime(result['response'][4]['risetime']))



## Get user location 
Example 9
Project: parsechain   Author: Suor   File: chains.py    BSD 2-Clause "Simplified" License 6 votes vote down vote up
def ld(node):
        """Extract and parse the JSON-LD <script> block found under *node*.

        Falls back to the lenient demjson parser for non-strict JSON; if
        that also fails, the original strict-parse error is re-raised.
        """
        text = C.css('script[type="application/ld+json"]').inner_text(node)
        try:
            return json.loads(text)
        except ValueError as e:
            try:
                # Try parsing non-strict
                import demjson
                return demjson.decode(text)
            except Exception:  # fix: bare except also swallowed SystemExit/KeyboardInterrupt
                raise e  # reraise first one

    # Select
Example 10
Project: parsechain   Author: Suor   File: response.py    BSD 2-Clause "Simplified" License 6 votes vote down vote up
def json(self):
        """Deserialize this response's body as JSON and return the result."""
        raw_body = self.body
        return json.loads(raw_body)
Example 11
Project: BERT-Classification-Tutorial   Author: Socialbird-AILab   File: modeling_test.py    Apache License 2.0 6 votes vote down vote up
def test_config_to_json_string(self):
        """Round-trip a BertConfig through to_json_string and check the fields."""
        config = modeling.BertConfig(vocab_size=99, hidden_size=37)
        parsed = json.loads(config.to_json_string())
        for key, expected in (("vocab_size", 99), ("hidden_size", 37)):
            self.assertEqual(parsed[key], expected)
Example 12
Project: BERT-Classification-Tutorial   Author: Socialbird-AILab   File: modeling.py    Apache License 2.0 6 votes vote down vote up
def from_json_file(cls, json_file):
        """Constructs a `BertConfig` from a json file of parameters."""
        with tf.gfile.GFile(json_file, "r") as reader:
            contents = reader.read()
        config_dict = json.loads(contents)
        return cls.from_dict(config_dict)
Example 13
Project: apistar-msgpack   Author: juancarlospaco   File: apistar_msgpack.py    GNU General Public License v3.0 6 votes vote down vote up
def parse(self, body: http.Body) -> typing.Any:
        """Decode a MessagePack request body into native Python data.

        Uses the `msgpack` package when available (has_msgpack), otherwise
        falls back to `umsgpack`.  Every failure mode is translated into a
        BadRequest so callers never see raw parser exceptions.

        NOTE(review): `loads` here is presumably json.loads applied to the
        unpacked payload — confirm against this module's imports.
        """
        if not body:
            raise exceptions.BadRequest(detail=f'Empty MessagePack: {body}.')
        try:
            if has_msgpack:
                data_from_msgpack = loads(msgpack.unpackb(body))
            else:
                data_from_msgpack = loads(umsgpack.unpackb(body))
        except UnicodeEncodeError as error:
            raise exceptions.BadRequest(
                detail=f'Invalid Unicode UTF-8 on MessagePack error: {error}.')
        except ValueError as error:
            raise exceptions.BadRequest(
                detail=f'Invalid Keys or Values on MessagePack error: {error}')
        except JSONDecodeError as error:
            raise exceptions.BadRequest(detail=f'Invalid MessagePack: {error}')
        except Exception as error:
            # Last-resort catch-all so the client always gets a 400, not a 500.
            raise exceptions.BadRequest(
                detail=f'{self} Unknown Exception: {error}, parsing {body}.')
        else:
            return data_from_msgpack
Example 14
Project: navitia_client   Author: leonardbinet   File: client.py    MIT License 5 votes vote down vote up
def _extract_nbr_results(self, response):
        """
        Out of a request response, finds total number of results.
        Returns False when the payload carries no pagination block.
        """
        payload = json.loads(response.text)
        try:
            return payload["pagination"]["total_result"]
        except KeyError:
            # No pagination in page
            print("WARNING: not able to extract pagination out of first request.")
            return False
Example 15
Project: navitia_client   Author: leonardbinet   File: parser.py    MIT License 5 votes vote down vote up
def parse_requests(self):
        """Parse every HTTP-200 response text in self.results into a dict.

        Successful parses land in self.parsed; decode failures are recorded
        in self.parsing_errors, keyed by page.
        """
        for page_number, answer in self.results.items():
            # Only pages that answered 200 are worth parsing.
            if answer.status_code != 200:
                continue
            try:
                self.parsed[page_number] = json.loads(answer.text)
            except ValueError:
                print("JSON decoding error.")
                self.parsing_errors[page_number] = "JSON decoding error"
Example 16
Project: navitia_client   Author: leonardbinet   File: parser.py    MIT License 5 votes vote down vote up
def extract_nbr_expected_items(self):
        """Read the expected item count from the first page's pagination block.

        Returns None without touching state when the first request failed.
        """
        first_page = self.results[0]
        if first_page.status_code != 200:
            return None
        # Parse first request answer and pull pagination -> total_result.
        parsed = json.loads(first_page.text)
        self.nbr_expected_items = parsed["pagination"]["total_result"]
Example 17
Project: factotum   Author: Denubis   File: settings.py    GNU General Public License v3.0 5 votes vote down vote up
def configAuthenticate(username, password):
	"""Fetch a Factorio auth token for *username* and store it in settings.json.

	Exits the process when the factorio binary is missing or the login
	request fails; settings-file problems are reported but not fatal.
	"""
	FACTORIOPATH = getFactorioPath()

	url = "https://auth.factorio.com/api-login"
	params = {'username': username, 'password': password, 'apiVersion': 2}

	if not os.path.isfile("%s/bin/x64/factorio" % (FACTORIOPATH)):
		print("Could not find factorio at %s" % (FACTORIOPATH))
		sys.exit(1)

	print("Fetching token for %s" % (username))
	myResponse = requests.post(url, data=params, verify=True)
	if myResponse.ok:
		# fix: this branch mixed tabs and spaces (TabError risk); re-indented
		# consistently.  The API returns a JSON list whose first element is
		# the token.
		jData = json.loads(myResponse.text)
		print("Writing %s to settings.json" % (jData[0]))
	else:
		# If response code is not ok (200), print the resulting http error code with description
		myResponse.raise_for_status()
		sys.exit(1)

	try:
		with codecs.open(getSettingsFile(), 'r', encoding='utf-8') as settings_file:
			settingsJson = json.load(settings_file)
		settingsJson['token'] = jData[0]
		settingsJson['username'] = username

		with codecs.open("%s/config/settings.json" % (FACTORIOPATH), 'w', encoding='utf-8') as settings_file:
			json.dump(settingsJson, settings_file, indent=4)
	except Exception as e:
		print(e)
		print("Help! Can't deal with the settings file!")
Example 18
Project: fs_image   Author: facebookincubator   File: update_package_db.py    MIT License 5 votes vote down vote up
def _parse_updates(
    description: str,
    items: List[Tuple[Package, Tag, str]],
) -> ExplicitUpdates:
    """Collect per-package, per-tag option updates, rejecting conflicts.

    *items* carries (package, tag, JSON-encoded options).  The first options
    seen for a (package, tag) pair win; a different options dict for the
    same pair raises RuntimeError (exact duplicates are also rejected, by
    identity — see the comment below).
    """
    updates = {}
    for package, tag, opts_str in items:
        opts = json.loads(opts_str)
        stored_opts = updates.setdefault(package, {}).setdefault(tag, opts)
        if stored_opts is not opts:  # `!=` would permit duplicates
            # This detects conflicts only within a single update type,
            # `_get_updated_db` detects conflicts between types.
            raise RuntimeError(
                f'Conflicting "{description}" updates for {package} / {tag}: '
                # fix: closing paren was missing from the message
                f'{opts} ({id(opts)}) is not {stored_opts} ({id(stored_opts)}).'
            )
    return updates
Example 19
Project: fs_image   Author: facebookincubator   File: pluggable.py    MIT License 5 votes vote down vote up
def from_json(cls, json_cfg: str) -> 'Pluggable':
        """Uniform parsing for Storage configs e.g. on the command-line."""
        parsed_cfg = json.loads(json_cfg)
        parsed_cfg['kind']  # KeyError if not set, or if not a dict
        return cls.make(**parsed_cfg)
Example 20
Project: fs_image   Author: facebookincubator   File: repo_server.py    MIT License 5 votes vote down vote up
def add_snapshot_db_objs(db):
    """Build a {location: metadata dict} map from a repo-snapshot SQLite DB.

    Reads the `repomd` table for per-repo repomd.xml blobs, then the
    `repodata` and `rpm` tables for individual files.  `set_new_key`
    presumably rejects duplicate locations — confirm in its definition.
    """
    location_to_obj = {}
    for repo, build_timestamp, metadata_xml in db.execute('''
    SELECT "repo", "build_timestamp", "metadata_xml" FROM "repomd"
    ''').fetchall():
        set_new_key(
            location_to_obj,
            os.path.join(repo, 'repodata/repomd.xml'),
            {
                'size': len(metadata_xml),
                'build_timestamp': build_timestamp,
                'content_bytes': metadata_xml.encode(),
            }
        )
    for table in ['repodata', 'rpm']:
        for (
            repo, path, build_timestamp, checksum, error, error_json, size,
            storage_id,
        ) in db.execute(f'''
        SELECT
            "repo", "path", "build_timestamp", "checksum", "error",
            "error_json", "size", "storage_id"
        FROM "{table}"
        ''').fetchall():
            obj = {
                'checksum': checksum,
                'size': size,
                'build_timestamp': build_timestamp,
            }
            # `storage_id` is populated in the DB table for `mutable_rpm`
            # errors, but we don't want to serve up those files.
            if storage_id and not error and not error_json:
                obj['storage_id'] = storage_id
            elif error and error_json:
                obj['error'] = {'error': error, **json.loads(error_json)}
            else:  # pragma: no cover
                raise AssertionError(f'{storage_id} {error} {error_json}')
            set_new_key(location_to_obj, os.path.join(repo, path), obj)
    return location_to_obj
Example 21
Project: fs_image   Author: facebookincubator   File: test_layer_mount_config.py    MIT License 5 votes vote down vote up
def test_config_merging(self):
        """main() should merge defaults into the supplied runtime_source config."""
        output = StringIO()
        main(StringIO('{"runtime_source": "meow"}'), output, '//layer:path')
        expected = {
            'runtime_source': 'meow',
            'is_directory': True,
            'build_source': {'source': '//layer:path', 'type': 'layer'},
        }
        self.assertEqual(expected, json.loads(output.getvalue()))
Example 22
Project: fs_image   Author: facebookincubator   File: mount.py    MIT License 5 votes vote down vote up
def build(self, subvol: Subvol, layer_opts: LayerOpts):
        """Serialize this mount's metadata into the subvolume and bind-mount its source.

        Writes is_directory / build_source / runtime_source under the mount's
        marker directory, creates the mountpoint (dir or file), then performs
        a read-only recursive bind mount via ro_rbind_mount.
        """
        mount_dir = os.path.join(META_MOUNTS_DIR, self.mountpoint, MOUNT_MARKER)
        for name, data in (
            # NB: Not exporting self.mountpoint since it's implicit in the path.
            ('is_directory', self.is_directory),
            ('build_source', self.build_source._asdict()),
            ('runtime_source', json.loads(self.runtime_source)),
        ):
            procfs_serde.serialize(data, subvol, os.path.join(mount_dir, name))
        source_path = self.build_source.to_path(
            target_to_path=layer_opts.target_to_path,
            subvolumes_dir=layer_opts.subvolumes_dir,
        )
        # Support mounting directories and non-directories...  This check
        # follows symlinks for the mount source, which seems correct.
        is_dir = os.path.isdir(source_path)
        assert is_dir == self.is_directory, self
        if is_dir:
            subvol.run_as_root([
                'mkdir', '--mode=0755', subvol.path(self.mountpoint),
            ])
        else:  # Regular files, device nodes, FIFOs, you name it.
            # `touch` lacks a `--mode` argument, but the mode of this
            # mountpoint will be shadowed anyway, so let it be whatever.
            subvol.run_as_root(['touch', subvol.path(self.mountpoint)])
        ro_rbind_mount(source_path, subvol, self.mountpoint)
Example 23
Project: paws   Author: funkybob   File: rpc.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def post(self, request):
        """Dispatch an RPC POST to the method named by X-Rpc-Action.

        Returns a 404 response when the header is absent or does not name a
        registered RPC method; otherwise calls the method with the decoded
        JSON body as keyword arguments.
        """
        action = request.headers.get('X-Rpc-Action')
        if not action:
            return response(status=404)
        method = getattr(self, action, None)
        if not is_rpc_method(method):
            return response(status=404)
        data = json.loads(request.body) if request.body else {}
        log.info("Method: %r %r", action, data)
        return method(request, **data)
Example 24
Project: odorik   Author: nijel   File: __init__.py    GNU General Public License v3.0 5 votes vote down vote up
def get_json(self, path, args=None):
        """JSON parser on top of get."""
        parsed = json.loads(self.get(path, args))
        has_errors = isinstance(parsed, dict) and 'errors' in parsed
        if has_errors:
            raise OdorikException(parsed['errors'])
        return parsed
Example 25
Project: odorik   Author: nijel   File: test_main.py    GNU General Public License v3.0 5 votes vote down vote up
def test_version_json(self):
        """Test version printing."""
        raw = execute(['--format', 'json', 'version'], True)
        parsed = json.loads(raw)
        self.assertEqual(parsed, {'version': odorik.__version__})
Example 26
Project: odorik   Author: nijel   File: test_main.py    GNU General Public License v3.0 5 votes vote down vote up
def test_data_list_json(self):
        """Test getting data list."""
        register_uris()
        raw = execute(
            ['--format', 'json', 'mobile-data', '--list'],
            True
        )
        parsed = json.loads(raw)
        self.assertEqual(1, len(parsed))
Example 27
Project: 21tb_robot   Author: iloghyr   File: study_robot.py    MIT License 5 votes vote down vote up
def select_score_item(self, course_id, score_id):
        """select one scoreitem and do check

        Returns the playback location reported by the server, falling back
        to 0.1 when the response cannot be parsed.
        """
        params = {'courseId': course_id,
                  'scoId': score_id,
                  'firstLoad': 'true'}
        r = self.http.post(self.apis['select_resourse'], params, json_ret=False)
        try:
            location = float(json.loads(r)['location'])
        except Exception:  # fix: bare except also caught SystemExit/KeyboardInterrupt
            location = 0.1
        select_check_api = self.apis['select_check']
        api = select_check_api % (course_id, score_id)
        # Fire the check request; its response is intentionally unused.
        self.http.post(api, json_ret=False)
        return location
Example 28
Project: http2mqtt   Author: chris-gunawardena   File: chip_neopixels.py    MIT License 5 votes vote down vote up
def on_message(client, userdata, msg):
    """MQTT callback: switch the lights based on the JSON payload's light-state.

    Best-effort — any parse/lookup failure is reported and swallowed so the
    MQTT loop keeps running.
    """
    print(msg.topic+" "+str(msg.qos)+"====" + str(msg.payload))
    try:
        root = json.loads(str(msg.payload.decode('utf-8')))
        light_state = root["body"]["result"]["parameters"]["light-state"]  # "on"
        if msg.topic == "/lights":
            if light_state == "on":
                print("ON")
                on()
            elif light_state == "off":
                print("OFF")
                off()
            else:
                off()
                print("NO MATCH")
    # fix: bare except swallowed SystemExit/KeyboardInterrupt too
    except Exception:  # includes simplejson.decoder.JSONDecodeError
        print ('Decoding JSON failed')
Example 29
Project: autolims   Author: scottbecker   File: test_autoprotocol_interpreter.py    MIT License 5 votes vote down vote up
def test_pipette_operations(self):
        """Execute a pipette-operations protocol and verify resulting aliquot volumes."""
        #same as https://secure.transcriptic.com/becker-lab/p19aqhcbep8ea/runs/r19uvbk55tb54
        with open(os.path.join(os.path.dirname(__file__),'data','pipette_operations.json')) as f:
            protocol = json.loads(f.read())

        run = Run.objects.create(title='Pipette Operation Run',
                                 test_mode=False,
                                 protocol=protocol,
                                 project = self.project,
                                 owner=self.user)
        assert isinstance(run, Run)

        execute_run(run)

        # One container and two instructions come from this protocol file.
        self.assertEqual(run.containers.count(),1)
        self.assertEqual(run.instructions.count(),2)

        test_plate = run.containers.get(label='test plate')

        # Expected final volume (uL) per well index after execution.
        volumes = [Decimal('745'),Decimal('85'),Decimal('20'),Decimal('20'),Decimal('30')]

        self.assertEqual(test_plate.aliquots.count(),5)

        for aq in test_plate.aliquots.all():
            assert isinstance(aq, Aliquot)
            self.assertEqual(Decimal(aq.volume_ul),volumes[aq.well_idx])
Example 30
Project: autolims   Author: scottbecker   File: test_run.py    MIT License 5 votes vote down vote up
def test_run_setup_all_new_containers(self):
        """Creating a Run from a protocol should create its instructions and containers."""
        #same as https://secure.transcriptic.com/becker-lab/p19aqhcbep8ea/runs/r19u4jkqxhbt8
        with open(os.path.join(os.path.dirname(__file__),'data','oligosynthesis.json')) as f:
            protocol = json.loads(f.read())

        run = Run.objects.create(title='Oligosynthesis Run',
                                 test_mode=False,
                                 protocol=protocol,
                                 project = self.project,
                                 owner=self.user)
        assert isinstance(run,Run)


        #check that instructions have been created and they aren't executed

        self.assertEqual(run.instructions.count(),6)

        self.assertEqual(run.containers.count(),2)
Example 31
Project: autolims   Author: scottbecker   File: test_run.py    MIT License 5 votes vote down vote up
def test_run_setup_existing_containers(self):
        """ Check that Runs can be created that reference existing containers"""

        #create existing containers to be referenced

        existing_container = Container.objects.create(container_type_id = 'micro-1.5',
                                                   label = 'My Container',
                                                   test_mode = False,
                                                   storage_condition = Temperature.cold_80.name,
                                                   status = 'available',
                                                   organization = self.org
                                                   )



        #same as https://secure.transcriptic.com/becker-lab/p19aqhcbep8ea/runs/r19uqqkmr5u8f
        with open(os.path.join(os.path.dirname(__file__),'data','pellet_bacteria.json')) as f:
            protocol = json.loads(f.read())

        #update the protocol to reference the correct id post import
        # (the imported protocol refers to the tube by its original id)

        protocol['refs']['bacteria_tube']['id'] = existing_container.id


        run = Run.objects.create(title='Pellet Bacteria Run',
                         test_mode=False,
                         protocol=protocol,
                         project = self.project,
                         owner=self.user)
        assert isinstance(run,Run)


        #check that refs is updated correctly

        self.assertEqual(run.instructions.count(),13)

        self.assertEqual(run.containers.count(),4)
Example 32
Project: autolims   Author: scottbecker   File: custom_connection.py    MIT License 5 votes vote down vote up
def from_file(path):
        """Loads connection from file"""
        with open(expanduser(path), 'r') as config_file:
            config = json.loads(config_file.read())
        return CustomConnection(**config)


    #custom version that prevents ssl verification
Example 33
Project: incubator-spot   Author: apache   File: worker.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self, db_name, hdfs_app_path, kafka_consumer, conf_type):
        """Set up logger, paths and pipeline configuration for the flow worker."""
        # get logger instance.
        self._logger = Util.get_logger('SPOT.INGEST.WRK.FLOW')

        self._db_name = db_name
        self._hdfs_app_path = hdfs_app_path

        # read proxy configuration.
        self._script_path = os.path.dirname(os.path.abspath(__file__))
        conf_file = "{0}/ingest_conf.json".format(os.path.dirname(os.path.dirname(self._script_path)))
        # fix: close the config file handle instead of leaking it
        with open(conf_file) as f:
            conf = json.loads(f.read())
        self._conf = conf["pipelines"][conf_type]
        self._id = "spot-{0}-worker".format(conf_type)

        self._process_opt = self._conf['process_opt']
        self._local_staging = self._conf['local_staging']
        self.kafka_consumer = kafka_consumer

        # self._cursor = hive_engine.create_connection()
        self._cursor = hive_engine
Example 34
Project: incubator-spot   Author: apache   File: collector.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self, hdfs_app_path, kafkaproducer, conf_type):
        """Wire up logging, config, file watcher and worker pool for the flow collector."""
        # getting parameters.
        self._logger = logging.getLogger('SPOT.INGEST.FLOW')
        self._hdfs_app_path = hdfs_app_path
        self._producer = kafkaproducer

        # get script path
        self._script_path = os.path.dirname(os.path.abspath(__file__))

        # read flow configuration.
        conf_file = "{0}/ingest_conf.json".format(os.path.dirname(os.path.dirname(self._script_path)))
        # fix: close the config file handle instead of leaking it
        with open(conf_file) as f:
            conf = json.loads(f.read())
        self._conf = conf["pipelines"][conf_type]

        # set configuration.
        self._collector_path = self._conf['collector_path']
        self._dsource = 'flow'
        self._hdfs_root_path = "{0}/{1}".format(hdfs_app_path, self._dsource)

        self._supported_files = self._conf['supported_files']

        # create collector watcher
        self._watcher = FileWatcher(self._collector_path, self._supported_files)

        # Multiprocessing.
        self._processes = conf["collector_processes"]
        self._ingestion_interval = conf["ingestion_interval"]
        self._pool = Pool(processes=self._processes)
        # TODO: review re-use of hdfs.client
        self._hdfs_client = hdfs.get_client()
Example 35
Project: incubator-spot   Author: apache   File: worker.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self, db_name, hdfs_app_path, kafka_consumer, conf_type, processes):
        """Set up logger, Spark-streaming and pipeline configuration for the proxy worker."""
        # get logger instance.
        self._logger = Util.get_logger('SPOT.INGEST.WRK.PROXY')

        self._db_name = db_name
        self._hdfs_app_path = hdfs_app_path
        self._kafka_consumer = kafka_consumer

        # read proxy configuration.
        self._script_path = os.path.dirname(os.path.abspath(__file__))
        conf_file = "{0}/ingest_conf.json".format(os.path.dirname(os.path.dirname(self._script_path)))
        # fix: close the config file handle instead of leaking it
        with open(conf_file) as f:
            conf = json.loads(f.read())
        self._spark_conf = conf["spark-streaming"]
        self._conf = conf["pipelines"][conf_type]
        self._processes = processes
Example 36
Project: incubator-spot   Author: apache   File: collector.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self, hdfs_app_path, kafka_topic, conf_type):
        """Wire up logging, config, file watcher and worker pool for the proxy collector."""
        # getting parameters.
        self._logger = logging.getLogger('SPOT.INGEST.PROXY')
        self._hdfs_app_path = hdfs_app_path
        self._kafka_topic = kafka_topic

        # get script path
        self._script_path = os.path.dirname(os.path.abspath(__file__))

        # read proxy configuration.
        conf_file = "{0}/ingest_conf.json".format(os.path.dirname(os.path.dirname(self._script_path)))
        # fix: close the config file handle instead of leaking it
        with open(conf_file) as f:
            conf = json.loads(f.read())
        self._message_size = conf["kafka"]["message_size"]
        self._conf = conf["pipelines"][conf_type]

        # get collector path.
        self._collector_path = self._conf['collector_path']

        #get supported files
        self._supported_files = self._conf['supported_files']

        # create collector watcher
        self._watcher = FileWatcher(self._collector_path, self._supported_files)

        # Multiprocessing.
        self._processes = conf["collector_processes"]
        self._ingestion_interval = conf["ingestion_interval"]
        self._pool = Pool(processes=self._processes)
Example 37
Project: incubator-spot   Author: apache   File: collector.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self, hdfs_app_path, kafkaproducer, conf_type):
        """Wire up logging, config, file watcher and worker pool for the DNS collector."""
        # getting parameters.
        self._logger = logging.getLogger('SPOT.INGEST.DNS')
        self._hdfs_app_path = hdfs_app_path
        self._producer = kafkaproducer

        # get script path
        self._script_path = os.path.dirname(os.path.abspath(__file__))

        # read dns configuration.
        conf_file = "{0}/ingest_conf.json".format(os.path.dirname(os.path.dirname(self._script_path)))
        # fix: close the config file handle instead of leaking it
        with open(conf_file) as f:
            conf = json.loads(f.read())
        self._conf = conf["pipelines"][conf_type]

        # set configuration.
        self._collector_path = self._conf['collector_path']
        self._dsource = 'dns'
        self._hdfs_root_path = "{0}/{1}".format(hdfs_app_path, self._dsource)

        # set configuration.
        self._pkt_num = self._conf['pkt_num']
        self._pcap_split_staging = self._conf['pcap_split_staging']
        self._supported_files = self._conf['supported_files']

        # create collector watcher
        self._watcher = FileWatcher(self._collector_path, self._supported_files)

        # Multiprocessing.
        self._processes = conf["collector_processes"]
        self._ingestion_interval = conf["ingestion_interval"]
        self._pool = Pool(processes=self._processes)
        # TODO: review re-use of hdfs.client
        self._hdfs_client = hdfs.get_client()
Example 38
Project: incubator-spot   Author: apache   File: graphql.py    Apache License 2.0 5 votes vote down vote up
def send_query(self):
        """POST ``self.query`` (plus optional ``self.variables``) to the GraphQL
        endpoint at ``self.url`` and return the decoded JSON response.

        On a non-JSON or undecodable response a GraphQL-style ``errors`` object
        is returned instead of raising.
        """
        assert(self.url is not None)
        assert(type(self.url) is str)
        assert(self.query is not None)
        assert(type(self.query) is str)

        data = {
            'query': self.query
        }

        if self.variables is not None and type(self.variables) is dict:
            data['variables'] = self.variables

        encoded_data = json.dumps(data).encode('utf-8')

        http = urllib3.PoolManager()

        response = http.request(
            'POST',
            self.url,
            body=encoded_data,
            headers={
                'Accept': 'application/json',
                'Content-type': 'application/json'
            }
        )

        try:
            return json.loads(response.data.decode('utf-8'))
        except ValueError:
            # Narrowed from a bare ``except:`` so KeyboardInterrupt and
            # programming errors propagate; ValueError covers both
            # json.JSONDecodeError and UnicodeDecodeError.
            return {
                'errors': [
                    {
                        'status': response.status,
                        'message': 'Failed to contact GraphQL endpoint. Is "{}" the correct URL?'.format(self.url)
                    }
                ]
            }
Example 39
Project: incubator-spot   Author: apache   File: flow.py    Apache License 2.0 5 votes vote down vote up
def sc_geo(ip,date):
    """Load the globe/geo JSON for *ip* on *date* from the flow storyboard in HDFS.

    Returns the decoded document, or an empty dict when the file is absent.
    """
    safe_ip = ip.replace(".", "_")
    root = Configuration.spot()
    json_file = "globe-{0}.json".format(safe_ip)
    folder = "{0}/flow/oa/storyboard/{1}/{2}/{3}/{4}".format(
        root, date.year, date.month, date.day, safe_ip)

    if not HDFSClient.file_exists(folder, json_file):
        return {}
    return json.loads(HDFSClient.get_file("{0}/{1}".format(folder, json_file)))
Example 40
Project: incubator-spot   Author: apache   File: flow.py    Apache License 2.0 5 votes vote down vote up
def impact_analysis(ip,date):
    """Load the impact-analysis stats JSON for *ip* on *date* from the flow storyboard.

    Returns the decoded document, or an empty dict when no stats file exists.
    """
    safe_ip = ip.replace(".", "_")
    root = Configuration.spot()
    stats_file = "stats-{0}.json".format(safe_ip)
    folder = "{0}/flow/oa/storyboard/{1}/{2}/{3}/{4}".format(
        root, date.year, date.month, date.day, safe_ip)

    if not HDFSClient.file_exists(folder, stats_file):
        return {}
    return json.loads(HDFSClient.get_file("{0}/{1}".format(folder, stats_file)))
Example 41
Project: incubator-spot   Author: apache   File: proxy.py    Apache License 2.0 5 votes vote down vote up
def incident_progression(date,uri):
    """Load the incident-progression JSON for *uri* on *date* from the proxy storyboard.

    Returns the decoded document, or an empty dict when the file is absent.
    """
    import hashlib  # the ``md5`` module is Python-2-only and long deprecated

    app_path = Configuration.spot()
    hdfs_path = "{0}/proxy/oa/storyboard/{1}/{2}/{3}".format(app_path,\
        date.year,date.month,date.day)

    # MD5 is used only to build a stable file name, not for security.
    # encode() keeps this working on Python 3; ASCII URIs hash identically
    # to the old md5.new(str(uri)) on Python 2.
    hash_name = hashlib.md5(str(uri).encode('utf-8')).hexdigest()
    file_name = "incident-progression-{0}.json".format(hash_name)

    if HDFSClient.file_exists(hdfs_path,file_name):
        return json.loads(HDFSClient.get_file("{0}/{1}"\
        .format(hdfs_path,file_name)))
    else:
        return {}
Example 42
Project: incubator-spot   Author: apache   File: flow_oa.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self,date,limit,logger): 
        """Initialize Flow OA state: logger, result buffers, and configuration.

        :param date: analysis date the OA process runs for.
        :param limit: maximum number of suspicious connections to process.
        :param logger: existing logger to reuse; a new one is created when falsy.
        """
        # get logger if exists. if not, create new instance.
        self._logger = logging.getLogger('OA.Flow') if logger else Util.get_logger('OA.Flow',create_file=False)

        # initialize required parameters.
        # NOTE(review): "_scrtip_path" is a long-standing typo for "script path";
        # kept because sibling methods reference the attribute by this name.
        self._scrtip_path = os.path.dirname(os.path.abspath(__file__))
        self._date = date
        self._table_name = "flow"
        self._flow_results = []
        self._limit = limit
        self._data_path = None
        self._ipynb_path = None
        self._ingest_summary_path = None
        self._flow_scores = []
        self._results_delimiter = '\t'

        # get app configuration.
        self._spot_conf = Util.get_spot_conf()

        # get scores fields conf; OrderedDict preserves the column order from
        # the JSON file.  The context manager closes the handle (the previous
        # open(...).read() leaked the descriptor).
        conf_file = "{0}/flow_conf.json".format(self._scrtip_path)
        with open(conf_file) as conf_fh:
            self._conf = json.loads(conf_fh.read(), object_pairs_hook=OrderedDict)

        # initialize data engine
        self._db = self._spot_conf.get('conf', 'DBNAME').replace("'", "").replace('"', '')
Example 43
Project: incubator-spot   Author: apache   File: proxy_oa.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self,date,limit,logger):
        """Initialize Proxy OA state: logger, result buffers, and configuration.

        :param date: analysis date the OA process runs for.
        :param limit: maximum number of suspicious connections to process.
        :param logger: existing logger to reuse; a new one is created when falsy.
        """
        # get logger if exists. if not, create new instance.
        self._logger = logging.getLogger('OA.PROXY') if logger else Util.get_logger('OA.PROXY',create_file=False)

        # initialize required parameters.
        # NOTE(review): "_scrtip_path" is a long-standing typo for "script path";
        # kept because sibling methods reference the attribute by this name.
        self._scrtip_path = os.path.dirname(os.path.abspath(__file__))
        self._date = date
        self._table_name = "proxy"
        self._proxy_results = []
        self._limit = limit
        self._data_path = None
        self._ipynb_path = None
        self._ingest_summary_path = None
        self._proxy_scores = []
        self._proxy_scores_headers = []
        self._proxy_extra_columns = []
        self._results_delimiter = '\t'

        # get app configuration.
        self._spot_conf = Util.get_spot_conf()

        # get scores fields conf; OrderedDict preserves the column order from
        # the JSON file.  The context manager closes the handle (the previous
        # open(...).read() leaked the descriptor).
        conf_file = "{0}/proxy_conf.json".format(self._scrtip_path)
        with open(conf_file) as conf_fh:
            self._conf = json.loads(conf_fh.read(), object_pairs_hook=OrderedDict)

        # initialize data engine
        self._db = self._spot_conf.get('conf', 'DBNAME').replace("'", "").replace('"', '')
Example 44
Project: incubator-spot   Author: apache   File: proxy_oa.py    Apache License 2.0 5 votes vote down vote up
def _add_network_context(self):
        """Append a network-context column to every row in self._proxy_scores.

        When no NC configuration file exists, an empty-string column is added
        instead so downstream column indexes stay aligned.
        """
        nc_conf_file = "{0}/components/nc/nc_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        if os.path.isfile(nc_conf_file):
            # Context manager closes the config file (open(...).read() leaked it).
            with open(nc_conf_file) as nc_fh:
                nc_conf = json.loads(nc_fh.read())["NC"]
            proxy_nc = NetworkContext(nc_conf,self._logger)
            ip_dst_index = self._conf["proxy_score_fields"]["clientip"]
            self._proxy_scores = [ conn + [proxy_nc.get_nc(conn[ip_dst_index])] for conn in self._proxy_scores ]
        else:
            self._proxy_scores = [ conn + [""] for conn in self._proxy_scores ]
Example 45
Project: incubator-spot   Author: apache   File: dns_oa.py    Apache License 2.0 5 votes vote down vote up
def _initialize_members(self,date,limit,logger):
        """Initialize DNS OA state: logger, result buffers, and configuration.

        :param date: analysis date the OA process runs for.
        :param limit: maximum number of suspicious connections to process.
        :param logger: existing logger to reuse; a new one is created when falsy.
        """
        # get logger if exists. if not, create new instance.
        self._logger = logging.getLogger('OA.DNS') if logger else Util.get_logger('OA.DNS',create_file=False)

        # initialize required parameters.
        # NOTE(review): "_scrtip_path" is a long-standing typo for "script path";
        # kept because sibling methods reference the attribute by this name.
        self._scrtip_path = os.path.dirname(os.path.abspath(__file__))
        self._date = date
        self._table_name = "dns"
        self._dns_results = []
        self._limit = limit
        self._data_path = None
        self._ipynb_path = None
        self._ingest_summary_path = None
        self._dns_scores = []
        self._dns_scores_headers = []
        self._results_delimiter = '\t'
        # cap on raw detail rows fetched per suspicious query
        self._details_limit = 250

        # get app configuration.
        self._spot_conf = Util.get_spot_conf()

        # get scores fields conf; OrderedDict preserves the column order from
        # the JSON file.  The context manager closes the handle (the previous
        # open(...).read() leaked the descriptor).
        conf_file = "{0}/dns_conf.json".format(self._scrtip_path)
        with open(conf_file) as conf_fh:
            self._conf = json.loads(conf_fh.read(), object_pairs_hook=OrderedDict)

        # initialize data engine
        self._db = self._spot_conf.get('conf', 'DBNAME').replace("'", "").replace('"', '')
Example 46
Project: incubator-spot   Author: apache   File: dns_oa.py    Apache License 2.0 5 votes vote down vote up
def _add_iana(self):
        """Append IANA translations (query class, type, rcode) to each DNS score row.

        When no IANA configuration exists, three empty columns are appended so
        downstream column indexes stay aligned.
        """
        iana_conf_file = "{0}/components/iana/iana_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        if os.path.isfile(iana_conf_file):
            # Context manager closes the config file (open(...).read() leaked it).
            with open(iana_conf_file) as iana_fh:
                iana_config = json.loads(iana_fh.read())
            dns_iana = IanaTransform(iana_config["IANA"])

            dns_qry_class_index = self._conf["dns_results_fields"]["dns_qry_class"]
            dns_qry_type_index = self._conf["dns_results_fields"]["dns_qry_type"]
            dns_qry_rcode_index = self._conf["dns_results_fields"]["dns_qry_rcode"]
            self._dns_scores = [ conn + [ dns_iana.get_name(conn[dns_qry_class_index],"dns_qry_class")] + [dns_iana.get_name(conn[dns_qry_type_index],"dns_qry_type")] + [dns_iana.get_name(conn[dns_qry_rcode_index],"dns_qry_rcode")] for conn in self._dns_scores ]
        else:
            self._dns_scores = [ conn + ["","",""] for conn in self._dns_scores ]
Example 47
Project: incubator-spot   Author: apache   File: dns_oa.py    Apache License 2.0 5 votes vote down vote up
def _get_dns_details(self,dns_qry_name,year,month,day,hh,dns_iana):
        """Query raw DNS records matching *dns_qry_name* for one hour, enrich
        them with IANA translations and network context, and insert the result
        into the dns_edge table.

        :param dns_qry_name: DNS query name to look up (used in a LIKE filter).
        :param year: partition year of the source table.
        :param month: partition month.
        :param day: partition day.
        :param hh: hour partition to scan.
        :param dns_iana: IanaTransform instance, or falsy to skip translation.
        """
        value_string = ""
        # NOTE(review): Impala offers no parameter binding here, so the query is
        # built with string formatting; dns_qry_name must be sanitized upstream.
        query_to_load =("""
            SELECT unix_tstamp,frame_len,ip_dst,ip_src,dns_qry_name,dns_qry_class,dns_qry_type,dns_qry_rcode,dns_a,h as hh
            FROM {0}.{1} WHERE y={2} AND m={3} AND d={4} AND dns_qry_name LIKE '%{5}%' AND h={6} LIMIT {7};
        """).format(self._db,self._table_name,year,month,day,dns_qry_name,hh,self._details_limit)

        try:
            dns_details = impala.execute_query(query_to_load)
        except Exception:
            # Narrowed from a bare ``except:`` so ctrl-C and programming errors
            # propagate; message typo ("retreived") also fixed.
            self._logger.info("WARNING. Details couldn't be retrieved for {0}, skipping this step".format(dns_qry_name))
        else:
            # add IANA translations to the query results.
            if dns_iana:
                self._logger.info("Adding IANA translation to details results")
                dns_details = [ conn + (dns_iana.get_name(str(conn[5]),"dns_qry_class"),dns_iana.get_name(str(conn[6]),"dns_qry_type"),dns_iana.get_name(str(conn[7]),"dns_qry_rcode")) for conn in dns_details ]
            else:
                self._logger.info("WARNING: NO IANA configured.")
                dns_details = [ conn + ("","","") for conn in dns_details ]

            # add network context for the destination IP (column 2), or a 0
            # placeholder so row arity stays constant.
            nc_conf_file = "{0}/components/nc/nc_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
            if os.path.isfile(nc_conf_file):
                # Context manager closes the config file (open(...).read() leaked it).
                with open(nc_conf_file) as nc_fh:
                    nc_conf = json.loads(nc_fh.read())["NC"]
                dns_nc = NetworkContext(nc_conf,self._logger)
                dns_details = [ conn + (dns_nc.get_nc(conn[2]),) for conn in dns_details ]
            else:
                dns_details = [ conn + (0,) for conn in dns_details ]

            # serialize each row as a SQL tuple literal and batch-insert.
            for row in dns_details:
                value_string += str(tuple(item for item in row)) + ","

            if value_string != "":
                query_to_insert=("""
                    INSERT INTO {0}.dns_edge PARTITION (y={1}, m={2}, d={3}) VALUES ({4});
                """).format(self._db,year, month, day,  value_string[:-1])

                impala.execute_query(query_to_insert)
Example 48
Project: incubator-spot   Author: apache   File: data.py    Apache License 2.0 5 votes vote down vote up
def _initialize_engine(self,db, pipeline):
        """Load engine.json, import the configured data-engine module and start it.

        :param db: database name handed to the engine.
        :param pipeline: pipeline identifier handed to the engine.
        """
        # read engine configuration.
        data_conf_file = "{0}/engine.json".format(os.path.dirname(os.path.abspath(__file__)))

        self._logger.info("Reading data component configuration: {0}".format(data_conf_file))
        # Context manager closes the file (open(...).read() leaked the handle).
        with open(data_conf_file) as conf_fh:
            self._engine_conf = json.loads(conf_fh.read())
        self._engine_name = self._engine_conf["oa_data_engine"]

        # import configured data engine (log-message typo "Initializating" fixed).
        self._logger.info("Initializing {0} instance".format(self._engine_name))
        module = __import__("components.data.{0}".format(self._engine_name),fromlist=['Engine'])

        # start data engine with configuration.
        self._engine = module.Engine(db,self._engine_conf[self._engine_name], pipeline)
Example 49
Project: Coulomb   Author: DynamoDS   File: features_JSON.py    MIT License 5 votes vote down vote up
def getVersion(b64decodedData):
    """Return the Dynamo version string recorded in a workspace JSON document,
    or None when the document has no "View" section.
    """
    json_map = json.loads(b64decodedData)
    # dict.has_key() is Python-2-only; "in" works on both 2 and 3.
    if "View" not in json_map:
        return None
    # Reuse the already-parsed document instead of parsing the input twice.
    return json_map["View"]["Dynamo"]["Version"]
Example 50
Project: Coulomb   Author: DynamoDS   File: features_JSON.py    MIT License 5 votes vote down vote up
def getVersion(b64decodedData):
    """Return the Dynamo version stored under View/Dynamo/Version, or None if absent."""
    document = json.loads(b64decodedData)
    if "View" in document:
        return document["View"]["Dynamo"]["Version"]
    return None
Example 51
Project: Coulomb   Author: DynamoDS   File: features_JSON.py    MIT License 5 votes vote down vote up
def hasCodeBlockNode(data):
    """Return True when any node in the graph JSON is a Dynamo code-block node."""
    target = "Dynamo.Graph.Nodes.CodeBlockNodeModel, DynamoCore"
    nodes = json.loads(data)["Nodes"]
    return any(node["ConcreteType"] == target for node in nodes)
Example 52
Project: Coulomb   Author: DynamoDS   File: features_JSON.py    MIT License 5 votes vote down vote up
def hasPythonFunction(data):
    """Return True when any node in the graph JSON is a Python node."""
    target = "PythonNodeModels.PythonNode, PythonNodeModels"
    nodes = json.loads(data)["Nodes"]
    return any(node["ConcreteType"] == target for node in nodes)
Example 53
Project: Coulomb   Author: DynamoDS   File: features_JSON.py    MIT License 5 votes vote down vote up
def hasCustomFunction(data):
    """Return True when any node in the graph JSON is a custom-function node."""
    target = "Dynamo.Graph.Nodes.CustomNodes.Function, DynamoCore"
    nodes = json.loads(data)["Nodes"]
    return any(node["ConcreteType"] == target for node in nodes)
Example 54
Project: mycode   Author: gmraabe   File: inventory.py    GNU General Public License v3.0 5 votes vote down vote up
def new_entry(new_item, new_loc):
    """Add *new_item* -> *new_loc* to the inventory file and confirm to the user."""
    # Load the current inventory, extend it, then persist it back to disk.
    with open(filename, 'r') as inv_file:
        inventory = json.loads(inv_file.read())
    inventory[new_item] = new_loc
    with open(filename, 'w') as inv_file:
        inv_file.write(json.dumps(inventory))
    print(new_item + ' added to inventory')
    input('Press enter to continue')
Example 55
Project: mycode   Author: gmraabe   File: inventory.py    GNU General Public License v3.0 5 votes vote down vote up
def print_entrys():
    """Print every inventory item with its location, then wait for the user."""
    print('\nItem\t\tLocation')
    print('----\t\t--------')
    with open(filename, 'r') as inv_file:
        inventory = json.loads(inv_file.read())
    for item_name, item_loc in inventory.items():
        print(item_name + '\t\t' + item_loc)
    input('\nPress enter to continue')
Example 56
Project: mycode   Author: gmraabe   File: iss_tracking.py    GNU General Public License v3.0 5 votes vote down vote up
def issloc():
    """Fetch the current ISS position from api.open-notify.org.

    :returns: the decoded JSON payload as a dict.
    """
    ## Trace the ISS - earth-orbital space station
    eoss = 'http://api.open-notify.org/iss-now.json'
    ## Call the web service; the with-block closes the HTTP response
    ## (previously the connection was never closed).
    with urllib.request.urlopen(eoss) as trackiss:
        ztrack = trackiss.read()
    ## json 2 python data structure
    result = json.loads(ztrack.decode('utf-8'))
    return result
Example 57
Project: chainer-openai-transformer-lm   Author: soskek   File: analysis.py    MIT License 5 votes vote down vote up
def rocstories(data_dir, pred_path, log_path):
    """Print ROCStories validation and test accuracy.

    :param data_dir: directory containing the cloze test CSV.
    :param pred_path: TSV file with a 'prediction' column.
    :param log_path: JSON-lines training log; the first line is a header.
    """
    preds = pd.read_csv(pred_path, delimiter='\t')[
        'prediction'].values.tolist()
    _, _, _, labels = _rocstories(os.path.join(
        data_dir, 'cloze_test_test__spring2016 - cloze_test_ALL_test.csv'))
    test_accuracy = accuracy_score(labels, preds) * 100.
    # Close the log file deterministically; [1:] skips the header line.
    with open(log_path) as log_file:
        logs = [json.loads(line) for line in log_file][1:]
    best_validation_index = np.argmax([log['va_acc'] for log in logs])
    valid_accuracy = logs[best_validation_index]['va_acc']
    print('ROCStories Valid Accuracy: %.2f' % (valid_accuracy))
    print('ROCStories Test Accuracy:  %.2f' % (test_accuracy))
Example 58
Project: chainer-openai-transformer-lm   Author: soskek   File: analysis.py    MIT License 5 votes vote down vote up
def sst(data_dir, pred_path, log_path):
    """Print SST validation and test accuracy.

    :param data_dir: unused; kept for signature parity with the other tasks.
    :param pred_path: TSV file with a 'prediction' column.
    :param log_path: JSON-lines training log; the first line is a header.
    """
    preds = pd.read_csv(pred_path, delimiter='\t')[
        'prediction'].values.tolist()
    test_url = 'https://raw.githubusercontent.com/harvardnlp/sent-conv-torch/master/data/stsa.binary.test'
    path = chainer.dataset.cached_download(test_url)
    teX, teY = _sst(path)
    labels = teY
    test_accuracy = accuracy_score(labels, preds) * 100.
    # Close the log file deterministically; [1:] skips the header line.
    with open(log_path) as log_file:
        logs = [json.loads(line) for line in log_file][1:]
    best_validation_index = np.argmax([log['va_acc'] for log in logs])
    valid_accuracy = logs[best_validation_index]['va_acc']
    print('SST Valid Accuracy: %.2f' % (valid_accuracy))
    print('SST Test Accuracy:  %.2f' % (test_accuracy))
Example 59
Project: invenio-openaire   Author: inveniosoftware   File: test_rest.py    MIT License 5 votes vote down vote up
def _get_json(response, code=None):
    """Decode JSON from response."""
    data = response.get_data(as_text=True)
    if code is not None:
        assert response.status_code == code, data
    return json.loads(data) 
Example 60
Project: invenio-openaire   Author: inveniosoftware   File: loaders.py    MIT License 5 votes vote down vote up
def iter_grants(self, as_json=True):
        """Yield grant records from the SQLite database.

        With ``as_json`` true, XML rows are converted and JSON rows parsed
        before yielding; with it false, raw payloads are yielded, except that
        JSON-format rows cannot become XML and raise instead.
        """
        self._connect()
        cursor = self.db_connection.cursor()
        rows = cursor.execute("SELECT data, format FROM grants")
        for payload, payload_format in rows:
            if as_json:
                if payload_format == 'xml':
                    payload = self.grantxml2json(payload)
                elif payload_format == 'json':
                    payload = json.loads(payload)
            elif payload_format == 'json':
                raise Exception("Cannot convert JSON source to XML output.")
            yield payload
        self._disconnect()
Example 61
Project: alfred-yubikey-otp   Author: robertoriv   File: web.py    MIT License 5 votes vote down vote up
def json(self):
        """Decode response contents as JSON.

        :returns: object decoded from JSON
        :rtype: list, dict or unicode

        """
        # json.loads() dropped its ``encoding`` parameter in Python 3.9, so
        # passing self.encoding positionally raised TypeError there; decode
        # the raw bytes explicitly instead.
        content = self.content
        if isinstance(content, bytes):
            content = content.decode(self.encoding or 'utf-8')
        return json.loads(content)
Example 62
Project: hydrus   Author: HTTP-APIs   File: resources.py    MIT License 5 votes vote down vote up
def put(self, id_: str, path: str) -> Response:
        """Add a new object with the given <id_> using HTTP PUT.

        :param id_: ID of the Item to be added.
        :param path: collection path for the Item type (as specified in the
            APIDoc ``@id``).
        :returns: 201 with a Location header on success, 400 for invalid
            payloads, the auth error response when authentication fails, or
            aborts with 405 when the class does not support PUT.
        """
        id_ = str(id_)
        # Authentication failure is signalled by getting a Response back.
        auth_response = check_authentication_response()
        if isinstance(auth_response, Response):
            return auth_response

        class_type = get_doc().collections[path]["collection"].class_.title
        # Get path of the collection-class
        class_path = get_doc().collections[path]["collection"].class_.path
        if checkClassOp(class_path, "PUT"):
            # Check if class_type supports PUT operation
            object_ = json.loads(request.data.decode('utf-8'))
            obj_type = getType(class_path, "PUT")
            link_props, link_type_check = get_link_props(class_path, object_)
            # Load new object and type; the payload must validate, match the
            # expected @type, carry all required properties and valid links.
            if validObject(object_) and object_["@type"] == obj_type and check_required_props(
                    class_path, object_) and link_type_check:
                try:
                    # Add the object with given ID
                    object_id = crud.insert(object_=object_, id_=id_,
                                            link_props=link_props, session=get_session())
                    # Location header points at the newly created resource.
                    headers_ = [{"Location": "{}{}/{}/{}".format(
                        get_hydrus_server_url(), get_api_name(), path, object_id)}]
                    status_description = "Object with ID {} successfully added".format(object_id)
                    status = HydraStatus(code=201, title="Object successfully added.",
                                         desc=status_description)
                    return set_response_headers(
                        jsonify(status.generate()), headers=headers_, status_code=status.code)
                except (ClassNotFound, InstanceExists, PropertyNotFound) as e:
                    # Known CRUD failures map onto structured hydra errors.
                    error = e.get_HTTP()
                    return set_response_headers(jsonify(error.generate()), status_code=error.code)
            else:
                error = HydraError(code=400, title="Data is not valid")
                return set_response_headers(jsonify(error.generate()), status_code=error.code)
        else:
            # Class does not allow PUT at all.
            abort(405)
Example 63
Project: hydrus   Author: HTTP-APIs   File: test_auth.py    MIT License 5 votes vote down vote up
def test_wrongID_GET(self):
        """GET with a forged authentication ID must be rejected with 401 or 400."""
        index = self.client.get("/{}".format(self.API_NAME))
        endpoints = json.loads(index.data.decode('utf-8'))
        for name, route in endpoints.items():
            if name not in self.doc.collections:
                continue
            # Fetch a fresh nonce first, then replay it with the wrong ID.
            probe = self.client.get(route)
            self.wrong_id['X-Authentication'] = probe.headers['X-Authentication']
            denied = self.client.get(route, headers=self.wrong_id)
            assert denied.status_code in (401, 400)
Example 64
Project: hydrus   Author: HTTP-APIs   File: test_auth.py    MIT License 5 votes vote down vote up
def test_wrongID_POST(self):
        """POST with a forged authentication ID must be rejected with 401 or 400."""
        index = self.client.get("/{}".format(self.API_NAME))
        endpoints = json.loads(index.data.decode('utf-8'))
        for name, route in endpoints.items():
            if name not in self.doc.collections:
                continue
            # Fetch a fresh nonce first, then replay it with the wrong ID.
            probe = self.client.get(route)
            self.wrong_id['X-Authentication'] = probe.headers['X-Authentication']
            denied = self.client.post(
                route, headers=self.wrong_id, data=json.dumps(dict(foo="bar")))
            assert denied.status_code in (401, 400)
Example 65
Project: hydrus   Author: HTTP-APIs   File: test_auth.py    MIT License 5 votes vote down vote up
def test_wrongPass_POST(self):
        """POST with a wrong password must be rejected with 401."""
        index = self.client.get("/{}".format(self.API_NAME))
        endpoints = json.loads(index.data.decode('utf-8'))
        for name, route in endpoints.items():
            if name not in self.doc.collections:
                continue
            # Fetch a fresh nonce first, then replay it with the wrong password.
            probe = self.client.get(route)
            self.wrong_pass['X-Authentication'] = probe.headers['X-Authentication']
            denied = self.client.post(
                route, headers=self.wrong_pass, data=json.dumps(dict(foo="bar")))
            assert denied.status_code == 401
Example 66
Project: hydrus   Author: HTTP-APIs   File: test_auth.py    MIT License 5 votes vote down vote up
def test_wrong_nonce_get(self):
        """GET with a bogus nonce in X-authentication must yield 401."""
        index = self.client.get("/{}".format(self.API_NAME))
        endpoints = json.loads(index.data.decode('utf-8'))
        for name, route in endpoints.items():
            if name not in self.doc.collections:
                continue
            self.auth_header['X-authentication'] = "random-string"
            denied = self.client.get(route, headers=self.auth_header)
            assert denied.status_code == 401
Example 67
Project: hydrus   Author: HTTP-APIs   File: test_auth.py    MIT License 5 votes vote down vote up
def test_wrong_nonce_post(self):
        """POST with a bogus nonce in X-authentication must yield 401."""
        index = self.client.get("/{}".format(self.API_NAME))
        endpoints = json.loads(index.data.decode('utf-8'))
        for name, route in endpoints.items():
            if name not in self.doc.collections:
                continue
            self.auth_header['X-authentication'] = "random-string"
            denied = self.client.post(
                route, headers=self.auth_header, data=json.dumps(dict(foo="bar")))
            assert denied.status_code == 401
Example 68
Project: hydrus   Author: HTTP-APIs   File: test_auth.py    MIT License 5 votes vote down vote up
def test_Auth_GET(self):
        """GET with a freshly issued nonce must not be rejected as unauthorized."""
        index = self.client.get("/{}".format(self.API_NAME))
        endpoints = json.loads(index.data.decode('utf-8'))
        for name, route in endpoints.items():
            if name not in self.doc.collections:
                continue
            # Obtain a valid nonce, then authenticate with it.
            probe = self.client.get(route)
            self.auth_header['X-Authentication'] = probe.headers['X-Authentication']
            allowed = self.client.get(route, headers=self.auth_header)
            assert allowed.status_code != 401
Example 69
Project: NordVPN-NetworkManager-Gui   Author: vfosterm   File: nord_nm_gui.py    GNU General Public License v3.0 4 votes vote down vote up
def verify_credentials(self):
        """
        Requests a token, salt and key from Nord api
        Sends a final hash of (salt+password)+key and token to Nord api
        Verifies responses and updates GUI
        """
        if self.user_input.text() and self.password_input.text():
            self.statusbar.showMessage('Login Success', 2000)
            self.username = self.user_input.text()
            self.password = self.password_input.text()
            self.repaint()
            time.sleep(0.5)
            self.hide()
            self.main_ui()
        else:
            # Bug fix: the 2000 ms timeout used to be inside the string
            # literal ('... cannot be empty, 2000'), so showMessage received
            # no timeout and displayed the number as part of the message.
            self.statusbar.showMessage('Username or password field cannot be empty', 2000)
        # NOTE(review): the real server-side verification against
        # api.nordvpn.com (token/salt/key fetch, sha512(salt+password)+key
        # hashing and /token/verify round-trip) was left here commented out
        # and had become garbled; it was removed for readability. Restore it
        # from version-control history if server-side verification is
        # reintroduced.
Example 70
Project: autolims   Author: scottbecker   File: test_autoprotocol_interpreter.py    MIT License 4 votes vote down vote up
def test_existing_containers(self):
        """End-to-end protocol run that references pre-existing inventory.

        Creates a bacteria tube holding 115 uL, executes the
        pellet_bacteria protocol against it, and verifies container counts,
        the updated source volume, the growth-plate aliquots, and that the
        disposable containers end up destroyed.
        """
        # create existing containers to be referenced

        existing_container = Container.objects.create(container_type_id = 'micro-1.5',
                                                      label = 'bacteria_tube',
                                                      test_mode = False,
                                                      storage_condition = Temperature.cold_80.name,
                                                      status = 'available',
                                                      organization = self.org
                                                      )
        # seed aliquot in well 0 with the starting volume the protocol consumes
        existing_aq = Aliquot.objects.create(container = existing_container,
                               well_idx = 0,
                               volume_ul = "115")
            
        #same as https://secure.transcriptic.com/becker-lab/p19aqhcbep8ea/runs/r19uqqkmr5u8f
        with open(os.path.join(os.path.dirname(__file__),'data','pellet_bacteria.json')) as f:
            protocol = json.loads(f.read())             

        #update the protocol to reference the correct id post import

        protocol['refs']['bacteria_tube']['id'] = existing_container.id

        run = Run.objects.create(title='Real Run',
                                 test_mode=False,
                                 protocol=protocol,
                                 project = self.project,
                                 owner=self.user)
        assert isinstance(run, Run)
    
        execute_run(run)

        # four containers should be associated with the run afterwards
        self.assertEqual(run.containers.count(),4)        

        #ensure that volumes on existing inventory are updated  (40.12)
        
        # re-fetch: execute_run mutated the row in the database
        existing_aq = Aliquot.objects.get(id=existing_aq.id)
        
        self.assertEqual(Decimal(existing_aq.volume_ul), Decimal('40.12'))
        
        #ensure all aliquots on the growth plate were made (32) and have the same volume (15ul)
        
        growth_plate = run.containers.get(label='growth_plate')
        
        self.assertEqual(growth_plate.aliquots.count(),4*8)
        
        self.assertTrue(all([Decimal(aq.volume_ul)==Decimal('15') for aq in growth_plate.aliquots.all()]))
        
        #ensure containers are discarded
        
        destroyed_containers = Container.objects.filter(run_container__run_id = run.id,
                                                        run_container__container_label__in = ['absorbance_plate',
                                                                                              'bacteria_tube',
                                                                                              'trash_plate'])
        
        self.assertEqual(destroyed_containers.count(),3)
        
        self.assertTrue(all([container.status=='destroyed' for container in destroyed_containers]))
Example 71
Project: weibo-login   Author: littlepinecone   File: login.py    GNU General Public License v3.0 4 votes vote down vote up
def getcookies(user, passwd):
    # 获取验证码
    sign = random.random()
    url = "https://captcha.weibo.com/api/pattern/get?ver=daf139fb2696a4540b298756bd06266a&source=ssologin&usrname=" + user + "&line=160&side=100&radius=30&_rnd=" + str(
        sign) + "&callback=pl_cb"
    r = requests.get(url)
    imgdata = json.loads(r.text.replace("pl_cb(", '').replace(")", ''))['path_enc']
    id = json.loads(r.text.replace("pl_cb(", '').replace(")", ''))['id']
    recombinePattern(imgdata)
    data_enc = pathdataEncode(path_generate(patterntohash()))
    path_enc = pathEncode(patterntohash(), id)

    url2 = "https://captcha.weibo.com/api/pattern/verify?ver=daf139fb2696a4540b298756bd06266a&id=" + id + "&usrname=" + user + "&source=ssologin&path_enc=" + path_enc + "&data_enc=" + data_enc + "&callback=pl_cb"
    url3 = 'https://passport.weibo.cn/sso/login'
    # 必要的等待时间
    time.sleep(1)
    # 验证验证码
    session = requests.Session()
    r2 = session.get(url2)
    # print r2.headers
    print json.loads(r2.text.replace("pl_cb(", '').replace(")", ''))['msg']
    # print id

    formdata = {'username': user,
                'password': passwd,
                'savestate': '1',
                'ec': '0',
                'entry': 'mweibo',
                'mainpageflag': '1',
                'vid': id,
                'wentry': '',
                'loginfrom': '',
                'client_id': '',
                'code:qq': '',
                'r': '',
                'pagerefer': '',
                'hff': '',
                'hfp': ''}

    # print formdata['vid']
    # 登录
    r3 = session.post(url3, data=formdata, headers=headers3)
    cookies_url = r3.headers['Set-Cookie']
    print json.loads(r3.content)['msg']
    return {k.split('=')[0]: k.split('=')[1] for k in cookies_url.split(';')}

    # r4 = requests.get('https://m.weibo.cn/')
    # print r4.headers['Set-Cookie'] 
Example 72
Project: incubator-spot | Author: apache | File: start_listener.py | License: Apache License 2.0
def main():
    '''
        Main command-line entry point.

        Builds the keyword-argument state for the Spark Streaming listener
        job from the command line and the JSON configuration file, then
        submits the job.
    '''
    state = {}

    try:
        args = parse_args()
        conf = json.loads(args.config_file.read())

        # .............................check kerberos authentication
        if os.getenv('KRB_AUTH'):
            kb = Kerberos()
            kb.authenticate()

        state.update(**args.__dict__)

        # .............................add Spark Streaming parameters
        # Skip null entries; strip string values and drop the empty ones.
        # (Was ``== None`` — identity comparison is the correct idiom.)
        for key, value in conf['spark-streaming'].items():
            if value is None:
                continue

            if isinstance(value, basestring):
                value = value.strip()
                if value:
                    state[key] = value
                continue
            state[key] = value

        # .............................add files to place on the PYTHONPATH
        state['py_files'] = ','.join([os.path.abspath(os.path.join('dist', x)) for x in os.listdir('dist')])

        # .............................add database name
        state['database'] = conf['dbname']

        # .............................add zookeeper's connection string
        state['zkquorum'] = '{0}:{1}'.format(conf['kafka']['zookeper_server'],
                                        conf['kafka']['zookeper_port'])

        spark_job('common/listener.py', **state)

    except SystemExit: raise
    except:
        # Top-level boundary: print the traceback, then exit non-zero.
        sys.excepthook(*sys.exc_info())
        sys.exit(1)
Example 73
Project: incubator-spot | Author: apache | File: collector.py | License: Apache License 2.0
def run(cls):
        '''
            Main command-line entry point.

            Loads the JSON config, prepares producer / file-watcher settings,
            then constructs and starts the collector.

        :param cls: The class as implicit first argument.
        '''
        try:
            args  = _parse_args()
            conf  = json.loads(args.config_file.read())

            # .........................set up logger
            Util.get_logger('SPOT', args.log_level)

            # .........................check kerberos authentication
            if os.getenv('KRB_AUTH'):
                kb = Kerberos()
                kb.authenticate()

            conf['producer'] = {
                'bootstrap_servers': ['{0}:{1}'
                    .format(conf['kafka']['kafka_server'], conf['kafka']['kafka_port'])]
            }

            conf['file_watcher'] = {
                'path': conf['pipelines'][args.type]['collector_path'],
                'supported_files': conf['pipelines'][args.type]['supported_files'],
                'recursive': True
            }

            # .........................migrate configs
            # Fill defaults that older configuration files may be missing.
            pipeline_conf = conf['pipelines'][args.type]
            pipeline_conf.setdefault('local_staging', '/tmp')
            pipeline_conf.setdefault('process_opt', '')

            if 'max_request_size' in conf['kafka']:
                conf['producer']['max_request_size'] = conf['kafka']['max_request_size']

            if 'recursive' in pipeline_conf:
                conf['file_watcher']['recursive'] = pipeline_conf['recursive']

            collector = cls(args.type, args.topic, args.skip_conversion, **conf)
            collector.start()

        except SystemExit: raise
        except:
            sys.excepthook(*sys.exc_info())
            # Bug fix: was ``sys,exit(1)`` — a tuple expression, not a call —
            # so the process never actually exited with an error code.
            sys.exit(1)
Example 74
Project: incubator-spot | Author: apache | File: flow_oa.py | License: Apache License 2.0
def _add_reputation(self):
        """Append src-IP and dst-IP reputation columns to each flow score row.

        Reads reputation_config.json, instantiates every configured
        reputation service, and appends two columns (src reputation,
        dst reputation) to every row of ``self._flow_scores``. When no
        service is configured, empty strings are appended instead.

        Fix: the original body mixed tabs and spaces for indentation
        (a hard error under Python 3) and used the Python-2-only
        ``dict(a.items() + b.items())`` merge.
        """
        reputation_conf_file = "{0}/components/reputation/reputation_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

        # read configuration.
        self._logger.info("Reading reputation configuration file: {0}".format(reputation_conf_file))
        rep_conf = json.loads(open(reputation_conf_file).read())

        # initialize reputation services.
        self._rep_services = []
        self._logger.info("Initializing reputation services.")
        for service in rep_conf:
            config = rep_conf[service]
            module = __import__("components.reputation.{0}.{0}".format(service), fromlist=['Reputation'])
            self._rep_services.append(module.Reputation(config, self._logger))

        if self._rep_services:
            # get all src ips.
            src_ip_index = self._conf["flow_score_fields"]["srcIP"]
            dst_ip_index = self._conf["flow_score_fields"]["dstIP"]

            # getting reputation for src IPs
            src_ips = [conn[src_ip_index] for conn in self._flow_scores]
            self._logger.info("Getting reputation for each service in config")
            src_rep_results = {}
            for rep_service in self._rep_services:
                # Merge so that previously accumulated entries win on
                # duplicate IPs — same precedence as the original
                # dict(new.items() + old.items()) construction. In future
                # consider a weighted merge, or UX support for multiple
                # reputations per IP.
                merged = dict(rep_service.check(src_ips))
                merged.update(src_rep_results)
                src_rep_results = merged

            # getting reputation for dst IPs
            dst_ips = [conn[dst_ip_index] for conn in self._flow_scores]
            dst_rep_results = {}
            for rep_service in self._rep_services:
                merged = dict(rep_service.check(dst_ips))
                merged.update(dst_rep_results)
                dst_rep_results = merged

            self._flow_scores = [conn + [src_rep_results[conn[src_ip_index]]] + [dst_rep_results[conn[dst_ip_index]]]
                                 for conn in self._flow_scores]

        else:
            # add empty values to gtiSrcRep and gtiDstRep.
            self._flow_scores = [conn + ["", ""] for conn in self._flow_scores]
            self._logger.info("WARNING: IP reputation was not added. No refclient configured")
Example 75
Project: incubator-spot | Author: apache | File: proxy_oa.py | License: Apache License 2.0
def _add_reputation(self):
        """Append a reputation column to each proxy score row.

        Reads reputation_config.json, instantiates every configured
        reputation service, queries them for the values of the columns
        listed in ``self._conf["add_reputation"]``, and appends the merged
        reputation string (or "") to every row of ``self._proxy_scores``.
        """
        # read configuration.
        reputation_conf_file = "{0}/components/reputation/reputation_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        self._logger.info("Reading reputation configuration file: {0}".format(reputation_conf_file))
        rep_conf = json.loads(open(reputation_conf_file).read())

        # initialize reputation services.
        self._rep_services = []
        self._logger.info("Initializing reputation services.")
        for service in rep_conf:
            config = rep_conf[service]
            module = __import__("components.reputation.{0}.{0}".format(service), fromlist=['Reputation'])
            self._rep_services.append(module.Reputation(config, self._logger))

        # get columns for reputation.
        rep_cols = {}
        indexes = [int(value) for value in self._conf["add_reputation"].values()]
        self._logger.info("Getting columns to add reputation based on config file: proxy_conf.json")
        for index in indexes:
            # Unique values observed in this column across all rows.
            rep_cols[index] = list(set(conn[index] for conn in self._proxy_scores))

        # get reputation per column.
        self._logger.info("Getting reputation for each service in config")
        if self._rep_services:
            for key, value in rep_cols.items():
                rep_services_results = [rep_service.check(None, value, True) for rep_service in self._rep_services]
                rep_results = {}
                for result in rep_services_results:
                    # Merge results from all services into "svc1::svc2" strings.
                    rep_results = {k: "{0}::{1}".format(rep_results.get(k, ""), result.get(k, "")).strip('::') for k in set(rep_results) | set(result)}

                if rep_results:
                    # Robustness: use .get with "" default so a value missing
                    # from the reputation results no longer raises KeyError.
                    self._proxy_scores = [conn + [rep_results.get(conn[key], "")] for conn in self._proxy_scores]
                else:
                    self._proxy_scores = [conn + [""] for conn in self._proxy_scores]
        else:
            self._proxy_scores = [conn + [""] for conn in self._proxy_scores]
Example 76
Project: incubator-spot | Author: apache | File: dns_oa.py | License: Apache License 2.0
def _add_reputation(self):
        """Append a reputation column to each DNS score row.

        Reads reputation_config.json, instantiates every configured
        reputation service, queries them for the values of the columns
        listed in ``self._conf["add_reputation"]``, and appends the merged
        reputation string (or "") to every row of ``self._dns_scores``.
        """
        # read configuration.
        reputation_conf_file = "{0}/components/reputation/reputation_config.json".format(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
        self._logger.info("Reading reputation configuration file: {0}".format(reputation_conf_file))
        rep_conf = json.loads(open(reputation_conf_file).read())

        # initialize reputation services.
        self._rep_services = []
        self._logger.info("Initializing reputation services.")
        for service in rep_conf:
            config = rep_conf[service]
            module = __import__("components.reputation.{0}.{0}".format(service), fromlist=['Reputation'])
            self._rep_services.append(module.Reputation(config, self._logger))

        # get columns for reputation.
        rep_cols = {}
        indexes = [int(value) for value in self._conf["add_reputation"].values()]
        self._logger.info("Getting columns to add reputation based on config file: dns_conf.json")
        for index in indexes:
            # Unique values observed in this column across all rows.
            rep_cols[index] = list(set(conn[index] for conn in self._dns_scores))

        # get reputation per column.
        self._logger.info("Getting reputation for each service in config")
        if self._rep_services:
            for key, value in rep_cols.items():
                rep_services_results = [rep_service.check(None, value) for rep_service in self._rep_services]
                rep_results = {}
                for result in rep_services_results:
                    # Merge results from all services into "svc1::svc2" strings.
                    rep_results = {k: "{0}::{1}".format(rep_results.get(k, ""), result.get(k, "")).strip('::') for k in set(rep_results) | set(result)}

                if rep_results:
                    # Bug fix: the original did rep_results.get(key) — looking
                    # up the column *index* instead of the column value — so
                    # every row received None. Look up conn[key] (as the
                    # proxy OA twin does) and default to "".
                    self._dns_scores = [conn + [rep_results.get(conn[key], "")] for conn in self._dns_scores]
                else:
                    self._dns_scores = [conn + [""] for conn in self._dns_scores]
        else:
            self._dns_scores = [conn + [""] for conn in self._dns_scores]
Example 77
Project: incubator-spot | Author: apache | File: fb.py | License: Apache License 2.0
def _request_reputation(self, data, name):
        """Query the Facebook ThreatExchange batch API for reputation.

        :param data: batch request payload (list of request descriptors).
        :param name: indicator name used as the dict key when the response
            does not identify the indicator itself.
        :return: dict mapping indicator name -> reputation label; entries
            fall back to the UNKNOWN label on any request or parse failure.
        """
        reputation_dict = {}
        token = "{0}|{1}".format(self._fb_app_id, self._fb_app_secret)
        request_body = {
            'access_token': token,
            'batch': data
        }

        request_body = urllib.urlencode(request_body)

        url = "https://graph.facebook.com/"
        content_type = {'Content-Type': 'application/json'}
        request = urllib2.Request(url, request_body, content_type)

        try:
            str_response = urllib2.urlopen(request).read()
            response = json.loads(str_response)
        except urllib2.HTTPError as e:
            self._logger.info("Error calling ThreatExchange in module fb: " + e.message)
            reputation_dict[name] = self._get_reputation_label('UNKNOWN')
            return reputation_dict

        for row in response:
            if row is None:
                continue

            if row['code'] != 200:
                reputation_dict[name] = self._get_reputation_label('UNKNOWN')
                return reputation_dict
            if 'body' in row:
                try:
                    row_response = json.loads(row['body'])
                except ValueError as e:
                    self._logger.error("Error reading JSON body response in fb module: " + e.message)
                    # Bug fix: the original fell through and referenced the
                    # undefined ``row_response`` (NameError). Treat an
                    # unparseable body as UNKNOWN and move on.
                    reputation_dict[name] = self._get_reputation_label('UNKNOWN')
                    continue

                if 'data' in row_response and row_response['data'] != []:
                    row_response_data = row_response['data']
                    name = row_response_data[0]['indicator']['indicator']
                    reputation_dict[name] = self._get_reputation_label(row_response_data[0]['status'])
                else:
                    reputation_dict[name] = self._get_reputation_label('UNKNOWN')
            else:
                reputation_dict[name] = self._get_reputation_label('UNKNOWN')

        return reputation_dict
Example 78
Project: Coulomb | Author: DynamoDS | File: sessions_to_sorted_deduped_sessions.py | License: MIT License
def sort_blob(blob_name, sorted_blob_name, temp_path):
    """Download a gzipped session blob, dedupe and sort its JSON lines,
    upload the sorted copy, and remove the temporary files.

    Lines are deduplicated, parsed as JSON, and sorted by their integer
    ``MicroTime`` field. Files whose deduped content exceeds MAX_MEM
    (approximated with sys.getsizeof per line) are skipped.

    :param blob_name: source blob path in SESSIONS_BUCKET.
    :param sorted_blob_name: destination blob path for the sorted copy.
    :param temp_path: local directory for scratch files.
    :return: True on success, False when the file was skipped as too large.
    """
    log("Sorting: {} => {}".format(blob_name, sorted_blob_name))

    blob_session_name = blob_name.split('/')[-1]
    blob_session_name_sorted = sorted_blob_name.split('/')[-1]

    blob_path_to_proc = os.path.join(temp_path, blob_session_name)
    out_path = os.path.join(temp_path, blob_session_name_sorted)

    log("Downloading {} => {}".format(blob_name, blob_path_to_proc))
    download_blob(SESSIONS_BUCKET, blob_name, blob_path_to_proc)

    data_set = set()
    byte_counter = 0
    skip_file = False

    # Dedupe lines while tracking an approximate memory budget.
    # (Fix: the file handle was never closed; use a context manager.)
    with gzip.open(blob_path_to_proc) as f:
        for ln in f:
            data_set.add(ln)
            byte_counter += sys.getsizeof(ln)

            if byte_counter > MAX_MEM:
                skip_file = True
                log("Skipped large file: " + blob_path_to_proc)
                break

    if skip_file:
        os.remove(blob_path_to_proc)
        return False

    data = [json.loads(ln) for ln in data_set]
    data.sort(key=lambda x: int(x["MicroTime"]))

    with gzip.open(out_path, 'w') as sorted_f:
        for d in data:
            sorted_f.write((json.dumps(d) + "\n").encode('utf-8'))

    log("Sorted: {} => {}".format(blob_session_name, blob_session_name_sorted))
    upload_blob(SESSIONS_BUCKET, out_path, sorted_blob_name)

    log("About to remove: {}".format(blob_path_to_proc))
    os.remove(blob_path_to_proc)

    log("About to remove: {}".format(out_path))
    os.remove(out_path)

    return True
Example 79
Project: hydrus | Author: HTTP-APIs | File: resources.py | License: MIT License
def post(self, id_: str, path: str) -> Response:
        """Update object of type<path> at ID<id_> with new object_ using HTTP POST.

        Validates the JSON-LD request body against the APIDoc class
        definition before delegating the update to the CRUD layer and
        recording a modification job for client synchronization.

        :param id_ - ID of Item to be updated
        :param path - Path for Item type( Specified in APIDoc @id)
        :return Response - success status with a Location header, a 400
            error for invalid data, or a 405 abort when POST is not
            permitted for the class.
        """
        id_ = str(id_)
        # Short-circuit if authentication failed (helper returns a Response
        # only in the failure case).
        auth_response = check_authentication_response()
        if isinstance(auth_response, Response):
            return auth_response

        # NOTE(review): class_type is never used below — candidate for removal.
        class_type = get_doc().collections[path]["collection"].class_.title
        # Get path of the collection-class
        class_path = get_doc().collections[path]["collection"].class_.path
        object_ = json.loads(request.data.decode('utf-8'))
        if checkClassOp(class_path, "POST") and check_writeable_props(class_path, object_):
            # Check if class_type supports POST operation
            obj_type = getType(class_path, "POST")
            link_props, link_type_check = get_link_props(class_path, object_)
            # Load new object and type
            if validObject(object_) and object_["@type"] == obj_type and check_required_props(
                    class_path, object_) and link_type_check:
                try:
                    # Update the right ID if the object is valid and matches
                    # type of Item
                    object_id = crud.update(
                        object_=object_,
                        id_=id_,
                        link_props=link_props,
                        type_=object_["@type"],
                        session=get_session(),
                        api_name=get_api_name())
                    method = "POST"
                    resource_url = "{}{}/{}/{}".format(
                            get_hydrus_server_url(), get_api_name(), path, object_id)
                    # Record the modification so connected clients can sync;
                    # the previous job id lets them detect missed updates.
                    last_job_id = crud.get_last_modification_job_id(session=get_session())
                    new_job_id = crud.insert_modification_record(method, resource_url,
                                                                 session=get_session())
                    send_sync_update(socketio=socketio, new_job_id=new_job_id,
                                     last_job_id=last_job_id, method=method,
                                     resource_url=resource_url)
                    headers_ = [{"Location": resource_url}]
                    status_description = "Object with ID {} successfully updated".format(object_id)
                    status = HydraStatus(code=200, title="Object updated", desc=status_description)
                    return set_response_headers(jsonify(status.generate()), headers=headers_)

                except (ClassNotFound, InstanceNotFound, InstanceExists, PropertyNotFound) as e:
                    # Known domain errors carry their own HTTP representation.
                    error = e.get_HTTP()
                    return set_response_headers(jsonify(error.generate()), status_code=error.code)
            else:
                error = HydraError(code=400, title="Data is not valid")
                return set_response_headers(jsonify(error.generate()), status_code=error.code)
        else:
            # Class does not allow POST, or the object contains
            # non-writeable properties.
            abort(405)
Example 80
Project: hydrus | Author: HTTP-APIs | File: resources.py | License: MIT License
def post(self, path: str) -> Response:
        """
        Method executed for POST requests.
        Used to update a non-collection class.

        Validates the JSON-LD request body against the APIDoc definition,
        delegates the update to ``crud.update_single`` and records a
        modification job for client synchronization.

        :param path - Path for Item type ( Specified in APIDoc @id)
        :return Response - 200 on success, 400 for invalid data, 405 when
            the object has non-writeable properties, otherwise the
            endpoint's error status.
        """
        # Short-circuit if authentication failed.
        auth_response = check_authentication_response()
        if isinstance(auth_response, Response):
            return auth_response

        endpoint_ = checkEndpoint("POST", path)
        if endpoint_['method']:
            object_ = json.loads(request.data.decode('utf-8'))
            # Only non-collection parsed classes are handled here.
            if path in get_doc().parsed_classes and "{}Collection".format(path) not in get_doc(
            ).collections:
                obj_type = getType(path, "POST")
                link_props, link_type_check = get_link_props(path, object_)
                if check_writeable_props(path, object_):
                    if object_["@type"] == obj_type and check_required_props(
                            path, object_) and validObject(object_) and link_type_check:
                        try:
                            crud.update_single(
                                object_=object_,
                                session=get_session(),
                                api_name=get_api_name(),
                                link_props=link_props,
                                path=path)
                            method = "POST"
                            resource_url = "{}{}/{}".format(
                                get_hydrus_server_url(), get_api_name(), path)
                            # Record the modification so connected clients can
                            # sync; the previous job id detects missed updates.
                            last_job_id = crud.get_last_modification_job_id(session=get_session())
                            new_job_id = crud.insert_modification_record(method, resource_url,
                                                                         session=get_session())
                            send_sync_update(socketio=socketio, new_job_id=new_job_id,
                                             last_job_id=last_job_id, method=method,
                                             resource_url=resource_url)
                            # Bug fix: the format string was "{}/{}/" with
                            # THREE arguments — str.format silently ignores
                            # extras, so ``path`` was dropped from the
                            # Location header. Use the same pattern as
                            # resource_url above.
                            headers_ = [
                                {"Location": "{}{}/{}/".format(
                                    get_hydrus_server_url(), get_api_name(), path)}]
                            status = HydraStatus(code=200, title="Object successfully added")
                            return set_response_headers(
                                jsonify(status.generate()), headers=headers_)
                        except (ClassNotFound, InstanceNotFound,
                                InstanceExists, PropertyNotFound) as e:
                            error = e.get_HTTP()
                            return set_response_headers(
                                jsonify(error.generate()), status_code=error.code)

                    error = HydraError(code=400, title="Data is not valid")
                    return set_response_headers(jsonify(error.generate()), status_code=error.code)
                else:
                    abort(405)

        abort(endpoint_['status'])