Python geojson.load() Examples
The following are 20 code examples of geojson.load().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module geojson, or try the search function.
Example #1
Source File: bus_router.py From bus-router with MIT License | 6 votes |
def geojsonToShapes():
    """Convert data/geojson/shapes.geojson into a GTFS-style shapes CSV.

    Reads every feature of the GeoJSON file and writes one CSV row per
    coordinate to data/gtfs/shapes_new.txt, keyed by the feature's
    ``shape_id`` property and the coordinate's position in the line.
    """
    data_dir = os.path.join(os.getcwd(), 'data')
    gtfs_dir = os.path.join(data_dir, 'gtfs')
    geojson_dir = os.path.join(data_dir, 'geojson')
    with open(gtfs_dir + "/shapes_new.txt", 'wb') as out_csv:
        writer = csv.writer(out_csv)
        writer.writerow(["shape_id","shape_pt_sequence","shape_dist_traveled","shape_pt_lon","shape_pt_lat"])
        source_path = os.path.join(geojson_dir, 'shapes.geojson')
        with open(source_path, 'rb') as src:
            collection = geojson.load(src)
        # One output row per coordinate; dist_traveled is left blank.
        for feature in collection['features']:
            shape_id = feature['properties']['shape_id']
            for seq, coord in enumerate(feature['geometry']['coordinates']):
                writer.writerow([shape_id, seq, '', coord[0], coord[1]])
Example #2
Source File: bus_router.py From bus-router with MIT License | 6 votes |
def osrmDirectionsCall(stop, origin, dest, osrmpoints, fname):
    """Query the public OSRM ``viaroute`` endpoint and dump the JSON reply.

    Python 2 code (``print`` statements, ``urllib.urlopen``).

    stop: unused here -- presumably kept for a uniform call signature
        with other routing helpers; TODO confirm.
    origin, dest: coordinate strings used directly as ``loc=`` parameters.
    osrmpoints: iterable of intermediate ``loc=`` coordinate strings.
    fname: path the raw JSON response is written to.
    """
    print "getting dirs..."
    base = 'http://router.project-osrm.org/viaroute?'
    # Build the query string by hand: one 'loc=' parameter per via point,
    # each followed by '&' so the final 'loc=dest' concatenates cleanly.
    viastring = ""
    for point in osrmpoints:
        viastring += 'loc=' + point + '&'
    params = 'loc=' + origin + '&' + viastring + 'loc=' + dest
    # params = urllib.urlencode({'loc': origin, 'loc': dest, 'waypoints': waypoints, 'sensor': 'false','key': google_key})
    print params
    # if waypoints == "":
    # Append every request URL to a local log before issuing it.
    with open("log.txt", 'a') as log:
        log.write(base + params + '\n')
    response = urllib.urlopen(base + params)
    data = json.load(response)
    with open(fname, 'w') as outfile:
        json.dump(data, outfile)
Example #3
Source File: geojson_tools.py From mltools with MIT License | 6 votes |
def join(input_files, output_file):
    '''
    Join geojsons into one. The spatial reference system of the
    output file is the same as the one of the last file in the list.

    Args:
        input_files (list): List of file name strings.
        output_file (str): Output file name.

    Raises:
        ValueError: If input_files is empty. (The original code hit an
            uninitialized variable and raised NameError in that case.)
    '''
    if not input_files:
        raise ValueError('input_files must contain at least one file name')

    # Accumulate features from every input; the last loaded collection is
    # reused as the output container, so its CRS/metadata win.
    final_features = []
    for input_name in input_files:  # renamed: 'file' shadowed the builtin
        with open(input_name) as f:
            feat_collection = geojson.load(f)
            final_features += feat_collection['features']

    feat_collection['features'] = final_features

    # write to output file
    with open(output_file, 'w') as f:
        geojson.dump(feat_collection, f)
Example #4
Source File: geojson_tools.py From mltools with MIT License | 6 votes |
def split(input_file, file_1, file_2, no_in_first_file):
    '''
    Split a geojson into two separate files.

    Args:
        input_file (str): Input filename.
        file_1 (str): Output file name 1.
        file_2 (str): Output file name 2.
        no_in_first_file (int): Number of features from input_file that
            go into file_1; the remainder go into file_2.
    '''
    with open(input_file) as src:
        collection = geojson.load(src)
    feats = collection['features']

    # Partition the feature list at the requested index and write each
    # half out as its own FeatureCollection.
    head = geojson.FeatureCollection(feats[0:no_in_first_file])
    tail = geojson.FeatureCollection(feats[no_in_first_file:])
    for path, part in ((file_1, head), (file_2, tail)):
        with open(path, 'w') as out:
            geojson.dump(part, out)
Example #5
Source File: geojson_tools.py From mltools with MIT License | 6 votes |
def get_from(input_file, property_names):
    '''
    Read a geojson and return a list of value tuples, one tuple per
    feature, each value corresponding to a property in property_names.

    Args:
        input_file (str): File name.
        property_names: List of strings; each string is a property name.

    Returns:
        List of value tuples (missing properties yield None).
    '''
    with open(input_file) as src:
        collection = geojson.load(src)

    rows = []
    for feat in collection['features']:
        props = feat['properties']
        rows.append(tuple(props.get(name) for name in property_names))
    return rows
Example #6
Source File: geojson_tools.py From mltools with MIT License | 6 votes |
def find_unique_values(input_file, property_name):
    '''
    Find the distinct values of a given property in a geojson file.

    Args:
        input_file (str): File name.
        property_name (str): Property name.

    Returns:
        Sorted numpy array of distinct values of the property. Features
        lacking the property contribute None.
    '''
    with open(input_file) as src:
        features = geojson.load(src)['features']

    collected = np.array([f['properties'].get(property_name) for f in features])
    return np.unique(collected)
Example #7
Source File: utils.py From tsd with GNU Affero General Public License v3.0 | 6 votes |
def valid_geojson(filepath):
    """
    Check that a file contains valid geojson describing a polygon.

    Accepts a bare Polygon, a Feature wrapping a Polygon, or a
    FeatureCollection whose first feature wraps a Polygon, and returns
    the Polygon geometry in each case.

    Args:
        filepath (str): Path to the geojson file.

    Returns:
        geojson.geometry.Polygon: The polygon found in the file.

    Raises:
        argparse.ArgumentTypeError: If no polygon can be extracted
            (intended for use as an argparse ``type=`` callback).
    """
    with open(filepath, 'r') as f:
        geo = geojson.load(f)
    # Use isinstance instead of exact type comparison (idiomatic, and
    # tolerant of subclasses the geojson package may return).
    if isinstance(geo, geojson.geometry.Polygon):
        return geo
    if isinstance(geo, geojson.feature.Feature):
        p = geo['geometry']
        if isinstance(p, geojson.geometry.Polygon):
            return p
    if isinstance(geo, geojson.feature.FeatureCollection):
        # Only the first feature of a collection is considered.
        p = geo['features'][0]['geometry']
        if isinstance(p, geojson.geometry.Polygon):
            return p
    raise argparse.ArgumentTypeError('Invalid geojson: only polygons are supported')
Example #8
Source File: geojson_tools.py From mltools with MIT License | 5 votes |
def create_train_test(input_file, output_file=None, test_size=0.2):
    '''
    Split a geojson file into train and test features. Saves features as
    geojsons in the working directory under the same file name with train
    and test prefixes to the original file name.

    INPUT   input_file (str): File name
            output_file (str): Name to use after the train_ and test_ prefixes
                for the saved files. Defaults to name of input_file.
            test_size (float or int): Amount of features to set aside as test
                data. If strictly less than one it is interpreted as a
                proportion of the total feature collection; otherwise it is
                the number of features to use as test data. Defaults to 0.2.
    '''
    with open(input_file) as f:
        data = geojson.load(f)
    features = data['features']
    np.random.shuffle(features)

    # Convert test size from proportion to number of polygons.
    # BUG FIX: the original used '<= 1', so test_size=1 (meaning one
    # feature, per the docstring) was converted to the full feature set.
    if test_size < 1:
        test_size = int(test_size * len(features))

    # Name output files
    if not output_file:
        output_file = input_file
    elif not output_file.endswith('.geojson'):
        output_file += '.geojson'
    test_out, train_out = 'test_{}'.format(output_file), 'train_{}'.format(output_file)

    # Save train and test files.
    # NOTE(review): 'wb' is a Python 2 idiom; under Python 3 geojson.dump
    # writes str and these opens would need mode 'w'.
    data['features'] = features[:test_size]
    with open(test_out, 'wb') as test_file:
        geojson.dump(data, test_file)
    data['features'] = features[test_size:]
    with open(train_out, 'wb') as train_file:
        geojson.dump(data, train_file)
Example #9
Source File: bus_router.py From bus-router with MIT License | 5 votes |
def directionscall(google_key, stop, origin, dest, waypoints, fname):
    """Call the Google Directions API and dump the JSON response to a file.

    Python 2 code (``print`` statement, ``urllib.urlencode``/``urlopen``).

    google_key: API key passed as the 'key' query parameter.
    stop: unused here -- presumably kept for a uniform call signature
        with other routing helpers; TODO confirm.
    origin, dest, waypoints: values forwarded verbatim to the API.
    fname: path the raw JSON response is written to.
    """
    print "getting dirs..."
    base = 'https://maps.googleapis.com/maps/api/directions/json?'
    params = urllib.urlencode({'origin': origin, 'destination': dest, 'waypoints': waypoints, 'sensor': 'false','key': google_key})
    # print params
    # if waypoints == "":
    # Append every request URL to a local log before issuing it.
    with open("log.txt", 'a') as log:
        log.write(base + params + '\n')
    response = urllib.urlopen(base + params)
    data = json.load(response)
    with open(fname, 'w') as outfile:
        json.dump(data, outfile)
Example #10
Source File: test_api.py From overpass-api-python-wrapper with Apache License 2.0 | 5 votes |
def test_geojson_extended():
    """Check that the wrapper's GeoJSON output matches the stored reference.

    Subclasses overpass.API so that the network call is replaced by a
    pickled canned response (example.response); the produced GeoJSON is
    then compared against the reference file example.json.
    """

    class API(overpass.API):
        # Fake the network layer: return the pickled Overpass response
        # instead of performing an HTTP request.
        def _get_from_overpass(self, query):
            return pickle.load(open(os.path.join(os.path.dirname(__file__), "example.response"), "rb"))

    # The commented code should only be executed once when major changes to the Overpass API and/or to this wrapper are
    # introduced. One than has to manually verify that the date in the example.response file from the Overpass API
    # matches the data in the example.json file generated by this wrapper.
    #
    # The reason for this approach is the following: It is not safe to make calls to the actual API in this test as the
    # API might momentarily be unavailable and the underlying data can also change at any moment. The commented code is
    # needed to create the example.response and example.json files. The example.response file is subsequently used to
    # fake the _get_from_overpass method during the tests and the example.json file is the reference that we are
    # asserting against.
    #
    # api = overpass.API()
    # osm_geo = api.get("rel(6518385);out body geom;way(10322303);out body geom;node(4927326183);", verbosity='body geom')
    # pickle.dump(api._get_from_overpass("[out:json];rel(6518385);out body geom;way(10322303);out body geom;node(4927326183);out body geom;"),
    #             open(os.path.join(os.path.dirname(__file__), "example.response"), "wb"),
    #             protocol=2)
    # geojson.dump(osm_geo, open(os.path.join(os.path.dirname(__file__), "example.json"), "w"))

    api = API()
    osm_geo = api.get("rel(6518385);out body geom;way(10322303);out body geom;node(4927326183);", verbosity='body geom')
    ref_geo = geojson.load(open(os.path.join(os.path.dirname(__file__), "example.json"), "r"))
    assert osm_geo==ref_geo
Example #11
Source File: extract_mgrs_tile_coordinates_from_kml.py From tsd with GNU Affero General Public License v3.0 | 5 votes |
def main(kml_filename, verbose=False):
    """Build a FeatureCollection of Sentinel-2 MGRS tiles from ESA's KML.

    The KML file describing the Sentinel-2 MGRS tiling grid is distributed
    on the ESA Sentinel website at:
    https://sentinel.esa.int/documents/247904/1955685/S2A_OPER_GIP_TILPAR_MPC__20151209T095117_V20150622T000000_21000101T000000_B00.kml

    Converts the KML to geojson, keeps only polygon geometries, strips Z
    coordinates, and returns one Feature per MGRS tile (id = tile name).
    """
    kml2geojson.main.convert(kml_filename, 's2_mgrs_grid')
    geojson_name = kml_filename.replace('.kml', '.geojson')
    with open(os.path.join('s2_mgrs_grid', geojson_name), 'r') as f:
        grid = geojson.load(f)

    tiles = []
    for feature in grid['features']:
        geom = feature['geometry']
        # Drop non-polygon members, then flatten each polygon to 2-D.
        keep_only_polygons_from_geometry_collection(geom)
        for poly in geom['geometries']:
            remove_z_from_polygon_coordinates(poly)
        tile_id = feature['properties']['name']
        tiles.append(geojson.Feature(id=tile_id, geometry=geom))
        if verbose:
            print(tile_id, end=' ')
            print(geom)
    return geojson.FeatureCollection(tiles)
Example #12
Source File: pool_net.py From mltools with MIT License | 5 votes |
def _load_model_architecture(self, model_name):
    '''
    Load a model architecture from a json file.

    INPUT   model_name (str): Name of model to load (file name without
                the '.json' extension).
    OUTPUT  Model object rebuilt from the stored JSON architecture
            (via model_from_json).
    '''
    # NOTE(review): prints self.model_name rather than the model_name
    # argument -- confirm these are expected to match.
    print 'Loading model {}'.format(self.model_name)

    #load model
    with open(model_name + '.json') as f:
        mod = model_from_json(json.load(f))

    return mod
Example #13
Source File: sentinel.py From sentinelsat with GNU General Public License v3.0 | 5 votes |
def _load_subquery(self, query, order_by=None, limit=None, offset=0):
    """Execute one paged search request and parse the OpenSearch reply.

    Posts the query string to the paginated search URL and returns a
    tuple ``(products, total_results)`` where products is always a list
    of entry dicts (even when the server returns a single entry).

    Raises QuerySyntaxError for an invalid query string and ServerError
    when the response cannot be decoded as the expected JSON feed.
    """
    # store last query (for testing)
    self._last_query = query
    self.logger.debug("Sub-query: offset=%s, limit=%s", offset, limit)

    # load query results
    url = self._format_url(order_by, limit, offset)
    response = self.session.post(
        url,
        {"q": query},
        auth=self.session.auth,
        headers={"Content-Type": "application/x-www-form-urlencoded; charset=UTF-8"},
        timeout=self.timeout,
    )
    _check_scihub_response(response, query_string=query)

    # store last status code (for testing)
    self._last_response = response

    # parse response content
    try:
        json_feed = response.json()["feed"]
        if json_feed["opensearch:totalResults"] is None:
            # We are using some unintended behavior of the server that a null is
            # returned as the total results value when the query string was incorrect.
            raise QuerySyntaxError(
                "Invalid query string. Check the parameters and format.", response
            )
        total_results = int(json_feed["opensearch:totalResults"])
    except (ValueError, KeyError):
        # Malformed/missing feed -> treat as a server-side problem.
        raise ServerError("API response not valid. JSON decoding failed.", response)

    products = json_feed.get("entry", [])
    # this verification is necessary because if the query returns only
    # one product, self.products will be a dict not a list
    if isinstance(products, dict):
        products = [products]

    return products, total_results
Example #14
Source File: geojson_tools.py From mltools with MIT License | 5 votes |
def filter_by_property(input_file, output_file, property_name, values):
    '''
    Create a file containing only features with specified property value(s)
    from input_file.

    INPUT   input_file (str): File name.
            output_file (str): Output file name.
            property_name (str): Name of the feature property to filter by.
            values (list): Value(s) a feature may have for property_name if
                it is to be included in output_file.
    '''
    filtered_feats = []

    if not output_file.endswith('.geojson'):
        output_file += '.geojson'

    # Load feature list
    with open(input_file) as f:
        feature_collection = geojson.load(f)

    # Filter feats by property_name
    for feat in feature_collection['features']:
        if feat['properties'][property_name] in values:
            filtered_feats.append(feat)

    feature_collection['features'] = filtered_feats

    # Save filtered file
    with open(output_file, 'wb') as f:
        # BUG FIX: the original called geojson.dump(f), omitting the object
        # to serialize, which raises a TypeError at runtime.
        geojson.dump(feature_collection, f)
Example #15
Source File: data_extractors.py From mltools with MIT License | 5 votes |
def uniform_chip_generator(input_file, batch_size=32, **kwargs):
    '''
    Generate batches of uniformly-sized pixel intensity arrays from image
    strips using a geojson file. Output will be in the same format as
    get_data_from_polygon_list.

    INPUT   input_file (str): File name
            batch_size (int): Number of chips to yield per iteration
            kwargs: See get_data_from_polygon_list docstring for other input
                params. Do not use the num_chips arg.

    OUTPUT  chips (array): Uniformly sized chips with the following
                dimensions: (num_chips, num_channels, max_side_dim,
                max_side_dim)
            ids (list): Feature ids corresponding to chips.
            labels (array): One-hot encoded labels for chips with the
                following dimensions: (num_chips, num_classes)
    '''
    # Load all features once, then hand out slices of size batch_size.
    with open(input_file) as src:
        feats = geojson.load(src)['features']

    total = len(feats)
    start = 0
    while start < total:
        yield get_data_from_polygon_list(feats[start: start + batch_size], **kwargs)
        start += batch_size
Example #16
Source File: data_extractors.py From mltools with MIT License | 5 votes |
def get_uniform_chips(input_file, num_chips=None, **kwargs):
    '''
    Get uniformly-sized pixel intensity arrays from image strips using a
    geojson file. Output will be in the same format as
    get_data_from_polygon_list.

    INPUT   input_file (str): File name. This file should be filtered for
                polygon size.
            num_chips (int): Maximum number of chips to return. If None will
                return all chips in input_file. Defaults to None.
            kwargs: See get_data_from_polygon_list docstring for other input
                params.

    OUTPUT  chips (array): Uniformly sized chips with the following
                dimensions: (num_chips, num_channels, max_side_dim,
                max_side_dim)
            ids (list): Feature ids corresponding to chips.
            labels (array): One-hot encoded labels for chips with the
                following dimensions: (num_chips, num_classes)
    '''
    with open(input_file) as src:
        feats = geojson.load(src)['features']

    # Truncate to the requested chip count before delegating.
    if num_chips:
        feats = feats[:num_chips]

    return get_data_from_polygon_list(feats, num_chips=num_chips, **kwargs)
Example #17
Source File: geo_json_processor.py From urbanfootprint with GNU General Public License v3.0 | 5 votes |
def importer(self, config_entity, db_entity, **kwargs):
    """
    Creates various GeojsonFeature classes by importing geojson and saving
    it to the database via a dynamic subclass of GeojsonFeature
    :schema: The optional schema to use for the dynamic subclass's meta
        db_table attribute, which will allow the class's table to be saved
        in the specified schema. Defaults to public
    :data: Optional python dict data to use instead of loading from the
        db_entity.url
    :return: a list of lists. Each list is a list of features of distinct
        subclass of GeoJsonFeature that is created dynamically. To persist
        these features, you must first create the subclass's table in the
        database using create_table_for_dynamic_class(). You should also
        register the table as a DbEntity.
    """
    if self.seed_data:
        # In-memory seed data takes precedence over the DbEntity's file.
        data = geojson.loads(jsonify(self.seed_data), object_hook=geojson.GeoJSON.to_instance)
    else:
        # NOTE(review): fp is never closed -- consider a 'with' block.
        fp = open(db_entity.url.replace('file://', ''))
        data = geojson.load(fp, object_hook=geojson.GeoJSON.to_instance)
    feature_class_creator = FeatureClassCreator(config_entity, db_entity)
    # find all unique properties
    feature_class_configuration = feature_class_creator.feature_class_configuration_from_geojson_introspection(data)
    feature_class_creator.update_db_entity(feature_class_configuration)
    feature_class = feature_class_creator.dynamic_model_class(base_only=True)
    # Create our base table. Normally this is done by the import, but we're just importing into memory
    create_tables_for_dynamic_classes(feature_class)
    # Now write each feature to our newly created table
    for feature in map(lambda feature: self.instantiate_sub_class(feature_class, feature), data.features):
        feature.save()
    # Create the rel table too
    rel_feature_class = feature_class_creator.dynamic_model_class()
    create_tables_for_dynamic_classes(rel_feature_class)
    # PostGIS 2 handles this for us now
    # if InformationSchema.objects.table_exists(db_entity.schema, db_entity.table):
    #     # Tell PostGIS about the new geometry column or the table
    #     sync_geometry_columns(db_entity.schema, db_entity.table)
    # Create association classes and tables and populate them with data
    create_and_populate_relations(config_entity, db_entity)
Example #18
Source File: sentinel.py From sentinelsat with GNU General Public License v3.0 | 5 votes |
def read_geojson(geojson_file):
    """Read a GeoJSON file into a GeoJSON object.

    geojson_file: path of the file to parse.
    Returns the object produced by geojson.load().
    """
    fh = open(geojson_file)
    try:
        return geojson.load(fh)
    finally:
        fh.close()
Example #19
Source File: geojson_tools.py From mltools with MIT License | 4 votes |
def create_balanced_geojson(input_file, classes, output_file='balanced.geojson',
                            samples_per_class=None):
    '''
    Create a geojson comprised of balanced classes from input_file for
    training data. Randomly selects polygons from all classes.

    INPUT   input_file (str): File name
            classes (list[str]): Classes in input_file to include in the
                balanced output file. Must exactly match the 'class_name'
                property in the features of input_file.
            output_file (str): Name under which to save the balanced output
                file. Defaults to balanced.geojson.
            samples_per_class (int or None): Number of features to select per
                class in input_file. If None will use the smallest class
                size. Defaults to None.
    '''
    if not output_file.endswith('.geojson'):
        output_file += '.geojson'

    with open(input_file) as src:
        data = geojson.load(src)

    # Bucket features by class name; features with an unknown class or no
    # 'class_name' property are skipped.
    buckets = {name: [] for name in classes}
    for feat in data['features']:
        try:
            buckets[feat['properties']['class_name']].append(feat)
        except KeyError:
            continue

    # Default the per-class sample size to the smallest bucket.
    if not samples_per_class:
        samples_per_class = min(len(feats) for feats in buckets.values())

    # Randomly draw the same number of features from every class.
    try:
        drawn = [random.sample(feats, samples_per_class) for feats in buckets.values()]
        balanced = [feat for group in drawn for feat in group]
    except ValueError:
        raise Exception('Insufficient features in at least one class. Set ' \
                        'samples_per_class to None to use maximum amount of '\
                        'features.')

    # Shuffle and save balanced data
    np.random.shuffle(balanced)
    data['features'] = balanced
    with open(output_file, 'wb') as out:
        geojson.dump(data, out)
Example #20
Source File: bus_router.py From bus-router with MIT License | 4 votes |
def shapesToGeojson():
    """Convert a GTFS shapes.txt into a geojson FeatureCollection of
    LineStrings, one per shape_id, written to data/geojson/shapes.geojson.

    Python 2 code ('rb' csv reading, ``print`` statements).
    """
    # data.txt is loaded but its contents are unused below -- presumably
    # _decode_dict has needed side effects or this is leftover; verify.
    json_data=open('data.txt')
    datadir = os.path.join(os.getcwd(), 'data')
    gtfsdir = os.path.join(datadir, 'gtfs')
    geojsondir = os.path.join(datadir, 'geojson')
    data = json.load(json_data, object_hook=_decode_dict)
    json_data.close()
    with open(gtfsdir + "/shapes.txt", 'rb') as shapesfile:
        shapesreader = csv.DictReader(shapesfile)
        keys = shapesreader.fieldnames  # unused; kept as-is
        jsonpoints = []
        features = []
        currentTrip = ''
        for i, point in enumerate(shapesreader):
            # A shape_pt_sequence of '0' marks the start of a new shape:
            # flush the accumulated points of the previous shape.
            if point['shape_pt_sequence'] == '0':
                print 'creating trip'
                # NOTE(review): currentTrip is updated to the NEW shape's id
                # BEFORE the previous shape's feature is written, so each
                # flushed LineString appears to get the following shape's
                # id. Also the sequence-0 point itself is never appended to
                # jsonpoints. Both look like bugs -- confirm against the
                # original source before relying on this output.
                currentTrip = point['shape_id']
                if i > 0:
                    ls = LineString(jsonpoints)
                    feature = Feature(geometry=ls, properties={"shape_id": currentTrip})
                    # print feature
                    features.append(feature)
                    jsonpoints = []
            else:
                pnt = (float(point['shape_pt_lon']), float(point['shape_pt_lat']))
                # print pnt
                jsonpoints.append(pnt)
        # write linestring for last shape
        ls = LineString(jsonpoints)
        feature = Feature(geometry=ls, properties={"shape_id": currentTrip})
        print feature
        features.append(feature)
        jsonpoints = []
        fc = FeatureCollection(features)
        print fc
        geojsonfile = os.path.join(geojsondir, 'shapes.geojson')
        with open(geojsonfile, 'wb') as tripgeo:
            geojson.dump(fc, tripgeo)