Python google.cloud.datastore.Client() Examples

The following are code examples for showing how to use google.cloud.datastore.Client(). They are from open source Python projects. You can vote up the examples you like or vote down the ones you don't like.

Example 1
Project: python-docs-samples   Author: GoogleCloudPlatform   File: quickstart.py    Apache License 2.0 8 votes vote down vote up
def run_quickstart():
    """Create a sample 'Task' entity and persist it to Cloud Datastore."""
    # [START datastore_quickstart]
    # Import the Google Cloud client library.
    from google.cloud import datastore

    # Instantiate a client for the current project.
    datastore_client = datastore.Client()

    # Build the key identifying the new entity: kind 'Task', name 'sampletask1'.
    task_key = datastore_client.key('Task', 'sampletask1')

    # Populate the entity and save it.
    task = datastore.Entity(key=task_key)
    task['description'] = 'Buy milk'
    datastore_client.put(task)

    print('Saved {}: {}'.format(task.key.name, task['description']))
    # [END datastore_quickstart] 
Example 2
Project: loaner   Author: google   File: datastore.py    Apache License 2.0 6 votes vote down vote up
def from_config(cls, config, creds=None):
    """Build an authenticated DatastoreAPI object from a project config.

    Args:
      config: common.ProjectConfig, the project configuration.
      creds: auth.CloudCredentials, credentials to use for client
          authentication; created from `config` when omitted.

    Returns:
      An authenticated DatastoreAPI instance.
    """
    if creds is None:
        creds = auth.CloudCredentials(config, cls.SCOPES)
    ds_client = datastore.Client(
        project=config.project,
        credentials=creds.get_credentials(cls.SCOPES))
    return cls(config, ds_client)
Example 3
Project: som   Author: vsoch   File: client.py    MIT License 6 votes vote down vote up
def storageObject(uid, entity, url, storage_type):
    '''Build a datastore model dict for a storage object; entity is the parent.'''
    # Both uid and url are mandatory fields on the stored object.
    required_fields = [
        {'key': 'uid', 'required': True, 'value': uid},
        {'key': 'url', 'required': True, 'value': url},
    ]

    collection_name = entity.collection.get_name()
    entity_name = entity.get_name()

    # Normalize the storage type into a datastore kind, e.g. "my type" -> "My-type".
    kind = storage_type.replace(' ','-').lower().capitalize()

    return {
        'fields': required_fields,
        'exclude_from_indexes': ['url'],
        'key': ['Collection', collection_name, 'Entity', entity_name, kind, uid],
    }
    



######################################################################################
# Client to interact with Models
###################################################################################### 
Example 4
Project: som   Author: vsoch   File: models.py    MIT License 6 votes vote down vote up
def __init__(self, **kwargs):
        '''A ModelBase is a controller for a general DataStore
        Entity. Most of the variables in init are for initial
        validation, and further fields etc are stored with
        the Entity itself (_Entity)
        :param client: the datastore client, should be passed on init
        :param _fields: initial fields to validate on model creation
        :param _key: the initial key used for the entity
        :param _Entity: the final generated (validated) entity
        '''
        # Pull the client out of kwargs before delegating to the superclass;
        # fall back to a default datastore.Client() when none was given.
        client = None
        if "client" in kwargs:
            client = kwargs['client']

        super(ModelBase, self).__init__(**kwargs)
        if client is None:
            client = datastore.Client()
        self.client = client
        # Validate declared fields and derive the entity key from the model.
        self._fields = validate_model(self.model['fields'])
        self._key = self.client.key(*self.model['key'])
        self._Entity = None
        # exclude_from_indexes is optional on the model and may itself be None.
        if "exclude_from_indexes" in self.model:
            self._exclude_from_indexes = self.model['exclude_from_indexes'] # Can be None
        else:
            self._exclude_from_indexes = None 
Example 5
Project: openmic-annotator   Author: cosmir   File: database.py    MIT License 6 votes vote down vote up
def uris(self, kind=None):
    """Yield the URIs stored in this collection.

    Parameters
    ----------
    kind : str, default=None
        Optionally filter over the URI kind in the database.

    Yields
    ------
    uri : str
        A URI in the collection.
    """
    if kind is None:
        # No filter: yield every stored URI.
        for uri in self._collection.keys():
            yield uri
    else:
        for uri in self._collection.keys():
            if urilib.split(uri)[0] == kind:
                yield uri
Example 6
Project: eclipse2017   Author: google   File: uploader.py    Apache License 2.0 6 votes vote down vote up
def _get_client(client_type='storage'):
    """
    Return a gcloud client for `client_type` ('storage' or 'datastore').

    Any unrecognized `client_type` (including the default) yields the
    storage client. Raises `CouldNotObtainCredentialsError` if there is an
    error obtaining credentials.
    """
    # May raise CouldNotObtainCredentialsError.
    credentials = sa.get_credentials()

    factory = (datastore.Client if client_type == 'datastore'
               else storage.client.Client)
    return factory(project=config.PROJECT_ID, credentials=credentials)
Example 7
Project: eclipse2017   Author: google   File: uploader.py    Apache License 2.0 6 votes vote down vote up
def _check_adult_content(img):
    """
    Return True if `img` appears to contain adult content (or cannot be
    checked), False otherwise.
    """
    width, height = getRescaledDimensions(img.width, img.height, 640, 480)
    try:
        shrunk = img.resize((width, height), Image.ANTIALIAS)
    except IOError:
        logging.error("Invalid image cannot be resized.")
        # Have to assume image is adult content
        return True

    buf = io.BytesIO()
    shrunk.convert('RGB').save(buf, format='JPEG')

    # Run the Vision API safe-search detector over the downscaled JPEG.
    vc_img = vision.Client().image(content=buf.getvalue())
    safe = vc_img.detect_safe_search()
    if safe.adult in (vision.likelihood.Likelihood.LIKELY,
                      vision.likelihood.Likelihood.POSSIBLE):
        logging.error("Detected likely adult content upload.")
        return True
    return False
Example 8
Project: eclipse2017   Author: google   File: image_sorter_test.py    Apache License 2.0 6 votes vote down vote up
def test_sort_newest(self):
        # pick_image should return the entity with the newest image_datetime,
        # regardless of the order entities are passed in.
        datastore_client = datastore.Client(config.PROJECT_ID)
        key = datastore_client.key('Photo', '1')
        entity = datastore.Entity(key=key)
        entity["image_datetime"] = datetime.datetime.utcnow()
        # Sleep between entities so timestamps strictly increase.
        time.sleep(1)
        key = datastore_client.key('Photo', '2')
        entity2 = datastore.Entity(key=key)
        entity2["image_datetime"] = datetime.datetime.utcnow()
        time.sleep(1)
        key = datastore_client.key('Photo', '3')
        entity3 = datastore.Entity(key=key)
        entity3["image_datetime"] = datetime.datetime.utcnow()

        # entity3 is newest; every ordering must yield it.
        entities = [entity, entity2, entity3]
        self.assertEqual(image_sorter.pick_image(entities), entity3)
        entities = [entity3, entity2, entity]
        self.assertEqual(image_sorter.pick_image(entities), entity3)
        entities = [entity2, entity3, entity]
        self.assertEqual(image_sorter.pick_image(entities), entity3) 
Example 9
Project: eclipse2017   Author: google   File: extract_metadata_from_datastore.py    Apache License 2.0 6 votes vote down vote up
def main():
    # Extract per-entity metadata from the datastore photo table, write the
    # matching file paths to args.files, and pickle the metadata dict to
    # args.output.  NOTE: Python 2 syntax (print statements).
    args  = get_arguments()
    client = datastore.Client(project=args.project_id)

    query = client.query(kind=args.photo_table)
    entities = list(query.fetch())
    results = []
    print "Initial results:", len(entities)
    for entity in entities:
        # Skip entities for which no metadata could be derived.
        m = metadata.get_metadata(entity, args.image_bucket, debug=False)
        if m is not None:
            results.append( (entity.key.name, m) )

    print "Filtered results:", len(results)

    f = open(args.files, "wb")
    for result in results:
        f.write("%s/%s\n" % (args.directory, result[0]))
    f.close()
    pickle.dump(dict(results), open(args.output, "wb")) 
Example 10
Project: eclipse2017   Author: google   File: final_numbers_report.py    Apache License 2.0 6 votes vote down vote up
def main():
    # Report the number and total byte size of photo blobs that are recorded
    # in the datastore photo table for the given image bucket.
    # NOTE: Python 2 syntax (print statement).
    args  = get_arguments()
    datastore_client = datastore.Client(project=args.project_id)

    query = datastore_client.query(kind=args.photo_table)
    query.add_filter("image_bucket","=", args.image_bucket)
    entities = list(query.fetch())
    images = set()
    for entity in entities:
        images.add(entity.key.name)

    # Instantiates a client
    storage_client = storage.Client()

    # The name for the new bucket
    bucket_name = args.project_id + "-photos"
    bucket = storage_client.get_bucket(bucket_name)
    blobs = bucket.list_blobs()
    blob_sizes = []
    for blob in blobs:
        # Only count blobs that correspond to known photo entities.
        if blob.name in images:
            blob_sizes.append(blob.size)

    print len(images), sum(blob_sizes) 
Example 11
Project: eclipse2017   Author: google   File: delete_processed_photos.py    Apache License 2.0 6 votes vote down vote up
def main():
    """Delete all ProcessedImage entities, committing in batches of 500."""
    args = get_arguments()

    client = datastore.Client(project=args.project_id)

    # Keys-only query: we only need keys to issue deletes.
    query = client.query(kind="ProcessedImage")
    query.keys_only()

    batch = client.batch()
    batch.begin()
    pending = 0
    for entity in query.fetch():
        batch.delete(entity.key)
        pending += 1
        if pending == 500:
            # Datastore limits mutations per commit; flush and start a new batch.
            batch.commit()
            batch = client.batch()
            batch.begin()
            pending = 0
    batch.commit()
Example 12
Project: eclipse2017   Author: google   File: get_user_ids.py    Apache License 2.0 6 votes vote down vote up
def main():
    # Look up datastore User entities for each email address listed in the
    # given file and print the matches.  NOTE: Python 2 syntax (print
    # statements).
    args  = get_arguments()

    client = datastore.Client(project=args.project_id)

    addresses = [address.strip() for address in open(args.email_address_file).readlines()]
    # Can't find a way to query a collection of records matching different email addresses.
    for email in addresses:
        query = client.query(kind="User")
        query.add_filter('email', '=', email)
        entities = query.fetch()
        l = list(entities)
        if l == []:
            print "No match for", email
        else:
            for entity in l:
                print entity.key.name, entity['email'] 
Example 13
Project: eclipse2017   Author: google   File: delete_photos.py    Apache License 2.0 6 votes vote down vote up
def main():
    # Delete Photo entities, optionally filtered by upload session id and/or
    # user id, committing deletions in datastore-sized chunks of 500.
    # NOTE: Python 2 syntax (print statements, unicode()).
    args  = get_arguments()

    client = datastore.Client(project=args.project_id)

    # Keys-only query: keys are all that is needed to delete.
    query = client.query(kind="Photo")
    query.keys_only()

    if args.upload_session_id != None:
        query.add_filter('upload_session_id', '=', args.upload_session_id)
    if args.user_id != None:
        print client.key(u'User', unicode(args.user_id))
        query.add_filter('user', '=', client.key('User', args.user_id))


    entities = list(query.fetch())
    entity_chunks = chunks(entities, 500)
    for entity_chunk in entity_chunks:
        print "creating batch"
        batch = client.batch()
        batch.begin()
        for entity in entity_chunk:
            batch.delete(entity.key)
        batch.commit()
        print "batch committed" 
Example 14
Project: dnae   Author: google   File: gcp_connector.py    Apache License 2.0 6 votes vote down vote up
def __init__(self, project_id):
    """Build discovery-API services and gcloud clients for `project_id`.

    Args:
      project_id: the GCP project every service/client is bound to.
    """

    credentials = GoogleCredentials.get_application_default()

    self.project_id = project_id
    self.gce_zone = None
    # Change the following location if your GAE instance is not in US-CENTRAL
    # See GAE locations at: https://cloud.google.com/appengine/docs/locations
    self.gae_location = 'us-central1'
    # Discovery-based API services (API versions come from class attributes).
    self.__gcsapi = discovery.build(
        'storage', self._gcs_apiver, credentials=credentials)
    self.__gbqapi = discovery.build(
        'bigquery', self._gbq_apiver, credentials=credentials)
    self.__gceapi = discovery.build(
        'compute', self._gce_apiver, credentials=credentials)
    self.__gdsapi = discovery.build(
        'datastore', self._gds_apiver, credentials=credentials)
    self.__gctapi = discovery.build(
        'cloudtasks', self._gct_apiver, credentials=credentials)

    # Idiomatic google-cloud clients for storage, datastore and bigquery.
    self.__gcsclient = storage.Client(project_id)
    self.__gdsclient = datastore.Client(project_id)
    self.__gbqclient = bigquery.Client(project_id)

  # Cloud Storage methods 
Example 15
Project: neural-fingerprinting   Author: StephanZheng   File: cloud_client.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __init__(self, project_id, bucket_name):
    """Initialize client with project id and name of the storage bucket."""
    gcs = storage.Client(project=project_id)
    self.project_id = project_id
    self.bucket_name = bucket_name
    self.client = gcs
    self.bucket = gcs.get_bucket(bucket_name)
Example 16
Project: neural-fingerprinting   Author: StephanZheng   File: cloud_client.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def __init__(self, project_id, namespace=None):
    """Init this method with given project id and optional namespace."""
    # A single datastore client backs all operations of this object.
    self._client = datastore.Client(namespace=namespace, project=project_id)
Example 17
Project: PyChunkedGraph   Author: seung-lab   File: flask_log_db.py    Mozilla Public License 2.0 5 votes vote down vote up
def __init__(self, table_id, project_id="neuromancer-seung-import",
                 client=None, credentials=None):
        """Remember the table id and bind an existing or new datastore client."""
        self._table_id = table_id
        if client is None:
            # No client supplied: build one from project and credentials.
            client = datastore.Client(project=project_id,
                                      credentials=credentials)
        self._client = client
Example 18
Project: PyChunkedGraph   Author: seung-lab   File: performance.py    Mozilla Public License 2.0 5 votes vote down vote up
def readout_log_db(table_id, filters, cols,
                   date_filter=datetime.datetime(year=2019, day=30, month=3)):
    """Read `cols` from log-db entries newer than `date_filter`, applying `filters`."""
    if date_filter.tzinfo is None:
        # Make the cutoff timezone-aware so it compares with stored dates.
        date_filter = chunkedgraph.UTC.localize(date_filter)

    credentials, project_id = default_creds()
    ds_client = datastore.Client(project=project_id, credentials=credentials)
    log_db = flask_log_db.FlaskLogDatabase(table_id, client=ds_client)

    query = log_db.client.query(kind=log_db.kind, namespace=log_db.namespace)
    for flt in filters:
        query.add_filter(*flt)

    rows = []
    for entry in query.fetch():
        if entry["date"] > date_filter:
            rows.append([entry[col] for col in cols])

    return rows
Example 19
Project: PyChunkedGraph   Author: seung-lab   File: app_utils.py    Mozilla Public License 2.0 5 votes vote down vote up
def get_bigtable_client(config):
    """Return an admin bigtable client, using dummy creds when emulating."""
    project_id = config.get("project_id", "pychunkedgraph")

    if config.get("emulate", False):
        # The emulator needs no real credentials.
        credentials = DoNothingCreds()
    else:
        credentials, project_id = default_creds()

    return bigtable.Client(admin=True, project=project_id,
                           credentials=credentials)
Example 20
Project: PyChunkedGraph   Author: seung-lab   File: app_utils.py    Mozilla Public License 2.0 5 votes vote down vote up
def get_datastore_client(config):
    """Return a datastore client, using dummy creds when emulating."""
    project_id = config.get("project_id", "pychunkedgraph")

    if config.get("emulate", False):
        # The emulator needs no real credentials.
        credentials = DoNothingCreds()
    else:
        credentials, project_id = default_creds()

    return datastore.Client(project=project_id, credentials=credentials)
Example 21
Project: som   Author: vsoch   File: client.py    MIT License 5 votes vote down vote up
def __init__(self, project, bucket_name, **kwargs):
    """Initialize a DataStoreClient with a datastore-backed batch manager."""
    super(DataStoreClient, self).__init__(project, bucket_name, **kwargs)
    self.name = "datastore"
    self.datastore = datastore.Client(self.project)
    # Batch operations are delegated to a manager sharing the same client.
    self.batch = DataStoreManager(client=self.datastore)

    ###################################################################
    ## Create #########################################################
    ################################################################### 
Example 22
Project: som   Author: vsoch   File: models.py    MIT License 5 votes vote down vote up
def __init__(self, **kwargs):
    """Initialize the manager, creating a default datastore client if none given."""
    super(DataStoreManager, self).__init__(**kwargs)
    # The superclass may have set self.client from kwargs; fill in a default.
    if self.client is None:
        self.client = datastore.Client()
Example 23
Project: clusterfuzz   Author: google   File: ndb_patcher.py    Apache License 2.0 5 votes vote down vote up
def init():
  """Explicitly (re-)initialize _client(). This is useful for testing."""
  # The service account's own project may differ from the Datastore project
  # we wish to connect to, so only the credentials are taken from it and the
  # project is set explicitly from the application id.
  creds = credentials.get_default()[0]
  _local.client = datastore.Client(
      credentials=creds, project=utils.get_application_id())
Example 24
Project: hangouts-chat-samples   Author: gsuitedevs   File: auth.py    Apache License 2.0 5 votes vote down vote up
def __init__(self):
    """Create the Cloud Datastore client used by this object."""
    self.datastore_client = datastore.Client()
Example 25
Project: kryptos   Author: produvia   File: settings.py    MIT License 5 votes vote down vote up
def get_from_datastore(config_key, env):
    """Look up `config_key` on the Settings entity for `env`."""
    client = datastore.Client()
    settings = client.get(client.key('Settings', env))
    return settings[config_key]

## Machine Learning Settings 
Example 26
Project: kryptos   Author: produvia   File: settings.py    MIT License 5 votes vote down vote up
def get_from_datastore(config_key, env):
    """Fetch `config_key` from the Settings entity for `env`, logging the lookup."""
    ds = datastore.Client()
    print("Fetching {}".format(config_key))

    settings = ds.get(ds.key("Settings", env))
    return settings[config_key]
Example 27
Project: kryptos   Author: produvia   File: settings.py    MIT License 5 votes vote down vote up
def get_from_datastore(config_key, env):
    """Read a single setting value from the Settings entity for `env`."""
    ds = datastore.Client()
    print("Fetching {}".format(config_key))

    # One entity per environment holds all settings as properties.
    entity = ds.get(ds.key("Settings", env))
    return entity[config_key]
Example 28
Project: Flask-Blogging   Author: gouthambs   File: gcdatastore.py    MIT License 5 votes vote down vote up
def __init__(self, namespace=None):
    """Set up logging and a Cloud Datastore client in `namespace`."""
    self._client = datastore.Client(namespace=namespace)
    self._logger = logging.getLogger("flask-blogging")
Example 29
Project: python-ndb   Author: googleapis   File: conftest.py    Apache License 2.0 5 votes vote down vote up
def _make_ds_client(namespace):
    """Create a datastore client, using a plain HTTP session when emulated."""
    kwargs = {"namespace": namespace}
    if os.environ.get("DATASTORE_EMULATOR_HOST"):
        # The emulator needs an unauthenticated HTTP transport.
        kwargs["_http"] = requests.Session
    return datastore.Client(**kwargs)
Example 30
Project: python-ndb   Author: googleapis   File: conftest.py    Apache License 2.0 5 votes vote down vote up
def client_context(namespace):
    """Yield an ndb context with caching and legacy data handling disabled."""
    ndb_client = ndb.Client(namespace=namespace)
    with ndb_client.context(cache_policy=False, legacy_data=False) as ctx:
        yield ctx
Example 31
Project: scraper   Author: m-lab   File: run_scraper_test.py    Apache License 2.0 5 votes vote down vote up
def test_main_with_no_data(self, mock_sleep):
        # End-to-end run against a host with no data: the scraper should
        # finish one run and record last midnight as the high-water mark.
        now = datetime.datetime.now()
        slept_seconds = []
        mock_sleep.side_effect = slept_seconds.append

        # Verify that the recoverable exception does not rise to the top level
        run_scraper.main([
            'run_as_e2e_test',
            '--num_runs', '1',
            '--rsync_host', 'ndt.iupui.mlab4.xxx08.measurement-lab.org',
            '--rsync_module', 'iupui_ndt',
            '--data_dir', '/scraper_data',
            '--metrics_port', str(EndToEndWithFakes.prometheus_port),
            '--max_uncompressed_size', '1024'])

        # Verify that the sleep time is never too long
        for time_slept in slept_seconds:
            self.assertLessEqual(time_slept, 3600)

        # Verify that cloud storage has been updated to midnight last night
        datastore_client = datastore.Client()
        key = datastore_client.key(
            'dropboxrsyncaddress',
            'rsync://ndt.iupui.mlab4.xxx08.measurement-lab.org'
            ':7999/iupui_ndt')
        value = datastore_client.get(key)
        midnight = datetime.datetime(
            year=now.year, month=now.month, day=now.day)
        time_since_epoch = (midnight -
                            datetime.datetime(1970, 1, 1)).total_seconds()
        # Allow a few seconds of slop between test start and the scraper run.
        self.assertTrue(
            abs(value['maxrawfilemtimearchived'] - time_since_epoch) < 5,
            'maxrawfilemtimearchived(%d) and time_since_epoch(%d) differ by '
            'too much' % (value['maxrawfilemtimearchived'], time_since_epoch)) 
Example 32
Project: scraper   Author: m-lab   File: run_scraper_test.py    Apache License 2.0 5 votes vote down vote up
def test_main_with_enough_data_for_early_upload(self, mock_sleep):
        # End-to-end: when the data-buffer threshold is exceeded, only files
        # older than the recency cutoff should be uploaded early.
        now = datetime.datetime.now()
        slept_seconds = []
        mock_sleep.side_effect = slept_seconds.append

        # Add files for 2.1 hours ago and right now. Only the older should get
        # uploaded.
        now = datetime.datetime.now()
        older = now - datetime.timedelta(minutes=126)
        self.create_file(older)
        self.create_file(now)

        # Run the scraper, hopefully uploading only recent data.
        run_scraper.main([
            'run_as_e2e_test',
            '--num_runs', '1',
            '--rsync_host', 'ndt.iupui.mlab4.xxx08.measurement-lab.org',
            '--rsync_module', 'iupui_ndt',
            '--data_dir', '/scraper_data',
            '--metrics_port', str(EndToEndWithFakes.prometheus_port),
            '--max_uncompressed_size', '1024',
            '--data_buffer_threshold', '1023'])

        # Verify that cloud storage has been updated to 2.1 hours ago
        datastore_client = datastore.Client()
        key = datastore_client.key(
            'dropboxrsyncaddress',
            'rsync://ndt.iupui.mlab4.xxx08.measurement-lab.org'
            ':7999/iupui_ndt')
        value = datastore_client.get(key)
        time_since_epoch = scraper.datetime_to_epoch(now)
        self.assertLess(value['maxrawfilemtimearchived'], time_since_epoch)

        # Verify that the storage service received one file
        tgzfiles = os.listdir(self.cloud_upload_dir)
        self.assertEqual(len(tgzfiles), 1) 
Example 33
Project: scraper   Author: m-lab   File: scraper.py    Apache License 2.0 5 votes vote down vote up
def init(args):
    """Initialize the scraper library.

    The discovery interface means that the contents of some libraries is
    determined at runtime.  Also, applications need to be authorized to use the
    necessary services.  This performs both library initialization as well as
    application authorization.

    Returns:
        A (rsync_url, SyncStatus, destination_dir, storage_service) tuple.
    """
    rsync_url = 'rsync://{}:{}/{}'.format(args.rsync_host, args.rsync_port,
                                          args.rsync_module)
    # Set up logging
    logging.basicConfig(
        level=logging.INFO,
        format='[%(asctime)s %(levelname)s %(filename)s:%(lineno)d ' +
        rsync_url + '] %(message)s')
    logging.info('Scraping from %s, putting the results in %s', rsync_url,
                 args.bucket)

    # Authorize this application to use Google APIs.
    creds = gce.AppAssertionCredentials()

    # Set up cloud datastore and its dependencies
    datastore_service = cloud_datastore.Client(
        namespace=args.datastore_namespace)
    status = SyncStatus(datastore_service, rsync_url)
    # Mirror scraper log lines into the datastore status record.
    logging.getLogger().addHandler(SyncStatusLogHandler(status))

    # Set up cloud storage
    storage_service = apiclient.discovery.build(
        'storage', 'v1', credentials=creds)

    # If the destination directory does not exist, make it exist.
    destination = os.path.join(args.data_dir, args.rsync_host,
                               args.rsync_module)
    if not os.path.isdir(destination):
        os.makedirs(destination)
    return (rsync_url, status, destination, storage_service)


# How long ago should a file have been last edited before we should consider
# downloading it. 
Example 34
Project: openmic-annotator   Author: cosmir   File: database.py    MIT License 5 votes vote down vote up
def _client(self):
    """Construct a fresh datastore client for this database's project."""
    return datastore.Client(self.project)
Example 35
Project: findopendata   Author: findopendata   File: settings.py    Apache License 2.0 5 votes vote down vote up
def from_datastore(kind):
    """Return the first entity of `kind` from Cloud Datastore as a dict.

    Args:
        kind: the Datastore kind to query.

    Raises:
        ValueError: if no entity of the given kind exists.
    """
    client = datastore.Client()
    entities = list(client.query(kind=kind).fetch(limit=1))
    # Empty-container truthiness instead of len(...) == 0.
    if not entities:
        raise ValueError("Cloud Datastore has no entities with kind {}".format(
            kind))
    return dict(entities[0])
Example 36
Project: pulse-data   Author: Recidiviz   File: environment.py    GNU General Public License v3.0 5 votes vote down vote up
def get_datastore_client() -> datastore.Client:
    """Return a datastore client, working around an emulator bug.

    When running against the datastore emulator we must pass `_http`
    explicitly due to a bug in the datastore client.
    See: https://github.com/googleapis/google-cloud-python/issues/5738
    """
    emulated = bool(os.environ.get(environment_vars.GCD_HOST))
    if emulated:
        return datastore.Client(_http=requests.Session)
    return datastore.Client()
Example 37
Project: turbinia   Author: google   File: client.py    Apache License 2.0 5 votes vote down vote up
def __init__(self, jobs_blacklist=None, jobs_whitelist=None):
    """Initialization for PSQ Worker.

    Args:
      jobs_blacklist (Optional[list[str]]): Jobs we will exclude from running
      jobs_whitelist (Optional[list[str]]): The only Jobs we will include to run

    Raises:
      TurbiniaException: If the PSQ Queue cannot be created.
    """
    config.LoadConfig()
    # PSQ runs on top of pub/sub, with task state stored in Cloud Datastore.
    psq_publisher = pubsub.PublisherClient()
    psq_subscriber = pubsub.SubscriberClient()
    datastore_client = datastore.Client(project=config.TURBINIA_PROJECT)
    try:
      self.psq = psq.Queue(
          psq_publisher, psq_subscriber, config.TURBINIA_PROJECT,
          name=config.PSQ_TOPIC, storage=psq.DatastoreStorage(datastore_client))
    except exceptions.GoogleCloudError as e:
      msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
      log.error(msg)
      raise TurbiniaException(msg)

    # Deregister jobs from blacklist/whitelist.
    disabled_jobs = list(config.DISABLED_JOBS) if config.DISABLED_JOBS else []
    job_manager.JobsManager.DeregisterJobs(jobs_blacklist, jobs_whitelist)
    if disabled_jobs:
      log.info(
          'Disabling jobs that were configured to be disabled in the '
          'config file: {0:s}'.format(', '.join(disabled_jobs)))
      job_manager.JobsManager.DeregisterJobs(jobs_blacklist=disabled_jobs)

    # Check for valid dependencies/directories.
    check_dependencies(config.DEPENDENCIES)
    check_directory(config.MOUNT_DIR_PREFIX)
    check_directory(config.OUTPUT_DIR)
    check_directory(config.TMP_DIR)

    log.info('Starting PSQ listener on queue {0:s}'.format(self.psq.name))
    self.worker = psq.Worker(queue=self.psq) 
Example 38
Project: turbinia   Author: google   File: task_manager.py    Apache License 2.0 5 votes vote down vote up
def _backend_setup(self, server=True, *args, **kwargs):
    """Set up pub/sub and the PSQ queue backing the task manager.

    Args:
      server (bool): Whether this is the client or a server

    Raises:
      TurbiniaException: When there are errors creating PSQ Queue
    """

    log.debug(
        'Setting up PSQ Task Manager requirements on project {0:s}'.format(
            config.TURBINIA_PROJECT))
    self.server_pubsub = turbinia_pubsub.TurbiniaPubSub(config.PUBSUB_TOPIC)
    # Servers consume messages; clients publish them.
    if server:
      self.server_pubsub.setup_subscriber()
    else:
      self.server_pubsub.setup_publisher()
    psq_publisher = pubsub.PublisherClient()
    psq_subscriber = pubsub.SubscriberClient()
    # PSQ stores task state in Cloud Datastore.
    datastore_client = datastore.Client(project=config.TURBINIA_PROJECT)
    try:
      self.psq = psq.Queue(
          psq_publisher, psq_subscriber, config.TURBINIA_PROJECT,
          name=config.PSQ_TOPIC, storage=psq.DatastoreStorage(datastore_client))
    except exceptions.GoogleCloudError as e:
      msg = 'Error creating PSQ Queue: {0:s}'.format(str(e))
      log.error(msg)
      raise turbinia.TurbiniaException(msg) 
Example 39
Project: turbinia   Author: google   File: state_manager.py    Apache License 2.0 5 votes vote down vote up
def __init__(self):
    """Load config and connect a datastore client, with a helpful auth error."""
    config.LoadConfig()
    try:
        self.client = datastore.Client(project=config.TURBINIA_PROJECT)
    except EnvironmentError as e:
        # Most common cause: no application-default credentials configured.
        message = (
            'Could not create Datastore client: {0!s}\n'
            'Have you run $ gcloud auth application-default login?'.format(e))
        raise TurbiniaException(message)
Example 40
Project: myplace   Author: vinihcampos   File: main.py    MIT License 5 votes vote down vote up
def create_client(project_id):
    """Return a Cloud Datastore client bound to `project_id`."""
    return datastore.Client(project_id)
Example 41
Project: arXie-Bot   Author: thundergolfer   File: accounts.py    MIT License 5 votes vote down vote up
def update_with_user(self, team, slack_user, username, pw):
        """Store `username`/`pw` for `slack_user` under the team's namespace."""
        ds = datastore.Client(self.project_id, namespace=team)

        # One 'Login' entity per slack user holds the credentials.
        login_key = ds.key('Login', slack_user)
        record = datastore.Entity(key=login_key)
        record.update({
            'username': username,
            'password': pw,
        })

        ds.put(record)
Example 42
Project: arXie-Bot   Author: thundergolfer   File: accounts.py    MIT License 5 votes vote down vote up
def get_user(self, team, slack_user):
        """Return (username, password) for `slack_user`, or (None, None) if absent."""
        ds = datastore.Client(self.project_id, namespace=team)
        record = ds.get(ds.key('Login', slack_user))

        if not record:
            return None, None
        return record['username'], record['password']
Example 43
Project: python-docs-samples   Author: GoogleCloudPlatform   File: snippets.py    Apache License 2.0 5 votes vote down vote up
def main(project_id):
    # Run every example snippet in this module against a shared client and
    # pretty-print each result.  NOTE: iteritems() makes this Python 2 only.
    client = datastore.Client(project_id)

    for name, function in globals().iteritems():
        # Skip non-callables and the entry point itself.
        if name in ('main', 'defaultdict') or not callable(function):
            continue

        print(name)
        pprint(function(client))
        print('\n-----------------\n') 
Example 44
Project: python-docs-samples   Author: GoogleCloudPlatform   File: tasks.py    Apache License 2.0 5 votes vote down vote up
def create_client(project_id):
    """Build a Cloud Datastore client for the given project."""
    return datastore.Client(project_id)
# [END datastore_build_service]


# [START datastore_add_entity] 
Example 45
Project: python-docs-samples   Author: GoogleCloudPlatform   File: tasks_test.py    Apache License 2.0 5 votes vote down vote up
def client():
    """Fixture yielding a datastore client; deletes Task entities on teardown."""
    ds = datastore.Client(PROJECT)

    yield ds

    # Delete anything created during the test.
    with ds.batch():
        ds.delete_multi(
            [task.key for task in ds.query(kind='Task').fetch()])
Example 46
Project: python-docs-samples   Author: GoogleCloudPlatform   File: main.py    Apache License 2.0 5 votes vote down vote up
def index():
    """Record the visitor's truncated IP in Datastore and list the last 10 visits."""
    ds = datastore.Client()

    user_ip = request.remote_addr

    # Keep only the first two octets of the IP address.
    separator = ':' if is_ipv6(user_ip) else '.'
    user_ip = separator.join(user_ip.split(separator)[:2])

    # Store this visit with a timestamp.
    visit = datastore.Entity(key=ds.key('visit'))
    visit.update({
        'user_ip': user_ip,
        'timestamp': datetime.datetime.utcnow()
    })
    ds.put(visit)

    # Pull the ten most recent visits, newest first.
    query = ds.query(kind='visit', order=('-timestamp',))
    results = [
        'Time: {timestamp} Addr: {user_ip}'.format(**x)
        for x in query.fetch(limit=10)]

    output = 'Last 10 visits:\n{}'.format('\n'.join(results))

    return output, 200, {'Content-Type': 'text/plain; charset=utf-8'}
# [END gae_flex_datastore_app] 
Example 47
Project: python-docs-samples   Author: GoogleCloudPlatform   File: main.py    Apache License 2.0 5 votes vote down vote up
def homepage():
    """Render the homepage with all Faces entities fetched from Datastore."""
    ds = datastore.Client()

    # Fetch the stored per-photo information.
    faces_query = ds.query(kind='Faces')
    image_entities = list(faces_query.fetch())

    # Pass the entities into the Jinja2 template for rendering.
    return render_template('homepage.html', image_entities=image_entities)
Example 48
Project: realtime-embeddings-matching   Author: GoogleCloudPlatform   File: lookup.py    Apache License 2.0 5 votes vote down vote up
def __init__(self, kind):
    """Create a lookup utility bound to a single Datastore kind."""
    logging.info('Initialising datastore lookup utility...')
    # One shared client instance serves all lookups.
    self.client = datastore.Client()
    # Remember the entity kind this helper queries against.
    self.kind = kind
    logging.info('Datastore lookup utility initialised.')
Example 49
Project: eclipse2017   Author: google   File: location_test.py    Apache License 2.0 5 votes vote down vote up
def setUp(self):
        """Build the admin app, a Flask test client, and a clean test Datastore."""
        self.app = Eclipse2017AdminApp(config.PROJECT_ID, sk.FLASK_SESSION_ENC_KEY,
                                       sk.GOOGLE_OAUTH2_CLIENT_ID,
                                       sk.GOOGLE_OAUTH2_CLIENT_SECRET)

        self.test_client = self.app.test_client()
        # Safety guard: never wipe a production project's datastore.
        if 'prod' in config.PROJECT_ID:
            raise RuntimeError('Cowardly refusing to delete prod datastore')
        self.datastore_client = datastore.Client(config.PROJECT_ID)


        # Start each test from an empty datastore.
        test_common._clear_data(self.datastore_client)
Example 50
Project: eclipse2017   Author: google   File: count_test.py    Apache License 2.0 5 votes vote down vote up
def setUp(self):
        """Build the admin app, a Flask test client, and a clean test Datastore."""
        self.app = Eclipse2017AdminApp(config.PROJECT_ID, sk.FLASK_SESSION_ENC_KEY,
                                       sk.GOOGLE_OAUTH2_CLIENT_ID,
                                       sk.GOOGLE_OAUTH2_CLIENT_SECRET)

        self.test_client = self.app.test_client()
        # Safety guard: never wipe a production project's datastore.
        if 'prod' in config.PROJECT_ID:
            raise RuntimeError('Cowardly refusing to delete prod datastore')
        self.datastore_client = datastore.Client(config.PROJECT_ID)
        # Start each test from an empty datastore.
        test_common._clear_data(self.datastore_client)
Example 51
Project: eclipse2017   Author: google   File: main.py    Apache License 2.0 5 votes vote down vote up
def main(sleep_time=constants.MOVIE_DAEMON_SLEEP_TIME_S):
    """Daemon loop: scan for pre-processed photos, assemble and upload movies.

    Runs forever, sleeping ``sleep_time`` seconds between passes.
    """
    logging.basicConfig(level=logging.INFO,
                        format=constants.LOG_FMT_S_THREADED)
    logging.info("Reading images from gs://" + config.GCS_PROCESSED_PHOTOS_BUCKET)
    logging.info("Writing movies to gs://" + config.GCS_MOVIE_BUCKET)

    # Build storage and datastore clients for the current project,
    # sharing one set of service-account credentials.
    credentials = sa.get_credentials()
    datastore_client = datastore.Client(project=config.PROJECT_ID, \
                                        credentials=credentials)

    storage_client = storage.client.Client(project=config.PROJECT_ID, \
                                           credentials=credentials)

    # Create new instance of movie pipeline w/ datastore & GCS
    movie_pipeline = pipeline.Pipeline(datastore_client, storage_client)
    movie_stats = pipeline_stats.Pipeline_Stats(datastore_client, storage_client)

    while True:

        # Get all newly pre-processed images
        fnames = movie_pipeline.scan()

        # Stitch new images into new/existing megamovie(s)
        if fnames:
            # Separate files based on clusters of photos.
            # NOTE(review): `clusters` is computed but never used below —
            # confirm whether assemble() was meant to receive it.
            clusters = movie_stats.get_clusters(fnames)

            # Concatenates frames into movies and stores in constants.MOVIE_DATA_DIR
            files_in_movie = movie_pipeline.assemble(fnames)

            # Upload movie file(s)
            if files_in_movie:
                movie_pipeline.upload(files_in_movie)

        # Allow files to accumulate before taking our next pass
        time.sleep(sleep_time)
Example 52
Project: eclipse2017   Author: google   File: site_test.py    Apache License 2.0 5 votes vote down vote up
def setUp(self):
        """Create a Datastore client for the test project before each test."""
        self.client = datastore.Client(PROJECT_ID)
Example 53
Project: eclipse2017   Author: google   File: profile_test.py    Apache License 2.0 5 votes vote down vote up
def setUp(self):
        """Build the user-facing app, a Flask test client, and a clean Datastore."""
        self.app = Eclipse2017App(config.PROJECT_ID, sk.FLASK_SESSION_ENC_KEY,
                                  sk.GOOGLE_OAUTH2_CLIENT_ID,
                                  sk.GOOGLE_OAUTH2_CLIENT_SECRET)

        self.test_client = self.app.test_client()
        # Safety guard: never wipe a production project's datastore.
        if 'prod' in config.PROJECT_ID:
            raise RuntimeError('Cowardly refusing to delete prod datastore')
        self.datastore_client = datastore.Client(config.PROJECT_ID)
        # Start each test from an empty datastore.
        test_common._clear_data(self.datastore_client)
Example 54
Project: eclipse2017   Author: google   File: extract_metadata_from_datastore.py    Apache License 2.0 5 votes vote down vote up
def main():
    """Export all Photo and User entities from Datastore into pickle files.

    Output paths come from the command-line arguments
    (``photo_metadata`` and ``user_metadata``).
    """
    args = get_arguments()
    client = datastore.Client(project=args.project_id)

    # Materialize each query fully before pickling.
    photos = list(client.query(kind='Photo').fetch())
    # Context managers close the files deterministically; the original
    # leaked the handle returned by open() inside pickle.dump().
    with open(args.photo_metadata, "wb") as f:
        pickle.dump(photos, f)

    users = list(client.query(kind='User').fetch())
    with open(args.user_metadata, "wb") as f:
        pickle.dump(users, f)
Example 55
Project: eclipse2017   Author: google   File: update_user_role.py    Apache License 2.0 5 votes vote down vote up
def main():
    args  = get_arguments()

    client = datastore.Client(args.project_id)

    user_ids = []
    if args.user_id_file:
        f = open(args.user_id_file)
        user_ids.extend([line.strip() for line in f.readlines()])
    if args.user_id:
        user_ids.append(args.user_id)

    for user_id in user_ids:
        key = client.key("UserRole", user_id)
        entity = client.get(key)

        if entity:
            roles = set(entity['roles'])
            print "original roles:", roles
            for role in args.add_roles:
                roles.add(unicode(role, 'utf8'))
            for role in args.remove_roles:
                if role in roles:
                    roles.remove(unicode(role, 'utf8'))

            roles = list(roles)
            print "new roles:", roles
            entity['roles'] = roles
            client.put(entity)
        else:
            print "No such user:", user_id 
Example 56
Project: eclipse2017   Author: google   File: print_processed_photos.py    Apache License 2.0 5 votes vote down vote up
def main():
    args  = get_arguments()

    client = datastore.Client(project=args.project_id)

    query = client.query(kind="ProcessedImage")
    entities = query.fetch()
    for entity in entities:
        print "ProcessedImage:", entity 
Example 57
Project: eclipse2017   Author: google   File: print_photos.py    Apache License 2.0 5 votes vote down vote up
def main():
    args  = get_arguments()

    client = datastore.Client(project=args.project_id)

    query = client.query(kind="Photo")
    entities = query.fetch()
    for entity in entities:
        print "Photo id (hashed):", entity 
Example 58
Project: eclipse2017   Author: google   File: repair_missing_gps.py    Apache License 2.0 5 votes vote down vote up
def main():
    args  = get_arguments()

    client = datastore.Client(args.project_id)
    query = client.query(kind="Photo")
    entities = query.fetch()
    upload_session_id = args.upload_session_id
    assert upload_session_id is not None
    filters = []
    filters.append(('upload_session_id', '=', upload_session_id))

    query = client.query(kind="Photo", filters=filters)
    entities = query.fetch()
    results = repair_missing_gps.partition_gps(entities)
    complete_images, incomplete_images = results
    batch = client.batch()
    batch.begin()
    for complete_image in complete_images:
        batch.put(complete_image)

    if len(complete_images) == 1 and len(incomplete_images) > 1:
        print "Repairing incomplete images"
        complete_image = complete_images[0]
        for incomplete_image in incomplete_images:
            repaired_image = repair_missing_gps.update_incomplete(complete_image, incomplete_image)
            print "Repaired", repaired_image.key.name, "from", complete_image.key.name
            batch.put(repaired_image)

    batch.commit() 
Example 59
Project: eclipse2017   Author: google   File: movie_tool.py    Apache License 2.0 5 votes vote down vote up
def main():
    logging.basicConfig(level=logging.INFO,
                        format=constants.LOG_FMT_S_THREADED)
    args  = get_arguments()
    datastore_client = datastore.Client(project=args.project_id)
    storage_client = storage.client.Client(project=args.project_id)
    movie_pipeline = movie.pipeline.Pipeline(datastore_client, storage_client)
    fnames = movie_pipeline.scan()
    movie_pipeline.download(fnames)
    print "Rendering %d frames" % len(fnames)
    files_in_movie = movie_pipeline.assemble(fnames)
    print files_in_movie
    movie_pipeline.upload(files_in_movie) 
Example 60
Project: eclipse2017   Author: google   File: print_users.py    Apache License 2.0 5 votes vote down vote up
def main():
    args  = get_arguments()

    client = datastore.Client(project=args.project_id)

    query = client.query(kind="User")

    entities = query.fetch()
    for entity in entities:
        print "User id (hashed):", entity.key.name
        if 'badges' in entity:
            print "Badges ", entity['badges']
        key = client.key("UserRole", entity.key.name)
        entity = client.get(key)
        print "\troles:", entity['roles'] 
Example 61
Project: eclipse2017   Author: google   File: add_movie.py    Apache License 2.0 5 votes vote down vote up
def main():
    """Create a Movie entity in Datastore and emit a JS snippet with its id."""
    args = get_arguments()
    client = datastore.Client(project=args.project_id)

    # key(kind) with no name yields a partial key; Datastore assigns an id.
    key = client.key("Movie")
    # The constructor already sets entity.key, so the original's extra
    # `entity.key = key` assignment was redundant and is dropped.
    entity = datastore.Entity(key=key)
    entity.update({'id': args.id,
                   'time': datetime.datetime.utcnow()})
    client.put(entity)

    # Context manager closes the file even if the write raises.
    with open(args.outfile, "w") as f:
        f.write("var movie_id = '%s';\n" % args.id)
Example 62
Project: eclipse2017   Author: google   File: create_test_users.py    Apache License 2.0 5 votes vote down vote up
def main():
  """Create `count` random test users placed relative to the eclipse polygon."""
  args = get_arguments()

  eclipse_poly = get_polygon()

  client = datastore.Client(args.project_id)

  # range() starts at 0 by default; the unused `user =` binding is dropped.
  for _ in range(args.count[0]):
    create_user(client, str(random.randint(1, 10000000)), eclipse_poly)
Example 63
Project: poem   Author: shixing   File: google_datastore.py    MIT License 5 votes vote down vote up
def __init__(self):
        """Create the Datastore client this wrapper delegates to."""
        self.client = datastore.Client()
Example 64
Project: vishnu   Author: anomaly   File: util.py    Apache License 2.0 5 votes vote down vote up
def google_cloud_datastore_delete_expired_sessions(dormant_for=86400, limit=500):
    """
    Deletes expired sessions.
    A session is expired if its expires date is set and has passed or
    if it has not been accessed for a given period of time.

    :param dormant_for: seconds since last access to delete sessions, defaults to 24 hours.
    :type dormant_for: int
    :param limit: amount to delete in one call of the method, the maximum and default for this is the NDB fetch limit of 500
    :type limit: int
    :return: True when fewer than `limit` sessions were deleted (no more work expected).
    :rtype: bool
    """
    from vishnu.backend.client.google_cloud_datastore import TABLE_NAME
    from google.cloud import datastore
    from datetime import datetime
    from datetime import timedelta

    now = datetime.utcnow()
    last_accessed = now - timedelta(seconds=dormant_for)

    client = datastore.Client()

    # Sessions dormant for too long.
    accessed_query = client.query(kind=TABLE_NAME)
    accessed_query.add_filter("last_accessed", "<=", last_accessed)

    # Sessions whose explicit expiry has passed.
    expires_query = client.query(kind=TABLE_NAME)
    expires_query.add_filter("expires", "<=", now)

    # Union of both result sets.  A set gives O(1) de-duplication instead
    # of the original O(n) list-membership scans, while `keys` preserves
    # the original ordering for delete_multi.
    keys = []
    seen = set()
    for result in accessed_query.fetch(limit=limit):
        if result.key not in seen:
            seen.add(result.key)
            keys.append(result.key)
    for result in expires_query.fetch(limit=limit):
        if result.key not in seen:
            seen.add(result.key)
            keys.append(result.key)

    client.delete_multi(keys)

    return len(keys) < limit
Example 65
Project: vishnu   Author: anomaly   File: google_cloud_datastore.py    Apache License 2.0 5 votes vote down vote up
def __init__(self, sid):
        """Open a Datastore-backed session client for session id `sid`.

        Delegates common setup to the base class, then builds the
        Datastore client and the entity key for this session.
        """
        super(Client, self).__init__(sid)

        self._client = datastore.Client()
        # Sessions are stored under TABLE_NAME, keyed by the session id.
        self._key = self._client.key(TABLE_NAME, self._sid)
Example 66
Project: cloud-opensource-python   Author: GoogleCloudPlatform   File: datastore_cache.py    Apache License 2.0 5 votes vote down vote up
def __init__(self):
        """Create the Datastore client backing this cache."""
        self._datastore_client = datastore.Client()
Example 67
Project: slack-standup-app   Author: SpikeLab-CL   File: datastore.py    Do What The F*ck You Want To Public License 5 votes vote down vote up
def __init__(self, credentials):
        """Datastore instance wrapper.

        Arguments:
            credentials: google.auth credentials; for development you can
                use an App Engine service account file.
        """
        self.client = datastore.Client(credentials=credentials)
Example 68
Project: example_dataproc_twitter   Author: WillianFuks   File: datastore.py    MIT License 5 votes vote down vote up
def __init__(self, credentials=None):
        """Build a Datastore client, passing credentials only when supplied."""
        if credentials:
            self.client = ds.Client(credentials=credentials)
        else:
            # Fall back to application-default credentials.
            self.client = ds.Client()
Example 69
Project: example_dataproc_twitter   Author: WillianFuks   File: test_build_datastore_template.py    MIT License 5 votes vote down vote up
def test_main(self, config_mock):
        """System test: exporter.main() writes similarity entities to Datastore.

        NOTE(review): Python 2 code — `map()` must return a list for the
        `keys[0]` indexing at the end; confirm before running on Python 3.
        """
        kind = 'unittest-example-dataproc'
        # Point the mocked config at the test fixtures and a test-only kind.
        config2['input'] = 'tests/system/data/dataflow/*.json.gz'
        config2['similarities_cap'] = 5
        config2['kind'] = kind
        config_mock.items.return_value = config2.items()
        # Expected top-N similar items and scores per sku.
        expected = {'sku0': {'items': ['sku8', 'sku7', 'sku6', 'sku5', 'sku4'],
            'scores': [0.8, 0.7, 0.6, 0.5, 0.4]},
                    'sku1': {'items': ['sku0', 'sku2', 'sku3', 'sku4', 'sku5'],
            'scores': [0.8, 0.7, 0.6, 0.5, 0.4]},
                    'sku2': {'items': ['sku2', 'sku0'],
            'scores': [0.7, 0.2]}}
        dsc = ds.Client()
        keys = map(lambda x: dsc.key(kind, x), ['sku0', 'sku1', 'sku2'])

        # Run the exporter under test.
        exporter.main()
 
        # Verify each exported entity matches expectations.
        ds_keys = dsc.get_multi(keys)
        for key in ds_keys:
            name = key.key.name
            self.assertEqual(expected[name]['items'], key['items'])
            self.assertEqual(expected[name]['scores'], key['scores'])

        # Clean up and verify the deletion took effect.
        dsc.delete_multi(keys)
        key0 = dsc.get(keys[0])
        self.assertEqual(key0, None)
Example 70
Project: getting-started-python   Author: GoogleCloudPlatform   File: model_datastore.py    Apache License 2.0 5 votes vote down vote up
def get_client():
    """Return a Datastore client bound to the Flask app's configured project."""
    return datastore.Client(current_app.config['PROJECT_ID'])
Example 71
Project: datastorm   Author: JavierLuna   File: datastorm.py    MIT License 5 votes vote down vote up
def client(self):
        """Build a Datastore client from this instance's connection settings."""
        # _http overrides the transport; presumably a requests-style
        # session object — confirm against google-cloud-datastore docs.
        return datastore.Client(project=self.project, namespace=self.namespace, credentials=self.credentials,
                                _http=self._http)
Example 72
Project: python-docs-samples   Author: GoogleCloudPlatform   File: blog.py    Apache License 2.0 4 votes vote down vote up
def main(project_id):
    """Demo walkthrough: create users and posts, repost, list, then clean up."""
    client = datastore.Client(project_id)

    print("Creating users...")
    create_user(client, 'tonystark',
                {'name': 'Tony Stark', 'location': 'Stark Island'})
    create_user(client, 'peterparker',
                {'name': 'Peter Parker', 'location': 'New York City'})

    print("Creating posts...")
    for i in range(1, 10):
        create_post(client, 'tonystark', "Tony's post #{0}".format(i))
        create_post(client, 'peterparker', "Peter's post #{0}".format(i))

    print("Re-posting tony's post as peter...")

    # Bind the first of Tony's posts (iteration order defines "first").
    for original_post in list_posts_by_user(client, 'tonystark'):
        break

    repost(client, 'peterparker', original_post)

    print('Posts by tonystark:')
    for post in list_posts_by_user(client, 'tonystark'):
        print("> {0} on {1}".format(post['content'], post['created']))

    print('Posts by peterparker:')
    for post in list_posts_by_user(client, 'peterparker'):
        print("> {0} on {1}".format(post['content'], post['created']))

    print('Posts by everyone:')
    for post in list_all_posts(client):
        print("> {0} on {1}".format(post['content'], post['created']))

    print('Cleaning up...')
    # Remove the two demo users, then every remaining post.
    user_keys = [
        path_to_key(client, 'tonystark.user'),
        path_to_key(client, 'peterparker.user')
    ]
    client.delete_multi(user_keys)
    client.delete_multi([post.key for post in list_all_posts(client)])
Example 73
Project: eclipse2017   Author: google   File: rename_photo_table.py    Apache License 2.0 4 votes vote down vote up
def _fetch_all(query, page_size=1000):
    """Exhaust `query` page by page and return every result as one list."""
    cursor = None
    results = []
    while True:
        entities = query.fetch(start_cursor=cursor, limit=page_size)
        page = list(entities)
        results.extend(page)
        # A short page means the query is exhausted.
        if len(page) < page_size:
            break
        cursor = entities.next_page_token
    return results


def _delete_in_batches(client, entities, batch_size=500):
    """Delete the entities' keys in Datastore-sized mutation batches."""
    for chunk in chunks(entities, batch_size):
        batch = client.batch()
        batch.begin()
        for entity in chunk:
            batch.delete(entity.key)
        batch.commit()


def main():
    """Mirror the Photo table into PhotoVolunteerTest.

    Clears PhotoVolunteerTest, then copies every Photo entity into it
    under the same key name.  The original duplicated the pagination and
    batching code inline; it is factored into helpers here.
    """
    args = get_arguments()

    client = datastore.Client(project=args.project_id)

    # Wipe the destination table first (keys_only keeps the reads cheap).
    query = client.query(kind="PhotoVolunteerTest")
    query.keys_only()
    _delete_in_batches(client, _fetch_all(query))

    # Copy every entity from the source Photo table.
    photos = _fetch_all(client.query(kind="Photo"))
    for chunk in chunks(photos, 500):
        batch = client.batch()
        batch.begin()
        for photo in chunk:
            key = client.key("PhotoVolunteerTest", photo.key.name)
            entity = datastore.Entity(key=key)
            # Fix datetimes not roundtripping properly (they are missing tz info)
            entity.update(photo)
            batch.put(entity)
        batch.commit()