Python google.appengine.api.app_identity.get_default_gcs_bucket_name() Examples

The following are 14 code examples of google.appengine.api.app_identity.get_default_gcs_bucket_name(). Each example is taken from an open-source project; the source file and license are noted above each snippet. You may also want to check out all available functions/classes of the module google.appengine.api.app_identity.
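Most of the examples below follow the same basic pattern: ask App Engine for the application's default Google Cloud Storage bucket, optionally letting an environment variable override it. A minimal sketch of that pattern, assuming a BUCKET_NAME environment variable as in Examples #8, #11, and #12:

import os

from google.appengine.api import app_identity


def get_bucket_name():
    # Prefer an explicitly configured bucket; otherwise fall back to the
    # app's default GCS bucket (typically "<app-id>.appspot.com").
    return os.environ.get(
        'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())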
Example #1
Source File: main.py    From python-docs-samples with Apache License 2.0
def get(self):
        # Get the default Cloud Storage Bucket name and create a file name for
        # the object in Cloud Storage.
        bucket = app_identity.get_default_gcs_bucket_name()

        # Cloud Storage file names are in the format /bucket/object.
        filename = '/{}/blobstore_serving_demo'.format(bucket)

        # Create a file in Google Cloud Storage and write something to it.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # In order to read the contents of the file using the Blobstore API,
        # you must create a blob_key from the Cloud Storage file name.
        # Blobstore expects the filename to be in the format of:
        # /gs/bucket/object
        blobstore_filename = '/gs{}'.format(filename)
        blob_key = blobstore.create_gs_key(blobstore_filename)

        # BlobstoreDownloadHandler serves the file from Google Cloud Storage to
        # your computer using blob_key.
        self.send_blob(blob_key) 
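This get() method is only part of a request handler; send_blob() comes from BlobstoreDownloadHandler. A sketch of the scaffolding the snippet assumes (the class name and route are hypothetical; the imports follow the App Engine Python 2.7 runtime):

import cloudstorage
import webapp2

from google.appengine.api import app_identity
from google.appengine.ext import blobstore
from google.appengine.ext.webapp import blobstore_handlers


class GCSServingHandler(blobstore_handlers.BlobstoreDownloadHandler):
    def get(self):
        # ... body as in Example #1 ...
        pass


# The route below is hypothetical.
app = webapp2.WSGIApplication([('/blobstore/gcs_serving', GCSServingHandler)])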
Example #2
Source File: mapreduce_pipeline.py    From python-compat-runtime with Apache License 2.0
def run(self,
          job_name,
          mapper_spec,
          reducer_spec,
          input_reader_spec,
          output_writer_spec=None,
          mapper_params=None,
          reducer_params=None,
          shards=None,
          combiner_spec=None):
    # Check that you have a bucket_name set in the mapper_params and set it
    # to the default if not.
    if mapper_params.get("bucket_name") is None:
      try:
        mapper_params["bucket_name"] = (
            app_identity.get_default_gcs_bucket_name())
      except Exception as e:
        raise errors.Error("Unable to get the GCS default bucket name. "
                           "Check to see that GCS is properly activated. "
                           + str(e))
Example #3
Source File: main.py    From appengine-mapreduce with Apache License 2.0
def get(self):
    user = users.get_current_user()
    username = user.nickname()

    first = FileMetadata.getFirstKeyForUser(username)
    last = FileMetadata.getLastKeyForUser(username)

    q = FileMetadata.all()
    q.filter("__key__ >", first)
    q.filter("__key__ < ", last)
    results = q.fetch(10)

    items = results
    length = len(items)

    bucket_name = app_identity.get_default_gcs_bucket_name()
    upload_url = blobstore.create_upload_url("/upload",
                                             gs_bucket_name=bucket_name)

    self.response.out.write(self.template_env.get_template("index.html").render(
        {"username": username,
         "items": items,
         "length": length,
         "upload_url": upload_url})) 
Example #4
Source File: main.py    From appengine-mapreduce with Apache License 2.0
def run(self, filekey, blobkey):
    logging.debug("filename is %s" % filekey)
    bucket_name = app_identity.get_default_gcs_bucket_name()
    output = yield mapreduce_pipeline.MapreducePipeline(
        "word_count",
        "main.word_count_map",
        "main.word_count_reduce",
        "mapreduce.input_readers.BlobstoreZipInputReader",
        "mapreduce.output_writers.GoogleCloudStorageOutputWriter",
        mapper_params={
            "blob_key": blobkey,
        },
        reducer_params={
            "output_writer": {
                "bucket_name": bucket_name,
                "content_type": "text/plain",
            }
        },
        shards=16)
    yield StoreOutput("WordCount", filekey, output) 
Example #5
Source File: main.py    From appengine-mapreduce with Apache License 2.0
def run(self, filekey, blobkey):
    bucket_name = app_identity.get_default_gcs_bucket_name()
    output = yield mapreduce_pipeline.MapreducePipeline(
        "index",
        "main.index_map",
        "main.index_reduce",
        "mapreduce.input_readers.BlobstoreZipInputReader",
        "mapreduce.output_writers.GoogleCloudStorageOutputWriter",
        mapper_params={
            "blob_key": blobkey,
        },
        reducer_params={
            "output_writer": {
                "bucket_name": bucket_name,
                "content_type": "text/plain",
            }
        },
        shards=16)
    yield StoreOutput("Index", filekey, output) 
Example #6
Source File: main.py    From appengine-mapreduce with Apache License 2.0
def run(self, filekey, blobkey):
    bucket_name = app_identity.get_default_gcs_bucket_name()
    output = yield mapreduce_pipeline.MapreducePipeline(
        "phrases",
        "main.phrases_map",
        "main.phrases_reduce",
        "mapreduce.input_readers.BlobstoreZipInputReader",
        "mapreduce.output_writers.GoogleCloudStorageOutputWriter",
        mapper_params={
            "blob_key": blobkey,
        },
        reducer_params={
            "output_writer": {
                "bucket_name": bucket_name,
                "content_type": "text/plain",
            }
        },
        shards=16)
    yield StoreOutput("Phrases", filekey, output) 
Example #7
Source File: mapreduce_pipeline.py    From appengine-mapreduce with Apache License 2.0
def run(self,
          job_name,
          mapper_spec,
          reducer_spec,
          input_reader_spec,
          output_writer_spec=None,
          mapper_params=None,
          reducer_params=None,
          shards=None,
          combiner_spec=None):
    # Check that you have a bucket_name set in the mapper_params and set it
    # to the default if not.
    if mapper_params.get("bucket_name") is None:
      try:
        mapper_params["bucket_name"] = (
            app_identity.get_default_gcs_bucket_name())
      except Exception as e:
        raise errors.Error("Unable to get the GCS default bucket name. "
                           "Check to see that GCS is properly activated. "
                           + str(e)) 
Example #8
Source File: storage.py    From isthislegit with BSD 3-Clause "New" or "Revised" License
def __init__(self):
        self.bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name()) 
Example #9
Source File: filestore.py    From MyLife with MIT License
def _bucket_name():
	return app_identity.get_default_gcs_bucket_name() 
Example #10
Source File: main.py    From python-docs-samples with Apache License 2.0
def get(self):
        # Get the default Cloud Storage Bucket name and create a file name for
        # the object in Cloud Storage.
        bucket = app_identity.get_default_gcs_bucket_name()

        # Cloud Storage file names are in the format /bucket/object.
        filename = '/{}/blobstore_demo'.format(bucket)

        # Create a file in Google Cloud Storage and write something to it.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # In order to read the contents of the file using the Blobstore API,
        # you must create a blob_key from the Cloud Storage file name.
        # Blobstore expects the filename to be in the format of:
        # /gs/bucket/object
        blobstore_filename = '/gs{}'.format(filename)
        blob_key = blobstore.create_gs_key(blobstore_filename)

        # Read the file's contents using the Blobstore API.
        # The last two parameters specify the start and end index of bytes we
        # want to read.
        data = blobstore.fetch_data(blob_key, 0, 6)

        # Write the contents to the response.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(data)

        # Delete the file from Google Cloud Storage using the blob_key.
        blobstore.delete(blob_key)


Example #11
Source File: main.py    From python-docs-samples with Apache License 2.0
def get(self):
        bucket_name = os.environ.get(
            'BUCKET_NAME', app_identity.get_default_gcs_bucket_name())

        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(
            'Demo GCS Application running from Version: {}\n'.format(
                os.environ['CURRENT_VERSION_ID']))
        self.response.write('Using bucket name: {}\n\n'.format(bucket_name))

        bucket = '/' + bucket_name
        filename = bucket + '/demo-testfile'
        self.tmp_filenames_to_clean_up = []

        self.create_file(filename)
        self.response.write('\n\n')

        self.read_file(filename)
        self.response.write('\n\n')

        self.stat_file(filename)
        self.response.write('\n\n')

        self.create_files_for_list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket(bucket)
        self.response.write('\n\n')

        self.list_bucket_directory_mode(bucket)
        self.response.write('\n\n')

        self.delete_files()
        self.response.write('\n\nThe demo ran successfully!\n')

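Example #11 delegates to helper methods (create_file, read_file, stat_file, and so on) defined elsewhere in the sample. A sketch of what create_file plausibly looks like; RetryParams and the cloudstorage.open() keyword arguments are part of the GoogleAppEngineCloudStorageClient API, but the written content is an assumption:

def create_file(self, filename):
    # Write a small text file to GCS, retrying transient errors.
    write_retry_params = cloudstorage.RetryParams(backoff_factor=1.1)
    with cloudstorage.open(filename, 'w',
                           content_type='text/plain',
                           retry_params=write_retry_params) as filehandle:
        filehandle.write('abcde\n')  # content is an assumption
    self.tmp_filenames_to_clean_up.append(filename)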
Example #12
Source File: _handlers.py    From GAEPyPI with GNU General Public License v3.0
def get_storage(self):
        bucket_name = os.environ.get('BUCKET_NAME', app_identity.get_default_gcs_bucket_name())
        return GCStorage(bucket_name)


Example #13
Source File: cloud_storage.py    From personfinder with Apache License 2.0
def __init__(self):
        credentials = GoogleCredentials.get_application_default()
        self.service = build('storage', 'v1', credentials=credentials)
        self.bucket_name = (
            config.get('gcs_bucket_name') or
            app_identity.get_default_gcs_bucket_name()).encode('utf-8') 
Example #14
Source File: main.py    From python-docs-samples with Apache License 2.0
def get(self):
        # Get the default Cloud Storage Bucket name and create a file name for
        # the object in Cloud Storage.
        bucket = app_identity.get_default_gcs_bucket_name()

        # Cloud Storage file names are in the format /bucket/object.
        filename = '/{}/blobreader_demo'.format(bucket)

        # Create a file in Google Cloud Storage and write something to it.
        with cloudstorage.open(filename, 'w') as filehandle:
            filehandle.write('abcde\n')

        # In order to read the contents of the file using the Blobstore API,
        # you must create a blob_key from the Cloud Storage file name.
        # Blobstore expects the filename to be in the format of:
        # /gs/bucket/object
        blobstore_filename = '/gs{}'.format(filename)
        blob_key = blobstore.create_gs_key(blobstore_filename)

        # Instantiate a BlobReader for a given Blobstore blob_key.
        blob_reader = blobstore.BlobReader(blob_key)

        # Instantiate a BlobReader for a given Blobstore blob_key, setting the
        # buffer size to 1 MB.
        blob_reader = blobstore.BlobReader(blob_key, buffer_size=1048576)

        # Instantiate a BlobReader for a given Blobstore blob_key, setting the
        # initial read position.
        blob_reader = blobstore.BlobReader(blob_key, position=0)

        # Read the entire value into memory. This may take a while depending
        # on the size of the value and the size of the read buffer, and is not
        # recommended for large values.
        blob_reader_data = blob_reader.read()

        # Write the contents to the response.
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write(blob_reader_data)

        # Set the read position back to 0, then read and write 3 bytes.
        blob_reader.seek(0)
        blob_reader_data = blob_reader.read(3)
        self.response.write(blob_reader_data)
        self.response.write('\n')

        # Set the read position back to 0, then read and write one line (up to
        # and including a '\n' character) at a time.
        blob_reader.seek(0)
        for line in blob_reader:
            self.response.write(line)

        # Delete the file from Google Cloud Storage using the blob_key.
        blobstore.delete(blob_key)