Python googleapiclient.http.MediaFileUpload() Examples

The following are 24 code examples of googleapiclient.http.MediaFileUpload(), drawn from open-source projects. Each example notes the source file, project, and license it comes from. You may also want to look at the other available functions and classes of the googleapiclient.http module.
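
Before the project examples, here is a minimal, self-contained sketch of the pattern most of them follow: wrap a local file in MediaFileUpload, pass it as the media_body of an API request, and, for a resumable upload, drive it to completion with next_chunk(). The file name ('report.pdf'), MIME type, and credentials object below are illustrative assumptions rather than values taken from any one example.

from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload

def upload_to_drive(creds):
    # Build a Drive v3 client from an existing credentials object (assumed).
    service = build('drive', 'v3', credentials=creds)

    # Wrap the local file; resumable=True allows it to be sent in chunks.
    media = MediaFileUpload('report.pdf',
                            mimetype='application/pdf',
                            chunksize=1024 * 1024,
                            resumable=True)
    request = service.files().create(body={'name': 'report.pdf'},
                                     media_body=media,
                                     fields='id')

    # next_chunk() returns (status, response); response stays None until the
    # upload completes, and status reports intermediate progress.
    response = None
    while response is None:
        status, response = request.next_chunk()
        if status:
            print("Uploaded %d%%." % int(status.progress() * 100))
    return response.get('id')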
Example #1
Source File: bot.py    From Telegram-bot-Google-Drive with MIT License
def file_handler(update, context):
  """handles the uploaded files"""

  file = context.bot.getFile(update.message.document.file_id)
  file.download(update.message.document.file_name)

  doc = update.message.document

  service = build('drive', 'v3', credentials=getCreds(), cache_discovery=False)
  filename = doc.file_name

  metadata = {'name': filename}
  media = MediaFileUpload(filename, chunksize=1024 * 1024, mimetype=doc.mime_type,  resumable=True)
  request = service.files().create(body=metadata,
                                media_body=media)

  response = None
  while response is None:
    status, response = request.next_chunk()
    if status:
       print( "Uploaded %d%%." % int(status.progress() * 100))

  context.bot.send_message(chat_id=update.effective_chat.id, text="✅ File uploaded!") 
Example #2
Source File: utils.py    From drive-cli with MIT License
def upload_file(name, path, pid):
    token = os.path.join(dirpath, 'token.json')
    store = file.Storage(token)
    creds = store.get()
    service = build('drive', 'v3', http=creds.authorize(Http()))
    file_mimeType = identify_mimetype(name)
    file_metadata = {
        'name': name,
        'parents': [pid],
        'mimeType': file_mimeType
    }
    media = MediaFileUpload(path, mimetype=file_mimeType)
    new_file = service.files().create(body=file_metadata,
                                      media_body=media,
                                      fields='id').execute()
    data = drive_data()
    data[path] = {'id': new_file['id'], 'time': time.time()}
    drive_data(data)
    click.secho("uploaded " + name, fg='yellow')
    return new_file 
Example #3
Source File: googledrive.py    From packtpub-crawler with MIT License
def __insert_file(self):
        print '[+] uploading file...'
        media_body = MediaFileUpload(
            self.info['path'], mimetype=self.info['mime_type'], resumable=True)
        body = {
            'title': self.info['name'],
            'description': 'uploaded with packtpub-crawler',
            'mimeType': self.info['mime_type'],
            'parents': [{'id': self.__get_folder()}]
        }
        file = self.__googledrive_service.files().insert(body=body, media_body=media_body).execute()
        # log_dict(file)

        print '[+] updating file permissions...'
        permissions = {
            'role': 'reader',
            'type': 'anyone',
            'value': self.__config.get('googledrive', 'googledrive.gmail')
        }
        self.__googledrive_service.permissions().insert(fileId=file['id'], body=permissions).execute()

        # self.__googledrive_service.files().get(fileId=file['id']).execute()

        self.info['id'] = file['id']
        self.info['download_url'] = file['webContentLink'] 
Example #4
Source File: task.py    From realtime-embeddings-matching with Apache License 2.0
def _upload_to_gcs(gcs_services, local_file_name, bucket_name, gcs_location):

  logging.info('Uploading file {} to {}...'.format(
    local_file_name, "gs://{}/{}".format(bucket_name, gcs_location)))

  media = MediaFileUpload(local_file_name,
                          mimetype='application/octet-stream',
                          chunksize=CHUNKSIZE, resumable=True)
  request = gcs_services.objects().insert(
    bucket=bucket_name, name=gcs_location, media_body=media)
  response = None
  while response is None:
    progress, response = request.next_chunk()

  logging.info('File {} uploaded to {}.'.format(
    local_file_name, "gs://{}/{}".format(bucket_name, gcs_location))) 
Example #5
Source File: uploadrobot.py    From youtube-video-maker with GNU General Public License v3.0
def initialize_upload(self, youtube, options):
        tags = None
        if options.keywords:
            tags = options.keywords.split(",")

        body = dict(
            snippet = dict(
                title = options.title,
                description = options.description,
                tags = tags,
                categoryId = options.category
            ),

            status = dict(
                privacyStatus = options.privacy_status
            )
        )

        insert_request = youtube.videos().insert(
            part = ",".join(body.keys()),
            body = body,
            media_body = MediaFileUpload(options.file, chunksize = -1, resumable = True)
        )

        self.resumable_upload(insert_request) 
Example #6
Source File: gcs_hook.py    From python-docs-samples with Apache License 2.0
def upload(self, bucket, object, filename, mime_type='application/octet-stream'):
        """
        Uploads a local file to Google Cloud Storage.

        :param bucket: The bucket to upload to.
        :type bucket: str
        :param object: The object name to set when uploading the local file.
        :type object: str
        :param filename: The local file path to the file to be uploaded.
        :type filename: str
        :param mime_type: The MIME type to set when uploading the file.
        :type mime_type: str
        """
        service = self.get_conn()
        media = MediaFileUpload(filename, mime_type)
        try:
            service \
                .objects() \
                .insert(bucket=bucket, name=object, media_body=media) \
                .execute()
            return True
        except errors.HttpError as ex:
            if ex.resp['status'] == '404':
                return False
            raise

    # pylint:disable=redefined-builtin 
Example #7
Source File: utils.py    From colab-tf-utils with GNU General Public License v3.0
def upload_file_to_folder(self, local_file, folder = None):
        """
        Upload a local file, optionally to a specific folder in Google Drive
        :param local_file: Path to the local file
        :param folder: (Option) GDriveItem which should be the parent.
        :return:
        """
        if folder is not None:
            assert type(folder) == GDriveItem

        file_metadata = {
            'title': local_file,
            'name': local_file
        }

        if folder is not None:
            file_metadata['parents'] = [folder.fid]

        media = MediaFileUpload(local_file, resumable=True)
        created = self.drive_service.files().create(body=file_metadata,
                                                    media_body=media,
                                                    fields='id')

        response = None
        last_progress = 0

        if folder is not None:
            d = 'Uploading file %s to folder %s' % (local_file, folder.name)
        else:
            d = 'Uploading file %s' % local_file

        pbar = tqdm(total=100, desc=d)
        while response is None:
            status, response = created.next_chunk()
            if status:
                p = status.progress() * 100
                dp = p - last_progress
                pbar.update(dp)
                last_progress = p

        pbar.update(100 - last_progress) 
Example #8
Source File: utils.py    From drive-cli with MIT License
def update_file(name, path, fid):
    token = os.path.join(dirpath, 'token.json')
    store = file.Storage(token)
    creds = store.get()
    service = build('drive', 'v3', http=creds.authorize(Http()))
    file_mimeType = identify_mimetype(name)
    media = MediaFileUpload(path, mimetype=file_mimeType)
    new_file = service.files().update(fileId=fid,
                                      media_body=media,
                                      fields='id').execute()
    data = drive_data()
    data[path]['time'] = time.time()
    drive_data(data)
    return new_file 
Example #9
Source File: google.py    From pghoard with Apache License 2.0
def store_file_from_disk(self, key, filepath, metadata=None,  # pylint: disable=arguments-differ, unused-variable
                             *, multipart=None, extra_props=None,  # pylint: disable=arguments-differ, unused-variable
                             cache_control=None, mimetype=None):
        mimetype = mimetype or "application/octet-stream"
        upload = MediaFileUpload(filepath, mimetype, chunksize=UPLOAD_CHUNK_SIZE, resumable=True)
        return self._upload(upload, key, self.sanitize_metadata(metadata), extra_props, cache_control=cache_control) 
Example #10
Source File: gdriveTools.py    From python-aria-mirror-bot with GNU General Public License v3.0
def __upload_empty_file(self, path, file_name, mime_type, parent_id=None):
        media_body = MediaFileUpload(path,
                                     mimetype=mime_type,
                                     resumable=False)
        file_metadata = {
            'name': file_name,
            'description': 'mirror',
            'mimeType': mime_type,
        }
        if parent_id is not None:
            file_metadata['parents'] = [parent_id]
        return self.__service.files().create(supportsTeamDrives=True,
                                             body=file_metadata, media_body=media_body).execute() 
Example #11
Source File: glink.py    From BotHub with Apache License 2.0
async def upload_file(http, file_path, file_name, mime_type, event):
    # Create Google Drive service instance
    drive_service = build("drive", "v2", http=http, cache_discovery=False)
    # File body description
    media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=True)
    body = {
        "title": file_name,
        "description": "Uploaded using github.com/mkaraniya/BotHub.",
        "mimeType": mime_type,
    }
    if parent_id:
        body["parents"] = [{"id": parent_id}]
    # Permissions body description: anyone who has link can upload
    # Other permissions can be found at https://developers.google.com/drive/v2/reference/permissions
    permissions = {
        "role": "reader",
        "type": "anyone",
        "value": None,
        "withLink": True
    }
    # Insert a file
    file = drive_service.files().insert(body=body, media_body=media_body)
    response = None
    while response is None:
        status, response = file.next_chunk()
        await asyncio.sleep(5)
        if status:
            percentage = int(status.progress() * 100)
            progress_str = "[{0}{1}]\nProgress: {2}%\n".format(
                ''.join(["●" for i in range(math.floor(percentage / 5))]),
                ''.join(["○" for i in range(20 - math.floor(percentage / 5))]),
                round(percentage, 2))
            await event.edit(f"Uploading to Google Drive...\n\nFile Name: {file_name}\n{progress_str}")
    if file:
        await event.edit(file_name + " Uploaded Successfully")
    # Insert new permissions
    drive_service.permissions().insert(fileId=response.get('id'), body=permissions).execute()
    # Define file instance and get url for download
    file = drive_service.files().get(fileId=response.get('id')).execute()
    download_url = response.get("webContentLink")
    return download_url 
Example #12
Source File: spreadsheet.py    From minetorch with MIT License
def _upload_drive_image(self, key, value, retry=True):
        try:
            file_metadata = {'name': key, 'parents': [self.drive_folder_id]}
            media = MediaFileUpload(value, mimetype='image/png')
            file = self.drive.files().create(body=file_metadata, media_body=media, fields='id').execute()
            return file.get('id')
        except HttpError as e:
            if not retry:
                raise e
            self.drive_folder_id = self._prepare_drive_directory()
            return self._upload_drive_image(key, value, retry=False)
Example #13
Source File: storage.py    From django-cloud-deploy with Apache License 2.0
def _upload_file_to_object(self, local_file_path: str, bucket_name: str,
                               object_name: str):
        """Upload the contents of a local file to an object in a GCS bucket."""
        media_body = http.MediaFileUpload(local_file_path)
        body = {'name': object_name}
        request = self._storage_service.objects().insert(bucket=bucket_name,
                                                         body=body,
                                                         media_body=media_body)
        try:
            response = request.execute(num_retries=5)
            if 'name' not in response:
                raise CloudStorageError(
                    'Unexpected responses when uploading file "{}" to '
                    'bucket "{}"'.format(local_file_path, bucket_name))
        except errors.HttpError as e:
            if e.resp.status == 403:
                raise CloudStorageError(
                    'You do not have permission to upload files to '
                    'bucket "{}"'.format(bucket_name))
            elif e.resp.status == 404:
                raise CloudStorageError(
                    'Bucket "{}" not found.'.format(bucket_name))
            else:
                raise CloudStorageError(
                    'Unexpected error when uploading file "{}" to '
                    'bucket "{}"'.format(local_file_path, bucket_name)) from e

        # http.MediaFileUpload opens a file but never closes it. So we
        # need to manually close the file to avoid "ResourceWarning:
        # unclosed file".
        # TODO: Remove this line when
        # https://github.com/googleapis/google-api-python-client/issues/575
        # is resolved.
        media_body.stream().close() 
Example #14
Source File: storage.py    From singularity-python with GNU Affero General Public License v3.0
def upload_file(storage_service, bucket, bucket_path, file_name, verbose=True):
    '''upload_file uploads file_name to bucket_path in the given bucket,
    creating the media object with the correct mimetype and a publicRead
    ACL. The object metadata returned by the API is returned on success,
    otherwise a warning is issued and None is returned.
    :param storage_service: the storage service created from get_storage_service
    :param bucket: the bucket object from get_bucket
    :param file_name: the name of the file to upload
    :param bucket_path: the path to upload to
    :param verbose: passed to sniff_extension when detecting the mimetype
    '''
    # Set up path on bucket
    upload_path = "%s/%s" %(bucket['id'],bucket_path)
    if upload_path[-1] != '/':
        upload_path = "%s/" %(upload_path)
    upload_path = "%s%s" %(upload_path,os.path.basename(file_name))
    body = {'name': upload_path }
    # Create media object with correct mimetype
    if os.path.exists(file_name):
        mimetype = sniff_extension(file_name,verbose=verbose)
        media = http.MediaFileUpload(file_name,
                                     mimetype=mimetype,
                                     resumable=True)
        request = storage_service.objects().insert(bucket=bucket['id'], 
                                                   body=body,
                                                   predefinedAcl="publicRead",
                                                   media_body=media)
        result = request.execute()
        return result
    bot.warning('%s requested for upload does not exist, skipping' %file_name) 
Example #15
Source File: analytics.py    From airflow with Apache License 2.0
def upload_data(
        self,
        file_location: str,
        account_id: str,
        web_property_id: str,
        custom_data_source_id: str,
        resumable_upload: bool = False,
    ) -> None:
        """
        Uploads file to GA via the Data Import API

        :param file_location: The path and name of the file to upload.
        :type file_location: str
        :param account_id: The GA account Id to which the data upload belongs.
        :type account_id: str
        :param web_property_id: UA-string associated with the upload.
        :type web_property_id: str
        :param custom_data_source_id: Custom Data Source Id to which this data import belongs.
        :type custom_data_source_id: str
        :param resumable_upload: flag to upload the file in a resumable fashion, using a
            series of at least two requests.
        :type resumable_upload: bool
        """

        media = MediaFileUpload(
            file_location,
            mimetype="application/octet-stream",
            resumable=resumable_upload,
        )

        self.log.info(
            "Uploading file to GA file for accountId: %s, webPropertyId:%s and customDataSourceId:%s ",
            account_id,
            web_property_id,
            custom_data_source_id,
        )

        self.get_conn().management().uploads().uploadData(  # pylint: disable=no-member
            accountId=account_id,
            webPropertyId=web_property_id,
            customDataSourceId=custom_data_source_id,
            media_body=media,
        ).execute() 
Example #16
Source File: drive.py    From airflow with Apache License 2.0
def upload_file(self, local_location: str, remote_location: str) -> str:
        """
        Uploads a file that is available locally to a Google Drive service.

        :param local_location: The path where the file is available.
        :type local_location: str
        :param remote_location: The path where the file will be sent.
        :type remote_location: str
        :return: File ID
        :rtype: str
        """
        service = self.get_conn()
        directory_path, _, filename = remote_location.rpartition("/")
        if directory_path:
            parent = self._ensure_folders_exists(directory_path)
        else:
            parent = "root"

        file_metadata = {"name": filename, "parents": [parent]}
        media = MediaFileUpload(local_location)
        file = (
            service.files()  # pylint: disable=no-member
            .create(body=file_metadata, media_body=media, fields="id")
            .execute(num_retries=self.num_retries)
        )
        self.log.info("File %s uploaded to gdrive://%s.", local_location, remote_location)
        return file.get("id") 
Example #17
Source File: google_analytics.py    From orchestra with Apache License 2.0
def upload_file(self,
                    file_location,
                    account_id,
                    web_property_id,
                    custom_data_source_id,
                    mime_type='application/octet-stream',
                    resumable_upload=False):

        """Uploads file to GA via the Data Import API

        :param file_location: The path and name of the file to upload.
        :type file_location: str
        :param account_id: The GA account Id to which the data upload belongs.
        :type account_id: str
        :param web_property_id: UA-string associated with the upload.
        :type web_property_id: str
        :param custom_data_source_id: Custom Data Source Id to which this data
                                      import belongs.
        :type custom_data_source_id: str
        :param mime_type: Label to identify the type of data in the HTTP request
        :type mime_type: str
        :param resumable_upload: flag to upload the file in a resumable fashion,
                                 using a series of at least two requests
        :type resumable_upload: bool
        """

        media = MediaFileUpload(file_location,
                                mimetype=mime_type,
                                resumable=resumable_upload)

        logger.info('Uploading file to GA file for accountId:%s,'
                    'webPropertyId:%s'
                    'and customDataSourceId:%s ',
                    account_id, web_property_id, custom_data_source_id)

        # TODO(): handle scenario where upload fails
        self.get_service().management().uploads().uploadData(
            accountId=account_id,
            webPropertyId=web_property_id,
            customDataSourceId=custom_data_source_id,
            media_body=media).execute() 
Example #18
Source File: drive.py    From platypush with MIT License
def upload(self,
               path: str,
               mime_type: Optional[str] = None,
               name: Optional[str] = None,
               description: Optional[str] = None,
               parents: Optional[List[str]] = None,
               starred: bool = False,
               target_mime_type: Optional[str] = None) -> GoogleDriveFile:
        """
        Upload a file to Google Drive.

        :param path: Path of the file to upload.
        :param mime_type: MIME type of the source file (e.g. "``image/jpeg``").
        :param name: Name of the target file. Default: same name as the source file.
        :param description: File description.
        :param parents: List of folder IDs that will contain the file (default: drive root).
        :param starred: If True, then the uploaded file will be marked as starred by the user.
        :param target_mime_type: Target MIME type. Useful if you want to e.g. import a CSV file as a Google Sheet
            (use "``application/vnd.google-apps.spreadsheet``), or an ODT file to a Google Doc
            (use "``application/vnd.google-apps.document``). See
            `the official documentation <https://developers.google.com/drive/api/v3/mime-types>`_ for a complete list
            of supported types.
        """
        # noinspection PyPackageRequirements
        from googleapiclient.http import MediaFileUpload

        path = os.path.abspath(os.path.expanduser(path))
        name = name or os.path.basename(path)
        metadata = {
            'name': name,
            'description': description,
            'parents': parents,
            'starred': starred,
        }

        if target_mime_type:
            metadata['mimeType'] = target_mime_type

        media = MediaFileUpload(path, mimetype=mime_type)
        service = self.get_service()
        file = service.files().create(
            body=metadata,
            media_body=media,
            fields='*'
        ).execute()

        return GoogleDriveFile(
            type=file.get('kind').split('#')[1],
            id=file.get('id'),
            name=file.get('name'),
            mime_type=file.get('mimeType'),
        ) 
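Example #18's docstring mentions that Drive can convert a file on upload (for instance, import a CSV as a Google Sheet) when the metadata declares a Google Workspace MIME type. For reference, here is a minimal sketch of that conversion against the raw Drive v3 API; the file name 'data.csv' and the credentials object are illustrative assumptions, not part of the platypush example.

from googleapiclient.discovery import build
from googleapiclient.http import MediaFileUpload

def import_csv_as_sheet(creds):
    # Build a Drive v3 client from an existing credentials object (assumed).
    service = build('drive', 'v3', credentials=creds)

    # The source media is plain CSV; the target mimeType in the metadata
    # asks Drive to import it as a native Google Sheet.
    media = MediaFileUpload('data.csv', mimetype='text/csv')
    metadata = {
        'name': 'data',
        'mimeType': 'application/vnd.google-apps.spreadsheet',
    }
    file = service.files().create(body=metadata,
                                  media_body=media,
                                  fields='id, mimeType').execute()
    return file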
Example #19
Source File: gdrivenew.py    From BotHub with Apache License 2.0
async def upload_file(http, file_path, file_name, mime_type, event, parent_id):
    # Create Google Drive service instance
    drive_service = build("drive", "v2", http=http, cache_discovery=False)
    # File body description
    media_body = MediaFileUpload(file_path, mimetype=mime_type, resumable=True)
    body = {
        "title": file_name,
        "description": "Uploaded using github.com/ravana69/pornhub gDrive v2",
        "mimeType": mime_type,
    }
    if parent_id is not None:
        body["parents"] = [{"id": parent_id}]
    # Permissions body description: anyone who has link can upload
    # Other permissions can be found at https://developers.google.com/drive/v2/reference/permissions
    permissions = {
        "role": "reader",
        "type": "anyone",
        "value": None,
        "withLink": True
    }
    # Insert a file
    file = drive_service.files().insert(body=body, media_body=media_body)
    response = None
    display_message = ""
    while response is None:
        status, response = file.next_chunk()
        await asyncio.sleep(20)
        if status:
            percentage = int(status.progress() * 100)
            progress_str = "[{0}{1}]\nProgress: {2}%\n".format(
                "".join(["█" for i in range(math.floor(percentage / 5))]),
                "".join(["░" for i in range(20 - math.floor(percentage / 5))]),
                round(percentage, 2)
            )
            current_message = f"uploading to gDrive\nFile Name: {file_name}\n{progress_str}"
            if display_message != current_message:
                try:
                    await event.edit(current_message)
                    display_message = current_message
                except Exception as e:
                    logger.info(str(e))
                    pass
    file_id = response.get("id")
    # Insert new permissions
    drive_service.permissions().insert(fileId=file_id, body=permissions).execute()
    # Define file instance and get url for download
    file = drive_service.files().get(fileId=file_id).execute()
    download_url = file.get("webContentLink")
    return download_url 
Example #20
Source File: google.py    From wrapanapi with MIT License
def upload_file_to_bucket(self, bucket_name, file_path):
        def handle_progressless_iter(error, progressless_iters):
            if progressless_iters > NUM_RETRIES:
                self.logger.info('Failed to make progress for too many consecutive iterations.')
                raise error

            sleeptime = random.random() * (2 ** progressless_iters)
            self.logger.info(
                'Caught exception (%s). Sleeping for %d seconds before retry #%d.',
                str(error), sleeptime, progressless_iters
            )

            time.sleep(sleeptime)

        self.logger.info('Building upload request...')
        media = MediaFileUpload(file_path, chunksize=CHUNKSIZE, resumable=True)
        if not media.mimetype():
            media = MediaFileUpload(file_path, DEFAULT_MIMETYPE, resumable=True)

        blob_name = os.path.basename(file_path)
        if not self.bucket_exists(bucket_name):
            self.logger.error("Bucket '%s' doesn't exist", bucket_name)
            raise NotFoundError("bucket {}".format(bucket_name))

        request = self._storage.objects().insert(
            bucket=bucket_name, name=blob_name, media_body=media)
        self.logger.info(
            'Uploading file: %s, to bucket: %s, blob: %s',
            file_path, bucket_name, blob_name
        )

        progressless_iters = 0
        response = None
        while response is None:
            error = None
            try:
                progress, response = request.next_chunk()
                if progress:
                    self.logger.info('Upload progress: %d%%', 100 * progress.progress())
            except errors.HttpError as err:
                error = err
                if err.resp.status < 500:
                    raise
            except RETRYABLE_ERRORS as err:
                error = err

            if error:
                progressless_iters += 1
                handle_progressless_iter(error, progressless_iters)
            else:
                progressless_iters = 0

        self.logger.info('Upload complete!')
        self.logger.info('Uploaded Object:')
        self.logger.info(json_dumps(response, indent=2))
        return (True, blob_name) 
Example #21
Source File: run.py    From starthinker with Apache License 2.0
def save_video(out, clip):
  # write to user defined local file
  if out.get('file'):
    clip.write_videofile(out['file'], codec='libx264', audio_codec='aac', logger=None) # logger=None needed or ffmpeg writes to stderr

  # for storage, write to temporary file ( no alternative with moviepy ), then upload
  if out.get('storage', {}).get('file'):
    temporary_file_name = "/tmp/%s_%s" % (uuid.uuid1(), out['storage']['file'])
    clip.write_videofile(temporary_file_name, codec='libx264', audio_codec='aac', logger=None) # logger=None needed or ffmpeg writes to stderr
    with open(temporary_file_name, 'rb') as temporary_file:
      object_put(
        project.task['auth'],
        '%s:%s' % ( out['storage']['bucket'],
        out['storage']['file']),
        temporary_file,
        mimetype = mimetypes.guess_type(out['storage']['file'], strict=False)[0]
      )
    os.remove(temporary_file_name)

  if out.get('dcm'):
    print('DCM not implemented yet.')

  # for youtube, write to temporary file ( no alternative with moviepy ), then upload
  if out.get('youtube', {}).get('title'):
    temporary_file_name = "/tmp/%s_%s" % (uuid.uuid1(), out['storage']['file'])
    clip.write_videofile(temporary_file_name, codec='libx264', audio_codec='aac', logger=None) # logger=None needed or ffmpeg writes to stderr

    body = {
     'snippet':{
        'title':out['youtube']['title'],
        'description':out['youtube']['description'],
        'tags':out['youtube']['tags'],
        'categoryId':out['youtube']['category']
      },
      'status':{
        'privacyStatus':out['youtube']['privacy']
      }
    }
    try:
      API_YouTube(project.task['auth']).videos().insert(
        part=",".join(body.keys()), 
        body=body,
        media_body=MediaFileUpload(temporary_file_name, chunksize=CHUNKSIZE, resumable=True)  
      ).upload()
    finally:
      os.remove(temporary_file_name) 
Example #22
Source File: gdriveTools.py    From python-aria-mirror-bot with GNU General Public License v3.0
def upload_file(self, file_path, file_name, mime_type, parent_id):
        # File body description
        file_metadata = {
            'name': file_name,
            'description': 'mirror',
            'mimeType': mime_type,
        }
        if parent_id is not None:
            file_metadata['parents'] = [parent_id]

        if os.path.getsize(file_path) == 0:
            media_body = MediaFileUpload(file_path,
                                         mimetype=mime_type,
                                         resumable=False)
            response = self.__service.files().create(supportsTeamDrives=True,
                                                     body=file_metadata, media_body=media_body).execute()
            if not IS_TEAM_DRIVE:
                self.__set_permission(response['id'])

            drive_file = self.__service.files().get(supportsTeamDrives=True,
                                                    fileId=response['id']).execute()
            download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id'))
            return download_url
        media_body = MediaFileUpload(file_path,
                                     mimetype=mime_type,
                                     resumable=True,
                                     chunksize=50 * 1024 * 1024)

        # Insert a file
        drive_file = self.__service.files().create(supportsTeamDrives=True,
                                                   body=file_metadata, media_body=media_body)
        response = None
        while response is None:
            if self.is_cancelled:
                return None
            try:
                self.status, response = drive_file.next_chunk()
            except HttpError as err:
                if err.resp.get('content-type', '').startswith('application/json'):
                    reason = json.loads(err.content).get('error').get('errors')[0].get('reason')
                    if reason == 'userRateLimitExceeded' or reason == 'dailyLimitExceeded':
                        if USE_SERVICE_ACCOUNTS:
                            self.switchServiceAccount()
                            LOGGER.info(f"Got: {reason}, Trying Again.")
                            return self.upload_file(file_path, file_name, mime_type, parent_id)
                    else:
                        raise err
        self._file_uploaded_bytes = 0
        # Insert new permissions
        if not IS_TEAM_DRIVE:
            self.__set_permission(response['id'])
        # Define file instance and get url for download
        drive_file = self.__service.files().get(supportsTeamDrives=True, fileId=response['id']).execute()
        download_url = self.__G_DRIVE_BASE_DOWNLOAD_URL.format(drive_file.get('id'))
        return download_url 
Example #23
Source File: push.py    From sregistry-cli with Mozilla Public License 2.0
def upload(
    self,
    source,
    destination,
    bucket,
    chunk_size=2 * 1024 * 1024,
    metadata=None,
    keep_private=True,
):

    """upload a file from a source to a destination. The client is expected
       to have a bucket (self._bucket) that is created when instantiated.
     
       This would be the method to do the same using the storage client,
       but not easily done for resumable

       blob = self._bucket.blob(destination)
       blob.upload_from_filename(filename=source, 
                                 content_type="application/zip",
                                 client=self._service)

       url = blob.public_url
       if isinstance(url, six.binary_type):
           url = url.decode('utf-8')

       return url
    """
    env = "SREGISTRY_GOOGLE_STORAGE_PRIVATE"
    keep_private = self._get_and_update_setting(env) or keep_private

    media = MediaFileUpload(source, chunksize=chunk_size, resumable=True)
    request = self._storage_service.objects().insert(
        bucket=bucket.name, name=destination, media_body=media
    )

    response = None
    total = request.resumable._size / (1024 * 1024.0)

    bar = ProgressBar(expected_size=total, filled_char="=", hide=self.quiet)

    while response is None:
        progress, response = request.next_chunk()
        if progress:
            bar.show(progress.resumable_progress / (1024 * 1024.0))

    # When we finish upload, get as blob
    blob = bucket.blob(destination)
    if blob.exists():

        if not keep_private:
            blob.make_public()

        # If the user has a dictionary of metadata to update
        if metadata is not None:
            blob.metadata = metadata
            blob._properties["metadata"] = metadata
            blob.patch()

    return response 
Example #24
Source File: push.py    From sregistry-cli with Mozilla Public License 2.0
def upload(self, source, destination, chunk_size=2 * 1024 * 1024, metadata=None):
    """upload a file from a source to a destination. The client is expected
       to have a bucket (self._bucket) that is created when instantiated.
     
       This would be the method to do the same using the storage client,
       but not easily done for resumable

       blob = self._bucket.blob(destination)
       blob.upload_from_filename(filename=source, 
                                 content_type="application/zip",
                                 client=self._service)

       url = blob.public_url
       if isinstance(url, six.binary_type):
           url = url.decode('utf-8')

       return url
    """
    env = "SREGISTRY_GOOGLE_STORAGE_PRIVATE"
    keep_private = self._get_and_update_setting(env)

    media = MediaFileUpload(source, chunksize=chunk_size, resumable=True)
    request = self._storage_service.objects().insert(
        bucket=self._bucket_name, name=destination, media_body=media
    )

    response = None
    total = request.resumable._size / (1024 * 1024.0)

    bar = ProgressBar(expected_size=total, filled_char="=", hide=self.quiet)

    while response is None:
        progress, response = request.next_chunk()
        if progress:
            bar.show(progress.resumable_progress / (1024 * 1024.0))

    # When we finish upload, get as blob
    blob = self._bucket.blob(destination)
    if blob.exists():

        if not keep_private:
            blob.make_public()

        # If the user has a dictionary of metadata to update
        if metadata is not None:
            blob.metadata = metadata
            blob._properties["metadata"] = metadata
            blob.patch()

    return response