Python google.api_core.exceptions.Forbidden() Examples

The following are 9 code examples of google.api_core.exceptions.Forbidden(). The original project and source file are noted above each example. You may also want to check out all available functions and classes of the module google.api_core.exceptions.
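For orientation: google.api_core.exceptions.Forbidden maps HTTP 403 responses from Google APIs to a Python exception. A minimal sketch of catching it, with a hypothetical bucket name:

from google.api_core import exceptions
from google.cloud import storage

client = storage.Client()
try:
    # "some-bucket" is a hypothetical name; any call the caller lacks
    # permission for raises Forbidden (HTTP 403).
    bucket = client.get_bucket("some-bucket")
except exceptions.Forbidden as err:
    print(f"Access denied: {err}")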
Example #1
Source File: test_dbapi_cursor.py    From python-bigquery with Apache License 2.0
def test_fetchall_w_bqstorage_client_fetch_error_no_fallback(self):
        from google.cloud.bigquery import dbapi
        from google.cloud.bigquery import table

        row_data = [table.Row([1.1, 1.2], {"foo": 0, "bar": 1})]

        mock_client = self._mock_client(rows=row_data)
        mock_bqstorage_client = self._mock_bqstorage_client(
            stream_count=1, rows=row_data,
        )
        no_access_error = exceptions.Forbidden("invalid credentials")
        mock_bqstorage_client.create_read_session.side_effect = no_access_error

        connection = dbapi.connect(
            client=mock_client, bqstorage_client=mock_bqstorage_client,
        )
        cursor = connection.cursor()
        cursor.execute("SELECT foo, bar FROM some_table")

        with six.assertRaisesRegex(self, exceptions.Forbidden, "invalid credentials"):
            cursor.fetchall()

        # the default client was not used
        mock_client.list_rows.assert_not_called() 
Example #2
Source File: base_google.py    From airflow with Apache License 2.0
def is_soft_quota_exception(exception: Exception):
    """
    Google service APIs do not report quota violation errors in a
    standardized way. This function has been adapted by trial and error
    to recognize soft-quota errors from the following services:

    * Google Translate
    * Google Vision
    * Google Text-to-Speech
    * Google Speech-to-Text
    * Google Natural Language
    * Google Video Intelligence
    """
    if isinstance(exception, Forbidden):
        return any(
            reason in error.details()
            for reason in INVALID_REASONS
            for error in exception.errors
        )

    if isinstance(exception, (ResourceExhausted, TooManyRequests)):
        return any(
            key in error.details()
            for key in INVALID_KEYS
            for error in exception.errors
        )

    return False 
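In Airflow this predicate gates retries of Google API calls. A hedged sketch of the wiring with tenacity, which base_google.py uses; the wait and stop values below are illustrative rather than the module's exact configuration, and the decorated call is hypothetical:

import tenacity

@tenacity.retry(
    retry=tenacity.retry_if_exception(is_soft_quota_exception),
    wait=tenacity.wait_exponential(multiplier=1, max=100),
    stop=tenacity.stop_after_attempt(5),
    reraise=True,
)
def annotate_image():
    ...  # hypothetical Google Vision call that may hit a soft quota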
Example #3
Source File: test_system.py    From python-storage with Apache License 2.0
def test_blob_w_temporary_hold(self):
        from google.api_core import exceptions

        new_bucket_name = "w-tmp-hold" + unique_resource_id("-")
        self.assertRaises(
            exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
        )
        bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
        self.case_buckets_to_delete.append(new_bucket_name)

        blob_name = "test-blob"
        payload = b"DEADBEEF"
        blob = bucket.blob(blob_name)
        blob.upload_from_string(payload)

        self.case_blobs_to_delete.append(blob)

        other = bucket.get_blob(blob_name)
        other.temporary_hold = True
        other.patch()

        self.assertTrue(other.temporary_hold)
        self.assertFalse(other.event_based_hold)
        self.assertIsNone(other.retention_expiration_time)

        with self.assertRaises(exceptions.Forbidden):
            other.delete()

        other.temporary_hold = False
        other.patch()

        other.delete()
        self.case_blobs_to_delete.pop() 
Example #4
Source File: test_system.py    From python-storage with Apache License 2.0
def test_bucket_lock_retention_policy(self):
        import datetime
        from google.api_core import exceptions

        period_secs = 10

        new_bucket_name = "loc-ret-policy" + unique_resource_id("-")
        self.assertRaises(
            exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
        )
        bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
        self.case_buckets_to_delete.append(new_bucket_name)

        bucket.retention_period = period_secs
        bucket.patch()

        self.assertEqual(bucket.retention_period, period_secs)
        self.assertIsInstance(bucket.retention_policy_effective_time, datetime.datetime)
        self.assertFalse(bucket.default_event_based_hold)
        self.assertFalse(bucket.retention_policy_locked)

        bucket.lock_retention_policy()

        bucket.reload()
        self.assertTrue(bucket.retention_policy_locked)

        bucket.retention_period = None
        with self.assertRaises(exceptions.Forbidden):
            bucket.patch() 
Example #5
Source File: system.py    From python-bigquery with Apache License 2.0
def _rate_limit_exceeded(forbidden):
    """Predicate: pass only exceptions with 'rateLimitExceeded' as reason."""
    return any(error["reason"] == "rateLimitExceeded" for error in forbidden._errors)


# We need to wait to stay within the rate limits.
# Otherwise the API returns a 403 Forbidden response instead of the more
# appropriate 429 Too Many Requests.
# See https://cloud.google.com/bigquery/quota-policy 
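Because _rate_limit_exceeded has the exception -> bool signature expected by retry predicates, it can be combined with an isinstance check and passed to google.api_core.retry.Retry. A minimal sketch with a hypothetical wrapped call; the original system tests use a comparable helper from their shared test utilities:

from google.api_core import exceptions, retry

def _retry_predicate(exc):
    # Retry only 403s whose reason is rateLimitExceeded.
    return isinstance(exc, exceptions.Forbidden) and _rate_limit_exceeded(exc)

@retry.Retry(predicate=_retry_predicate)
def create_dataset():
    ...  # hypothetical BigQuery call subject to rate limits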
Example #6
Source File: backfill.py    From professional-services with Apache License 2.0
import datetime
import logging
from typing import Dict, List

from google.api_core.exceptions import BadRequest, Forbidden
from google.cloud import storage


def get_buckets(project_ids: List[str],
                gcs_client: storage.Client) -> List[Dict[str, str]]:
    """Retrieves list of metadata for all buckets in a GCP org.

    Args:
        project_ids: List of strings holding project IDs
        gcs_client: storage.Client object

    Returns:
        List of dictionaries mapping bucket-level metadata.
    """
    output_list = []
    for project_id in project_ids:
        try:
            bucket_list = list(gcs_client.list_buckets(project=project_id))
            for bucket in bucket_list:
                output_list.append({
                    "bucket_name": bucket.name,
                    "project_id": project_id,
                    "last_read_timestamp": "",
                    "days_since_last_read": -1,
                    "read_count_30_days": -1,
                    "read_count_90_days": -1,
                    "export_day": datetime.datetime.utcnow().strftime("%Y-%m-%d"),
                    "recommended_OLM": ""
                })
        except Forbidden as err:
            # Listing is per project, so report the project ID; a bucket
            # variable may not be bound when the exception is raised.
            logging.error(f"Access denied on buckets in {project_id}. {err}")
        except BadRequest as err:
            logging.error(f"Could not list buckets in {project_id}. {err}")
    return output_list
Example #7
Source File: test_system.py    From python-storage with Apache License 2.0
def test_bucket_w_retention_period(self):
        import datetime
        from google.api_core import exceptions

        period_secs = 10

        new_bucket_name = "w-retention-period" + unique_resource_id("-")
        bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
        self.case_buckets_to_delete.append(new_bucket_name)

        bucket.retention_period = period_secs
        bucket.default_event_based_hold = False
        bucket.patch()

        self.assertEqual(bucket.retention_period, period_secs)
        self.assertIsInstance(bucket.retention_policy_effective_time, datetime.datetime)
        self.assertFalse(bucket.default_event_based_hold)
        self.assertFalse(bucket.retention_policy_locked)

        blob_name = "test-blob"
        payload = b"DEADBEEF"
        blob = bucket.blob(blob_name)
        blob.upload_from_string(payload)

        self.case_blobs_to_delete.append(blob)

        other = bucket.get_blob(blob_name)

        self.assertFalse(other.event_based_hold)
        self.assertFalse(other.temporary_hold)
        self.assertIsInstance(other.retention_expiration_time, datetime.datetime)

        with self.assertRaises(exceptions.Forbidden):
            other.delete()

        bucket.retention_period = None
        bucket.patch()

        self.assertIsNone(bucket.retention_period)
        self.assertIsNone(bucket.retention_policy_effective_time)
        self.assertFalse(bucket.default_event_based_hold)
        self.assertFalse(bucket.retention_policy_locked)

        other.reload()

        self.assertFalse(other.event_based_hold)
        self.assertFalse(other.temporary_hold)
        self.assertIsNone(other.retention_expiration_time)

        other.delete()
        self.case_blobs_to_delete.pop() 
Example #8
Source File: test_system.py    From python-storage with Apache License 2.0
def test_bucket_w_default_event_based_hold(self):
        from google.api_core import exceptions

        new_bucket_name = "w-def-ebh" + unique_resource_id("-")
        self.assertRaises(
            exceptions.NotFound, Config.CLIENT.get_bucket, new_bucket_name
        )
        bucket = retry_429_503(Config.CLIENT.create_bucket)(new_bucket_name)
        self.case_buckets_to_delete.append(new_bucket_name)

        bucket.default_event_based_hold = True
        bucket.patch()

        self.assertTrue(bucket.default_event_based_hold)
        self.assertIsNone(bucket.retention_period)
        self.assertIsNone(bucket.retention_policy_effective_time)
        self.assertFalse(bucket.retention_policy_locked)

        blob_name = "test-blob"
        payload = b"DEADBEEF"
        blob = bucket.blob(blob_name)
        blob.upload_from_string(payload)

        self.case_blobs_to_delete.append(blob)

        other = bucket.get_blob(blob_name)

        self.assertTrue(other.event_based_hold)
        self.assertFalse(other.temporary_hold)
        self.assertIsNone(other.retention_expiration_time)

        with self.assertRaises(exceptions.Forbidden):
            other.delete()

        other.event_based_hold = False
        other.patch()
        other.delete()

        bucket.default_event_based_hold = False
        bucket.patch()

        self.assertFalse(bucket.default_event_based_hold)
        self.assertIsNone(bucket.retention_period)
        self.assertIsNone(bucket.retention_policy_effective_time)
        self.assertFalse(bucket.retention_policy_locked)

        blob.upload_from_string(payload)
        self.assertFalse(blob.event_based_hold)
        self.assertFalse(blob.temporary_hold)
        self.assertIsNone(blob.retention_expiration_time)

        blob.delete()
        self.case_blobs_to_delete.pop() 
Example #9
Source File: bigquery_api.py    From sroka with MIT License
import pandas as pd
from google.api_core.exceptions import BadRequest, Forbidden, NotFound
from google.cloud import bigquery

# KEY_FILE (the path to a service-account JSON key) is assumed to be
# defined at module level in sroka's configuration.


def done_bigquery(job_id, filename=None):

    if filename:
        if not isinstance(filename, str):
            print('filename needs to be a string')
            return None

        if not isinstance(job_id, str):
            print('job_id needs to be a string')
            return None

        try:
            # Verify the output file can be created before running the job.
            f = open(filename, 'w')
            f.close()
        except FileNotFoundError:
            print('file cannot be saved in selected directory')
            return None

    else:
        if not isinstance(job_id, str):
            print('job_id needs to be a string')
            return pd.DataFrame([])

    client = bigquery.Client.from_service_account_json(
        KEY_FILE)
    try:
        query_job = client.get_job(job_id=job_id)
    except (BadRequest, NotFound) as error:
        print(error)
        if filename:
            return None
        return pd.DataFrame([])
    try:
        df = query_job.result().to_dataframe()
    except (Forbidden, NotFound) as error:
        print(error)
        if filename:
            return None
        return pd.DataFrame([])
    if filename:
        df.to_csv(filename)
        print('saved to ' + filename)
        return None
    else:
        return df
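A usage sketch with a hypothetical job ID. Without filename the result comes back as a pandas DataFrame; with filename it is written to CSV and None is returned:

# Returns a pandas DataFrame with the job's results.
df = done_bigquery('bquxjob_1234abcd_5678efgh')

# Saves the results to CSV and returns None.
done_bigquery('bquxjob_1234abcd_5678efgh', filename='results.csv')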