Python google.api_core.exceptions.GoogleAPICallError() Examples

The following are 27 code examples of google.api_core.exceptions.GoogleAPICallError(), drawn from open-source projects. Each example notes its source file, project, and license. You may also want to check out all available functions and classes of the module google.api_core.exceptions.
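GoogleAPICallError is the common base class for errors raised by Google API calls; concrete subclasses such as NotFound, AlreadyExists, DeadlineExceeded, and ServiceUnavailable correspond to specific HTTP/gRPC status codes. A minimal sketch of the usual handling pattern, using a hypothetical Cloud Storage lookup purely for illustration (the bucket name is made up):

from google.api_core import exceptions
from google.cloud import storage

client = storage.Client()
try:
    bucket = client.get_bucket("example-bucket-name")  # hypothetical bucket
except exceptions.NotFound:
    # More specific subclasses can be handled first.
    print("Bucket does not exist")
except exceptions.GoogleAPICallError as exc:
    # Catch-all for any other API error; .code and .message describe it.
    print("API call failed: {} {}".format(exc.code, exc.message))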
Example #1
Source File: translate_v3_create_glossary_test.py    From python-docs-samples with Apache License 2.0
def test_create_glossary(capsys):
    try:
        glossary_id = "test-{}".format(uuid.uuid4())
        translate_v3_create_glossary.create_glossary(
            PROJECT_ID, GLOSSARY_INPUT_URI, glossary_id
        )
        out, _ = capsys.readouterr()
        # assert
        assert "Created:" in out
        assert "gs://cloud-samples-data/translation/glossary_ja.csv" in out
    finally:
        # cleanup
        @backoff.on_exception(
            backoff.expo, (DeadlineExceeded, GoogleAPICallError), max_time=60
        )
        def delete_glossary():
            try:
                translate_v3_delete_glossary.delete_glossary(
                    PROJECT_ID, glossary_id)
            except NotFound as e:
                # Ignoring this case.
                print("Got NotFound, detail: {}".format(str(e)))
        delete_glossary() 
Example #2
Source File: translate_v3_translate_text_with_glossary_test.py    From python-docs-samples with Apache License 2.0
def glossary():
    """Get the ID of a glossary available to session (do not mutate/delete)."""
    glossary_id = "must-start-with-letters-" + str(uuid.uuid1())
    translate_v3_create_glossary.create_glossary(
        PROJECT_ID, GLOSSARY_INPUT_URI, glossary_id
    )

    yield glossary_id

    # cleanup
    @backoff.on_exception(
        backoff.expo, (DeadlineExceeded, GoogleAPICallError), max_time=60
    )
    def delete_glossary():
        try:
            translate_v3_delete_glossary.delete_glossary(
                PROJECT_ID, glossary_id)
        except NotFound as e:
            # Ignoring this case.
            print("Got NotFound, detail: {}".format(str(e)))
    delete_glossary() 
Example #3
Source File: translate_v3_get_glossary_test.py    From python-docs-samples with Apache License 2.0
def glossary():
    """Get the ID of a glossary available to session (do not mutate/delete)."""
    glossary_id = "must-start-with-letters-" + str(uuid.uuid1())
    translate_v3_create_glossary.create_glossary(
        PROJECT_ID, GLOSSARY_INPUT_URI, glossary_id
    )

    yield glossary_id

    # cleanup
    @backoff.on_exception(
        backoff.expo, (DeadlineExceeded, GoogleAPICallError), max_time=60
    )
    def delete_glossary():
        try:
            translate_v3_delete_glossary.delete_glossary(
                PROJECT_ID, glossary_id)
        except NotFound as e:
            # Ignoring this case.
            print("Got NotFound, detail: {}".format(str(e)))
    delete_glossary() 
Example #4
Source File: translate_v3_list_glossary_test.py    From python-docs-samples with Apache License 2.0
def glossary():
    """Get the ID of a glossary available to session (do not mutate/delete)."""
    glossary_id = "must-start-with-letters-" + str(uuid.uuid1())
    translate_v3_create_glossary.create_glossary(
        PROJECT_ID, GLOSSARY_INPUT_URI, glossary_id
    )

    yield glossary_id

    # clean up
    @backoff.on_exception(
        backoff.expo, (DeadlineExceeded, GoogleAPICallError), max_time=60
    )
    def delete_glossary():
        try:
            translate_v3_delete_glossary.delete_glossary(
                PROJECT_ID, glossary_id)
        except NotFound as e:
            # Ignoring this case.
            print("Got NotFound, detail: {}".format(str(e)))
    delete_glossary() 
Example #5
Source File: translate_v3_batch_translate_text_with_glossary_test.py    From python-docs-samples with Apache License 2.0
def glossary():
    """Get the ID of a glossary available to session (do not mutate/delete)."""
    glossary_id = "test-{}".format(uuid.uuid4())
    translate_v3_create_glossary.create_glossary(
        PROJECT_ID, GLOSSARY_INPUT_URI, glossary_id
    )

    yield glossary_id

    # cleanup
    @backoff.on_exception(
        backoff.expo, (DeadlineExceeded, GoogleAPICallError), max_time=60
    )
    def delete_glossary():
        try:
            translate_v3_delete_glossary.delete_glossary(
                PROJECT_ID, glossary_id)
        except NotFound as e:
            # Ignoring this case.
            print("Got NotFound, detail: {}".format(str(e)))
    delete_glossary() 
Example #6
Source File: spanner.py    From airflow with Apache License 2.0
def delete_instance(self, instance_id: str, project_id: str) -> None:
        """
        Deletes an existing Cloud Spanner instance.

        :param instance_id: The ID of the Cloud Spanner instance.
        :type instance_id: str
        :param project_id: Optional, the ID of the GCP project that owns the Cloud Spanner
            database. If set to None or missing, the default project_id from the GCP connection is used.
        :type project_id: str
        :return: None
        """
        instance = self._get_client(project_id=project_id).instance(instance_id)
        try:
            instance.delete()
            return
        except GoogleAPICallError as e:
            self.log.error('An error occurred: %s. Exiting.', e.message)
            raise e 
Example #7
Source File: spanner.py    From airflow with Apache License 2.0
def delete_database(self, instance_id: str, database_id, project_id: str) -> bool:
        """
        Drops a database in Cloud Spanner.

        :type project_id: str
        :param instance_id: The ID of the Cloud Spanner instance.
        :type instance_id: str
        :param database_id: The ID of the database in Cloud Spanner.
        :type database_id: str
        :param project_id: Optional, the ID of the  GCP project that owns the Cloud Spanner
            database. If set to None or missing, the default project_id from the GCP connection is used.
        :return: True if everything succeeded
        :rtype: bool
        """
        instance = self._get_client(project_id=project_id).\
            instance(instance_id=instance_id)
        if not instance.exists():
            raise AirflowException("The instance {} does not exist in project {} !".
                                   format(instance_id, project_id))
        database = instance.database(database_id=database_id)
        if not database.exists():
            self.log.info(
                "The database %s is already deleted from instance %s. Exiting.",
                database_id, instance_id
            )
            return False
        try:
            database.drop()  # pylint: disable=E1111
        except GoogleAPICallError as e:
            self.log.error('An error occurred: %s. Exiting.', e.message)
            raise e

        return True 
Example #8
Source File: googleJobStore.py    From toil with Apache License 2.0
def googleRetryPredicate(e):
    """
    necessary because under heavy load google may throw
        TooManyRequests: 429
        The project exceeded the rate limit for creating and deleting buckets.

    or numerous other server errors which need to be retried.
    """
    if isinstance(e, GoogleAPICallError) and e.code == 429:
        return True
    if isinstance(e, InternalServerError) or isinstance(e, ServiceUnavailable):
        return True
    return False 
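A predicate in this style can also be passed to google.api_core.retry.Retry, which re-invokes the wrapped call whenever the predicate returns True. The sketch below mirrors the conditions above; the delay parameters and the commented-out bucket call are illustrative assumptions, not part of toil:

from google.api_core import exceptions
from google.api_core.retry import Retry

# Retry on rate limiting (429) and on transient server-side failures,
# mirroring googleRetryPredicate above.
def _retry_predicate(e):
    if isinstance(e, exceptions.GoogleAPICallError) and e.code == 429:
        return True
    return isinstance(e, (exceptions.InternalServerError, exceptions.ServiceUnavailable))

retryable = Retry(
    predicate=_retry_predicate,
    initial=1.0,    # first delay, in seconds
    maximum=30.0,   # cap on the delay between attempts
    deadline=120.0  # give up after two minutes overall
)

# Usage (hypothetical): result = retryable(client.create_bucket)("my-bucket")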
Example #9
Source File: engine_client.py    From Cirq with Apache License 2.0
def _make_request(self, request: Callable[[], _R]) -> _R:
        # Start with a 100ms retry delay with exponential backoff to
        # max_retry_delay_seconds
        current_delay = 0.1

        while True:
            try:
                return request()
            except GoogleAPICallError as err:
                message = err.message
                # Raise RuntimeError for exceptions that are not retryable.
                # Otherwise, pass through to retry.
                if err.code.value not in RETRYABLE_ERROR_CODES:
                    raise EngineException(message) from err

            current_delay *= 2
            if current_delay > self.max_retry_delay_seconds:
                raise TimeoutError(
                    'Reached max retry attempts for error: {}'.format(message))
            if self.verbose:
                print(message, file=sys.stderr)
                print('Waiting ',
                      current_delay,
                      'seconds before retrying.',
                      file=sys.stderr)
            time.sleep(current_delay) 
Example #10
Source File: test_blob.py    From python-storage with Apache License 2.0
def _helper(self, message, code=http_client.BAD_REQUEST, args=()):
        import requests

        from google.resumable_media import InvalidResponse
        from google.api_core import exceptions

        response = requests.Response()
        response.request = requests.Request("GET", "http://example.com").prepare()
        response.status_code = code
        error = InvalidResponse(response, message, *args)

        with self.assertRaises(exceptions.GoogleAPICallError) as exc_info:
            self._call_fut(error)

        return exc_info 
Example #11
Source File: streaming_pull_manager.py    From python-pubsub with Apache License 2.0
def send(self, request):
        """Queue a request to be sent to the RPC.

        If a RetryError occurs, the manager shutdown is triggered, and the
        error is re-raised.
        """
        if self._UNARY_REQUESTS:
            try:
                self._send_unary_request(request)
            except exceptions.GoogleAPICallError:
                _LOGGER.debug(
                    "Exception while sending unary RPC. This is typically "
                    "non-fatal as stream requests are best-effort.",
                    exc_info=True,
                )
            except exceptions.RetryError as exc:
                _LOGGER.debug(
                    "RetryError while sending unary RPC. Waiting on a transient "
                    "error resolution for too long, will now trigger shutdown.",
                    exc_info=False,
                )
                # The underlying channel has been suffering from a retryable error
                # for too long, time to give up and shut the streaming pull down.
                self._on_rpc_done(exc)
                raise

        else:
            self._rpc.send(request) 
Example #12
Source File: test_streaming_pull_manager.py    From python-pubsub with Apache License 2.0
def test_send_unary_api_call_error(caplog):
    caplog.set_level(logging.DEBUG)

    manager = make_manager()
    manager._UNARY_REQUESTS = True

    error = exceptions.GoogleAPICallError("The front fell off")
    manager._client.acknowledge.side_effect = error

    manager.send(types.StreamingPullRequest(ack_ids=["ack_id1", "ack_id2"]))

    assert "The front fell off" in caplog.text 
Example #13
Source File: pubsub.py    From airflow with Apache License 2.0
def publish(
        self,
        topic: str,
        messages: List[Dict],
        project_id: str,
    ) -> None:
        """
        Publishes messages to a Pub/Sub topic.

        :param topic: the Pub/Sub topic to which to publish; do not
            include the ``projects/{project}/topics/`` prefix.
        :type topic: str
        :param messages: messages to publish; if the data field in a
            message is set, it should be a bytestring (utf-8 encoded)
        :type messages: list of PubSub messages; see
            http://cloud.google.com/pubsub/docs/reference/rest/v1/PubsubMessage
        :param project_id: Optional, the GCP project ID in which to publish.
            If set to None or missing, the default project_id from the GCP connection is used.
        :type project_id: str
        """
        self._validate_messages(messages)

        publisher = self.get_conn()
        topic_path = PublisherClient.topic_path(project_id, topic)  # pylint: disable=no-member

        self.log.info("Publish %d messages to topic (path) %s", len(messages), topic_path)
        try:
            for message in messages:
                future = publisher.publish(
                    topic=topic_path,
                    data=message.get("data", b''),
                    **message.get('attributes', {})
                )
                future.result()
        except GoogleAPICallError as e:
            raise PubSubException(f'Error publishing to topic {topic_path}', e)

        self.log.info("Published %d messages to topic (path) %s", len(messages), topic_path) 
Example #14
Source File: test_watch.py    From python-firestore with Apache License 2.0
def test_is_grpc_error(self):
        import grpc
        from google.api_core.exceptions import GoogleAPICallError

        exc = grpc.RpcError()
        result = self._callFUT(exc)
        self.assertEqual(result.__class__, GoogleAPICallError) 
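The helper under test here (_callFUT) is project-specific, but the same wrapping can be done with google.api_core.exceptions.from_grpc_error, which converts a grpc.RpcError into a GoogleAPICallError. A brief sketch of that related approach:

import grpc
from google.api_core import exceptions

exc = grpc.RpcError()
wrapped = exceptions.from_grpc_error(exc)
# A bare RpcError carries no status code, so the result is a plain
# GoogleAPICallError wrapping the original exception text.
assert isinstance(wrapped, exceptions.GoogleAPICallError)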
Example #15
Source File: spanner.py    From airflow with Apache License 2.0
def create_database(
        self,
        instance_id: str,
        database_id: str,
        ddl_statements: List[str],
        project_id: str,
    ) -> None:
        """
        Creates a new database in Cloud Spanner.

        :type project_id: str
        :param instance_id: The ID of the Cloud Spanner instance.
        :type instance_id: str
        :param database_id: The ID of the database to create in Cloud Spanner.
        :type database_id: str
        :param ddl_statements: The string list containing DDL for the new database.
        :type ddl_statements: list[str]
        :param project_id: Optional, the ID of the  GCP project that owns the Cloud Spanner
            database. If set to None or missing, the default project_id from the GCP connection is used.
        :return: None
        """
        instance = self._get_client(project_id=project_id).instance(
            instance_id=instance_id)
        if not instance.exists():
            raise AirflowException("The instance {} does not exist in project {} !".
                                   format(instance_id, project_id))
        database = instance.database(database_id=database_id,
                                     ddl_statements=ddl_statements)
        try:
            operation = database.create()  # type: Operation
        except GoogleAPICallError as e:
            self.log.error('An error occurred: %s. Exiting.', e.message)
            raise e

        if operation:
            result = operation.result()
            self.log.info(result) 
Example #16
Source File: test_pubsub.py    From airflow with Apache License 2.0
def test_publish_api_call_error(self, mock_service):
        publish_method = mock_service.return_value.publish
        publish_method.side_effect = GoogleAPICallError(
            'Error publishing to topic {}'.format(EXPANDED_SUBSCRIPTION)
        )

        with self.assertRaises(PubSubException):
            self.pubsub_hook.publish(project_id=TEST_PROJECT, topic=TEST_TOPIC, messages=TEST_MESSAGES) 
Example #17
Source File: test_pubsub.py    From airflow with Apache License 2.0
def test_create_subscription_api_call_error(self, mock_service):
        mock_service.create_subscription.side_effect = GoogleAPICallError(
            'Error creating subscription %s' % EXPANDED_SUBSCRIPTION
        )
        with self.assertRaises(PubSubException):
            self.pubsub_hook.create_subscription(
                project_id=TEST_PROJECT, topic=TEST_TOPIC, subscription=TEST_SUBSCRIPTION, fail_if_exists=True
            ) 
Example #18
Source File: test_pubsub.py    From airflow with Apache License 2.0
def test_delete_subscription_api_call_error(self, mock_service):
        mock_service.delete_subscription.side_effect = GoogleAPICallError(
            'Error deleting subscription %s' % EXPANDED_SUBSCRIPTION
        )
        with self.assertRaises(PubSubException):
            self.pubsub_hook.delete_subscription(
                project_id=TEST_PROJECT, subscription=TEST_SUBSCRIPTION, fail_if_not_exists=True
            ) 
Example #19
Source File: test_pubsub.py    From airflow with Apache License 2.0
def test_create_topic_api_call_error(self, mock_service):
        mock_service.return_value.create_topic.side_effect = GoogleAPICallError(
            'Error creating topic: %s' % TEST_TOPIC
        )
        with self.assertRaises(PubSubException):
            self.pubsub_hook.create_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC, fail_if_exists=True) 
Example #20
Source File: test_pubsub.py    From airflow with Apache License 2.0
def test_delete_topic_api_call_error(self, mock_service):
        mock_service.return_value.delete_topic.side_effect = GoogleAPICallError(
            'Error deleting topic: %s' % EXPANDED_TOPIC
        )
        with self.assertRaises(PubSubException):
            self.pubsub_hook.delete_topic(project_id=TEST_PROJECT, topic=TEST_TOPIC, fail_if_not_exists=True) 
Example #21
Source File: transaction.py    From python-firestore with Apache License 2.0
def _maybe_commit(self, transaction):
        """Try to commit the transaction.

        If the transaction is read-write and the ``Commit`` fails with the
        ``ABORTED`` status code, it will be retried. Any other failure will
        not be caught.

        Args:
            transaction (~.firestore_v1beta1.transaction.Transaction): The
                transaction to be ``Commit``-ed.

        Returns:
            bool: Indicating if the commit succeeded.
        """
        try:
            transaction._commit()
            return True
        except exceptions.GoogleAPICallError as exc:
            if transaction._read_only:
                raise

            if isinstance(exc, exceptions.Aborted):
                # If a read-write transaction returns ABORTED, retry.
                return False
            else:
                raise 
Example #22
Source File: transaction.py    From python-firestore with Apache License 2.0
def _commit_with_retry(client, write_pbs, transaction_id):
    """Call ``Commit`` on the GAPIC client with retry / sleep.

    Retries the ``Commit`` RPC on Unavailable. Usually this RPC-level
    retry is handled by the underlying GAPICd client, but in this case it
    doesn't because ``Commit`` is not always idempotent. But here we know it
    is "idempotent"-like because it has a transaction ID. We also need to do
    our own retry to special-case the ``INVALID_ARGUMENT`` error.

    Args:
        client (:class:`~google.cloud.firestore_v1.client.Client`):
            A client with GAPIC client and configuration details.
        write_pbs (List[:class:`google.cloud.proto.firestore.v1.write_pb2.Write`, ...]):
            A ``Write`` protobuf instance to be committed.
        transaction_id (bytes):
            ID of an existing transaction that this commit will run in.

    Returns:
        :class:`google.cloud.firestore_v1.types.CommitResponse`:
        The protobuf response from ``Commit``.

    Raises:
        ~google.api_core.exceptions.GoogleAPICallError: If a non-retryable
            exception is encountered.
    """
    current_sleep = _INITIAL_SLEEP
    while True:
        try:
            return client._firestore_api.commit(
                client._database_string,
                write_pbs,
                transaction=transaction_id,
                metadata=client._rpc_metadata,
            )
        except exceptions.ServiceUnavailable:
            # Retry
            pass

        current_sleep = _sleep(current_sleep) 
Example #23
Source File: transaction.py    From python-firestore with Apache License 2.0
def _maybe_commit(self, transaction):
        """Try to commit the transaction.

        If the transaction is read-write and the ``Commit`` fails with the
        ``ABORTED`` status code, it will be retried. Any other failure will
        not be caught.

        Args:
            transaction
                (:class:`~google.cloud.firestore_v1.transaction.Transaction`):
                The transaction to be ``Commit``-ed.

        Returns:
            bool: Indicating if the commit succeeded.
        """
        try:
            transaction._commit()
            return True
        except exceptions.GoogleAPICallError as exc:
            if transaction._read_only:
                raise

            if isinstance(exc, exceptions.Aborted):
                # If a read-write transaction returns ABORTED, retry.
                return False
            else:
                raise 
Example #24
Source File: test_watch.py    From python-firestore with Apache License 2.0
def test_is_grpc_error(self):
        import grpc
        from google.api_core.exceptions import GoogleAPICallError

        exc = grpc.RpcError()
        result = self._callFUT(exc)
        self.assertEqual(result.__class__, GoogleAPICallError) 
Example #25
Source File: spanner.py    From airflow with Apache License 2.0
def update_database(
        self,
        instance_id: str,
        database_id: str,
        ddl_statements: List[str],
        project_id: str,
        operation_id: Optional[str] = None
    ) -> None:
        """
        Updates DDL of a database in Cloud Spanner.

        :type project_id: str
        :param instance_id: The ID of the Cloud Spanner instance.
        :type instance_id: str
        :param database_id: The ID of the database in Cloud Spanner.
        :type database_id: str
        :param ddl_statements: The string list containing DDL for the new database.
        :type ddl_statements: list[str]
        :param project_id: Optional, the ID of the GCP project that owns the Cloud Spanner
            database. If set to None or missing, the default project_id from the GCP connection is used.
        :param operation_id: (Optional) The unique per database operation ID that can be
            specified to implement idempotency check.
        :type operation_id: str
        :return: None
        """
        instance = self._get_client(project_id=project_id).instance(
            instance_id=instance_id)
        if not instance.exists():
            raise AirflowException("The instance {} does not exist in project {} !".
                                   format(instance_id, project_id))
        database = instance.database(database_id=database_id)
        try:
            operation = database.update_ddl(
                ddl_statements=ddl_statements, operation_id=operation_id)
            if operation:
                result = operation.result()
                self.log.info(result)
            return
        except AlreadyExists as e:
            if e.code == 409 and operation_id in e.message:
                self.log.info("Replayed update_ddl message - the operation id %s "
                              "was already done before.", operation_id)
                return
        except GoogleAPICallError as e:
            self.log.error('An error occurred: %s. Exiting.', e.message)
            raise e 
Example #26
Source File: pubsub.py    From airflow with Apache License 2.0
def delete_subscription(
        self,
        subscription: str,
        project_id: str,
        fail_if_not_exists: bool = False,
        retry: Optional[Retry] = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, str]]] = None,
    ) -> None:
        """
        Deletes a Pub/Sub subscription, if it exists.

        :param subscription: the Pub/Sub subscription name to delete; do not
            include the ``projects/{project}/subscriptions/`` prefix.
        :param project_id: Optional, the GCP project ID where the subscription exists
            If set to None or missing, the default project_id from the GCP connection is used.
        :type project_id: str
        :type subscription: str
        :param fail_if_not_exists: if set, raise an exception if the subscription does not exist
        :type fail_if_not_exists: bool
        :param retry: (Optional) A retry object used to retry requests.
            If None is specified, requests will not be retried.
        :type retry: google.api_core.retry.Retry
        :param timeout: (Optional) The amount of time, in seconds, to wait for the request
            to complete. Note that if retry is specified, the timeout applies to each
            individual attempt.
        :type timeout: float
        :param metadata: (Optional) Additional metadata that is provided to the method.
        :type metadata: Sequence[Tuple[str, str]]
        """
        subscriber = self.subscriber_client
        subscription_path = SubscriberClient.subscription_path(project_id, subscription)  # noqa E501 # pylint: disable=no-member,line-too-long

        self.log.info("Deleting subscription (path) %s", subscription_path)
        try:
            # pylint: disable=no-member
            subscriber.delete_subscription(
                subscription=subscription_path,
                retry=retry,
                timeout=timeout,
                metadata=metadata
            )

        except NotFound:
            self.log.warning('Subscription does not exist: %s', subscription_path)
            if fail_if_not_exists:
                raise PubSubException('Subscription does not exist: {}'.format(subscription_path))
        except GoogleAPICallError as e:
            raise PubSubException('Error deleting subscription {}'.format(subscription_path), e)

        self.log.info("Deleted subscription (path) %s", subscription_path) 
Example #27
Source File: pubsub.py    From airflow with Apache License 2.0
def pull(
        self,
        subscription: str,
        max_messages: int,
        project_id: str,
        return_immediately: bool = False,
        retry: Optional[Retry] = None,
        timeout: Optional[float] = None,
        metadata: Optional[Sequence[Tuple[str, str]]] = None,
    ) -> List[ReceivedMessage]:
        """
        Pulls up to ``max_messages`` messages from Pub/Sub subscription.

        :param subscription: the Pub/Sub subscription name to pull from; do not
            include the 'projects/{project}/topics/' prefix.
        :type subscription: str
        :param max_messages: The maximum number of messages to return from
            the Pub/Sub API.
        :type max_messages: int
        :param project_id: Optional, the GCP project ID where the subscription exists.
            If set to None or missing, the default project_id from the GCP connection is used.
        :type project_id: str
        :param return_immediately: If set, the Pub/Sub API will immediately
            return if no messages are available. Otherwise, the request will
            block for an undisclosed, but bounded period of time
        :type return_immediately: bool
        :param retry: (Optional) A retry object used to retry requests.
            If None is specified, requests will not be retried.
        :type retry: google.api_core.retry.Retry
        :param timeout: (Optional) The amount of time, in seconds, to wait for the request
            to complete. Note that if retry is specified, the timeout applies to each
            individual attempt.
        :type timeout: float
        :param metadata: (Optional) Additional metadata that is provided to the method.
        :type metadata: Sequence[Tuple[str, str]]
        :return: A list of Pub/Sub ReceivedMessage objects each containing
            an ``ackId`` property and a ``message`` property, which includes
            the base64-encoded message content. See
            https://cloud.google.com/pubsub/docs/reference/rest/v1/projects.subscriptions/pull#ReceivedMessage
        """
        subscriber = self.subscriber_client
        subscription_path = SubscriberClient.subscription_path(project_id, subscription)  # noqa E501 # pylint: disable=no-member,line-too-long

        self.log.info("Pulling max %d messages from subscription (path) %s", max_messages, subscription_path)
        try:
            # pylint: disable=no-member
            response = subscriber.pull(
                subscription=subscription_path,
                max_messages=max_messages,
                return_immediately=return_immediately,
                retry=retry,
                timeout=timeout,
                metadata=metadata,
            )
            result = getattr(response, 'received_messages', [])
            self.log.info("Pulled %d messages from subscription (path) %s", len(result), subscription_path)
            return result
        except (HttpError, GoogleAPICallError) as e:
            raise PubSubException('Error pulling messages from subscription {}'.format(subscription_path), e)