Python google.appengine.runtime.apiproxy_errors.RequestTooLargeError() Examples

The following code examples show how to use google.appengine.runtime.apiproxy_errors.RequestTooLargeError(). They are taken from open-source Python projects. You can vote up the examples you find helpful or vote down those you don't.

Example 1
Project: luci-py   Author: luci   File: context.py    Apache License 2.0 6 votes vote down vote up
def flush(self):
    """Force a flush of the buffered items.

    Calls the configured flush function and clears the buffer on success.
    db.Timeout is retried up to self.__timeout_retries times, doubling the
    RPC deadline on each attempt.  RequestTooLargeError cannot succeed on
    retry, so the largest items are logged and the error is re-raised.
    """
    if not self.items:
      return

    retry = 0
    options = {"deadline": DATASTORE_DEADLINE}
    while retry <= self.__timeout_retries:
      try:
        self.__flush_function(self.items, options)
        self.clear()
        break
      except db.Timeout as e:  # 'as' form is valid on Python 2.6+ and 3.
        logging.warning(e)
        # NOTE: 'retry' is 0-based here, so the first retry logs "the 0 time".
        logging.warning("Flushing '%s' timed out. Will retry for the %s time.",
                        self, retry)
        retry += 1
        options["deadline"] *= 2
      except apiproxy_errors.RequestTooLargeError:
        # Retrying cannot shrink the payload; surface the offenders and abort.
        self._log_largest_items()
        raise
Example 2
Project: luci-py   Author: luci   File: context.py    Apache License 2.0 6 votes vote down vote up
def flush(self):
    """Force a flush of the buffered items.

    Calls the configured flush function and clears the buffer on success.
    db.Timeout is retried up to self.__timeout_retries times, doubling the
    RPC deadline on each attempt.  RequestTooLargeError cannot succeed on
    retry, so the largest items are logged and the error is re-raised.
    """
    if not self.items:
      return

    retry = 0
    options = {"deadline": DATASTORE_DEADLINE}
    while retry <= self.__timeout_retries:
      try:
        self.__flush_function(self.items, options)
        self.clear()
        break
      except db.Timeout as e:  # 'as' form is valid on Python 2.6+ and 3.
        logging.warning(e)
        # NOTE: 'retry' is 0-based here, so the first retry logs "the 0 time".
        logging.warning("Flushing '%s' timed out. Will retry for the %s time.",
                        self, retry)
        retry += 1
        options["deadline"] *= 2
      except apiproxy_errors.RequestTooLargeError:
        # Retrying cannot shrink the payload; surface the offenders and abort.
        self._log_largest_items()
        raise
Example 3
Project: python-compat-runtime   Author: GoogleCloudPlatform   File: apiproxy_stub.py    Apache License 2.0 6 votes vote down vote up
def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE,
               request_data=None):
    """Initialize the stub.

    Args:
      service_name: Service name expected for all calls.
      max_request_size: int, maximum allowable size of the incoming request.
        An apiproxy_errors.RequestTooLargeError is raised if an inbound
        request exceeds this size.  Default is 1 MB.
      request_data: A request_info.RequestInfo instance used to look up state
        associated with the request that generated an API call.  Falls back
        to the shared local request info when omitted.
    """
    # Identity and limits for incoming calls.
    self.__service_name = service_name
    self.__max_request_size = max_request_size

    # Per-request context lookup; default to the shared local instance.
    self.request_data = request_data or request_info._local_request_info

    # Error-injection state consumed by MakeSyncCall.
    self._mutex = threading.RLock()
    self.__error = None
    self.__error_dict = {}
Example 4
Project: python-compat-runtime   Author: GoogleCloudPlatform   File: context.py    Apache License 2.0 6 votes vote down vote up
def flush(self):
    """Force a flush of the buffered items.

    Calls the configured flush function and clears the buffer on success.
    db.Timeout is retried up to self.__timeout_retries times, doubling the
    RPC deadline on each attempt.  RequestTooLargeError cannot succeed on
    retry, so the largest items are logged and the error is re-raised.
    """
    if not self.items:
      return

    retry = 0
    options = {"deadline": DATASTORE_DEADLINE}
    while retry <= self.__timeout_retries:
      try:
        self.__flush_function(self.items, options)
        self.clear()
        break
      except db.Timeout as e:  # 'as' form is valid on Python 2.6+ and 3.
        logging.warning(e)
        # NOTE: 'retry' is 0-based here, so the first retry logs "the 0 time".
        logging.warning("Flushing '%s' timed out. Will retry for the %s time.",
                        self, retry)
        retry += 1
        options["deadline"] *= 2
      except apiproxy_errors.RequestTooLargeError:
        # Retrying cannot shrink the payload; surface the offenders and abort.
        self._log_largest_items()
        raise
Example 5
Project: luci-py   Author: luci   File: context.py    Apache License 2.0 5 votes vote down vote up
def _log_largest_items(self):
    if not self.__repr_function:
      logging.error("Got RequestTooLargeError but can't interpret items in "
                    "_ItemList %s.", self)
      return

    sizes = [len(self.__repr_function(i)) for i in self.items]
    largest = heapq.nlargest(self._LARGEST_ITEMS_TO_LOG,
                             zip(sizes, self.items),
                             lambda t: t[0])
    # Set field for for test only.
    self._largest = [(s, self.__repr_function(i)) for s, i in largest]
    logging.error("Got RequestTooLargeError. Largest items: %r", self._largest) 
Example 6
Project: luci-py   Author: luci   File: context.py    Apache License 2.0 5 votes vote down vote up
def _log_largest_items(self):
    if not self.__repr_function:
      logging.error("Got RequestTooLargeError but can't interpret items in "
                    "_ItemList %s.", self)
      return

    sizes = [len(self.__repr_function(i)) for i in self.items]
    largest = heapq.nlargest(self._LARGEST_ITEMS_TO_LOG,
                             zip(sizes, self.items),
                             lambda t: t[0])
    # Set field for for test only.
    self._largest = [(s, self.__repr_function(i)) for s, i in largest]
    logging.error("Got RequestTooLargeError. Largest items: %r", self._largest) 
Example 7
Project: cas-eval   Author: varepsilon   File: main.py    Apache License 2.0 5 votes vote down vote up
def save_page():
    """Persist a SERP snapshot posted by the client.

    Validates the request parameters, then stores a Session entity.  If the
    serialized page is too large for the datastore, the HTML payload is
    repeatedly halved until the put succeeds.

    Returns:
      A (message, HTTP status code) tuple understood by Flask.
    """
    @flask.after_this_request
    def add_headers(response):
        # Let cross-origin callers read the response.
        response.headers['Access-Control-Allow-Origin'] = '*'
        return response
    values = flask.request.values
    if values.get('type', '') == 'Serp':
        try:
            user_id = Session.get_user_id(values['url'])
        except Exception as e:
            app.logger.error(e)
            return 'Incorrect user_id used', 400
        try:
            query = Session.get_query(values['url'])
        except Exception as e:
            app.logger.error(e)
            return 'No query set?', 400
        for k in ['data', 'tab_id', 'time']:
            if k not in values:
                return 'Missing param: %s' % k, 400
        data = values['data']
        try:
            ts = Session.convert_time(values['time'])
        except Exception as e:
            app.logger.error(e)
            return 'Incorrect timestamp', 400
        session = Session(id=values['tab_id'], user_id=user_id, q=query,
                serp_html=data, start_ts=ts)
        n = len(data)
        while n > 1:
            session.serp_html = data[:n]
            try:
                session.put()
                break
            except apiproxy_errors.RequestTooLargeError as e:
                app.logger.error(e)
                # Floor division keeps n an int on Python 3; plain '/'
                # would yield a float and break the data[:n] slice.
                n //= 2
        # NOTE(review): if n reaches 1 without a successful put(), the entity
        # is never saved yet we still return 201 — confirm this best-effort
        # behavior is intended.
        return 'Saved', 201
    return 'Only support saving SERPs using POST requests, sorry.', 403
Example 8
Project: python-compat-runtime   Author: GoogleCloudPlatform   File: apiproxy_stub.py    Apache License 2.0 5 votes vote down vote up
def MakeSyncCall(self, service, call, request, response, request_id=None):
    """Dispatch a single synchronous RPC to the matching _Dynamic_ handler.

    Args:
      service: Must be name as provided to service_name of constructor.
      call: A string representing the rpc to make; must be implemented by
        a _Dynamic_<call> method on this stub.
      request: A protocol buffer of the type corresponding to 'call'.
      response: A protocol buffer of the type corresponding to 'call'.
      request_id: A unique string identifying the request associated with
          the API call.

    Raises:
      apiproxy_errors.RequestTooLargeError: if the serialized request
        exceeds the stub's configured maximum size.
    """
    assert service == self.__service_name, ('Expected "%s" service name, '
                                            'was "%s"' % (self.__service_name,
                                                          service))
    if request.ByteSize() > self.__max_request_size:
      raise apiproxy_errors.RequestTooLargeError(
          'The request to API call %s.%s() was too large.' % (service, call))
    validation_errors = []
    assert request.IsInitialized(validation_errors), validation_errors

    # Fault injection: check the per-call error table first, then the
    # stub-wide error.
    injected, chance = self.__error_dict.get(call, (None, None))
    if injected and chance and random.random() <= chance:
      raise injected

    if self.__error and random.random() <= self.__error_rate:
      raise self.__error

    # Delegate to the concrete handler; some stubs also take the request id.
    handler = getattr(self, '_Dynamic_' + call)
    if self._ACCEPTS_REQUEST_ID:
      handler(request, response, request_id)
    else:
      handler(request, response)
Example 9
Project: python-compat-runtime   Author: GoogleCloudPlatform   File: backup_handler.py    Apache License 2.0 5 votes vote down vote up
def flush(self):
    """Save aggregated type information to the datastore if changed."""
    if not self.__needs_save:
      return

    def update_aggregation_tx():
      # Merge the in-memory aggregation into the stored one, or persist
      # ours when nothing is stored yet.
      stored = SchemaAggregationResult.load(
          self.__backup_id, self.__kind, self.__shard_id)
      if not stored:
        self.__aggregation.put(force_writes=True)
        return
      if stored.merge(self.__aggregation):
        stored.put(force_writes=True)
      self.__aggregation = stored

    def mark_aggregation_as_partial_tx():
      # Fallback when the merged result is too large to write: flag the
      # stored aggregation as incomplete instead.
      stored = SchemaAggregationResult.load(
          self.__backup_id, self.__kind, self.__shard_id)
      if stored is None:
        stored = SchemaAggregationResult.create(
            self.__backup_id, self.__kind, self.__shard_id)
      stored.is_partial = True
      stored.put(force_writes=True)
      self.__aggregation = stored

    try:
      db.run_in_transaction(update_aggregation_tx)
    except apiproxy_errors.RequestTooLargeError:
      db.run_in_transaction(mark_aggregation_as_partial_tx)
    self.__needs_save = False
Example 10
Project: python-compat-runtime   Author: GoogleCloudPlatform   File: urlfetch.py    Apache License 2.0 4 votes vote down vote up
def _get_fetch_result(rpc):
  """Checks for success, handles exceptions, and returns a converted RPC result.

  This method waits for the RPC if it has not yet finished and calls the
  post-call hooks on the first invocation.

  Args:
    rpc: A UserRPC object.

  Raises:
    InvalidURLError: If the URL was invalid.
    DownloadError: If there was a problem fetching the URL.
    PayloadTooLargeError: If the request and its payload was larger than the
        allowed limit.
    ResponseTooLargeError: If the response was either truncated (and
        `allow_truncated=False` was passed to `make_fetch_call()`), or if it
        was too big for us to download.
    MalformedReplyError: If an invalid HTTP response was returned.
    TooManyRedirectsError: If the redirect limit was hit while `follow_rediects`
        was set to `True`.
    InternalTransientError: An internal error occurred. Wait a few minutes, then
        try again.
    ConnectionClosedError: If the target server prematurely closed the
        connection.
    DNSLookupFailedError: If the DNS lookup for the URL failed.
    DeadlineExceededError: If the deadline was exceeded; occurs when the
        client-supplied `deadline` is invalid or if the client did not specify a
        `deadline` and the system default value is invalid.
    SSLCertificateError: If an invalid server certificate was presented.
    AssertionError: If the `assert` statement fails.

  Returns:
    A `_URLFetchResult` object.
  """
  assert rpc.service == 'urlfetch', repr(rpc.service)
  assert rpc.method == 'Fetch', repr(rpc.method)

  url = rpc.request.url()

  try:
    rpc.check_success()
  except apiproxy_errors.RequestTooLargeError, err:
    raise InvalidURLError(
        'Request body too large fetching URL: ' + url)