Python google.appengine.api.taskqueue.add() Examples

The following are 27 code examples of google.appengine.api.taskqueue.add(). Each example notes the source file and open-source project it comes from. You may also want to check out the other available functions and classes of the google.appengine.api.taskqueue module.
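As a minimal sketch of the basic call (the handler URL and parameter names here are hypothetical), taskqueue.add() pushes a POST task onto a queue and returns the enqueued taskqueue.Task:

import logging

from google.appengine.api import taskqueue

def enqueue_example():
    # Enqueue a task on the default push queue; the worker handler at the
    # given URL receives 'params' as form-encoded POST data.
    task = taskqueue.add(url='/tasks/example', params={'key': 'value'})
    # The service fills in the task name and ETA when they are not supplied.
    logging.info('Enqueued task %s with ETA %s', task.name, task.eta)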
Example #1
Source File: love.py    From love with MIT License
def _send_love(recipient_key, message, sender_key, secret):
    """Send love and do associated bookkeeping."""
    new_love = Love(
        sender_key=sender_key,
        recipient_key=recipient_key,
        message=message,
        secret=(secret is True),
    )
    new_love.put()
    LoveCount.update(new_love)

    # Send email asynchronously
    taskqueue.add(
        url='/tasks/love/email',
        params={
            'id': new_love.key.id()
        }
    )

    if not secret:
        logic.event.add_event(
            logic.event.LOVESENT,
            {'love_id': new_love.key.id()},
        ) 
Example #2
Source File: test_datachecks.py    From personfinder with Apache License 2.0
def _test_deadline_exceeded(run_task_func, task_url):
    mox_obj = mox.Mox()
    mox_obj.StubOutWithMock(taskqueue, 'add')
    taskqueue.add(
        method='POST',
        url=task_url,
        params={'cursor': None},
        queue_name='datachecks',
        retry_options=mox.IsA(taskqueue.taskqueue.TaskRetryOptions),
        name=mox.IsA(unicode))
    mox_obj.ReplayAll()
    with mock.patch('utils.validate_email') as mock_validate_email:
        mock_validate_email.side_effect = deadline_exceeded_side_effect
        run_task_func()
    mox_obj.VerifyAll()
    mox_obj.UnsetStubs() 
Example #3
Source File: test_deletion.py    From personfinder with Apache License 2.0
def test_task_rescheduling(self):
        """Tests that task is rescheduled for continuation."""
        tq_mock = mox.Mox()
        tq_mock.StubOutWithMock(taskqueue, 'add')
        taskqueue.add(name=mox.IsA(unicode),
                      method='POST',
                      url='/haiti/tasks/process_expirations',
                      queue_name='expiry',
                      params={'cursor': ''})
        tq_mock.ReplayAll()
        # DeadlineExceededErrors can be raised at any time. A convenient way for
        # us to raise it during this test execution is with utils.get_utcnow.
        with mock.patch('utils.get_utcnow') as get_utcnow_mock:
            get_utcnow_mock.side_effect = runtime.DeadlineExceededError()
            self.run_task('/haiti/tasks/process_expirations', method='POST')
        tq_mock.VerifyAll()
        tq_mock.UnsetStubs() 
Example #4
Source File: fixit.py    From cloud-playground with Apache License 2.0
def post(self):  # pylint:disable-msg=invalid-name,missing-docstring
    assert self.request.environ[common.HTTP_X_APPENGINE_QUEUENAME]
    query = model.Project.query(namespace=settings.PLAYGROUND_NAMESPACE)
    cursor = self.request.get('cursor', None)
    if cursor:
      cursor = Cursor(urlsafe=cursor)
    projects, next_cursor, more = query.fetch_page(_CURSOR_PAGE_SIZE,
                                                   start_cursor=cursor)
    if more and next_cursor:
      taskqueue.add(queue_name='fixit',
                    url='/playground/fix/project',
                    params={'cursor': next_cursor.urlsafe()})
    for project in projects:
      FixProject(project)
    if not next_cursor:
      shared.w('REACHED END OF QUERY CURSOR, '
               'ALTHOUGH OTHER TASKS MAY STILL BE EXECUTING') 
Example #5
Source File: main.py    From professional-services with Apache License 2.0
def get(self):
    """Handler for doing metrics fan-in for all projects."""

    # Can only be accessed by cron.
    if self.request.headers.get('X-Appengine-Cron') is None:
      self.error(403)
      return

    metrics.create_custom_metrics(config.PROJECT_ID)
    date_string = helpers.date_object_to_rfc3339(datetime.now())

    for src_project in metrics.get_projects(config.BILLING_ACCOUNT):
      taskqueue.add(
          queue_name='copy-metrics',
          name=filter(str.isalnum, '%s%s' % (src_project, date_string)),
          url='/CopyMetrics',
          method='GET',
          params={
              'src_project': src_project,
              'dst_project': config.PROJECT_ID,
          }) 
Example #6
Source File: main.py    From danforth-east with MIT License
def post(self):
        helpers.check_csrf(self.request)

        user = users.get_current_user()
        if not user or not gapps.is_user_authorized(user):
            detail = 'user not authorized' if user else 'user not logged in'
            webapp2.abort(401, detail=detail)

        renew_member = gapps.member_dict_from_request(self.request,
                                                      user.email(),
                                                      'renew')
        gapps.renew_member_from_dict(renew_member)

        self.response.write('success')

        # Queue the welcome email
        taskqueue.add(url='/tasks/renew-member-mail', params=renew_member) 
Example #7
Source File: main.py    From danforth-east with MIT License
def post(self):
        """Create the new member.
        '409 Conflict' is thrown if the email address is already associated
        with an existing member.
        """
        helpers.check_csrf(self.request)

        user = users.get_current_user()
        if not user or not gapps.is_user_authorized(user):
            detail = 'user not authorized' if user else 'user not logged in'
            webapp2.abort(401, detail=detail)

        new_member = gapps.member_dict_from_request(self.request,
                                                    user.email(),
                                                    'join')
        join_or_renew = gapps.join_or_renew_member_from_dict(new_member)

        self.response.write('success: %s' % join_or_renew)

        # Queue the welcome email
        taskqueue.add(url='/tasks/new-member-mail', params=new_member) 
Example #8
Source File: change_log.py    From luci-py with Apache License 2.0
def enqueue_process_change_task(auth_db_rev):
  """Transactionally adds a call to 'process_change' to the task queue.

  Pins the task to currently executing version of BACKEND_MODULE module
  (defined in config.py).

  Added as AuthDB commit callback in get_backend_routes() below.
  """
  assert ndb.in_transaction()
  conf = config.ensure_configured()
  try:
    # Pin the task to the module and version.
    taskqueue.add(
        url='/internal/auth/taskqueue/process-change/%d' % auth_db_rev,
        queue_name=conf.PROCESS_CHANGE_TASK_QUEUE,
        headers={'Host': modules.get_hostname(module=conf.BACKEND_MODULE)},
        transactional=True)
  except Exception as e:
    logging.error(
        'Problem adding "process-change" task to the task queue (%s): %s',
        e.__class__.__name__, e)
    raise 
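The transactional=True form used above enqueues the task only if the surrounding datastore transaction commits. A minimal sketch of the same idea outside this project (the handler path and entity are hypothetical):

from google.appengine.api import taskqueue
from google.appengine.ext import ndb

@ndb.transactional
def put_and_notify(entity):
    # The task is enqueued only if this transaction commits. Transactional
    # tasks cannot be named, and at most five may be added per transaction.
    entity.put()
    taskqueue.add(
        url='/internal/notify',  # hypothetical worker path
        params={'key': entity.key.urlsafe()},
        transactional=True)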
Example #9
Source File: application.py    From python-docs-samples with Apache License 2.0
def post(self):
        amount = int(self.request.get('amount'))

        task = taskqueue.add(
            url='/update_counter',
            target='worker',
            params={'amount': amount})

        self.response.write(
            'Task {} enqueued, ETA {}.'.format(task.name, task.eta))


# AsyncEnqueueTaskHandler behaves the same as EnqueueTaskHandler, but shows
# how to queue the task using the asynchronous API. This is not wired up by
# default. To use this, change the MainPageHandler's form action to
# /enqueue_async 
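The asynchronous handler referenced in the comment above is not part of this snippet; a rough sketch of what it could look like, assuming the same '/update_counter' worker, would use taskqueue.Queue.add_async(), which returns an RPC instead of blocking:

import webapp2

from google.appengine.api import taskqueue

class AsyncEnqueueTaskHandler(webapp2.RequestHandler):
    """Hypothetical async counterpart to EnqueueTaskHandler."""

    def post(self):
        amount = int(self.request.get('amount'))
        queue = taskqueue.Queue()  # the default push queue
        rpc = queue.add_async(taskqueue.Task(
            url='/update_counter',
            target='worker',
            params={'amount': amount}))
        # get_result() waits for the enqueue to finish and returns the Task.
        task = rpc.get_result()
        self.response.write(
            'Task {} enqueued, ETA {}.'.format(task.name, task.eta))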
Example #10
Source File: base_model.py    From loaner with Apache License 2.0
def stream_to_bq(self, user, summary, timestamp=None):
    """Creates a task to stream an update to BigQuery.

    Args:
      user: string user email of the acting user.
      summary: string summary of the action being performed.
      timestamp: datetime, if not provided current time will be used.
    """
    if not timestamp:
      timestamp = datetime.datetime.utcnow()
    calling_function = inspect.stack()[1][3]
    task_params = {
        'model_instance': self,
        'timestamp': timestamp,
        'actor': user,
        'method': calling_function,
        'summary': summary,
    }
    taskqueue.add(
        queue_name='stream-bq',
        payload=pickle.dumps(task_params),
        target='default') 
Example #11
Source File: send_email.py    From loaner with Apache License 2.0
def _send_email(**kwargs):
  """Sends email using App Engine's email API.

  Args:
    **kwargs: kwargs for the email api.
  """
  kwargs['sender'] = constants.SEND_EMAIL_AS
  if not constants.ON_PROD:
    if constants.ON_DEV:
      kwargs['subject'] = '[dev] ' + kwargs['subject']
    elif constants.ON_LOCAL:
      kwargs['subject'] = '[local] ' + kwargs['subject']
    elif constants.ON_QA:
      kwargs['subject'] = '[qa] ' + kwargs['subject']

  taskqueue.add(queue_name='send-email', params=kwargs, target='default') 
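Because taskqueue.add() is called with params rather than payload here, the corresponding worker receives the values as form-encoded POST data. A hypothetical worker for the 'send-email' queue (the handler class and route are assumptions, not part of this project) might hand them to the App Engine mail API:

import webapp2

from google.appengine.api import mail

class SendEmailWorker(webapp2.RequestHandler):
    """Hypothetical handler bound to the 'send-email' queue's task URL."""

    def post(self):
        # Each param passed to taskqueue.add() arrives as a POST field.
        mail.send_mail(
            sender=self.request.get('sender'),
            to=self.request.get('to'),
            subject=self.request.get('subject'),
            body=self.request.get('body'))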
Example #12
Source File: taskqueue.py    From python-docs-samples with Apache License 2.0
def get(self, namespace='default'):
        # Queue task to update global counter.
        current_global_count = get_count('counter')
        taskqueue.add(
            url='/tasks/counter',
            params={'counter_name': 'counter'})

        # Queue task to update counter in specified namespace.
        previous_namespace = namespace_manager.get_namespace()
        try:
            namespace_manager.set_namespace(namespace)
            current_namespace_count = get_count('counter')
            taskqueue.add(
                url='/tasks/counter',
                params={'counter_name': 'counter'})
        finally:
            namespace_manager.set_namespace(previous_namespace)

        self.response.write(
            'Counters will be updated asynchronously. '
            'Current values: Global: {}, Namespace {}: {}'.format(
                current_global_count, namespace, current_namespace_count)) 
Example #13
Source File: events.py    From loaner with Apache License 2.0
def get_actions_for_event(event_name):
  """Gets all Action mappings for a given Event.

  Args:
    event_name: str, the name of the Event.

  Returns:
    A list of actions for the Event, or None.

  Raises:
    NoEventsError: if there are no Events to be found in datastore.
  """
  all_mappings = get_all_event_action_mappings()
  if not all_mappings:
    raise NoEventsError(
        'There are no events; run bootstrap to add the default ones.')

  return all_mappings.get(event_name) 
Example #14
Source File: test_db_datastore.py    From locality-sensitive-hashing with MIT License
def post(self):
        filename = self.request.get("filename")
        blob_key = self.request.get("blobkey")
        ds_key   = self.request.get("ds_key")
        output_link   = self.request.get("output_link")

        if self.request.get("run_lsh"):
            taskqueue.add(url='/test_zip_worker', 
                          params={'filename': filename,
                                  'blob_key': blob_key,
                                  })
        else:
            pass

        time.sleep(1)
        self.get() 
Example #15
Source File: models.py    From sndlatr with Apache License 2.0
def add_to_queue(self, url=None, target_state='queued', countdown=0):
        """
        Adds job to task queue and transactionally updates state to 'queued'
        and saves job.
        Does nothing if state is not 'scheduled'.
        """
        if self.state != 'scheduled':
            logging.warn('tried to add job {} with state {}, to queue, '
                         'doing nothing'.format(self.key, self.state))
            return
        if url is None:
            url = self.queue_url
        logging.debug(u'scheduling job {} for {}'.format(self.key,
                                                        self.user_email))
        taskqueue.add(url=url,
                      payload=json.dumps({'key': self.key.urlsafe()}),
                      queue_name=self.queue_name,
                      countdown=countdown,
                      transactional=True)
        self.state = target_state
        self.put() 
Example #16
Source File: base_model.py    From loaner with Apache License 2.0
def add_docs_to_index(cls, documents):
    """Adds a list of documents to a particular index.

    Args:
      documents: a list of search.Documents to add to the class' index.
    """
    index = cls.get_index()
    for doc in documents:
      try:
        index.put(doc)
      except search.PutError as err:
        result = err.results[0]
        if result.code == search.OperationResult.TRANSIENT_ERROR:
          index.put(doc)
      except (search.Error, apiproxy_errors.OverQuotaError):
        logging.error(_PUT_DOC_ERR_MSG, doc, index) 
Example #17
Source File: sitemap_ping.py    From personfinder with Apache License 2.0
def add_ping_tasks():
    for search_engine in _INDEXER_MAP:
        name = 'sitemap_ping-%s-%s' % (search_engine, int(time.time()*1000))
        taskqueue.add(
            name=name, method='GET', url='/global/tasks/sitemap_ping',
            params={_TASK_PARAM_SEARCH_ENGINE: search_engine}) 
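Named tasks like the ones above are de-duplicated by the task queue service: adding a task whose name is in use, or was used recently, raises an error rather than enqueuing a second copy. A minimal sketch of handling that case (the URL is hypothetical):

import logging

from google.appengine.api import taskqueue

def add_named_task_once(name):
    try:
        taskqueue.add(name=name, method='GET', url='/tasks/example')
    except (taskqueue.TaskAlreadyExistsError, taskqueue.TombstonedTaskError):
        # The name is already taken or was recently used; skip re-enqueueing.
        logging.info('Task %s already scheduled.', name)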
Example #18
Source File: datachecks.py    From personfinder with Apache License 2.0
def schedule_task(self, repo, **kwargs):
        name = '%s-%s-%s' % (repo, self.BASE_NAME, int(time.time()*1000))
        path = self.build_absolute_path('/%s/tasks/%s' % (repo, self.TASK_PATH))
        cursor = kwargs.get('cursor', '')
        # TODO(nworden): figure out why setting task_retry_limit isn't working
        retry_options = taskqueue.taskqueue.TaskRetryOptions(task_retry_limit=1)
        taskqueue.add(name=name, method='POST', url=path,
                      queue_name='datachecks', retry_options=retry_options,
                      params={'cursor': cursor}) 
Example #19
Source File: deletion.py    From personfinder with Apache License 2.0
def schedule_task(self, repo, **kwargs):
        name = '%s-cleanup_stray_notes-%s' % (
            repo, int(time.time()*1000))
        path = self.build_absolute_path(
            '/%s/tasks/cleanup_stray_notes' % repo)
        cursor = kwargs.get('cursor', '')
        taskqueue.add(name=name, method='POST', url=path, queue_name='expiry',
                      params={'cursor': cursor}) 
Example #20
Source File: utils.py    From personfinder with Apache License 2.0
def send_mail(self, to, subject, body):
        """Sends e-mail using a sender address that's allowed for this app."""
        app_id = get_app_name()
        sender = 'Do not reply <do-not-reply@%s.%s>' % (app_id, EMAIL_DOMAIN)
        logging.info('Add mail task: recipient %r, subject %r' % (to, subject))
        taskqueue.add(queue_name='send-mail', url='/global/admin/send_mail',
                      params={'sender': sender,
                              'to': to,
                              'subject': subject,
                              'body': body}) 
Example #21
Source File: utils.py    From personfinder with Apache License 2.0
def add_task_for_repo(repo, name, action, **kwargs):
        """Queues up a task for an individual repository."""
        task_name = '%s-%s-%s' % (repo, name, int(time.time()*1000))
        path = '/%s/%s' % (repo, action)
        taskqueue.add(name=task_name, method='GET', url=path, params=kwargs) 
Example #22
Source File: utils.py    From personfinder with Apache License 2.0
def set_url_param(url, param, value):
    """This modifies a URL setting the given param to the specified value.  This
    may add the param or override an existing value, or, if the value is None,
    it will remove the param.  Note that value must be a basestring and can't be
    an int, for example."""
    url_parts = list(urlparse.urlparse(url))
    url_parts[4] = set_param(url_parts[4], param, value)
    return urlparse.urlunparse(url_parts) 
Example #23
Source File: helloworld.py    From NoseGAE with BSD 2-Clause "Simplified" License
def get(self):
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.write('Hello, webapp2 World!')
        taskqueue.add(url='/work/do-something', params={'a': 'b'}, queue_name='awesome-stuff') 
Example #24
Source File: models.py    From crmint with Apache License 2.0
def enqueue(self, worker_class, worker_params, delay=0):
    if self.status != Job.STATUS.RUNNING:
      return None

    # Add a new task to the queue.
    task_name = '%s_%s' % (self.pipeline.id, self.id)
    escaped_task_name = re.sub(r'[^-_0-9a-zA-Z]', '-', task_name)
    unique_task_name = '%s_%s' % (escaped_task_name, str(uuid.uuid4()))
    task_params = {
        'job_id': self.id,
        'worker_class': worker_class,
        'worker_params': json.dumps(worker_params),
        'task_name': unique_task_name
    }
    task = taskqueue.add(
        target='job-service',
        name=unique_task_name,
        url='/task',
        params=task_params,
        countdown=delay)

    # Keep track of the running task name.
    self._add_task_with_name(unique_task_name)
    self.save()

    return task 
Example #25
Source File: process_action.py    From loaner with Apache License 2.0
def post(self):
    """Process an async Action task with the correct Action class."""
    payload = pickle.loads(self.request.body)
    async_actions = payload.pop('async_actions')
    action_name = async_actions.pop(0)
    action_instance = self.actions['async'].get(action_name)
    if action_instance:
      try:
        action_instance.run(**payload)
      # pylint: disable=broad-except, because this logic, in which tasks are
      # responsible for spawning subsequent tasks, creates a chain that could be
      # interrupted by any conceivable exception in an action's run method. This
      # handling ensures any further tasks will run.
      except Exception as error:
        logging.exception(
            'Failed to run async Action %r due to error: %r',
            action_name, str(error))
      # pylint: enable=broad-except
    else:
      logging.error('No async Action named %s found.', action_name)

    if async_actions:
      payload['async_actions'] = async_actions
      taskqueue.add(
          queue_name='process-action',
          payload=pickle.dumps(payload),
          target='default') 
Example #26
Source File: fixit.py    From cloud-playground with Apache License 2.0
def Begin():
  taskqueue.add(queue_name='fixit', url='/playground/fix/project') 
Example #27
Source File: deletion.py    From personfinder with Apache License 2.0
def schedule_task(self, repo, **kwargs):
        name = '%s-cleanup_stray_subscriptions-%s' % (
            repo, int(time.time()*1000))
        path = self.build_absolute_path(
            '/%s/tasks/cleanup_stray_subscriptions' % repo)
        cursor = kwargs.get('cursor', '')
        taskqueue.add(name=name, method='POST', url=path, queue_name='expiry',
                      params={'cursor': cursor})