Python google.appengine.api.memcache.set_multi() Examples

The following are 9 code examples showing how to use google.appengine.api.memcache.set_multi(). These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

You may check out the related API usage on the sidebar.

You may also want to check out all available functions/classes of the module google.appengine.api.memcache, or try the search function.

Example 1
Project: python-docs-samples   Author: GoogleCloudPlatform   File: snippets.py    License: Apache License 2.0 6 votes vote down vote up
def add_values():
    # [START add_values]
    # Cache the value only if the key is absent; expire it after one hour.
    memcache.add(key="weather_USA_98105", value="raining", time=3600)

    # Store a batch of values under a common key prefix, replacing any
    # existing entries for those keys.
    forecasts = {"USA_98115": "cloudy", "USA_94105": "foggy", "USA_94043": "sunny"}
    memcache.set_multi(forecasts, key_prefix="weather_", time=3600)

    # Seed an integer counter, then bump it atomically three times.
    memcache.set(key="counter", value=0)
    for _ in range(3):
        memcache.incr("counter")
    # [END add_values] 
Example 2
Project: panwdbl   Author: jtschichold   File: panwdbl.py    License: ISC License 6 votes vote down vote up
def __get_iplist_info(self, tag):
        """Return (timestamp string, list length) for a tagged IP list.

        Serves from memcache when both the "t"+tag and "l"+tag entries are
        present; otherwise falls back to the newest BlockList entity for the
        tag, repopulating both cache entries for a day.
        """
        cached_time = memcache.get("t" + tag)
        cached_list = memcache.get("l" + tag)
        if cached_time is not None and cached_list is not None:
            # Cache hit: both entries present.
            return cached_time, len(cached_list)
        # Cache miss: fetch the most recent BlockList for this tag.
        query = BlockList.all()
        query.filter("tag =", tag)
        query.order("-time")
        entry = query.get()
        if entry is None:
            return '--', '--'
        formatted_time = entry.time.strftime('%d %b %Y %H:%M %Z')
        # Repopulate both cache entries; second positional arg is the
        # expiration time in seconds (24 hours).
        memcache.set_multi({"t" + tag: formatted_time, "l" + tag: entry.iplist},
                           60*60*24)
        return formatted_time, len(entry.iplist) 
Example 3
Project: iris   Author: doitintl   File: main.py    License: MIT License 5 votes vote down vote up
def store(key, value, chunksize=950000):
    """Pickle *value* and write it to memcache as numbered chunks.

    Each chunk is stored under '<key>.<index>' so values larger than the
    memcache per-entry limit can be reassembled later.
    Returns whatever memcache.set_multi returns (list of failed keys).
    """
    blob = cloudpickle.dumps(value, 2)
    chunks = {
        '%s.%s' % (key, offset // chunksize): blob[offset:offset + chunksize]
        for offset in xrange(0, len(blob), chunksize)
    }
    return memcache.set_multi(chunks) 
Example 4
Project: browserscope   Author: elsigh   File: result_stats.py    License: Apache License 2.0 5 votes vote down vote up
def UpdateStats(cls, category, stats):
        """Update the summary stats in memory and the datastore.

        This will only update part of a summary score row: the fetched rows
        are kept and only the entry for `category` is overwritten.

        Args:
            category: a category string like 'network'
            stats: a dict of browser stats (see CategoryStatsManager.GetStats)
        Returns:
            The summary stats that have been updated by the given stats.
            (Used by GetStats.)
        """
        browsers = [name for name in stats.keys() if name != 'total_runs']
        summary_rows = memcache.get_multi(
                browsers, namespace=cls.MEMCACHE_NAMESPACE)
        for name in browsers:
            browser_stats = stats[name]
            row = summary_rows.setdefault(name, {'results': {}})
            row['results'][category] = {
                'score': browser_stats['summary_score'],
                'display': browser_stats['summary_display'],
                'total_runs': browser_stats['total_runs'],
            }
            if category == 'acid3':
                # acid3 displays the raw score's display string rather than
                # the summary display.
                row['results']['acid3']['display'] = (
                    browser_stats['results']['score']['display'])
        memcache.set_multi(summary_rows, namespace=cls.MEMCACHE_NAMESPACE)
        return summary_rows 
Example 5
Project: browserscope   Author: elsigh   File: result_stats.py    License: Apache License 2.0 5 votes vote down vote up
def UpdateStatsCache(cls, category, browsers):
        """Update the memcache of stats for all the tests for each browser.

        This is also where the summary stats get updated.

        Args:
            category: a category string like 'network'
            browsers: a list of browsers like ['Firefox 3.6', 'IE 8.0']
        Returns:
            a list of browsers that were not processed due to a timeout.
        """
        test_set = all_test_sets.GetTestSet(category)
        test_keys = [t.key for t in test_set.VisibleTests()]
        ua_stats = {}
        unhandled_browsers = []
        # NOTE: is_timed_out is deliberately sticky — once one browser hits a
        # datastore timeout, every remaining browser is skipped and reported
        # as unhandled without attempting another query.
        is_timed_out = False
        for browser in browsers:
            try:
                medians, num_scores = test_set.GetMediansAndNumScores(browser)
            except db.Timeout:
                is_timed_out = True
            if is_timed_out:
                logging.info('Timed out \'%s\' in UpdateStatsCache doing '
                                         'GetMediansAndNumScores for %s', category, browser)
                unhandled_browsers.append(browser)
            else:
                stats = test_set.GetStats(test_keys, medians, num_scores)
                ua_stats[browser] = stats
        # Cache whatever stats were gathered, even after a timeout.
        memcache.set_multi(ua_stats, **cls.MemcacheParams(category))
        # Summary stats are only refreshed from a complete (no-timeout) pass.
        if not is_timed_out:
            SummaryStatsManager.UpdateStats(category, ua_stats)
        return unhandled_browsers 
Example 6
Project: python-docs-samples   Author: GoogleCloudPlatform   File: batch.py    License: Apache License 2.0 5 votes vote down vote up
def get(self):
        """Demonstrates batch memcache writes and reads."""
        # [START batch]
        batch = {'comment': 'I did not ... ', 'comment_by': 'Bill Holiday'}
        # set_multi returns the list of keys that failed to be set.
        if not memcache.set_multi(batch):
            logging.error('Unable to set Memcache values')
        fetched = memcache.get_multi(('comment', 'comment_by'))
        self.response.write(fetched)
        # [END batch] 
Example 7
Project: python-compat-runtime   Author: GoogleCloudPlatform   File: recording.py    License: Apache License 2.0 5 votes vote down vote up
def _save(self):
    """Internal function to save the recorded data to memcache.

    Returns:
      A tuple (key, summary_size, full_size).
    """
    summary, complete = self.get_both_protos_encoded()
    key = make_key(self.start_timestamp)
    # Both encodings are stored under one key prefix; set_multi returns the
    # keys that failed, so a non-empty result means a partial write.
    failed = memcache.set_multi(
        {config.PART_SUFFIX: summary, config.FULL_SUFFIX: complete},
        time=36*3600, key_prefix=key, namespace=config.KEY_NAMESPACE)
    if failed:
      logging.warn('Memcache set_multi() error: %s', failed)
    return key, len(summary), len(complete) 
Example 8
Project: browserscope   Author: elsigh   File: result_stats.py    License: Apache License 2.0 4 votes vote down vote up
def AddUserAgent(cls, category, user_agent):
        """Adds a user agent's browser strings to version-level groups.

        AddUserAgent assumes that it does not receive overlapping calls.
        - It should only get called by the update-user-groups task queue.

        Adds a browser for every version level.
        If a level does not have a string, then use the one from the previous level.
        For example, "Safari 4.3" would increment the following:
                level  browser
                        0  Safari
                        1  Safari 4
                        2  Safari 4.3
                        3  Safari 4.3

        Args:
            category: a category string like 'network' or 'reflow'.
            user_agent: a UserAgent instance.
        """
        key_names = [cls.KeyName(category, v) for v in range(4)]
        version_levels = range(4)
        # Visible categories also feed the 'summary' groups, so the key list
        # (and the parallel version-level list) is doubled.
        if category in [t.category for t in all_test_sets.GetVisibleTestSets()]:
            key_names.extend([cls.KeyName('summary', v) for v in range(4)])
            version_levels.extend(range(4))
        level_browsers = memcache.get_multi(key_names,
                                            namespace=cls.MEMCACHE_NAMESPACE)
        browser_key_names = []
        ua_browsers = user_agent.get_string_list()
        # Levels beyond the most specific UA string reuse the last one
        # (e.g. level 3 falls back to "Safari 4.3" in the docstring example).
        max_ua_browsers_index = len(ua_browsers) - 1
        for version_level, key_name in zip(version_levels, key_names):
            browser = ua_browsers[min(max_ua_browsers_index, version_level)]
            # Only keys whose cached group is missing this browser need work.
            if browser not in level_browsers.get(key_name, []):
                browser_key_names.append((browser, key_name))
        # NOTE: the zip below relies on get_by_key_name returning results in
        # the same order as the key names passed in (None for missing rows).
        managers = cls.get_by_key_name([x[1] for x in browser_key_names])

        updated_managers = []
        memcache_mapping = {}
        for (browser, key_name), manager in zip(browser_key_names, managers):
            if manager is None:
                manager = cls.get_or_insert(key_name)
            if browser not in manager.browsers:
                cls.InsortBrowser(manager.browsers, browser)
                updated_managers.append(manager)
                memcache_mapping[key_name] = manager.browsers
        # Persist and re-cache only the groups that actually changed.
        if updated_managers:
            db.put(updated_managers)
            memcache.set_multi(memcache_mapping, namespace=cls.MEMCACHE_NAMESPACE) 
Example 9
Project: luci-py   Author: luci   File: task_queues.py    License: Apache License 2.0 4 votes vote down vote up
def _refresh_task_queues_tasks(dimensions_hashes):
  """Marks each queue as alive in memcache for the next 61 seconds.

  Extracted helper: this refresh was previously duplicated verbatim on both
  the cache-hit and cache-miss paths of get_queues().

  Arguments:
    dimensions_hashes: iterable of integer dimension hashes for the bot.
  """
  memcache.set_multi(
      {str(d): True for d in dimensions_hashes},
      time=61, namespace='task_queues_tasks')


def get_queues(bot_root_key):
  """Returns the known task queues as integers.

  This function is called to get the task queues to poll, as the bot is trying
  to reap a task, any task.

  It is also called while the bot is running a task, to refresh the task queues.

  Arguments:
    bot_root_key: ndb.Key to bot_management.BotRoot

  Returns:
    dimensions_hashes: list of dimension_hash for the bot
  """
  bot_id = bot_root_key.string_id()
  dimensions_hashes = memcache.get(bot_id, namespace='task_queues')
  if dimensions_hashes is not None:
    # Note: This may return stale queues. We may want to change the format to
    # include the expiration.
    logging.debug(
        'get_queues(%s): can run from %d queues (memcache)\n%s',
        bot_id, len(dimensions_hashes), dimensions_hashes)
    # Refresh all the keys.
    _refresh_task_queues_tasks(dimensions_hashes)
    return dimensions_hashes

  # Retrieve all the dimensions_hash that this bot could run that have
  # actually been triggered in the past. Since this is under a root entity, this
  # should be fast.
  now = utils.utcnow()
  dimensions_hashes = sorted(
      obj.key.integer_id()
      for obj in BotTaskDimensions.query(ancestor=bot_root_key)
      if obj.valid_until_ts >= now)
  # Cache the full queue list for this bot.
  memcache.set(
      bot_id,
      dimensions_hashes,
      namespace='task_queues',
      time=_EXPIRATION_TIME_TASK_QUEUES)
  logging.info(
      'get_queues(%s): Query in %.3fs: can run from %d queues\n%s',
      bot_id, (utils.utcnow()-now).total_seconds(),
      len(dimensions_hashes), dimensions_hashes)
  _refresh_task_queues_tasks(dimensions_hashes)
  return dimensions_hashes