Python itertools.izip() Examples

The following are 30 code examples showing how to use itertools.izip(). These examples are extracted from open source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

You may check out the related API usage on the sidebar.

You may also want to check out all available functions/classes of the module itertools, or try the search function.

Example 1
Project: convseg   Author: chqiwang   File: tagger.py    License: MIT License 6 votes vote down vote up
def tag(self, data_iter):
        """Tag batches of sequences.

        Args:
            data_iter: An iterator that yields data batches; each batch's
                first element is the list of raw sequences to be tagged.

        Yields:
            The tagging result for one batch: the (sequences, tag-lists)
            pairs produced by ``zip(*output)``.
        """
        output = []
        for data in data_iter:
            # Map items/words to integer ids; the word mapping is repeated
            # once per position in the word window.
            batch = data_to_ids(data, [self.item2id] + [self.word2id] * self.parameters['word_window_size'])
            batch = create_input(batch)
            # Layout from create_input: item ids, auxiliary id arrays, lengths.
            seq_ids, seq_other_ids_list, seq_lengths = batch[0], batch[1: -1], batch[-1]
            feed_dict = {self.seq_ids_pl: seq_ids.astype(INT_TYPE),
                         self.seq_lengths_pl: seq_lengths.astype(INT_TYPE),
                         self.is_train_pl: False}
            for pl, v in zip(self.seq_other_ids_pls, seq_other_ids_list):
                feed_dict[pl] = v.astype(INT_TYPE)
            scores = self.sess.run(self.scores_op, feed_dict)
            stag_ids = self.inference(scores, seq_lengths)
            # Trim each predicted tag sequence back to its unpadded length
            # and map tag ids to tag strings.
            for seq, stag_id, length in izip(data[0], stag_ids, seq_lengths):
                output.append((seq, [self.id2tag[t] for t in stag_id[:length]]))
            yield zip(*output)
            output = [] 
Example 2
Project: convseg   Author: chqiwang   File: tagger.py    License: MIT License 6 votes vote down vote up
def create_input(batch):
    """
    Pad every sequence in the batch to a common length.

    Returns one numpy array per field in the batch (rows zero-padded on
    the right), followed by an array of the original sequence lengths.
    """
    assert len(batch) > 0
    lengths = [len(seq) for seq in batch[0]]
    # Pad to at least 2 so downstream ops always get a minimum width.
    target_len = max(2, max(lengths))
    padded_fields = []
    for field in batch:
        rows = []
        for ids, n in izip(field, lengths):
            assert len(ids) == n
            rows.append(ids + [0] * (target_len - n))
        padded_fields.append(np.array(rows))
    padded_fields.append(np.array(lengths))
    return padded_fields 
Example 3
Project: DOTA_models   Author: ringringyi   File: text2bin.py    License: Apache License 2.0 6 votes vote down vote up
def go(fhs):
  # Merge several aligned word-vector text files: sum the per-dimension
  # values across files, write the packed binary vectors to opt_output and
  # the vocabulary to opt_vocab. Python 2 only (`print >>` statement,
  # list-returning zip). NOTE(review): opt_vocab/opt_output appear to be
  # module-level option values — confirm against the surrounding module.
  fmt = None
  with open(opt_vocab, 'w') as vocab_out:
    with open(opt_output, 'w') as vecs_out:
      for lines in izip(*fhs):
        # One line per input file; column 0 of each is the token.
        parts = [line.split() for line in lines]
        token = parts[0][0]
        if any(part[0] != token for part in parts[1:]):
          raise IOError('vector files must be aligned')

        print >> vocab_out, token

        # Element-wise sum of the numeric columns (skip the token column).
        vec = [sum(float(x) for x in xs) for xs in zip(*parts)[1:]]
        if not fmt:
          # All vectors share one length, so the struct format is built once.
          fmt = struct.Struct('%df' % len(vec))

        vecs_out.write(fmt.pack(*vec)) 
Example 4
Project: DOTA_models   Author: ringringyi   File: graph_utils.py    License: Apache License 2.0 6 votes vote down vote up
def convert_to_graph_tool(G):
  """Convert a networkx-style graph G into a graph-tool graph.

  Returns the graph-tool graph (with an 'action' int edge property copied
  from the source edges), an array of the original node labels, and a dict
  mapping each original node to its graph-tool vertex id.
  """
  timer = utils.Timer()
  timer.tic()
  out_graph = gt.Graph(directed=G.is_directed())
  out_graph.ep['action'] = out_graph.new_edge_property('int')

  node_labels = G.nodes()
  nodes_array = np.array(node_labels)

  vertex_ids = np.zeros((nodes_array.shape[0],), dtype=np.int64)
  for idx in range(nodes_array.shape[0]):
    vertex_ids[idx] = int(out_graph.add_vertex())

  # Original node label -> graph-tool vertex id.
  label_to_id = dict(itertools.izip(node_labels, vertex_ids))

  for src, dst, data in G.edges_iter(data=True):
    edge = out_graph.add_edge(label_to_id[src], label_to_id[dst])
    out_graph.ep['action'][edge] = data['action']
  timer.toc(average=True, log_at=1, log_str='src.graph_utils.convert_to_graph_tool')
  return out_graph, nodes_array, label_to_id 
Example 5
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 6 votes vote down vote up
def averageSeries(requestContext, *seriesLists):
    """
    Short Alias: avg()

    Takes one metric or a wildcard seriesList.
    Draws the average value of all metrics passed at each time.

    Example:

    .. code-block:: none

      &target=averageSeries(company.server.*.threads.busy)

    """
    yield defer.succeed(None)
    normalized, start, end, step = normalize(seriesLists)
    name = "averageSeries(%s)" % formatPathExpressions(normalized)
    # Mean across all series at each aligned timestamp.
    points = (safeDiv(safeSum(column), safeLen(column))
              for column in izip(*normalized))
    result = TimeSeries(name, start, end, step, points)
    result.pathExpression = name
    returnValue([result]) 
Example 6
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 6 votes vote down vote up
def stddevSeries(requestContext, *seriesLists):
    """

    Takes one metric or a wildcard seriesList.
    Draws the standard deviation of all metrics passed at each time.

    Example:

    .. code-block:: none

      &target=stddevSeries(company.server.*.threads.busy)

    """
    yield defer.succeed(None)
    normalized, start, end, step = normalize(seriesLists)
    name = "stddevSeries(%s)" % formatPathExpressions(normalized)
    # Standard deviation across all series at each aligned timestamp.
    points = (safeStdDev(column) for column in izip(*normalized))
    result = TimeSeries(name, start, end, step, points)
    result.pathExpression = name
    returnValue([result]) 
Example 7
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 6 votes vote down vote up
def maxSeries(requestContext, *seriesLists):
    """
    Takes one metric or a wildcard seriesList.
    For each datapoint from each metric passed in, pick the maximum value and graph it.

    Example:

    .. code-block:: none

      &target=maxSeries(Server*.connections.total)

    """
    yield defer.succeed(None)
    normalized, start, end, step = normalize(seriesLists)
    name = "maxSeries(%s)" % formatPathExpressions(normalized)
    # Maximum across all series at each aligned timestamp.
    points = (safeMax(column) for column in izip(*normalized))
    result = TimeSeries(name, start, end, step, points)
    result.pathExpression = name
    returnValue([result]) 
Example 8
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 6 votes vote down vote up
def rangeOfSeries(requestContext, *seriesLists):
    """
    Takes a wildcard seriesList.
    Distills down a set of inputs into the range of the series

    Example:

    .. code-block:: none

        &target=rangeOfSeries(Server*.connections.total)

    """
    yield defer.succeed(None)
    normalized, start, end, step = normalize(seriesLists)
    name = "rangeOfSeries(%s)" % formatPathExpressions(normalized)
    # Max minus min across all series at each aligned timestamp.
    points = (safeSubtract(max(column), min(column))
              for column in izip(*normalized))
    result = TimeSeries(name, start, end, step, points)
    result.pathExpression = name
    returnValue([result]) 
Example 9
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 6 votes vote down vote up
def percentileOfSeries(requestContext, seriesList, n, interpolate=False):
    """
    percentileOfSeries returns a single series which is composed of the n-percentile
    values taken across a wildcard series at each point. Unless `interpolate` is
    set to True, percentile values are actual values contained in one of the
    supplied series.
    """
    yield defer.succeed(None)
    if n <= 0:
        raise ValueError(
            'The requested percent is required to be greater than 0')

    name = 'percentileOfSeries(%s,%g)' % (seriesList[0].pathExpression, n)
    start, end, step = normalize([seriesList])[1:]
    # n-th percentile across all series at each aligned timestamp.
    points = [_getPercentile(column, n, interpolate)
              for column in izip(*seriesList)]
    result = TimeSeries(name, start, end, step, points)
    result.pathExpression = name

    returnValue([result]) 
Example 10
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 6 votes vote down vote up
def countSeries(requestContext, *seriesLists):
    """
    Draws a horizontal line representing the number of nodes found in the seriesList.

    .. code-block:: none

      &target=countSeries(carbon.agents.*.*)

    """
    yield defer.succeed(None)
    (seriesList, start, end, step) = normalize(seriesLists)
    name = "countSeries(%s)" % formatPathExpressions(seriesList)
    # len() already returns an int, so the original int(len(row)) cast
    # was redundant.
    values = (len(row) for row in izip(*seriesList))
    series = TimeSeries(name, start, end, step, values)
    series.pathExpression = name
    returnValue([series]) 
Example 11
Project: loaner   Author: google   File: config_model_test.py    License: Apache License 2.0 6 votes vote down vote up
def _create_config_parameters():
  """Creates config (id, value) pairs for parameterized test cases.

  Yields:
    A one-element list holding a (config_id, config_value) tuple.
  """
  config_pairs = [
      ('string_config', 'config value 1'),
      ('integer_config', 1),
      ('bool_config', True),
      ('list_config', ['email1', 'email2']),
  ]
  for pair in config_pairs:
    yield [pair] 
Example 12
Project: honeybee   Author: ladybug-tools   File: analysispoint.py    License: GNU General Public License v3.0 6 votes vote down vote up
def _calculate_annual_sunlight_exposure(
            values, hoys, threshhold=None, blinds_state_ids=None, occ_schedule=None,
            target_hours=None):
        """Compute annual sunlight exposure (ASE) for one analysis point.

        Counts the occupied hours whose value exceeds the threshold and
        reports whether that count stays below the target, together with
        the count itself and the list of offending hours.
        """
        limit = threshhold or 1000
        target = target_hours or 250
        schedule = occ_schedule or Schedule.eight_am_to_six_pm()
        problematic_hours = []
        for hour, value in zip(hoys, values):
            if hour in schedule and value > limit:
                problematic_hours.append(hour)
        ase = len(problematic_hours)
        return ase < target, ase, problematic_hours 
Example 13
Project: vehicle_counting_tensorflow   Author: ahmetozlu   File: ssd_feature_extractor_test.py    License: MIT License 6 votes vote down vote up
def check_extract_features_returns_correct_shape(
      self, batch_size, image_height, image_width, depth_multiplier,
      pad_to_multiple, expected_feature_map_shapes, use_explicit_padding=False,
      use_keras=False):
    """Checks feature-extractor output shapes for a static input batch."""
    def graph_fn(image_tensor):
      return self._extract_features(image_tensor,
                                    depth_multiplier,
                                    pad_to_multiple,
                                    use_explicit_padding,
                                    use_keras=use_keras)

    random_images = np.random.rand(batch_size, image_height, image_width,
                                   3).astype(np.float32)
    feature_maps = self.execute(graph_fn, [random_images])
    # Compare each returned feature map's shape against expectations.
    for actual_map, wanted_shape in itertools.izip(
        feature_maps, expected_feature_map_shapes):
      self.assertAllEqual(actual_map.shape, wanted_shape) 
Example 14
Project: vehicle_counting_tensorflow   Author: ahmetozlu   File: ssd_feature_extractor_test.py    License: MIT License 6 votes vote down vote up
def check_extract_features_returns_correct_shapes_with_dynamic_inputs(
      self, batch_size, image_height, image_width, depth_multiplier,
      pad_to_multiple, expected_feature_map_shapes, use_explicit_padding=False,
      use_keras=False):
    """Checks feature-extractor output shapes with dynamic input sizes."""
    def graph_fn(image_height, image_width):
      # The input tensor is built inside the graph so height/width remain
      # dynamic (fed as int32 values below).
      image_tensor = tf.random_uniform([batch_size, image_height, image_width,
                                        3], dtype=tf.float32)
      return self._extract_features(image_tensor,
                                    depth_multiplier,
                                    pad_to_multiple,
                                    use_explicit_padding,
                                    use_keras=use_keras)

    dynamic_dims = [
        np.array(image_height, dtype=np.int32),
        np.array(image_width, dtype=np.int32)
    ]
    feature_maps = self.execute_cpu(graph_fn, dynamic_dims)
    for actual_map, wanted_shape in itertools.izip(
        feature_maps, expected_feature_map_shapes):
      self.assertAllEqual(actual_map.shape, wanted_shape) 
Example 15
Project: vehicle_counting_tensorflow   Author: ahmetozlu   File: mobilenet_v2_test.py    License: MIT License 6 votes vote down vote up
def _check_returns_correct_shape(
      self, batch_size, image_height, image_width, depth_multiplier,
      expected_feature_map_shapes, use_explicit_padding=False, min_depth=None,
      layer_names=None):
    """Checks model layer-output shapes for a static input batch."""
    def graph_fn(image_tensor):
      model = self._create_application_with_layer_outputs(
          layer_names=layer_names,
          batchnorm_training=False, use_explicit_padding=use_explicit_padding,
          min_depth=min_depth,
          alpha=depth_multiplier)
      return model(image_tensor)

    random_images = np.random.rand(batch_size, image_height, image_width,
                                   3).astype(np.float32)
    feature_maps = self.execute(graph_fn, [random_images])

    # Compare each returned feature map's shape against expectations.
    for actual_map, wanted_shape in itertools.izip(
        feature_maps, expected_feature_map_shapes):
      self.assertAllEqual(actual_map.shape, wanted_shape) 
Example 16
Project: vehicle_counting_tensorflow   Author: ahmetozlu   File: mobilenet_v2_test.py    License: MIT License 6 votes vote down vote up
def _check_returns_correct_shapes_with_dynamic_inputs(
      self, batch_size, image_height, image_width, depth_multiplier,
      expected_feature_map_shapes, use_explicit_padding=False,
      layer_names=None):
    """Checks model layer-output shapes with dynamic input sizes."""
    def graph_fn(image_height, image_width):
      # The input tensor is built inside the graph so height/width remain
      # dynamic (fed as int32 values below).
      image_tensor = tf.random_uniform([batch_size, image_height, image_width,
                                        3], dtype=tf.float32)
      model = self._create_application_with_layer_outputs(
          layer_names=layer_names,
          batchnorm_training=False, use_explicit_padding=use_explicit_padding,
          alpha=depth_multiplier)
      return model(image_tensor)

    dynamic_dims = [
        np.array(image_height, dtype=np.int32),
        np.array(image_width, dtype=np.int32)
    ]
    feature_maps = self.execute_cpu(graph_fn, dynamic_dims)

    for actual_map, wanted_shape in itertools.izip(
        feature_maps, expected_feature_map_shapes):
      self.assertAllEqual(actual_map.shape, wanted_shape) 
Example 17
Project: opt-mmd   Author: djsutherland   File: learn_kernel.py    License: BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def run_train_epoch(X_train, Y_train, batchsize, train_fn):
    """Run one training epoch; return (mean MMD^2, mean objective)."""
    total_mmd2 = 0
    total_obj = 0
    n_batches = 0
    # Shuffle X and Y independently so the pairing differs every epoch.
    paired_batches = itertools.izip(
        iterate_minibatches(X_train, batchsize=batchsize, shuffle=True),
        iterate_minibatches(Y_train, batchsize=batchsize, shuffle=True),
    )
    for (Xbatch,), (Ybatch,) in paired_batches:
        mmd2, obj = train_fn(Xbatch, Ybatch)
        assert np.isfinite(mmd2)
        assert np.isfinite(obj)
        total_mmd2 += mmd2
        total_obj += obj
        n_batches += 1
    return total_mmd2 / n_batches, total_obj / n_batches 
Example 18
Project: jbox   Author: jpush   File: itsdangerous.py    License: MIT License 6 votes vote down vote up
def constant_time_compare(val1, val2):
    """Return True if the two strings are equal, False otherwise.

    The time taken is independent of the number of characters that match,
    which defends against timing attacks.  Do not use this function for
    anything other than comparison with known-length targets.

    This should be implemented in C in order to get it completely right.
    """
    # Prefer the interpreter's native constant-time primitive when the
    # wrapper module found one (presumably hmac.compare_digest — confirm
    # against where _builtin_constant_time_compare is assigned).
    if _builtin_constant_time_compare is not None:
        return _builtin_constant_time_compare(val1, val2)
    len_eq = len(val1) == len(val2)
    if len_eq:
        result = 0
        left = val1
    else:
        # Lengths differ: the result is forced to "unequal" (result = 1),
        # but we still scan all of val2 so the loop duration does not leak
        # where the strings diverge.
        result = 1
        left = val2
    for x, y in izip(bytearray(left), bytearray(val2)):
        # Accumulate XOR differences without data-dependent branching.
        result |= x ^ y
    return result == 0 
Example 19
Project: SplunkForPCAP   Author: DanielSchwartz1   File: search_command.py    License: MIT License 6 votes vote down vote up
def _records_protocol_v1(self, ifile):
        """Yield one OrderedDict record per csv data row of *ifile*.

        Python 2 iterator protocol: ``reader.next()`` grabs the header
        row.  Columns named ``__mv_<name>`` carry the encoded multi-value
        form of column ``<name>``; when non-empty, the decoded list is
        stored under the plain name and the plain column never overwrites
        it (the ``elif fieldname not in record`` guard).
        """

        reader = csv.reader(ifile, dialect=CsvDialect)

        try:
            fieldnames = reader.next()
        except StopIteration:
            # Empty input: no header row, hence no records.
            return

        # Map '__mv_<name>' -> '<name>' for every multi-value column.
        mv_fieldnames = {name: name[len('__mv_'):] for name in fieldnames if name.startswith('__mv_')}

        if len(mv_fieldnames) == 0:
            # Fast path: no multi-value columns, rows map 1:1 to records.
            for values in reader:
                yield OrderedDict(izip(fieldnames, values))
            return

        for values in reader:
            record = OrderedDict()
            for fieldname, value in izip(fieldnames, values):
                if fieldname.startswith('__mv_'):
                    if len(value) > 0:
                        # Non-empty encoded multi-value: decode and store
                        # under the plain field name.
                        record[mv_fieldnames[fieldname]] = self._decode_list(value)
                elif fieldname not in record:
                    record[fieldname] = value
            yield record 
Example 20
Project: convseg   Author: chqiwang   File: cws.py    License: MIT License 5 votes vote down vote up
def create_output(seqs, stags):
    """
    Create final output from characters and BMES tags.

    A double-space word boundary is inserted after every 'S' (single) or
    'E' (end) tag, and each sentence is joined back into one string.
    """
    sentences = []
    for chars, tags in izip(seqs, stags):
        pieces = []
        for ch, tag in izip(chars, tags):
            pieces.append(ch)
            if tag in ('S', 'E'):
                pieces.append('  ')
        sentences.append(''.join(pieces))
    return sentences 
Example 21
Project: open-sesame   Author: swabhs   File: sentence.py    License: Apache License 2.0 5 votes vote down vote up
def get_common_path(self, src, dest):
        """
        :param src: source node in tree
        :param dest: destination node
        :return: undirected path from src to dest
        """
        # Direct parent/child: no intermediate nodes on the path.
        if dest == self.depheads[src] or src == self.depheads[dest]:
            return []
        # One node is an ancestor of the other: slice its root path so the
        # ancestor's own portion is dropped.
        if dest in self.rootpath[src]:
            return self.rootpath[src][:-len(self.rootpath[dest]) - 1]
        if src in self.rootpath[dest]:
            return self.rootpath[dest][:-len(self.rootpath[src]) - 1]

        # General case: compare both root-to-node paths top-down; i counts
        # the length of the shared prefix.
        pathfrom = self.rootpath[src][::-1]
        pathto = self.rootpath[dest][::-1]
        i = 0
        for n1, n2 in izip(pathfrom, pathto):
            if n1 == n2:
                i += 1
                continue
            if n1 == dest:
                return pathfrom[:i + 1]
            # Join the two diverging tails through the common ancestor.
            return pathfrom[i:][::-1] + pathto[i:]

        # Loop exhausted: the shorter path is a prefix of the longer one
        # (izip stops at the shorter input), so return the remaining tail.
        if i == len(pathfrom):
            return pathto[i - 1:]
        return pathfrom[i - 1:][::-1] 
Example 22
Project: open-sesame   Author: swabhs   File: sentence.py    License: Apache License 2.0 5 votes vote down vote up
def get_lca(self, src, dest):
        """Return (node, index) for the lowest common ancestor of the
        constituents *src* and *dest*.

        NOTE(review): if the two root paths never diverge before izip
        stops (one path is a prefix of the other), the loop falls through
        and the function implicitly returns None — confirm callers never
        hit that case.
        """
        if src == dest:
            return src, self.idxlabelmap[src.label()]
        # Root paths reversed to run root -> node.
        pathfrom = self.crootpaths[src.label()][::-1]
        pathto = self.crootpaths[dest.label()][::-1]
        common = 0
        for n1, n2 in izip(pathfrom, pathto):
            if n1 == n2:
                common += 1
                continue
            # First divergence: the last shared node is the LCA.
            return pathfrom[common - 1], self.idxlabelmap[pathfrom[common - 1].label()] 
Example 23
Project: open-sesame   Author: swabhs   File: sentence.py    License: Apache License 2.0 5 votes vote down vote up
def get_common_cpath(self, src, dest):
        """Return the constituent path from src to dest through their
        lowest common ancestor."""
        if src == dest:
            return [src]

        # Root paths reversed to run root -> node.
        path_src = self.crootpaths[src.label()][::-1]
        path_dest = self.crootpaths[dest.label()][::-1]
        # Count how many leading ancestors the two paths share.
        shared = 0
        for a, b in izip(path_src, path_dest):
            if a != b:
                break
            shared += 1
        # Walk down from src to the LCA, then down again to dest.
        return path_src[shared - 1:][::-1] + path_dest[shared:] 
Example 24
Project: open-sesame   Author: swabhs   File: preprocess_syntax.py    License: Apache License 2.0 5 votes vote down vote up
def join_fnconll_parseyconll(conllfile, synfile, outfile):
    """Merge a FrameNet CoNLL 2009 file with a parsey (SyntaxNet) CoNLL file.

    For each aligned line pair, the POS, HEAD and DEPREL prediction columns
    of the 15-column CoNLL 2009 record are replaced by the SyntaxNet values
    from the 10-column parsey record, and the merged record is written to
    *outfile*. Blank (sentence-separator) lines are preserved.

    Raises:
        Exception: if either input line has the wrong column count or the
            merged record does not keep the original column count.
    """
    # The explicit sf.close()/cf.close()/outf.close() calls of the original
    # were removed: the with-statements already close every file.
    with codecs.open(outfile, "w", "utf-8") as outf:
        with codecs.open(conllfile, "r", "utf-8") as cf:
            with codecs.open(synfile, "r", "utf-8") as sf:
                for l, sl in izip(cf, sf):
                    cfields = l.strip().split("\t")
                    # A lone field means a blank sentence-separator line.
                    if len(cfields) == 1:
                        outf.write("\n")
                        continue

                    if len(cfields) != 15:
                        raise Exception("incorrect CoNLL 2009 format", l, cfields)

                    sfields = sl.strip().split("\t")
                    if len(sfields) != 10:
                        raise Exception("incorrect parsey CoNLL format")

                    newfields = cfields[:4] # ID FORM LEMMA PLEMMA = 0,1,2,3
                    newfields += sfields[3:6:2] # syntaxnetPOS fnPOS = 4,5  ~ replacing POS PPOS
                    newfields += cfields[6:9] # sent_num PFEAT HEAD = 6,7,8 ~ replacing FEAT PFEAT HEAD
                    newfields += sfields[6:7] # syntaxnetHEAD = 9           ~ replacing PHEAD
                    newfields += cfields[10:11] # DEPREL = 10
                    newfields += sfields[7:8] # syntaxnetDEPREL = 11        ~ replacing PDEPREL
                    newfields += cfields[12:] # FILLPRED PRED APREDS = 12,13,14
                    if len(newfields) != len(cfields):
                        raise Exception("didn't join properly", len(newfields), len(cfields), newfields)
                    outf.write("\t".join(newfields) + "\n")
Example 25
Project: DOTA_models   Author: ringringyi   File: prep.py    License: Apache License 2.0 5 votes vote down vote up
def write_vocab_and_sums(vocab, sums, vocab_filename, sums_filename):
  """Writes vocabulary and marginal sum files."""
  # Python 2 only (`print >>` statement syntax). Files are written under
  # FLAGS.output_dir — presumably the module-level flags object; confirm
  # against the surrounding module.
  with open(os.path.join(FLAGS.output_dir, vocab_filename), 'w') as vocab_out:
    with open(os.path.join(FLAGS.output_dir, sums_filename), 'w') as sums_out:
      # One token and its marginal count per line, written in lockstep.
      for tok, cnt in itertools.izip(vocab, sums):
        print >> vocab_out, tok
        print >> sums_out, cnt 
Example 26
Project: InsightAgent   Author: insightfinder   File: collectdReportMetrics.py    License: Apache License 2.0 5 votes vote down vote up
def calculate_avg_cpu_values(all_latest_timestamps, each_file, filenames, new_prev_endtime_epoch_l, raw_data_l,
                             start_time_epoch_l, date_l):
    """Merge per-CPU collectd csv files into average busy-CPU percentages.

    Reads the aggregate, cpu-user and cpu-idle csv files in lockstep,
    computes 100 * (1 - idle/total) for every row at or after
    start_time_epoch_l, and stores the value in raw_data_l keyed by the
    epoch-second timestamp string.

    Returns the epoch (ms) of the newest row processed and appends it to
    all_latest_timestamps; missing files are tolerated (IOError prints a
    blank line, matching the original best-effort behavior).
    """
    try:
        # Use a with-statement so all three handles are always closed; the
        # original opened them and never closed them (a handle leak).
        with open(os.path.join(csvpath, each_file + date_l)) as csv_file_1, \
                open(os.path.join(
                    csvpath, 'aggregation-cpu-average/cpu-user-' + date_l)) as csv_file_2, \
                open(os.path.join(
                    csvpath, 'aggregation-cpu-average/cpu-idle-' + date_l)) as csv_file_3:
            reader1 = csv.reader(csv_file_1)
            reader2 = csv.reader(csv_file_2)
            reader3 = csv.reader(csv_file_3)

            for row, row1, row2 in itertools.izip(reader1, reader2, reader3):
                # Skip the header row, and rows older than the start time.
                if reader1.line_num > 1:
                    if long(int(float(row[0]))) < long(start_time_epoch_l):
                        continue
                    timestamp_str = str(int(float(row[0])))
                    new_prev_endtime_epoch_l = long(timestamp_str) * 1000.0
                    # Reuse the existing per-timestamp dict when present
                    # (the original duplicated both branch bodies).
                    value_list = raw_data_l.get(timestamp_str, {})
                    total = float(row[1]) + float(row1[1]) + float(row2[1])
                    idle = float(row2[1])
                    # Busy percentage: 100 * (1 - idle/total).
                    value_list[filenames[each_file][0]] = str(
                        round((1 - float(idle / total)) * 100, 4))
                    raw_data_l[timestamp_str] = value_list
        all_latest_timestamps.append(new_prev_endtime_epoch_l)

    except IOError:
        print("")
    return new_prev_endtime_epoch_l 
Example 27
Project: pyscf   Author: pyscf   File: misc.py    License: Apache License 2.0 5 votes vote down vote up
def izip(*args):
    '''Version-agnostic izip: itertools.izip on Python 2, zip on Python 3.'''
    if sys.version_info >= (3,):
        return zip(*args)
    return itertools.izip(*args)
Example 28
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 5 votes vote down vote up
def formatPathExpressions(seriesList):
    """Return the series' path expressions, comma-joined, first-seen order,
    duplicates removed.

    The original used a side-effecting list comprehension plus
    list.count() (O(n^2)); this keeps order with a seen-set for O(n).
    """
    pathExpressions = []
    seen = set()
    for series in seriesList:
        path = series.pathExpression
        if path not in seen:
            seen.add(path)
            pathExpressions.append(path)
    return ','.join(pathExpressions)

# Series Functions

# NOTE: Some of the functions below use izip, which may be problematic.
# izip stops when it hits the end of the shortest series
# in practice this *shouldn't* matter because all series will cover
# the same interval, despite having possibly different steps... 
Example 29
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 5 votes vote down vote up
def sumSeries(requestContext, *seriesLists):
    """
    Short form: sum()

    This will add metrics together and return the sum at each datapoint. (See
    integral for a sum over time)

    Example:

    .. code-block:: none

      &target=sum(company.server.application*.requestsHandled)

    This would show the sum of all requests handled per minute (provided
    requestsHandled are collected once a minute).   If metrics with different
    retention rates are combined, the coarsest metric is graphed, and the sum
    of the other metrics is averaged for the metrics with finer retention rates.

    """
    yield defer.succeed(None)
    try:
        (seriesList, start, end, step) = normalize(seriesLists)
    except Exception:
        # normalize() fails on empty input: return no series rather than
        # erroring out. Narrowed from a bare `except:` so that
        # KeyboardInterrupt/SystemExit are no longer swallowed.
        returnValue([])
    name = "sumSeries(%s)" % formatPathExpressions(seriesList)
    values = (safeSum(row) for row in izip(*seriesList))
    series = TimeSeries(name, start, end, step, values)
    series.pathExpression = name
    returnValue([series]) 
Example 30
Project: worker   Author: moira-alert   File: functions.py    License: GNU General Public License v3.0 5 votes vote down vote up
def diffSeries(requestContext, *seriesLists):
    """
    Subtracts series 2 through n from series 1.

    Example:

    .. code-block:: none

      &target=diffSeries(service.connections.total,service.connections.failed)

    To diff a series and a constant, one should use offset instead of (or in
    addition to) diffSeries

    Example:

    .. code-block:: none

      &target=offset(service.connections.total,-5)

      &target=offset(diffSeries(service.connections.total,service.connections.failed),-4)

    """
    yield defer.succeed(None)
    normalized, start, end, step = normalize(seriesLists)
    name = "diffSeries(%s)" % formatPathExpressions(normalized)
    # Per-timestamp difference: the first series minus all the others.
    points = (safeDiff(column) for column in izip(*normalized))
    result = TimeSeries(name, start, end, step, points)
    result.pathExpression = name
    returnValue([result])