Python numpy.timedelta64() Examples
The following are 30 code examples showing how to use numpy.timedelta64(). They are extracted from open source projects; the project, author, source file, and license are listed above each example.
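Before the project-specific examples, here is a minimal, self-contained sketch of the core numpy.timedelta64() usage they all build on: constructing a duration from an integer count and a unit code, doing arithmetic with numpy.datetime64, and converting units by dividing two timedeltas. The variable names are illustrative only and are not taken from any of the projects below.

import numpy as np

# Construct durations from an integer count and a unit code
one_day = np.timedelta64(1, 'D')        # 1 day
ninety_min = np.timedelta64(90, 'm')    # 90 minutes

# Arithmetic with datetime64 values
start = np.datetime64('2014-01-01T12:00')
end = start + one_day                   # 2014-01-02T12:00

# Dividing one timedelta64 by another yields a plain float,
# a common idiom for unit conversion (here: minutes to hours)
hours = ninety_min / np.timedelta64(1, 'h')

print(end, hours)                       # 2014-01-02T12:00 1.5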
Example 1
Project: aospy Author: spencerahill File: test_data_loader.py License: Apache License 2.0
def test_maybe_apply_time_shift(data_loader, ds_with_time_bounds, ds_inst,
                                var_name, generate_file_set_args):
    ds = xr.decode_cf(ds_with_time_bounds)
    da = ds[var_name]

    result = data_loader._maybe_apply_time_shift(
        da.copy(), **generate_file_set_args)[TIME_STR]
    assert result.identical(da[TIME_STR])

    offset = data_loader._maybe_apply_time_shift(
        da.copy(), {'days': 1}, **generate_file_set_args)
    result = offset[TIME_STR]
    expected = da[TIME_STR] + np.timedelta64(1, 'D')
    expected[TIME_STR] = expected
    assert result.identical(expected)
Example 2
Project: aospy Author: spencerahill File: test_data_loader.py License: Apache License 2.0
def test_maybe_apply_time_shift_inst(gfdl_data_loader, ds_inst, var_name,
                                     generate_file_set_args):
    ds_inst = xr.decode_cf(ds_inst)
    generate_file_set_args['dtype_in_time'] = 'inst'
    generate_file_set_args['intvl_in'] = '3hr'
    da = ds_inst[var_name]
    result = gfdl_data_loader._maybe_apply_time_shift(
        da.copy(), **generate_file_set_args)[TIME_STR]

    expected = da[TIME_STR] + np.timedelta64(-3, 'h')
    expected[TIME_STR] = expected
    assert result.identical(expected)

    generate_file_set_args['intvl_in'] = 'daily'
    da = ds_inst[var_name]
    result = gfdl_data_loader._maybe_apply_time_shift(
        da.copy(), **generate_file_set_args)[TIME_STR]

    expected = da[TIME_STR]
    expected[TIME_STR] = expected
    assert result.identical(expected)
Example 3
Project: gnocchi Author: gnocchixyz File: carbonara.py License: Apache License 2.0
def __init__(self, ts, granularity, start=None):
    # NOTE(sileht): The whole class assumes ts is ordered and has no
    # duplicate timestamps. It uses numpy.unique, which sorts the list,
    # but we always assume the order to be the same as the input.
    self.granularity = granularity
    self.can_derive = isinstance(granularity, numpy.timedelta64)
    self.start = start
    if start is None:
        self._ts = ts
        self._ts_for_derive = ts
    else:
        self._ts = ts[numpy.searchsorted(ts['timestamps'], start):]
        if self.can_derive:
            start_derive = start - granularity
            self._ts_for_derive = ts[
                numpy.searchsorted(ts['timestamps'], start_derive):
            ]
    if self.can_derive:
        self.indexes = round_timestamp(self._ts['timestamps'], granularity)
    elif calendar.GROUPINGS.get(granularity):
        self.indexes = calendar.GROUPINGS.get(granularity)(
            self._ts['timestamps'])
    self.tstamps, self.counts = numpy.unique(self.indexes,
                                             return_counts=True)
Example 4
Project: gnocchi Author: gnocchixyz File: carbonara.py License: Apache License 2.0
def truncate(self, oldest_point=None):
    """Truncate the time series up to oldest_point excluded.

    :param oldest_point: Oldest point to keep from, this excluded.
                         Default is the aggregation timespan.
    :type oldest_point: numpy.datetime64 or numpy.timedelta64
    :return: The oldest point that could have been kept.
    """
    last = self.last
    if last is None:
        return
    if oldest_point is None:
        oldest_point = self.aggregation.timespan
        if oldest_point is None:
            return
    if isinstance(oldest_point, numpy.timedelta64):
        oldest_point = last - oldest_point
    index = numpy.searchsorted(self.ts['timestamps'], oldest_point,
                               side='right')
    self.ts = self.ts[index:]
    return oldest_point
Example 5
Project: gnocchi Author: gnocchixyz File: test_storage.py License: Apache License 2.0
def test_corrupted_split(self):
    self.incoming.add_measures(self.metric.id, [
        incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
    ])
    self.trigger_processing()

    aggregation = self.metric.archive_policy.get_aggregation(
        "mean", numpy.timedelta64(5, 'm'))

    with mock.patch('gnocchi.carbonara.AggregatedTimeSerie.unserialize',
                    side_effect=carbonara.InvalidData()):
        results = self.storage._get_splits_and_unserialize({
            self.metric: {
                aggregation: [
                    carbonara.SplitKey(
                        numpy.datetime64(1387800000, 's'),
                        numpy.timedelta64(5, 'm'))
                ],
            },
        })[self.metric][aggregation]
        self.assertEqual(1, len(results))
        self.assertIsInstance(results[0], carbonara.AggregatedTimeSerie)
        # Assert it's an empty one since corrupted
        self.assertEqual(0, len(results[0]))
        self.assertEqual(results[0].aggregation, aggregation)
Example 6
Project: gnocchi Author: gnocchixyz File: test_storage.py License: Apache License 2.0
def test_get_splits_and_unserialize(self):
    self.incoming.add_measures(self.metric.id, [
        incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
    ])
    self.trigger_processing()

    aggregation = self.metric.archive_policy.get_aggregation(
        "mean", numpy.timedelta64(5, 'm'))

    results = self.storage._get_splits_and_unserialize({
        self.metric: {
            aggregation: [
                carbonara.SplitKey(
                    numpy.datetime64(1387800000, 's'),
                    numpy.timedelta64(5, 'm')),
            ],
        },
    })[self.metric][aggregation]
    self.assertEqual(1, len(results))
    self.assertIsInstance(results[0], carbonara.AggregatedTimeSerie)
    # Assert it's not an empty one
    self.assertGreater(len(results[0]), 0)
    self.assertEqual(results[0].aggregation, aggregation)
Example 7
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_derived_hole(self):
    ts = carbonara.TimeSerie.from_data(
        [datetime.datetime(2014, 1, 1, 12, 0, 0),
         datetime.datetime(2014, 1, 1, 12, 0, 4),
         datetime.datetime(2014, 1, 1, 12, 1, 2),
         datetime.datetime(2014, 1, 1, 12, 1, 14),
         datetime.datetime(2014, 1, 1, 12, 1, 24),
         datetime.datetime(2014, 1, 1, 12, 3, 2),
         datetime.datetime(2014, 1, 1, 12, 3, 22),
         datetime.datetime(2014, 1, 1, 12, 3, 42),
         datetime.datetime(2014, 1, 1, 12, 4, 9)],
        [50, 55, 65, 66, 70, 105, 108, 200, 202])
    ts = self._resample(ts, numpy.timedelta64(60, 's'), 'last',
                        derived=True)

    self.assertEqual(4, len(ts))
    self.assertEqual(
        [(datetime64(2014, 1, 1, 12, 0, 0), 5),
         (datetime64(2014, 1, 1, 12, 1, 0), 4),
         (datetime64(2014, 1, 1, 12, 3, 0), 92),
         (datetime64(2014, 1, 1, 12, 4, 0), 2)],
        list(ts.fetch(
            from_timestamp=datetime64(2014, 1, 1, 12))))
Example 8
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def _do_test_aggregation(self, name, v1, v2, v3):
    # NOTE(gordc): test data must have a group of odd count to properly
    # test 50pct test case.
    ts = carbonara.TimeSerie.from_data(
        [datetime64(2014, 1, 1, 12, 0, 0),
         datetime64(2014, 1, 1, 12, 0, 10),
         datetime64(2014, 1, 1, 12, 0, 20),
         datetime64(2014, 1, 1, 12, 0, 30),
         datetime64(2014, 1, 1, 12, 0, 40),
         datetime64(2014, 1, 1, 12, 1, 0),
         datetime64(2014, 1, 1, 12, 1, 10),
         datetime64(2014, 1, 1, 12, 1, 20),
         datetime64(2014, 1, 1, 12, 1, 30),
         datetime64(2014, 1, 1, 12, 1, 40),
         datetime64(2014, 1, 1, 12, 1, 50),
         datetime64(2014, 1, 1, 12, 2, 0),
         datetime64(2014, 1, 1, 12, 2, 10)],
        [3, 5, 2, 3, 5, 8, 11, 22, 10, 42, 9, 4, 2])
    ts = self._resample(ts, numpy.timedelta64(60, 's'), name)

    self.assertEqual(3, len(ts))
    self.assertEqual(v1, ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
    self.assertEqual(v2, ts[datetime64(2014, 1, 1, 12, 1, 0)][1])
    self.assertEqual(v3, ts[datetime64(2014, 1, 1, 12, 2, 0)][1])
Example 9
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_aggregation_std_with_unique(self):
    ts = carbonara.TimeSerie.from_data(
        [datetime64(2014, 1, 1, 12, 0, 0)], [3])
    ts = self._resample(ts, numpy.timedelta64(60, 's'), 'std')
    self.assertEqual(0, len(ts), ts.values)

    ts = carbonara.TimeSerie.from_data(
        [datetime64(2014, 1, 1, 12, 0, 0),
         datetime64(2014, 1, 1, 12, 0, 4),
         datetime64(2014, 1, 1, 12, 0, 9),
         datetime64(2014, 1, 1, 12, 1, 6)],
        [3, 6, 5, 9])
    ts = self._resample(ts, numpy.timedelta64(60, 's'), "std")

    self.assertEqual(1, len(ts))
    self.assertEqual(1.5275252316519465,
                     ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
Example 10
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_serialize(self):
    ts = {'sampling': numpy.timedelta64(500, 'ms'), 'agg': 'mean'}
    tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
    tsb.set_values(numpy.array([
        (datetime64(2014, 1, 1, 12, 0, 0, 1234), 3),
        (datetime64(2014, 1, 1, 12, 0, 0, 321), 6),
        (datetime64(2014, 1, 1, 12, 1, 4, 234), 5),
        (datetime64(2014, 1, 1, 12, 1, 9, 32), 7),
        (datetime64(2014, 1, 1, 12, 2, 12, 532), 1)],
        dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
        before_truncate_callback=functools.partial(
            self._resample_and_merge, agg_dict=ts))
    key = ts['return'].get_split_key()
    o, s = ts['return'].serialize(key)
    self.assertEqual(ts['return'],
                     carbonara.AggregatedTimeSerie.unserialize(
                         s, key, ts['return'].aggregation))
Example 11
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_no_truncation(self):
    ts = {'sampling': numpy.timedelta64(60, 's'), 'agg': 'mean'}
    tsb = carbonara.BoundTimeSerie()

    for i in six.moves.range(1, 11):
        tsb.set_values(numpy.array([
            (datetime64(2014, 1, 1, 12, i, i), float(i))],
            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
            before_truncate_callback=functools.partial(
                self._resample_and_merge, agg_dict=ts))
        tsb.set_values(numpy.array([
            (datetime64(2014, 1, 1, 12, i, i + 1), float(i + 1))],
            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
            before_truncate_callback=functools.partial(
                self._resample_and_merge, agg_dict=ts))
        self.assertEqual(i, len(list(ts['return'].fetch())))
Example 12
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_split_key(self):
    self.assertEqual(
        numpy.datetime64("2014-10-07"),
        carbonara.SplitKey.from_timestamp_and_sampling(
            numpy.datetime64("2015-01-01T15:03"),
            numpy.timedelta64(3600, 's')))
    self.assertEqual(
        numpy.datetime64("2014-12-31 18:00"),
        carbonara.SplitKey.from_timestamp_and_sampling(
            numpy.datetime64("2015-01-01 15:03:58"),
            numpy.timedelta64(58, 's')))

    key = carbonara.SplitKey.from_timestamp_and_sampling(
        numpy.datetime64("2015-01-01 15:03"),
        numpy.timedelta64(3600, 's'))

    self.assertGreater(key, numpy.datetime64("1970"))
    self.assertGreaterEqual(key, numpy.datetime64("1970"))
Example 13
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_split(self):
    sampling = numpy.timedelta64(5, 's')
    points = 100000
    ts = carbonara.TimeSerie.from_data(
        timestamps=list(map(datetime.datetime.utcfromtimestamp,
                            six.moves.range(points))),
        values=list(six.moves.range(points)))
    agg = self._resample(ts, sampling, 'mean')

    grouped_points = list(agg.split())

    self.assertEqual(
        math.ceil((points / sampling.astype(float))
                  / carbonara.SplitKey.POINTS_PER_SPLIT),
        len(grouped_points))
    self.assertEqual("0.0",
                     str(carbonara.SplitKey(grouped_points[0][0], 0)))
    # 3600 × 5s = 5 hours
    self.assertEqual(datetime64(1970, 1, 1, 5),
                     grouped_points[1][0])
    self.assertEqual(carbonara.SplitKey.POINTS_PER_SPLIT,
                     len(grouped_points[0][1]))
Example 14
Project: gnocchi Author: gnocchixyz File: test_aggregates.py License: Apache License 2.0
def test_aggregated_different_archive_no_overlap(self):
    tsc1 = {'sampling': numpy.timedelta64(60, 's'), 'size': 50,
            'agg': 'mean', "name": "all"}
    tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
    tsc2 = {'sampling': numpy.timedelta64(60, 's'), 'size': 50,
            'agg': 'mean', "name": "all"}
    tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])

    tsb1.set_values(numpy.array([(datetime64(2014, 1, 1, 11, 46, 4), 4)],
                                dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                    before_truncate_callback=functools.partial(
                        self._resample_and_merge, agg_dict=tsc1))
    tsb2.set_values(numpy.array([(datetime64(2014, 1, 1, 9, 1, 4), 4)],
                                dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                    before_truncate_callback=functools.partial(
                        self._resample_and_merge, agg_dict=tsc2))

    dtfrom = datetime64(2014, 1, 1, 11, 0, 0)
    self.assertRaises(exceptions.UnAggregableTimeseries,
                      processor.aggregated,
                      [tsc1['return'], tsc2['return']],
                      from_timestamp=dtfrom,
                      operations=["aggregate", "mean", [
                          "metric", ["all", "mean"]]])
Example 15
Project: gnocchi Author: gnocchixyz File: test_aggregates.py License: Apache License 2.0
def test_aggregated_different_archive_no_overlap2(self):
    tsc1 = {'sampling': numpy.timedelta64(60, 's'), 'size': 50,
            'agg': 'mean'}
    tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
    tsc2 = carbonara.AggregatedTimeSerie(
        carbonara.Aggregation('mean', numpy.timedelta64(60, 's'), None))

    tsb1.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 3, 0), 4)],
                                dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                    before_truncate_callback=functools.partial(
                        self._resample_and_merge, agg_dict=tsc1))
    metric = mock.Mock(id=str(uuid.uuid4()))
    ref = processor.MetricReference(metric, "mean")
    self.assertRaises(exceptions.UnAggregableTimeseries,
                      processor.aggregated,
                      [tsc1['return'], (ref, tsc2)],
                      operations=["aggregate", "mean",
                                  ["metric", tsc1['return'][0].lookup_key,
                                   ref.lookup_key]])
Example 16
Project: gnocchi Author: gnocchixyz File: test_aggregates.py License: Apache License 2.0
def test_binary_operator_ts_on_right(self):
    metric2, __ = self._create_metric()
    self.incoming.add_measures(self.metric.id, [
        incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
        incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
        incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
        incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
    ])
    self.trigger_processing()

    values = processor.get_measures(
        self.storage,
        [processor.MetricReference(self.metric, "mean")],
        ["*", 2, ["metric", str(self.metric.id), "mean"]],
        granularities=[numpy.timedelta64(1, 'h')])

    self.assertEqual({str(self.metric.id): {
        "mean": [(datetime64(2014, 1, 1, 12, 0, 0),
                  numpy.timedelta64(1, 'h'), 138),
                 (datetime64(2014, 1, 1, 13, 0, 0),
                  numpy.timedelta64(1, 'h'), 84),
                 (datetime64(2014, 1, 1, 14, 0, 0),
                  numpy.timedelta64(1, 'h'), 8),
                 (datetime64(2014, 1, 1, 15, 0, 0),
                  numpy.timedelta64(1, 'h'), 88)]
    }}, values)
Example 17
Project: gnocchi Author: gnocchixyz File: test_indexer.py License: Apache License 2.0
def test_get_archive_policy(self):
    ap = self.index.get_archive_policy("low")
    self.assertEqual({
        'back_window': 0,
        'aggregation_methods':
        set(self.conf.archive_policy.default_aggregation_methods),
        'definition': [
            {u'granularity': numpy.timedelta64(5, 'm'), u'points': 12,
             u'timespan': numpy.timedelta64(3600, 's')},
            {u'granularity': numpy.timedelta64(3600, 's'), u'points': 24,
             u'timespan': numpy.timedelta64(86400, 's')},
            {u'granularity': numpy.timedelta64(86400, 's'), u'points': 30,
             u'timespan': numpy.timedelta64(2592000, 's')}],
        'name': u'low'}, dict(ap))
Example 18
Project: recruit Author: Frank-qlu File: _dtype.py License: Apache License 2.0
def _name_get(dtype):
    # provides dtype.name.__get__
    if dtype.isbuiltin == 2:
        # user dtypes don't promise to do anything special
        return dtype.type.__name__

    # Builtin classes are documented as returning a "bit name"
    name = dtype.type.__name__

    # handle bool_, str_, etc
    if name[-1] == '_':
        name = name[:-1]

    # append bit counts to str, unicode, and void
    if np.issubdtype(dtype, np.flexible) and not _isunsized(dtype):
        name += "{}".format(dtype.itemsize * 8)

    # append metadata to datetimes
    elif dtype.type in (np.datetime64, np.timedelta64):
        name += _datetime_metadata_str(dtype)

    return name
Example 19
Project: aospy Author: spencerahill File: calc.py License: Apache License 2.0
def _compute(self, data):
    """Perform the calculation."""
    local_ts = self._local_ts(*data)
    dt = local_ts[internal_names.TIME_WEIGHTS_STR]
    # Convert dt to units of days to prevent overflow
    dt = dt / np.timedelta64(1, 'D')
    return local_ts, dt
Example 20
Project: gnocchi Author: gnocchixyz File: json.py License: Apache License 2.0
def to_primitive(obj):
    if isinstance(obj, ((six.text_type,)
                        + six.integer_types
                        + (type(None), bool, float))):
        return obj
    if isinstance(obj, uuid.UUID):
        return six.text_type(obj)
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    if isinstance(obj, numpy.datetime64):
        # Do not include nanoseconds if null
        return str(obj).rpartition(".000000000")[0] + "+00:00"
    if isinstance(obj, numpy.timedelta64):
        return obj / numpy.timedelta64(1, 's')
    if isinstance(obj, datetime.timedelta):
        return obj.total_seconds()
    # This mimics what Pecan implements in its default JSON encoder
    if hasattr(obj, "jsonify"):
        return to_primitive(obj.jsonify())
    if isinstance(obj, dict):
        return {to_primitive(k): to_primitive(v)
                for k, v in obj.items()}
    if hasattr(obj, 'iteritems'):
        return to_primitive(dict(obj.iteritems()))
    # Python 3 does not have iteritems
    if hasattr(obj, 'items'):
        return to_primitive(dict(obj.items()))
    if hasattr(obj, '__iter__'):
        return list(map(to_primitive, obj))
    return obj
Example 21
Project: gnocchi Author: gnocchixyz File: utils.py License: Apache License 2.0
def to_timespan(value, allow_le_zero=False):
    if value is None:
        raise ValueError("Invalid timespan")
    try:
        seconds = float(value)
    except Exception:
        seconds = pytimeparse.parse(value)
        if seconds is None:
            raise ValueError("Unable to parse timespan")
    seconds = numpy.timedelta64(int(seconds * 10e8), 'ns')
    if not allow_le_zero and seconds <= numpy.timedelta64(0, 'ns'):
        raise ValueError("Timespan must be positive")
    return seconds
Example 22
Project: gnocchi Author: gnocchixyz File: test_storage.py License: Apache License 2.0
def test_corrupted_data(self):
    self.incoming.add_measures(self.metric.id, [
        incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
    ])
    self.trigger_processing()

    self.incoming.add_measures(self.metric.id, [
        incoming.Measure(datetime64(2014, 1, 1, 13, 0, 1), 1),
    ])

    with mock.patch('gnocchi.carbonara.AggregatedTimeSerie.unserialize',
                    side_effect=carbonara.InvalidData()):
        with mock.patch('gnocchi.carbonara.BoundTimeSerie.unserialize',
                        side_effect=carbonara.InvalidData()):
            self.trigger_processing()

    m = self.storage.get_aggregated_measures(
        {self.metric:
            self.metric.archive_policy.get_aggregations_for_method(
                'mean')},)[self.metric]
    m = get_measures_list(m)['mean']
    self.assertIn((datetime64(2014, 1, 1),
                   numpy.timedelta64(1, 'D'), 1), m)
    self.assertIn((datetime64(2014, 1, 1, 13),
                   numpy.timedelta64(1, 'h'), 1), m)
    self.assertIn((datetime64(2014, 1, 1, 13),
                   numpy.timedelta64(5, 'm'), 1), m)
Example 23
Project: gnocchi Author: gnocchixyz File: test_storage.py License: Apache License 2.0
def test_aborted_initial_processing(self):
    self.incoming.add_measures(self.metric.id, [
        incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 5),
    ])
    with mock.patch.object(self.storage, '_store_unaggregated_timeseries',
                           side_effect=Exception):
        try:
            self.trigger_processing()
        except Exception:
            pass

    with mock.patch('gnocchi.storage.LOG') as LOG:
        self.trigger_processing()

    self.assertFalse(LOG.error.called)

    aggregations = (
        self.metric.archive_policy.get_aggregations_for_method("mean")
    )

    m = self.storage.get_aggregated_measures(
        {self.metric: aggregations})[self.metric]
    m = get_measures_list(m)['mean']
    self.assertIn((datetime64(2014, 1, 1),
                   numpy.timedelta64(1, 'D'), 5.0), m)
    self.assertIn((datetime64(2014, 1, 1, 12),
                   numpy.timedelta64(1, 'h'), 5.0), m)
    self.assertIn((datetime64(2014, 1, 1, 12),
                   numpy.timedelta64(5, 'm'), 5.0), m)
Example 24
Project: gnocchi Author: gnocchixyz File: test_storage.py License: Apache License 2.0
def test_resample_no_metric(self):
    """https://github.com/gnocchixyz/gnocchi/issues/69"""
    aggregation = self.metric.archive_policy.get_aggregation(
        "mean", numpy.timedelta64(300, 's'))
    self.assertRaises(storage.MetricDoesNotExist,
                      self.storage.get_aggregated_measures,
                      {self.metric: [aggregation]},
                      datetime64(2014, 1, 1),
                      datetime64(2015, 1, 1),
                      resample=numpy.timedelta64(1, 'h'))
Example 25
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_block_size(self):
    ts = carbonara.BoundTimeSerie.from_data(
        [datetime64(2014, 1, 1, 12, 0, 5),
         datetime64(2014, 1, 1, 12, 0, 9)],
        [5, 6],
        block_size=numpy.timedelta64(5, 's'))
    self.assertEqual(2, len(ts))
    ts.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 10), 3),
                               (datetime64(2014, 1, 1, 12, 0, 11), 4)],
                              dtype=carbonara.TIMESERIES_ARRAY_DTYPE))
    self.assertEqual(2, len(ts))
Example 26
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_block_size_unordered(self):
    ts = carbonara.BoundTimeSerie.from_data(
        [datetime64(2014, 1, 1, 12, 0, 5),
         datetime64(2014, 1, 1, 12, 0, 9)],
        [5, 23],
        block_size=numpy.timedelta64(5, 's'))
    self.assertEqual(2, len(ts))
    ts.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 11), 3),
                               (datetime64(2014, 1, 1, 12, 0, 10), 4)],
                              dtype=carbonara.TIMESERIES_ARRAY_DTYPE))
    self.assertEqual(2, len(ts))
Example 27
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_fetch_basic(self):
    ts = carbonara.AggregatedTimeSerie.from_data(
        timestamps=[datetime64(2014, 1, 1, 12, 0, 0),
                    datetime64(2014, 1, 1, 12, 0, 4),
                    datetime64(2014, 1, 1, 12, 0, 9)],
        values=[3, 5, 6],
        aggregation=carbonara.Aggregation(
            "mean", numpy.timedelta64(1, 's'), None))
    self.assertEqual(
        [(datetime64(2014, 1, 1, 12), 3),
         (datetime64(2014, 1, 1, 12, 0, 4), 5),
         (datetime64(2014, 1, 1, 12, 0, 9), 6)],
        list(ts.fetch()))
    self.assertEqual(
        [(datetime64(2014, 1, 1, 12, 0, 4), 5),
         (datetime64(2014, 1, 1, 12, 0, 9), 6)],
        list(ts.fetch(
            from_timestamp=datetime64(2014, 1, 1, 12, 0, 4))))
    self.assertEqual(
        [(datetime64(2014, 1, 1, 12, 0, 4), 5),
         (datetime64(2014, 1, 1, 12, 0, 9), 6)],
        list(ts.fetch(
            from_timestamp=numpy.datetime64(iso8601.parse_date(
                "2014-01-01 12:00:04")))))
    self.assertEqual(
        [(datetime64(2014, 1, 1, 12, 0, 4), 5),
         (datetime64(2014, 1, 1, 12, 0, 9), 6)],
        list(ts.fetch(
            from_timestamp=numpy.datetime64(iso8601.parse_date(
                "2014-01-01 13:00:04+01:00")))))
Example 28
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_derived_mean(self):
    ts = carbonara.TimeSerie.from_data(
        [datetime.datetime(2014, 1, 1, 12, 0, 0),
         datetime.datetime(2014, 1, 1, 12, 0, 4),
         datetime.datetime(2014, 1, 1, 12, 1, 2),
         datetime.datetime(2014, 1, 1, 12, 1, 14),
         datetime.datetime(2014, 1, 1, 12, 1, 24),
         datetime.datetime(2014, 1, 1, 12, 2, 4),
         datetime.datetime(2014, 1, 1, 12, 2, 35),
         datetime.datetime(2014, 1, 1, 12, 2, 42),
         datetime.datetime(2014, 1, 1, 12, 3, 2),
         datetime.datetime(2014, 1, 1, 12, 3, 22),  # Counter reset
         datetime.datetime(2014, 1, 1, 12, 3, 42),
         datetime.datetime(2014, 1, 1, 12, 4, 9)],
        [50, 55, 65, 66, 70, 83, 92, 103, 105, 5, 7, 23])
    ts = self._resample(ts, numpy.timedelta64(60, 's'), 'mean',
                        derived=True)

    self.assertEqual(5, len(ts))
    self.assertEqual(
        [(datetime64(2014, 1, 1, 12, 0, 0), 5),
         (datetime64(2014, 1, 1, 12, 1, 0), 5),
         (datetime64(2014, 1, 1, 12, 2, 0), 11),
         (datetime64(2014, 1, 1, 12, 3, 0), -32),
         (datetime64(2014, 1, 1, 12, 4, 0), 16)],
        list(ts.fetch(
            from_timestamp=datetime64(2014, 1, 1, 12))))
Example 29
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_74_percentile_serialized(self):
    ts = carbonara.TimeSerie.from_data(
        [datetime64(2014, 1, 1, 12, 0, 0),
         datetime64(2014, 1, 1, 12, 0, 4),
         datetime64(2014, 1, 1, 12, 0, 9)],
        [3, 5, 6])
    ts = self._resample(ts, numpy.timedelta64(60, 's'), '74pct')

    self.assertEqual(1, len(ts))
    self.assertEqual(5.48, ts[datetime64(2014, 1, 1, 12, 0, 0)][1])

    # Serialize and unserialize
    key = ts.get_split_key()
    o, s = ts.serialize(key)
    saved_ts = carbonara.AggregatedTimeSerie.unserialize(
        s, key, ts.aggregation)
    self.assertEqual(ts.aggregation, saved_ts.aggregation)

    ts = carbonara.TimeSerie.from_data(
        [datetime64(2014, 1, 1, 12, 0, 0),
         datetime64(2014, 1, 1, 12, 0, 4),
         datetime64(2014, 1, 1, 12, 0, 9)],
        [3, 5, 6])
    ts = self._resample(ts, numpy.timedelta64(60, 's'), '74pct')
    saved_ts.merge(ts)

    self.assertEqual(1, len(ts))
    self.assertEqual(5.48, ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
Example 30
Project: gnocchi Author: gnocchixyz File: test_carbonara.py License: Apache License 2.0
def test_different_length_in_timestamps_and_data(self):
    self.assertRaises(
        ValueError,
        carbonara.AggregatedTimeSerie.from_data,
        carbonara.Aggregation('mean', numpy.timedelta64(3, 's'), None),
        [datetime64(2014, 1, 1, 12, 0, 0),
         datetime64(2014, 1, 1, 12, 0, 4),
         datetime64(2014, 1, 1, 12, 0, 9)],
        [3, 5])