Python numpy.datetime64() Examples

The following are 30 code examples showing how to use numpy.datetime64(). They are extracted from open source projects; the header above each example names the original project, author, source file, and license.

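Before diving into the examples, a minimal sketch of the numpy.datetime64 API itself:

import numpy as np

# From an ISO 8601 string; the unit is inferred from the precision given.
d = np.datetime64('2014-01-01T12:00:01')   # datetime64[s]

# From an integer count of units since the Unix epoch.
e = np.datetime64(1387800000, 's')         # 2013-12-23T12:00:00

# datetime64 and timedelta64 mix freely in arithmetic and comparisons.
print(d + np.timedelta64(5, 'm'))          # 2014-01-01T12:05:01
print(d > e)                               # True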

Example 1
Project: aospy   Author: spencerahill   File: times.py    License: Apache License 2.0
def ensure_datetime(obj):
    """Return the object if it is a datetime-like object

    Parameters
    ----------
    obj : Object to be tested.

    Returns
    -------
    The original object if it is a datetime-like object

    Raises
    ------
    TypeError if `obj` is not datetime-like
    """
    _VALID_TYPES = (str, datetime.datetime, cftime.datetime,
                    np.datetime64)
    if isinstance(obj, _VALID_TYPES):
        return obj
    raise TypeError("datetime-like object required.  "
                    "Type given: {}".format(type(obj))) 
Example 2
Project: aospy   Author: spencerahill   File: times.py    License: Apache License 2.0
def extract_months(time, months):
    """Extract times within specified months of the year.

    Parameters
    ----------
    time : xarray.DataArray
         Array of times that can be represented by numpy.datetime64 objects
         (i.e. the year is between 1678 and 2262).
    months : Desired months of the year to include

    Returns
    -------
    xarray.DataArray of the desired times
    """
    inds = _month_conditional(time, months)
    return time.sel(time=inds) 
Example 3
Project: gnocchi   Author: gnocchixyz   File: carbonara.py    License: Apache License 2.0
def __getitem__(self, key):
        if isinstance(key, numpy.datetime64):
            idx = numpy.searchsorted(self.timestamps, key)
            if self.timestamps[idx] == key:
                return self[idx]
            raise KeyError(key)
        if isinstance(key, slice):
            if isinstance(key.start, numpy.datetime64):
                start = numpy.searchsorted(self.timestamps, key.start)
            else:
                start = key.start
            if isinstance(key.stop, numpy.datetime64):
                stop = numpy.searchsorted(self.timestamps, key.stop)
            else:
                stop = key.stop
            key = slice(start, stop, key.step)
        return self.ts[key] 
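The lookup above works because numpy.searchsorted operates directly on sorted datetime64 arrays; a standalone sketch of that behavior:

import numpy

timestamps = numpy.array(['2014-01-01T12:00', '2014-01-01T12:05',
                          '2014-01-01T12:10'], dtype='datetime64[ns]')
key = numpy.datetime64('2014-01-01T12:05')

idx = numpy.searchsorted(timestamps, key)   # index of the first entry >= key
print(idx, timestamps[idx] == key)          # 1 True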
Example 4
Project: gnocchi   Author: gnocchixyz   File: carbonara.py    License: Apache License 2.0
def truncate(self, oldest_point=None):
        """Truncate the time series up to oldest_point excluded.

        :param oldest_point: Oldest point to keep from, this excluded.
                             Default is the aggregation timespan.
        :type oldest_point: numpy.datetime64 or numpy.timedelta64
        :return: The oldest point that could have been kept.
        """
        last = self.last
        if last is None:
            return
        if oldest_point is None:
            oldest_point = self.aggregation.timespan
            if oldest_point is None:
                return
        if isinstance(oldest_point, numpy.timedelta64):
            oldest_point = last - oldest_point
        index = numpy.searchsorted(self.ts['timestamps'], oldest_point,
                                   side='right')
        self.ts = self.ts[index:]
        return oldest_point 
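The datetime64/timedelta64 arithmetic and the side='right' search that truncate relies on can be reproduced with plain numpy:

import numpy

ts = numpy.array(['2014-01-01T12:00', '2014-01-01T12:05', '2014-01-01T12:10'],
                 dtype='datetime64[ns]')
oldest = ts[-1] - numpy.timedelta64(5, 'm')        # datetime64 - timedelta64

# side='right' places the cut just past entries equal to `oldest`,
# so the point at `oldest` itself is dropped.
idx = numpy.searchsorted(ts, oldest, side='right')
print(ts[idx:])                                    # ['2014-01-01T12:10:00.000000000']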
Example 5
Project: gnocchi   Author: gnocchixyz   File: test_storage.py    License: Apache License 2.0
def test_corrupted_split(self):
        self.incoming.add_measures(self.metric.id, [
            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
        ])
        self.trigger_processing()

        aggregation = self.metric.archive_policy.get_aggregation(
            "mean", numpy.timedelta64(5, 'm'))

        with mock.patch('gnocchi.carbonara.AggregatedTimeSerie.unserialize',
                        side_effect=carbonara.InvalidData()):
            results = self.storage._get_splits_and_unserialize({
                self.metric: {
                    aggregation: [
                        carbonara.SplitKey(
                            numpy.datetime64(1387800000, 's'),
                            numpy.timedelta64(5, 'm'))
                    ],
                },
            })[self.metric][aggregation]
            self.assertEqual(1, len(results))
            self.assertIsInstance(results[0], carbonara.AggregatedTimeSerie)
            # Assert it's empty, since the split data was corrupted
            self.assertEqual(0, len(results[0]))
            self.assertEqual(results[0].aggregation, aggregation) 
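Note that datetime64(2014, 1, 1, 12, 0, 1) in these gnocchi test snippets cannot be numpy.datetime64 itself, which does not accept calendar components as separate arguments; it is presumably a test helper along these lines:

def datetime64(*args):
    # Hypothetical helper: build a numpy.datetime64 from calendar components.
    return numpy.datetime64(datetime.datetime(*args))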
Example 6
Project: gnocchi   Author: gnocchixyz   File: test_storage.py    License: Apache License 2.0
def test_get_splits_and_unserialize(self):
        self.incoming.add_measures(self.metric.id, [
            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
        ])
        self.trigger_processing()

        aggregation = self.metric.archive_policy.get_aggregation(
            "mean", numpy.timedelta64(5, 'm'))

        results = self.storage._get_splits_and_unserialize({
            self.metric: {
                aggregation: [
                    carbonara.SplitKey(
                        numpy.datetime64(1387800000, 's'),
                        numpy.timedelta64(5, 'm')),
                ],
            },
        })[self.metric][aggregation]
        self.assertEqual(1, len(results))
        self.assertIsInstance(results[0], carbonara.AggregatedTimeSerie)
        # Assert it's not empty, since the data is not corrupted
        self.assertGreater(len(results[0]), 0)
        self.assertEqual(results[0].aggregation, aggregation) 
Example 7
Project: gnocchi   Author: gnocchixyz   File: test_storage.py    License: Apache License 2.0
def test_delete_nonempty_metric(self):
        self.incoming.add_measures(self.metric.id, [
            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
        ])
        self.trigger_processing()
        self.storage._delete_metric(self.metric)
        self.trigger_processing()

        aggregations = (
            self.metric.archive_policy.get_aggregations_for_method("mean")
        )

        self.assertRaises(storage.MetricDoesNotExist,
                          self.storage.get_aggregated_measures,
                          {self.metric: aggregations})
        self.assertEqual(
            {self.metric: None},
            self.storage._get_or_create_unaggregated_timeseries(
                [self.metric])) 
Example 8
Project: gnocchi   Author: gnocchixyz   File: test_storage.py    License: Apache License 2.0
def test_get_aggregated_measures(self):
        self.incoming.add_measures(self.metric.id, [
            incoming.Measure(datetime64(2014, 1, 1, 12, i, j), 100)
            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)])
        self.trigger_processing([self.metric])

        aggregations = self.metric.archive_policy.aggregations

        measures = self.storage.get_aggregated_measures(
            {self.metric: aggregations})
        self.assertEqual(1, len(measures))
        self.assertIn(self.metric, measures)
        measures = measures[self.metric]
        self.assertEqual(len(aggregations), len(measures))
        self.assertGreater(len(measures[aggregations[0]]), 0)
        for agg in aggregations:
            self.assertEqual(agg, measures[agg].aggregation) 
Example 9
Project: gnocchi   Author: gnocchixyz   File: test_storage.py    License: Apache License 2.0
def test_add_measures_update_subset(self):
        m, m_sql = self._create_metric('medium')
        measures = [
            incoming.Measure(datetime64(2014, 1, 6, i, j, 0), 100)
            for i in six.moves.range(2) for j in six.moves.range(0, 60, 2)]
        self.incoming.add_measures(m.id, measures)
        self.trigger_processing([m])

        # add measure to end, in same aggregate time as last point.
        new_point = datetime64(2014, 1, 6, 1, 58, 1)
        self.incoming.add_measures(m.id, [incoming.Measure(new_point, 100)])

        with mock.patch.object(self.incoming, 'add_measures') as c:
            self.trigger_processing([m])
        for __, args, __ in c.mock_calls:
            self.assertEqual(
                list(args[3])[0][0], carbonara.round_timestamp(
                    new_point, args[1].granularity * 10e8)) 
Example 10
Project: gnocchi   Author: gnocchixyz   File: test_storage.py    License: Apache License 2.0
def test_get_measure_unknown_aggregation(self):
        self.incoming.add_measures(self.metric.id, [
            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
        ])

        aggregations = (
            self.metric.archive_policy.get_aggregations_for_method("last")
        )

        self.assertRaises(
            storage.MetricDoesNotExist,
            self.storage.get_aggregated_measures,
            {self.metric: aggregations}) 
Example 11
Project: gnocchi   Author: gnocchixyz   File: test_carbonara.py    License: Apache License 2.0
def test_duplicate_timestamps(self):
        ts = carbonara.BoundTimeSerie.from_data(
            [datetime64(2014, 1, 1, 12, 0, 0),
             datetime64(2014, 1, 1, 12, 0, 9)],
            [10, 23])
        self.assertEqual(2, len(ts))
        self.assertEqual(10.0, ts[0][1])
        self.assertEqual(23.0, ts[1][1])

        ts.set_values(numpy.array([(datetime64(2014, 1, 1, 13, 0, 10), 3),
                                   (datetime64(2014, 1, 1, 13, 0, 11), 9),
                                   (datetime64(2014, 1, 1, 13, 0, 11), 8),
                                   (datetime64(2014, 1, 1, 13, 0, 11), 7),
                                   (datetime64(2014, 1, 1, 13, 0, 11), 4)],
                                  dtype=carbonara.TIMESERIES_ARRAY_DTYPE))
        self.assertEqual(4, len(ts))
        self.assertEqual(10.0, ts[0][1])
        self.assertEqual(23.0, ts[1][1])
        self.assertEqual(3.0, ts[2][1])
        self.assertEqual(9.0, ts[3][1]) 
Example 12
Project: gnocchi   Author: gnocchixyz   File: test_carbonara.py    License: Apache License 2.0
def test_derived_hole(self):
        ts = carbonara.TimeSerie.from_data(
            [datetime.datetime(2014, 1, 1, 12, 0, 0),
             datetime.datetime(2014, 1, 1, 12, 0, 4),
             datetime.datetime(2014, 1, 1, 12, 1, 2),
             datetime.datetime(2014, 1, 1, 12, 1, 14),
             datetime.datetime(2014, 1, 1, 12, 1, 24),
             datetime.datetime(2014, 1, 1, 12, 3, 2),
             datetime.datetime(2014, 1, 1, 12, 3, 22),
             datetime.datetime(2014, 1, 1, 12, 3, 42),
             datetime.datetime(2014, 1, 1, 12, 4, 9)],
            [50, 55, 65, 66, 70, 105, 108, 200, 202])
        ts = self._resample(ts, numpy.timedelta64(60, 's'), 'last',
                            derived=True)

        self.assertEqual(4, len(ts))
        self.assertEqual(
            [(datetime64(2014, 1, 1, 12, 0, 0), 5),
             (datetime64(2014, 1, 1, 12, 1, 0), 4),
             (datetime64(2014, 1, 1, 12, 3, 0), 92),
             (datetime64(2014, 1, 1, 12, 4, 0), 2)],
            list(ts.fetch(
                from_timestamp=datetime64(2014, 1, 1, 12)))) 
Example 13
Project: gnocchi   Author: gnocchixyz   File: test_carbonara.py    License: Apache License 2.0
def test_aggregation_std_with_unique(self):
        ts = carbonara.TimeSerie.from_data(
            [datetime64(2014, 1, 1, 12, 0, 0)], [3])
        ts = self._resample(ts, numpy.timedelta64(60, 's'), 'std')
        self.assertEqual(0, len(ts), ts.values)

        ts = carbonara.TimeSerie.from_data(
            [datetime64(2014, 1, 1, 12, 0, 0),
             datetime64(2014, 1, 1, 12, 0, 4),
             datetime64(2014, 1, 1, 12, 0, 9),
             datetime64(2014, 1, 1, 12, 1, 6)],
            [3, 6, 5, 9])
        ts = self._resample(ts, numpy.timedelta64(60, 's'), "std")

        self.assertEqual(1, len(ts))
        self.assertEqual(1.5275252316519465,
                         ts[datetime64(2014, 1, 1, 12, 0, 0)][1]) 
Example 14
Project: gnocchi   Author: gnocchixyz   File: test_carbonara.py    License: Apache License 2.0
def test_no_truncation(self):
        ts = {'sampling': numpy.timedelta64(60, 's'), 'agg': 'mean'}
        tsb = carbonara.BoundTimeSerie()

        for i in six.moves.range(1, 11):
            tsb.set_values(numpy.array([
                (datetime64(2014, 1, 1, 12, i, i), float(i))],
                dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                before_truncate_callback=functools.partial(
                    self._resample_and_merge, agg_dict=ts))
            tsb.set_values(numpy.array([
                (datetime64(2014, 1, 1, 12, i, i + 1), float(i + 1))],
                dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                before_truncate_callback=functools.partial(
                    self._resample_and_merge, agg_dict=ts))
            self.assertEqual(i, len(list(ts['return'].fetch()))) 
Example 15
Project: gnocchi   Author: gnocchixyz   File: test_carbonara.py    License: Apache License 2.0
def test_split_key(self):
        self.assertEqual(
            numpy.datetime64("2014-10-07"),
            carbonara.SplitKey.from_timestamp_and_sampling(
                numpy.datetime64("2015-01-01T15:03"),
                numpy.timedelta64(3600, 's')))
        self.assertEqual(
            numpy.datetime64("2014-12-31 18:00"),
            carbonara.SplitKey.from_timestamp_and_sampling(
                numpy.datetime64("2015-01-01 15:03:58"),
                numpy.timedelta64(58, 's')))

        key = carbonara.SplitKey.from_timestamp_and_sampling(
            numpy.datetime64("2015-01-01 15:03"),
            numpy.timedelta64(3600, 's'))

        self.assertGreater(key, numpy.datetime64("1970"))

        self.assertGreaterEqual(key, numpy.datetime64("1970")) 
Example 16
Project: gnocchi   Author: gnocchixyz   File: test_carbonara.py    License: Apache License 2.0
def test_split(self):
        sampling = numpy.timedelta64(5, 's')
        points = 100000
        ts = carbonara.TimeSerie.from_data(
            timestamps=list(map(datetime.datetime.utcfromtimestamp,
                                six.moves.range(points))),
            values=list(six.moves.range(points)))
        agg = self._resample(ts, sampling, 'mean')

        grouped_points = list(agg.split())

        self.assertEqual(
            math.ceil((points / sampling.astype(float))
                      / carbonara.SplitKey.POINTS_PER_SPLIT),
            len(grouped_points))
        self.assertEqual("0.0",
                         str(carbonara.SplitKey(grouped_points[0][0], 0)))
        # Second split starts POINTS_PER_SPLIT × sampling = 3600 × 5 s = 5 hours after Epoch
        self.assertEqual(datetime64(1970, 1, 1, 5),
                         grouped_points[1][0])
        self.assertEqual(carbonara.SplitKey.POINTS_PER_SPLIT,
                         len(grouped_points[0][1])) 
Example 17
Project: gnocchi   Author: gnocchixyz   File: test_aggregates.py    License: Apache License 2.0
def test_aggregated_different_archive_no_overlap(self):
        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
                'size': 50, 'agg': 'mean', "name": "all"}
        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
                'size': 50, 'agg': 'mean', "name": "all"}
        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])

        tsb1.set_values(numpy.array([(datetime64(2014, 1, 1, 11, 46, 4), 4)],
                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                        before_truncate_callback=functools.partial(
                            self._resample_and_merge, agg_dict=tsc1))
        tsb2.set_values(numpy.array([(datetime64(2014, 1, 1, 9, 1, 4), 4)],
                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                        before_truncate_callback=functools.partial(
                            self._resample_and_merge, agg_dict=tsc2))

        dtfrom = datetime64(2014, 1, 1, 11, 0, 0)
        self.assertRaises(exceptions.UnAggregableTimeseries,
                          processor.aggregated,
                          [tsc1['return'], tsc2['return']],
                          from_timestamp=dtfrom,
                          operations=["aggregate", "mean", [
                              "metric", ["all", "mean"]]]) 
Example 18
Project: gnocchi   Author: gnocchixyz   File: test_aggregates.py    License: Apache License 2.0
def test_aggregated_different_archive_no_overlap2(self):
        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
                'size': 50, 'agg': 'mean'}
        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
        tsc2 = carbonara.AggregatedTimeSerie(
            carbonara.Aggregation('mean', numpy.timedelta64(60, 's'), None))

        tsb1.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 3, 0), 4)],
                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
                        before_truncate_callback=functools.partial(
                            self._resample_and_merge, agg_dict=tsc1))
        metric = mock.Mock(id=str(uuid.uuid4()))
        ref = processor.MetricReference(metric, "mean")
        self.assertRaises(exceptions.UnAggregableTimeseries,
                          processor.aggregated,
                          [tsc1['return'], (ref, tsc2)],
                          operations=["aggregate", "mean",
                                      ["metric", tsc1['return'][0].lookup_key,
                                       ref.lookup_key]]) 
Example 19
Project: gnocchi   Author: gnocchixyz   File: test_aggregates.py    License: Apache License 2.0
def test_get_measures_unknown_aggregation(self):
        metric2 = indexer.Metric(uuid.uuid4(),
                                 self.archive_policies['low'])
        self.incoming.add_measures(self.metric.id, [
            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.incoming.add_measures(metric2.id, [
            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
        ])
        self.assertRaises(storage.AggregationDoesNotExist,
                          processor.get_measures,
                          self.storage,
                          [processor.MetricReference(self.metric, 'last'),
                           processor.MetricReference(metric2, 'last')],
                          operations=["aggregate", "mean", [
                              "metric",
                              [str(self.metric.id), "last"],
                              [str(metric2.id), "last"],
                          ]]) 
Example 20
Project: aospy   Author: spencerahill   File: times.py    License: Apache License 2.0
def apply_time_offset(time, years=0, months=0, days=0, hours=0):
    """Apply a specified offset to the given time array.

    This is useful for GFDL model output of instantaneous values.  For example,
    3 hourly data postprocessed to netCDF files spanning 1 year each will
    actually have time values that are offset by 3 hours, such that the first
    value is for 1 Jan 03:00 and the last value is 1 Jan 00:00 of the
    subsequent year.  This causes problems in xarray, e.g. when trying to group
    by month.  It is resolved by manually subtracting off those three hours,
    such that the dates span from 1 Jan 00:00 to 31 Dec 21:00 as desired.

    Parameters
    ----------
    time : xarray.DataArray representing a timeseries
    years, months, days, hours : int, optional
        The number of years, months, days, and hours, respectively, to offset
        the time array by.  Positive values move the times later.

    Returns
    -------
    pandas.DatetimeIndex

    Examples
    --------
    Case of a length-1 input time array:

    >>> times = xr.DataArray(datetime.datetime(1899, 12, 31, 21))
    >>> apply_time_offset(times)
    Timestamp('1900-01-01 00:00:00')

    Case of input time array with length greater than one:

    >>> times = xr.DataArray([datetime.datetime(1899, 12, 31, 21),
    ...                       datetime.datetime(1899, 1, 31, 21)])
    >>> apply_time_offset(times) # doctest: +NORMALIZE_WHITESPACE
    DatetimeIndex(['1900-01-01', '1899-02-01'], dtype='datetime64[ns]',
                  freq=None)
    """
    return (pd.to_datetime(time.values) +
            pd.DateOffset(years=years, months=months, days=days, hours=hours)) 
Example 21
Project: aospy   Author: spencerahill   File: times.py    License: Apache License 2.0
def sel_time(da, start_date, end_date):
    """Subset a DataArray or Dataset for a given date range.

    Ensures that data are present for full extent of requested range.
    Appends start and end date of the subset to the DataArray.

    Parameters
    ----------
    da : DataArray or Dataset
        data to subset
    start_date : np.datetime64
        start of date interval
    end_date : np.datetime64
        end of date interval

    Returns
    -------
    da : DataArray or Dataset
        subsetted data

    Raises
    ------
    AssertionError
        if data for requested range do not exist for part or all of
        requested range
    """
    _assert_has_data_for_time(da, start_date, end_date)
    da[SUBSET_START_DATE_STR] = xr.DataArray(start_date)
    da[SUBSET_END_DATE_STR] = xr.DataArray(end_date)
    return da.sel(**{TIME_STR: slice(start_date, end_date)}) 
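The final line leans on xarray's label-based slicing over a datetime64 time coordinate; a self-contained sketch of that mechanism with made-up data:

import numpy as np
import pandas as pd
import xarray as xr

time = pd.date_range('2000-01-01', periods=90)    # datetime64[ns] coordinate
da = xr.DataArray(np.arange(90), coords=[time], dims=['time'])

subset = da.sel(time=slice(np.datetime64('2000-01-10'),
                           np.datetime64('2000-01-20')))
print(subset.sizes['time'])                       # 11 -- label slices are inclusive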
Example 22
Project: aospy   Author: spencerahill   File: times.py    License: Apache License 2.0
def infer_year(date):
    """Given a datetime-like object or string infer the year.

    Parameters
    ----------
    date : datetime-like object or str
        Input date

    Returns
    -------
    int

    Examples
    --------
    >>> infer_year('2000')
    2000
    >>> infer_year('2000-01')
    2000
    >>> infer_year('2000-01-31')
    2000
    >>> infer_year(datetime.datetime(2000, 1, 1))
    2000
    >>> infer_year(np.datetime64('2000-01-01'))
    2000
    >>> infer_year(DatetimeNoLeap(2000, 1, 1))
    2000
    """
    if isinstance(date, str):
        # Look for a string that begins with four numbers; the first four
        # numbers found are the year.
        pattern = r'(?P<year>\d{4})'
        result = re.match(pattern, date)
        if result:
            return int(result.groupdict()['year'])
        else:
            raise ValueError('Invalid date string provided: {}'.format(date))
    elif isinstance(date, np.datetime64):
        return date.item().year
    else:
        return date.year 
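The np.datetime64 branch works because .item() converts the value to a native Python date or datetime, which exposes .year:

import numpy as np

print(np.datetime64('2000-01-01').item())         # 2000-01-01 (a datetime.date)
print(np.datetime64('2000-01-01T12:30').item())   # 2000-01-01 12:30:00 (a datetime.datetime)
print(np.datetime64('2000-01-01').item().year)    # 2000

One caveat: for nanosecond (and finer) precision, .item() returns a plain integer count of units since the Epoch, so such values would need casting to a coarser unit first.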
Example 23
Project: aospy   Author: spencerahill   File: times.py    License: Apache License 2.0
def prep_time_data(ds):
    """Prepare time coordinate information in Dataset for use in aospy.

    1. If the Dataset contains a time bounds coordinate, add attributes
       representing the true beginning and end dates of the time interval used
       to construct the Dataset
    2. If the Dataset contains a time bounds coordinate, overwrite the time
       coordinate values with the averages of the time bounds at each timestep
    3. Decode the times into np.datetime64 objects for time indexing

    Parameters
    ----------
    ds : Dataset
        Pre-processed Dataset with time coordinate renamed to
        internal_names.TIME_STR

    Returns
    -------
    Dataset
        The processed Dataset

    """
    ds = ensure_time_as_index(ds)
    if TIME_BOUNDS_STR in ds:
        ds = ensure_time_avg_has_cf_metadata(ds)
        ds[TIME_STR] = average_time_bounds(ds)
    else:
        logging.warning("dt array not found.  Assuming equally spaced "
                        "values in time, even though this may not be "
                        "the case")
        ds = add_uniform_time_weights(ds)
    return xr.decode_cf(ds, decode_times=True, decode_coords=False,
                        mask_and_scale=True) 
Example 24
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0
def test_datetime_or_default():
    date = np.datetime64('2000-01-01')
    assert datetime_or_default(None, 'dummy') == 'dummy'
    assert datetime_or_default(date, 'dummy') == ensure_datetime(date) 
Example 25
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0
def test_assert_has_data_for_time():
    time_bounds = np.array([[0, 31], [31, 59], [59, 90]])
    nv = np.array([0, 1])
    time = np.array([15, 46, 74])
    data = np.zeros((3))
    var_name = 'a'
    ds = xr.DataArray(data,
                      coords=[time],
                      dims=[TIME_STR],
                      name=var_name).to_dataset()
    ds[TIME_BOUNDS_STR] = xr.DataArray(time_bounds,
                                       coords=[time, nv],
                                       dims=[TIME_STR, BOUNDS_STR],
                                       name=TIME_BOUNDS_STR)
    units_str = 'days since 2000-01-01 00:00:00'
    ds[TIME_STR].attrs['units'] = units_str
    ds = ensure_time_avg_has_cf_metadata(ds)
    ds = set_grid_attrs_as_coords(ds)
    ds = xr.decode_cf(ds)
    da = ds[var_name]

    start_date = np.datetime64('2000-01-01')
    end_date = np.datetime64('2000-03-31')
    _assert_has_data_for_time(da, start_date, end_date)

    start_date_bad = np.datetime64('1999-12-31')
    end_date_bad = np.datetime64('2000-04-01')

    with pytest.raises(AssertionError):
        _assert_has_data_for_time(da, start_date_bad, end_date)

    with pytest.raises(AssertionError):
        _assert_has_data_for_time(da, start_date, end_date_bad)

    with pytest.raises(AssertionError):
        _assert_has_data_for_time(da, start_date_bad, end_date_bad) 
Example 26
Project: aospy   Author: spencerahill   File: test_run.py    License: Apache License 2.0
def test_init_dates_valid_input(self):
        for attr in ['default_start_date', 'default_end_date']:
            for date in [None, np.datetime64('2000-01-01')]:
                run_ = Run(**{attr: date})
                self.assertEqual(date, getattr(run_, attr)) 
Example 27
Project: esmlab   Author: NCAR   File: core.py    License: Apache License 2.0
def isdecoded(self, obj):
        return obj.dtype.type in {np.str_, np.object_, np.datetime64} 
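dtype.type yields the scalar type backing an array, which is what the membership test above inspects; for example:

import numpy as np

a = np.array(['2000-01-01'], dtype='datetime64[ns]')
print(a.dtype.type is np.datetime64)   # True

b = np.array([1.5])
print(b.dtype.type is np.datetime64)   # False (it is numpy.float64)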
Example 28
Project: gnocchi   Author: gnocchixyz   File: json.py    License: Apache License 2.0
def to_primitive(obj):
    if isinstance(obj, ((six.text_type,)
                        + six.integer_types
                        + (type(None), bool, float))):
        return obj
    if isinstance(obj, uuid.UUID):
        return six.text_type(obj)
    if isinstance(obj, datetime.datetime):
        return obj.isoformat()
    if isinstance(obj, numpy.datetime64):
        # Strip the nanosecond component when it is zero
        return str(obj).rpartition(".000000000")[0] + "+00:00"
    if isinstance(obj, numpy.timedelta64):
        return obj / numpy.timedelta64(1, 's')
    if isinstance(obj, datetime.timedelta):
        return obj.total_seconds()
    # This mimics what Pecan implements in its default JSON encoder
    if hasattr(obj, "jsonify"):
        return to_primitive(obj.jsonify())
    if isinstance(obj, dict):
        return {to_primitive(k): to_primitive(v)
                for k, v in obj.items()}
    if hasattr(obj, 'iteritems'):
        return to_primitive(dict(obj.iteritems()))
    # Python 3 does not have iteritems
    if hasattr(obj, 'items'):
        return to_primitive(dict(obj.items()))
    if hasattr(obj, '__iter__'):
        return list(map(to_primitive, obj))
    return obj 
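For the numpy.datetime64 branch, str() on a nanosecond-precision value whose sub-second part is zero always ends in nine zero digits, which is what the rpartition call strips; a sketch of that path:

import numpy

ts = numpy.datetime64('2014-01-01T12:00:01', 'ns')
print(str(ts))                                          # 2014-01-01T12:00:01.000000000
print(str(ts).rpartition('.000000000')[0] + '+00:00')   # 2014-01-01T12:00:01+00:00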
Example 29
Project: gnocchi   Author: gnocchixyz   File: utils.py    License: Apache License 2.0
def to_timestamps(values):
    try:
        if len(values) == 0:
            return []
        if isinstance(values[0], (numpy.datetime64, datetime.datetime)):
            times = numpy.array(values)
        else:
            try:
                # Try to convert to float. If it works, then we consider
                # timestamps to be number of seconds since Epoch
                # e.g. 123456 or 129491.1293
                float(values[0])
            except ValueError:
                try:
                    # Try to parse the value as a string of ISO timestamp
                    # e.g. 2017-10-09T23:23:12.123
                    numpy.datetime64(values[0])
                except ValueError:
                    # Last chance: it can be relative timestamp, so convert
                    # to timedelta relative to now()
                    # e.g. "-10 seconds" or "5 minutes"
                    times = numpy.fromiter(
                        numpy.add(numpy.datetime64(utcnow()),
                                  [to_timespan(v, True) for v in values]),
                        dtype='datetime64[ns]', count=len(values))
                else:
                    times = numpy.array(values, dtype='datetime64[ns]')
            else:
                times = numpy.array(values, dtype='float') * 10e8
    except ValueError:
        raise ValueError("Unable to convert timestamps")

    times = times.astype('datetime64[ns]')

    if (times < unix_universal_start64).any():
        raise ValueError('Timestamp must be after Epoch')

    return times 
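A standalone sketch of the two conversion paths that need only numpy (the relative-timestamp branch additionally depends on the module's utcnow and to_timespan helpers):

import numpy

# ISO 8601 strings parse directly into datetime64.
print(numpy.array(['2017-10-09T23:23:12.123'], dtype='datetime64[ns]'))

# Floats as seconds since Epoch: scale to nanoseconds, then cast.
secs = numpy.array([123456.0, 129491.1293]) * 10e8    # 10e8 == 1e9 ns per second
print(secs.astype('datetime64[ns]'))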
Example 30
Project: gnocchi   Author: gnocchixyz   File: carbonara.py    License: Apache License 2.0
def __hash__(self):
        return hash(str(self.key.astype('datetime64[ns]')) +
                    str(self.sampling.astype('timedelta64[ns]')))