Python pandas.date_range() Examples

The following are 30 code examples showing how to use pandas.date_range(). They are extracted from open-source projects. You can vote up the ones you like or vote down the ones you don't like, and go to the original project or source file by following the links above each example.

You may want to check out the right sidebar, which shows related API usage.

You may also want to check out all available functions/classes of the module pandas, or try the search function.

Example 1
Project: backtrader-cn   Author: pandalibin   File: test_datas_utils.py    License: GNU General Public License v3.0 6 votes vote down vote up
def _test_strip_unused_cols(self):
        """Verify strip_unused_cols removes the given columns from the frame.

        Bug fix: ``list.sort()`` sorts in place and returns ``None``, so the
        original assertions compared ``None == None`` and could never fail.
        ``sorted()`` returns a new list, making the comparisons meaningful.
        """
        data = pd.DataFrame({
            'name': ['tom', 'jack'],
            'age': [24, 56],
            'gender': ['male', 'male'],
            'address': ['cn', 'us']
        })
        data.index = pd.date_range(start='2017-01-01', periods=2)

        origin_cols = ['name', 'age', 'gender', 'address']
        unused_cols = ['address', 'gender']
        new_cols = ['name', 'age']

        self.assertEqual(sorted(data.columns), sorted(origin_cols))

        bdu.Utils.strip_unused_cols(data, *unused_cols)

        self.assertEqual(sorted(data.columns), sorted(new_cols))
Example 2
Project: aospy   Author: spencerahill   File: times.py    License: Apache License 2.0 6 votes vote down vote up
def monthly_mean_at_each_ind(monthly_means, sub_monthly_timeseries):
    """Copy monthly mean over each time index in that month.

    Parameters
    ----------
    monthly_means : xarray.DataArray
        array of monthly means
    sub_monthly_timeseries : xarray.DataArray
        array of a timeseries at sub-monthly time resolution

    Returns
    -------
    xarray.DataArray with each monthly mean value from `monthly_means` repeated
    at each time within that month from `sub_monthly_timeseries`

    See Also
    --------
    monthly_mean_ts : Create timeseries of monthly mean values
    """
    time = monthly_means[TIME_STR]
    # Rewind the first timestamp to the start of its month so the
    # month-start ('MS') range below also covers the first month.
    start = time.indexes[TIME_STR][0].replace(day=1, hour=0)
    end = time.indexes[TIME_STR][-1]
    new_indices = pd.date_range(start=start, end=end, freq='MS')
    # 'backfill' assigns each month-start timestamp the mean of that month...
    arr_new = monthly_means.reindex(time=new_indices, method='backfill')
    # ...and 'pad' forward-fills that value onto every sub-monthly timestamp.
    return arr_new.reindex_like(sub_monthly_timeseries, method='pad')
Example 3
Project: pywr   Author: pywr   File: test_parameters.py    License: GNU General Public License v3.0 6 votes vote down vote up
def test_parameter_array_indexed_json_load(simple_linear_model, tmpdir):
    """Test ArrayIndexedParameter can be loaded from json dict"""
    model = simple_linear_model

    # Build a daily timeseries 0..364 and write it out as CSV.
    dates = pd.date_range('2015-01-01', periods=365, freq='D', name='date')
    frame = pd.DataFrame(np.arange(365), index=dates, columns=['data'])
    csv_path = tmpdir.join('df.csv')
    frame.to_csv(str(csv_path))

    config = {
        'type': 'arrayindexed',
        'url': str(csv_path),
        'index_col': 'date',
        'parse_dates': True,
        'column': 'data',
    }

    param = load_parameter(model, config)
    model.setup()

    scenario_idx = ScenarioIndex(0, np.array([0], dtype=np.int32))
    # Each timestep's value should equal its position in the series.
    for expected, ts in enumerate(model.timestepper):
        np.testing.assert_allclose(param.value(ts, scenario_idx), expected)
Example 4
Project: pywr   Author: pywr   File: test_parameters.py    License: GNU General Public License v3.0 6 votes vote down vote up
def test_parameter_df_embed_load(model):
    """DataFrame parameter loads from data embedded directly in the JSON."""
    # Daily time-step
    dates = pd.date_range('2015-01-01', periods=365, freq='D', name='date')
    frame = pd.DataFrame(np.random.rand(365), index=dates, columns=['data'])

    # Round-trip through JSON: this is the embedded-data format we support.
    encoded = frame.to_json(date_format="iso")
    # Removing the time information from the dataset for testing purposes
    encoded = encoded.replace('T00:00:00.000Z', '')

    config = {
        'type': 'dataframe',
        'data': json.loads(encoded),
        'parse_dates': True,
    }

    param = load_parameter(model, config)
    param.setup()
Example 5
Project: xalpha   Author: refraction-ray   File: info.py    License: MIT License 6 votes vote down vote up
def _basic_init(self):
        """Build the daily price table for a money-market-style fund whose
        value compounds at ``self.interest`` per day, keeping trading days only."""
        self.name = "货币基金"
        self.rate = 0
        start_str = dt.datetime.strftime(self.start, "%Y-%m-%d")
        dates = list(pd.date_range(start_str, yesterdaydash()))
        # Net value on day i is (1 + interest)**i, starting from 1.
        values = [(1 + self.interest) ** i for i in range(len(dates))]
        frame = pd.DataFrame(
            data={
                "date": dates,
                "netvalue": values,
                "totvalue": values,
                "comment": [0] * len(dates),
            }
        )
        # Restrict to market open dates.
        self.price = frame[frame["date"].isin(opendate)]
Example 6
Project: xalpha   Author: refraction-ray   File: universal.py    License: MIT License 6 votes vote down vote up
def _get_peb_range(code, start, end):  # earnings, net assets, total market value
    """
    Fetch the pe/pb history of the given index over a period, sampled
    weekly on Fridays.

    :param code: index code in joinquant format.
    :param start:
    :param end:
    :return: pd.DataFrame
    """
    if len(code.split(".")) != 2:
        code = _inverse_convert_code(code)
    rows = {"date": [], "pe": [], "pb": []}
    for day in pd.date_range(start=start, end=end, freq="W-FRI"):
        rows["date"].append(day)
        logger.debug("compute pe pb on %s" % day)
        valuation = get_peb(code, date=day.strftime("%Y-%m-%d"))
        rows["pe"].append(valuation["pe"])
        rows["pb"].append(valuation["pb"])
    return pd.DataFrame(rows)
Example 7
Project: xalpha   Author: refraction-ray   File: universal.py    License: MIT License 6 votes vote down vote up
def get_fund_peb_range(code, start, end):
    """
    Fetch historical fund valuation (pe/pb) over a period, sampled each Friday.

    :param code:
    :param start:
    :param end:
    :return:
    """
    # Strip the leading "F" fund prefix if present.
    if code.startswith("F"):
        code = code[1:]
    rows = {"date": [], "pe": [], "pb": []}
    for day in pd.date_range(start=start, end=end, freq="W-FRI"):
        rows["date"].append(day)
        valuation = get_fund_peb(code, date=day.strftime("%Y-%m-%d"))
        rows["pe"].append(valuation["pe"])
        rows["pb"].append(valuation["pb"])
    return pd.DataFrame(rows)
Example 8
Project: xalpha   Author: refraction-ray   File: test_realtime.py    License: MIT License 6 votes vote down vote up
def test_review(capsys):
    """Review of two strategies produces string content; notifications with an
    empty config report 'nothing to send' then 'mail failed' once content is set.

    Idiom fix: ``assert isinstance(...) == True`` is redundant — ``isinstance``
    already returns a bool, so assert it directly.
    """
    st1 = xa.policy.buyandhold(gf, start="2018-08-10", end="2019-01-01")
    st2 = xa.policy.scheduled_tune(
        gf,
        totmoney=1000,
        times=pd.date_range("2018-01-01", "2019-01-01", freq="W-MON"),
        piece=[(0.1, 2), (0.15, 1)],
    )
    check = xa.review([st1, st2], ["Plan A", "Plan Z"])
    assert isinstance(check.content, str)
    conf = {}
    check.notification(conf)
    captured = capsys.readouterr()
    assert captured.out == "没有提醒待发送\n"
    check.content = "a\nb"
    check.notification(conf)
    captured = capsys.readouterr()
    assert captured.out == "邮件发送失败\n"
Example 9
Project: NeuroKit   Author: neuropsychology   File: signal_resample.py    License: MIT License 6 votes vote down vote up
def _resample_pandas(signal, desired_length):
    """Resample `signal` to `desired_length` samples via pandas time-series resampling."""
    # Give the signal a millisecond-frequency datetime index so pandas can resample it.
    index = pd.date_range("20131212", freq="L", periods=len(signal))
    series = pd.Series(signal, index=index)

    # Milliseconds per output sample, rounded to 6 decimals.
    factor = str(np.round(1 / (desired_length / len(signal)), 6)) + "L"

    resampled = series.resample(factor).bfill().values

    # Trim or pad so the output length is exactly desired_length.
    return _resample_sanitize(resampled, desired_length)


# =============================================================================
# Internals
# ============================================================================= 
Example 10
Project: pysat   Author: pysat   File: test_instrument.py    License: BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_iterate_over_bounds_set_by_date_season_extra_time(self):
        """Iterating a two-season bound whose datetimes carry sub-day time info."""
        starts = [pysat.datetime(2009, 1, 1, 1, 10),
                  pysat.datetime(2009, 2, 1, 1, 10)]
        stops = [pysat.datetime(2009, 1, 15, 1, 10),
                 pysat.datetime(2009, 2, 15, 1, 10)]
        self.testInst.bounds = (starts, stops)
        # Strip the extra (hour, minute) information, as the instrument does.
        starts = self.testInst._filter_datetime_input(starts)
        stops = self.testInst._filter_datetime_input(stops)
        # Collect every date visited during iteration.
        dates = [inst.date for inst in self.testInst]
        expected = pds.date_range(starts[0], stops[0]).tolist()
        expected.extend(pds.date_range(starts[1], stops[1]).tolist())
        assert np.all(dates == expected)
Example 11
Project: pysat   Author: pysat   File: test_instrument.py    License: BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_iterate_over_bounds_set_by_fname_via_next(self):
        """Forward .next() iteration over filename bounds visits each date once."""
        start = '2009-01-01.nofile'
        stop = '2009-01-15.nofile'
        start_d = pysat.datetime(2009, 1, 1)
        stop_d = pysat.datetime(2009, 1, 15)
        self.testInst.bounds = (start, stop)
        dates = []
        # Step forward until the instrument signals exhaustion.
        try:
            while True:
                self.testInst.next()
                dates.append(self.testInst.date)
        except StopIteration:
            pass
        expected = pds.date_range(start_d, stop_d).tolist()
        assert np.all(dates == expected)
Example 12
Project: pysat   Author: pysat   File: test_instrument.py    License: BSD 3-Clause "New" or "Revised" License 6 votes vote down vote up
def test_iterate_over_bounds_set_by_fname_via_prev(self):
        """Backward .prev() iteration over filename bounds yields dates reversed."""
        start = '2009-01-01.nofile'
        stop = '2009-01-15.nofile'
        start_d = pysat.datetime(2009, 1, 1)
        stop_d = pysat.datetime(2009, 1, 15)
        self.testInst.bounds = (start, stop)
        dates = []
        # Step backward until the instrument signals exhaustion.
        try:
            while True:
                self.testInst.prev()
                dates.append(self.testInst.date)
        except StopIteration:
            pass
        expected = pds.date_range(start_d, stop_d).tolist()
        assert np.all(dates == expected[::-1])
Example 13
Project: scikit-downscale   Author: jhamman   File: utils.py    License: Apache License 2.0 6 votes vote down vote up
def zscore_ds_plot(training, target, future, corrected):
    """Plot the `uas` field of four datasets on their respective time axes."""
    labels = ["training", "future", "target", "corrected"]
    colors = dict(zip(labels, sns.color_palette("Set2", n_colors=4)))

    alpha = 0.5

    time_target = pd.date_range("1980-01-01", "1989-12-31", freq="D")
    # Drop Feb 29 — presumably to align with a no-leap model calendar; confirm.
    time_training = time_target[~((time_target.month == 2) & (time_target.day == 29))]
    time_future = pd.date_range("1990-01-01", "1999-12-31", freq="D")
    time_future = time_future[~((time_future.month == 2) & (time_future.day == 29))]

    plt.figure(figsize=(8, 4))
    series = [
        (time_training, training, "training"),
        (time_target, target, "target"),
        (time_future, future, "future"),
        (time_future, corrected, "corrected"),
    ]
    for times, dataset, name in series:
        plt.plot(times, dataset.uas, label=name, alpha=alpha, c=colors[name])

    plt.xlabel("Time")
    plt.ylabel("Eastward Near-Surface Wind (m s-1)")
    plt.legend()
Example 14
Project: scikit-downscale   Author: jhamman   File: test_pointwise_models.py    License: Apache License 2.0 6 votes vote down vote up
def test_zscore_shift():
    """ZScoreRegressor fit on constant 0 -> 1 data learns a +1 day-of-year shift."""
    time = pd.date_range(start="2018-01-01", end="2020-01-01")
    n = len(time)

    X = xr.DataArray(np.zeros(n), name="foo", dims=["index"],
                     coords={"index": time}).to_dataframe()
    y = xr.DataArray(np.ones(n), name="foo", dims=["index"],
                     coords={"index": time}).to_dataframe()

    # One expected shift value per day of year (1..364).
    expected_shift = xr.DataArray(
        np.ones(364), name="foo", dims=["day"], coords={"day": np.arange(1, 365)}
    ).to_series()

    model = ZScoreRegressor()
    model.fit(X, y)

    np.testing.assert_allclose(model.shift_, expected_shift)
Example 15
Project: arctic   Author: man-group   File: test_toplevel.py    License: GNU Lesser General Public License v2.1 6 votes vote down vote up
def test_should_return_data_when_date_range_spans_libraries(toplevel_tickstore, arctic):
    """A read spanning two yearly libraries stitches both frames together."""
    arctic.initialize_library('FEED_2010.LEVEL1', tickstore.TICK_STORE_TYPE)
    arctic.initialize_library('FEED_2011.LEVEL1', tickstore.TICK_STORE_TYPE)
    toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')
    toplevel_tickstore.add(DateRange(start=dt(2011, 1, 1), end=dt(2011, 12, 31, 23, 59, 59, 999000)), 'FEED_2011.LEVEL1')
    tz = mktz('Europe/London')
    df_10 = pd.DataFrame(np.random.randn(6, 4),
                         index=pd.date_range('20100101', periods=6, tz=tz),
                         columns=list('ABCD'))
    arctic['FEED_2010.LEVEL1'].write('blah', df_10)
    df_11 = pd.DataFrame(np.random.randn(6, 4),
                         index=pd.date_range('20110101', periods=6, tz=tz),
                         columns=list('ABCD'))
    arctic['FEED_2011.LEVEL1'].write('blah', df_11)
    res = toplevel_tickstore.read('blah', DateRange(start=dt(2010, 1, 2), end=dt(2011, 1, 4)), list('ABCD'))
    # The queried range excludes the first 2010 row and the last two 2011 rows.
    expected = pd.concat([df_10[1:], df_11[:4]])
    assert_frame_equal(expected, res.tz_convert(tz))
Example 16
Project: arctic   Author: man-group   File: test_toplevel.py    License: GNU Lesser General Public License v2.1 6 votes vote down vote up
def test_should_return_data_when_date_range_spans_libraries_even_if_one_returns_nothing(toplevel_tickstore, arctic):
    """A spanning read still works when one underlying library has no data in range."""
    arctic.initialize_library('FEED_2010.LEVEL1', tickstore.TICK_STORE_TYPE)
    arctic.initialize_library('FEED_2011.LEVEL1', tickstore.TICK_STORE_TYPE)
    toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')
    toplevel_tickstore.add(DateRange(start=dt(2011, 1, 1), end=dt(2011, 12, 31, 23, 59, 59, 999000)), 'FEED_2011.LEVEL1')
    tz = mktz('Europe/London')
    df_10 = pd.DataFrame(np.random.randn(6, 4),
                         index=pd.date_range('20100101', periods=6, tz=tz),
                         columns=list('ABCD'))
    arctic['FEED_2010.LEVEL1'].write('blah', df_10)
    # The 2011 data starts in February, entirely outside the queried range.
    df_11 = pd.DataFrame(np.random.randn(6, 4),
                         index=pd.date_range('20110201', periods=6, tz=tz),
                         columns=list('ABCD'))
    arctic['FEED_2011.LEVEL1'].write('blah', df_11)
    res = toplevel_tickstore.read('blah', DateRange(start=dt(2010, 1, 2), end=dt(2011, 1, 4)), list('ABCD'))
    assert_frame_equal(df_10[1:], res.tz_convert(tz))
Example 17
Project: arctic   Author: man-group   File: test_toplevel.py    License: GNU Lesser General Public License v2.1 6 votes vote down vote up
def test_should_successfully_do_a_roundtrip_write_and_read_spanning_multiple_underlying_libraries(toplevel_tickstore, arctic):
    """Writing via the toplevel store routes each row into the correct yearly library."""
    for name in ('FEED_2010.LEVEL1', 'FEED_2011.LEVEL1', 'test_current.toplevel_tickstore'):
        arctic.initialize_library(name, tickstore.TICK_STORE_TYPE)
    toplevel_tickstore.add(DateRange(start=dt(2010, 1, 1), end=dt(2010, 12, 31, 23, 59, 59, 999000)), 'FEED_2010.LEVEL1')
    toplevel_tickstore.add(DateRange(start=dt(2011, 1, 1), end=dt(2011, 12, 31, 23, 59, 59, 999000)), 'FEED_2011.LEVEL1')
    tz = mktz('Europe/London')
    dates = pd.date_range('20101201', periods=57, tz=tz)
    data = pd.DataFrame(np.random.randn(57, 4), index=dates, columns=list('ABCD'))
    toplevel_tickstore.write('blah', data)
    arctic['test_current.toplevel_tickstore'].write('blah', data)
    # The full span reads back identically through the toplevel store.
    res = toplevel_tickstore.read('blah', DateRange(start=dt(2010, 12, 1), end=dt(2011, 2, 1)), columns=list('ABCD'))
    assert_frame_equal(data, res.tz_convert(tz))
    # Each underlying library holds exactly its own year's slice.
    res = arctic['FEED_2010.LEVEL1'].read('blah', DateRange(start=dt(2010, 12, 1), end=dt(2011, 1, 1)), columns=list('ABCD'))
    assert_frame_equal(data[dt(2010, 12, 1): dt(2010, 12, 31)], res.tz_convert(tz))
    res = arctic['FEED_2011.LEVEL1'].read('blah', DateRange(start=dt(2011, 1, 1), end=dt(2011, 2, 1)), columns=list('ABCD'))
    assert_frame_equal(data[dt(2011, 1, 1): dt(2011, 2, 1)], res.tz_convert(tz))
Example 18
Project: arctic   Author: man-group   File: test_toplevel.py    License: GNU Lesser General Public License v2.1 6 votes vote down vote up
def test_should_write_top_level_with_correct_timezone(arctic):
    """Write timezone-aware data and read it back converted to UTC."""
    utc = mktz('UTC')
    arctic.initialize_library('FEED_2010.LEVEL1', tickstore.TICK_STORE_TYPE)
    arctic.initialize_library('FEED_2011.LEVEL1', tickstore.TICK_STORE_TYPE)
    arctic.initialize_library('FEED.LEVEL1', toplevel.TICK_STORE_TYPE)
    toplevel_tickstore = arctic['FEED.LEVEL1']
    # 10pm New York time is 3am the next day in UTC.
    dates = pd.date_range('20101230220000', periods=10, tz=mktz('America/New_York'))
    data = [{'index': ts, 'a': i} for i, ts in enumerate(dates)]
    expected = pd.DataFrame(np.arange(len(dates), dtype=np.float64),
                            index=dates.tz_convert(utc), columns=list('a'))
    toplevel_tickstore.write('blah', data)
    res = toplevel_tickstore.read('blah', DateRange(start=dt(2010, 1, 1), end=dt(2011, 12, 31)), columns=list('a')).tz_convert(utc)
    assert_frame_equal(expected, res)
    # Only one point (3am UTC on the 31st) should land in the 2010 library.
    lib2010 = arctic['FEED_2010.LEVEL1']
    assert len(lib2010.read('blah', DateRange(start=dt(2010, 12, 1), end=dt(2011, 1, 1)))) == 1
Example 19
Project: arctic   Author: man-group   File: test_fixes.py    License: GNU Lesser General Public License v2.1 6 votes vote down vote up
def test_rewrite(chunkstore_lib):
    """
    Issue 427
    incorrectly storing and updating metadata. dataframes without an index
    have no "index" field in their metadata, so updating existing
    metadata does not remove the index field.
    Also, metadata was incorrectly being stored. symbol, start, and end
    are the index for the collection, but metadata was being
    stored without an index (so it was defaulting to null,null,null)
    """
    rng = pd.date_range(start=dt(2017, 5, 1, 1), periods=8, freq='6H')
    values = [100, 200, 300, 400, 500, 600, 700, 800]

    # First write: the dates are the frame's index.
    indexed = DataFrame(data={'something': values},
                        index=DatetimeIndex(rng, name='date'))
    chunkstore_lib.write('test', indexed, chunk_size='D')

    # Rewrite with the dates as an ordinary column instead.
    unindexed = DataFrame(data={'something': values, 'date': rng})
    chunkstore_lib.write('test', unindexed, chunk_size='D')

    assert_frame_equal(chunkstore_lib.read('test'), unindexed)
Example 20
Project: arctic   Author: man-group   File: test_fixes.py    License: GNU Lesser General Public License v2.1 6 votes vote down vote up
def test_missing_cols(chunkstore_lib):
    """Appending a frame that lacks a column back-fills that column with NaN on read."""
    index = DatetimeIndex(pd.date_range('2019-01-01', periods=3, freq='D'), name='date')
    index2 = DatetimeIndex(pd.date_range('2019-01-04', periods=3, freq='D'), name='date')

    chunkstore_lib.write('test', pd.DataFrame({'A': [1, 2, 3], 'B': [5, 6, 7]}, index=index), chunk_size='D')
    # The appended chunk has no 'B' column.
    chunkstore_lib.append('test', pd.DataFrame({'A': [40, 50, 60]}, index=index2), chunk_size='D')

    expected_index = DatetimeIndex(pd.date_range('2019-01-01', periods=6, freq='D'), name='date')
    expected_df = DataFrame({'A': [1, 2, 3, 40, 50, 60],
                             'B': [5.0, 6.0, 7.0, np.nan, np.nan, np.nan]},
                            index=expected_index)

    assert_frame_equal(chunkstore_lib.read('test'), expected_df)
    assert_frame_equal(chunkstore_lib.read('test', columns=['B']), expected_df['B'].to_frame())
Example 21
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0 5 votes vote down vote up
def test_apply_time_offset():
    """apply_time_offset matches pandas DateOffset arithmetic."""
    start = datetime.datetime(1900, 5, 10)
    years, months, days, hours = -2, 1, 7, 3
    offset = pd.DateOffset(years=years, months=months, days=days, hours=hours)
    # test lengths 0, 1, and >1 of input time array
    for periods in range(3):
        raw = pd.date_range(start=start, freq='M', periods=periods)
        times = pd.to_datetime(raw.values)  # Workaround for pandas bug
        actual = apply_time_offset(xr.DataArray(times), years=years,
                                   months=months, days=days, hours=hours)
        assert actual.identical(times + offset)
Example 22
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0 5 votes vote down vote up
def test_monthly_mean_ts_single_month():
    """A single month of 6-hourly data collapses to that month's scalar mean."""
    times = pd.date_range('2000-01-01', freq='6H', periods=4 * 31)
    arr = xr.DataArray(np.random.random(times.shape),
                       coords={TIME_STR: times}, dims=[TIME_STR])
    np.testing.assert_allclose(monthly_mean_ts(arr), arr.mean(TIME_STR))
Example 23
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0 5 votes vote down vote up
def test_monthly_mean_ts_submonthly():
    """Daily data is averaged up to monthly means."""
    times = pd.date_range('2000-01-01', freq='1D', periods=365 * 3)
    arr = xr.DataArray(np.random.random(times.shape),
                       coords={TIME_STR: times}, dims=[TIME_STR])
    expected = arr.resample(**{TIME_STR: '1M'}).mean(TIME_STR)
    assert expected.identical(monthly_mean_ts(arr))
Example 24
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0 5 votes vote down vote up
def test_monthly_mean_ts_monthly():
    """Data that is already monthly passes through unchanged."""
    times = pd.date_range('2000-01-01', freq='1M', periods=120)
    arr = xr.DataArray(np.random.random(times.shape),
                       coords={TIME_STR: times}, dims=[TIME_STR])
    assert arr.identical(monthly_mean_ts(arr))
Example 25
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0 5 votes vote down vote up
def test_extract_months():
    """extract_months pulls every March-May span out of a multi-year daily range."""
    def _daily(first, last):
        # Helper: a daily-frequency DataArray over [first, last].
        return xr.DataArray(pd.date_range(start=first, end=last, freq='1D'),
                            dims=[TIME_STR])

    time = _daily('2001-02-18', '2002-07-12')
    desired = xr.concat([_daily('2001-03-01', '2001-05-31'),
                         _daily('2002-03-01', '2002-05-31')], dim=TIME_STR)
    actual = extract_months(time, 'mam')  # March-April-May
    xr.testing.assert_identical(actual, desired)
Example 26
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0 5 votes vote down vote up
def test_extract_months_single_month():
    """Extracting January from a January-only range is the identity."""
    time = xr.DataArray(pd.date_range(start='1678-01-01', end='1678-01-31',
                                      freq='1M'), dims=[TIME_STR])
    xr.testing.assert_identical(extract_months(time, 1), time)
Example 27
Project: aospy   Author: spencerahill   File: test_utils_times.py    License: Apache License 2.0 5 votes vote down vote up
def test_assert_matching_time_coord():
    """Identical time coords pass; a truncated coord raises ValueError."""
    rng = pd.date_range('2000-01-01', '2001-01-01', freq='M')
    arr1 = xr.DataArray(rng, coords=[rng], dims=[TIME_STR])
    arr2 = xr.DataArray(rng, coords=[rng], dims=[TIME_STR])
    assert_matching_time_coord(arr1, arr2)

    truncated = arr2.sel(**{TIME_STR: slice('2000-03', '2000-05')})
    with pytest.raises(ValueError):
        assert_matching_time_coord(arr1, truncated)
Example 28
Project: xrft   Author: xgcm   File: test_xrft.py    License: MIT License 5 votes vote down vote up
def test_dft_1d_time(self):
        """Test the discrete Fourier transform function on timeseries data."""
        time = pd.date_range('2000-01-01', '2001-01-01', closed='left')
        n = len(time)
        da = xr.DataArray(np.random.rand(n), coords=[time], dims=['time'])

        ft = xrft.dft(da)

        # Expected frequencies follow from the uniform sampling interval in seconds.
        step_seconds = (time[1] - time[0]).total_seconds()
        expected_freqs = np.fft.fftshift(np.fft.fftfreq(n, step_seconds))
        npt.assert_allclose(ft['freq_time'], expected_freqs)
Example 29
Project: pywr   Author: pywr   File: thames.py    License: GNU General Public License v3.0 5 votes vote down vote up
def plot_control_curves(ext, show):
    """Plot the model's monthly control-curve zones across a calendar year."""
    with open(MODEL_FILENAME) as fh:
        parameters = json.load(fh)['parameters']

    fig, ax = plt.subplots(figsize=(8, 5), dpi=300)

    dates = pandas.date_range("2015-01-01", "2015-12-31")
    # Expand each monthly value onto every day of that month.
    months = [d.month - 1 for d in dates]
    L1_values = np.array([parameters['level1']['values'][m] for m in months])
    L2_values = np.array([parameters['level2']['values'][m] for m in months])

    x = np.arange(1, len(dates) + 1)

    # Shade the three zones between the curves.
    ax.fill_between(x, 1.0, L1_values, label='Level 0', alpha=0.8)
    ax.fill_between(x, L1_values, L2_values, label='Level 1', alpha=0.8)
    ax.fill_between(x, L2_values, 0.0, label='Level 2', alpha=0.8)

    plt.xlabel("Day of year")
    plt.ylabel("Reservoir volume [%]")

    plt.grid(True)
    plt.ylim([0.0, 1.0])
    plt.xlim(1, 365)
    plt.legend(["Level 0", "Level 1", "Level 2"], loc="upper right")
    # Outline the zone boundaries in black.
    ax.plot(x, L1_values, color='k', label=None)
    ax.plot(x, L2_values, color='k', label=None)
    plt.tight_layout()

    if ext is not None:
        fig.savefig(f'Control curve zones.{ext}', dpi=300)

    if show:
        plt.show()
Example 30
Project: pywr   Author: pywr   File: thames.py    License: GNU General Public License v3.0 5 votes vote down vote up
def plot_demand_saving_factor(ext, show):
    """Plot the demand restriction factor for each control-curve level.

    Bug fix: all three ``ax.plot`` calls carried ``label='Level 0'`` (a
    copy-paste error); the lines are now labelled Level 0/1/2. The final
    explicit ``plt.legend([...])`` masked the bug, but the per-line labels
    matter for any later ``ax.get_legend_handles_labels()`` use.

    Parameters
    ----------
    ext : str or None
        If given, save the figure as ``Demand restriction factors.<ext>``.
    show : bool
        If True, display the figure interactively.
    """
    with open(MODEL_FILENAME) as fh:
        data = json.load(fh)

    parameters = data['parameters']

    fig, ax = plt.subplots(figsize=(8, 5), dpi=300)

    dates = pandas.date_range("2015-01-01", "2015-12-31")

    # level0_factor appears to be a single constant value (no monthly
    # indexing), while levels 1 and 2 vary by month -- TODO confirm schema.
    L0_values = np.array([parameters['level0_factor']['values'] for d in dates])
    L1_values = np.array([parameters['level1_factor']['values'][d.month-1] for d in dates])
    L2_values = np.array([parameters['level2_factor']['values'][d.month-1] for d in dates])

    x = np.arange(0, len(dates)) + 1

    ax.plot(x, L0_values, label='Level 0')
    ax.plot(x, L1_values, label='Level 1')
    ax.plot(x, L2_values, label='Level 2')

    plt.xlabel("Day of year")
    plt.ylabel("Demand restriction factor")

    plt.grid(True)
    plt.ylim(0.6, 1.2)
    plt.xlim(1, 365)
    plt.legend(["Level 0", "Level 1", "Level 2"], loc="upper right")

    plt.tight_layout()

    if ext is not None:
        fig.savefig(f'Demand restriction factors.{ext}', dpi=300)

    if show:
        plt.show()