/pandas/tseries/tests/test_timeseries.py
Python | 4212 lines | 3261 code | 793 blank | 158 comment | 152 complexity | a41322788c317267557679b46486866f MD5 | raw file
Possible License(s): BSD-3-Clause, Apache-2.0
Large files files are truncated, but you can click here to view the full file
- # pylint: disable-msg=E1101,W0612
- from datetime import datetime, time, timedelta
- import sys
- import operator
- import nose
- import numpy as np
- randn = np.random.randn
- from pandas import (Index, Series, TimeSeries, DataFrame,
- isnull, date_range, Timestamp, Period, DatetimeIndex,
- Int64Index, to_datetime, bdate_range, Float64Index)
- import pandas.core.datetools as datetools
- import pandas.tseries.offsets as offsets
- import pandas.tseries.tools as tools
- import pandas.tseries.frequencies as fmod
- import pandas as pd
- from pandas.util.testing import assert_series_equal, assert_almost_equal
- import pandas.util.testing as tm
- from pandas.tslib import NaT, iNaT
- import pandas.lib as lib
- import pandas.tslib as tslib
- import pandas.index as _index
- from pandas.compat import range, long, StringIO, lrange, lmap, zip, product
- import pandas.core.datetools as dt
- from numpy.random import rand
- from numpy.testing import assert_array_equal
- from pandas.util.testing import assert_frame_equal
- import pandas.compat as compat
- import pandas.core.common as com
- from pandas import concat
- from pandas import _np_version_under1p7
- from numpy.testing.decorators import slow
- def _skip_if_has_locale():
- import locale
- lang, _ = locale.getlocale()
- if lang is not None:
- raise nose.SkipTest("Specific locale is set {0}".format(lang))
- def _skip_if_windows_python_3():
- if sys.version_info > (3,) and sys.platform == 'win32':
- raise nose.SkipTest("not used on python 3/win32")
- def _skip_if_not_windows_python_3():
- if sys.version_info < (3,) or sys.platform != 'win32':
- raise nose.SkipTest("only run on python 3/win32")
- class TestTimeSeriesDuplicates(tm.TestCase):
- _multiprocess_can_split_ = True
- def setUp(self):
- dates = [datetime(2000, 1, 2), datetime(2000, 1, 2),
- datetime(2000, 1, 2), datetime(2000, 1, 3),
- datetime(2000, 1, 3), datetime(2000, 1, 3),
- datetime(2000, 1, 4), datetime(2000, 1, 4),
- datetime(2000, 1, 4), datetime(2000, 1, 5)]
- self.dups = Series(np.random.randn(len(dates)), index=dates)
- def test_constructor(self):
- tm.assert_isinstance(self.dups, TimeSeries)
- tm.assert_isinstance(self.dups.index, DatetimeIndex)
- def test_is_unique_monotonic(self):
- self.assertFalse(self.dups.index.is_unique)
- def test_index_unique(self):
- uniques = self.dups.index.unique()
- expected = DatetimeIndex([datetime(2000, 1, 2), datetime(2000, 1, 3),
- datetime(2000, 1, 4), datetime(2000, 1, 5)])
- self.assertEqual(uniques.dtype, 'M8[ns]') # sanity
- self.assertTrue(uniques.equals(expected))
- self.assertEqual(self.dups.index.nunique(), 4)
- # #2563
- self.assertTrue(isinstance(uniques, DatetimeIndex))
- dups_local = self.dups.index.tz_localize('US/Eastern')
- dups_local.name = 'foo'
- result = dups_local.unique()
- expected = DatetimeIndex(expected, tz='US/Eastern')
- self.assertTrue(result.tz is not None)
- self.assertEqual(result.name, 'foo')
- self.assertTrue(result.equals(expected))
- # NaT, note this is excluded
- arr = [ 1370745748 + t for t in range(20) ] + [iNaT]
- idx = DatetimeIndex(arr * 3)
- self.assertTrue(idx.unique().equals(DatetimeIndex(arr)))
- self.assertEqual(idx.nunique(), 20)
- self.assertEqual(idx.nunique(dropna=False), 21)
- arr = [ Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t) for t in range(20) ] + [NaT]
- idx = DatetimeIndex(arr * 3)
- self.assertTrue(idx.unique().equals(DatetimeIndex(arr)))
- self.assertEqual(idx.nunique(), 20)
- self.assertEqual(idx.nunique(dropna=False), 21)
- def test_index_dupes_contains(self):
- d = datetime(2011, 12, 5, 20, 30)
- ix = DatetimeIndex([d, d])
- self.assertTrue(d in ix)
- def test_duplicate_dates_indexing(self):
- ts = self.dups
- uniques = ts.index.unique()
- for date in uniques:
- result = ts[date]
- mask = ts.index == date
- total = (ts.index == date).sum()
- expected = ts[mask]
- if total > 1:
- assert_series_equal(result, expected)
- else:
- assert_almost_equal(result, expected[0])
- cp = ts.copy()
- cp[date] = 0
- expected = Series(np.where(mask, 0, ts), index=ts.index)
- assert_series_equal(cp, expected)
- self.assertRaises(KeyError, ts.__getitem__, datetime(2000, 1, 6))
- # new index
- ts[datetime(2000,1,6)] = 0
- self.assertEqual(ts[datetime(2000,1,6)], 0)
- def test_range_slice(self):
- idx = DatetimeIndex(['1/1/2000', '1/2/2000', '1/2/2000', '1/3/2000',
- '1/4/2000'])
- ts = Series(np.random.randn(len(idx)), index=idx)
- result = ts['1/2/2000':]
- expected = ts[1:]
- assert_series_equal(result, expected)
- result = ts['1/2/2000':'1/3/2000']
- expected = ts[1:4]
- assert_series_equal(result, expected)
- def test_groupby_average_dup_values(self):
- result = self.dups.groupby(level=0).mean()
- expected = self.dups.groupby(self.dups.index).mean()
- assert_series_equal(result, expected)
- def test_indexing_over_size_cutoff(self):
- import datetime
- # #1821
- old_cutoff = _index._SIZE_CUTOFF
- try:
- _index._SIZE_CUTOFF = 1000
- # create large list of non periodic datetime
- dates = []
- sec = datetime.timedelta(seconds=1)
- half_sec = datetime.timedelta(microseconds=500000)
- d = datetime.datetime(2011, 12, 5, 20, 30)
- n = 1100
- for i in range(n):
- dates.append(d)
- dates.append(d + sec)
- dates.append(d + sec + half_sec)
- dates.append(d + sec + sec + half_sec)
- d += 3 * sec
- # duplicate some values in the list
- duplicate_positions = np.random.randint(0, len(dates) - 1, 20)
- for p in duplicate_positions:
- dates[p + 1] = dates[p]
- df = DataFrame(np.random.randn(len(dates), 4),
- index=dates,
- columns=list('ABCD'))
- pos = n * 3
- timestamp = df.index[pos]
- self.assertIn(timestamp, df.index)
- # it works!
- df.ix[timestamp]
- self.assertTrue(len(df.ix[[timestamp]]) > 0)
- finally:
- _index._SIZE_CUTOFF = old_cutoff
- def test_indexing_unordered(self):
- # GH 2437
- rng = date_range(start='2011-01-01', end='2011-01-15')
- ts = Series(randn(len(rng)), index=rng)
- ts2 = concat([ts[0:4],ts[-4:],ts[4:-4]])
- for t in ts.index:
- s = str(t)
- expected = ts[t]
- result = ts2[t]
- self.assertTrue(expected == result)
- # GH 3448 (ranges)
- def compare(slobj):
- result = ts2[slobj].copy()
- result = result.sort_index()
- expected = ts[slobj]
- assert_series_equal(result,expected)
- compare(slice('2011-01-01','2011-01-15'))
- compare(slice('2010-12-30','2011-01-15'))
- compare(slice('2011-01-01','2011-01-16'))
- # partial ranges
- compare(slice('2011-01-01','2011-01-6'))
- compare(slice('2011-01-06','2011-01-8'))
- compare(slice('2011-01-06','2011-01-12'))
- # single values
- result = ts2['2011'].sort_index()
- expected = ts['2011']
- assert_series_equal(result,expected)
- # diff freq
- rng = date_range(datetime(2005, 1, 1), periods=20, freq='M')
- ts = Series(np.arange(len(rng)), index=rng)
- ts = ts.take(np.random.permutation(20))
- result = ts['2005']
- for t in result.index:
- self.assertTrue(t.year == 2005)
- def test_indexing(self):
- idx = date_range("2001-1-1", periods=20, freq='M')
- ts = Series(np.random.rand(len(idx)),index=idx)
- # getting
- # GH 3070, make sure semantics work on Series/Frame
- expected = ts['2001']
- df = DataFrame(dict(A = ts))
- result = df['2001']['A']
- assert_series_equal(expected,result)
- # setting
- ts['2001'] = 1
- expected = ts['2001']
- df.loc['2001','A'] = 1
- result = df['2001']['A']
- assert_series_equal(expected,result)
- # GH3546 (not including times on the last day)
- idx = date_range(start='2013-05-31 00:00', end='2013-05-31 23:00', freq='H')
- ts = Series(lrange(len(idx)), index=idx)
- expected = ts['2013-05']
- assert_series_equal(expected,ts)
- idx = date_range(start='2013-05-31 00:00', end='2013-05-31 23:59', freq='S')
- ts = Series(lrange(len(idx)), index=idx)
- expected = ts['2013-05']
- assert_series_equal(expected,ts)
- idx = [ Timestamp('2013-05-31 00:00'), Timestamp(datetime(2013,5,31,23,59,59,999999))]
- ts = Series(lrange(len(idx)), index=idx)
- expected = ts['2013']
- assert_series_equal(expected,ts)
- # GH 3925, indexing with a seconds resolution string / datetime object
- df = DataFrame(randn(5,5),columns=['open','high','low','close','volume'],index=date_range('2012-01-02 18:01:00',periods=5,tz='US/Central',freq='s'))
- expected = df.loc[[df.index[2]]]
- result = df['2012-01-02 18:01:02']
- assert_frame_equal(result,expected)
- # this is a single date, so will raise
- self.assertRaises(KeyError, df.__getitem__, df.index[2],)
- def test_recreate_from_data(self):
- if _np_version_under1p7:
- freqs = ['M', 'Q', 'A', 'D', 'B', 'T', 'S', 'L', 'U', 'H']
- else:
- freqs = ['M', 'Q', 'A', 'D', 'B', 'T', 'S', 'L', 'U', 'H', 'N', 'C']
- for f in freqs:
- org = DatetimeIndex(start='2001/02/01 09:00', freq=f, periods=1)
- idx = DatetimeIndex(org, freq=f)
- self.assertTrue(idx.equals(org))
- org = DatetimeIndex(start='2001/02/01 09:00', freq=f, tz='US/Pacific', periods=1)
- idx = DatetimeIndex(org, freq=f, tz='US/Pacific')
- self.assertTrue(idx.equals(org))
- def assert_range_equal(left, right):
- assert(left.equals(right))
- assert(left.freq == right.freq)
- assert(left.tz == right.tz)
- class TestTimeSeries(tm.TestCase):
- _multiprocess_can_split_ = True
- def test_is_(self):
- dti = DatetimeIndex(start='1/1/2005', end='12/1/2005', freq='M')
- self.assertTrue(dti.is_(dti))
- self.assertTrue(dti.is_(dti.view()))
- self.assertFalse(dti.is_(dti.copy()))
- def test_dti_slicing(self):
- dti = DatetimeIndex(start='1/1/2005', end='12/1/2005', freq='M')
- dti2 = dti[[1, 3, 5]]
- v1 = dti2[0]
- v2 = dti2[1]
- v3 = dti2[2]
- self.assertEqual(v1, Timestamp('2/28/2005'))
- self.assertEqual(v2, Timestamp('4/30/2005'))
- self.assertEqual(v3, Timestamp('6/30/2005'))
- # don't carry freq through irregular slicing
- self.assertIsNone(dti2.freq)
- def test_pass_datetimeindex_to_index(self):
- # Bugs in #1396
- rng = date_range('1/1/2000', '3/1/2000')
- idx = Index(rng, dtype=object)
- expected = Index(rng.to_pydatetime(), dtype=object)
- self.assert_numpy_array_equal(idx.values, expected.values)
- def test_contiguous_boolean_preserve_freq(self):
- rng = date_range('1/1/2000', '3/1/2000', freq='B')
- mask = np.zeros(len(rng), dtype=bool)
- mask[10:20] = True
- masked = rng[mask]
- expected = rng[10:20]
- self.assertIsNotNone(expected.freq)
- assert_range_equal(masked, expected)
- mask[22] = True
- masked = rng[mask]
- self.assertIsNone(masked.freq)
- def test_getitem_median_slice_bug(self):
- index = date_range('20090415', '20090519', freq='2B')
- s = Series(np.random.randn(13), index=index)
- indexer = [slice(6, 7, None)]
- result = s[indexer]
- expected = s[indexer[0]]
- assert_series_equal(result, expected)
- def test_series_box_timestamp(self):
- rng = date_range('20090415', '20090519', freq='B')
- s = Series(rng)
- tm.assert_isinstance(s[5], Timestamp)
- rng = date_range('20090415', '20090519', freq='B')
- s = Series(rng, index=rng)
- tm.assert_isinstance(s[5], Timestamp)
- tm.assert_isinstance(s.iget_value(5), Timestamp)
- def test_date_range_ambiguous_arguments(self):
- # #2538
- start = datetime(2011, 1, 1, 5, 3, 40)
- end = datetime(2011, 1, 1, 8, 9, 40)
- self.assertRaises(ValueError, date_range, start, end,
- freq='s', periods=10)
- def test_timestamp_to_datetime(self):
- tm._skip_if_no_pytz()
- rng = date_range('20090415', '20090519',
- tz='US/Eastern')
- stamp = rng[0]
- dtval = stamp.to_pydatetime()
- self.assertEqual(stamp, dtval)
- self.assertEqual(stamp.tzinfo, dtval.tzinfo)
- def test_timestamp_to_datetime_dateutil(self):
- tm._skip_if_no_pytz()
- rng = date_range('20090415', '20090519',
- tz='dateutil/US/Eastern')
- stamp = rng[0]
- dtval = stamp.to_pydatetime()
- self.assertEqual(stamp, dtval)
- self.assertEqual(stamp.tzinfo, dtval.tzinfo)
- def test_timestamp_to_datetime_explicit_pytz(self):
- tm._skip_if_no_pytz()
- import pytz
- rng = date_range('20090415', '20090519',
- tz=pytz.timezone('US/Eastern'))
- stamp = rng[0]
- dtval = stamp.to_pydatetime()
- self.assertEquals(stamp, dtval)
- self.assertEquals(stamp.tzinfo, dtval.tzinfo)
- def test_timestamp_to_datetime_explicit_dateutil(self):
- _skip_if_windows_python_3()
- tm._skip_if_no_dateutil()
- import dateutil
- rng = date_range('20090415', '20090519',
- tz=dateutil.tz.gettz('US/Eastern'))
- stamp = rng[0]
- dtval = stamp.to_pydatetime()
- self.assertEquals(stamp, dtval)
- self.assertEquals(stamp.tzinfo, dtval.tzinfo)
- def test_index_convert_to_datetime_array(self):
- tm._skip_if_no_pytz()
- def _check_rng(rng):
- converted = rng.to_pydatetime()
- tm.assert_isinstance(converted, np.ndarray)
- for x, stamp in zip(converted, rng):
- tm.assert_isinstance(x, datetime)
- self.assertEqual(x, stamp.to_pydatetime())
- self.assertEqual(x.tzinfo, stamp.tzinfo)
- rng = date_range('20090415', '20090519')
- rng_eastern = date_range('20090415', '20090519', tz='US/Eastern')
- rng_utc = date_range('20090415', '20090519', tz='utc')
- _check_rng(rng)
- _check_rng(rng_eastern)
- _check_rng(rng_utc)
- def test_index_convert_to_datetime_array_explicit_pytz(self):
- tm._skip_if_no_pytz()
- import pytz
- def _check_rng(rng):
- converted = rng.to_pydatetime()
- tm.assert_isinstance(converted, np.ndarray)
- for x, stamp in zip(converted, rng):
- tm.assert_isinstance(x, datetime)
- self.assertEquals(x, stamp.to_pydatetime())
- self.assertEquals(x.tzinfo, stamp.tzinfo)
- rng = date_range('20090415', '20090519')
- rng_eastern = date_range('20090415', '20090519', tz=pytz.timezone('US/Eastern'))
- rng_utc = date_range('20090415', '20090519', tz=pytz.utc)
- _check_rng(rng)
- _check_rng(rng_eastern)
- _check_rng(rng_utc)
- def test_index_convert_to_datetime_array_dateutil(self):
- tm._skip_if_no_dateutil()
- import dateutil
- def _check_rng(rng):
- converted = rng.to_pydatetime()
- tm.assert_isinstance(converted, np.ndarray)
- for x, stamp in zip(converted, rng):
- tm.assert_isinstance(x, datetime)
- self.assertEquals(x, stamp.to_pydatetime())
- self.assertEquals(x.tzinfo, stamp.tzinfo)
- rng = date_range('20090415', '20090519')
- rng_eastern = date_range('20090415', '20090519', tz='dateutil/US/Eastern')
- rng_utc = date_range('20090415', '20090519', tz=dateutil.tz.tzutc())
- _check_rng(rng)
- _check_rng(rng_eastern)
- _check_rng(rng_utc)
- def test_ctor_str_intraday(self):
- rng = DatetimeIndex(['1-1-2000 00:00:01'])
- self.assertEqual(rng[0].second, 1)
- def test_series_ctor_plus_datetimeindex(self):
- rng = date_range('20090415', '20090519', freq='B')
- data = dict((k, 1) for k in rng)
- result = Series(data, index=rng)
- self.assertIs(result.index, rng)
- def test_series_pad_backfill_limit(self):
- index = np.arange(10)
- s = Series(np.random.randn(10), index=index)
- result = s[:2].reindex(index, method='pad', limit=5)
- expected = s[:2].reindex(index).fillna(method='pad')
- expected[-3:] = np.nan
- assert_series_equal(result, expected)
- result = s[-2:].reindex(index, method='backfill', limit=5)
- expected = s[-2:].reindex(index).fillna(method='backfill')
- expected[:3] = np.nan
- assert_series_equal(result, expected)
- def test_series_fillna_limit(self):
- index = np.arange(10)
- s = Series(np.random.randn(10), index=index)
- result = s[:2].reindex(index)
- result = result.fillna(method='pad', limit=5)
- expected = s[:2].reindex(index).fillna(method='pad')
- expected[-3:] = np.nan
- assert_series_equal(result, expected)
- result = s[-2:].reindex(index)
- result = result.fillna(method='bfill', limit=5)
- expected = s[-2:].reindex(index).fillna(method='backfill')
- expected[:3] = np.nan
- assert_series_equal(result, expected)
- def test_frame_pad_backfill_limit(self):
- index = np.arange(10)
- df = DataFrame(np.random.randn(10, 4), index=index)
- result = df[:2].reindex(index, method='pad', limit=5)
- expected = df[:2].reindex(index).fillna(method='pad')
- expected.values[-3:] = np.nan
- tm.assert_frame_equal(result, expected)
- result = df[-2:].reindex(index, method='backfill', limit=5)
- expected = df[-2:].reindex(index).fillna(method='backfill')
- expected.values[:3] = np.nan
- tm.assert_frame_equal(result, expected)
- def test_frame_fillna_limit(self):
- index = np.arange(10)
- df = DataFrame(np.random.randn(10, 4), index=index)
- result = df[:2].reindex(index)
- result = result.fillna(method='pad', limit=5)
- expected = df[:2].reindex(index).fillna(method='pad')
- expected.values[-3:] = np.nan
- tm.assert_frame_equal(result, expected)
- result = df[-2:].reindex(index)
- result = result.fillna(method='backfill', limit=5)
- expected = df[-2:].reindex(index).fillna(method='backfill')
- expected.values[:3] = np.nan
- tm.assert_frame_equal(result, expected)
- def test_frame_setitem_timestamp(self):
- # 2155
- columns = DatetimeIndex(start='1/1/2012', end='2/1/2012',
- freq=datetools.bday)
- index = lrange(10)
- data = DataFrame(columns=columns, index=index)
- t = datetime(2012, 11, 1)
- ts = Timestamp(t)
- data[ts] = np.nan # works
- def test_sparse_series_fillna_limit(self):
- index = np.arange(10)
- s = Series(np.random.randn(10), index=index)
- ss = s[:2].reindex(index).to_sparse()
- result = ss.fillna(method='pad', limit=5)
- expected = ss.fillna(method='pad', limit=5)
- expected = expected.to_dense()
- expected[-3:] = np.nan
- expected = expected.to_sparse()
- assert_series_equal(result, expected)
- ss = s[-2:].reindex(index).to_sparse()
- result = ss.fillna(method='backfill', limit=5)
- expected = ss.fillna(method='backfill')
- expected = expected.to_dense()
- expected[:3] = np.nan
- expected = expected.to_sparse()
- assert_series_equal(result, expected)
- def test_sparse_series_pad_backfill_limit(self):
- index = np.arange(10)
- s = Series(np.random.randn(10), index=index)
- s = s.to_sparse()
- result = s[:2].reindex(index, method='pad', limit=5)
- expected = s[:2].reindex(index).fillna(method='pad')
- expected = expected.to_dense()
- expected[-3:] = np.nan
- expected = expected.to_sparse()
- assert_series_equal(result, expected)
- result = s[-2:].reindex(index, method='backfill', limit=5)
- expected = s[-2:].reindex(index).fillna(method='backfill')
- expected = expected.to_dense()
- expected[:3] = np.nan
- expected = expected.to_sparse()
- assert_series_equal(result, expected)
- def test_sparse_frame_pad_backfill_limit(self):
- index = np.arange(10)
- df = DataFrame(np.random.randn(10, 4), index=index)
- sdf = df.to_sparse()
- result = sdf[:2].reindex(index, method='pad', limit=5)
- expected = sdf[:2].reindex(index).fillna(method='pad')
- expected = expected.to_dense()
- expected.values[-3:] = np.nan
- expected = expected.to_sparse()
- tm.assert_frame_equal(result, expected)
- result = sdf[-2:].reindex(index, method='backfill', limit=5)
- expected = sdf[-2:].reindex(index).fillna(method='backfill')
- expected = expected.to_dense()
- expected.values[:3] = np.nan
- expected = expected.to_sparse()
- tm.assert_frame_equal(result, expected)
- def test_sparse_frame_fillna_limit(self):
- index = np.arange(10)
- df = DataFrame(np.random.randn(10, 4), index=index)
- sdf = df.to_sparse()
- result = sdf[:2].reindex(index)
- result = result.fillna(method='pad', limit=5)
- expected = sdf[:2].reindex(index).fillna(method='pad')
- expected = expected.to_dense()
- expected.values[-3:] = np.nan
- expected = expected.to_sparse()
- tm.assert_frame_equal(result, expected)
- result = sdf[-2:].reindex(index)
- result = result.fillna(method='backfill', limit=5)
- expected = sdf[-2:].reindex(index).fillna(method='backfill')
- expected = expected.to_dense()
- expected.values[:3] = np.nan
- expected = expected.to_sparse()
- tm.assert_frame_equal(result, expected)
- def test_pad_require_monotonicity(self):
- rng = date_range('1/1/2000', '3/1/2000', freq='B')
- rng2 = rng[::2][::-1]
- self.assertRaises(ValueError, rng2.get_indexer, rng,
- method='pad')
- def test_frame_ctor_datetime64_column(self):
- rng = date_range('1/1/2000 00:00:00', '1/1/2000 1:59:50',
- freq='10s')
- dates = np.asarray(rng)
- df = DataFrame({'A': np.random.randn(len(rng)), 'B': dates})
- self.assertTrue(np.issubdtype(df['B'].dtype, np.dtype('M8[ns]')))
- def test_frame_add_datetime64_column(self):
- rng = date_range('1/1/2000 00:00:00', '1/1/2000 1:59:50',
- freq='10s')
- df = DataFrame(index=np.arange(len(rng)))
- df['A'] = rng
- self.assertTrue(np.issubdtype(df['A'].dtype, np.dtype('M8[ns]')))
- def test_frame_datetime64_pre1900_repr(self):
- df = DataFrame({'year': date_range('1/1/1700', periods=50,
- freq='A-DEC')})
- # it works!
- repr(df)
- def test_frame_add_datetime64_col_other_units(self):
- n = 100
- units = ['h', 'm', 's', 'ms', 'D', 'M', 'Y']
- ns_dtype = np.dtype('M8[ns]')
- for unit in units:
- dtype = np.dtype('M8[%s]' % unit)
- vals = np.arange(n, dtype=np.int64).view(dtype)
- df = DataFrame({'ints': np.arange(n)}, index=np.arange(n))
- df[unit] = vals
- ex_vals = to_datetime(vals.astype('O'))
- self.assertEqual(df[unit].dtype, ns_dtype)
- self.assertTrue((df[unit].values == ex_vals).all())
- # Test insertion into existing datetime64 column
- df = DataFrame({'ints': np.arange(n)}, index=np.arange(n))
- df['dates'] = np.arange(n, dtype=np.int64).view(ns_dtype)
- for unit in units:
- dtype = np.dtype('M8[%s]' % unit)
- vals = np.arange(n, dtype=np.int64).view(dtype)
- tmp = df.copy()
- tmp['dates'] = vals
- ex_vals = to_datetime(vals.astype('O'))
- self.assertTrue((tmp['dates'].values == ex_vals).all())
- def test_to_datetime_unit(self):
- epoch = 1370745748
- s = Series([ epoch + t for t in range(20) ])
- result = to_datetime(s,unit='s')
- expected = Series([ Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t) for t in range(20) ])
- assert_series_equal(result,expected)
- s = Series([ epoch + t for t in range(20) ]).astype(float)
- result = to_datetime(s,unit='s')
- expected = Series([ Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t) for t in range(20) ])
- assert_series_equal(result,expected)
- s = Series([ epoch + t for t in range(20) ] + [iNaT])
- result = to_datetime(s,unit='s')
- expected = Series([ Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t) for t in range(20) ] + [NaT])
- assert_series_equal(result,expected)
- s = Series([ epoch + t for t in range(20) ] + [iNaT]).astype(float)
- result = to_datetime(s,unit='s')
- expected = Series([ Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t) for t in range(20) ] + [NaT])
- assert_series_equal(result,expected)
- s = concat([Series([ epoch + t for t in range(20) ]).astype(float),Series([np.nan])],ignore_index=True)
- result = to_datetime(s,unit='s')
- expected = Series([ Timestamp('2013-06-09 02:42:28') + timedelta(seconds=t) for t in range(20) ] + [NaT])
- assert_series_equal(result,expected)
- def test_series_ctor_datetime64(self):
- rng = date_range('1/1/2000 00:00:00', '1/1/2000 1:59:50',
- freq='10s')
- dates = np.asarray(rng)
- series = Series(dates)
- self.assertTrue(np.issubdtype(series.dtype, np.dtype('M8[ns]')))
- def test_index_cast_datetime64_other_units(self):
- arr = np.arange(0, 100, 10, dtype=np.int64).view('M8[D]')
- idx = Index(arr)
- self.assertTrue((idx.values == tslib.cast_to_nanoseconds(arr)).all())
- def test_index_astype_datetime64(self):
- # valid only under 1.7!
- if not _np_version_under1p7:
- raise nose.SkipTest("test only valid in numpy < 1.7")
- idx = Index([datetime(2012, 1, 1)], dtype=object)
- casted = idx.astype(np.dtype('M8[D]'))
- casted = idx.astype(np.dtype('M8[D]'))
- expected = DatetimeIndex(idx.values)
- tm.assert_isinstance(casted, DatetimeIndex)
- self.assertTrue(casted.equals(expected))
- def test_reindex_series_add_nat(self):
- rng = date_range('1/1/2000 00:00:00', periods=10, freq='10s')
- series = Series(rng)
- result = series.reindex(lrange(15))
- self.assertTrue(np.issubdtype(result.dtype, np.dtype('M8[ns]')))
- mask = result.isnull()
- self.assertTrue(mask[-5:].all())
- self.assertFalse(mask[:-5].any())
- def test_reindex_frame_add_nat(self):
- rng = date_range('1/1/2000 00:00:00', periods=10, freq='10s')
- df = DataFrame({'A': np.random.randn(len(rng)), 'B': rng})
- result = df.reindex(lrange(15))
- self.assertTrue(np.issubdtype(result['B'].dtype, np.dtype('M8[ns]')))
- mask = com.isnull(result)['B']
- self.assertTrue(mask[-5:].all())
- self.assertFalse(mask[:-5].any())
- def test_series_repr_nat(self):
- series = Series([0, 1000, 2000, iNaT], dtype='M8[ns]')
- result = repr(series)
- expected = ('0 1970-01-01 00:00:00\n'
- '1 1970-01-01 00:00:00.000001\n'
- '2 1970-01-01 00:00:00.000002\n'
- '3 NaT\n'
- 'dtype: datetime64[ns]')
- self.assertEqual(result, expected)
- def test_fillna_nat(self):
- series = Series([0, 1, 2, iNaT], dtype='M8[ns]')
- filled = series.fillna(method='pad')
- filled2 = series.fillna(value=series.values[2])
- expected = series.copy()
- expected.values[3] = expected.values[2]
- assert_series_equal(filled, expected)
- assert_series_equal(filled2, expected)
- df = DataFrame({'A': series})
- filled = df.fillna(method='pad')
- filled2 = df.fillna(value=series.values[2])
- expected = DataFrame({'A': expected})
- assert_frame_equal(filled, expected)
- assert_frame_equal(filled2, expected)
- series = Series([iNaT, 0, 1, 2], dtype='M8[ns]')
- filled = series.fillna(method='bfill')
- filled2 = series.fillna(value=series[1])
- expected = series.copy()
- expected[0] = expected[1]
- assert_series_equal(filled, expected)
- assert_series_equal(filled2, expected)
- df = DataFrame({'A': series})
- filled = df.fillna(method='bfill')
- filled2 = df.fillna(value=series[1])
- expected = DataFrame({'A': expected})
- assert_frame_equal(filled, expected)
- assert_frame_equal(filled2, expected)
- def test_string_na_nat_conversion(self):
- # GH #999, #858
- from pandas.compat import parse_date
- strings = np.array(['1/1/2000', '1/2/2000', np.nan,
- '1/4/2000, 12:34:56'], dtype=object)
- expected = np.empty(4, dtype='M8[ns]')
- for i, val in enumerate(strings):
- if com.isnull(val):
- expected[i] = iNaT
- else:
- expected[i] = parse_date(val)
- result = tslib.array_to_datetime(strings)
- assert_almost_equal(result, expected)
- result2 = to_datetime(strings)
- tm.assert_isinstance(result2, DatetimeIndex)
- assert_almost_equal(result, result2)
- malformed = np.array(['1/100/2000', np.nan], dtype=object)
- result = to_datetime(malformed)
- assert_almost_equal(result, malformed)
- self.assertRaises(ValueError, to_datetime, malformed,
- errors='raise')
- idx = ['a', 'b', 'c', 'd', 'e']
- series = Series(['1/1/2000', np.nan, '1/3/2000', np.nan,
- '1/5/2000'], index=idx, name='foo')
- dseries = Series([to_datetime('1/1/2000'), np.nan,
- to_datetime('1/3/2000'), np.nan,
- to_datetime('1/5/2000')], index=idx, name='foo')
- result = to_datetime(series)
- dresult = to_datetime(dseries)
- expected = Series(np.empty(5, dtype='M8[ns]'), index=idx)
- for i in range(5):
- x = series[i]
- if isnull(x):
- expected[i] = iNaT
- else:
- expected[i] = to_datetime(x)
- assert_series_equal(result, expected)
- self.assertEqual(result.name, 'foo')
- assert_series_equal(dresult, expected)
- self.assertEqual(dresult.name, 'foo')
- def test_to_datetime_iso8601(self):
- result = to_datetime(["2012-01-01 00:00:00"])
- exp = Timestamp("2012-01-01 00:00:00")
- self.assertEqual(result[0], exp)
- result = to_datetime(['20121001']) # bad iso 8601
- exp = Timestamp('2012-10-01')
- self.assertEqual(result[0], exp)
- def test_to_datetime_default(self):
- rs = to_datetime('2001')
- xp = datetime(2001, 1, 1)
- self.assertTrue(rs, xp)
- #### dayfirst is essentially broken
- #### to_datetime('01-13-2012', dayfirst=True)
- #### self.assertRaises(ValueError, to_datetime('01-13-2012', dayfirst=True))
- def test_to_datetime_on_datetime64_series(self):
- # #2699
- s = Series(date_range('1/1/2000', periods=10))
- result = to_datetime(s)
- self.assertEqual(result[0], s[0])
- def test_to_datetime_with_apply(self):
- # this is only locale tested with US/None locales
- _skip_if_has_locale()
- # GH 5195
- # with a format and coerce a single item to_datetime fails
- td = Series(['May 04', 'Jun 02', 'Dec 11'], index=[1,2,3])
- expected = pd.to_datetime(td, format='%b %y')
- result = td.apply(pd.to_datetime, format='%b %y')
- assert_series_equal(result, expected)
- td = pd.Series(['May 04', 'Jun 02', ''], index=[1,2,3])
- self.assertRaises(ValueError, lambda : pd.to_datetime(td,format='%b %y'))
- self.assertRaises(ValueError, lambda : td.apply(pd.to_datetime, format='%b %y'))
- expected = pd.to_datetime(td, format='%b %y', coerce=True)
- result = td.apply(lambda x: pd.to_datetime(x, format='%b %y', coerce=True))
- assert_series_equal(result, expected)
- def test_nat_vector_field_access(self):
- idx = DatetimeIndex(['1/1/2000', None, None, '1/4/2000'])
- fields = ['year', 'quarter', 'month', 'day', 'hour',
- 'minute', 'second', 'microsecond', 'nanosecond',
- 'week', 'dayofyear']
- for field in fields:
- result = getattr(idx, field)
- expected = [getattr(x, field) if x is not NaT else -1
- for x in idx]
- self.assert_numpy_array_equal(result, expected)
- def test_nat_scalar_field_access(self):
- fields = ['year', 'quarter', 'month', 'day', 'hour',
- 'minute', 'second', 'microsecond', 'nanosecond',
- 'week', 'dayofyear']
- for field in fields:
- result = getattr(NaT, field)
- self.assertEqual(result, -1)
- self.assertEqual(NaT.weekday(), -1)
- def test_to_datetime_types(self):
- # empty string
- result = to_datetime('')
- self.assertIs(result, NaT)
- result = to_datetime(['', ''])
- self.assertTrue(isnull(result).all())
- # ints
- result = Timestamp(0)
- expected = to_datetime(0)
- self.assertEqual(result, expected)
- # GH 3888 (strings)
- expected = to_datetime(['2012'])[0]
- result = to_datetime('2012')
- self.assertEqual(result, expected)
- ### array = ['2012','20120101','20120101 12:01:01']
- array = ['20120101','20120101 12:01:01']
- expected = list(to_datetime(array))
- result = lmap(Timestamp,array)
- tm.assert_almost_equal(result,expected)
- ### currently fails ###
- ### result = Timestamp('2012')
- ### expected = to_datetime('2012')
- ### self.assertEqual(result, expected)
- def test_to_datetime_unprocessable_input(self):
- # GH 4928
- self.assert_numpy_array_equal(
- to_datetime([1, '1']),
- np.array([1, '1'], dtype='O')
- )
- self.assertRaises(TypeError, to_datetime, [1, '1'], errors='raise')
- def test_to_datetime_other_datetime64_units(self):
- # 5/25/2012
- scalar = np.int64(1337904000000000).view('M8[us]')
- as_obj = scalar.astype('O')
- index = DatetimeIndex([scalar])
- self.assertEqual(index[0], scalar.astype('O'))
- value = Timestamp(scalar)
- self.assertEqual(value, as_obj)
- def test_to_datetime_list_of_integers(self):
- rng = date_range('1/1/2000', periods=20)
- rng = DatetimeIndex(rng.values)
- ints = list(rng.asi8)
- result = DatetimeIndex(ints)
- self.assertTrue(rng.equals(result))
- def test_to_datetime_dt64s(self):
- in_bound_dts = [
- np.datetime64('2000-01-01'),
- np.datetime64('2000-01-02'),
- ]
- for dt in in_bound_dts:
- self.assertEqual(
- pd.to_datetime(dt),
- Timestamp(dt)
- )
- oob_dts = [
- np.datetime64('1000-01-01'),
- np.datetime64('5000-01-02'),
- ]
- for dt in oob_dts:
- self.assertRaises(ValueError, pd.to_datetime, dt, errors='raise')
- self.assertRaises(ValueError, tslib.Timestamp, dt)
- self.assertIs(pd.to_datetime(dt, coerce=True), NaT)
- def test_to_datetime_array_of_dt64s(self):
- dts = [
- np.datetime64('2000-01-01'),
- np.datetime64('2000-01-02'),
- ]
- # Assuming all datetimes are in bounds, to_datetime() returns
- # an array that is equal to Timestamp() parsing
- self.assert_numpy_array_equal(
- pd.to_datetime(dts, box=False),
- np.array([Timestamp(x).asm8 for x in dts])
- )
- # A list of datetimes where the last one is out of bounds
- dts_with_oob = dts + [np.datetime64('9999-01-01')]
- self.assertRaises(
- ValueError,
- pd.to_datetime,
- dts_with_oob,
- coerce=False,
- errors='raise'
- )
- self.assert_numpy_array_equal(
- pd.to_datetime(dts_with_oob, box=False, coerce=True),
- np.array(
- [
- Timestamp(dts_with_oob[0]).asm8,
- Timestamp(dts_with_oob[1]).asm8,
- iNaT,
- ],
- dtype='M8'
- )
- )
- # With coerce=False and errors='ignore', out of bounds datetime64s
- # are converted to their .item(), which depending on the version of
- # numpy is either a python datetime.datetime or datetime.date
- self.assert_numpy_array_equal(
- pd.to_datetime(dts_with_oob, box=False, coerce=False),
- np.array(
- [dt.item() for dt in dts_with_oob],
- dtype='O'
- )
- )
- def test_index_to_datetime(self):
- idx = Index(['1/1/2000', '1/2/2000', '1/3/2000'])
- result = idx.to_datetime()
- expected = DatetimeIndex(datetools.to_datetime(idx.values))
- self.assertTrue(result.equals(expected))
- today = datetime.today()
- idx = Index([today], dtype=object)
- result = idx.to_datetime()
- expected = DatetimeIndex([today])
- self.assertTrue(result.equals(expected))
- def test_to_datetime_freq(self):
- xp = bdate_range('2000-1-1', periods=10, tz='UTC')
- rs = xp.to_datetime()
- self.assertEqual(xp.freq, rs.freq)
- self.assertEqual(xp.tzinfo, rs.tzinfo)
- def test_range_misspecified(self):
- # GH #1095
- self.assertRaises(ValueError, date_range, '1/1/2000')
- self.assertRaises(ValueError, date_range, end='1/1/2000')
- self.assertRaises(ValueError, date_range, periods=10)
- self.assertRaises(ValueError, date_range, '1/1/2000', freq='H')
- self.assertRaises(ValueError, date_range, end='1/1/2000', freq='H')
- self.assertRaises(ValueError, date_range, periods=10, freq='H')
- def test_reasonable_keyerror(self):
- # GH #1062
- index = DatetimeIndex(['1/3/2000'])
- try:
- index.get_loc('1/1/2000')
- except KeyError as e:
- self.assertIn('2000', str(e))
- def test_reindex_with_datetimes(self):
- rng = date_range('1/1/2000', periods=20)
- ts = Series(np.random.randn(20), index=rng)
- result = ts.reindex(list(ts.index[5:10]))
- expected = ts[5:10]
- tm.assert_series_equal(result, expected)
- result = ts[list(ts.index[5:10])]
- tm.assert_series_equal(result, expected)
- def test_promote_datetime_date(self):
- rng = date_range('1/1/2000', periods=20)
- ts = Series(np.random.randn(20), index=rng)
- ts_slice = ts[5:]
- ts2 = ts_slice.copy()
- ts2.index = [x.date() for x in ts2.index]
- result = ts + ts2
- result2 = ts2 + ts
- expected = ts + ts[5:]
- assert_series_equal(result, expected)
- assert_series_equal(result2, expected)
- # test asfreq
- result = ts2.asfreq('4H', method='ffill')
- expected = ts[5:].asfreq('4H', method='ffill')
- assert_series_equal(result, expected)
- result = rng.get_indexer(ts2.index)
- expected = rng.get_indexer(ts_slice.index)
- self.assert_numpy_array_equal(result, expected)
- def test_asfreq_normalize(self):
- rng = date_range('1/1/2000 09:30', periods=20)
- norm = date_range('1/1/2000', periods=20)
- vals = np.random.randn(20)
- ts = Series(vals, index=rng)
- result = ts.asfreq('D', normalize=True)
- norm = date_range('1/1/2000', periods=20)
- expected = Series(vals, index=norm)
- assert_series_equal(result, expected)
- vals = np.random.randn(20, 3)
- ts = DataFrame(vals, index=rng)
- result = ts.asfreq('D', normalize=True)
- expected = DataFrame(vals, index=norm)
- assert_frame_equal(result, expected)
- def test_date_range_gen_error(self):
- rng = date_range('1/1/2000 00:00', '1/1/2000 00:18', freq='5min')
- self.assertEqual(len(rng), 4)
- def test_first_subset(self):
- ts = _simple_ts('1/1/2000', '1/1/2010', freq='12h')
- result = ts.first('10d')
- self.assertEqual(len(result), 20)
- ts = _simple_ts('1/1/2000', '1/1/2010')
- result = ts.first('10d')
- self.assertEqual(len(result), 10)
- result = ts.first('3M')
- expected = ts[:'3/31/2000']
- assert_series_equal(result, expected)
- result = ts.first('21D')
- expected = ts[:21]
- assert_series_equal(result, expected)
- result = ts[:0].first('3M')
- assert_series_equal(result, ts[:0])
- def test_last_subset(self):
- ts = _simple_ts('1/1/2000', '1/1/2010', freq='12h')
- result = ts.last('10d')
- self.assertEqual(len(result), 20)
- ts = _simple_ts('1/1/2000', '1/1/2010')
- result = ts.last('10d')
- self.assertEqual(len(result), 10)
- result = ts.last('21D')
- expected = ts['12/12/2009':]
- assert_series_equal(result, expected)
- result = ts.last('21D')
- expected = ts[-21:]
- assert_series_equal(result, expected)
- result = ts[:0].last('3M')
- assert_series_equal(result, ts[:0])
- def test_add_offset(self):
- rng = date_range('1/1/2000', '2/1/2000')
- result = rng + offsets.Hour(2)
- expected = date_range('1/1/2000 02:00', '2/1/2000 02:00')
- self.assertTrue(result.equals(expected))
- def test_format_pre_1900_dates(self):
- rng = date_range('1/1/1850', '1/1/1950', freq='A-DEC')
- rng.format()
- ts = Series(1, index=rng)
- repr(ts)
- def test_repeat(self):
- rng = date_range('1/1/2000', '1/1/2001')
- result = rng.repeat(5)
- self.assertIsNone(result.freq)
- self.assertEqual(len(result), 5 * len(rng))
- def test_at_time(self):
- rng = date_range('1/1/2000', '1/5/2000', freq='5min')
- ts = Series(np.random.randn(len(rng)), index=rng)
- rs = ts.at_time(rng[1])
- self.assertTrue((rs.index.hour == rng[1].hour).all())
- self.assertTrue((rs.index.minute == rng[1].minute).all())
- self.assertTrue((rs.index.second == rng[1].second).all())
- result = ts.at_time('9:30')
- expected = ts.at_time(time(9, 30))
- assert_series_equal(result, expected)
- df = DataFrame(np.random.randn(len(rng), 3), index=rng)
- result = ts[time(9, 30)]
- result_df = df.ix[time(9, 30)]
- expected = ts[(rng.hour == 9) & (rng.minute == 30)]
- exp_df = df[(rng.hour == 9) & (rng.minute == 30)]
- # expected.index = date_range('1/1/2000', '1/4/2000')
- assert_series_equal(result, expected)
- tm.assert_frame_equal(result_df, exp_df)
- chunk = df.ix['1/4/2000':]
- result = chunk.ix[time(9, 30)]
- expected = result_df[-1:]
- tm.assert_frame_equal(result, expected)
- # midnight, everything
- rng = date_range('1/1/2000', '1/31/2000')
- ts = Series(np.random.randn(len(rng)), index=rng)
- result = ts.at_time(time(0, 0))
- assert_series_equal(result, ts)
- # time doesn't exist
- rng = date_range('1/1/2012', freq='23Min', periods=384)
- ts = Series(np.random.randn(len(rng)), rng)
- rs = ts.at_time('16:00')
- self.assertEqual(len(rs), 0)
- def test_at_time_frame(self):
- rng = date_range('1/1/2000', '1/5/2000', freq='5min')
- ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
- rs = ts.at_time(rng[1])
- self.assertTrue((rs.index.hour == rng[1].hour).all())
- self.assertTrue((rs.index.minute == rng[1].minute).all())
- self.assertTrue((rs.index.second == rng[1].second).all())
- result = ts.at_time('9:30')
- expected = ts.at_time(time(9, 30))
- assert_frame_equal(result, expected)
- result = ts.ix[time(9, 30)]
- expected = ts.ix[(rng.hour == 9) & (rng.minute == 30)]
- assert_frame_equal(result, expected)
- # midnight, everything
- rng = date_range('1/1/2000', '1/31/2000')
- ts = DataFrame(np.random.randn(len(rng), 3), index=rng)
- result = ts.at_time(time(0, 0))
- assert_frame_equal(result, ts)
- # time doesn't exist
- rng = date_range('1/1/2012', freq='23Min', periods=384)
- ts = DataFrame(np.random.randn(len(rng), 2), rng)
- rs = ts.at_time('16:00')
- self.assertEqual(len(rs), 0)
- def test_between_time(self):
- rng = date_range('1/1/2000', '1/5/2000', freq='5min')
- ts = Series(np.random.randn(len(rng)), index=rng)
- stime = time(0, 0)
- etime = time(1, 0)
- close_open = product([True, False], [True, False])
- for inc_start, inc_end in close_open:
- filtered = ts.between_time(stime, etime, inc_start, inc_end)
- exp_len = 13 * 4 + 1
- if not inc_start:
- exp_len -= 5
- if not inc_end:
- exp_len -= 4
- self.assertEqual(len(filtered), exp_len)
- for rs in filtered.index:
- t = rs.time()
- if inc_start:
- self.assertTrue(t >= stime)
- else:
- self.assertTrue(t > stime)
- if inc_end:
- self.assertTrue(t <= etime)
- else:
- self.assertTrue(t < etime)
- result = ts.between_time('00:00', '01:00')
- expected = ts.between_time(stime, etime)
- assert_series_equal(result, expected)
- # across midnight
- rng = date_range('1/1/2000', '1/5/2000', freq='5min')
- ts = Series(np.random.randn(len(rng)), index=rng)
- stime = time(22, 0)
- etime = time(9, 0)
- close_open = product([True, False], [True, False])
- for inc_start, inc_end in close_open:
- filtered = ts.between_time(stime, etime, inc_start, inc_end)
- exp_len = (12 * 11 + 1) * 4 + 1
- if not inc_start:
- exp_len -= 4
- if not inc_end:
- exp_len -= 4
- self.assertEqual(len(filtered), exp_len)
- for rs in filtered.index:
- t = rs.time()
- if inc_start:
- self.assertTrue((t >= stime) or (t <= etime))
- else:
- self.assertTrue((t > stime) or (t <= etime))
- if inc_end:
- self.assertTrue((t <= etime) or (t >= stime))
- else:
- self.assertTrue((t < etime) or (t >= stime))
- def test_between_time_frame(self):
- rng = date_range('1/1/2000', '1/5/2000', freq='5min')
- ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
- stime = time(0, 0)
- etime = time(1, 0)
- close_open = product([True, False], [True, False])
- for inc_start, inc_end in close_open:
- filtered = ts.between_time(stime, etime, inc_start, inc_end)
- exp_len = 13 * 4 + 1
- if not inc_start:
- exp_len -= 5
- if not inc_end:
- exp_len -= 4
- self.assertEqual(len(filtered), exp_len)
- for rs in filtered.index:
- t = rs.time()
- if inc_start:
- self.assertTrue(t >= stime)
- else:
- self.assertTrue(t > stime)
- if inc_end:
- self.assertTrue(t <= etime)
- else:
- self.assertTrue(t < etime)
- result = ts.between_time('00:00', '01:00')
- expected = ts.between_time(stime, etime)
- assert_frame_equal(result, expected)
- # across midnight
- rng = date_range('1/1/2000', '1/5/2000', freq='5min')
- ts = DataFrame(np.random.randn(len(rng), 2), index=rng)
- stime = time(22, 0)
- etime = time(9, 0)
- close_open = product([True, False], [True, False])
- for inc_start, inc_end in close_open:
- filtered = ts.between_time(stime, etime, inc_start, inc_end)
- exp_len = (12 * 11 + 1) * 4 + 1
- if not inc_start:
- exp_len -= 4
- if not inc_end:
- exp_len -= 4
- self.assertEqual(len(filtered), exp_len)
- for rs in filtered.index:
- t = rs.time()
- if inc_start:
- self.assertTrue((t >= stime) or (t <= etime))
- else:
- self.assertTrue((t > stime) or (t <= etime))
- if inc_end:
- …
Large files files are truncated, but you can click here to view the full file