diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__init__.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_arithmetic.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_arithmetic.py new file mode 100644 index 0000000000000000000000000000000000000000..3a7c418b27de6ddf79c87a813d43f21369ecc367 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_arithmetic.py @@ -0,0 +1,56 @@ +# Arithmetic tests specific to DatetimeIndex are generally about `freq` +# rentention or inference. Other arithmetic tests belong in +# tests/arithmetic/test_datetime64.py +import pytest + +from pandas import ( + Timedelta, + TimedeltaIndex, + Timestamp, + date_range, + timedelta_range, +) +import pandas._testing as tm + + +class TestDatetimeIndexArithmetic: + def test_add_timedelta_preserves_freq(self): + # GH#37295 should hold for any DTI with freq=None or Tick freq + tz = "Canada/Eastern" + dti = date_range( + start=Timestamp("2019-03-26 00:00:00-0400", tz=tz), + end=Timestamp("2020-10-17 00:00:00-0400", tz=tz), + freq="D", + ) + result = dti + Timedelta(days=1) + assert result.freq == dti.freq + + def test_sub_datetime_preserves_freq(self, tz_naive_fixture): + # GH#48818 + dti = date_range("2016-01-01", periods=12, tz=tz_naive_fixture) + + res = dti - dti[0] + expected = timedelta_range("0 Days", "11 Days") + tm.assert_index_equal(res, expected) + assert res.freq == expected.freq + + @pytest.mark.xfail( + reason="The inherited freq is incorrect bc dti.freq is incorrect " + "https://github.com/pandas-dev/pandas/pull/48818/files#r982793461" + ) + def test_sub_datetime_preserves_freq_across_dst(self): + # GH#48818 + ts = Timestamp("2016-03-11", tz="US/Pacific") + dti = date_range(ts, periods=4) + + res = dti - dti[0] + expected = TimedeltaIndex( + [ + Timedelta(days=0), + Timedelta(days=1), + Timedelta(days=2), + Timedelta(days=2, hours=23), + ] + ) + tm.assert_index_equal(res, expected) + assert res.freq == expected.freq diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_constructors.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_constructors.py new file mode 100644 index 0000000000000000000000000000000000000000..2abbcf6688833ff05600d8e360711c8ff973a343 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_constructors.py @@ -0,0 +1,1204 @@ +from __future__ import annotations + +from datetime import ( + datetime, + timedelta, + timezone, +) +from functools import partial +from operator import attrgetter + +import dateutil +import dateutil.tz +from dateutil.tz import gettz +import numpy as np +import pytest +import pytz + +from pandas._libs.tslibs import ( + OutOfBoundsDatetime, + astype_overflowsafe, + timezones, +) + +import pandas as pd +from pandas import ( + DatetimeIndex, + Index, + Timestamp, + date_range, + offsets, + to_datetime, +) +import pandas._testing as tm +from pandas.core.arrays import period_array + + +class TestDatetimeIndex: + def test_closed_deprecated(self): + # GH#52628 + msg = "The 'closed' keyword" + with tm.assert_produces_warning(FutureWarning, match=msg): + DatetimeIndex([], closed=True) + + def test_normalize_deprecated(self): + # 
GH#52628 + msg = "The 'normalize' keyword" + with tm.assert_produces_warning(FutureWarning, match=msg): + DatetimeIndex([], normalize=True) + + def test_from_dt64_unsupported_unit(self): + # GH#49292 + val = np.datetime64(1, "D") + result = DatetimeIndex([val], tz="US/Pacific") + + expected = DatetimeIndex([val.astype("M8[s]")], tz="US/Pacific") + tm.assert_index_equal(result, expected) + + def test_explicit_tz_none(self): + # GH#48659 + dti = date_range("2016-01-01", periods=10, tz="UTC") + + msg = "Passed data is timezone-aware, incompatible with 'tz=None'" + with pytest.raises(ValueError, match=msg): + DatetimeIndex(dti, tz=None) + + with pytest.raises(ValueError, match=msg): + DatetimeIndex(np.array(dti), tz=None) + + msg = "Cannot pass both a timezone-aware dtype and tz=None" + with pytest.raises(ValueError, match=msg): + DatetimeIndex([], dtype="M8[ns, UTC]", tz=None) + + def test_freq_validation_with_nat(self): + # GH#11587 make sure we get a useful error message when generate_range + # raises + msg = ( + "Inferred frequency None from passed values does not conform " + "to passed frequency D" + ) + with pytest.raises(ValueError, match=msg): + DatetimeIndex([pd.NaT, Timestamp("2011-01-01")], freq="D") + with pytest.raises(ValueError, match=msg): + DatetimeIndex([pd.NaT, Timestamp("2011-01-01")._value], freq="D") + + # TODO: better place for tests shared by DTI/TDI? + @pytest.mark.parametrize( + "index", + [ + date_range("2016-01-01", periods=5, tz="US/Pacific"), + pd.timedelta_range("1 Day", periods=5), + ], + ) + def test_shallow_copy_inherits_array_freq(self, index): + # If we pass a DTA/TDA to shallow_copy and dont specify a freq, + # we should inherit the array's freq, not our own. + array = index._data + + arr = array[[0, 3, 2, 4, 1]] + assert arr.freq is None + + result = index._shallow_copy(arr) + assert result.freq is None + + def test_categorical_preserves_tz(self): + # GH#18664 retain tz when going DTI-->Categorical-->DTI + dti = DatetimeIndex( + [pd.NaT, "2015-01-01", "1999-04-06 15:14:13", "2015-01-01"], tz="US/Eastern" + ) + + for dtobj in [dti, dti._data]: + # works for DatetimeIndex or DatetimeArray + + ci = pd.CategoricalIndex(dtobj) + carr = pd.Categorical(dtobj) + cser = pd.Series(ci) + + for obj in [ci, carr, cser]: + result = DatetimeIndex(obj) + tm.assert_index_equal(result, dti) + + def test_dti_with_period_data_raises(self): + # GH#23675 + data = pd.PeriodIndex(["2016Q1", "2016Q2"], freq="Q") + + with pytest.raises(TypeError, match="PeriodDtype data is invalid"): + DatetimeIndex(data) + + with pytest.raises(TypeError, match="PeriodDtype data is invalid"): + to_datetime(data) + + with pytest.raises(TypeError, match="PeriodDtype data is invalid"): + DatetimeIndex(period_array(data)) + + with pytest.raises(TypeError, match="PeriodDtype data is invalid"): + to_datetime(period_array(data)) + + def test_dti_with_timedelta64_data_raises(self): + # GH#23675 deprecated, enforrced in GH#29794 + data = np.array([0], dtype="m8[ns]") + msg = r"timedelta64\[ns\] cannot be converted to datetime64" + with pytest.raises(TypeError, match=msg): + DatetimeIndex(data) + + with pytest.raises(TypeError, match=msg): + to_datetime(data) + + with pytest.raises(TypeError, match=msg): + DatetimeIndex(pd.TimedeltaIndex(data)) + + with pytest.raises(TypeError, match=msg): + to_datetime(pd.TimedeltaIndex(data)) + + def test_constructor_from_sparse_array(self): + # https://github.com/pandas-dev/pandas/issues/35843 + values = [ + Timestamp("2012-05-01T01:00:00.000000"), + 
Timestamp("2016-05-01T01:00:00.000000"), + ] + arr = pd.arrays.SparseArray(values) + result = Index(arr) + assert type(result) is Index + assert result.dtype == arr.dtype + + def test_construction_caching(self): + df = pd.DataFrame( + { + "dt": date_range("20130101", periods=3), + "dttz": date_range("20130101", periods=3, tz="US/Eastern"), + "dt_with_null": [ + Timestamp("20130101"), + pd.NaT, + Timestamp("20130103"), + ], + "dtns": date_range("20130101", periods=3, freq="ns"), + } + ) + assert df.dttz.dtype.tz.zone == "US/Eastern" + + @pytest.mark.parametrize( + "kwargs", + [{"tz": "dtype.tz"}, {"dtype": "dtype"}, {"dtype": "dtype", "tz": "dtype.tz"}], + ) + def test_construction_with_alt(self, kwargs, tz_aware_fixture): + tz = tz_aware_fixture + i = date_range("20130101", periods=5, freq="h", tz=tz) + kwargs = {key: attrgetter(val)(i) for key, val in kwargs.items()} + result = DatetimeIndex(i, **kwargs) + tm.assert_index_equal(i, result) + + @pytest.mark.parametrize( + "kwargs", + [{"tz": "dtype.tz"}, {"dtype": "dtype"}, {"dtype": "dtype", "tz": "dtype.tz"}], + ) + def test_construction_with_alt_tz_localize(self, kwargs, tz_aware_fixture): + tz = tz_aware_fixture + i = date_range("20130101", periods=5, freq="h", tz=tz) + i = i._with_freq(None) + kwargs = {key: attrgetter(val)(i) for key, val in kwargs.items()} + + if "tz" in kwargs: + result = DatetimeIndex(i.asi8, tz="UTC").tz_convert(kwargs["tz"]) + + expected = DatetimeIndex(i, **kwargs) + tm.assert_index_equal(result, expected) + + # localize into the provided tz + i2 = DatetimeIndex(i.tz_localize(None).asi8, tz="UTC") + expected = i.tz_localize(None).tz_localize("UTC") + tm.assert_index_equal(i2, expected) + + # incompat tz/dtype + msg = "cannot supply both a tz and a dtype with a tz" + with pytest.raises(ValueError, match=msg): + DatetimeIndex(i.tz_localize(None).asi8, dtype=i.dtype, tz="US/Pacific") + + def test_construction_index_with_mixed_timezones(self): + # gh-11488: no tz results in DatetimeIndex + result = Index([Timestamp("2011-01-01"), Timestamp("2011-01-02")], name="idx") + exp = DatetimeIndex( + [Timestamp("2011-01-01"), Timestamp("2011-01-02")], name="idx" + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is None + + # same tz results in DatetimeIndex + result = Index( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="Asia/Tokyo"), + ], + name="idx", + ) + exp = DatetimeIndex( + [Timestamp("2011-01-01 10:00"), Timestamp("2011-01-02 10:00")], + tz="Asia/Tokyo", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is not None + assert result.tz == exp.tz + + # same tz results in DatetimeIndex (DST) + result = Index( + [ + Timestamp("2011-01-01 10:00", tz="US/Eastern"), + Timestamp("2011-08-01 10:00", tz="US/Eastern"), + ], + name="idx", + ) + exp = DatetimeIndex( + [Timestamp("2011-01-01 10:00"), Timestamp("2011-08-01 10:00")], + tz="US/Eastern", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is not None + assert result.tz == exp.tz + + # Different tz results in Index(dtype=object) + result = Index( + [ + Timestamp("2011-01-01 10:00"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + name="idx", + ) + exp = Index( + [ + Timestamp("2011-01-01 10:00"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + dtype="object", + name="idx", + ) + 
tm.assert_index_equal(result, exp, exact=True) + assert not isinstance(result, DatetimeIndex) + + result = Index( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + name="idx", + ) + exp = Index( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + dtype="object", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert not isinstance(result, DatetimeIndex) + + msg = "DatetimeIndex has mixed timezones" + msg_depr = "parsing datetimes with mixed time zones will raise an error" + with pytest.raises(TypeError, match=msg): + with tm.assert_produces_warning(FutureWarning, match=msg_depr): + DatetimeIndex(["2013-11-02 22:00-05:00", "2013-11-03 22:00-06:00"]) + + # length = 1 + result = Index([Timestamp("2011-01-01")], name="idx") + exp = DatetimeIndex([Timestamp("2011-01-01")], name="idx") + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is None + + # length = 1 with tz + result = Index([Timestamp("2011-01-01 10:00", tz="Asia/Tokyo")], name="idx") + exp = DatetimeIndex( + [Timestamp("2011-01-01 10:00")], tz="Asia/Tokyo", name="idx" + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is not None + assert result.tz == exp.tz + + def test_construction_index_with_mixed_timezones_with_NaT(self): + # see gh-11488 + result = Index( + [pd.NaT, Timestamp("2011-01-01"), pd.NaT, Timestamp("2011-01-02")], + name="idx", + ) + exp = DatetimeIndex( + [pd.NaT, Timestamp("2011-01-01"), pd.NaT, Timestamp("2011-01-02")], + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is None + + # Same tz results in DatetimeIndex + result = Index( + [ + pd.NaT, + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + pd.NaT, + Timestamp("2011-01-02 10:00", tz="Asia/Tokyo"), + ], + name="idx", + ) + exp = DatetimeIndex( + [ + pd.NaT, + Timestamp("2011-01-01 10:00"), + pd.NaT, + Timestamp("2011-01-02 10:00"), + ], + tz="Asia/Tokyo", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is not None + assert result.tz == exp.tz + + # same tz results in DatetimeIndex (DST) + result = Index( + [ + Timestamp("2011-01-01 10:00", tz="US/Eastern"), + pd.NaT, + Timestamp("2011-08-01 10:00", tz="US/Eastern"), + ], + name="idx", + ) + exp = DatetimeIndex( + [Timestamp("2011-01-01 10:00"), pd.NaT, Timestamp("2011-08-01 10:00")], + tz="US/Eastern", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is not None + assert result.tz == exp.tz + + # different tz results in Index(dtype=object) + result = Index( + [ + pd.NaT, + Timestamp("2011-01-01 10:00"), + pd.NaT, + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + name="idx", + ) + exp = Index( + [ + pd.NaT, + Timestamp("2011-01-01 10:00"), + pd.NaT, + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + dtype="object", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert not isinstance(result, DatetimeIndex) + + result = Index( + [ + pd.NaT, + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + pd.NaT, + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + name="idx", + ) + exp = Index( + [ + pd.NaT, + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + pd.NaT, + Timestamp("2011-01-02 
10:00", tz="US/Eastern"), + ], + dtype="object", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert not isinstance(result, DatetimeIndex) + + # all NaT + result = Index([pd.NaT, pd.NaT], name="idx") + exp = DatetimeIndex([pd.NaT, pd.NaT], name="idx") + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + assert result.tz is None + + def test_construction_dti_with_mixed_timezones(self): + # GH 11488 (not changed, added explicit tests) + + # no tz results in DatetimeIndex + result = DatetimeIndex( + [Timestamp("2011-01-01"), Timestamp("2011-01-02")], name="idx" + ) + exp = DatetimeIndex( + [Timestamp("2011-01-01"), Timestamp("2011-01-02")], name="idx" + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + + # same tz results in DatetimeIndex + result = DatetimeIndex( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="Asia/Tokyo"), + ], + name="idx", + ) + exp = DatetimeIndex( + [Timestamp("2011-01-01 10:00"), Timestamp("2011-01-02 10:00")], + tz="Asia/Tokyo", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + + # same tz results in DatetimeIndex (DST) + result = DatetimeIndex( + [ + Timestamp("2011-01-01 10:00", tz="US/Eastern"), + Timestamp("2011-08-01 10:00", tz="US/Eastern"), + ], + name="idx", + ) + exp = DatetimeIndex( + [Timestamp("2011-01-01 10:00"), Timestamp("2011-08-01 10:00")], + tz="US/Eastern", + name="idx", + ) + tm.assert_index_equal(result, exp, exact=True) + assert isinstance(result, DatetimeIndex) + + # tz mismatch affecting to tz-aware raises TypeError/ValueError + + msg = "cannot be converted to datetime64" + with pytest.raises(ValueError, match=msg): + DatetimeIndex( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + name="idx", + ) + + # pre-2.0 this raised bc of awareness mismatch. in 2.0 with a tz# + # specified we behave as if this was called pointwise, so + # the naive Timestamp is treated as a wall time. + dti = DatetimeIndex( + [ + Timestamp("2011-01-01 10:00"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + tz="Asia/Tokyo", + name="idx", + ) + expected = DatetimeIndex( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="US/Eastern").tz_convert("Asia/Tokyo"), + ], + tz="Asia/Tokyo", + name="idx", + ) + tm.assert_index_equal(dti, expected) + + # pre-2.0 mixed-tz scalars raised even if a tz/dtype was specified. 
+ # as of 2.0 we successfully return the requested tz/dtype + dti = DatetimeIndex( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + tz="US/Eastern", + name="idx", + ) + expected = DatetimeIndex( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo").tz_convert("US/Eastern"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + tz="US/Eastern", + name="idx", + ) + tm.assert_index_equal(dti, expected) + + # same thing but pass dtype instead of tz + dti = DatetimeIndex( + [ + Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"), + Timestamp("2011-01-02 10:00", tz="US/Eastern"), + ], + dtype="M8[ns, US/Eastern]", + name="idx", + ) + tm.assert_index_equal(dti, expected) + + def test_construction_base_constructor(self): + arr = [Timestamp("2011-01-01"), pd.NaT, Timestamp("2011-01-03")] + tm.assert_index_equal(Index(arr), DatetimeIndex(arr)) + tm.assert_index_equal(Index(np.array(arr)), DatetimeIndex(np.array(arr))) + + arr = [np.nan, pd.NaT, Timestamp("2011-01-03")] + tm.assert_index_equal(Index(arr), DatetimeIndex(arr)) + tm.assert_index_equal(Index(np.array(arr)), DatetimeIndex(np.array(arr))) + + def test_construction_outofbounds(self): + # GH 13663 + dates = [ + datetime(3000, 1, 1), + datetime(4000, 1, 1), + datetime(5000, 1, 1), + datetime(6000, 1, 1), + ] + exp = Index(dates, dtype=object) + # coerces to object + tm.assert_index_equal(Index(dates), exp) + + msg = "^Out of bounds nanosecond timestamp: 3000-01-01 00:00:00, at position 0$" + with pytest.raises(OutOfBoundsDatetime, match=msg): + # can't create DatetimeIndex + DatetimeIndex(dates) + + @pytest.mark.parametrize("data", [["1400-01-01"], [datetime(1400, 1, 1)]]) + def test_dti_date_out_of_range(self, data): + # GH#1475 + msg = ( + "^Out of bounds nanosecond timestamp: " + "1400-01-01( 00:00:00)?, at position 0$" + ) + with pytest.raises(OutOfBoundsDatetime, match=msg): + DatetimeIndex(data) + + def test_construction_with_ndarray(self): + # GH 5152 + dates = [datetime(2013, 10, 7), datetime(2013, 10, 8), datetime(2013, 10, 9)] + data = DatetimeIndex(dates, freq=offsets.BDay()).values + result = DatetimeIndex(data, freq=offsets.BDay()) + expected = DatetimeIndex(["2013-10-07", "2013-10-08", "2013-10-09"], freq="B") + tm.assert_index_equal(result, expected) + + def test_integer_values_and_tz_interpreted_as_utc(self): + # GH-24559 + val = np.datetime64("2000-01-01 00:00:00", "ns") + values = np.array([val.view("i8")]) + + result = DatetimeIndex(values).tz_localize("US/Central") + + expected = DatetimeIndex(["2000-01-01T00:00:00"], dtype="M8[ns, US/Central]") + tm.assert_index_equal(result, expected) + + # but UTC is *not* deprecated. 
+ with tm.assert_produces_warning(None): + result = DatetimeIndex(values, tz="UTC") + expected = DatetimeIndex(["2000-01-01T00:00:00"], dtype="M8[ns, UTC]") + tm.assert_index_equal(result, expected) + + def test_constructor_coverage(self): + msg = r"DatetimeIndex\(\.\.\.\) must be called with a collection" + with pytest.raises(TypeError, match=msg): + DatetimeIndex("1/1/2000") + + # generator expression + gen = (datetime(2000, 1, 1) + timedelta(i) for i in range(10)) + result = DatetimeIndex(gen) + expected = DatetimeIndex( + [datetime(2000, 1, 1) + timedelta(i) for i in range(10)] + ) + tm.assert_index_equal(result, expected) + + # NumPy string array + strings = np.array(["2000-01-01", "2000-01-02", "2000-01-03"]) + result = DatetimeIndex(strings) + expected = DatetimeIndex(strings.astype("O")) + tm.assert_index_equal(result, expected) + + from_ints = DatetimeIndex(expected.asi8) + tm.assert_index_equal(from_ints, expected) + + # string with NaT + strings = np.array(["2000-01-01", "2000-01-02", "NaT"]) + result = DatetimeIndex(strings) + expected = DatetimeIndex(strings.astype("O")) + tm.assert_index_equal(result, expected) + + from_ints = DatetimeIndex(expected.asi8) + tm.assert_index_equal(from_ints, expected) + + # non-conforming + msg = ( + "Inferred frequency None from passed values does not conform " + "to passed frequency D" + ) + with pytest.raises(ValueError, match=msg): + DatetimeIndex(["2000-01-01", "2000-01-02", "2000-01-04"], freq="D") + + @pytest.mark.parametrize("freq", ["YS", "W-SUN"]) + def test_constructor_datetime64_tzformat(self, freq): + # see GH#6572: ISO 8601 format results in stdlib timezone object + idx = date_range( + "2013-01-01T00:00:00-05:00", "2016-01-01T23:59:59-05:00", freq=freq + ) + expected = date_range( + "2013-01-01T00:00:00", + "2016-01-01T23:59:59", + freq=freq, + tz=timezone(timedelta(minutes=-300)), + ) + tm.assert_index_equal(idx, expected) + # Unable to use `US/Eastern` because of DST + expected_i8 = date_range( + "2013-01-01T00:00:00", "2016-01-01T23:59:59", freq=freq, tz="America/Lima" + ) + tm.assert_numpy_array_equal(idx.asi8, expected_i8.asi8) + + idx = date_range( + "2013-01-01T00:00:00+09:00", "2016-01-01T23:59:59+09:00", freq=freq + ) + expected = date_range( + "2013-01-01T00:00:00", + "2016-01-01T23:59:59", + freq=freq, + tz=timezone(timedelta(minutes=540)), + ) + tm.assert_index_equal(idx, expected) + expected_i8 = date_range( + "2013-01-01T00:00:00", "2016-01-01T23:59:59", freq=freq, tz="Asia/Tokyo" + ) + tm.assert_numpy_array_equal(idx.asi8, expected_i8.asi8) + + # Non ISO 8601 format results in dateutil.tz.tzoffset + idx = date_range("2013/1/1 0:00:00-5:00", "2016/1/1 23:59:59-5:00", freq=freq) + expected = date_range( + "2013-01-01T00:00:00", + "2016-01-01T23:59:59", + freq=freq, + tz=timezone(timedelta(minutes=-300)), + ) + tm.assert_index_equal(idx, expected) + # Unable to use `US/Eastern` because of DST + expected_i8 = date_range( + "2013-01-01T00:00:00", "2016-01-01T23:59:59", freq=freq, tz="America/Lima" + ) + tm.assert_numpy_array_equal(idx.asi8, expected_i8.asi8) + + idx = date_range("2013/1/1 0:00:00+9:00", "2016/1/1 23:59:59+09:00", freq=freq) + expected = date_range( + "2013-01-01T00:00:00", + "2016-01-01T23:59:59", + freq=freq, + tz=timezone(timedelta(minutes=540)), + ) + tm.assert_index_equal(idx, expected) + expected_i8 = date_range( + "2013-01-01T00:00:00", "2016-01-01T23:59:59", freq=freq, tz="Asia/Tokyo" + ) + tm.assert_numpy_array_equal(idx.asi8, expected_i8.asi8) + + def test_constructor_dtype(self): + # passing a 
dtype with a tz should localize + idx = DatetimeIndex( + ["2013-01-01", "2013-01-02"], dtype="datetime64[ns, US/Eastern]" + ) + expected = ( + DatetimeIndex(["2013-01-01", "2013-01-02"]) + .as_unit("ns") + .tz_localize("US/Eastern") + ) + tm.assert_index_equal(idx, expected) + + idx = DatetimeIndex(["2013-01-01", "2013-01-02"], tz="US/Eastern").as_unit("ns") + tm.assert_index_equal(idx, expected) + + def test_constructor_dtype_tz_mismatch_raises(self): + # if we already have a tz and its not the same, then raise + idx = DatetimeIndex( + ["2013-01-01", "2013-01-02"], dtype="datetime64[ns, US/Eastern]" + ) + + msg = ( + "cannot supply both a tz and a timezone-naive dtype " + r"\(i\.e\. datetime64\[ns\]\)" + ) + with pytest.raises(ValueError, match=msg): + DatetimeIndex(idx, dtype="datetime64[ns]") + + # this is effectively trying to convert tz's + msg = "data is already tz-aware US/Eastern, unable to set specified tz: CET" + with pytest.raises(TypeError, match=msg): + DatetimeIndex(idx, dtype="datetime64[ns, CET]") + msg = "cannot supply both a tz and a dtype with a tz" + with pytest.raises(ValueError, match=msg): + DatetimeIndex(idx, tz="CET", dtype="datetime64[ns, US/Eastern]") + + result = DatetimeIndex(idx, dtype="datetime64[ns, US/Eastern]") + tm.assert_index_equal(idx, result) + + @pytest.mark.parametrize("dtype", [object, np.int32, np.int64]) + def test_constructor_invalid_dtype_raises(self, dtype): + # GH 23986 + msg = "Unexpected value for 'dtype'" + with pytest.raises(ValueError, match=msg): + DatetimeIndex([1, 2], dtype=dtype) + + def test_000constructor_resolution(self): + # 2252 + t1 = Timestamp((1352934390 * 1000000000) + 1000000 + 1000 + 1) + idx = DatetimeIndex([t1]) + + assert idx.nanosecond[0] == t1.nanosecond + + def test_disallow_setting_tz(self): + # GH 3746 + dti = DatetimeIndex(["2010"], tz="UTC") + msg = "Cannot directly set timezone" + with pytest.raises(AttributeError, match=msg): + dti.tz = pytz.timezone("US/Pacific") + + @pytest.mark.parametrize( + "tz", + [ + None, + "America/Los_Angeles", + pytz.timezone("America/Los_Angeles"), + Timestamp("2000", tz="America/Los_Angeles").tz, + ], + ) + def test_constructor_start_end_with_tz(self, tz): + # GH 18595 + start = Timestamp("2013-01-01 06:00:00", tz="America/Los_Angeles") + end = Timestamp("2013-01-02 06:00:00", tz="America/Los_Angeles") + result = date_range(freq="D", start=start, end=end, tz=tz) + expected = DatetimeIndex( + ["2013-01-01 06:00:00", "2013-01-02 06:00:00"], + dtype="M8[ns, America/Los_Angeles]", + freq="D", + ) + tm.assert_index_equal(result, expected) + # Especially assert that the timezone is consistent for pytz + assert pytz.timezone("America/Los_Angeles") is result.tz + + @pytest.mark.parametrize("tz", ["US/Pacific", "US/Eastern", "Asia/Tokyo"]) + def test_constructor_with_non_normalized_pytz(self, tz): + # GH 18595 + non_norm_tz = Timestamp("2010", tz=tz).tz + result = DatetimeIndex(["2010"], tz=non_norm_tz) + assert pytz.timezone(tz) is result.tz + + def test_constructor_timestamp_near_dst(self): + # GH 20854 + ts = [ + Timestamp("2016-10-30 03:00:00+0300", tz="Europe/Helsinki"), + Timestamp("2016-10-30 03:00:00+0200", tz="Europe/Helsinki"), + ] + result = DatetimeIndex(ts) + expected = DatetimeIndex([ts[0].to_pydatetime(), ts[1].to_pydatetime()]) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize("klass", [Index, DatetimeIndex]) + @pytest.mark.parametrize("box", [np.array, partial(np.array, dtype=object), list]) + @pytest.mark.parametrize( + "tz, dtype", + [("US/Pacific", 
"datetime64[ns, US/Pacific]"), (None, "datetime64[ns]")], + ) + def test_constructor_with_int_tz(self, klass, box, tz, dtype): + # GH 20997, 20964 + ts = Timestamp("2018-01-01", tz=tz).as_unit("ns") + result = klass(box([ts._value]), dtype=dtype) + expected = klass([ts]) + assert result == expected + + def test_construction_int_rountrip(self, tz_naive_fixture): + # GH 12619, GH#24559 + tz = tz_naive_fixture + + result = 1293858000000000000 + expected = DatetimeIndex([result], tz=tz).asi8[0] + assert result == expected + + def test_construction_from_replaced_timestamps_with_dst(self): + # GH 18785 + index = date_range( + Timestamp(2000, 12, 31), + Timestamp(2005, 12, 31), + freq="YE-DEC", + tz="Australia/Melbourne", + ) + result = DatetimeIndex([x.replace(month=6, day=1) for x in index]) + expected = DatetimeIndex( + [ + "2000-06-01 00:00:00", + "2001-06-01 00:00:00", + "2002-06-01 00:00:00", + "2003-06-01 00:00:00", + "2004-06-01 00:00:00", + "2005-06-01 00:00:00", + ], + tz="Australia/Melbourne", + ) + tm.assert_index_equal(result, expected) + + def test_construction_with_tz_and_tz_aware_dti(self): + # GH 23579 + dti = date_range("2016-01-01", periods=3, tz="US/Central") + msg = "data is already tz-aware US/Central, unable to set specified tz" + with pytest.raises(TypeError, match=msg): + DatetimeIndex(dti, tz="Asia/Tokyo") + + def test_construction_with_nat_and_tzlocal(self): + tz = dateutil.tz.tzlocal() + result = DatetimeIndex(["2018", "NaT"], tz=tz) + expected = DatetimeIndex([Timestamp("2018", tz=tz), pd.NaT]) + tm.assert_index_equal(result, expected) + + def test_constructor_with_ambiguous_keyword_arg(self): + # GH 35297 + + expected = DatetimeIndex( + ["2020-11-01 01:00:00", "2020-11-02 01:00:00"], + dtype="datetime64[ns, America/New_York]", + freq="D", + ambiguous=False, + ) + + # ambiguous keyword in start + timezone = "America/New_York" + start = Timestamp(year=2020, month=11, day=1, hour=1).tz_localize( + timezone, ambiguous=False + ) + result = date_range(start=start, periods=2, ambiguous=False) + tm.assert_index_equal(result, expected) + + # ambiguous keyword in end + timezone = "America/New_York" + end = Timestamp(year=2020, month=11, day=2, hour=1).tz_localize( + timezone, ambiguous=False + ) + result = date_range(end=end, periods=2, ambiguous=False) + tm.assert_index_equal(result, expected) + + def test_constructor_with_nonexistent_keyword_arg(self, warsaw): + # GH 35297 + timezone = warsaw + + # nonexistent keyword in start + start = Timestamp("2015-03-29 02:30:00").tz_localize( + timezone, nonexistent="shift_forward" + ) + result = date_range(start=start, periods=2, freq="h") + expected = DatetimeIndex( + [ + Timestamp("2015-03-29 03:00:00+02:00", tz=timezone), + Timestamp("2015-03-29 04:00:00+02:00", tz=timezone), + ] + ) + + tm.assert_index_equal(result, expected) + + # nonexistent keyword in end + end = start + result = date_range(end=end, periods=2, freq="h") + expected = DatetimeIndex( + [ + Timestamp("2015-03-29 01:00:00+01:00", tz=timezone), + Timestamp("2015-03-29 03:00:00+02:00", tz=timezone), + ] + ) + + tm.assert_index_equal(result, expected) + + def test_constructor_no_precision_raises(self): + # GH-24753, GH-24739 + + msg = "with no precision is not allowed" + with pytest.raises(ValueError, match=msg): + DatetimeIndex(["2000"], dtype="datetime64") + + msg = "The 'datetime64' dtype has no unit. 
Please pass in" + with pytest.raises(ValueError, match=msg): + Index(["2000"], dtype="datetime64") + + def test_constructor_wrong_precision_raises(self): + dti = DatetimeIndex(["2000"], dtype="datetime64[us]") + assert dti.dtype == "M8[us]" + assert dti[0] == Timestamp(2000, 1, 1) + + def test_index_constructor_with_numpy_object_array_and_timestamp_tz_with_nan(self): + # GH 27011 + result = Index(np.array([Timestamp("2019", tz="UTC"), np.nan], dtype=object)) + expected = DatetimeIndex([Timestamp("2019", tz="UTC"), pd.NaT]) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize("tz", [pytz.timezone("US/Eastern"), gettz("US/Eastern")]) + def test_dti_from_tzaware_datetime(self, tz): + d = [datetime(2012, 8, 19, tzinfo=tz)] + + index = DatetimeIndex(d) + assert timezones.tz_compare(index.tz, tz) + + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_dti_tz_constructors(self, tzstr): + """Test different DatetimeIndex constructions with timezone + Follow-up of GH#4229 + """ + arr = ["11/10/2005 08:00:00", "11/10/2005 09:00:00"] + + idx1 = to_datetime(arr).tz_localize(tzstr) + idx2 = date_range(start="2005-11-10 08:00:00", freq="h", periods=2, tz=tzstr) + idx2 = idx2._with_freq(None) # the others all have freq=None + idx3 = DatetimeIndex(arr, tz=tzstr) + idx4 = DatetimeIndex(np.array(arr), tz=tzstr) + + for other in [idx2, idx3, idx4]: + tm.assert_index_equal(idx1, other) + + def test_dti_construction_idempotent(self, unit): + rng = date_range( + "03/12/2012 00:00", periods=10, freq="W-FRI", tz="US/Eastern", unit=unit + ) + rng2 = DatetimeIndex(data=rng, tz="US/Eastern") + tm.assert_index_equal(rng, rng2) + + @pytest.mark.parametrize("prefix", ["", "dateutil/"]) + def test_dti_constructor_static_tzinfo(self, prefix): + # it works! + index = DatetimeIndex([datetime(2012, 1, 1)], tz=prefix + "EST") + index.hour + index[0] + + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_dti_convert_datetime_list(self, tzstr): + dr = date_range("2012-06-02", periods=10, tz=tzstr, name="foo") + dr2 = DatetimeIndex(list(dr), name="foo", freq="D") + tm.assert_index_equal(dr, dr2) + + @pytest.mark.parametrize( + "tz", + [ + pytz.timezone("US/Eastern"), + gettz("US/Eastern"), + ], + ) + @pytest.mark.parametrize("use_str", [True, False]) + @pytest.mark.parametrize("box_cls", [Timestamp, DatetimeIndex]) + def test_dti_ambiguous_matches_timestamp(self, tz, use_str, box_cls, request): + # GH#47471 check that we get the same raising behavior in the DTI + # constructor and Timestamp constructor + dtstr = "2013-11-03 01:59:59.999999" + item = dtstr + if not use_str: + item = Timestamp(dtstr).to_pydatetime() + if box_cls is not Timestamp: + item = [item] + + if not use_str and isinstance(tz, dateutil.tz.tzfile): + # FIXME: The Timestamp constructor here behaves differently than all + # the other cases bc with dateutil/zoneinfo tzinfos we implicitly + # get fold=0. Having this raise is not important, but having the + # behavior be consistent across cases is. 
+ mark = pytest.mark.xfail(reason="We implicitly get fold=0.") + request.applymarker(mark) + + with pytest.raises(pytz.AmbiguousTimeError, match=dtstr): + box_cls(item, tz=tz) + + @pytest.mark.parametrize("tz", [None, "UTC", "US/Pacific"]) + def test_dti_constructor_with_non_nano_dtype(self, tz): + # GH#55756, GH#54620 + ts = Timestamp("2999-01-01") + dtype = "M8[us]" + if tz is not None: + dtype = f"M8[us, {tz}]" + vals = [ts, "2999-01-02 03:04:05.678910", 2500] + result = DatetimeIndex(vals, dtype=dtype) + # The 2500 is interpreted as microseconds, consistent with what + # we would get if we created DatetimeIndexes from vals[:2] and vals[2:] + # and concated the results. + pointwise = [ + vals[0].tz_localize(tz), + Timestamp(vals[1], tz=tz), + to_datetime(vals[2], unit="us", utc=True).tz_convert(tz), + ] + exp_vals = [x.as_unit("us").asm8 for x in pointwise] + exp_arr = np.array(exp_vals, dtype="M8[us]") + expected = DatetimeIndex(exp_arr, dtype="M8[us]") + if tz is not None: + expected = expected.tz_localize("UTC").tz_convert(tz) + tm.assert_index_equal(result, expected) + + result2 = DatetimeIndex(np.array(vals, dtype=object), dtype=dtype) + tm.assert_index_equal(result2, expected) + + def test_dti_constructor_with_non_nano_now_today(self): + # GH#55756 + now = Timestamp.now() + today = Timestamp.today() + result = DatetimeIndex(["now", "today"], dtype="M8[s]") + assert result.dtype == "M8[s]" + + # result may not exactly match [now, today] so we'll test it up to a tolerance. + # (it *may* match exactly due to rounding) + tolerance = pd.Timedelta(microseconds=1) + + diff0 = result[0] - now.as_unit("s") + assert diff0 >= pd.Timedelta(0) + assert diff0 < tolerance + + diff1 = result[1] - today.as_unit("s") + assert diff1 >= pd.Timedelta(0) + assert diff1 < tolerance + + def test_dti_constructor_object_float_matches_float_dtype(self): + # GH#55780 + arr = np.array([0, np.nan], dtype=np.float64) + arr2 = arr.astype(object) + + dti1 = DatetimeIndex(arr, tz="CET") + dti2 = DatetimeIndex(arr2, tz="CET") + tm.assert_index_equal(dti1, dti2) + + @pytest.mark.parametrize("dtype", ["M8[us]", "M8[us, US/Pacific]"]) + def test_dti_constructor_with_dtype_object_int_matches_int_dtype(self, dtype): + # Going through the object path should match the non-object path + + vals1 = np.arange(5, dtype="i8") * 1000 + vals1[0] = pd.NaT.value + + vals2 = vals1.astype(np.float64) + vals2[0] = np.nan + + vals3 = vals1.astype(object) + # change lib.infer_dtype(vals3) from "integer" so we go through + # array_to_datetime in _sequence_to_dt64 + vals3[0] = pd.NaT + + vals4 = vals2.astype(object) + + res1 = DatetimeIndex(vals1, dtype=dtype) + res2 = DatetimeIndex(vals2, dtype=dtype) + res3 = DatetimeIndex(vals3, dtype=dtype) + res4 = DatetimeIndex(vals4, dtype=dtype) + + expected = DatetimeIndex(vals1.view("M8[us]")) + if res1.tz is not None: + expected = expected.tz_localize("UTC").tz_convert(res1.tz) + tm.assert_index_equal(res1, expected) + tm.assert_index_equal(res2, expected) + tm.assert_index_equal(res3, expected) + tm.assert_index_equal(res4, expected) + + +class TestTimeSeries: + def test_dti_constructor_preserve_dti_freq(self): + rng = date_range("1/1/2000", "1/2/2000", freq="5min") + + rng2 = DatetimeIndex(rng) + assert rng.freq == rng2.freq + + def test_explicit_none_freq(self): + # Explicitly passing freq=None is respected + rng = date_range("1/1/2000", "1/2/2000", freq="5min") + + result = DatetimeIndex(rng, freq=None) + assert result.freq is None + + result = DatetimeIndex(rng._data, freq=None) + assert 
result.freq is None + + def test_dti_constructor_small_int(self, any_int_numpy_dtype): + # see gh-13721 + exp = DatetimeIndex( + [ + "1970-01-01 00:00:00.00000000", + "1970-01-01 00:00:00.00000001", + "1970-01-01 00:00:00.00000002", + ] + ) + + arr = np.array([0, 10, 20], dtype=any_int_numpy_dtype) + tm.assert_index_equal(DatetimeIndex(arr), exp) + + def test_ctor_str_intraday(self): + rng = DatetimeIndex(["1-1-2000 00:00:01"]) + assert rng[0].second == 1 + + def test_index_cast_datetime64_other_units(self): + arr = np.arange(0, 100, 10, dtype=np.int64).view("M8[D]") + idx = Index(arr) + + assert (idx.values == astype_overflowsafe(arr, dtype=np.dtype("M8[ns]"))).all() + + def test_constructor_int64_nocopy(self): + # GH#1624 + arr = np.arange(1000, dtype=np.int64) + index = DatetimeIndex(arr) + + arr[50:100] = -1 + assert (index.asi8[50:100] == -1).all() + + arr = np.arange(1000, dtype=np.int64) + index = DatetimeIndex(arr, copy=True) + + arr[50:100] = -1 + assert (index.asi8[50:100] != -1).all() + + @pytest.mark.parametrize( + "freq", + ["ME", "QE", "YE", "D", "B", "bh", "min", "s", "ms", "us", "h", "ns", "C"], + ) + def test_from_freq_recreate_from_data(self, freq): + org = date_range(start="2001/02/01 09:00", freq=freq, periods=1) + idx = DatetimeIndex(org, freq=freq) + tm.assert_index_equal(idx, org) + + org = date_range( + start="2001/02/01 09:00", freq=freq, tz="US/Pacific", periods=1 + ) + idx = DatetimeIndex(org, freq=freq, tz="US/Pacific") + tm.assert_index_equal(idx, org) + + def test_datetimeindex_constructor_misc(self): + arr = ["1/1/2005", "1/2/2005", "Jn 3, 2005", "2005-01-04"] + msg = r"(\(')?Unknown datetime string format(:', 'Jn 3, 2005'\))?" + with pytest.raises(ValueError, match=msg): + DatetimeIndex(arr) + + arr = ["1/1/2005", "1/2/2005", "1/3/2005", "2005-01-04"] + idx1 = DatetimeIndex(arr) + + arr = [datetime(2005, 1, 1), "1/2/2005", "1/3/2005", "2005-01-04"] + idx2 = DatetimeIndex(arr) + + arr = [Timestamp(datetime(2005, 1, 1)), "1/2/2005", "1/3/2005", "2005-01-04"] + idx3 = DatetimeIndex(arr) + + arr = np.array(["1/1/2005", "1/2/2005", "1/3/2005", "2005-01-04"], dtype="O") + idx4 = DatetimeIndex(arr) + + idx5 = DatetimeIndex(["12/05/2007", "25/01/2008"], dayfirst=True) + idx6 = DatetimeIndex( + ["2007/05/12", "2008/01/25"], dayfirst=False, yearfirst=True + ) + tm.assert_index_equal(idx5, idx6) + + for other in [idx2, idx3, idx4]: + assert (idx1.values == other.values).all() + + def test_dti_constructor_object_dtype_dayfirst_yearfirst_with_tz(self): + # GH#55813 + val = "5/10/16" + + dfirst = Timestamp(2016, 10, 5, tz="US/Pacific") + yfirst = Timestamp(2005, 10, 16, tz="US/Pacific") + + result1 = DatetimeIndex([val], tz="US/Pacific", dayfirst=True) + expected1 = DatetimeIndex([dfirst]) + tm.assert_index_equal(result1, expected1) + + result2 = DatetimeIndex([val], tz="US/Pacific", yearfirst=True) + expected2 = DatetimeIndex([yfirst]) + tm.assert_index_equal(result2, expected2) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_date_range.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_date_range.py new file mode 100644 index 0000000000000000000000000000000000000000..d26bee80003e92092722790d9c38225a3b16b035 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_date_range.py @@ -0,0 +1,1721 @@ +""" +test date_range, bdate_range construction from the convenience range functions +""" + +from datetime import ( + datetime, + time, + timedelta, +) +import re + 
+import numpy as np +import pytest +import pytz +from pytz import timezone + +from pandas._libs.tslibs import timezones +from pandas._libs.tslibs.offsets import ( + BDay, + CDay, + DateOffset, + MonthEnd, + prefix_mapping, +) +from pandas.errors import OutOfBoundsDatetime +import pandas.util._test_decorators as td + +import pandas as pd +from pandas import ( + DataFrame, + DatetimeIndex, + Series, + Timedelta, + Timestamp, + bdate_range, + date_range, + offsets, +) +import pandas._testing as tm +from pandas.core.arrays.datetimes import _generate_range as generate_range +from pandas.tests.indexes.datetimes.test_timezones import ( + FixedOffset, + fixed_off_no_name, +) + +from pandas.tseries.holiday import USFederalHolidayCalendar + +START, END = datetime(2009, 1, 1), datetime(2010, 1, 1) + + +def _get_expected_range( + begin_to_match, + end_to_match, + both_range, + inclusive_endpoints, +): + """Helper to get expected range from a both inclusive range""" + left_match = begin_to_match == both_range[0] + right_match = end_to_match == both_range[-1] + + if inclusive_endpoints == "left" and right_match: + expected_range = both_range[:-1] + elif inclusive_endpoints == "right" and left_match: + expected_range = both_range[1:] + elif inclusive_endpoints == "neither" and left_match and right_match: + expected_range = both_range[1:-1] + elif inclusive_endpoints == "neither" and right_match: + expected_range = both_range[:-1] + elif inclusive_endpoints == "neither" and left_match: + expected_range = both_range[1:] + elif inclusive_endpoints == "both": + expected_range = both_range[:] + else: + expected_range = both_range[:] + + return expected_range + + +class TestTimestampEquivDateRange: + # Older tests in TestTimeSeries constructed their `stamp` objects + # using `date_range` instead of the `Timestamp` constructor. + # TestTimestampEquivDateRange checks that these are equivalent in the + # pertinent cases. 
+ + def test_date_range_timestamp_equiv(self): + rng = date_range("20090415", "20090519", tz="US/Eastern") + stamp = rng[0] + + ts = Timestamp("20090415", tz="US/Eastern") + assert ts == stamp + + def test_date_range_timestamp_equiv_dateutil(self): + rng = date_range("20090415", "20090519", tz="dateutil/US/Eastern") + stamp = rng[0] + + ts = Timestamp("20090415", tz="dateutil/US/Eastern") + assert ts == stamp + + def test_date_range_timestamp_equiv_explicit_pytz(self): + rng = date_range("20090415", "20090519", tz=pytz.timezone("US/Eastern")) + stamp = rng[0] + + ts = Timestamp("20090415", tz=pytz.timezone("US/Eastern")) + assert ts == stamp + + @td.skip_if_windows + def test_date_range_timestamp_equiv_explicit_dateutil(self): + from pandas._libs.tslibs.timezones import dateutil_gettz as gettz + + rng = date_range("20090415", "20090519", tz=gettz("US/Eastern")) + stamp = rng[0] + + ts = Timestamp("20090415", tz=gettz("US/Eastern")) + assert ts == stamp + + def test_date_range_timestamp_equiv_from_datetime_instance(self): + datetime_instance = datetime(2014, 3, 4) + # build a timestamp with a frequency, since then it supports + # addition/subtraction of integers + timestamp_instance = date_range(datetime_instance, periods=1, freq="D")[0] + + ts = Timestamp(datetime_instance) + assert ts == timestamp_instance + + def test_date_range_timestamp_equiv_preserve_frequency(self): + timestamp_instance = date_range("2014-03-05", periods=1, freq="D")[0] + ts = Timestamp("2014-03-05") + + assert timestamp_instance == ts + + +class TestDateRanges: + def test_date_range_name(self): + idx = date_range(start="2000-01-01", periods=1, freq="YE", name="TEST") + assert idx.name == "TEST" + + def test_date_range_invalid_periods(self): + msg = "periods must be a number, got foo" + with pytest.raises(TypeError, match=msg): + date_range(start="1/1/2000", periods="foo", freq="D") + + def test_date_range_fractional_period(self): + msg = "Non-integer 'periods' in pd.date_range, pd.timedelta_range" + with tm.assert_produces_warning(FutureWarning, match=msg): + rng = date_range("1/1/2000", periods=10.5) + exp = date_range("1/1/2000", periods=10) + tm.assert_index_equal(rng, exp) + + @pytest.mark.parametrize( + "freq,freq_depr", + [ + ("2ME", "2M"), + ("2SME", "2SM"), + ("2BQE", "2BQ"), + ("2BYE", "2BY"), + ], + ) + def test_date_range_frequency_M_SM_BQ_BY_deprecated(self, freq, freq_depr): + # GH#52064 + depr_msg = f"'{freq_depr[1:]}' is deprecated and will be removed " + f"in a future version, please use '{freq[1:]}' instead." 
+ + expected = date_range("1/1/2000", periods=4, freq=freq) + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + result = date_range("1/1/2000", periods=4, freq=freq_depr) + tm.assert_index_equal(result, expected) + + def test_date_range_tuple_freq_raises(self): + # GH#34703 + edate = datetime(2000, 1, 1) + with pytest.raises(TypeError, match="pass as a string instead"): + date_range(end=edate, freq=("D", 5), periods=20) + + @pytest.mark.parametrize("freq", ["ns", "us", "ms", "min", "s", "h", "D"]) + def test_date_range_edges(self, freq): + # GH#13672 + td = Timedelta(f"1{freq}") + ts = Timestamp("1970-01-01") + + idx = date_range( + start=ts + td, + end=ts + 4 * td, + freq=freq, + ) + exp = DatetimeIndex( + [ts + n * td for n in range(1, 5)], + dtype="M8[ns]", + freq=freq, + ) + tm.assert_index_equal(idx, exp) + + # start after end + idx = date_range( + start=ts + 4 * td, + end=ts + td, + freq=freq, + ) + exp = DatetimeIndex([], dtype="M8[ns]", freq=freq) + tm.assert_index_equal(idx, exp) + + # start matches end + idx = date_range( + start=ts + td, + end=ts + td, + freq=freq, + ) + exp = DatetimeIndex([ts + td], dtype="M8[ns]", freq=freq) + tm.assert_index_equal(idx, exp) + + def test_date_range_near_implementation_bound(self): + # GH#??? + freq = Timedelta(1) + + with pytest.raises(OutOfBoundsDatetime, match="Cannot generate range with"): + date_range(end=Timestamp.min, periods=2, freq=freq) + + def test_date_range_nat(self): + # GH#11587 + msg = "Neither `start` nor `end` can be NaT" + with pytest.raises(ValueError, match=msg): + date_range(start="2016-01-01", end=pd.NaT, freq="D") + with pytest.raises(ValueError, match=msg): + date_range(start=pd.NaT, end="2016-01-01", freq="D") + + def test_date_range_multiplication_overflow(self): + # GH#24255 + # check that overflows in calculating `addend = periods * stride` + # are caught + with tm.assert_produces_warning(None): + # we should _not_ be seeing a overflow RuntimeWarning + dti = date_range(start="1677-09-22", periods=213503, freq="D") + + assert dti[0] == Timestamp("1677-09-22") + assert len(dti) == 213503 + + msg = "Cannot generate range with" + with pytest.raises(OutOfBoundsDatetime, match=msg): + date_range("1969-05-04", periods=200000000, freq="30000D") + + def test_date_range_unsigned_overflow_handling(self): + # GH#24255 + # case where `addend = periods * stride` overflows int64 bounds + # but not uint64 bounds + dti = date_range(start="1677-09-22", end="2262-04-11", freq="D") + + dti2 = date_range(start=dti[0], periods=len(dti), freq="D") + assert dti2.equals(dti) + + dti3 = date_range(end=dti[-1], periods=len(dti), freq="D") + assert dti3.equals(dti) + + def test_date_range_int64_overflow_non_recoverable(self): + # GH#24255 + # case with start later than 1970-01-01, overflow int64 but not uint64 + msg = "Cannot generate range with" + with pytest.raises(OutOfBoundsDatetime, match=msg): + date_range(start="1970-02-01", periods=106752 * 24, freq="h") + + # case with end before 1970-01-01, overflow int64 but not uint64 + with pytest.raises(OutOfBoundsDatetime, match=msg): + date_range(end="1969-11-14", periods=106752 * 24, freq="h") + + @pytest.mark.slow + @pytest.mark.parametrize( + "s_ts, e_ts", [("2262-02-23", "1969-11-14"), ("1970-02-01", "1677-10-22")] + ) + def test_date_range_int64_overflow_stride_endpoint_different_signs( + self, s_ts, e_ts + ): + # cases where stride * periods overflow int64 and stride/endpoint + # have different signs + start = Timestamp(s_ts) + end = Timestamp(e_ts) + + expected = 
date_range(start=start, end=end, freq="-1h") + assert expected[0] == start + assert expected[-1] == end + + dti = date_range(end=end, periods=len(expected), freq="-1h") + tm.assert_index_equal(dti, expected) + + def test_date_range_out_of_bounds(self): + # GH#14187 + msg = "Cannot generate range" + with pytest.raises(OutOfBoundsDatetime, match=msg): + date_range("2016-01-01", periods=100000, freq="D") + with pytest.raises(OutOfBoundsDatetime, match=msg): + date_range(end="1763-10-12", periods=100000, freq="D") + + def test_date_range_gen_error(self): + rng = date_range("1/1/2000 00:00", "1/1/2000 00:18", freq="5min") + assert len(rng) == 4 + + def test_date_range_normalize(self): + snap = datetime.today() + n = 50 + + rng = date_range(snap, periods=n, normalize=False, freq="2D") + + offset = timedelta(2) + expected = DatetimeIndex( + [snap + i * offset for i in range(n)], dtype="M8[ns]", freq=offset + ) + + tm.assert_index_equal(rng, expected) + + rng = date_range("1/1/2000 08:15", periods=n, normalize=False, freq="B") + the_time = time(8, 15) + for val in rng: + assert val.time() == the_time + + def test_date_range_ambiguous_arguments(self): + # #2538 + start = datetime(2011, 1, 1, 5, 3, 40) + end = datetime(2011, 1, 1, 8, 9, 40) + + msg = ( + "Of the four parameters: start, end, periods, and " + "freq, exactly three must be specified" + ) + with pytest.raises(ValueError, match=msg): + date_range(start, end, periods=10, freq="s") + + def test_date_range_convenience_periods(self, unit): + # GH 20808 + result = date_range("2018-04-24", "2018-04-27", periods=3, unit=unit) + expected = DatetimeIndex( + ["2018-04-24 00:00:00", "2018-04-25 12:00:00", "2018-04-27 00:00:00"], + dtype=f"M8[{unit}]", + freq=None, + ) + + tm.assert_index_equal(result, expected) + + # Test if spacing remains linear if tz changes to dst in range + result = date_range( + "2018-04-01 01:00:00", + "2018-04-01 04:00:00", + tz="Australia/Sydney", + periods=3, + unit=unit, + ) + expected = DatetimeIndex( + [ + Timestamp("2018-04-01 01:00:00+1100", tz="Australia/Sydney"), + Timestamp("2018-04-01 02:00:00+1000", tz="Australia/Sydney"), + Timestamp("2018-04-01 04:00:00+1000", tz="Australia/Sydney"), + ] + ).as_unit(unit) + tm.assert_index_equal(result, expected) + + def test_date_range_index_comparison(self): + rng = date_range("2011-01-01", periods=3, tz="US/Eastern") + df = Series(rng).to_frame() + arr = np.array([rng.to_list()]).T + arr2 = np.array([rng]).T + + with pytest.raises(ValueError, match="Unable to coerce to Series"): + rng == df + + with pytest.raises(ValueError, match="Unable to coerce to Series"): + df == rng + + expected = DataFrame([True, True, True]) + + results = df == arr2 + tm.assert_frame_equal(results, expected) + + expected = Series([True, True, True], name=0) + + results = df[0] == arr2[:, 0] + tm.assert_series_equal(results, expected) + + expected = np.array( + [[True, False, False], [False, True, False], [False, False, True]] + ) + results = rng == arr + tm.assert_numpy_array_equal(results, expected) + + @pytest.mark.parametrize( + "start,end,result_tz", + [ + ["20180101", "20180103", "US/Eastern"], + [datetime(2018, 1, 1), datetime(2018, 1, 3), "US/Eastern"], + [Timestamp("20180101"), Timestamp("20180103"), "US/Eastern"], + [ + Timestamp("20180101", tz="US/Eastern"), + Timestamp("20180103", tz="US/Eastern"), + "US/Eastern", + ], + [ + Timestamp("20180101", tz="US/Eastern"), + Timestamp("20180103", tz="US/Eastern"), + None, + ], + ], + ) + def test_date_range_linspacing_tz(self, start, end, 
result_tz): + # GH 20983 + result = date_range(start, end, periods=3, tz=result_tz) + expected = date_range("20180101", periods=3, freq="D", tz="US/Eastern") + tm.assert_index_equal(result, expected) + + def test_date_range_timedelta(self): + start = "2020-01-01" + end = "2020-01-11" + rng1 = date_range(start, end, freq="3D") + rng2 = date_range(start, end, freq=timedelta(days=3)) + tm.assert_index_equal(rng1, rng2) + + def test_range_misspecified(self): + # GH #1095 + msg = ( + "Of the four parameters: start, end, periods, and " + "freq, exactly three must be specified" + ) + + with pytest.raises(ValueError, match=msg): + date_range(start="1/1/2000") + + with pytest.raises(ValueError, match=msg): + date_range(end="1/1/2000") + + with pytest.raises(ValueError, match=msg): + date_range(periods=10) + + with pytest.raises(ValueError, match=msg): + date_range(start="1/1/2000", freq="h") + + with pytest.raises(ValueError, match=msg): + date_range(end="1/1/2000", freq="h") + + with pytest.raises(ValueError, match=msg): + date_range(periods=10, freq="h") + + with pytest.raises(ValueError, match=msg): + date_range() + + def test_compat_replace(self): + # https://github.com/statsmodels/statsmodels/issues/3349 + # replace should take ints/longs for compat + result = date_range(Timestamp("1960-04-01 00:00:00"), periods=76, freq="QS-JAN") + assert len(result) == 76 + + def test_catch_infinite_loop(self): + offset = offsets.DateOffset(minute=5) + # blow up, don't loop forever + msg = "Offset did not increment date" + with pytest.raises(ValueError, match=msg): + date_range(datetime(2011, 11, 11), datetime(2011, 11, 12), freq=offset) + + def test_construct_over_dst(self, unit): + # GH 20854 + pre_dst = Timestamp("2010-11-07 01:00:00").tz_localize( + "US/Pacific", ambiguous=True + ) + pst_dst = Timestamp("2010-11-07 01:00:00").tz_localize( + "US/Pacific", ambiguous=False + ) + expect_data = [ + Timestamp("2010-11-07 00:00:00", tz="US/Pacific"), + pre_dst, + pst_dst, + ] + expected = DatetimeIndex(expect_data, freq="h").as_unit(unit) + result = date_range( + start="2010-11-7", periods=3, freq="h", tz="US/Pacific", unit=unit + ) + tm.assert_index_equal(result, expected) + + def test_construct_with_different_start_end_string_format(self, unit): + # GH 12064 + result = date_range( + "2013-01-01 00:00:00+09:00", + "2013/01/01 02:00:00+09:00", + freq="h", + unit=unit, + ) + expected = DatetimeIndex( + [ + Timestamp("2013-01-01 00:00:00+09:00"), + Timestamp("2013-01-01 01:00:00+09:00"), + Timestamp("2013-01-01 02:00:00+09:00"), + ], + freq="h", + ).as_unit(unit) + tm.assert_index_equal(result, expected) + + def test_error_with_zero_monthends(self): + msg = r"Offset <0 \* MonthEnds> did not increment date" + with pytest.raises(ValueError, match=msg): + date_range("1/1/2000", "1/1/2001", freq=MonthEnd(0)) + + def test_range_bug(self, unit): + # GH #770 + offset = DateOffset(months=3) + result = date_range("2011-1-1", "2012-1-31", freq=offset, unit=unit) + + start = datetime(2011, 1, 1) + expected = DatetimeIndex( + [start + i * offset for i in range(5)], dtype=f"M8[{unit}]", freq=offset + ) + tm.assert_index_equal(result, expected) + + def test_range_tz_pytz(self): + # see gh-2906 + tz = timezone("US/Eastern") + start = tz.localize(datetime(2011, 1, 1)) + end = tz.localize(datetime(2011, 1, 3)) + + dr = date_range(start=start, periods=3) + assert dr.tz.zone == tz.zone + assert dr[0] == start + assert dr[2] == end + + dr = date_range(end=end, periods=3) + assert dr.tz.zone == tz.zone + assert dr[0] == start + 
assert dr[2] == end + + dr = date_range(start=start, end=end) + assert dr.tz.zone == tz.zone + assert dr[0] == start + assert dr[2] == end + + @pytest.mark.parametrize( + "start, end", + [ + [ + Timestamp(datetime(2014, 3, 6), tz="US/Eastern"), + Timestamp(datetime(2014, 3, 12), tz="US/Eastern"), + ], + [ + Timestamp(datetime(2013, 11, 1), tz="US/Eastern"), + Timestamp(datetime(2013, 11, 6), tz="US/Eastern"), + ], + ], + ) + def test_range_tz_dst_straddle_pytz(self, start, end): + dr = date_range(start, end, freq="D") + assert dr[0] == start + assert dr[-1] == end + assert np.all(dr.hour == 0) + + dr = date_range(start, end, freq="D", tz="US/Eastern") + assert dr[0] == start + assert dr[-1] == end + assert np.all(dr.hour == 0) + + dr = date_range( + start.replace(tzinfo=None), + end.replace(tzinfo=None), + freq="D", + tz="US/Eastern", + ) + assert dr[0] == start + assert dr[-1] == end + assert np.all(dr.hour == 0) + + def test_range_tz_dateutil(self): + # see gh-2906 + + # Use maybe_get_tz to fix filename in tz under dateutil. + from pandas._libs.tslibs.timezones import maybe_get_tz + + tz = lambda x: maybe_get_tz("dateutil/" + x) + + start = datetime(2011, 1, 1, tzinfo=tz("US/Eastern")) + end = datetime(2011, 1, 3, tzinfo=tz("US/Eastern")) + + dr = date_range(start=start, periods=3) + assert dr.tz == tz("US/Eastern") + assert dr[0] == start + assert dr[2] == end + + dr = date_range(end=end, periods=3) + assert dr.tz == tz("US/Eastern") + assert dr[0] == start + assert dr[2] == end + + dr = date_range(start=start, end=end) + assert dr.tz == tz("US/Eastern") + assert dr[0] == start + assert dr[2] == end + + @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3h", "YE"]) + @pytest.mark.parametrize("tz", [None, "US/Eastern"]) + def test_range_closed(self, freq, tz, inclusive_endpoints_fixture): + # GH#12409, GH#12684 + + begin = Timestamp("2011/1/1", tz=tz) + end = Timestamp("2014/1/1", tz=tz) + + result_range = date_range( + begin, end, inclusive=inclusive_endpoints_fixture, freq=freq + ) + both_range = date_range(begin, end, inclusive="both", freq=freq) + expected_range = _get_expected_range( + begin, end, both_range, inclusive_endpoints_fixture + ) + + tm.assert_index_equal(expected_range, result_range) + + @pytest.mark.parametrize("freq", ["1D", "3D", "2ME", "7W", "3h", "YE"]) + def test_range_with_tz_closed_with_tz_aware_start_end( + self, freq, inclusive_endpoints_fixture + ): + begin = Timestamp("2011/1/1") + end = Timestamp("2014/1/1") + begintz = Timestamp("2011/1/1", tz="US/Eastern") + endtz = Timestamp("2014/1/1", tz="US/Eastern") + + result_range = date_range( + begin, + end, + inclusive=inclusive_endpoints_fixture, + freq=freq, + tz="US/Eastern", + ) + both_range = date_range( + begin, end, inclusive="both", freq=freq, tz="US/Eastern" + ) + expected_range = _get_expected_range( + begintz, + endtz, + both_range, + inclusive_endpoints_fixture, + ) + + tm.assert_index_equal(expected_range, result_range) + + def test_range_closed_boundary(self, inclusive_endpoints_fixture): + # GH#11804 + right_boundary = date_range( + "2015-09-12", + "2015-12-01", + freq="QS-MAR", + inclusive=inclusive_endpoints_fixture, + ) + left_boundary = date_range( + "2015-09-01", + "2015-09-12", + freq="QS-MAR", + inclusive=inclusive_endpoints_fixture, + ) + both_boundary = date_range( + "2015-09-01", + "2015-12-01", + freq="QS-MAR", + inclusive=inclusive_endpoints_fixture, + ) + neither_boundary = date_range( + "2015-09-11", + "2015-09-12", + freq="QS-MAR", + inclusive=inclusive_endpoints_fixture, + 
) + + expected_right = both_boundary + expected_left = both_boundary + expected_both = both_boundary + + if inclusive_endpoints_fixture == "right": + expected_left = both_boundary[1:] + elif inclusive_endpoints_fixture == "left": + expected_right = both_boundary[:-1] + elif inclusive_endpoints_fixture == "both": + expected_right = both_boundary[1:] + expected_left = both_boundary[:-1] + + expected_neither = both_boundary[1:-1] + + tm.assert_index_equal(right_boundary, expected_right) + tm.assert_index_equal(left_boundary, expected_left) + tm.assert_index_equal(both_boundary, expected_both) + tm.assert_index_equal(neither_boundary, expected_neither) + + def test_date_range_years_only(self, tz_naive_fixture): + tz = tz_naive_fixture + # GH#6961 + rng1 = date_range("2014", "2015", freq="ME", tz=tz) + expected1 = date_range("2014-01-31", "2014-12-31", freq="ME", tz=tz) + tm.assert_index_equal(rng1, expected1) + + rng2 = date_range("2014", "2015", freq="MS", tz=tz) + expected2 = date_range("2014-01-01", "2015-01-01", freq="MS", tz=tz) + tm.assert_index_equal(rng2, expected2) + + rng3 = date_range("2014", "2020", freq="YE", tz=tz) + expected3 = date_range("2014-12-31", "2019-12-31", freq="YE", tz=tz) + tm.assert_index_equal(rng3, expected3) + + rng4 = date_range("2014", "2020", freq="YS", tz=tz) + expected4 = date_range("2014-01-01", "2020-01-01", freq="YS", tz=tz) + tm.assert_index_equal(rng4, expected4) + + def test_freq_divides_end_in_nanos(self): + # GH 10885 + result_1 = date_range("2005-01-12 10:00", "2005-01-12 16:00", freq="345min") + result_2 = date_range("2005-01-13 10:00", "2005-01-13 16:00", freq="345min") + expected_1 = DatetimeIndex( + ["2005-01-12 10:00:00", "2005-01-12 15:45:00"], + dtype="datetime64[ns]", + freq="345min", + tz=None, + ) + expected_2 = DatetimeIndex( + ["2005-01-13 10:00:00", "2005-01-13 15:45:00"], + dtype="datetime64[ns]", + freq="345min", + tz=None, + ) + tm.assert_index_equal(result_1, expected_1) + tm.assert_index_equal(result_2, expected_2) + + def test_cached_range_bug(self): + rng = date_range("2010-09-01 05:00:00", periods=50, freq=DateOffset(hours=6)) + assert len(rng) == 50 + assert rng[0] == datetime(2010, 9, 1, 5) + + def test_timezone_comparison_bug(self): + # smoke test + start = Timestamp("20130220 10:00", tz="US/Eastern") + result = date_range(start, periods=2, tz="US/Eastern") + assert len(result) == 2 + + def test_timezone_comparison_assert(self): + start = Timestamp("20130220 10:00", tz="US/Eastern") + msg = "Inferred time zone not equal to passed time zone" + with pytest.raises(AssertionError, match=msg): + date_range(start, periods=2, tz="Europe/Berlin") + + def test_negative_non_tick_frequency_descending_dates(self, tz_aware_fixture): + # GH 23270 + tz = tz_aware_fixture + result = date_range(start="2011-06-01", end="2011-01-01", freq="-1MS", tz=tz) + expected = date_range(end="2011-06-01", start="2011-01-01", freq="1MS", tz=tz)[ + ::-1 + ] + tm.assert_index_equal(result, expected) + + def test_range_where_start_equal_end(self, inclusive_endpoints_fixture): + # GH 43394 + start = "2021-09-02" + end = "2021-09-02" + result = date_range( + start=start, end=end, freq="D", inclusive=inclusive_endpoints_fixture + ) + + both_range = date_range(start=start, end=end, freq="D", inclusive="both") + if inclusive_endpoints_fixture == "neither": + expected = both_range[1:-1] + elif inclusive_endpoints_fixture in ("left", "right", "both"): + expected = both_range[:] + + tm.assert_index_equal(result, expected) + + def 
test_freq_dateoffset_with_relateivedelta_nanos(self): + # GH 46877 + freq = DateOffset(hours=10, days=57, nanoseconds=3) + result = date_range(end="1970-01-01 00:00:00", periods=10, freq=freq, name="a") + expected = DatetimeIndex( + [ + "1968-08-02T05:59:59.999999973", + "1968-09-28T15:59:59.999999976", + "1968-11-25T01:59:59.999999979", + "1969-01-21T11:59:59.999999982", + "1969-03-19T21:59:59.999999985", + "1969-05-16T07:59:59.999999988", + "1969-07-12T17:59:59.999999991", + "1969-09-08T03:59:59.999999994", + "1969-11-04T13:59:59.999999997", + "1970-01-01T00:00:00.000000000", + ], + name="a", + ) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "freq,freq_depr", + [ + ("h", "H"), + ("2min", "2T"), + ("1s", "1S"), + ("2ms", "2L"), + ("1us", "1U"), + ("2ns", "2N"), + ], + ) + def test_frequencies_H_T_S_L_U_N_deprecated(self, freq, freq_depr): + # GH#52536 + freq_msg = re.split("[0-9]*", freq, maxsplit=1)[1] + freq_depr_msg = re.split("[0-9]*", freq_depr, maxsplit=1)[1] + msg = ( + f"'{freq_depr_msg}' is deprecated and will be removed in a future version, " + ) + f"please use '{freq_msg}' instead" + + expected = date_range("1/1/2000", periods=2, freq=freq) + with tm.assert_produces_warning(FutureWarning, match=msg): + result = date_range("1/1/2000", periods=2, freq=freq_depr) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "freq,freq_depr", + [ + ("200YE", "200A"), + ("YE", "Y"), + ("2YE-MAY", "2A-MAY"), + ("YE-MAY", "Y-MAY"), + ], + ) + def test_frequencies_A_deprecated_Y_renamed(self, freq, freq_depr): + # GH#9586, GH#54275 + freq_msg = re.split("[0-9]*", freq, maxsplit=1)[1] + freq_depr_msg = re.split("[0-9]*", freq_depr, maxsplit=1)[1] + msg = f"'{freq_depr_msg}' is deprecated and will be removed " + f"in a future version, please use '{freq_msg}' instead." + + expected = date_range("1/1/2000", periods=2, freq=freq) + with tm.assert_produces_warning(FutureWarning, match=msg): + result = date_range("1/1/2000", periods=2, freq=freq_depr) + tm.assert_index_equal(result, expected) + + def test_to_offset_with_lowercase_deprecated_freq(self) -> None: + # https://github.com/pandas-dev/pandas/issues/56847 + msg = ( + "'m' is deprecated and will be removed in a future version, please use " + "'ME' instead." + ) + with tm.assert_produces_warning(FutureWarning, match=msg): + result = date_range("2010-01-01", periods=2, freq="m") + expected = DatetimeIndex(["2010-01-31", "2010-02-28"], freq="ME") + tm.assert_index_equal(result, expected) + + def test_date_range_bday(self): + sdate = datetime(1999, 12, 25) + idx = date_range(start=sdate, freq="1B", periods=20) + assert len(idx) == 20 + assert idx[0] == sdate + 0 * offsets.BDay() + assert idx.freq == "B" + + +class TestDateRangeTZ: + """Tests for date_range with timezones""" + + def test_hongkong_tz_convert(self): + # GH#1673 smoke test + dr = date_range("2012-01-01", "2012-01-10", freq="D", tz="Hongkong") + + # it works! 
+ dr.hour + + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_date_range_span_dst_transition(self, tzstr): + # GH#1778 + + # Standard -> Daylight Savings Time + dr = date_range("03/06/2012 00:00", periods=200, freq="W-FRI", tz="US/Eastern") + + assert (dr.hour == 0).all() + + dr = date_range("2012-11-02", periods=10, tz=tzstr) + result = dr.hour + expected = pd.Index([0] * 10, dtype="int32") + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_date_range_timezone_str_argument(self, tzstr): + tz = timezones.maybe_get_tz(tzstr) + result = date_range("1/1/2000", periods=10, tz=tzstr) + expected = date_range("1/1/2000", periods=10, tz=tz) + + tm.assert_index_equal(result, expected) + + def test_date_range_with_fixed_tz(self): + off = FixedOffset(420, "+07:00") + start = datetime(2012, 3, 11, 5, 0, 0, tzinfo=off) + end = datetime(2012, 6, 11, 5, 0, 0, tzinfo=off) + rng = date_range(start=start, end=end) + assert off == rng.tz + + rng2 = date_range(start, periods=len(rng), tz=off) + tm.assert_index_equal(rng, rng2) + + rng3 = date_range("3/11/2012 05:00:00+07:00", "6/11/2012 05:00:00+07:00") + assert (rng.values == rng3.values).all() + + def test_date_range_with_fixedoffset_noname(self): + off = fixed_off_no_name + start = datetime(2012, 3, 11, 5, 0, 0, tzinfo=off) + end = datetime(2012, 6, 11, 5, 0, 0, tzinfo=off) + rng = date_range(start=start, end=end) + assert off == rng.tz + + idx = pd.Index([start, end]) + assert off == idx.tz + + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_date_range_with_tz(self, tzstr): + stamp = Timestamp("3/11/2012 05:00", tz=tzstr) + assert stamp.hour == 5 + + rng = date_range("3/11/2012 04:00", periods=10, freq="h", tz=tzstr) + + assert stamp == rng[1] + + @pytest.mark.parametrize("tz", ["Europe/London", "dateutil/Europe/London"]) + def test_date_range_ambiguous_endpoint(self, tz): + # construction with an ambiguous end-point + # GH#11626 + + with pytest.raises(pytz.AmbiguousTimeError, match="Cannot infer dst time"): + date_range( + "2013-10-26 23:00", "2013-10-27 01:00", tz="Europe/London", freq="h" + ) + + times = date_range( + "2013-10-26 23:00", "2013-10-27 01:00", freq="h", tz=tz, ambiguous="infer" + ) + assert times[0] == Timestamp("2013-10-26 23:00", tz=tz) + assert times[-1] == Timestamp("2013-10-27 01:00:00+0000", tz=tz) + + @pytest.mark.parametrize( + "tz, option, expected", + [ + ["US/Pacific", "shift_forward", "2019-03-10 03:00"], + ["dateutil/US/Pacific", "shift_forward", "2019-03-10 03:00"], + ["US/Pacific", "shift_backward", "2019-03-10 01:00"], + ["dateutil/US/Pacific", "shift_backward", "2019-03-10 01:00"], + ["US/Pacific", timedelta(hours=1), "2019-03-10 03:00"], + ], + ) + def test_date_range_nonexistent_endpoint(self, tz, option, expected): + # construction with an nonexistent end-point + + with pytest.raises(pytz.NonExistentTimeError, match="2019-03-10 02:00:00"): + date_range( + "2019-03-10 00:00", "2019-03-10 02:00", tz="US/Pacific", freq="h" + ) + + times = date_range( + "2019-03-10 00:00", "2019-03-10 02:00", freq="h", tz=tz, nonexistent=option + ) + assert times[-1] == Timestamp(expected, tz=tz) + + +class TestGenRangeGeneration: + @pytest.mark.parametrize( + "freqstr,offset", + [ + ("B", BDay()), + ("C", CDay()), + ], + ) + def test_generate(self, freqstr, offset): + rng1 = list(generate_range(START, END, periods=None, offset=offset, unit="ns")) + rng2 = list(generate_range(START, END, 
periods=None, offset=freqstr, unit="ns")) + assert rng1 == rng2 + + def test_1(self): + rng = list( + generate_range( + start=datetime(2009, 3, 25), + end=None, + periods=2, + offset=BDay(), + unit="ns", + ) + ) + expected = [datetime(2009, 3, 25), datetime(2009, 3, 26)] + assert rng == expected + + def test_2(self): + rng = list( + generate_range( + start=datetime(2008, 1, 1), + end=datetime(2008, 1, 3), + periods=None, + offset=BDay(), + unit="ns", + ) + ) + expected = [datetime(2008, 1, 1), datetime(2008, 1, 2), datetime(2008, 1, 3)] + assert rng == expected + + def test_3(self): + rng = list( + generate_range( + start=datetime(2008, 1, 5), + end=datetime(2008, 1, 6), + periods=None, + offset=BDay(), + unit="ns", + ) + ) + expected = [] + assert rng == expected + + def test_precision_finer_than_offset(self): + # GH#9907 + result1 = date_range( + start="2015-04-15 00:00:03", end="2016-04-22 00:00:00", freq="QE" + ) + result2 = date_range( + start="2015-04-15 00:00:03", end="2015-06-22 00:00:04", freq="W" + ) + expected1_list = [ + "2015-06-30 00:00:03", + "2015-09-30 00:00:03", + "2015-12-31 00:00:03", + "2016-03-31 00:00:03", + ] + expected2_list = [ + "2015-04-19 00:00:03", + "2015-04-26 00:00:03", + "2015-05-03 00:00:03", + "2015-05-10 00:00:03", + "2015-05-17 00:00:03", + "2015-05-24 00:00:03", + "2015-05-31 00:00:03", + "2015-06-07 00:00:03", + "2015-06-14 00:00:03", + "2015-06-21 00:00:03", + ] + expected1 = DatetimeIndex( + expected1_list, dtype="datetime64[ns]", freq="QE-DEC", tz=None + ) + expected2 = DatetimeIndex( + expected2_list, dtype="datetime64[ns]", freq="W-SUN", tz=None + ) + tm.assert_index_equal(result1, expected1) + tm.assert_index_equal(result2, expected2) + + dt1, dt2 = "2017-01-01", "2017-01-01" + tz1, tz2 = "US/Eastern", "Europe/London" + + @pytest.mark.parametrize( + "start,end", + [ + (Timestamp(dt1, tz=tz1), Timestamp(dt2)), + (Timestamp(dt1), Timestamp(dt2, tz=tz2)), + (Timestamp(dt1, tz=tz1), Timestamp(dt2, tz=tz2)), + (Timestamp(dt1, tz=tz2), Timestamp(dt2, tz=tz1)), + ], + ) + def test_mismatching_tz_raises_err(self, start, end): + # issue 18488 + msg = "Start and end cannot both be tz-aware with different timezones" + with pytest.raises(TypeError, match=msg): + date_range(start, end) + with pytest.raises(TypeError, match=msg): + date_range(start, end, freq=BDay()) + + +class TestBusinessDateRange: + def test_constructor(self): + bdate_range(START, END, freq=BDay()) + bdate_range(START, periods=20, freq=BDay()) + bdate_range(end=START, periods=20, freq=BDay()) + + msg = "periods must be a number, got B" + with pytest.raises(TypeError, match=msg): + date_range("2011-1-1", "2012-1-1", "B") + + with pytest.raises(TypeError, match=msg): + bdate_range("2011-1-1", "2012-1-1", "B") + + msg = "freq must be specified for bdate_range; use date_range instead" + with pytest.raises(TypeError, match=msg): + bdate_range(START, END, periods=10, freq=None) + + def test_misc(self): + end = datetime(2009, 5, 13) + dr = bdate_range(end=end, periods=20) + firstDate = end - 19 * BDay() + + assert len(dr) == 20 + assert dr[0] == firstDate + assert dr[-1] == end + + def test_date_parse_failure(self): + badly_formed_date = "2007/100/1" + + msg = "Unknown datetime string format, unable to parse: 2007/100/1" + with pytest.raises(ValueError, match=msg): + Timestamp(badly_formed_date) + + with pytest.raises(ValueError, match=msg): + bdate_range(start=badly_formed_date, periods=10) + + with pytest.raises(ValueError, match=msg): + bdate_range(end=badly_formed_date, periods=10) + + with 
pytest.raises(ValueError, match=msg): + bdate_range(badly_formed_date, badly_formed_date) + + def test_daterange_bug_456(self): + # GH #456 + rng1 = bdate_range("12/5/2011", "12/5/2011") + rng2 = bdate_range("12/2/2011", "12/5/2011") + assert rng2._data.freq == BDay() + + result = rng1.union(rng2) + assert isinstance(result, DatetimeIndex) + + @pytest.mark.parametrize("inclusive", ["left", "right", "neither", "both"]) + def test_bdays_and_open_boundaries(self, inclusive): + # GH 6673 + start = "2018-07-21" # Saturday + end = "2018-07-29" # Sunday + result = date_range(start, end, freq="B", inclusive=inclusive) + + bday_start = "2018-07-23" # Monday + bday_end = "2018-07-27" # Friday + expected = date_range(bday_start, bday_end, freq="D") + tm.assert_index_equal(result, expected) + # Note: we do _not_ expect the freqs to match here + + def test_bday_near_overflow(self): + # GH#24252 avoid doing unnecessary addition that _would_ overflow + start = Timestamp.max.floor("D").to_pydatetime() + rng = date_range(start, end=None, periods=1, freq="B") + expected = DatetimeIndex([start], freq="B").as_unit("ns") + tm.assert_index_equal(rng, expected) + + def test_bday_overflow_error(self): + # GH#24252 check that we get OutOfBoundsDatetime and not OverflowError + msg = "Out of bounds nanosecond timestamp" + start = Timestamp.max.floor("D").to_pydatetime() + with pytest.raises(OutOfBoundsDatetime, match=msg): + date_range(start, periods=2, freq="B") + + +class TestCustomDateRange: + def test_constructor(self): + bdate_range(START, END, freq=CDay()) + bdate_range(START, periods=20, freq=CDay()) + bdate_range(end=START, periods=20, freq=CDay()) + + msg = "periods must be a number, got C" + with pytest.raises(TypeError, match=msg): + date_range("2011-1-1", "2012-1-1", "C") + + with pytest.raises(TypeError, match=msg): + bdate_range("2011-1-1", "2012-1-1", "C") + + def test_misc(self): + end = datetime(2009, 5, 13) + dr = bdate_range(end=end, periods=20, freq="C") + firstDate = end - 19 * CDay() + + assert len(dr) == 20 + assert dr[0] == firstDate + assert dr[-1] == end + + def test_daterange_bug_456(self): + # GH #456 + rng1 = bdate_range("12/5/2011", "12/5/2011", freq="C") + rng2 = bdate_range("12/2/2011", "12/5/2011", freq="C") + assert rng2._data.freq == CDay() + + result = rng1.union(rng2) + assert isinstance(result, DatetimeIndex) + + def test_cdaterange(self, unit): + result = bdate_range("2013-05-01", periods=3, freq="C", unit=unit) + expected = DatetimeIndex( + ["2013-05-01", "2013-05-02", "2013-05-03"], dtype=f"M8[{unit}]", freq="C" + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + def test_cdaterange_weekmask(self, unit): + result = bdate_range( + "2013-05-01", periods=3, freq="C", weekmask="Sun Mon Tue Wed Thu", unit=unit + ) + expected = DatetimeIndex( + ["2013-05-01", "2013-05-02", "2013-05-05"], + dtype=f"M8[{unit}]", + freq=result.freq, + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + # raise with non-custom freq + msg = ( + "a custom frequency string is required when holidays or " + "weekmask are passed, got frequency B" + ) + with pytest.raises(ValueError, match=msg): + bdate_range("2013-05-01", periods=3, weekmask="Sun Mon Tue Wed Thu") + + def test_cdaterange_holidays(self, unit): + result = bdate_range( + "2013-05-01", periods=3, freq="C", holidays=["2013-05-01"], unit=unit + ) + expected = DatetimeIndex( + ["2013-05-02", "2013-05-03", "2013-05-06"], + dtype=f"M8[{unit}]", + freq=result.freq, + ) + 
tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + # raise with non-custom freq + msg = ( + "a custom frequency string is required when holidays or " + "weekmask are passed, got frequency B" + ) + with pytest.raises(ValueError, match=msg): + bdate_range("2013-05-01", periods=3, holidays=["2013-05-01"]) + + def test_cdaterange_weekmask_and_holidays(self, unit): + result = bdate_range( + "2013-05-01", + periods=3, + freq="C", + weekmask="Sun Mon Tue Wed Thu", + holidays=["2013-05-01"], + unit=unit, + ) + expected = DatetimeIndex( + ["2013-05-02", "2013-05-05", "2013-05-06"], + dtype=f"M8[{unit}]", + freq=result.freq, + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + def test_cdaterange_holidays_weekmask_requires_freqstr(self): + # raise with non-custom freq + msg = ( + "a custom frequency string is required when holidays or " + "weekmask are passed, got frequency B" + ) + with pytest.raises(ValueError, match=msg): + bdate_range( + "2013-05-01", + periods=3, + weekmask="Sun Mon Tue Wed Thu", + holidays=["2013-05-01"], + ) + + @pytest.mark.parametrize( + "freq", [freq for freq in prefix_mapping if freq.startswith("C")] + ) + def test_all_custom_freq(self, freq): + # should not raise + bdate_range( + START, END, freq=freq, weekmask="Mon Wed Fri", holidays=["2009-03-14"] + ) + + bad_freq = freq + "FOO" + msg = f"invalid custom frequency string: {bad_freq}" + with pytest.raises(ValueError, match=msg): + bdate_range(START, END, freq=bad_freq) + + @pytest.mark.parametrize( + "start_end", + [ + ("2018-01-01T00:00:01.000Z", "2018-01-03T00:00:01.000Z"), + ("2018-01-01T00:00:00.010Z", "2018-01-03T00:00:00.010Z"), + ("2001-01-01T00:00:00.010Z", "2001-01-03T00:00:00.010Z"), + ], + ) + def test_range_with_millisecond_resolution(self, start_end): + # https://github.com/pandas-dev/pandas/issues/24110 + start, end = start_end + result = date_range(start=start, end=end, periods=2, inclusive="left") + expected = DatetimeIndex([start], dtype="M8[ns, UTC]") + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "start,period,expected", + [ + ("2022-07-23 00:00:00+02:00", 1, ["2022-07-25 00:00:00+02:00"]), + ("2022-07-22 00:00:00+02:00", 1, ["2022-07-22 00:00:00+02:00"]), + ( + "2022-07-22 00:00:00+02:00", + 2, + ["2022-07-22 00:00:00+02:00", "2022-07-25 00:00:00+02:00"], + ), + ], + ) + def test_range_with_timezone_and_custombusinessday(self, start, period, expected): + # GH49441 + result = date_range(start=start, periods=period, freq="C") + expected = DatetimeIndex(expected).as_unit("ns") + tm.assert_index_equal(result, expected) + + +class TestDateRangeNonNano: + def test_date_range_reso_validation(self): + msg = "'unit' must be one of 's', 'ms', 'us', 'ns'" + with pytest.raises(ValueError, match=msg): + date_range("2016-01-01", "2016-03-04", periods=3, unit="h") + + def test_date_range_freq_higher_than_reso(self): + # freq being higher-resolution than reso is a problem + msg = "Use a lower freq or a higher unit instead" + with pytest.raises(ValueError, match=msg): + # # TODO give a more useful or informative message? 
+ date_range("2016-01-01", "2016-01-02", freq="ns", unit="ms") + + def test_date_range_freq_matches_reso(self): + # GH#49106 matching reso is OK + dti = date_range("2016-01-01", "2016-01-01 00:00:01", freq="ms", unit="ms") + rng = np.arange(1_451_606_400_000, 1_451_606_401_001, dtype=np.int64) + expected = DatetimeIndex(rng.view("M8[ms]"), freq="ms") + tm.assert_index_equal(dti, expected) + + dti = date_range("2016-01-01", "2016-01-01 00:00:01", freq="us", unit="us") + rng = np.arange(1_451_606_400_000_000, 1_451_606_401_000_001, dtype=np.int64) + expected = DatetimeIndex(rng.view("M8[us]"), freq="us") + tm.assert_index_equal(dti, expected) + + dti = date_range("2016-01-01", "2016-01-01 00:00:00.001", freq="ns", unit="ns") + rng = np.arange( + 1_451_606_400_000_000_000, 1_451_606_400_001_000_001, dtype=np.int64 + ) + expected = DatetimeIndex(rng.view("M8[ns]"), freq="ns") + tm.assert_index_equal(dti, expected) + + def test_date_range_freq_lower_than_endpoints(self): + start = Timestamp("2022-10-19 11:50:44.719781") + end = Timestamp("2022-10-19 11:50:47.066458") + + # start and end cannot be cast to "s" unit without lossy rounding, + # so we do not allow this in date_range + with pytest.raises(ValueError, match="Cannot losslessly convert units"): + date_range(start, end, periods=3, unit="s") + + # but we can losslessly cast to "us" + dti = date_range(start, end, periods=2, unit="us") + rng = np.array( + [start.as_unit("us")._value, end.as_unit("us")._value], dtype=np.int64 + ) + expected = DatetimeIndex(rng.view("M8[us]")) + tm.assert_index_equal(dti, expected) + + def test_date_range_non_nano(self): + start = np.datetime64("1066-10-14") # Battle of Hastings + end = np.datetime64("2305-07-13") # Jean-Luc Picard's birthday + + dti = date_range(start, end, freq="D", unit="s") + assert dti.freq == "D" + assert dti.dtype == "M8[s]" + + exp = np.arange( + start.astype("M8[s]").view("i8"), + (end + 1).astype("M8[s]").view("i8"), + 24 * 3600, + ).view("M8[s]") + + tm.assert_numpy_array_equal(dti.to_numpy(), exp) + + +class TestDateRangeNonTickFreq: + # Tests revolving around less-common (non-Tick) `freq` keywords. 
+ + def test_date_range_custom_business_month_begin(self, unit): + hcal = USFederalHolidayCalendar() + freq = offsets.CBMonthBegin(calendar=hcal) + dti = date_range(start="20120101", end="20130101", freq=freq, unit=unit) + assert all(freq.is_on_offset(x) for x in dti) + + expected = DatetimeIndex( + [ + "2012-01-03", + "2012-02-01", + "2012-03-01", + "2012-04-02", + "2012-05-01", + "2012-06-01", + "2012-07-02", + "2012-08-01", + "2012-09-04", + "2012-10-01", + "2012-11-01", + "2012-12-03", + ], + dtype=f"M8[{unit}]", + freq=freq, + ) + tm.assert_index_equal(dti, expected) + + def test_date_range_custom_business_month_end(self, unit): + hcal = USFederalHolidayCalendar() + freq = offsets.CBMonthEnd(calendar=hcal) + dti = date_range(start="20120101", end="20130101", freq=freq, unit=unit) + assert all(freq.is_on_offset(x) for x in dti) + + expected = DatetimeIndex( + [ + "2012-01-31", + "2012-02-29", + "2012-03-30", + "2012-04-30", + "2012-05-31", + "2012-06-29", + "2012-07-31", + "2012-08-31", + "2012-09-28", + "2012-10-31", + "2012-11-30", + "2012-12-31", + ], + dtype=f"M8[{unit}]", + freq=freq, + ) + tm.assert_index_equal(dti, expected) + + def test_date_range_with_custom_holidays(self, unit): + # GH#30593 + freq = offsets.CustomBusinessHour(start="15:00", holidays=["2020-11-26"]) + result = date_range(start="2020-11-25 15:00", periods=4, freq=freq, unit=unit) + expected = DatetimeIndex( + [ + "2020-11-25 15:00:00", + "2020-11-25 16:00:00", + "2020-11-27 15:00:00", + "2020-11-27 16:00:00", + ], + dtype=f"M8[{unit}]", + freq=freq, + ) + tm.assert_index_equal(result, expected) + + def test_date_range_businesshour(self, unit): + idx = DatetimeIndex( + [ + "2014-07-04 09:00", + "2014-07-04 10:00", + "2014-07-04 11:00", + "2014-07-04 12:00", + "2014-07-04 13:00", + "2014-07-04 14:00", + "2014-07-04 15:00", + "2014-07-04 16:00", + ], + dtype=f"M8[{unit}]", + freq="bh", + ) + rng = date_range("2014-07-04 09:00", "2014-07-04 16:00", freq="bh", unit=unit) + tm.assert_index_equal(idx, rng) + + idx = DatetimeIndex( + ["2014-07-04 16:00", "2014-07-07 09:00"], dtype=f"M8[{unit}]", freq="bh" + ) + rng = date_range("2014-07-04 16:00", "2014-07-07 09:00", freq="bh", unit=unit) + tm.assert_index_equal(idx, rng) + + idx = DatetimeIndex( + [ + "2014-07-04 09:00", + "2014-07-04 10:00", + "2014-07-04 11:00", + "2014-07-04 12:00", + "2014-07-04 13:00", + "2014-07-04 14:00", + "2014-07-04 15:00", + "2014-07-04 16:00", + "2014-07-07 09:00", + "2014-07-07 10:00", + "2014-07-07 11:00", + "2014-07-07 12:00", + "2014-07-07 13:00", + "2014-07-07 14:00", + "2014-07-07 15:00", + "2014-07-07 16:00", + "2014-07-08 09:00", + "2014-07-08 10:00", + "2014-07-08 11:00", + "2014-07-08 12:00", + "2014-07-08 13:00", + "2014-07-08 14:00", + "2014-07-08 15:00", + "2014-07-08 16:00", + ], + dtype=f"M8[{unit}]", + freq="bh", + ) + rng = date_range("2014-07-04 09:00", "2014-07-08 16:00", freq="bh", unit=unit) + tm.assert_index_equal(idx, rng) + + def test_date_range_business_hour2(self, unit): + idx1 = date_range( + start="2014-07-04 15:00", end="2014-07-08 10:00", freq="bh", unit=unit + ) + idx2 = date_range(start="2014-07-04 15:00", periods=12, freq="bh", unit=unit) + idx3 = date_range(end="2014-07-08 10:00", periods=12, freq="bh", unit=unit) + expected = DatetimeIndex( + [ + "2014-07-04 15:00", + "2014-07-04 16:00", + "2014-07-07 09:00", + "2014-07-07 10:00", + "2014-07-07 11:00", + "2014-07-07 12:00", + "2014-07-07 13:00", + "2014-07-07 14:00", + "2014-07-07 15:00", + "2014-07-07 16:00", + "2014-07-08 09:00", + "2014-07-08 10:00", 
+ ], + dtype=f"M8[{unit}]", + freq="bh", + ) + tm.assert_index_equal(idx1, expected) + tm.assert_index_equal(idx2, expected) + tm.assert_index_equal(idx3, expected) + + idx4 = date_range( + start="2014-07-04 15:45", end="2014-07-08 10:45", freq="bh", unit=unit + ) + idx5 = date_range(start="2014-07-04 15:45", periods=12, freq="bh", unit=unit) + idx6 = date_range(end="2014-07-08 10:45", periods=12, freq="bh", unit=unit) + + expected2 = expected + Timedelta(minutes=45).as_unit(unit) + expected2.freq = "bh" + tm.assert_index_equal(idx4, expected2) + tm.assert_index_equal(idx5, expected2) + tm.assert_index_equal(idx6, expected2) + + def test_date_range_business_hour_short(self, unit): + # GH#49835 + idx4 = date_range(start="2014-07-01 10:00", freq="bh", periods=1, unit=unit) + expected4 = DatetimeIndex(["2014-07-01 10:00"], dtype=f"M8[{unit}]", freq="bh") + tm.assert_index_equal(idx4, expected4) + + def test_date_range_year_start(self, unit): + # see GH#9313 + rng = date_range("1/1/2013", "7/1/2017", freq="YS", unit=unit) + exp = DatetimeIndex( + ["2013-01-01", "2014-01-01", "2015-01-01", "2016-01-01", "2017-01-01"], + dtype=f"M8[{unit}]", + freq="YS", + ) + tm.assert_index_equal(rng, exp) + + def test_date_range_year_end(self, unit): + # see GH#9313 + rng = date_range("1/1/2013", "7/1/2017", freq="YE", unit=unit) + exp = DatetimeIndex( + ["2013-12-31", "2014-12-31", "2015-12-31", "2016-12-31"], + dtype=f"M8[{unit}]", + freq="YE", + ) + tm.assert_index_equal(rng, exp) + + def test_date_range_negative_freq_year_end(self, unit): + # GH#11018 + rng = date_range("2011-12-31", freq="-2YE", periods=3, unit=unit) + exp = DatetimeIndex( + ["2011-12-31", "2009-12-31", "2007-12-31"], dtype=f"M8[{unit}]", freq="-2YE" + ) + tm.assert_index_equal(rng, exp) + assert rng.freq == "-2YE" + + def test_date_range_business_year_end_year(self, unit): + # see GH#9313 + rng = date_range("1/1/2013", "7/1/2017", freq="BYE", unit=unit) + exp = DatetimeIndex( + ["2013-12-31", "2014-12-31", "2015-12-31", "2016-12-30"], + dtype=f"M8[{unit}]", + freq="BYE", + ) + tm.assert_index_equal(rng, exp) + + def test_date_range_bms(self, unit): + # GH#1645 + result = date_range("1/1/2000", periods=10, freq="BMS", unit=unit) + + expected = DatetimeIndex( + [ + "2000-01-03", + "2000-02-01", + "2000-03-01", + "2000-04-03", + "2000-05-01", + "2000-06-01", + "2000-07-03", + "2000-08-01", + "2000-09-01", + "2000-10-02", + ], + dtype=f"M8[{unit}]", + freq="BMS", + ) + tm.assert_index_equal(result, expected) + + def test_date_range_semi_month_begin(self, unit): + dates = [ + datetime(2007, 12, 15), + datetime(2008, 1, 1), + datetime(2008, 1, 15), + datetime(2008, 2, 1), + datetime(2008, 2, 15), + datetime(2008, 3, 1), + datetime(2008, 3, 15), + datetime(2008, 4, 1), + datetime(2008, 4, 15), + datetime(2008, 5, 1), + datetime(2008, 5, 15), + datetime(2008, 6, 1), + datetime(2008, 6, 15), + datetime(2008, 7, 1), + datetime(2008, 7, 15), + datetime(2008, 8, 1), + datetime(2008, 8, 15), + datetime(2008, 9, 1), + datetime(2008, 9, 15), + datetime(2008, 10, 1), + datetime(2008, 10, 15), + datetime(2008, 11, 1), + datetime(2008, 11, 15), + datetime(2008, 12, 1), + datetime(2008, 12, 15), + ] + # ensure generating a range with DatetimeIndex gives same result + result = date_range(start=dates[0], end=dates[-1], freq="SMS", unit=unit) + exp = DatetimeIndex(dates, dtype=f"M8[{unit}]", freq="SMS") + tm.assert_index_equal(result, exp) + + def test_date_range_semi_month_end(self, unit): + dates = [ + datetime(2007, 12, 31), + datetime(2008, 1, 15), + 
datetime(2008, 1, 31), + datetime(2008, 2, 15), + datetime(2008, 2, 29), + datetime(2008, 3, 15), + datetime(2008, 3, 31), + datetime(2008, 4, 15), + datetime(2008, 4, 30), + datetime(2008, 5, 15), + datetime(2008, 5, 31), + datetime(2008, 6, 15), + datetime(2008, 6, 30), + datetime(2008, 7, 15), + datetime(2008, 7, 31), + datetime(2008, 8, 15), + datetime(2008, 8, 31), + datetime(2008, 9, 15), + datetime(2008, 9, 30), + datetime(2008, 10, 15), + datetime(2008, 10, 31), + datetime(2008, 11, 15), + datetime(2008, 11, 30), + datetime(2008, 12, 15), + datetime(2008, 12, 31), + ] + # ensure generating a range with DatetimeIndex gives same result + result = date_range(start=dates[0], end=dates[-1], freq="SME", unit=unit) + exp = DatetimeIndex(dates, dtype=f"M8[{unit}]", freq="SME") + tm.assert_index_equal(result, exp) + + def test_date_range_week_of_month(self, unit): + # GH#20517 + # Note the start here is not on_offset for this freq + result = date_range(start="20110101", periods=1, freq="WOM-1MON", unit=unit) + expected = DatetimeIndex(["2011-01-03"], dtype=f"M8[{unit}]", freq="WOM-1MON") + tm.assert_index_equal(result, expected) + + result2 = date_range(start="20110101", periods=2, freq="WOM-1MON", unit=unit) + expected2 = DatetimeIndex( + ["2011-01-03", "2011-02-07"], dtype=f"M8[{unit}]", freq="WOM-1MON" + ) + tm.assert_index_equal(result2, expected2) + + def test_date_range_week_of_month2(self, unit): + # GH#5115, GH#5348 + result = date_range("2013-1-1", periods=4, freq="WOM-1SAT", unit=unit) + expected = DatetimeIndex( + ["2013-01-05", "2013-02-02", "2013-03-02", "2013-04-06"], + dtype=f"M8[{unit}]", + freq="WOM-1SAT", + ) + tm.assert_index_equal(result, expected) + + def test_date_range_negative_freq_month_end(self, unit): + # GH#11018 + rng = date_range("2011-01-31", freq="-2ME", periods=3, unit=unit) + exp = DatetimeIndex( + ["2011-01-31", "2010-11-30", "2010-09-30"], dtype=f"M8[{unit}]", freq="-2ME" + ) + tm.assert_index_equal(rng, exp) + assert rng.freq == "-2ME" + + def test_date_range_fy5253(self, unit): + freq = offsets.FY5253(startingMonth=1, weekday=3, variation="nearest") + dti = date_range( + start="2013-01-01", + periods=2, + freq=freq, + unit=unit, + ) + expected = DatetimeIndex( + ["2013-01-31", "2014-01-30"], dtype=f"M8[{unit}]", freq=freq + ) + + tm.assert_index_equal(dti, expected) + + @pytest.mark.parametrize( + "freqstr,offset", + [ + ("QS", offsets.QuarterBegin(startingMonth=1)), + ("BQE", offsets.BQuarterEnd(startingMonth=12)), + ("W-SUN", offsets.Week(weekday=6)), + ], + ) + def test_date_range_freqstr_matches_offset(self, freqstr, offset): + sdate = datetime(1999, 12, 25) + edate = datetime(2000, 1, 1) + + idx1 = date_range(start=sdate, end=edate, freq=freqstr) + idx2 = date_range(start=sdate, end=edate, freq=offset) + assert len(idx1) == len(idx2) + assert idx1.freq == idx2.freq diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_datetime.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_datetime.py new file mode 100644 index 0000000000000000000000000000000000000000..f7fc64d4b01633edc011349441b1f75dd2f00cb9 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_datetime.py @@ -0,0 +1,216 @@ +import datetime as dt +from datetime import date +import re + +import numpy as np +import pytest + +from pandas.compat.numpy import np_long + +import pandas as pd +from pandas import ( + DataFrame, + DatetimeIndex, + Index, + Timestamp, + date_range, + offsets, +) 
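Editorial note (not part of the diffed files): the last test above relies on a frequency alias and its offset object being interchangeable. A minimal sketch of that equivalence, using the public `pandas.tseries.offsets` API:

```python
import pandas as pd
from pandas.tseries import offsets

# "W-SUN" and offsets.Week(weekday=6) describe the same weekly-on-Sunday offset,
# so both spellings produce identical ranges with equal freq.
by_alias = pd.date_range("1999-12-25", "2000-01-01", freq="W-SUN")
by_offset = pd.date_range("1999-12-25", "2000-01-01", freq=offsets.Week(weekday=6))
assert by_alias.equals(by_offset)
assert by_alias.freq == by_offset.freq
```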
+import pandas._testing as tm + + +class TestDatetimeIndex: + def test_is_(self): + dti = date_range(start="1/1/2005", end="12/1/2005", freq="ME") + assert dti.is_(dti) + assert dti.is_(dti.view()) + assert not dti.is_(dti.copy()) + + def test_time_overflow_for_32bit_machines(self): + # GH8943. On some machines NumPy defaults to np.int32 (for example, + # 32-bit Linux machines). In the function _generate_regular_range + # found in tseries/index.py, `periods` gets multiplied by `strides` + # (which has value 1e9) and since the max value for np.int32 is ~2e9, + # and since those machines won't promote np.int32 to np.int64, we get + # overflow. + periods = np_long(1000) + + idx1 = date_range(start="2000", periods=periods, freq="s") + assert len(idx1) == periods + + idx2 = date_range(end="2000", periods=periods, freq="s") + assert len(idx2) == periods + + def test_nat(self): + assert DatetimeIndex([np.nan])[0] is pd.NaT + + def test_week_of_month_frequency(self): + # GH 5348: "ValueError: Could not evaluate WOM-1SUN" shouldn't raise + d1 = date(2002, 9, 1) + d2 = date(2013, 10, 27) + d3 = date(2012, 9, 30) + idx1 = DatetimeIndex([d1, d2]) + idx2 = DatetimeIndex([d3]) + result_append = idx1.append(idx2) + expected = DatetimeIndex([d1, d2, d3]) + tm.assert_index_equal(result_append, expected) + result_union = idx1.union(idx2) + expected = DatetimeIndex([d1, d3, d2]) + tm.assert_index_equal(result_union, expected) + + def test_append_nondatetimeindex(self): + rng = date_range("1/1/2000", periods=10) + idx = Index(["a", "b", "c", "d"]) + + result = rng.append(idx) + assert isinstance(result[0], Timestamp) + + def test_misc_coverage(self): + rng = date_range("1/1/2000", periods=5) + result = rng.groupby(rng.day) + assert isinstance(next(iter(result.values()))[0], Timestamp) + + # TODO: belongs in frame groupby tests? + def test_groupby_function_tuple_1677(self): + df = DataFrame( + np.random.default_rng(2).random(100), + index=date_range("1/1/2000", periods=100), + ) + monthly_group = df.groupby(lambda x: (x.year, x.month)) + + result = monthly_group.mean() + assert isinstance(result.index[0], tuple) + + def assert_index_parameters(self, index): + assert index.freq == "40960ns" + assert index.inferred_freq == "40960ns" + + def test_ns_index(self): + nsamples = 400 + ns = int(1e9 / 24414) + dtstart = np.datetime64("2012-09-20T00:00:00") + + dt = dtstart + np.arange(nsamples) * np.timedelta64(ns, "ns") + freq = ns * offsets.Nano() + index = DatetimeIndex(dt, freq=freq, name="time") + self.assert_index_parameters(index) + + new_index = date_range(start=index[0], end=index[-1], freq=index.freq) + self.assert_index_parameters(new_index) + + def test_asarray_tz_naive(self): + # This shouldn't produce a warning. 
+ idx = date_range("2000", periods=2) + # M8[ns] by default + result = np.asarray(idx) + + expected = np.array(["2000-01-01", "2000-01-02"], dtype="M8[ns]") + tm.assert_numpy_array_equal(result, expected) + + # optionally, object + result = np.asarray(idx, dtype=object) + + expected = np.array([Timestamp("2000-01-01"), Timestamp("2000-01-02")]) + tm.assert_numpy_array_equal(result, expected) + + def test_asarray_tz_aware(self): + tz = "US/Central" + idx = date_range("2000", periods=2, tz=tz) + expected = np.array(["2000-01-01T06", "2000-01-02T06"], dtype="M8[ns]") + result = np.asarray(idx, dtype="datetime64[ns]") + + tm.assert_numpy_array_equal(result, expected) + + # Old behavior with no warning + result = np.asarray(idx, dtype="M8[ns]") + + tm.assert_numpy_array_equal(result, expected) + + # Future behavior with no warning + expected = np.array( + [Timestamp("2000-01-01", tz=tz), Timestamp("2000-01-02", tz=tz)] + ) + result = np.asarray(idx, dtype=object) + + tm.assert_numpy_array_equal(result, expected) + + def test_CBH_deprecated(self): + msg = "'CBH' is deprecated and will be removed in a future version." + + with tm.assert_produces_warning(FutureWarning, match=msg): + expected = date_range( + dt.datetime(2022, 12, 11), dt.datetime(2022, 12, 13), freq="CBH" + ) + result = DatetimeIndex( + [ + "2022-12-12 09:00:00", + "2022-12-12 10:00:00", + "2022-12-12 11:00:00", + "2022-12-12 12:00:00", + "2022-12-12 13:00:00", + "2022-12-12 14:00:00", + "2022-12-12 15:00:00", + "2022-12-12 16:00:00", + ], + dtype="datetime64[ns]", + freq="cbh", + ) + + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "freq_depr, expected_values, expected_freq", + [ + ( + "AS-AUG", + ["2021-08-01", "2022-08-01", "2023-08-01"], + "YS-AUG", + ), + ( + "1BAS-MAY", + ["2021-05-03", "2022-05-02", "2023-05-01"], + "1BYS-MAY", + ), + ], + ) + def test_AS_BAS_deprecated(self, freq_depr, expected_values, expected_freq): + # GH#55479 + freq_msg = re.split("[0-9]*", freq_depr, maxsplit=1)[1] + msg = f"'{freq_msg}' is deprecated and will be removed in a future version." + + with tm.assert_produces_warning(FutureWarning, match=msg): + expected = date_range( + dt.datetime(2020, 12, 1), dt.datetime(2023, 12, 1), freq=freq_depr + ) + result = DatetimeIndex( + expected_values, + dtype="datetime64[ns]", + freq=expected_freq, + ) + + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "freq, expected_values, freq_depr", + [ + ("2BYE-MAR", ["2016-03-31"], "2BA-MAR"), + ("2BYE-JUN", ["2016-06-30"], "2BY-JUN"), + ("2BME", ["2016-02-29", "2016-04-29", "2016-06-30"], "2BM"), + ("2BQE", ["2016-03-31"], "2BQ"), + ("1BQE-MAR", ["2016-03-31", "2016-06-30"], "1BQ-MAR"), + ], + ) + def test_BM_BQ_BY_deprecated(self, freq, expected_values, freq_depr): + # GH#52064 + msg = f"'{freq_depr[1:]}' is deprecated and will be removed " + f"in a future version, please use '{freq[1:]}' instead." 
+ + with tm.assert_produces_warning(FutureWarning, match=msg): + expected = date_range(start="2016-02-21", end="2016-08-21", freq=freq_depr) + result = DatetimeIndex( + data=expected_values, + dtype="datetime64[ns]", + freq=freq, + ) + + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_formats.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_formats.py new file mode 100644 index 0000000000000000000000000000000000000000..b52eed8c509c6e655425eb5b9be3351f369fee4d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_formats.py @@ -0,0 +1,356 @@ +from datetime import datetime + +import dateutil.tz +import numpy as np +import pytest +import pytz + +import pandas as pd +from pandas import ( + DatetimeIndex, + NaT, + Series, +) +import pandas._testing as tm + + +@pytest.fixture(params=["s", "ms", "us", "ns"]) +def unit(request): + return request.param + + +def test_get_values_for_csv(): + index = pd.date_range(freq="1D", periods=3, start="2017-01-01") + + # First, with no arguments. + expected = np.array(["2017-01-01", "2017-01-02", "2017-01-03"], dtype=object) + + result = index._get_values_for_csv() + tm.assert_numpy_array_equal(result, expected) + + # No NaN values, so na_rep has no effect + result = index._get_values_for_csv(na_rep="pandas") + tm.assert_numpy_array_equal(result, expected) + + # Make sure date formatting works + expected = np.array(["01-2017-01", "01-2017-02", "01-2017-03"], dtype=object) + + result = index._get_values_for_csv(date_format="%m-%Y-%d") + tm.assert_numpy_array_equal(result, expected) + + # NULL object handling should work + index = DatetimeIndex(["2017-01-01", NaT, "2017-01-03"]) + expected = np.array(["2017-01-01", "NaT", "2017-01-03"], dtype=object) + + result = index._get_values_for_csv(na_rep="NaT") + tm.assert_numpy_array_equal(result, expected) + + expected = np.array(["2017-01-01", "pandas", "2017-01-03"], dtype=object) + + result = index._get_values_for_csv(na_rep="pandas") + tm.assert_numpy_array_equal(result, expected) + + result = index._get_values_for_csv(na_rep="NaT", date_format="%Y-%m-%d %H:%M:%S.%f") + expected = np.array( + ["2017-01-01 00:00:00.000000", "NaT", "2017-01-03 00:00:00.000000"], + dtype=object, + ) + tm.assert_numpy_array_equal(result, expected) + + # invalid format + result = index._get_values_for_csv(na_rep="NaT", date_format="foo") + expected = np.array(["foo", "NaT", "foo"], dtype=object) + tm.assert_numpy_array_equal(result, expected) + + +class TestDatetimeIndexRendering: + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_dti_with_timezone_repr(self, tzstr): + rng = pd.date_range("4/13/2010", "5/6/2010") + + rng_eastern = rng.tz_localize(tzstr) + + rng_repr = repr(rng_eastern) + assert "2010-04-13 00:00:00" in rng_repr + + def test_dti_repr_dates(self): + text = str(pd.to_datetime([datetime(2013, 1, 1), datetime(2014, 1, 1)])) + assert "['2013-01-01'," in text + assert ", '2014-01-01']" in text + + def test_dti_repr_mixed(self): + text = str( + pd.to_datetime( + [datetime(2013, 1, 1), datetime(2014, 1, 1, 12), datetime(2014, 1, 1)] + ) + ) + assert "'2013-01-01 00:00:00'," in text + assert "'2014-01-01 00:00:00']" in text + + def test_dti_repr_short(self): + dr = pd.date_range(start="1/1/2012", periods=1) + repr(dr) + + dr = pd.date_range(start="1/1/2012", periods=2) + repr(dr) + + dr = pd.date_range(start="1/1/2012", periods=3) + repr(dr) 
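Editorial note (not part of the diffed file): the parametrized case that follows builds the expected repr for each resolution by swapping the unit into the dtype string. A minimal sketch of why that works, assuming the public `DatetimeIndex.as_unit` API available in pandas ≥ 2.0:

```python
import pandas as pd

dti = pd.DatetimeIndex(["2012-01-01 00:00:00"], freq="60min")
# Converting the resolution only changes the dtype shown in the repr;
# the values and the freq string stay the same.
print(repr(dti))                # ..., dtype='datetime64[ns]', freq='60min')
print(repr(dti.as_unit("s")))   # ..., dtype='datetime64[s]', freq='60min')
```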
+ + @pytest.mark.parametrize( + "dates, freq, expected_repr", + [ + ( + ["2012-01-01 00:00:00"], + "60min", + ( + "DatetimeIndex(['2012-01-01 00:00:00'], " + "dtype='datetime64[ns]', freq='60min')" + ), + ), + ( + ["2012-01-01 00:00:00", "2012-01-01 01:00:00"], + "60min", + "DatetimeIndex(['2012-01-01 00:00:00', '2012-01-01 01:00:00'], " + "dtype='datetime64[ns]', freq='60min')", + ), + ( + ["2012-01-01"], + "24h", + "DatetimeIndex(['2012-01-01'], dtype='datetime64[ns]', freq='24h')", + ), + ], + ) + def test_dti_repr_time_midnight(self, dates, freq, expected_repr, unit): + # GH53634 + dti = DatetimeIndex(dates, freq).as_unit(unit) + actual_repr = repr(dti) + assert actual_repr == expected_repr.replace("[ns]", f"[{unit}]") + + def test_dti_representation(self, unit): + idxs = [] + idxs.append(DatetimeIndex([], freq="D")) + idxs.append(DatetimeIndex(["2011-01-01"], freq="D")) + idxs.append(DatetimeIndex(["2011-01-01", "2011-01-02"], freq="D")) + idxs.append(DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D")) + idxs.append( + DatetimeIndex( + ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], + freq="h", + tz="Asia/Tokyo", + ) + ) + idxs.append( + DatetimeIndex( + ["2011-01-01 09:00", "2011-01-01 10:00", NaT], tz="US/Eastern" + ) + ) + idxs.append( + DatetimeIndex(["2011-01-01 09:00", "2011-01-01 10:00", NaT], tz="UTC") + ) + + exp = [] + exp.append("DatetimeIndex([], dtype='datetime64[ns]', freq='D')") + exp.append("DatetimeIndex(['2011-01-01'], dtype='datetime64[ns]', freq='D')") + exp.append( + "DatetimeIndex(['2011-01-01', '2011-01-02'], " + "dtype='datetime64[ns]', freq='D')" + ) + exp.append( + "DatetimeIndex(['2011-01-01', '2011-01-02', '2011-01-03'], " + "dtype='datetime64[ns]', freq='D')" + ) + exp.append( + "DatetimeIndex(['2011-01-01 09:00:00+09:00', " + "'2011-01-01 10:00:00+09:00', '2011-01-01 11:00:00+09:00']" + ", dtype='datetime64[ns, Asia/Tokyo]', freq='h')" + ) + exp.append( + "DatetimeIndex(['2011-01-01 09:00:00-05:00', " + "'2011-01-01 10:00:00-05:00', 'NaT'], " + "dtype='datetime64[ns, US/Eastern]', freq=None)" + ) + exp.append( + "DatetimeIndex(['2011-01-01 09:00:00+00:00', " + "'2011-01-01 10:00:00+00:00', 'NaT'], " + "dtype='datetime64[ns, UTC]', freq=None)" + "" + ) + + with pd.option_context("display.width", 300): + for index, expected in zip(idxs, exp): + index = index.as_unit(unit) + expected = expected.replace("[ns", f"[{unit}") + result = repr(index) + assert result == expected + result = str(index) + assert result == expected + + # TODO: this is a Series.__repr__ test + def test_dti_representation_to_series(self, unit): + idx1 = DatetimeIndex([], freq="D") + idx2 = DatetimeIndex(["2011-01-01"], freq="D") + idx3 = DatetimeIndex(["2011-01-01", "2011-01-02"], freq="D") + idx4 = DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D") + idx5 = DatetimeIndex( + ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], + freq="h", + tz="Asia/Tokyo", + ) + idx6 = DatetimeIndex( + ["2011-01-01 09:00", "2011-01-01 10:00", NaT], tz="US/Eastern" + ) + idx7 = DatetimeIndex(["2011-01-01 09:00", "2011-01-02 10:15"]) + + exp1 = """Series([], dtype: datetime64[ns])""" + + exp2 = "0 2011-01-01\ndtype: datetime64[ns]" + + exp3 = "0 2011-01-01\n1 2011-01-02\ndtype: datetime64[ns]" + + exp4 = ( + "0 2011-01-01\n" + "1 2011-01-02\n" + "2 2011-01-03\n" + "dtype: datetime64[ns]" + ) + + exp5 = ( + "0 2011-01-01 09:00:00+09:00\n" + "1 2011-01-01 10:00:00+09:00\n" + "2 2011-01-01 11:00:00+09:00\n" + "dtype: datetime64[ns, Asia/Tokyo]" + ) + + 
exp6 = ( + "0 2011-01-01 09:00:00-05:00\n" + "1 2011-01-01 10:00:00-05:00\n" + "2 NaT\n" + "dtype: datetime64[ns, US/Eastern]" + ) + + exp7 = ( + "0 2011-01-01 09:00:00\n" + "1 2011-01-02 10:15:00\n" + "dtype: datetime64[ns]" + ) + + with pd.option_context("display.width", 300): + for idx, expected in zip( + [idx1, idx2, idx3, idx4, idx5, idx6, idx7], + [exp1, exp2, exp3, exp4, exp5, exp6, exp7], + ): + ser = Series(idx.as_unit(unit)) + result = repr(ser) + assert result == expected.replace("[ns", f"[{unit}") + + def test_dti_summary(self): + # GH#9116 + idx1 = DatetimeIndex([], freq="D") + idx2 = DatetimeIndex(["2011-01-01"], freq="D") + idx3 = DatetimeIndex(["2011-01-01", "2011-01-02"], freq="D") + idx4 = DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], freq="D") + idx5 = DatetimeIndex( + ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], + freq="h", + tz="Asia/Tokyo", + ) + idx6 = DatetimeIndex( + ["2011-01-01 09:00", "2011-01-01 10:00", NaT], tz="US/Eastern" + ) + + exp1 = "DatetimeIndex: 0 entries\nFreq: D" + + exp2 = "DatetimeIndex: 1 entries, 2011-01-01 to 2011-01-01\nFreq: D" + + exp3 = "DatetimeIndex: 2 entries, 2011-01-01 to 2011-01-02\nFreq: D" + + exp4 = "DatetimeIndex: 3 entries, 2011-01-01 to 2011-01-03\nFreq: D" + + exp5 = ( + "DatetimeIndex: 3 entries, 2011-01-01 09:00:00+09:00 " + "to 2011-01-01 11:00:00+09:00\n" + "Freq: h" + ) + + exp6 = """DatetimeIndex: 3 entries, 2011-01-01 09:00:00-05:00 to NaT""" + + for idx, expected in zip( + [idx1, idx2, idx3, idx4, idx5, idx6], [exp1, exp2, exp3, exp4, exp5, exp6] + ): + result = idx._summary() + assert result == expected + + @pytest.mark.parametrize("tz", [None, pytz.utc, dateutil.tz.tzutc()]) + @pytest.mark.parametrize("freq", ["B", "C"]) + def test_dti_business_repr_etc_smoke(self, tz, freq): + # only really care that it works + dti = pd.bdate_range( + datetime(2009, 1, 1), datetime(2010, 1, 1), tz=tz, freq=freq + ) + repr(dti) + dti._summary() + dti[2:2]._summary() + + +class TestFormat: + def test_format(self): + # GH#35439 + idx = pd.date_range("20130101", periods=5) + expected = [f"{x:%Y-%m-%d}" for x in idx] + msg = r"DatetimeIndex\.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + assert idx.format() == expected + + def test_format_with_name_time_info(self): + # bug I fixed 12/20/2011 + dates = pd.date_range("2011-01-01 04:00:00", periods=10, name="something") + + msg = "DatetimeIndex.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + formatted = dates.format(name=True) + assert formatted[0] == "something" + + def test_format_datetime_with_time(self): + dti = DatetimeIndex([datetime(2012, 2, 7), datetime(2012, 2, 7, 23)]) + + msg = "DatetimeIndex.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + result = dti.format() + expected = ["2012-02-07 00:00:00", "2012-02-07 23:00:00"] + assert len(result) == 2 + assert result == expected + + def test_format_datetime(self): + msg = "DatetimeIndex.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + formatted = pd.to_datetime([datetime(2003, 1, 1, 12), NaT]).format() + assert formatted[0] == "2003-01-01 12:00:00" + assert formatted[1] == "NaT" + + def test_format_date(self): + msg = "DatetimeIndex.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + formatted = pd.to_datetime([datetime(2003, 1, 1), NaT]).format() + assert formatted[0] == "2003-01-01" + assert formatted[1] == "NaT" + + def 
test_format_date_tz(self): + dti = pd.to_datetime([datetime(2013, 1, 1)], utc=True) + msg = "DatetimeIndex.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + formatted = dti.format() + assert formatted[0] == "2013-01-01 00:00:00+00:00" + + dti = pd.to_datetime([datetime(2013, 1, 1), NaT], utc=True) + with tm.assert_produces_warning(FutureWarning, match=msg): + formatted = dti.format() + assert formatted[0] == "2013-01-01 00:00:00+00:00" + + def test_format_date_explicit_date_format(self): + dti = pd.to_datetime([datetime(2003, 2, 1), NaT]) + msg = "DatetimeIndex.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + formatted = dti.format(date_format="%m-%d-%Y", na_rep="UT") + assert formatted[0] == "02-01-2003" + assert formatted[1] == "UT" diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_freq_attr.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_freq_attr.py new file mode 100644 index 0000000000000000000000000000000000000000..5cddf56cd1c73b3c00d8b59c6f99095ba9a704fb --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_freq_attr.py @@ -0,0 +1,61 @@ +import pytest + +from pandas import ( + DatetimeIndex, + date_range, +) + +from pandas.tseries.offsets import ( + BDay, + DateOffset, + Day, + Hour, +) + + +class TestFreq: + def test_freq_setter_errors(self): + # GH#20678 + idx = DatetimeIndex(["20180101", "20180103", "20180105"]) + + # setting with an incompatible freq + msg = ( + "Inferred frequency 2D from passed values does not conform to " + "passed frequency 5D" + ) + with pytest.raises(ValueError, match=msg): + idx._data.freq = "5D" + + # setting with non-freq string + with pytest.raises(ValueError, match="Invalid frequency"): + idx._data.freq = "foo" + + @pytest.mark.parametrize("values", [["20180101", "20180103", "20180105"], []]) + @pytest.mark.parametrize("freq", ["2D", Day(2), "2B", BDay(2), "48h", Hour(48)]) + @pytest.mark.parametrize("tz", [None, "US/Eastern"]) + def test_freq_setter(self, values, freq, tz): + # GH#20678 + idx = DatetimeIndex(values, tz=tz) + + # can set to an offset, converting from string if necessary + idx._data.freq = freq + assert idx.freq == freq + assert isinstance(idx.freq, DateOffset) + + # can reset to None + idx._data.freq = None + assert idx.freq is None + + def test_freq_view_safe(self): + # Setting the freq for one DatetimeIndex shouldn't alter the freq + # for another that views the same data + + dti = date_range("2016-01-01", periods=5) + dta = dti._data + + dti2 = DatetimeIndex(dta)._with_freq(None) + assert dti2.freq is None + + # Original was not altered + assert dti.freq == "D" + assert dta.freq == "D" diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_indexing.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_indexing.py new file mode 100644 index 0000000000000000000000000000000000000000..bfbcdcff51ee6e7f50325962a44209a5c5bf9653 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_indexing.py @@ -0,0 +1,717 @@ +from datetime import ( + date, + datetime, + time, + timedelta, +) + +import numpy as np +import pytest + +from pandas._libs import index as libindex +from pandas.compat.numpy import np_long + +import pandas as pd +from pandas import ( + DatetimeIndex, + Index, + Timestamp, + bdate_range, + date_range, + notna, +) +import pandas._testing as tm 
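Editorial note (not part of the diffed files): the indexing tests below repeatedly check whether `freq` survives an operation. A minimal sketch of the two basic outcomes, using `to_offset` as the file itself does:

```python
import pandas as pd
from pandas.tseries.frequencies import to_offset

dti = pd.date_range("2016-01-01", periods=10, freq="D")
assert dti[::2].freq == to_offset("2D")   # regular step slicing rescales the freq
assert dti[[0, 3, 7]].freq is None        # arbitrary takes cannot keep a freq
```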
+ +from pandas.tseries.frequencies import to_offset + +START, END = datetime(2009, 1, 1), datetime(2010, 1, 1) + + +class TestGetItem: + def test_getitem_slice_keeps_name(self): + # GH4226 + st = Timestamp("2013-07-01 00:00:00", tz="America/Los_Angeles") + et = Timestamp("2013-07-02 00:00:00", tz="America/Los_Angeles") + dr = date_range(st, et, freq="h", name="timebucket") + assert dr[1:].name == dr.name + + @pytest.mark.parametrize("tz", [None, "Asia/Tokyo"]) + def test_getitem(self, tz): + idx = date_range("2011-01-01", "2011-01-31", freq="D", tz=tz, name="idx") + + result = idx[0] + assert result == Timestamp("2011-01-01", tz=idx.tz) + + result = idx[0:5] + expected = date_range( + "2011-01-01", "2011-01-05", freq="D", tz=idx.tz, name="idx" + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + result = idx[0:10:2] + expected = date_range( + "2011-01-01", "2011-01-09", freq="2D", tz=idx.tz, name="idx" + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + result = idx[-20:-5:3] + expected = date_range( + "2011-01-12", "2011-01-24", freq="3D", tz=idx.tz, name="idx" + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + result = idx[4::-1] + expected = DatetimeIndex( + ["2011-01-05", "2011-01-04", "2011-01-03", "2011-01-02", "2011-01-01"], + dtype=idx.dtype, + freq="-1D", + name="idx", + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + @pytest.mark.parametrize("freq", ["B", "C"]) + def test_dti_business_getitem(self, freq): + rng = bdate_range(START, END, freq=freq) + smaller = rng[:5] + exp = DatetimeIndex(rng.view(np.ndarray)[:5], freq=freq) + tm.assert_index_equal(smaller, exp) + assert smaller.freq == exp.freq + assert smaller.freq == rng.freq + + sliced = rng[::5] + assert sliced.freq == to_offset(freq) * 5 + + fancy_indexed = rng[[4, 3, 2, 1, 0]] + assert len(fancy_indexed) == 5 + assert isinstance(fancy_indexed, DatetimeIndex) + assert fancy_indexed.freq is None + + # 32-bit vs. 
64-bit platforms + assert rng[4] == rng[np_long(4)] + + @pytest.mark.parametrize("freq", ["B", "C"]) + def test_dti_business_getitem_matplotlib_hackaround(self, freq): + rng = bdate_range(START, END, freq=freq) + with pytest.raises(ValueError, match="Multi-dimensional indexing"): + # GH#30588 multi-dimensional indexing deprecated + rng[:, None] + + def test_getitem_int_list(self): + dti = date_range(start="1/1/2005", end="12/1/2005", freq="ME") + dti2 = dti[[1, 3, 5]] + + v1 = dti2[0] + v2 = dti2[1] + v3 = dti2[2] + + assert v1 == Timestamp("2/28/2005") + assert v2 == Timestamp("4/30/2005") + assert v3 == Timestamp("6/30/2005") + + # getitem with non-slice drops freq + assert dti2.freq is None + + +class TestWhere: + def test_where_doesnt_retain_freq(self): + dti = date_range("20130101", periods=3, freq="D", name="idx") + cond = [True, True, False] + expected = DatetimeIndex([dti[0], dti[1], dti[0]], freq=None, name="idx") + + result = dti.where(cond, dti[::-1]) + tm.assert_index_equal(result, expected) + + def test_where_other(self): + # other is ndarray or Index + i = date_range("20130101", periods=3, tz="US/Eastern") + + for arr in [np.nan, pd.NaT]: + result = i.where(notna(i), other=arr) + expected = i + tm.assert_index_equal(result, expected) + + i2 = i.copy() + i2 = Index([pd.NaT, pd.NaT] + i[2:].tolist()) + result = i.where(notna(i2), i2) + tm.assert_index_equal(result, i2) + + i2 = i.copy() + i2 = Index([pd.NaT, pd.NaT] + i[2:].tolist()) + result = i.where(notna(i2), i2._values) + tm.assert_index_equal(result, i2) + + def test_where_invalid_dtypes(self): + dti = date_range("20130101", periods=3, tz="US/Eastern") + + tail = dti[2:].tolist() + i2 = Index([pd.NaT, pd.NaT] + tail) + + mask = notna(i2) + + # passing tz-naive ndarray to tzaware DTI + result = dti.where(mask, i2.values) + expected = Index([pd.NaT.asm8, pd.NaT.asm8] + tail, dtype=object) + tm.assert_index_equal(result, expected) + + # passing tz-aware DTI to tznaive DTI + naive = dti.tz_localize(None) + result = naive.where(mask, i2) + expected = Index([i2[0], i2[1]] + naive[2:].tolist(), dtype=object) + tm.assert_index_equal(result, expected) + + pi = i2.tz_localize(None).to_period("D") + result = dti.where(mask, pi) + expected = Index([pi[0], pi[1]] + tail, dtype=object) + tm.assert_index_equal(result, expected) + + tda = i2.asi8.view("timedelta64[ns]") + result = dti.where(mask, tda) + expected = Index([tda[0], tda[1]] + tail, dtype=object) + assert isinstance(expected[0], np.timedelta64) + tm.assert_index_equal(result, expected) + + result = dti.where(mask, i2.asi8) + expected = Index([pd.NaT._value, pd.NaT._value] + tail, dtype=object) + assert isinstance(expected[0], int) + tm.assert_index_equal(result, expected) + + # non-matching scalar + td = pd.Timedelta(days=4) + result = dti.where(mask, td) + expected = Index([td, td] + tail, dtype=object) + assert expected[0] is td + tm.assert_index_equal(result, expected) + + def test_where_mismatched_nat(self, tz_aware_fixture): + tz = tz_aware_fixture + dti = date_range("2013-01-01", periods=3, tz=tz) + cond = np.array([True, False, True]) + + tdnat = np.timedelta64("NaT", "ns") + expected = Index([dti[0], tdnat, dti[2]], dtype=object) + assert expected[1] is tdnat + + result = dti.where(cond, tdnat) + tm.assert_index_equal(result, expected) + + def test_where_tz(self): + i = date_range("20130101", periods=3, tz="US/Eastern") + result = i.where(notna(i)) + expected = i + tm.assert_index_equal(result, expected) + + i2 = i.copy() + i2 = Index([pd.NaT, pd.NaT] + 
i[2:].tolist()) + result = i.where(notna(i2)) + expected = i2 + tm.assert_index_equal(result, expected) + + +class TestTake: + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_dti_take_dont_lose_meta(self, tzstr): + rng = date_range("1/1/2000", periods=20, tz=tzstr) + + result = rng.take(range(5)) + assert result.tz == rng.tz + assert result.freq == rng.freq + + def test_take_nan_first_datetime(self): + index = DatetimeIndex([pd.NaT, Timestamp("20130101"), Timestamp("20130102")]) + result = index.take([-1, 0, 1]) + expected = DatetimeIndex([index[-1], index[0], index[1]]) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize("tz", [None, "Asia/Tokyo"]) + def test_take(self, tz): + # GH#10295 + idx = date_range("2011-01-01", "2011-01-31", freq="D", name="idx", tz=tz) + + result = idx.take([0]) + assert result == Timestamp("2011-01-01", tz=idx.tz) + + result = idx.take([0, 1, 2]) + expected = date_range( + "2011-01-01", "2011-01-03", freq="D", tz=idx.tz, name="idx" + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + result = idx.take([0, 2, 4]) + expected = date_range( + "2011-01-01", "2011-01-05", freq="2D", tz=idx.tz, name="idx" + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + result = idx.take([7, 4, 1]) + expected = date_range( + "2011-01-08", "2011-01-02", freq="-3D", tz=idx.tz, name="idx" + ) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + result = idx.take([3, 2, 5]) + expected = DatetimeIndex( + ["2011-01-04", "2011-01-03", "2011-01-06"], + dtype=idx.dtype, + freq=None, + name="idx", + ) + tm.assert_index_equal(result, expected) + assert result.freq is None + + result = idx.take([-3, 2, 5]) + expected = DatetimeIndex( + ["2011-01-29", "2011-01-03", "2011-01-06"], + dtype=idx.dtype, + freq=None, + name="idx", + ) + tm.assert_index_equal(result, expected) + assert result.freq is None + + def test_take_invalid_kwargs(self): + idx = date_range("2011-01-01", "2011-01-31", freq="D", name="idx") + indices = [1, 6, 5, 9, 10, 13, 15, 3] + + msg = r"take\(\) got an unexpected keyword argument 'foo'" + with pytest.raises(TypeError, match=msg): + idx.take(indices, foo=2) + + msg = "the 'out' parameter is not supported" + with pytest.raises(ValueError, match=msg): + idx.take(indices, out=indices) + + msg = "the 'mode' parameter is not supported" + with pytest.raises(ValueError, match=msg): + idx.take(indices, mode="clip") + + # TODO: This method came from test_datetime; de-dup with version above + @pytest.mark.parametrize("tz", [None, "US/Eastern", "Asia/Tokyo"]) + def test_take2(self, tz): + dates = [ + datetime(2010, 1, 1, 14), + datetime(2010, 1, 1, 15), + datetime(2010, 1, 1, 17), + datetime(2010, 1, 1, 21), + ] + + idx = date_range( + start="2010-01-01 09:00", + end="2010-02-01 09:00", + freq="h", + tz=tz, + name="idx", + ) + expected = DatetimeIndex(dates, freq=None, name="idx", dtype=idx.dtype) + + taken1 = idx.take([5, 6, 8, 12]) + taken2 = idx[[5, 6, 8, 12]] + + for taken in [taken1, taken2]: + tm.assert_index_equal(taken, expected) + assert isinstance(taken, DatetimeIndex) + assert taken.freq is None + assert taken.tz == expected.tz + assert taken.name == expected.name + + def test_take_fill_value(self): + # GH#12631 + idx = DatetimeIndex(["2011-01-01", "2011-02-01", "2011-03-01"], name="xxx") + result = idx.take(np.array([1, 0, -1])) + expected = DatetimeIndex(["2011-02-01", "2011-01-01", "2011-03-01"], name="xxx") + 
tm.assert_index_equal(result, expected) + + # fill_value + result = idx.take(np.array([1, 0, -1]), fill_value=True) + expected = DatetimeIndex(["2011-02-01", "2011-01-01", "NaT"], name="xxx") + tm.assert_index_equal(result, expected) + + # allow_fill=False + result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True) + expected = DatetimeIndex(["2011-02-01", "2011-01-01", "2011-03-01"], name="xxx") + tm.assert_index_equal(result, expected) + + msg = ( + "When allow_fill=True and fill_value is not None, " + "all indices must be >= -1" + ) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -2]), fill_value=True) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -5]), fill_value=True) + + msg = "out of bounds" + with pytest.raises(IndexError, match=msg): + idx.take(np.array([1, -5])) + + def test_take_fill_value_with_timezone(self): + idx = DatetimeIndex( + ["2011-01-01", "2011-02-01", "2011-03-01"], name="xxx", tz="US/Eastern" + ) + result = idx.take(np.array([1, 0, -1])) + expected = DatetimeIndex( + ["2011-02-01", "2011-01-01", "2011-03-01"], name="xxx", tz="US/Eastern" + ) + tm.assert_index_equal(result, expected) + + # fill_value + result = idx.take(np.array([1, 0, -1]), fill_value=True) + expected = DatetimeIndex( + ["2011-02-01", "2011-01-01", "NaT"], name="xxx", tz="US/Eastern" + ) + tm.assert_index_equal(result, expected) + + # allow_fill=False + result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True) + expected = DatetimeIndex( + ["2011-02-01", "2011-01-01", "2011-03-01"], name="xxx", tz="US/Eastern" + ) + tm.assert_index_equal(result, expected) + + msg = ( + "When allow_fill=True and fill_value is not None, " + "all indices must be >= -1" + ) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -2]), fill_value=True) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -5]), fill_value=True) + + msg = "out of bounds" + with pytest.raises(IndexError, match=msg): + idx.take(np.array([1, -5])) + + +class TestGetLoc: + def test_get_loc_key_unit_mismatch(self): + idx = date_range("2000-01-01", periods=3) + key = idx[1].as_unit("ms") + loc = idx.get_loc(key) + assert loc == 1 + assert key in idx + + def test_get_loc_key_unit_mismatch_not_castable(self): + dta = date_range("2000-01-01", periods=3)._data.astype("M8[s]") + dti = DatetimeIndex(dta) + key = dta[0].as_unit("ns") + pd.Timedelta(1) + + with pytest.raises( + KeyError, match=r"Timestamp\('2000-01-01 00:00:00.000000001'\)" + ): + dti.get_loc(key) + + assert key not in dti + + def test_get_loc_time_obj(self): + # time indexing + idx = date_range("2000-01-01", periods=24, freq="h") + + result = idx.get_loc(time(12)) + expected = np.array([12]) + tm.assert_numpy_array_equal(result, expected, check_dtype=False) + + result = idx.get_loc(time(12, 30)) + expected = np.array([]) + tm.assert_numpy_array_equal(result, expected, check_dtype=False) + + @pytest.mark.parametrize("offset", [-10, 10]) + def test_get_loc_time_obj2(self, monkeypatch, offset): + # GH#8667 + size_cutoff = 50 + n = size_cutoff + offset + key = time(15, 11, 30) + start = key.hour * 3600 + key.minute * 60 + key.second + step = 24 * 3600 + + with monkeypatch.context(): + monkeypatch.setattr(libindex, "_SIZE_CUTOFF", size_cutoff) + idx = date_range("2014-11-26", periods=n, freq="s") + ts = pd.Series(np.random.default_rng(2).standard_normal(n), index=idx) + locs = np.arange(start, n, step, dtype=np.intp) + + result = ts.index.get_loc(key) + 
tm.assert_numpy_array_equal(result, locs) + tm.assert_series_equal(ts[key], ts.iloc[locs]) + + left, right = ts.copy(), ts.copy() + left[key] *= -10 + right.iloc[locs] *= -10 + tm.assert_series_equal(left, right) + + def test_get_loc_time_nat(self): + # GH#35114 + # Case where key's total microseconds happens to match iNaT % 1e6 // 1000 + tic = time(minute=12, second=43, microsecond=145224) + dti = DatetimeIndex([pd.NaT]) + + loc = dti.get_loc(tic) + expected = np.array([], dtype=np.intp) + tm.assert_numpy_array_equal(loc, expected) + + def test_get_loc_nat(self): + # GH#20464 + index = DatetimeIndex(["1/3/2000", "NaT"]) + assert index.get_loc(pd.NaT) == 1 + + assert index.get_loc(None) == 1 + + assert index.get_loc(np.nan) == 1 + + assert index.get_loc(pd.NA) == 1 + + assert index.get_loc(np.datetime64("NaT")) == 1 + + with pytest.raises(KeyError, match="NaT"): + index.get_loc(np.timedelta64("NaT")) + + @pytest.mark.parametrize("key", [pd.Timedelta(0), pd.Timedelta(1), timedelta(0)]) + def test_get_loc_timedelta_invalid_key(self, key): + # GH#20464 + dti = date_range("1970-01-01", periods=10) + msg = "Cannot index DatetimeIndex with [Tt]imedelta" + with pytest.raises(TypeError, match=msg): + dti.get_loc(key) + + def test_get_loc_reasonable_key_error(self): + # GH#1062 + index = DatetimeIndex(["1/3/2000"]) + with pytest.raises(KeyError, match="2000"): + index.get_loc("1/1/2000") + + def test_get_loc_year_str(self): + rng = date_range("1/1/2000", "1/1/2010") + + result = rng.get_loc("2009") + expected = slice(3288, 3653) + assert result == expected + + +class TestContains: + def test_dti_contains_with_duplicates(self): + d = datetime(2011, 12, 5, 20, 30) + ix = DatetimeIndex([d, d]) + assert d in ix + + @pytest.mark.parametrize( + "vals", + [ + [0, 1, 0], + [0, 0, -1], + [0, -1, -1], + ["2015", "2015", "2016"], + ["2015", "2015", "2014"], + ], + ) + def test_contains_nonunique(self, vals): + # GH#9512 + idx = DatetimeIndex(vals) + assert idx[0] in idx + + +class TestGetIndexer: + def test_get_indexer_date_objs(self): + rng = date_range("1/1/2000", periods=20) + + result = rng.get_indexer(rng.map(lambda x: x.date())) + expected = rng.get_indexer(rng) + tm.assert_numpy_array_equal(result, expected) + + def test_get_indexer(self): + idx = date_range("2000-01-01", periods=3) + exp = np.array([0, 1, 2], dtype=np.intp) + tm.assert_numpy_array_equal(idx.get_indexer(idx), exp) + + target = idx[0] + pd.to_timedelta(["-1 hour", "12 hours", "1 day 1 hour"]) + tm.assert_numpy_array_equal( + idx.get_indexer(target, "pad"), np.array([-1, 0, 1], dtype=np.intp) + ) + tm.assert_numpy_array_equal( + idx.get_indexer(target, "backfill"), np.array([0, 1, 2], dtype=np.intp) + ) + tm.assert_numpy_array_equal( + idx.get_indexer(target, "nearest"), np.array([0, 1, 1], dtype=np.intp) + ) + tm.assert_numpy_array_equal( + idx.get_indexer(target, "nearest", tolerance=pd.Timedelta("1 hour")), + np.array([0, -1, 1], dtype=np.intp), + ) + tol_raw = [ + pd.Timedelta("1 hour"), + pd.Timedelta("1 hour"), + pd.Timedelta("1 hour").to_timedelta64(), + ] + tm.assert_numpy_array_equal( + idx.get_indexer( + target, "nearest", tolerance=[np.timedelta64(x) for x in tol_raw] + ), + np.array([0, -1, 1], dtype=np.intp), + ) + tol_bad = [ + pd.Timedelta("2 hour").to_timedelta64(), + pd.Timedelta("1 hour").to_timedelta64(), + "foo", + ] + msg = "Could not convert 'foo' to NumPy timedelta" + with pytest.raises(ValueError, match=msg): + idx.get_indexer(target, "nearest", tolerance=tol_bad) + with pytest.raises(ValueError, 
match="abbreviation w/o a number"): + idx.get_indexer(idx[[0]], method="nearest", tolerance="foo") + + @pytest.mark.parametrize( + "target", + [ + [date(2020, 1, 1), Timestamp("2020-01-02")], + [Timestamp("2020-01-01"), date(2020, 1, 2)], + ], + ) + def test_get_indexer_mixed_dtypes(self, target): + # https://github.com/pandas-dev/pandas/issues/33741 + values = DatetimeIndex([Timestamp("2020-01-01"), Timestamp("2020-01-02")]) + result = values.get_indexer(target) + expected = np.array([0, 1], dtype=np.intp) + tm.assert_numpy_array_equal(result, expected) + + @pytest.mark.parametrize( + "target, positions", + [ + ([date(9999, 1, 1), Timestamp("2020-01-01")], [-1, 0]), + ([Timestamp("2020-01-01"), date(9999, 1, 1)], [0, -1]), + ([date(9999, 1, 1), date(9999, 1, 1)], [-1, -1]), + ], + ) + def test_get_indexer_out_of_bounds_date(self, target, positions): + values = DatetimeIndex([Timestamp("2020-01-01"), Timestamp("2020-01-02")]) + + result = values.get_indexer(target) + expected = np.array(positions, dtype=np.intp) + tm.assert_numpy_array_equal(result, expected) + + def test_get_indexer_pad_requires_monotonicity(self): + rng = date_range("1/1/2000", "3/1/2000", freq="B") + + # neither monotonic increasing or decreasing + rng2 = rng[[1, 0, 2]] + + msg = "index must be monotonic increasing or decreasing" + with pytest.raises(ValueError, match=msg): + rng2.get_indexer(rng, method="pad") + + +class TestMaybeCastSliceBound: + def test_maybe_cast_slice_bounds_empty(self): + # GH#14354 + empty_idx = date_range(freq="1h", periods=0, end="2015") + + right = empty_idx._maybe_cast_slice_bound("2015-01-02", "right") + exp = Timestamp("2015-01-02 23:59:59.999999999") + assert right == exp + + left = empty_idx._maybe_cast_slice_bound("2015-01-02", "left") + exp = Timestamp("2015-01-02 00:00:00") + assert left == exp + + def test_maybe_cast_slice_duplicate_monotonic(self): + # https://github.com/pandas-dev/pandas/issues/16515 + idx = DatetimeIndex(["2017", "2017"]) + result = idx._maybe_cast_slice_bound("2017-01-01", "left") + expected = Timestamp("2017-01-01") + assert result == expected + + +class TestGetSliceBounds: + @pytest.mark.parametrize("box", [date, datetime, Timestamp]) + @pytest.mark.parametrize("side, expected", [("left", 4), ("right", 5)]) + def test_get_slice_bounds_datetime_within( + self, box, side, expected, tz_aware_fixture + ): + # GH 35690 + tz = tz_aware_fixture + index = bdate_range("2000-01-03", "2000-02-11").tz_localize(tz) + key = box(year=2000, month=1, day=7) + + if tz is not None: + with pytest.raises(TypeError, match="Cannot compare tz-naive"): + # GH#36148 we require tzawareness-compat as of 2.0 + index.get_slice_bound(key, side=side) + else: + result = index.get_slice_bound(key, side=side) + assert result == expected + + @pytest.mark.parametrize("box", [datetime, Timestamp]) + @pytest.mark.parametrize("side", ["left", "right"]) + @pytest.mark.parametrize("year, expected", [(1999, 0), (2020, 30)]) + def test_get_slice_bounds_datetime_outside( + self, box, side, year, expected, tz_aware_fixture + ): + # GH 35690 + tz = tz_aware_fixture + index = bdate_range("2000-01-03", "2000-02-11").tz_localize(tz) + key = box(year=year, month=1, day=7) + + if tz is not None: + with pytest.raises(TypeError, match="Cannot compare tz-naive"): + # GH#36148 we require tzawareness-compat as of 2.0 + index.get_slice_bound(key, side=side) + else: + result = index.get_slice_bound(key, side=side) + assert result == expected + + @pytest.mark.parametrize("box", [datetime, Timestamp]) + def 
test_slice_datetime_locs(self, box, tz_aware_fixture): + # GH 34077 + tz = tz_aware_fixture + index = DatetimeIndex(["2010-01-01", "2010-01-03"]).tz_localize(tz) + key = box(2010, 1, 1) + + if tz is not None: + with pytest.raises(TypeError, match="Cannot compare tz-naive"): + # GH#36148 we require tzawareness-compat as of 2.0 + index.slice_locs(key, box(2010, 1, 2)) + else: + result = index.slice_locs(key, box(2010, 1, 2)) + expected = (0, 1) + assert result == expected + + +class TestIndexerBetweenTime: + def test_indexer_between_time(self): + # GH#11818 + rng = date_range("1/1/2000", "1/5/2000", freq="5min") + msg = r"Cannot convert arg \[datetime\.datetime\(2010, 1, 2, 1, 0\)\] to a time" + with pytest.raises(ValueError, match=msg): + rng.indexer_between_time(datetime(2010, 1, 2, 1), datetime(2010, 1, 2, 5)) + + @pytest.mark.parametrize("unit", ["us", "ms", "s"]) + def test_indexer_between_time_non_nano(self, unit): + # For simple cases like this, the non-nano indexer_between_time + # should match the nano result + + rng = date_range("1/1/2000", "1/5/2000", freq="5min") + arr_nano = rng._data._ndarray + + arr = arr_nano.astype(f"M8[{unit}]") + + dta = type(rng._data)._simple_new(arr, dtype=arr.dtype) + dti = DatetimeIndex(dta) + assert dti.dtype == arr.dtype + + tic = time(1, 25) + toc = time(2, 29) + + result = dti.indexer_between_time(tic, toc) + expected = rng.indexer_between_time(tic, toc) + tm.assert_numpy_array_equal(result, expected) + + # case with non-zero micros in arguments + tic = time(1, 25, 0, 45678) + toc = time(2, 29, 0, 1234) + + result = dti.indexer_between_time(tic, toc) + expected = rng.indexer_between_time(tic, toc) + tm.assert_numpy_array_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_iter.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_iter.py new file mode 100644 index 0000000000000000000000000000000000000000..a006ed79f27baed75bedb95e6f24e948e429172e --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_iter.py @@ -0,0 +1,76 @@ +import dateutil.tz +import numpy as np +import pytest + +from pandas import ( + DatetimeIndex, + date_range, + to_datetime, +) +from pandas.core.arrays import datetimes + + +class TestDatetimeIndexIteration: + @pytest.mark.parametrize( + "tz", [None, "UTC", "US/Central", dateutil.tz.tzoffset(None, -28800)] + ) + def test_iteration_preserves_nanoseconds(self, tz): + # GH#19603 + index = DatetimeIndex( + ["2018-02-08 15:00:00.168456358", "2018-02-08 15:00:00.168456359"], tz=tz + ) + for i, ts in enumerate(index): + assert ts == index[i] # pylint: disable=unnecessary-list-index-lookup + + def test_iter_readonly(self): + # GH#28055 ints_to_pydatetime with readonly array + arr = np.array([np.datetime64("2012-02-15T12:00:00.000000000")]) + arr.setflags(write=False) + dti = to_datetime(arr) + list(dti) + + def test_iteration_preserves_tz(self): + # see GH#8890 + index = date_range("2012-01-01", periods=3, freq="h", tz="US/Eastern") + + for i, ts in enumerate(index): + result = ts + expected = index[i] # pylint: disable=unnecessary-list-index-lookup + assert result == expected + + def test_iteration_preserves_tz2(self): + index = date_range( + "2012-01-01", periods=3, freq="h", tz=dateutil.tz.tzoffset(None, -28800) + ) + + for i, ts in enumerate(index): + result = ts + expected = index[i] # pylint: disable=unnecessary-list-index-lookup + assert result._repr_base == expected._repr_base + assert result == 
expected + + def test_iteration_preserves_tz3(self): + # GH#9100 + index = DatetimeIndex( + ["2014-12-01 03:32:39.987000-08:00", "2014-12-01 04:12:34.987000-08:00"] + ) + for i, ts in enumerate(index): + result = ts + expected = index[i] # pylint: disable=unnecessary-list-index-lookup + assert result._repr_base == expected._repr_base + assert result == expected + + @pytest.mark.parametrize("offset", [-5, -1, 0, 1]) + def test_iteration_over_chunksize(self, offset, monkeypatch): + # GH#21012 + chunksize = 5 + index = date_range( + "2000-01-01 00:00:00", periods=chunksize - offset, freq="min" + ) + num = 0 + with monkeypatch.context() as m: + m.setattr(datetimes, "_ITER_CHUNKSIZE", chunksize) + for stamp in index: + assert index[num] == stamp + num += 1 + assert num == len(index) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_join.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_join.py new file mode 100644 index 0000000000000000000000000000000000000000..d0ac32939296c3f6f0fb0ecb501dde9d65ba989d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_join.py @@ -0,0 +1,149 @@ +from datetime import ( + datetime, + timezone, +) + +import numpy as np +import pytest + +from pandas import ( + DataFrame, + DatetimeIndex, + Index, + Timestamp, + date_range, + period_range, + to_datetime, +) +import pandas._testing as tm + +from pandas.tseries.offsets import ( + BDay, + BMonthEnd, +) + + +class TestJoin: + def test_does_not_convert_mixed_integer(self): + df = DataFrame(np.ones((3, 2)), columns=date_range("2020-01-01", periods=2)) + cols = df.columns.join(df.index, how="outer") + joined = cols.join(df.columns) + assert cols.dtype == np.dtype("O") + assert cols.dtype == joined.dtype + tm.assert_numpy_array_equal(cols.values, joined.values) + + def test_join_self(self, join_type): + index = date_range("1/1/2000", periods=10) + joined = index.join(index, how=join_type) + assert index is joined + + def test_join_with_period_index(self, join_type): + df = DataFrame( + np.ones((10, 2)), + index=date_range("2020-01-01", periods=10), + columns=period_range("2020-01-01", periods=2), + ) + s = df.iloc[:5, 0] + + expected = df.columns.astype("O").join(s.index, how=join_type) + result = df.columns.join(s.index, how=join_type) + tm.assert_index_equal(expected, result) + + def test_join_object_index(self): + rng = date_range("1/1/2000", periods=10) + idx = Index(["a", "b", "c", "d"]) + + result = rng.join(idx, how="outer") + assert isinstance(result[0], Timestamp) + + def test_join_utc_convert(self, join_type): + rng = date_range("1/1/2011", periods=100, freq="h", tz="utc") + + left = rng.tz_convert("US/Eastern") + right = rng.tz_convert("Europe/Berlin") + + result = left.join(left[:-5], how=join_type) + assert isinstance(result, DatetimeIndex) + assert result.tz == left.tz + + result = left.join(right[:-5], how=join_type) + assert isinstance(result, DatetimeIndex) + assert result.tz is timezone.utc + + def test_datetimeindex_union_join_empty(self, sort): + dti = date_range(start="1/1/2001", end="2/1/2001", freq="D") + empty = Index([]) + + result = dti.union(empty, sort=sort) + expected = dti.astype("O") + tm.assert_index_equal(result, expected) + + result = dti.join(empty) + assert isinstance(result, DatetimeIndex) + tm.assert_index_equal(result, dti) + + def test_join_nonunique(self): + idx1 = to_datetime(["2012-11-06 16:00:11.477563", "2012-11-06 16:00:11.477563"]) + idx2 = 
to_datetime(["2012-11-06 15:11:09.006507", "2012-11-06 15:11:09.006507"]) + rs = idx1.join(idx2, how="outer") + assert rs.is_monotonic_increasing + + @pytest.mark.parametrize("freq", ["B", "C"]) + def test_outer_join(self, freq): + # should just behave as union + start, end = datetime(2009, 1, 1), datetime(2010, 1, 1) + rng = date_range(start=start, end=end, freq=freq) + + # overlapping + left = rng[:10] + right = rng[5:10] + + the_join = left.join(right, how="outer") + assert isinstance(the_join, DatetimeIndex) + + # non-overlapping, gap in middle + left = rng[:5] + right = rng[10:] + + the_join = left.join(right, how="outer") + assert isinstance(the_join, DatetimeIndex) + assert the_join.freq is None + + # non-overlapping, no gap + left = rng[:5] + right = rng[5:10] + + the_join = left.join(right, how="outer") + assert isinstance(the_join, DatetimeIndex) + + # overlapping, but different offset + other = date_range(start, end, freq=BMonthEnd()) + + the_join = rng.join(other, how="outer") + assert isinstance(the_join, DatetimeIndex) + assert the_join.freq is None + + def test_naive_aware_conflicts(self): + start, end = datetime(2009, 1, 1), datetime(2010, 1, 1) + naive = date_range(start, end, freq=BDay(), tz=None) + aware = date_range(start, end, freq=BDay(), tz="Asia/Hong_Kong") + + msg = "tz-naive.*tz-aware" + with pytest.raises(TypeError, match=msg): + naive.join(aware) + + with pytest.raises(TypeError, match=msg): + aware.join(naive) + + @pytest.mark.parametrize("tz", [None, "US/Pacific"]) + def test_join_preserves_freq(self, tz): + # GH#32157 + dti = date_range("2016-01-01", periods=10, tz=tz) + result = dti[:5].join(dti[5:], how="outer") + assert result.freq == dti.freq + tm.assert_index_equal(result, dti) + + result = dti[:5].join(dti[6:], how="outer") + assert result.freq is None + expected = dti.delete(5) + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_npfuncs.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_npfuncs.py new file mode 100644 index 0000000000000000000000000000000000000000..6c3e44c2a5db1ebc4f02686d19d34ae3caf1e9ad --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_npfuncs.py @@ -0,0 +1,13 @@ +import numpy as np + +from pandas import date_range +import pandas._testing as tm + + +class TestSplit: + def test_split_non_utc(self): + # GH#14042 + indices = date_range("2016-01-01 00:00:00+0200", freq="s", periods=10) + result = np.split(indices, indices_or_sections=[])[0] + expected = indices._with_freq(None) + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_ops.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_ops.py new file mode 100644 index 0000000000000000000000000000000000000000..bac9548b932c163dc7a33282796c1bb682187664 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_ops.py @@ -0,0 +1,56 @@ +from datetime import datetime + +import pytest + +from pandas import ( + DatetimeIndex, + Index, + bdate_range, + date_range, +) +import pandas._testing as tm + + +class TestDatetimeIndexOps: + def test_infer_freq(self, freq_sample): + # GH 11018 + idx = date_range("2011-01-01 09:00:00", freq=freq_sample, periods=10) + result = DatetimeIndex(idx.asi8, freq="infer") + tm.assert_index_equal(idx, result) + assert result.freq == freq_sample + + 
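+# Illustrative sketch, not part of the upstream pandas suite: the freq="infer"
+# path exercised by test_infer_freq above re-derives the frequency purely from
+# the spacing of the underlying int64 nanosecond values.  The helper name below
+# is an assumption chosen for this example only.
+def _example_infer_freq_roundtrip():
+    # Build an hourly index, strip it down to raw i8 values, then reconstruct it
+    # and let the constructor infer the frequency from the evenly spaced values.
+    idx = date_range("2011-01-01 09:00:00", freq="h", periods=10)
+    rebuilt = DatetimeIndex(idx.asi8, freq="infer")
+    # The inferred freq matches the original Hour offset.
+    assert rebuilt.freq == idx.freq
+    return rebuilt
+
+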
+@pytest.mark.parametrize("freq", ["B", "C"]) +class TestBusinessDatetimeIndex: + @pytest.fixture + def rng(self, freq): + START, END = datetime(2009, 1, 1), datetime(2010, 1, 1) + return bdate_range(START, END, freq=freq) + + def test_comparison(self, rng): + d = rng[10] + + comp = rng > d + assert comp[11] + assert not comp[9] + + def test_copy(self, rng): + cp = rng.copy() + tm.assert_index_equal(cp, rng) + + def test_identical(self, rng): + t1 = rng.copy() + t2 = rng.copy() + assert t1.identical(t2) + + # name + t1 = t1.rename("foo") + assert t1.equals(t2) + assert not t1.identical(t2) + t2 = t2.rename("foo") + assert t1.identical(t2) + + # freq + t2v = Index(t2.values) + assert t1.equals(t2v) + assert not t1.identical(t2v) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_pickle.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_pickle.py new file mode 100644 index 0000000000000000000000000000000000000000..922b4a18119f4d457de501225611f8884689d434 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_pickle.py @@ -0,0 +1,45 @@ +import pytest + +from pandas import ( + NaT, + date_range, + to_datetime, +) +import pandas._testing as tm + + +class TestPickle: + def test_pickle(self): + # GH#4606 + idx = to_datetime(["2013-01-01", NaT, "2014-01-06"]) + idx_p = tm.round_trip_pickle(idx) + assert idx_p[0] == idx[0] + assert idx_p[1] is NaT + assert idx_p[2] == idx[2] + + def test_pickle_dont_infer_freq(self): + # GH#11002 + # don't infer freq + idx = date_range("1750-1-1", "2050-1-1", freq="7D") + idx_p = tm.round_trip_pickle(idx) + tm.assert_index_equal(idx, idx_p) + + def test_pickle_after_set_freq(self): + dti = date_range("20130101", periods=3, tz="US/Eastern", name="foo") + dti = dti._with_freq(None) + + res = tm.round_trip_pickle(dti) + tm.assert_index_equal(res, dti) + + def test_roundtrip_pickle_with_tz(self): + # GH#8367 + # round-trip of timezone + index = date_range("20130101", periods=3, tz="US/Eastern", name="foo") + unpickled = tm.round_trip_pickle(index) + tm.assert_index_equal(index, unpickled) + + @pytest.mark.parametrize("freq", ["B", "C"]) + def test_pickle_unpickle(self, freq): + rng = date_range("2009-01-01", "2010-01-01", freq=freq) + unpickled = tm.round_trip_pickle(rng) + assert unpickled.freq == freq diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_reindex.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_reindex.py new file mode 100644 index 0000000000000000000000000000000000000000..e4911aa3c4a2938cedb70887b6bd3f28e408f8c5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_reindex.py @@ -0,0 +1,56 @@ +from datetime import timedelta + +import numpy as np + +from pandas import ( + DatetimeIndex, + date_range, +) +import pandas._testing as tm + + +class TestDatetimeIndexReindex: + def test_reindex_preserves_tz_if_target_is_empty_list_or_array(self): + # GH#7774 + index = date_range("2013-01-01", periods=3, tz="US/Eastern") + assert str(index.reindex([])[0].tz) == "US/Eastern" + assert str(index.reindex(np.array([]))[0].tz) == "US/Eastern" + + def test_reindex_with_same_tz_nearest(self): + # GH#32740 + rng_a = date_range("2010-01-01", "2010-01-02", periods=24, tz="utc") + rng_b = date_range("2010-01-01", "2010-01-02", periods=23, tz="utc") + result1, result2 = rng_a.reindex( + rng_b, method="nearest", tolerance=timedelta(seconds=20) + ) + 
expected_list1 = [ + "2010-01-01 00:00:00", + "2010-01-01 01:05:27.272727272", + "2010-01-01 02:10:54.545454545", + "2010-01-01 03:16:21.818181818", + "2010-01-01 04:21:49.090909090", + "2010-01-01 05:27:16.363636363", + "2010-01-01 06:32:43.636363636", + "2010-01-01 07:38:10.909090909", + "2010-01-01 08:43:38.181818181", + "2010-01-01 09:49:05.454545454", + "2010-01-01 10:54:32.727272727", + "2010-01-01 12:00:00", + "2010-01-01 13:05:27.272727272", + "2010-01-01 14:10:54.545454545", + "2010-01-01 15:16:21.818181818", + "2010-01-01 16:21:49.090909090", + "2010-01-01 17:27:16.363636363", + "2010-01-01 18:32:43.636363636", + "2010-01-01 19:38:10.909090909", + "2010-01-01 20:43:38.181818181", + "2010-01-01 21:49:05.454545454", + "2010-01-01 22:54:32.727272727", + "2010-01-02 00:00:00", + ] + expected1 = DatetimeIndex( + expected_list1, dtype="datetime64[ns, UTC]", freq=None + ) + expected2 = np.array([0] + [-1] * 21 + [23], dtype=np.dtype("intp")) + tm.assert_index_equal(result1, expected1) + tm.assert_numpy_array_equal(result2, expected2) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_scalar_compat.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_scalar_compat.py new file mode 100644 index 0000000000000000000000000000000000000000..e93fc0e2a4e2e740e2dee27e332b68b060ba7aa7 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_scalar_compat.py @@ -0,0 +1,329 @@ +""" +Tests for DatetimeIndex methods behaving like their Timestamp counterparts +""" + +import calendar +from datetime import ( + date, + datetime, + time, +) +import locale +import unicodedata + +import numpy as np +import pytest + +from pandas._libs.tslibs import timezones + +from pandas import ( + DatetimeIndex, + Index, + NaT, + Timestamp, + date_range, + offsets, +) +import pandas._testing as tm +from pandas.core.arrays import DatetimeArray + + +class TestDatetimeIndexOps: + def test_dti_no_millisecond_field(self): + msg = "type object 'DatetimeIndex' has no attribute 'millisecond'" + with pytest.raises(AttributeError, match=msg): + DatetimeIndex.millisecond + + msg = "'DatetimeIndex' object has no attribute 'millisecond'" + with pytest.raises(AttributeError, match=msg): + DatetimeIndex([]).millisecond + + def test_dti_time(self): + rng = date_range("1/1/2000", freq="12min", periods=10) + result = Index(rng).time + expected = [t.time() for t in rng] + assert (result == expected).all() + + def test_dti_date(self): + rng = date_range("1/1/2000", freq="12h", periods=10) + result = Index(rng).date + expected = [t.date() for t in rng] + assert (result == expected).all() + + @pytest.mark.parametrize( + "dtype", + [None, "datetime64[ns, CET]", "datetime64[ns, EST]", "datetime64[ns, UTC]"], + ) + def test_dti_date2(self, dtype): + # Regression test for GH#21230 + expected = np.array([date(2018, 6, 4), NaT]) + + index = DatetimeIndex(["2018-06-04 10:00:00", NaT], dtype=dtype) + result = index.date + + tm.assert_numpy_array_equal(result, expected) + + @pytest.mark.parametrize( + "dtype", + [None, "datetime64[ns, CET]", "datetime64[ns, EST]", "datetime64[ns, UTC]"], + ) + def test_dti_time2(self, dtype): + # Regression test for GH#21267 + expected = np.array([time(10, 20, 30), NaT]) + + index = DatetimeIndex(["2018-06-04 10:20:30", NaT], dtype=dtype) + result = index.time + + tm.assert_numpy_array_equal(result, expected) + + def test_dti_timetz(self, tz_naive_fixture): + # GH#21358 + tz = 
timezones.maybe_get_tz(tz_naive_fixture)
+
+        expected = np.array([time(10, 20, 30, tzinfo=tz), NaT])
+
+        index = DatetimeIndex(["2018-06-04 10:20:30", NaT], tz=tz)
+        result = index.timetz
+
+        tm.assert_numpy_array_equal(result, expected)
+
+    @pytest.mark.parametrize(
+        "field",
+        [
+            "dayofweek",
+            "day_of_week",
+            "dayofyear",
+            "day_of_year",
+            "quarter",
+            "days_in_month",
+            "is_month_start",
+            "is_month_end",
+            "is_quarter_start",
+            "is_quarter_end",
+            "is_year_start",
+            "is_year_end",
+        ],
+    )
+    def test_dti_timestamp_fields(self, field):
+        # extra fields from DatetimeIndex like quarter and week
+        idx = date_range("2020-01-01", periods=10)
+        expected = getattr(idx, field)[-1]
+
+        result = getattr(Timestamp(idx[-1]), field)
+        assert result == expected
+
+    def test_dti_nanosecond(self):
+        dti = DatetimeIndex(np.arange(10))
+        expected = Index(np.arange(10, dtype=np.int32))
+
+        tm.assert_index_equal(dti.nanosecond, expected)
+
+    @pytest.mark.parametrize("prefix", ["", "dateutil/"])
+    def test_dti_hour_tzaware(self, prefix):
+        strdates = ["1/1/2012", "3/1/2012", "4/1/2012"]
+        rng = DatetimeIndex(strdates, tz=prefix + "US/Eastern")
+        assert (rng.hour == 0).all()
+
+        # a more unusual time zone, GH#1946
+        dr = date_range(
+            "2011-10-02 00:00", freq="h", periods=10, tz=prefix + "America/Atikokan"
+        )
+
+        expected = Index(np.arange(10, dtype=np.int32))
+        tm.assert_index_equal(dr.hour, expected)
+
+    # GH#12806
+    # error: Unsupported operand types for + ("List[None]" and "List[str]")
+    @pytest.mark.parametrize(
+        "time_locale", [None] + tm.get_locales()  # type: ignore[operator]
+    )
+    def test_day_name_month_name(self, time_locale):
+        # Test Monday -> Sunday and January -> December, in that sequence
+        if time_locale is None:
+            # If the time_locale is None, day_name and month_name should
+            # return the english attributes
+            expected_days = [
+                "Monday",
+                "Tuesday",
+                "Wednesday",
+                "Thursday",
+                "Friday",
+                "Saturday",
+                "Sunday",
+            ]
+            expected_months = [
+                "January",
+                "February",
+                "March",
+                "April",
+                "May",
+                "June",
+                "July",
+                "August",
+                "September",
+                "October",
+                "November",
+                "December",
+            ]
+        else:
+            with tm.set_locale(time_locale, locale.LC_TIME):
+                expected_days = calendar.day_name[:]
+                expected_months = calendar.month_name[1:]
+
+        # GH#11128
+        dti = date_range(freq="D", start=datetime(1998, 1, 1), periods=365)
+        english_days = [
+            "Monday",
+            "Tuesday",
+            "Wednesday",
+            "Thursday",
+            "Friday",
+            "Saturday",
+            "Sunday",
+        ]
+        for day, name, eng_name in zip(range(4, 11), expected_days, english_days):
+            name = name.capitalize()
+            assert dti.day_name(locale=time_locale)[day] == name
+            assert dti.day_name(locale=None)[day] == eng_name
+            ts = Timestamp(datetime(2016, 4, day))
+            assert ts.day_name(locale=time_locale) == name
+        dti = dti.append(DatetimeIndex([NaT]))
+        assert np.isnan(dti.day_name(locale=time_locale)[-1])
+        ts = Timestamp(NaT)
+        assert np.isnan(ts.day_name(locale=time_locale))
+
+        # GH#12805
+        dti = date_range(freq="ME", start="2012", end="2013")
+        result = dti.month_name(locale=time_locale)
+        expected = Index([month.capitalize() for month in expected_months])
+
+        # work around different normalization schemes GH#22342
+        result = result.str.normalize("NFD")
+        expected = expected.str.normalize("NFD")
+
+        tm.assert_index_equal(result, expected)
+
+        for item, expected in zip(dti, expected_months):
+            result = item.month_name(locale=time_locale)
+            expected = expected.capitalize()
+
+            result = unicodedata.normalize("NFD", result)
+            expected = unicodedata.normalize("NFD", expected)
+
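+            # NFD normalization decomposes accented characters that non-English
+            # locales can produce in month names, so the equality check is not
+            # sensitive to the Unicode composition form of either side (GH#22342).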
+ assert result == expected + dti = dti.append(DatetimeIndex([NaT])) + assert np.isnan(dti.month_name(locale=time_locale)[-1]) + + def test_dti_week(self): + # GH#6538: Check that DatetimeIndex and its TimeStamp elements + # return the same weekofyear accessor close to new year w/ tz + dates = ["2013/12/29", "2013/12/30", "2013/12/31"] + dates = DatetimeIndex(dates, tz="Europe/Brussels") + expected = [52, 1, 1] + assert dates.isocalendar().week.tolist() == expected + assert [d.weekofyear for d in dates] == expected + + @pytest.mark.parametrize("tz", [None, "US/Eastern"]) + def test_dti_fields(self, tz): + # GH#13303 + dti = date_range(freq="D", start=datetime(1998, 1, 1), periods=365, tz=tz) + assert dti.year[0] == 1998 + assert dti.month[0] == 1 + assert dti.day[0] == 1 + assert dti.hour[0] == 0 + assert dti.minute[0] == 0 + assert dti.second[0] == 0 + assert dti.microsecond[0] == 0 + assert dti.dayofweek[0] == 3 + + assert dti.dayofyear[0] == 1 + assert dti.dayofyear[120] == 121 + + assert dti.isocalendar().week.iloc[0] == 1 + assert dti.isocalendar().week.iloc[120] == 18 + + assert dti.quarter[0] == 1 + assert dti.quarter[120] == 2 + + assert dti.days_in_month[0] == 31 + assert dti.days_in_month[90] == 30 + + assert dti.is_month_start[0] + assert not dti.is_month_start[1] + assert dti.is_month_start[31] + assert dti.is_quarter_start[0] + assert dti.is_quarter_start[90] + assert dti.is_year_start[0] + assert not dti.is_year_start[364] + assert not dti.is_month_end[0] + assert dti.is_month_end[30] + assert not dti.is_month_end[31] + assert dti.is_month_end[364] + assert not dti.is_quarter_end[0] + assert not dti.is_quarter_end[30] + assert dti.is_quarter_end[89] + assert dti.is_quarter_end[364] + assert not dti.is_year_end[0] + assert dti.is_year_end[364] + + assert len(dti.year) == 365 + assert len(dti.month) == 365 + assert len(dti.day) == 365 + assert len(dti.hour) == 365 + assert len(dti.minute) == 365 + assert len(dti.second) == 365 + assert len(dti.microsecond) == 365 + assert len(dti.dayofweek) == 365 + assert len(dti.dayofyear) == 365 + assert len(dti.isocalendar()) == 365 + assert len(dti.quarter) == 365 + assert len(dti.is_month_start) == 365 + assert len(dti.is_month_end) == 365 + assert len(dti.is_quarter_start) == 365 + assert len(dti.is_quarter_end) == 365 + assert len(dti.is_year_start) == 365 + assert len(dti.is_year_end) == 365 + + dti.name = "name" + + # non boolean accessors -> return Index + for accessor in DatetimeArray._field_ops: + res = getattr(dti, accessor) + assert len(res) == 365 + assert isinstance(res, Index) + assert res.name == "name" + + # boolean accessors -> return array + for accessor in DatetimeArray._bool_ops: + res = getattr(dti, accessor) + assert len(res) == 365 + assert isinstance(res, np.ndarray) + + # test boolean indexing + res = dti[dti.is_quarter_start] + exp = dti[[0, 90, 181, 273]] + tm.assert_index_equal(res, exp) + res = dti[dti.is_leap_year] + exp = DatetimeIndex([], freq="D", tz=dti.tz, name="name").as_unit("ns") + tm.assert_index_equal(res, exp) + + def test_dti_is_year_quarter_start(self): + dti = date_range(freq="BQE-FEB", start=datetime(1998, 1, 1), periods=4) + + assert sum(dti.is_quarter_start) == 0 + assert sum(dti.is_quarter_end) == 4 + assert sum(dti.is_year_start) == 0 + assert sum(dti.is_year_end) == 1 + + def test_dti_is_month_start(self): + dti = DatetimeIndex(["2000-01-01", "2000-01-02", "2000-01-03"]) + + assert dti.is_month_start[0] == 1 + + def test_dti_is_month_start_custom(self): + # Ensure is_start/end accessors 
throw ValueError for CustomBusinessDay, + bday_egypt = offsets.CustomBusinessDay(weekmask="Sun Mon Tue Wed Thu") + dti = date_range(datetime(2013, 4, 30), periods=5, freq=bday_egypt) + msg = "Custom business days is not supported by is_month_start" + with pytest.raises(ValueError, match=msg): + dti.is_month_start diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_setops.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_setops.py new file mode 100644 index 0000000000000000000000000000000000000000..fc3a1d4721841a052c19071883653a48c835c3b2 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_setops.py @@ -0,0 +1,666 @@ +from datetime import ( + datetime, + timezone, +) + +import numpy as np +import pytest +import pytz + +import pandas.util._test_decorators as td + +import pandas as pd +from pandas import ( + DataFrame, + DatetimeIndex, + Index, + Series, + Timestamp, + bdate_range, + date_range, +) +import pandas._testing as tm + +from pandas.tseries.offsets import ( + BMonthEnd, + Minute, + MonthEnd, +) + +START, END = datetime(2009, 1, 1), datetime(2010, 1, 1) + + +class TestDatetimeIndexSetOps: + tz = [ + None, + "UTC", + "Asia/Tokyo", + "US/Eastern", + "dateutil/Asia/Singapore", + "dateutil/US/Pacific", + ] + + # TODO: moved from test_datetimelike; dedup with version below + def test_union2(self, sort): + everything = date_range("2020-01-01", periods=10) + first = everything[:5] + second = everything[5:] + union = first.union(second, sort=sort) + tm.assert_index_equal(union, everything) + + @pytest.mark.parametrize("box", [np.array, Series, list]) + def test_union3(self, sort, box): + everything = date_range("2020-01-01", periods=10) + first = everything[:5] + second = everything[5:] + + # GH 10149 support listlike inputs other than Index objects + expected = first.union(second, sort=sort) + case = box(second.values) + result = first.union(case, sort=sort) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize("tz", tz) + def test_union(self, tz, sort): + rng1 = date_range("1/1/2000", freq="D", periods=5, tz=tz) + other1 = date_range("1/6/2000", freq="D", periods=5, tz=tz) + expected1 = date_range("1/1/2000", freq="D", periods=10, tz=tz) + expected1_notsorted = DatetimeIndex(list(other1) + list(rng1)) + + rng2 = date_range("1/1/2000", freq="D", periods=5, tz=tz) + other2 = date_range("1/4/2000", freq="D", periods=5, tz=tz) + expected2 = date_range("1/1/2000", freq="D", periods=8, tz=tz) + expected2_notsorted = DatetimeIndex(list(other2) + list(rng2[:3])) + + rng3 = date_range("1/1/2000", freq="D", periods=5, tz=tz) + other3 = DatetimeIndex([], tz=tz).as_unit("ns") + expected3 = date_range("1/1/2000", freq="D", periods=5, tz=tz) + expected3_notsorted = rng3 + + for rng, other, exp, exp_notsorted in [ + (rng1, other1, expected1, expected1_notsorted), + (rng2, other2, expected2, expected2_notsorted), + (rng3, other3, expected3, expected3_notsorted), + ]: + result_union = rng.union(other, sort=sort) + tm.assert_index_equal(result_union, exp) + + result_union = other.union(rng, sort=sort) + if sort is None: + tm.assert_index_equal(result_union, exp) + else: + tm.assert_index_equal(result_union, exp_notsorted) + + def test_union_coverage(self, sort): + idx = DatetimeIndex(["2000-01-03", "2000-01-01", "2000-01-02"]) + ordered = DatetimeIndex(idx.sort_values(), freq="infer") + result = ordered.union(idx, sort=sort) + tm.assert_index_equal(result, ordered) + + result = 
ordered[:0].union(ordered, sort=sort) + tm.assert_index_equal(result, ordered) + assert result.freq == ordered.freq + + def test_union_bug_1730(self, sort): + rng_a = date_range("1/1/2012", periods=4, freq="3h") + rng_b = date_range("1/1/2012", periods=4, freq="4h") + + result = rng_a.union(rng_b, sort=sort) + exp = list(rng_a) + list(rng_b[1:]) + if sort is None: + exp = DatetimeIndex(sorted(exp)) + else: + exp = DatetimeIndex(exp) + tm.assert_index_equal(result, exp) + + def test_union_bug_1745(self, sort): + left = DatetimeIndex(["2012-05-11 15:19:49.695000"]) + right = DatetimeIndex( + [ + "2012-05-29 13:04:21.322000", + "2012-05-11 15:27:24.873000", + "2012-05-11 15:31:05.350000", + ] + ) + + result = left.union(right, sort=sort) + exp = DatetimeIndex( + [ + "2012-05-11 15:19:49.695000", + "2012-05-29 13:04:21.322000", + "2012-05-11 15:27:24.873000", + "2012-05-11 15:31:05.350000", + ] + ) + if sort is None: + exp = exp.sort_values() + tm.assert_index_equal(result, exp) + + def test_union_bug_4564(self, sort): + from pandas import DateOffset + + left = date_range("2013-01-01", "2013-02-01") + right = left + DateOffset(minutes=15) + + result = left.union(right, sort=sort) + exp = list(left) + list(right) + if sort is None: + exp = DatetimeIndex(sorted(exp)) + else: + exp = DatetimeIndex(exp) + tm.assert_index_equal(result, exp) + + def test_union_freq_both_none(self, sort): + # GH11086 + expected = bdate_range("20150101", periods=10) + expected._data.freq = None + + result = expected.union(expected, sort=sort) + tm.assert_index_equal(result, expected) + assert result.freq is None + + def test_union_freq_infer(self): + # When taking the union of two DatetimeIndexes, we infer + # a freq even if the arguments don't have freq. This matches + # TimedeltaIndex behavior. 
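+        # Both operands below are built by fancy indexing and therefore carry
+        # freq=None, but their union restores every day of the original range,
+        # so a daily freq can be inferred on the result.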
+ dti = date_range("2016-01-01", periods=5) + left = dti[[0, 1, 3, 4]] + right = dti[[2, 3, 1]] + + assert left.freq is None + assert right.freq is None + + result = left.union(right) + tm.assert_index_equal(result, dti) + assert result.freq == "D" + + def test_union_dataframe_index(self): + rng1 = date_range("1/1/1999", "1/1/2012", freq="MS") + s1 = Series(np.random.default_rng(2).standard_normal(len(rng1)), rng1) + + rng2 = date_range("1/1/1980", "12/1/2001", freq="MS") + s2 = Series(np.random.default_rng(2).standard_normal(len(rng2)), rng2) + df = DataFrame({"s1": s1, "s2": s2}) + + exp = date_range("1/1/1980", "1/1/2012", freq="MS") + tm.assert_index_equal(df.index, exp) + + def test_union_with_DatetimeIndex(self, sort): + i1 = Index(np.arange(0, 20, 2, dtype=np.int64)) + i2 = date_range(start="2012-01-03 00:00:00", periods=10, freq="D") + # Works + i1.union(i2, sort=sort) + # Fails with "AttributeError: can't set attribute" + i2.union(i1, sort=sort) + + def test_union_same_timezone_different_units(self): + # GH 55238 + idx1 = date_range("2000-01-01", periods=3, tz="UTC").as_unit("ms") + idx2 = date_range("2000-01-01", periods=3, tz="UTC").as_unit("us") + result = idx1.union(idx2) + expected = date_range("2000-01-01", periods=3, tz="UTC").as_unit("us") + tm.assert_index_equal(result, expected) + + # TODO: moved from test_datetimelike; de-duplicate with version below + def test_intersection2(self): + first = date_range("2020-01-01", periods=10) + second = first[5:] + intersect = first.intersection(second) + tm.assert_index_equal(intersect, second) + + # GH 10149 + cases = [klass(second.values) for klass in [np.array, Series, list]] + for case in cases: + result = first.intersection(case) + tm.assert_index_equal(result, second) + + third = Index(["a", "b", "c"]) + result = first.intersection(third) + expected = Index([], dtype=object) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "tz", [None, "Asia/Tokyo", "US/Eastern", "dateutil/US/Pacific"] + ) + def test_intersection(self, tz, sort): + # GH 4690 (with tz) + base = date_range("6/1/2000", "6/30/2000", freq="D", name="idx") + + # if target has the same name, it is preserved + rng2 = date_range("5/15/2000", "6/20/2000", freq="D", name="idx") + expected2 = date_range("6/1/2000", "6/20/2000", freq="D", name="idx") + + # if target name is different, it will be reset + rng3 = date_range("5/15/2000", "6/20/2000", freq="D", name="other") + expected3 = date_range("6/1/2000", "6/20/2000", freq="D", name=None) + + rng4 = date_range("7/1/2000", "7/31/2000", freq="D", name="idx") + expected4 = DatetimeIndex([], freq="D", name="idx", dtype="M8[ns]") + + for rng, expected in [ + (rng2, expected2), + (rng3, expected3), + (rng4, expected4), + ]: + result = base.intersection(rng) + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + # non-monotonic + base = DatetimeIndex( + ["2011-01-05", "2011-01-04", "2011-01-02", "2011-01-03"], tz=tz, name="idx" + ).as_unit("ns") + + rng2 = DatetimeIndex( + ["2011-01-04", "2011-01-02", "2011-02-02", "2011-02-03"], tz=tz, name="idx" + ).as_unit("ns") + expected2 = DatetimeIndex( + ["2011-01-04", "2011-01-02"], tz=tz, name="idx" + ).as_unit("ns") + + rng3 = DatetimeIndex( + ["2011-01-04", "2011-01-02", "2011-02-02", "2011-02-03"], + tz=tz, + name="other", + ).as_unit("ns") + expected3 = DatetimeIndex( + ["2011-01-04", "2011-01-02"], tz=tz, name=None + ).as_unit("ns") + + # GH 7880 + rng4 = date_range("7/1/2000", "7/31/2000", freq="D", tz=tz, name="idx") + 
expected4 = DatetimeIndex([], tz=tz, name="idx").as_unit("ns") + assert expected4.freq is None + + for rng, expected in [ + (rng2, expected2), + (rng3, expected3), + (rng4, expected4), + ]: + result = base.intersection(rng, sort=sort) + if sort is None: + expected = expected.sort_values() + tm.assert_index_equal(result, expected) + assert result.freq == expected.freq + + # parametrize over both anchored and non-anchored freqs, as they + # have different code paths + @pytest.mark.parametrize("freq", ["min", "B"]) + def test_intersection_empty(self, tz_aware_fixture, freq): + # empty same freq GH2129 + tz = tz_aware_fixture + rng = date_range("6/1/2000", "6/15/2000", freq=freq, tz=tz) + result = rng[0:0].intersection(rng) + assert len(result) == 0 + assert result.freq == rng.freq + + result = rng.intersection(rng[0:0]) + assert len(result) == 0 + assert result.freq == rng.freq + + # no overlap GH#33604 + check_freq = freq != "min" # We don't preserve freq on non-anchored offsets + result = rng[:3].intersection(rng[-3:]) + tm.assert_index_equal(result, rng[:0]) + if check_freq: + # We don't preserve freq on non-anchored offsets + assert result.freq == rng.freq + + # swapped left and right + result = rng[-3:].intersection(rng[:3]) + tm.assert_index_equal(result, rng[:0]) + if check_freq: + # We don't preserve freq on non-anchored offsets + assert result.freq == rng.freq + + def test_intersection_bug_1708(self): + from pandas import DateOffset + + index_1 = date_range("1/1/2012", periods=4, freq="12h") + index_2 = index_1 + DateOffset(hours=1) + + result = index_1.intersection(index_2) + assert len(result) == 0 + + @pytest.mark.parametrize("tz", tz) + def test_difference(self, tz, sort): + rng_dates = ["1/2/2000", "1/3/2000", "1/1/2000", "1/4/2000", "1/5/2000"] + + rng1 = DatetimeIndex(rng_dates, tz=tz) + other1 = date_range("1/6/2000", freq="D", periods=5, tz=tz) + expected1 = DatetimeIndex(rng_dates, tz=tz) + + rng2 = DatetimeIndex(rng_dates, tz=tz) + other2 = date_range("1/4/2000", freq="D", periods=5, tz=tz) + expected2 = DatetimeIndex(rng_dates[:3], tz=tz) + + rng3 = DatetimeIndex(rng_dates, tz=tz) + other3 = DatetimeIndex([], tz=tz) + expected3 = DatetimeIndex(rng_dates, tz=tz) + + for rng, other, expected in [ + (rng1, other1, expected1), + (rng2, other2, expected2), + (rng3, other3, expected3), + ]: + result_diff = rng.difference(other, sort) + if sort is None and len(other): + # We dont sort (yet?) 
when empty GH#24959 + expected = expected.sort_values() + tm.assert_index_equal(result_diff, expected) + + def test_difference_freq(self, sort): + # GH14323: difference of DatetimeIndex should not preserve frequency + + index = date_range("20160920", "20160925", freq="D") + other = date_range("20160921", "20160924", freq="D") + expected = DatetimeIndex(["20160920", "20160925"], dtype="M8[ns]", freq=None) + idx_diff = index.difference(other, sort) + tm.assert_index_equal(idx_diff, expected) + tm.assert_attr_equal("freq", idx_diff, expected) + + # preserve frequency when the difference is a contiguous + # subset of the original range + other = date_range("20160922", "20160925", freq="D") + idx_diff = index.difference(other, sort) + expected = DatetimeIndex(["20160920", "20160921"], dtype="M8[ns]", freq="D") + tm.assert_index_equal(idx_diff, expected) + tm.assert_attr_equal("freq", idx_diff, expected) + + def test_datetimeindex_diff(self, sort): + dti1 = date_range(freq="QE-JAN", start=datetime(1997, 12, 31), periods=100) + dti2 = date_range(freq="QE-JAN", start=datetime(1997, 12, 31), periods=98) + assert len(dti1.difference(dti2, sort)) == 2 + + @pytest.mark.parametrize("tz", [None, "Asia/Tokyo", "US/Eastern"]) + def test_setops_preserve_freq(self, tz): + rng = date_range("1/1/2000", "1/1/2002", name="idx", tz=tz) + + result = rng[:50].union(rng[50:100]) + assert result.name == rng.name + assert result.freq == rng.freq + assert result.tz == rng.tz + + result = rng[:50].union(rng[30:100]) + assert result.name == rng.name + assert result.freq == rng.freq + assert result.tz == rng.tz + + result = rng[:50].union(rng[60:100]) + assert result.name == rng.name + assert result.freq is None + assert result.tz == rng.tz + + result = rng[:50].intersection(rng[25:75]) + assert result.name == rng.name + assert result.freqstr == "D" + assert result.tz == rng.tz + + nofreq = DatetimeIndex(list(rng[25:75]), name="other") + result = rng[:50].union(nofreq) + assert result.name is None + assert result.freq == rng.freq + assert result.tz == rng.tz + + result = rng[:50].intersection(nofreq) + assert result.name is None + assert result.freq == rng.freq + assert result.tz == rng.tz + + def test_intersection_non_tick_no_fastpath(self): + # GH#42104 + dti = DatetimeIndex( + [ + "2018-12-31", + "2019-03-31", + "2019-06-30", + "2019-09-30", + "2019-12-31", + "2020-03-31", + ], + freq="QE-DEC", + ) + result = dti[::2].intersection(dti[1::2]) + expected = dti[:0] + tm.assert_index_equal(result, expected) + + def test_dti_intersection(self): + rng = date_range("1/1/2011", periods=100, freq="h", tz="utc") + + left = rng[10:90][::-1] + right = rng[20:80][::-1] + + assert left.tz == rng.tz + result = left.intersection(right) + assert result.tz == left.tz + + # Note: not difference, as there is no symmetry requirement there + @pytest.mark.parametrize("setop", ["union", "intersection", "symmetric_difference"]) + def test_dti_setop_aware(self, setop): + # non-overlapping + # GH#39328 as of 2.0 we cast these to UTC instead of object + rng = date_range("2012-11-15 00:00:00", periods=6, freq="h", tz="US/Central") + + rng2 = date_range("2012-11-15 12:00:00", periods=6, freq="h", tz="US/Eastern") + + result = getattr(rng, setop)(rng2) + + left = rng.tz_convert("UTC") + right = rng2.tz_convert("UTC") + expected = getattr(left, setop)(right) + tm.assert_index_equal(result, expected) + assert result.tz == left.tz + if len(result): + assert result[0].tz is timezone.utc + assert result[-1].tz is timezone.utc + + def 
test_dti_union_mixed(self): + # GH#21671 + rng = DatetimeIndex([Timestamp("2011-01-01"), pd.NaT]) + rng2 = DatetimeIndex(["2012-01-01", "2012-01-02"], tz="Asia/Tokyo") + result = rng.union(rng2) + expected = Index( + [ + Timestamp("2011-01-01"), + pd.NaT, + Timestamp("2012-01-01", tz="Asia/Tokyo"), + Timestamp("2012-01-02", tz="Asia/Tokyo"), + ], + dtype=object, + ) + tm.assert_index_equal(result, expected) + + +class TestBusinessDatetimeIndex: + def test_union(self, sort): + rng = bdate_range(START, END) + # overlapping + left = rng[:10] + right = rng[5:10] + + the_union = left.union(right, sort=sort) + assert isinstance(the_union, DatetimeIndex) + + # non-overlapping, gap in middle + left = rng[:5] + right = rng[10:] + + the_union = left.union(right, sort=sort) + assert isinstance(the_union, Index) + + # non-overlapping, no gap + left = rng[:5] + right = rng[5:10] + + the_union = left.union(right, sort=sort) + assert isinstance(the_union, DatetimeIndex) + + # order does not matter + if sort is None: + tm.assert_index_equal(right.union(left, sort=sort), the_union) + else: + expected = DatetimeIndex(list(right) + list(left)) + tm.assert_index_equal(right.union(left, sort=sort), expected) + + # overlapping, but different offset + rng = date_range(START, END, freq=BMonthEnd()) + + the_union = rng.union(rng, sort=sort) + assert isinstance(the_union, DatetimeIndex) + + def test_union_not_cacheable(self, sort): + rng = date_range("1/1/2000", periods=50, freq=Minute()) + rng1 = rng[10:] + rng2 = rng[:25] + the_union = rng1.union(rng2, sort=sort) + if sort is None: + tm.assert_index_equal(the_union, rng) + else: + expected = DatetimeIndex(list(rng[10:]) + list(rng[:10])) + tm.assert_index_equal(the_union, expected) + + rng1 = rng[10:] + rng2 = rng[15:35] + the_union = rng1.union(rng2, sort=sort) + expected = rng[10:] + tm.assert_index_equal(the_union, expected) + + def test_intersection(self): + rng = date_range("1/1/2000", periods=50, freq=Minute()) + rng1 = rng[10:] + rng2 = rng[:25] + the_int = rng1.intersection(rng2) + expected = rng[10:25] + tm.assert_index_equal(the_int, expected) + assert isinstance(the_int, DatetimeIndex) + assert the_int.freq == rng.freq + + the_int = rng1.intersection(rng2) + tm.assert_index_equal(the_int, expected) + + # non-overlapping + the_int = rng[:10].intersection(rng[10:]) + expected = DatetimeIndex([]).as_unit("ns") + tm.assert_index_equal(the_int, expected) + + def test_intersection_bug(self): + # GH #771 + a = bdate_range("11/30/2011", "12/31/2011") + b = bdate_range("12/10/2011", "12/20/2011") + result = a.intersection(b) + tm.assert_index_equal(result, b) + assert result.freq == b.freq + + def test_intersection_list(self): + # GH#35876 + # values is not an Index -> no name -> retain "a" + values = [Timestamp("2020-01-01"), Timestamp("2020-02-01")] + idx = DatetimeIndex(values, name="a") + res = idx.intersection(values) + tm.assert_index_equal(res, idx) + + def test_month_range_union_tz_pytz(self, sort): + tz = pytz.timezone("US/Eastern") + + early_start = datetime(2011, 1, 1) + early_end = datetime(2011, 3, 1) + + late_start = datetime(2011, 3, 1) + late_end = datetime(2011, 5, 1) + + early_dr = date_range(start=early_start, end=early_end, tz=tz, freq=MonthEnd()) + late_dr = date_range(start=late_start, end=late_end, tz=tz, freq=MonthEnd()) + + early_dr.union(late_dr, sort=sort) + + @td.skip_if_windows + def test_month_range_union_tz_dateutil(self, sort): + from pandas._libs.tslibs.timezones import dateutil_gettz + + tz = dateutil_gettz("US/Eastern") + + 
early_start = datetime(2011, 1, 1) + early_end = datetime(2011, 3, 1) + + late_start = datetime(2011, 3, 1) + late_end = datetime(2011, 5, 1) + + early_dr = date_range(start=early_start, end=early_end, tz=tz, freq=MonthEnd()) + late_dr = date_range(start=late_start, end=late_end, tz=tz, freq=MonthEnd()) + + early_dr.union(late_dr, sort=sort) + + @pytest.mark.parametrize("sort", [False, None]) + def test_intersection_duplicates(self, sort): + # GH#38196 + idx1 = Index( + [ + Timestamp("2019-12-13"), + Timestamp("2019-12-12"), + Timestamp("2019-12-12"), + ] + ) + result = idx1.intersection(idx1, sort=sort) + expected = Index([Timestamp("2019-12-13"), Timestamp("2019-12-12")]) + tm.assert_index_equal(result, expected) + + +class TestCustomDatetimeIndex: + def test_union(self, sort): + # overlapping + rng = bdate_range(START, END, freq="C") + left = rng[:10] + right = rng[5:10] + + the_union = left.union(right, sort=sort) + assert isinstance(the_union, DatetimeIndex) + + # non-overlapping, gap in middle + left = rng[:5] + right = rng[10:] + + the_union = left.union(right, sort) + assert isinstance(the_union, Index) + + # non-overlapping, no gap + left = rng[:5] + right = rng[5:10] + + the_union = left.union(right, sort=sort) + assert isinstance(the_union, DatetimeIndex) + + # order does not matter + if sort is None: + tm.assert_index_equal(right.union(left, sort=sort), the_union) + + # overlapping, but different offset + rng = date_range(START, END, freq=BMonthEnd()) + + the_union = rng.union(rng, sort=sort) + assert isinstance(the_union, DatetimeIndex) + + def test_intersection_bug(self): + # GH #771 + a = bdate_range("11/30/2011", "12/31/2011", freq="C") + b = bdate_range("12/10/2011", "12/20/2011", freq="C") + result = a.intersection(b) + tm.assert_index_equal(result, b) + assert result.freq == b.freq + + @pytest.mark.parametrize( + "tz", [None, "UTC", "Europe/Berlin", pytz.FixedOffset(-60)] + ) + def test_intersection_dst_transition(self, tz): + # GH 46702: Europe/Berlin has DST transition + idx1 = date_range("2020-03-27", periods=5, freq="D", tz=tz) + idx2 = date_range("2020-03-30", periods=5, freq="D", tz=tz) + result = idx1.intersection(idx2) + expected = date_range("2020-03-30", periods=2, freq="D", tz=tz) + tm.assert_index_equal(result, expected) + + # GH#45863 same problem for union + index1 = date_range("2021-10-28", periods=3, freq="D", tz="Europe/London") + index2 = date_range("2021-10-30", periods=4, freq="D", tz="Europe/London") + result = index1.union(index2) + expected = date_range("2021-10-28", periods=6, freq="D", tz="Europe/London") + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_timezones.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_timezones.py new file mode 100644 index 0000000000000000000000000000000000000000..daa5b346eb4ec2034fb164be5c03f12b7d0b4dc6 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/test_timezones.py @@ -0,0 +1,251 @@ +""" +Tests for DatetimeIndex timezone-related methods +""" +from datetime import ( + datetime, + timedelta, + timezone, + tzinfo, +) + +from dateutil.tz import gettz +import numpy as np +import pytest +import pytz + +from pandas._libs.tslibs import ( + conversion, + timezones, +) + +import pandas as pd +from pandas import ( + DatetimeIndex, + Timestamp, + bdate_range, + date_range, + isna, + to_datetime, +) +import pandas._testing as tm + + +class FixedOffset(tzinfo): + """Fixed 
offset in minutes east from UTC.""" + + def __init__(self, offset, name) -> None: + self.__offset = timedelta(minutes=offset) + self.__name = name + + def utcoffset(self, dt): + return self.__offset + + def tzname(self, dt): + return self.__name + + def dst(self, dt): + return timedelta(0) + + +fixed_off_no_name = FixedOffset(-330, None) + + +class TestDatetimeIndexTimezones: + # ------------------------------------------------------------- + # Unsorted + + def test_dti_drop_dont_lose_tz(self): + # GH#2621 + ind = date_range("2012-12-01", periods=10, tz="utc") + ind = ind.drop(ind[-1]) + + assert ind.tz is not None + + def test_dti_tz_conversion_freq(self, tz_naive_fixture): + # GH25241 + t3 = DatetimeIndex(["2019-01-01 10:00"], freq="h") + assert t3.tz_localize(tz=tz_naive_fixture).freq == t3.freq + t4 = DatetimeIndex(["2019-01-02 12:00"], tz="UTC", freq="min") + assert t4.tz_convert(tz="UTC").freq == t4.freq + + def test_drop_dst_boundary(self): + # see gh-18031 + tz = "Europe/Brussels" + freq = "15min" + + start = Timestamp("201710290100", tz=tz) + end = Timestamp("201710290300", tz=tz) + index = date_range(start=start, end=end, freq=freq) + + expected = DatetimeIndex( + [ + "201710290115", + "201710290130", + "201710290145", + "201710290200", + "201710290215", + "201710290230", + "201710290245", + "201710290200", + "201710290215", + "201710290230", + "201710290245", + "201710290300", + ], + dtype="M8[ns, Europe/Brussels]", + freq=freq, + ambiguous=[ + True, + True, + True, + True, + True, + True, + True, + False, + False, + False, + False, + False, + ], + ) + result = index.drop(index[0]) + tm.assert_index_equal(result, expected) + + def test_date_range_localize(self, unit): + rng = date_range( + "3/11/2012 03:00", periods=15, freq="h", tz="US/Eastern", unit=unit + ) + rng2 = DatetimeIndex( + ["3/11/2012 03:00", "3/11/2012 04:00"], dtype=f"M8[{unit}, US/Eastern]" + ) + rng3 = date_range("3/11/2012 03:00", periods=15, freq="h", unit=unit) + rng3 = rng3.tz_localize("US/Eastern") + + tm.assert_index_equal(rng._with_freq(None), rng3) + + # DST transition time + val = rng[0] + exp = Timestamp("3/11/2012 03:00", tz="US/Eastern") + + assert val.hour == 3 + assert exp.hour == 3 + assert val == exp # same UTC value + tm.assert_index_equal(rng[:2], rng2) + + def test_date_range_localize2(self, unit): + # Right before the DST transition + rng = date_range( + "3/11/2012 00:00", periods=2, freq="h", tz="US/Eastern", unit=unit + ) + rng2 = DatetimeIndex( + ["3/11/2012 00:00", "3/11/2012 01:00"], + dtype=f"M8[{unit}, US/Eastern]", + freq="h", + ) + tm.assert_index_equal(rng, rng2) + exp = Timestamp("3/11/2012 00:00", tz="US/Eastern") + assert exp.hour == 0 + assert rng[0] == exp + exp = Timestamp("3/11/2012 01:00", tz="US/Eastern") + assert exp.hour == 1 + assert rng[1] == exp + + rng = date_range( + "3/11/2012 00:00", periods=10, freq="h", tz="US/Eastern", unit=unit + ) + assert rng[2].hour == 3 + + def test_timestamp_equality_different_timezones(self): + utc_range = date_range("1/1/2000", periods=20, tz="UTC") + eastern_range = utc_range.tz_convert("US/Eastern") + berlin_range = utc_range.tz_convert("Europe/Berlin") + + for a, b, c in zip(utc_range, eastern_range, berlin_range): + assert a == b + assert b == c + assert a == c + + assert (utc_range == eastern_range).all() + assert (utc_range == berlin_range).all() + assert (berlin_range == eastern_range).all() + + def test_dti_equals_with_tz(self): + left = date_range("1/1/2011", periods=100, freq="h", tz="utc") + right = date_range("1/1/2011", 
periods=100, freq="h", tz="US/Eastern") + + assert not left.equals(right) + + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_dti_tz_nat(self, tzstr): + idx = DatetimeIndex([Timestamp("2013-1-1", tz=tzstr), pd.NaT]) + + assert isna(idx[1]) + assert idx[0].tzinfo is not None + + @pytest.mark.parametrize("tzstr", ["US/Eastern", "dateutil/US/Eastern"]) + def test_utc_box_timestamp_and_localize(self, tzstr): + tz = timezones.maybe_get_tz(tzstr) + + rng = date_range("3/11/2012", "3/12/2012", freq="h", tz="utc") + rng_eastern = rng.tz_convert(tzstr) + + expected = rng[-1].astimezone(tz) + + stamp = rng_eastern[-1] + assert stamp == expected + assert stamp.tzinfo == expected.tzinfo + + # right tzinfo + rng = date_range("3/13/2012", "3/14/2012", freq="h", tz="utc") + rng_eastern = rng.tz_convert(tzstr) + # test not valid for dateutil timezones. + # assert 'EDT' in repr(rng_eastern[0].tzinfo) + assert "EDT" in repr(rng_eastern[0].tzinfo) or "tzfile" in repr( + rng_eastern[0].tzinfo + ) + + @pytest.mark.parametrize("tz", [pytz.timezone("US/Central"), gettz("US/Central")]) + def test_with_tz(self, tz): + # just want it to work + start = datetime(2011, 3, 12, tzinfo=pytz.utc) + dr = bdate_range(start, periods=50, freq=pd.offsets.Hour()) + assert dr.tz is pytz.utc + + # DateRange with naive datetimes + dr = bdate_range("1/1/2005", "1/1/2009", tz=pytz.utc) + dr = bdate_range("1/1/2005", "1/1/2009", tz=tz) + + # normalized + central = dr.tz_convert(tz) + assert central.tz is tz + naive = central[0].to_pydatetime().replace(tzinfo=None) + comp = conversion.localize_pydatetime(naive, tz).tzinfo + assert central[0].tz is comp + + # compare vs a localized tz + naive = dr[0].to_pydatetime().replace(tzinfo=None) + comp = conversion.localize_pydatetime(naive, tz).tzinfo + assert central[0].tz is comp + + # datetimes with tzinfo set + dr = bdate_range( + datetime(2005, 1, 1, tzinfo=pytz.utc), datetime(2009, 1, 1, tzinfo=pytz.utc) + ) + msg = "Start and end cannot both be tz-aware with different timezones" + with pytest.raises(Exception, match=msg): + bdate_range(datetime(2005, 1, 1, tzinfo=pytz.utc), "1/1/2009", tz=tz) + + @pytest.mark.parametrize("tz", [pytz.timezone("US/Eastern"), gettz("US/Eastern")]) + def test_dti_convert_tz_aware_datetime_datetime(self, tz): + # GH#1581 + dates = [datetime(2000, 1, 1), datetime(2000, 1, 2), datetime(2000, 1, 3)] + + dates_aware = [conversion.localize_pydatetime(x, tz) for x in dates] + result = DatetimeIndex(dates_aware).as_unit("ns") + assert timezones.tz_compare(result.tz, tz) + + converted = to_datetime(dates_aware, utc=True).as_unit("ns") + ex_vals = np.array([Timestamp(x).as_unit("ns")._value for x in dates_aware]) + tm.assert_numpy_array_equal(converted.asi8, ex_vals) + assert converted.tz is timezone.utc diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__init__.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b35872b70bf5daf9e54ff7fed3e2c8a1ea2a6ba1 Binary files /dev/null and 
b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_astype.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_astype.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10463c366552d45cbbb8b1f33816d512972dd9fe Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_astype.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_indexing.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_indexing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a16d3cf30cb33535a7d0c042c936ebae95919dc0 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_indexing.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_join.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_join.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9341ab5734319c3e424c185e265f8b50cecd298e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_join.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_numeric.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_numeric.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e868f6608e632867addb9c4f99f717165ae763e2 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_numeric.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_setops.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_setops.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1a9ca39f86c85ad94dccda6f14e6bfb8c6d4d936 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/__pycache__/test_setops.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_astype.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_astype.py new file mode 100644 index 0000000000000000000000000000000000000000..1c2df6008de5d85789b026e947ac27a8036a9be7 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_astype.py @@ -0,0 +1,95 @@ +import numpy as np +import pytest + +from pandas import ( + Index, + to_datetime, + to_timedelta, +) +import pandas._testing as tm + + +class TestAstype: + def test_astype_float64_to_uint64(self): + # GH#45309 used to incorrectly return Index with int64 dtype + idx = Index([0.0, 5.0, 10.0, 15.0, 20.0], dtype=np.float64) + result = idx.astype("u8") + expected = Index([0, 5, 10, 15, 20], dtype=np.uint64) + tm.assert_index_equal(result, expected, exact=True) + + idx_with_negatives = idx - 10 + with pytest.raises(ValueError, match="losslessly"): + 
idx_with_negatives.astype(np.uint64) + + def test_astype_float64_to_object(self): + float_index = Index([0.0, 2.5, 5.0, 7.5, 10.0], dtype=np.float64) + result = float_index.astype(object) + assert result.equals(float_index) + assert float_index.equals(result) + assert isinstance(result, Index) and result.dtype == object + + def test_astype_float64_mixed_to_object(self): + # mixed int-float + idx = Index([1.5, 2, 3, 4, 5], dtype=np.float64) + idx.name = "foo" + result = idx.astype(object) + assert result.equals(idx) + assert idx.equals(result) + assert isinstance(result, Index) and result.dtype == object + + @pytest.mark.parametrize("dtype", ["int16", "int32", "int64"]) + def test_astype_float64_to_int_dtype(self, dtype): + # GH#12881 + # a float astype int + idx = Index([0, 1, 2], dtype=np.float64) + result = idx.astype(dtype) + expected = Index([0, 1, 2], dtype=dtype) + tm.assert_index_equal(result, expected, exact=True) + + idx = Index([0, 1.1, 2], dtype=np.float64) + result = idx.astype(dtype) + expected = Index([0, 1, 2], dtype=dtype) + tm.assert_index_equal(result, expected, exact=True) + + @pytest.mark.parametrize("dtype", ["float32", "float64"]) + def test_astype_float64_to_float_dtype(self, dtype): + # GH#12881 + # a float astype int + idx = Index([0, 1, 2], dtype=np.float64) + result = idx.astype(dtype) + assert isinstance(result, Index) and result.dtype == dtype + + @pytest.mark.parametrize("dtype", ["M8[ns]", "m8[ns]"]) + def test_astype_float_to_datetimelike(self, dtype): + # GH#49660 pre-2.0 Index.astype from floating to M8/m8/Period raised, + # inconsistent with Series.astype + idx = Index([0, 1.1, 2], dtype=np.float64) + + result = idx.astype(dtype) + if dtype[0] == "M": + expected = to_datetime(idx.values) + else: + expected = to_timedelta(idx.values) + tm.assert_index_equal(result, expected) + + # check that we match Series behavior + result = idx.to_series().set_axis(range(3)).astype(dtype) + expected = expected.to_series().set_axis(range(3)) + tm.assert_series_equal(result, expected) + + @pytest.mark.parametrize("dtype", [int, "int16", "int32", "int64"]) + @pytest.mark.parametrize("non_finite", [np.inf, np.nan]) + def test_cannot_cast_inf_to_int(self, non_finite, dtype): + # GH#13149 + idx = Index([1, 2, non_finite], dtype=np.float64) + + msg = r"Cannot convert non-finite values \(NA or inf\) to integer" + with pytest.raises(ValueError, match=msg): + idx.astype(dtype) + + def test_astype_from_object(self): + index = Index([1.0, np.nan, 0.2], dtype="object") + result = index.astype(float) + expected = Index([1.0, np.nan, 0.2], dtype=np.float64) + assert result.dtype == expected.dtype + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_indexing.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_indexing.py new file mode 100644 index 0000000000000000000000000000000000000000..cd28d519313ed36228040361dfbb2a8dccf77be5 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_indexing.py @@ -0,0 +1,611 @@ +import numpy as np +import pytest + +from pandas.errors import InvalidIndexError + +from pandas import ( + NA, + Index, + RangeIndex, + Series, + Timestamp, +) +import pandas._testing as tm +from pandas.core.arrays import ( + ArrowExtensionArray, + FloatingArray, +) + + +@pytest.fixture +def index_large(): + # large values used in Index[uint64] tests where no compat needed with Int64/Float64 + large = [2**63, 2**63 + 10, 2**63 + 15, 
2**63 + 20, 2**63 + 25] + return Index(large, dtype=np.uint64) + + +class TestGetLoc: + def test_get_loc(self): + index = Index([0, 1, 2]) + assert index.get_loc(1) == 1 + + def test_get_loc_raises_bad_label(self): + index = Index([0, 1, 2]) + with pytest.raises(InvalidIndexError, match=r"\[1, 2\]"): + index.get_loc([1, 2]) + + def test_get_loc_float64(self): + idx = Index([0.0, 1.0, 2.0], dtype=np.float64) + + with pytest.raises(KeyError, match="^'foo'$"): + idx.get_loc("foo") + with pytest.raises(KeyError, match=r"^1\.5$"): + idx.get_loc(1.5) + with pytest.raises(KeyError, match="^True$"): + idx.get_loc(True) + with pytest.raises(KeyError, match="^False$"): + idx.get_loc(False) + + def test_get_loc_na(self): + idx = Index([np.nan, 1, 2], dtype=np.float64) + assert idx.get_loc(1) == 1 + assert idx.get_loc(np.nan) == 0 + + idx = Index([np.nan, 1, np.nan], dtype=np.float64) + assert idx.get_loc(1) == 1 + + # representable by slice [0:2:2] + msg = "'Cannot get left slice bound for non-unique label: nan'" + with pytest.raises(KeyError, match=msg): + idx.slice_locs(np.nan) + # not representable by slice + idx = Index([np.nan, 1, np.nan, np.nan], dtype=np.float64) + assert idx.get_loc(1) == 1 + msg = "'Cannot get left slice bound for non-unique label: nan" + with pytest.raises(KeyError, match=msg): + idx.slice_locs(np.nan) + + def test_get_loc_missing_nan(self): + # GH#8569 + idx = Index([1, 2], dtype=np.float64) + assert idx.get_loc(1) == 0 + with pytest.raises(KeyError, match=r"^3$"): + idx.get_loc(3) + with pytest.raises(KeyError, match="^nan$"): + idx.get_loc(np.nan) + with pytest.raises(InvalidIndexError, match=r"\[nan\]"): + # listlike/non-hashable raises TypeError + idx.get_loc([np.nan]) + + @pytest.mark.parametrize("vals", [[1], [1.0], [Timestamp("2019-12-31")], ["test"]]) + def test_get_loc_float_index_nan_with_method(self, vals): + # GH#39382 + idx = Index(vals) + with pytest.raises(KeyError, match="nan"): + idx.get_loc(np.nan) + + @pytest.mark.parametrize("dtype", ["f8", "i8", "u8"]) + def test_get_loc_numericindex_none_raises(self, dtype): + # case that goes through searchsorted and key is non-comparable to values + arr = np.arange(10**7, dtype=dtype) + idx = Index(arr) + with pytest.raises(KeyError, match="None"): + idx.get_loc(None) + + def test_get_loc_overflows(self): + # unique but non-monotonic goes through IndexEngine.mapping.get_item + idx = Index([0, 2, 1]) + + val = np.iinfo(np.int64).max + 1 + + with pytest.raises(KeyError, match=str(val)): + idx.get_loc(val) + with pytest.raises(KeyError, match=str(val)): + idx._engine.get_loc(val) + + +class TestGetIndexer: + def test_get_indexer(self): + index1 = Index([1, 2, 3, 4, 5]) + index2 = Index([2, 4, 6]) + + r1 = index1.get_indexer(index2) + e1 = np.array([1, 3, -1], dtype=np.intp) + tm.assert_almost_equal(r1, e1) + + @pytest.mark.parametrize("reverse", [True, False]) + @pytest.mark.parametrize( + "expected,method", + [ + (np.array([-1, 0, 0, 1, 1], dtype=np.intp), "pad"), + (np.array([-1, 0, 0, 1, 1], dtype=np.intp), "ffill"), + (np.array([0, 0, 1, 1, 2], dtype=np.intp), "backfill"), + (np.array([0, 0, 1, 1, 2], dtype=np.intp), "bfill"), + ], + ) + def test_get_indexer_methods(self, reverse, expected, method): + index1 = Index([1, 2, 3, 4, 5]) + index2 = Index([2, 4, 6]) + + if reverse: + index1 = index1[::-1] + expected = expected[::-1] + + result = index2.get_indexer(index1, method=method) + tm.assert_almost_equal(result, expected) + + def test_get_indexer_invalid(self): + # GH10411 + index = Index(np.arange(10)) + + with 
pytest.raises(ValueError, match="tolerance argument"): + index.get_indexer([1, 0], tolerance=1) + + with pytest.raises(ValueError, match="limit argument"): + index.get_indexer([1, 0], limit=1) + + @pytest.mark.parametrize( + "method, tolerance, indexer, expected", + [ + ("pad", None, [0, 5, 9], [0, 5, 9]), + ("backfill", None, [0, 5, 9], [0, 5, 9]), + ("nearest", None, [0, 5, 9], [0, 5, 9]), + ("pad", 0, [0, 5, 9], [0, 5, 9]), + ("backfill", 0, [0, 5, 9], [0, 5, 9]), + ("nearest", 0, [0, 5, 9], [0, 5, 9]), + ("pad", None, [0.2, 1.8, 8.5], [0, 1, 8]), + ("backfill", None, [0.2, 1.8, 8.5], [1, 2, 9]), + ("nearest", None, [0.2, 1.8, 8.5], [0, 2, 9]), + ("pad", 1, [0.2, 1.8, 8.5], [0, 1, 8]), + ("backfill", 1, [0.2, 1.8, 8.5], [1, 2, 9]), + ("nearest", 1, [0.2, 1.8, 8.5], [0, 2, 9]), + ("pad", 0.2, [0.2, 1.8, 8.5], [0, -1, -1]), + ("backfill", 0.2, [0.2, 1.8, 8.5], [-1, 2, -1]), + ("nearest", 0.2, [0.2, 1.8, 8.5], [0, 2, -1]), + ], + ) + def test_get_indexer_nearest(self, method, tolerance, indexer, expected): + index = Index(np.arange(10)) + + actual = index.get_indexer(indexer, method=method, tolerance=tolerance) + tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp)) + + @pytest.mark.parametrize("listtype", [list, tuple, Series, np.array]) + @pytest.mark.parametrize( + "tolerance, expected", + list( + zip( + [[0.3, 0.3, 0.1], [0.2, 0.1, 0.1], [0.1, 0.5, 0.5]], + [[0, 2, -1], [0, -1, -1], [-1, 2, 9]], + ) + ), + ) + def test_get_indexer_nearest_listlike_tolerance( + self, tolerance, expected, listtype + ): + index = Index(np.arange(10)) + + actual = index.get_indexer( + [0.2, 1.8, 8.5], method="nearest", tolerance=listtype(tolerance) + ) + tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp)) + + def test_get_indexer_nearest_error(self): + index = Index(np.arange(10)) + with pytest.raises(ValueError, match="limit argument"): + index.get_indexer([1, 0], method="nearest", limit=1) + + with pytest.raises(ValueError, match="tolerance size must match"): + index.get_indexer([1, 0], method="nearest", tolerance=[1, 2, 3]) + + @pytest.mark.parametrize( + "method,expected", + [("pad", [8, 7, 0]), ("backfill", [9, 8, 1]), ("nearest", [9, 7, 0])], + ) + def test_get_indexer_nearest_decreasing(self, method, expected): + index = Index(np.arange(10))[::-1] + + actual = index.get_indexer([0, 5, 9], method=method) + tm.assert_numpy_array_equal(actual, np.array([9, 4, 0], dtype=np.intp)) + + actual = index.get_indexer([0.2, 1.8, 8.5], method=method) + tm.assert_numpy_array_equal(actual, np.array(expected, dtype=np.intp)) + + @pytest.mark.parametrize("idx_dtype", ["int64", "float64", "uint64", "range"]) + @pytest.mark.parametrize("method", ["get_indexer", "get_indexer_non_unique"]) + def test_get_indexer_numeric_index_boolean_target(self, method, idx_dtype): + # GH 16877 + + if idx_dtype == "range": + numeric_index = RangeIndex(4) + else: + numeric_index = Index(np.arange(4, dtype=idx_dtype)) + + other = Index([True, False, True]) + + result = getattr(numeric_index, method)(other) + expected = np.array([-1, -1, -1], dtype=np.intp) + if method == "get_indexer": + tm.assert_numpy_array_equal(result, expected) + else: + missing = np.arange(3, dtype=np.intp) + tm.assert_numpy_array_equal(result[0], expected) + tm.assert_numpy_array_equal(result[1], missing) + + @pytest.mark.parametrize("method", ["pad", "backfill", "nearest"]) + def test_get_indexer_with_method_numeric_vs_bool(self, method): + left = Index([1, 2, 3]) + right = Index([True, False]) + + with pytest.raises(TypeError, 
match="Cannot compare"): + left.get_indexer(right, method=method) + + with pytest.raises(TypeError, match="Cannot compare"): + right.get_indexer(left, method=method) + + def test_get_indexer_numeric_vs_bool(self): + left = Index([1, 2, 3]) + right = Index([True, False]) + + res = left.get_indexer(right) + expected = -1 * np.ones(len(right), dtype=np.intp) + tm.assert_numpy_array_equal(res, expected) + + res = right.get_indexer(left) + expected = -1 * np.ones(len(left), dtype=np.intp) + tm.assert_numpy_array_equal(res, expected) + + res = left.get_indexer_non_unique(right)[0] + expected = -1 * np.ones(len(right), dtype=np.intp) + tm.assert_numpy_array_equal(res, expected) + + res = right.get_indexer_non_unique(left)[0] + expected = -1 * np.ones(len(left), dtype=np.intp) + tm.assert_numpy_array_equal(res, expected) + + def test_get_indexer_float64(self): + idx = Index([0.0, 1.0, 2.0], dtype=np.float64) + tm.assert_numpy_array_equal( + idx.get_indexer(idx), np.array([0, 1, 2], dtype=np.intp) + ) + + target = [-0.1, 0.5, 1.1] + tm.assert_numpy_array_equal( + idx.get_indexer(target, "pad"), np.array([-1, 0, 1], dtype=np.intp) + ) + tm.assert_numpy_array_equal( + idx.get_indexer(target, "backfill"), np.array([0, 1, 2], dtype=np.intp) + ) + tm.assert_numpy_array_equal( + idx.get_indexer(target, "nearest"), np.array([0, 1, 1], dtype=np.intp) + ) + + def test_get_indexer_nan(self): + # GH#7820 + result = Index([1, 2, np.nan], dtype=np.float64).get_indexer([np.nan]) + expected = np.array([2], dtype=np.intp) + tm.assert_numpy_array_equal(result, expected) + + def test_get_indexer_int64(self): + index = Index(range(0, 20, 2), dtype=np.int64) + target = Index(np.arange(10), dtype=np.int64) + indexer = index.get_indexer(target) + expected = np.array([0, -1, 1, -1, 2, -1, 3, -1, 4, -1], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + target = Index(np.arange(10), dtype=np.int64) + indexer = index.get_indexer(target, method="pad") + expected = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + target = Index(np.arange(10), dtype=np.int64) + indexer = index.get_indexer(target, method="backfill") + expected = np.array([0, 1, 1, 2, 2, 3, 3, 4, 4, 5], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + def test_get_indexer_uint64(self, index_large): + target = Index(np.arange(10).astype("uint64") * 5 + 2**63) + indexer = index_large.get_indexer(target) + expected = np.array([0, -1, 1, 2, 3, 4, -1, -1, -1, -1], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + target = Index(np.arange(10).astype("uint64") * 5 + 2**63) + indexer = index_large.get_indexer(target, method="pad") + expected = np.array([0, 0, 1, 2, 3, 4, 4, 4, 4, 4], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + target = Index(np.arange(10).astype("uint64") * 5 + 2**63) + indexer = index_large.get_indexer(target, method="backfill") + expected = np.array([0, 1, 1, 2, 3, 4, -1, -1, -1, -1], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + @pytest.mark.parametrize("val, val2", [(4, 5), (4, 4), (4, NA), (NA, NA)]) + def test_get_loc_masked(self, val, val2, any_numeric_ea_and_arrow_dtype): + # GH#39133 + idx = Index([1, 2, 3, val, val2], dtype=any_numeric_ea_and_arrow_dtype) + result = idx.get_loc(2) + assert result == 1 + + with pytest.raises(KeyError, match="9"): + idx.get_loc(9) + + def test_get_loc_masked_na(self, any_numeric_ea_and_arrow_dtype): + # GH#39133 + idx = Index([1, 2, NA], 
dtype=any_numeric_ea_and_arrow_dtype) + result = idx.get_loc(NA) + assert result == 2 + + idx = Index([1, 2, NA, NA], dtype=any_numeric_ea_and_arrow_dtype) + result = idx.get_loc(NA) + tm.assert_numpy_array_equal(result, np.array([False, False, True, True])) + + idx = Index([1, 2, 3], dtype=any_numeric_ea_and_arrow_dtype) + with pytest.raises(KeyError, match="NA"): + idx.get_loc(NA) + + def test_get_loc_masked_na_and_nan(self): + # GH#39133 + idx = Index( + FloatingArray( + np.array([1, 2, 1, np.nan]), mask=np.array([False, False, True, False]) + ) + ) + result = idx.get_loc(NA) + assert result == 2 + result = idx.get_loc(np.nan) + assert result == 3 + + idx = Index( + FloatingArray(np.array([1, 2, 1.0]), mask=np.array([False, False, True])) + ) + result = idx.get_loc(NA) + assert result == 2 + with pytest.raises(KeyError, match="nan"): + idx.get_loc(np.nan) + + idx = Index( + FloatingArray( + np.array([1, 2, np.nan]), mask=np.array([False, False, False]) + ) + ) + result = idx.get_loc(np.nan) + assert result == 2 + with pytest.raises(KeyError, match="NA"): + idx.get_loc(NA) + + @pytest.mark.parametrize("val", [4, 2]) + def test_get_indexer_masked_na(self, any_numeric_ea_and_arrow_dtype, val): + # GH#39133 + idx = Index([1, 2, NA, 3, val], dtype=any_numeric_ea_and_arrow_dtype) + result = idx.get_indexer_for([1, NA, 5]) + expected = np.array([0, 2, -1]) + tm.assert_numpy_array_equal(result, expected, check_dtype=False) + + @pytest.mark.parametrize("dtype", ["boolean", "bool[pyarrow]"]) + def test_get_indexer_masked_na_boolean(self, dtype): + # GH#39133 + if dtype == "bool[pyarrow]": + pytest.importorskip("pyarrow") + idx = Index([True, False, NA], dtype=dtype) + result = idx.get_loc(False) + assert result == 1 + result = idx.get_loc(NA) + assert result == 2 + + def test_get_indexer_arrow_dictionary_target(self): + pa = pytest.importorskip("pyarrow") + target = Index( + ArrowExtensionArray( + pa.array([1, 2], type=pa.dictionary(pa.int8(), pa.int8())) + ) + ) + idx = Index([1]) + + result = idx.get_indexer(target) + expected = np.array([0, -1], dtype=np.int64) + tm.assert_numpy_array_equal(result, expected) + + result_1, result_2 = idx.get_indexer_non_unique(target) + expected_1, expected_2 = np.array([0, -1], dtype=np.int64), np.array( + [1], dtype=np.int64 + ) + tm.assert_numpy_array_equal(result_1, expected_1) + tm.assert_numpy_array_equal(result_2, expected_2) + + +class TestWhere: + @pytest.mark.parametrize( + "index", + [ + Index(np.arange(5, dtype="float64")), + Index(range(0, 20, 2), dtype=np.int64), + Index(np.arange(5, dtype="uint64")), + ], + ) + def test_where(self, listlike_box, index): + cond = [True] * len(index) + expected = index + result = index.where(listlike_box(cond)) + + cond = [False] + [True] * (len(index) - 1) + expected = Index([index._na_value] + index[1:].tolist(), dtype=np.float64) + result = index.where(listlike_box(cond)) + tm.assert_index_equal(result, expected) + + def test_where_uint64(self): + idx = Index([0, 6, 2], dtype=np.uint64) + mask = np.array([False, True, False]) + other = np.array([1], dtype=np.int64) + + expected = Index([1, 6, 1], dtype=np.uint64) + + result = idx.where(mask, other) + tm.assert_index_equal(result, expected) + + result = idx.putmask(~mask, other) + tm.assert_index_equal(result, expected) + + def test_where_infers_type_instead_of_trying_to_convert_string_to_float(self): + # GH 32413 + index = Index([1, np.nan]) + cond = index.notna() + other = Index(["a", "b"], dtype="string") + + expected = Index([1.0, "b"]) + result = 
index.where(cond, other) + + tm.assert_index_equal(result, expected) + + +class TestTake: + @pytest.mark.parametrize("idx_dtype", [np.float64, np.int64, np.uint64]) + def test_take_preserve_name(self, idx_dtype): + index = Index([1, 2, 3, 4], dtype=idx_dtype, name="foo") + taken = index.take([3, 0, 1]) + assert index.name == taken.name + + def test_take_fill_value_float64(self): + # GH 12631 + idx = Index([1.0, 2.0, 3.0], name="xxx", dtype=np.float64) + result = idx.take(np.array([1, 0, -1])) + expected = Index([2.0, 1.0, 3.0], dtype=np.float64, name="xxx") + tm.assert_index_equal(result, expected) + + # fill_value + result = idx.take(np.array([1, 0, -1]), fill_value=True) + expected = Index([2.0, 1.0, np.nan], dtype=np.float64, name="xxx") + tm.assert_index_equal(result, expected) + + # allow_fill=False + result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True) + expected = Index([2.0, 1.0, 3.0], dtype=np.float64, name="xxx") + tm.assert_index_equal(result, expected) + + msg = ( + "When allow_fill=True and fill_value is not None, " + "all indices must be >= -1" + ) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -2]), fill_value=True) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -5]), fill_value=True) + + msg = "index -5 is out of bounds for (axis 0 with )?size 3" + with pytest.raises(IndexError, match=msg): + idx.take(np.array([1, -5])) + + @pytest.mark.parametrize("dtype", [np.int64, np.uint64]) + def test_take_fill_value_ints(self, dtype): + # see gh-12631 + idx = Index([1, 2, 3], dtype=dtype, name="xxx") + result = idx.take(np.array([1, 0, -1])) + expected = Index([2, 1, 3], dtype=dtype, name="xxx") + tm.assert_index_equal(result, expected) + + name = type(idx).__name__ + msg = f"Unable to fill values because {name} cannot contain NA" + + # fill_value=True + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -1]), fill_value=True) + + # allow_fill=False + result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True) + expected = Index([2, 1, 3], dtype=dtype, name="xxx") + tm.assert_index_equal(result, expected) + + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -2]), fill_value=True) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -5]), fill_value=True) + + msg = "index -5 is out of bounds for (axis 0 with )?size 3" + with pytest.raises(IndexError, match=msg): + idx.take(np.array([1, -5])) + + +class TestContains: + @pytest.mark.parametrize("dtype", [np.float64, np.int64, np.uint64]) + def test_contains_none(self, dtype): + # GH#35788 should return False, not raise TypeError + index = Index([0, 1, 2, 3, 4], dtype=dtype) + assert None not in index + + def test_contains_float64_nans(self): + index = Index([1.0, 2.0, np.nan], dtype=np.float64) + assert np.nan in index + + def test_contains_float64_not_nans(self): + index = Index([1.0, 2.0, np.nan], dtype=np.float64) + assert 1.0 in index + + +class TestSliceLocs: + @pytest.mark.parametrize("dtype", [int, float]) + def test_slice_locs(self, dtype): + index = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=dtype)) + n = len(index) + + assert index.slice_locs(start=2) == (2, n) + assert index.slice_locs(start=3) == (3, n) + assert index.slice_locs(3, 8) == (3, 6) + assert index.slice_locs(5, 10) == (3, n) + assert index.slice_locs(end=8) == (0, 6) + assert index.slice_locs(end=9) == (0, 7) + + # reversed + index2 = index[::-1] + assert index2.slice_locs(8, 2) == (2, 6) + assert 
index2.slice_locs(7, 3) == (2, 5) + + @pytest.mark.parametrize("dtype", [int, float]) + def test_slice_locs_float_locs(self, dtype): + index = Index(np.array([0, 1, 2, 5, 6, 7, 9, 10], dtype=dtype)) + n = len(index) + assert index.slice_locs(5.0, 10.0) == (3, n) + assert index.slice_locs(4.5, 10.5) == (3, 8) + + index2 = index[::-1] + assert index2.slice_locs(8.5, 1.5) == (2, 6) + assert index2.slice_locs(10.5, -1) == (0, n) + + @pytest.mark.parametrize("dtype", [int, float]) + def test_slice_locs_dup_numeric(self, dtype): + index = Index(np.array([10, 12, 12, 14], dtype=dtype)) + assert index.slice_locs(12, 12) == (1, 3) + assert index.slice_locs(11, 13) == (1, 3) + + index2 = index[::-1] + assert index2.slice_locs(12, 12) == (1, 3) + assert index2.slice_locs(13, 11) == (1, 3) + + def test_slice_locs_na(self): + index = Index([np.nan, 1, 2]) + assert index.slice_locs(1) == (1, 3) + assert index.slice_locs(np.nan) == (0, 3) + + index = Index([0, np.nan, np.nan, 1, 2]) + assert index.slice_locs(np.nan) == (1, 5) + + def test_slice_locs_na_raises(self): + index = Index([np.nan, 1, 2]) + with pytest.raises(KeyError, match=""): + index.slice_locs(start=1.5) + + with pytest.raises(KeyError, match=""): + index.slice_locs(end=1.5) + + +class TestGetSliceBounds: + @pytest.mark.parametrize("side, expected", [("left", 4), ("right", 5)]) + def test_get_slice_bounds_within(self, side, expected): + index = Index(range(6)) + result = index.get_slice_bound(4, side=side) + assert result == expected + + @pytest.mark.parametrize("side", ["left", "right"]) + @pytest.mark.parametrize("bound, expected", [(-1, 0), (10, 6)]) + def test_get_slice_bounds_outside(self, side, expected, bound): + index = Index(range(6)) + result = index.get_slice_bound(bound, side=side) + assert result == expected diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_join.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_join.py new file mode 100644 index 0000000000000000000000000000000000000000..918d5052167356b1d51018434c03e6682f828872 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_join.py @@ -0,0 +1,380 @@ +import numpy as np +import pytest + +import pandas._testing as tm +from pandas.core.indexes.api import Index + + +class TestJoinInt64Index: + def test_join_non_unique(self): + left = Index([4, 4, 3, 3]) + + joined, lidx, ridx = left.join(left, return_indexers=True) + + exp_joined = Index([4, 4, 4, 4, 3, 3, 3, 3]) + tm.assert_index_equal(joined, exp_joined) + + exp_lidx = np.array([0, 0, 1, 1, 2, 2, 3, 3], dtype=np.intp) + tm.assert_numpy_array_equal(lidx, exp_lidx) + + exp_ridx = np.array([0, 1, 0, 1, 2, 3, 2, 3], dtype=np.intp) + tm.assert_numpy_array_equal(ridx, exp_ridx) + + def test_join_inner(self): + index = Index(range(0, 20, 2), dtype=np.int64) + other = Index([7, 12, 25, 1, 2, 5], dtype=np.int64) + other_mono = Index([1, 2, 5, 7, 12, 25], dtype=np.int64) + + # not monotonic + res, lidx, ridx = index.join(other, how="inner", return_indexers=True) + + # no guarantee of sortedness, so sort for comparison purposes + ind = res.argsort() + res = res.take(ind) + lidx = lidx.take(ind) + ridx = ridx.take(ind) + + eres = Index([2, 12], dtype=np.int64) + elidx = np.array([1, 6], dtype=np.intp) + eridx = np.array([4, 1], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.int64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + # 
monotonic + res, lidx, ridx = index.join(other_mono, how="inner", return_indexers=True) + + res2 = index.intersection(other_mono) + tm.assert_index_equal(res, res2) + + elidx = np.array([1, 6], dtype=np.intp) + eridx = np.array([1, 4], dtype=np.intp) + assert isinstance(res, Index) and res.dtype == np.int64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + def test_join_left(self): + index = Index(range(0, 20, 2), dtype=np.int64) + other = Index([7, 12, 25, 1, 2, 5], dtype=np.int64) + other_mono = Index([1, 2, 5, 7, 12, 25], dtype=np.int64) + + # not monotonic + res, lidx, ridx = index.join(other, how="left", return_indexers=True) + eres = index + eridx = np.array([-1, 4, -1, -1, -1, -1, 1, -1, -1, -1], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.int64 + tm.assert_index_equal(res, eres) + assert lidx is None + tm.assert_numpy_array_equal(ridx, eridx) + + # monotonic + res, lidx, ridx = index.join(other_mono, how="left", return_indexers=True) + eridx = np.array([-1, 1, -1, -1, -1, -1, 4, -1, -1, -1], dtype=np.intp) + assert isinstance(res, Index) and res.dtype == np.int64 + tm.assert_index_equal(res, eres) + assert lidx is None + tm.assert_numpy_array_equal(ridx, eridx) + + # non-unique + idx = Index([1, 1, 2, 5]) + idx2 = Index([1, 2, 5, 7, 9]) + res, lidx, ridx = idx2.join(idx, how="left", return_indexers=True) + eres = Index([1, 1, 2, 5, 7, 9]) # 1 is in idx2, so it should be x2 + eridx = np.array([0, 1, 2, 3, -1, -1], dtype=np.intp) + elidx = np.array([0, 0, 1, 2, 3, 4], dtype=np.intp) + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + def test_join_right(self): + index = Index(range(0, 20, 2), dtype=np.int64) + other = Index([7, 12, 25, 1, 2, 5], dtype=np.int64) + other_mono = Index([1, 2, 5, 7, 12, 25], dtype=np.int64) + + # not monotonic + res, lidx, ridx = index.join(other, how="right", return_indexers=True) + eres = other + elidx = np.array([-1, 6, -1, -1, 1, -1], dtype=np.intp) + + assert isinstance(other, Index) and other.dtype == np.int64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + assert ridx is None + + # monotonic + res, lidx, ridx = index.join(other_mono, how="right", return_indexers=True) + eres = other_mono + elidx = np.array([-1, 1, -1, -1, 6, -1], dtype=np.intp) + assert isinstance(other, Index) and other.dtype == np.int64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + assert ridx is None + + # non-unique + idx = Index([1, 1, 2, 5]) + idx2 = Index([1, 2, 5, 7, 9]) + res, lidx, ridx = idx.join(idx2, how="right", return_indexers=True) + eres = Index([1, 1, 2, 5, 7, 9]) # 1 is in idx2, so it should be x2 + elidx = np.array([0, 1, 2, 3, -1, -1], dtype=np.intp) + eridx = np.array([0, 0, 1, 2, 3, 4], dtype=np.intp) + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + def test_join_non_int_index(self): + index = Index(range(0, 20, 2), dtype=np.int64) + other = Index([3, 6, 7, 8, 10], dtype=object) + + outer = index.join(other, how="outer") + outer2 = other.join(index, how="outer") + expected = Index([0, 2, 3, 4, 6, 7, 8, 10, 12, 14, 16, 18]) + tm.assert_index_equal(outer, outer2) + tm.assert_index_equal(outer, expected) + + inner = index.join(other, how="inner") + inner2 = other.join(index, how="inner") + expected = Index([6, 8, 10]) + tm.assert_index_equal(inner, 
inner2) + tm.assert_index_equal(inner, expected) + + left = index.join(other, how="left") + tm.assert_index_equal(left, index.astype(object)) + + left2 = other.join(index, how="left") + tm.assert_index_equal(left2, other) + + right = index.join(other, how="right") + tm.assert_index_equal(right, other) + + right2 = other.join(index, how="right") + tm.assert_index_equal(right2, index.astype(object)) + + def test_join_outer(self): + index = Index(range(0, 20, 2), dtype=np.int64) + other = Index([7, 12, 25, 1, 2, 5], dtype=np.int64) + other_mono = Index([1, 2, 5, 7, 12, 25], dtype=np.int64) + + # not monotonic + # guarantee of sortedness + res, lidx, ridx = index.join(other, how="outer", return_indexers=True) + noidx_res = index.join(other, how="outer") + tm.assert_index_equal(res, noidx_res) + + eres = Index([0, 1, 2, 4, 5, 6, 7, 8, 10, 12, 14, 16, 18, 25], dtype=np.int64) + elidx = np.array([0, -1, 1, 2, -1, 3, -1, 4, 5, 6, 7, 8, 9, -1], dtype=np.intp) + eridx = np.array( + [-1, 3, 4, -1, 5, -1, 0, -1, -1, 1, -1, -1, -1, 2], dtype=np.intp + ) + + assert isinstance(res, Index) and res.dtype == np.int64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + # monotonic + res, lidx, ridx = index.join(other_mono, how="outer", return_indexers=True) + noidx_res = index.join(other_mono, how="outer") + tm.assert_index_equal(res, noidx_res) + + elidx = np.array([0, -1, 1, 2, -1, 3, -1, 4, 5, 6, 7, 8, 9, -1], dtype=np.intp) + eridx = np.array( + [-1, 0, 1, -1, 2, -1, 3, -1, -1, 4, -1, -1, -1, 5], dtype=np.intp + ) + assert isinstance(res, Index) and res.dtype == np.int64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + +class TestJoinUInt64Index: + @pytest.fixture + def index_large(self): + # large values used in TestUInt64Index where no compat needed with int64/float64 + large = [2**63, 2**63 + 10, 2**63 + 15, 2**63 + 20, 2**63 + 25] + return Index(large, dtype=np.uint64) + + def test_join_inner(self, index_large): + other = Index(2**63 + np.array([7, 12, 25, 1, 2, 10], dtype="uint64")) + other_mono = Index(2**63 + np.array([1, 2, 7, 10, 12, 25], dtype="uint64")) + + # not monotonic + res, lidx, ridx = index_large.join(other, how="inner", return_indexers=True) + + # no guarantee of sortedness, so sort for comparison purposes + ind = res.argsort() + res = res.take(ind) + lidx = lidx.take(ind) + ridx = ridx.take(ind) + + eres = Index(2**63 + np.array([10, 25], dtype="uint64")) + elidx = np.array([1, 4], dtype=np.intp) + eridx = np.array([5, 2], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.uint64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + # monotonic + res, lidx, ridx = index_large.join( + other_mono, how="inner", return_indexers=True + ) + + res2 = index_large.intersection(other_mono) + tm.assert_index_equal(res, res2) + + elidx = np.array([1, 4], dtype=np.intp) + eridx = np.array([3, 5], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.uint64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + def test_join_left(self, index_large): + other = Index(2**63 + np.array([7, 12, 25, 1, 2, 10], dtype="uint64")) + other_mono = Index(2**63 + np.array([1, 2, 7, 10, 12, 25], dtype="uint64")) + + # not monotonic + res, lidx, ridx = index_large.join(other, how="left", 
return_indexers=True) + eres = index_large + eridx = np.array([-1, 5, -1, -1, 2], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.uint64 + tm.assert_index_equal(res, eres) + assert lidx is None + tm.assert_numpy_array_equal(ridx, eridx) + + # monotonic + res, lidx, ridx = index_large.join(other_mono, how="left", return_indexers=True) + eridx = np.array([-1, 3, -1, -1, 5], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.uint64 + tm.assert_index_equal(res, eres) + assert lidx is None + tm.assert_numpy_array_equal(ridx, eridx) + + # non-unique + idx = Index(2**63 + np.array([1, 1, 2, 5], dtype="uint64")) + idx2 = Index(2**63 + np.array([1, 2, 5, 7, 9], dtype="uint64")) + res, lidx, ridx = idx2.join(idx, how="left", return_indexers=True) + + # 1 is in idx2, so it should be x2 + eres = Index(2**63 + np.array([1, 1, 2, 5, 7, 9], dtype="uint64")) + eridx = np.array([0, 1, 2, 3, -1, -1], dtype=np.intp) + elidx = np.array([0, 0, 1, 2, 3, 4], dtype=np.intp) + + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + def test_join_right(self, index_large): + other = Index(2**63 + np.array([7, 12, 25, 1, 2, 10], dtype="uint64")) + other_mono = Index(2**63 + np.array([1, 2, 7, 10, 12, 25], dtype="uint64")) + + # not monotonic + res, lidx, ridx = index_large.join(other, how="right", return_indexers=True) + eres = other + elidx = np.array([-1, -1, 4, -1, -1, 1], dtype=np.intp) + + tm.assert_numpy_array_equal(lidx, elidx) + assert isinstance(other, Index) and other.dtype == np.uint64 + tm.assert_index_equal(res, eres) + assert ridx is None + + # monotonic + res, lidx, ridx = index_large.join( + other_mono, how="right", return_indexers=True + ) + eres = other_mono + elidx = np.array([-1, -1, -1, 1, -1, 4], dtype=np.intp) + + assert isinstance(other, Index) and other.dtype == np.uint64 + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_index_equal(res, eres) + assert ridx is None + + # non-unique + idx = Index(2**63 + np.array([1, 1, 2, 5], dtype="uint64")) + idx2 = Index(2**63 + np.array([1, 2, 5, 7, 9], dtype="uint64")) + res, lidx, ridx = idx.join(idx2, how="right", return_indexers=True) + + # 1 is in idx2, so it should be x2 + eres = Index(2**63 + np.array([1, 1, 2, 5, 7, 9], dtype="uint64")) + elidx = np.array([0, 1, 2, 3, -1, -1], dtype=np.intp) + eridx = np.array([0, 0, 1, 2, 3, 4], dtype=np.intp) + + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + def test_join_non_int_index(self, index_large): + other = Index( + 2**63 + np.array([1, 5, 7, 10, 20], dtype="uint64"), dtype=object + ) + + outer = index_large.join(other, how="outer") + outer2 = other.join(index_large, how="outer") + expected = Index( + 2**63 + np.array([0, 1, 5, 7, 10, 15, 20, 25], dtype="uint64") + ) + tm.assert_index_equal(outer, outer2) + tm.assert_index_equal(outer, expected) + + inner = index_large.join(other, how="inner") + inner2 = other.join(index_large, how="inner") + expected = Index(2**63 + np.array([10, 20], dtype="uint64")) + tm.assert_index_equal(inner, inner2) + tm.assert_index_equal(inner, expected) + + left = index_large.join(other, how="left") + tm.assert_index_equal(left, index_large.astype(object)) + + left2 = other.join(index_large, how="left") + tm.assert_index_equal(left2, other) + + right = index_large.join(other, how="right") + tm.assert_index_equal(right, other) + + right2 = other.join(index_large, how="right") + 
tm.assert_index_equal(right2, index_large.astype(object)) + + def test_join_outer(self, index_large): + other = Index(2**63 + np.array([7, 12, 25, 1, 2, 10], dtype="uint64")) + other_mono = Index(2**63 + np.array([1, 2, 7, 10, 12, 25], dtype="uint64")) + + # not monotonic + # guarantee of sortedness + res, lidx, ridx = index_large.join(other, how="outer", return_indexers=True) + noidx_res = index_large.join(other, how="outer") + tm.assert_index_equal(res, noidx_res) + + eres = Index( + 2**63 + np.array([0, 1, 2, 7, 10, 12, 15, 20, 25], dtype="uint64") + ) + elidx = np.array([0, -1, -1, -1, 1, -1, 2, 3, 4], dtype=np.intp) + eridx = np.array([-1, 3, 4, 0, 5, 1, -1, -1, 2], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.uint64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + # monotonic + res, lidx, ridx = index_large.join( + other_mono, how="outer", return_indexers=True + ) + noidx_res = index_large.join(other_mono, how="outer") + tm.assert_index_equal(res, noidx_res) + + elidx = np.array([0, -1, -1, -1, 1, -1, 2, 3, 4], dtype=np.intp) + eridx = np.array([-1, 0, 1, 2, 3, 4, -1, -1, 5], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.uint64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_numeric.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_numeric.py new file mode 100644 index 0000000000000000000000000000000000000000..4fd807e1827ddc4faf900f15dcefa18c08d4cd0b --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_numeric.py @@ -0,0 +1,553 @@ +import numpy as np +import pytest + +import pandas as pd +from pandas import ( + Index, + Series, +) +import pandas._testing as tm + + +class TestFloatNumericIndex: + @pytest.fixture(params=[np.float64, np.float32]) + def dtype(self, request): + return request.param + + @pytest.fixture + def simple_index(self, dtype): + values = np.arange(5, dtype=dtype) + return Index(values) + + @pytest.fixture( + params=[ + [1.5, 2, 3, 4, 5], + [0.0, 2.5, 5.0, 7.5, 10.0], + [5, 4, 3, 2, 1.5], + [10.0, 7.5, 5.0, 2.5, 0.0], + ], + ids=["mixed", "float", "mixed_dec", "float_dec"], + ) + def index(self, request, dtype): + return Index(request.param, dtype=dtype) + + @pytest.fixture + def mixed_index(self, dtype): + return Index([1.5, 2, 3, 4, 5], dtype=dtype) + + @pytest.fixture + def float_index(self, dtype): + return Index([0.0, 2.5, 5.0, 7.5, 10.0], dtype=dtype) + + def test_repr_roundtrip(self, index): + tm.assert_index_equal(eval(repr(index)), index, exact=True) + + def check_coerce(self, a, b, is_float_index=True): + assert a.equals(b) + tm.assert_index_equal(a, b, exact=False) + if is_float_index: + assert isinstance(b, Index) + else: + assert type(b) is Index + + def test_constructor_from_list_no_dtype(self): + index = Index([1.5, 2.5, 3.5]) + assert index.dtype == np.float64 + + def test_constructor(self, dtype): + index_cls = Index + + # explicit construction + index = index_cls([1, 2, 3, 4, 5], dtype=dtype) + + assert isinstance(index, index_cls) + assert index.dtype == dtype + + expected = np.array([1, 2, 3, 4, 5], dtype=dtype) + tm.assert_numpy_array_equal(index.values, expected) + + index = index_cls(np.array([1, 2, 3, 4, 5]), dtype=dtype) + assert isinstance(index, index_cls) + assert index.dtype == dtype + + 
index = index_cls([1.0, 2, 3, 4, 5], dtype=dtype) + assert isinstance(index, index_cls) + assert index.dtype == dtype + + index = index_cls(np.array([1.0, 2, 3, 4, 5]), dtype=dtype) + assert isinstance(index, index_cls) + assert index.dtype == dtype + + index = index_cls([1.0, 2, 3, 4, 5], dtype=dtype) + assert isinstance(index, index_cls) + assert index.dtype == dtype + + index = index_cls(np.array([1.0, 2, 3, 4, 5]), dtype=dtype) + assert isinstance(index, index_cls) + assert index.dtype == dtype + + # nan handling + result = index_cls([np.nan, np.nan], dtype=dtype) + assert pd.isna(result.values).all() + + result = index_cls(np.array([np.nan]), dtype=dtype) + assert pd.isna(result.values).all() + + def test_constructor_invalid(self): + index_cls = Index + cls_name = index_cls.__name__ + # invalid + msg = ( + rf"{cls_name}\(\.\.\.\) must be called with a collection of " + r"some kind, 0\.0 was passed" + ) + with pytest.raises(TypeError, match=msg): + index_cls(0.0) + + def test_constructor_coerce(self, mixed_index, float_index): + self.check_coerce(mixed_index, Index([1.5, 2, 3, 4, 5])) + self.check_coerce(float_index, Index(np.arange(5) * 2.5)) + + result = Index(np.array(np.arange(5) * 2.5, dtype=object)) + assert result.dtype == object # as of 2.0 to match Series + self.check_coerce(float_index, result.astype("float64")) + + def test_constructor_explicit(self, mixed_index, float_index): + # these don't auto convert + self.check_coerce( + float_index, Index((np.arange(5) * 2.5), dtype=object), is_float_index=False + ) + self.check_coerce( + mixed_index, Index([1.5, 2, 3, 4, 5], dtype=object), is_float_index=False + ) + + def test_type_coercion_fail(self, any_int_numpy_dtype): + # see gh-15832 + msg = "Trying to coerce float values to integers" + with pytest.raises(ValueError, match=msg): + Index([1, 2, 3.5], dtype=any_int_numpy_dtype) + + def test_equals_numeric(self): + index_cls = Index + + idx = index_cls([1.0, 2.0]) + assert idx.equals(idx) + assert idx.identical(idx) + + idx2 = index_cls([1.0, 2.0]) + assert idx.equals(idx2) + + idx = index_cls([1.0, np.nan]) + assert idx.equals(idx) + assert idx.identical(idx) + + idx2 = index_cls([1.0, np.nan]) + assert idx.equals(idx2) + + @pytest.mark.parametrize( + "other", + ( + Index([1, 2], dtype=np.int64), + Index([1.0, 2.0], dtype=object), + Index([1, 2], dtype=object), + ), + ) + def test_equals_numeric_other_index_type(self, other): + idx = Index([1.0, 2.0]) + assert idx.equals(other) + assert other.equals(idx) + + @pytest.mark.parametrize( + "vals", + [ + pd.date_range("2016-01-01", periods=3), + pd.timedelta_range("1 Day", periods=3), + ], + ) + def test_lookups_datetimelike_values(self, vals, dtype): + # If we have datetime64 or timedelta64 values, make sure they are + # wrapped correctly GH#31163 + ser = Series(vals, index=range(3, 6)) + ser.index = ser.index.astype(dtype) + + expected = vals[1] + + result = ser[4.0] + assert isinstance(result, type(expected)) and result == expected + result = ser[4] + assert isinstance(result, type(expected)) and result == expected + + result = ser.loc[4.0] + assert isinstance(result, type(expected)) and result == expected + result = ser.loc[4] + assert isinstance(result, type(expected)) and result == expected + + result = ser.at[4.0] + assert isinstance(result, type(expected)) and result == expected + # GH#31329 .at[4] should cast to 4.0, matching .loc behavior + result = ser.at[4] + assert isinstance(result, type(expected)) and result == expected + + result = ser.iloc[1] + assert 
isinstance(result, type(expected)) and result == expected + + result = ser.iat[1] + assert isinstance(result, type(expected)) and result == expected + + def test_doesnt_contain_all_the_things(self): + idx = Index([np.nan]) + assert not idx.isin([0]).item() + assert not idx.isin([1]).item() + assert idx.isin([np.nan]).item() + + def test_nan_multiple_containment(self): + index_cls = Index + + idx = index_cls([1.0, np.nan]) + tm.assert_numpy_array_equal(idx.isin([1.0]), np.array([True, False])) + tm.assert_numpy_array_equal(idx.isin([2.0, np.pi]), np.array([False, False])) + tm.assert_numpy_array_equal(idx.isin([np.nan]), np.array([False, True])) + tm.assert_numpy_array_equal(idx.isin([1.0, np.nan]), np.array([True, True])) + idx = index_cls([1.0, 2.0]) + tm.assert_numpy_array_equal(idx.isin([np.nan]), np.array([False, False])) + + def test_fillna_float64(self): + index_cls = Index + # GH 11343 + idx = Index([1.0, np.nan, 3.0], dtype=float, name="x") + # can't downcast + exp = Index([1.0, 0.1, 3.0], name="x") + tm.assert_index_equal(idx.fillna(0.1), exp, exact=True) + + # downcast + exp = index_cls([1.0, 2.0, 3.0], name="x") + tm.assert_index_equal(idx.fillna(2), exp) + + # object + exp = Index([1.0, "obj", 3.0], name="x") + tm.assert_index_equal(idx.fillna("obj"), exp, exact=True) + + def test_logical_compat(self, simple_index): + idx = simple_index + assert idx.all() == idx.values.all() + assert idx.any() == idx.values.any() + + assert idx.all() == idx.to_series().all() + assert idx.any() == idx.to_series().any() + + +class TestNumericInt: + @pytest.fixture(params=[np.int64, np.int32, np.int16, np.int8, np.uint64]) + def dtype(self, request): + return request.param + + @pytest.fixture + def simple_index(self, dtype): + return Index(range(0, 20, 2), dtype=dtype) + + def test_is_monotonic(self): + index_cls = Index + + index = index_cls([1, 2, 3, 4]) + assert index.is_monotonic_increasing is True + assert index.is_monotonic_increasing is True + assert index._is_strictly_monotonic_increasing is True + assert index.is_monotonic_decreasing is False + assert index._is_strictly_monotonic_decreasing is False + + index = index_cls([4, 3, 2, 1]) + assert index.is_monotonic_increasing is False + assert index._is_strictly_monotonic_increasing is False + assert index._is_strictly_monotonic_decreasing is True + + index = index_cls([1]) + assert index.is_monotonic_increasing is True + assert index.is_monotonic_increasing is True + assert index.is_monotonic_decreasing is True + assert index._is_strictly_monotonic_increasing is True + assert index._is_strictly_monotonic_decreasing is True + + def test_is_strictly_monotonic(self): + index_cls = Index + + index = index_cls([1, 1, 2, 3]) + assert index.is_monotonic_increasing is True + assert index._is_strictly_monotonic_increasing is False + + index = index_cls([3, 2, 1, 1]) + assert index.is_monotonic_decreasing is True + assert index._is_strictly_monotonic_decreasing is False + + index = index_cls([1, 1]) + assert index.is_monotonic_increasing + assert index.is_monotonic_decreasing + assert not index._is_strictly_monotonic_increasing + assert not index._is_strictly_monotonic_decreasing + + def test_logical_compat(self, simple_index): + idx = simple_index + assert idx.all() == idx.values.all() + assert idx.any() == idx.values.any() + + def test_identical(self, simple_index, dtype): + index = simple_index + + idx = Index(index.copy()) + assert idx.identical(index) + + same_values_different_type = Index(idx, dtype=object) + assert not 
idx.identical(same_values_different_type) + + idx = index.astype(dtype=object) + idx = idx.rename("foo") + same_values = Index(idx, dtype=object) + assert same_values.identical(idx) + + assert not idx.identical(index) + assert Index(same_values, name="foo", dtype=object).identical(idx) + + assert not index.astype(dtype=object).identical(index.astype(dtype=dtype)) + + def test_cant_or_shouldnt_cast(self, dtype): + msg = r"invalid literal for int\(\) with base 10: 'foo'" + + # can't + data = ["foo", "bar", "baz"] + with pytest.raises(ValueError, match=msg): + Index(data, dtype=dtype) + + def test_view_index(self, simple_index): + index = simple_index + msg = "Passing a type in .*Index.view is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + index.view(Index) + + def test_prevent_casting(self, simple_index): + index = simple_index + result = index.astype("O") + assert result.dtype == np.object_ + + +class TestIntNumericIndex: + @pytest.fixture(params=[np.int64, np.int32, np.int16, np.int8]) + def dtype(self, request): + return request.param + + def test_constructor_from_list_no_dtype(self): + index = Index([1, 2, 3]) + assert index.dtype == np.int64 + + def test_constructor(self, dtype): + index_cls = Index + + # scalar raise Exception + msg = ( + rf"{index_cls.__name__}\(\.\.\.\) must be called with a collection of some " + "kind, 5 was passed" + ) + with pytest.raises(TypeError, match=msg): + index_cls(5) + + # copy + # pass list, coerce fine + index = index_cls([-5, 0, 1, 2], dtype=dtype) + arr = index.values.copy() + new_index = index_cls(arr, copy=True) + tm.assert_index_equal(new_index, index, exact=True) + val = int(arr[0]) + 3000 + + # this should not change index + if dtype != np.int8: + # NEP 50 won't allow assignment that would overflow + arr[0] = val + assert new_index[0] != val + + if dtype == np.int64: + # pass list, coerce fine + index = index_cls([-5, 0, 1, 2], dtype=dtype) + expected = Index([-5, 0, 1, 2], dtype=dtype) + tm.assert_index_equal(index, expected) + + # from iterable + index = index_cls(iter([-5, 0, 1, 2]), dtype=dtype) + expected = index_cls([-5, 0, 1, 2], dtype=dtype) + tm.assert_index_equal(index, expected, exact=True) + + # interpret list-like + expected = index_cls([5, 0], dtype=dtype) + for cls in [Index, index_cls]: + for idx in [ + cls([5, 0], dtype=dtype), + cls(np.array([5, 0]), dtype=dtype), + cls(Series([5, 0]), dtype=dtype), + ]: + tm.assert_index_equal(idx, expected) + + def test_constructor_corner(self, dtype): + index_cls = Index + + arr = np.array([1, 2, 3, 4], dtype=object) + + index = index_cls(arr, dtype=dtype) + assert index.values.dtype == index.dtype + if dtype == np.int64: + without_dtype = Index(arr) + # as of 2.0 we do not infer a dtype when we get an object-dtype + # ndarray of numbers, matching Series behavior + assert without_dtype.dtype == object + + tm.assert_index_equal(index, without_dtype.astype(np.int64)) + + # preventing casting + arr = np.array([1, "2", 3, "4"], dtype=object) + msg = "Trying to coerce float values to integers" + with pytest.raises(ValueError, match=msg): + index_cls(arr, dtype=dtype) + + def test_constructor_coercion_signed_to_unsigned( + self, + any_unsigned_int_numpy_dtype, + ): + # see gh-15832 + msg = "|".join( + [ + "Trying to coerce negative values to unsigned integers", + "The elements provided in the data cannot all be casted", + ] + ) + with pytest.raises(OverflowError, match=msg): + Index([-1], dtype=any_unsigned_int_numpy_dtype) + + def test_constructor_np_signed(self, 
any_signed_int_numpy_dtype): + # GH#47475 + scalar = np.dtype(any_signed_int_numpy_dtype).type(1) + result = Index([scalar]) + expected = Index([1], dtype=any_signed_int_numpy_dtype) + tm.assert_index_equal(result, expected, exact=True) + + def test_constructor_np_unsigned(self, any_unsigned_int_numpy_dtype): + # GH#47475 + scalar = np.dtype(any_unsigned_int_numpy_dtype).type(1) + result = Index([scalar]) + expected = Index([1], dtype=any_unsigned_int_numpy_dtype) + tm.assert_index_equal(result, expected, exact=True) + + def test_coerce_list(self): + # coerce things + arr = Index([1, 2, 3, 4]) + assert isinstance(arr, Index) + + # but not if explicit dtype passed + arr = Index([1, 2, 3, 4], dtype=object) + assert type(arr) is Index + + +class TestFloat16Index: + # float 16 indexes not supported + # GH 49535 + def test_constructor(self): + index_cls = Index + dtype = np.float16 + + msg = "float16 indexes are not supported" + + # explicit construction + with pytest.raises(NotImplementedError, match=msg): + index_cls([1, 2, 3, 4, 5], dtype=dtype) + + with pytest.raises(NotImplementedError, match=msg): + index_cls(np.array([1, 2, 3, 4, 5]), dtype=dtype) + + with pytest.raises(NotImplementedError, match=msg): + index_cls([1.0, 2, 3, 4, 5], dtype=dtype) + + with pytest.raises(NotImplementedError, match=msg): + index_cls(np.array([1.0, 2, 3, 4, 5]), dtype=dtype) + + with pytest.raises(NotImplementedError, match=msg): + index_cls([1.0, 2, 3, 4, 5], dtype=dtype) + + with pytest.raises(NotImplementedError, match=msg): + index_cls(np.array([1.0, 2, 3, 4, 5]), dtype=dtype) + + # nan handling + with pytest.raises(NotImplementedError, match=msg): + index_cls([np.nan, np.nan], dtype=dtype) + + with pytest.raises(NotImplementedError, match=msg): + index_cls(np.array([np.nan]), dtype=dtype) + + +@pytest.mark.parametrize( + "box", + [list, lambda x: np.array(x, dtype=object), lambda x: Index(x, dtype=object)], +) +def test_uint_index_does_not_convert_to_float64(box): + # https://github.com/pandas-dev/pandas/issues/28279 + # https://github.com/pandas-dev/pandas/issues/28023 + series = Series( + [0, 1, 2, 3, 4, 5], + index=[ + 7606741985629028552, + 17876870360202815256, + 17876870360202815256, + 13106359306506049338, + 8991270399732411471, + 8991270399732411472, + ], + ) + + result = series.loc[box([7606741985629028552, 17876870360202815256])] + + expected = Index( + [7606741985629028552, 17876870360202815256, 17876870360202815256], + dtype="uint64", + ) + tm.assert_index_equal(result.index, expected) + + tm.assert_equal(result, series.iloc[:3]) + + +def test_float64_index_equals(): + # https://github.com/pandas-dev/pandas/issues/35217 + float_index = Index([1.0, 2, 3]) + string_index = Index(["1", "2", "3"]) + + result = float_index.equals(string_index) + assert result is False + + result = string_index.equals(float_index) + assert result is False + + +def test_map_dtype_inference_unsigned_to_signed(): + # GH#44609 cases where we don't retain dtype + idx = Index([1, 2, 3], dtype=np.uint64) + result = idx.map(lambda x: -x) + expected = Index([-1, -2, -3], dtype=np.int64) + tm.assert_index_equal(result, expected) + + +def test_map_dtype_inference_overflows(): + # GH#44609 case where we have to upcast + idx = Index(np.array([1, 2, 3], dtype=np.int8)) + result = idx.map(lambda x: x * 1000) + # TODO: we could plausibly try to infer down to int16 here + expected = Index([1000, 2000, 3000], dtype=np.int64) + tm.assert_index_equal(result, expected) + + +def test_view_to_datetimelike(): + # GH#55710 + idx = 
Index([1, 2, 3]) + res = idx.view("m8[s]") + expected = pd.TimedeltaIndex(idx.values.view("m8[s]")) + tm.assert_index_equal(res, expected) + + res2 = idx.view("m8[D]") + expected2 = idx.values.view("m8[D]") + tm.assert_numpy_array_equal(res2, expected2) + + res3 = idx.view("M8[h]") + expected3 = idx.values.view("M8[h]") + tm.assert_numpy_array_equal(res3, expected3) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_setops.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_setops.py new file mode 100644 index 0000000000000000000000000000000000000000..376b51dd98bb1b1c7c6c8a67914bc72f6c6c588d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/numeric/test_setops.py @@ -0,0 +1,168 @@ +from datetime import ( + datetime, + timedelta, +) + +import numpy as np +import pytest + +import pandas._testing as tm +from pandas.core.indexes.api import ( + Index, + RangeIndex, +) + + +@pytest.fixture +def index_large(): + # large values used in TestUInt64Index where no compat needed with int64/float64 + large = [2**63, 2**63 + 10, 2**63 + 15, 2**63 + 20, 2**63 + 25] + return Index(large, dtype=np.uint64) + + +class TestSetOps: + @pytest.mark.parametrize("dtype", ["f8", "u8", "i8"]) + def test_union_non_numeric(self, dtype): + # corner case, non-numeric + index = Index(np.arange(5, dtype=dtype), dtype=dtype) + assert index.dtype == dtype + + other = Index([datetime.now() + timedelta(i) for i in range(4)], dtype=object) + result = index.union(other) + expected = Index(np.concatenate((index, other))) + tm.assert_index_equal(result, expected) + + result = other.union(index) + expected = Index(np.concatenate((other, index))) + tm.assert_index_equal(result, expected) + + def test_intersection(self): + index = Index(range(5), dtype=np.int64) + + other = Index([1, 2, 3, 4, 5]) + result = index.intersection(other) + expected = Index(np.sort(np.intersect1d(index.values, other.values))) + tm.assert_index_equal(result, expected) + + result = other.intersection(index) + expected = Index( + np.sort(np.asarray(np.intersect1d(index.values, other.values))) + ) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize("dtype", ["int64", "uint64"]) + def test_int_float_union_dtype(self, dtype): + # https://github.com/pandas-dev/pandas/issues/26778 + # [u]int | float -> float + index = Index([0, 2, 3], dtype=dtype) + other = Index([0.5, 1.5], dtype=np.float64) + expected = Index([0.0, 0.5, 1.5, 2.0, 3.0], dtype=np.float64) + result = index.union(other) + tm.assert_index_equal(result, expected) + + result = other.union(index) + tm.assert_index_equal(result, expected) + + def test_range_float_union_dtype(self): + # https://github.com/pandas-dev/pandas/issues/26778 + index = RangeIndex(start=0, stop=3) + other = Index([0.5, 1.5], dtype=np.float64) + result = index.union(other) + expected = Index([0.0, 0.5, 1, 1.5, 2.0], dtype=np.float64) + tm.assert_index_equal(result, expected) + + result = other.union(index) + tm.assert_index_equal(result, expected) + + def test_range_uint64_union_dtype(self): + # https://github.com/pandas-dev/pandas/issues/26778 + index = RangeIndex(start=0, stop=3) + other = Index([0, 10], dtype=np.uint64) + result = index.union(other) + expected = Index([0, 1, 2, 10], dtype=object) + tm.assert_index_equal(result, expected) + + result = other.union(index) + tm.assert_index_equal(result, expected) + + def test_float64_index_difference(self): + # https://github.com/pandas-dev/pandas/issues/35217 + 
float_index = Index([1.0, 2, 3]) + string_index = Index(["1", "2", "3"]) + + result = float_index.difference(string_index) + tm.assert_index_equal(result, float_index) + + result = string_index.difference(float_index) + tm.assert_index_equal(result, string_index) + + def test_intersection_uint64_outside_int64_range(self, index_large): + other = Index([2**63, 2**63 + 5, 2**63 + 10, 2**63 + 15, 2**63 + 20]) + result = index_large.intersection(other) + expected = Index(np.sort(np.intersect1d(index_large.values, other.values))) + tm.assert_index_equal(result, expected) + + result = other.intersection(index_large) + expected = Index( + np.sort(np.asarray(np.intersect1d(index_large.values, other.values))) + ) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "index2,keeps_name", + [ + (Index([4, 7, 6, 5, 3], name="index"), True), + (Index([4, 7, 6, 5, 3], name="other"), False), + ], + ) + def test_intersection_monotonic(self, index2, keeps_name, sort): + index1 = Index([5, 3, 2, 4, 1], name="index") + expected = Index([5, 3, 4]) + + if keeps_name: + expected.name = "index" + + result = index1.intersection(index2, sort=sort) + if sort is None: + expected = expected.sort_values() + tm.assert_index_equal(result, expected) + + def test_symmetric_difference(self, sort): + # smoke + index1 = Index([5, 2, 3, 4], name="index1") + index2 = Index([2, 3, 4, 1]) + result = index1.symmetric_difference(index2, sort=sort) + expected = Index([5, 1]) + if sort is not None: + tm.assert_index_equal(result, expected) + else: + tm.assert_index_equal(result, expected.sort_values()) + assert result.name is None + if sort is None: + expected = expected.sort_values() + tm.assert_index_equal(result, expected) + + +class TestSetOpsSort: + @pytest.mark.parametrize("slice_", [slice(None), slice(0)]) + def test_union_sort_other_special(self, slice_): + # https://github.com/pandas-dev/pandas/issues/24959 + + idx = Index([1, 0, 2]) + # default, sort=None + other = idx[slice_] + tm.assert_index_equal(idx.union(other), idx) + tm.assert_index_equal(other.union(idx), idx) + + # sort=False + tm.assert_index_equal(idx.union(other, sort=False), idx) + + @pytest.mark.parametrize("slice_", [slice(None), slice(0)]) + def test_union_sort_special_true(self, slice_): + idx = Index([1, 0, 2]) + # default, sort=None + other = idx[slice_] + + result = idx.union(other, sort=True) + expected = Index([0, 1, 2]) + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..24d1ed8eee49671f77a0787f83d6ba284225bd89 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_constructors.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_constructors.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..679948c5f5e56d286b7c1b19be46ada35c98dd97 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_constructors.cpython-310.pyc differ diff --git 
a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_formats.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_formats.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..44acea3474b964432f1a487c3a5953d22279d27f Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_formats.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_freq_attr.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_freq_attr.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..71c4dd5e1ca524ccc2b5dbc68dbeb000c8014835 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_freq_attr.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_indexing.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_indexing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ce059185e0c706fa7626231894733ea9ae365483 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_indexing.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_join.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_join.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d2b77530204a353637e5d3b4ccac2b1789ff974e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_join.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_monotonic.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_monotonic.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..13464e345cabadf36bd397b5bb09451b7da0a2e3 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_monotonic.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_partial_slicing.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_partial_slicing.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..16102b6ca71f7f5539de69cb82882af798e23acd Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_partial_slicing.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_period.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_period.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..53fd6dcf8c84c4a0f01e1957db9f90c05f44df50 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_period.cpython-310.pyc differ diff --git 
a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_period_range.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_period_range.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b8eff0a27db176f1e70e6b6ac17700577ffbd86a Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_period_range.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_scalar_compat.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_scalar_compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..713368357cd223d398c612edbd4cf19df8bcadbe Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_scalar_compat.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_searchsorted.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_searchsorted.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7da0a97c7793e8d86a890fc76a7cdbe1af25d47d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_searchsorted.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_setops.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_setops.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..48f999a9374d82133df4d8cedf8d7b7622b946d9 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_setops.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_tools.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_tools.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..791df96959ad7d8656c269019b84aa746233cc24 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/__pycache__/test_tools.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/__init__.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..62907ef250ba76bc758e7ba3bdb1209f48bf570e Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/__init__.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_asfreq.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_asfreq.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f59286ce34f80d7e4985f40c814e871572e2ad39 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_asfreq.cpython-310.pyc differ diff --git 
a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_astype.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_astype.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..9458c959b4205c6d2fc731567109036bb4f33b37 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_astype.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_factorize.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_factorize.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..644661773d4d8854ae6cb037f1050c0c43d2654d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_factorize.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_fillna.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_fillna.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ae182eb8cd87799532ee6305a5a3331f0e31b93d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_fillna.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_insert.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_insert.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f52353f55b89db52add05fb339e230705b723929 Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_insert.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_to_timestamp.cpython-310.pyc b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_to_timestamp.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0804f9bb5f737ce7fa79b2943d41d80d18271f6d Binary files /dev/null and b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_to_timestamp.cpython-310.pyc differ diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_astype.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_astype.py new file mode 100644 index 0000000000000000000000000000000000000000..d545bfd2fae0f7034c8cb62a8955c8b3c0ba0d6d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_astype.py @@ -0,0 +1,151 @@ +import numpy as np +import pytest + +from pandas import ( + CategoricalIndex, + DatetimeIndex, + Index, + NaT, + Period, + PeriodIndex, + period_range, +) +import pandas._testing as tm + + +class TestPeriodIndexAsType: + @pytest.mark.parametrize("dtype", [float, "timedelta64", "timedelta64[ns]"]) + def test_astype_raises(self, dtype): + # GH#13149, GH#13209 + idx = PeriodIndex(["2016-05-16", "NaT", NaT, np.nan], freq="D") + msg = "Cannot cast PeriodIndex to dtype" + with pytest.raises(TypeError, match=msg): + idx.astype(dtype) + + 
def test_astype_conversion(self): + # GH#13149, GH#13209 + idx = PeriodIndex(["2016-05-16", "NaT", NaT, np.nan], freq="D", name="idx") + + result = idx.astype(object) + expected = Index( + [Period("2016-05-16", freq="D")] + [Period(NaT, freq="D")] * 3, + dtype="object", + name="idx", + ) + tm.assert_index_equal(result, expected) + + result = idx.astype(np.int64) + expected = Index( + [16937] + [-9223372036854775808] * 3, dtype=np.int64, name="idx" + ) + tm.assert_index_equal(result, expected) + + result = idx.astype(str) + expected = Index([str(x) for x in idx], name="idx", dtype=object) + tm.assert_index_equal(result, expected) + + idx = period_range("1990", "2009", freq="Y", name="idx") + result = idx.astype("i8") + tm.assert_index_equal(result, Index(idx.asi8, name="idx")) + tm.assert_numpy_array_equal(result.values, idx.asi8) + + def test_astype_uint(self): + arr = period_range("2000", periods=2, name="idx") + + with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"): + arr.astype("uint64") + with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"): + arr.astype("uint32") + + def test_astype_object(self): + idx = PeriodIndex([], freq="M") + + exp = np.array([], dtype=object) + tm.assert_numpy_array_equal(idx.astype(object).values, exp) + tm.assert_numpy_array_equal(idx._mpl_repr(), exp) + + idx = PeriodIndex(["2011-01", NaT], freq="M") + + exp = np.array([Period("2011-01", freq="M"), NaT], dtype=object) + tm.assert_numpy_array_equal(idx.astype(object).values, exp) + tm.assert_numpy_array_equal(idx._mpl_repr(), exp) + + exp = np.array([Period("2011-01-01", freq="D"), NaT], dtype=object) + idx = PeriodIndex(["2011-01-01", NaT], freq="D") + tm.assert_numpy_array_equal(idx.astype(object).values, exp) + tm.assert_numpy_array_equal(idx._mpl_repr(), exp) + + # TODO: de-duplicate this version (from test_ops) with the one above + # (from test_period) + def test_astype_object2(self): + idx = period_range(start="2013-01-01", periods=4, freq="M", name="idx") + expected_list = [ + Period("2013-01-31", freq="M"), + Period("2013-02-28", freq="M"), + Period("2013-03-31", freq="M"), + Period("2013-04-30", freq="M"), + ] + expected = Index(expected_list, dtype=object, name="idx") + result = idx.astype(object) + assert isinstance(result, Index) + assert result.dtype == object + tm.assert_index_equal(result, expected) + assert result.name == expected.name + assert idx.tolist() == expected_list + + idx = PeriodIndex( + ["2013-01-01", "2013-01-02", "NaT", "2013-01-04"], freq="D", name="idx" + ) + expected_list = [ + Period("2013-01-01", freq="D"), + Period("2013-01-02", freq="D"), + Period("NaT", freq="D"), + Period("2013-01-04", freq="D"), + ] + expected = Index(expected_list, dtype=object, name="idx") + result = idx.astype(object) + assert isinstance(result, Index) + assert result.dtype == object + tm.assert_index_equal(result, expected) + for i in [0, 1, 3]: + assert result[i] == expected[i] + assert result[2] is NaT + assert result.name == expected.name + + result_list = idx.tolist() + for i in [0, 1, 3]: + assert result_list[i] == expected_list[i] + assert result_list[2] is NaT + + def test_astype_category(self): + obj = period_range("2000", periods=2, name="idx") + result = obj.astype("category") + expected = CategoricalIndex( + [Period("2000-01-01", freq="D"), Period("2000-01-02", freq="D")], name="idx" + ) + tm.assert_index_equal(result, expected) + + result = obj._data.astype("category") + expected = expected.values + tm.assert_categorical_equal(result, expected) + + def 
test_astype_array_fallback(self): + obj = period_range("2000", periods=2, name="idx") + result = obj.astype(bool) + expected = Index(np.array([True, True]), name="idx") + tm.assert_index_equal(result, expected) + + result = obj._data.astype(bool) + expected = np.array([True, True]) + tm.assert_numpy_array_equal(result, expected) + + def test_period_astype_to_timestamp(self, unit): + # GH#55958 + pi = PeriodIndex(["2011-01", "2011-02", "2011-03"], freq="M") + + exp = DatetimeIndex( + ["2011-01-01", "2011-02-01", "2011-03-01"], tz="US/Eastern" + ).as_unit(unit) + res = pi.astype(f"datetime64[{unit}, US/Eastern]") + tm.assert_index_equal(res, exp) + assert res.freq == exp.freq diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_fillna.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_fillna.py new file mode 100644 index 0000000000000000000000000000000000000000..ed6b4686a06defdc3eac4e1f6427fb0569c2d48d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_fillna.py @@ -0,0 +1,41 @@ +from pandas import ( + Index, + NaT, + Period, + PeriodIndex, +) +import pandas._testing as tm + + +class TestFillNA: + def test_fillna_period(self): + # GH#11343 + idx = PeriodIndex(["2011-01-01 09:00", NaT, "2011-01-01 11:00"], freq="h") + + exp = PeriodIndex( + ["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"], freq="h" + ) + result = idx.fillna(Period("2011-01-01 10:00", freq="h")) + tm.assert_index_equal(result, exp) + + exp = Index( + [ + Period("2011-01-01 09:00", freq="h"), + "x", + Period("2011-01-01 11:00", freq="h"), + ], + dtype=object, + ) + result = idx.fillna("x") + tm.assert_index_equal(result, exp) + + exp = Index( + [ + Period("2011-01-01 09:00", freq="h"), + Period("2011-01-01", freq="D"), + Period("2011-01-01 11:00", freq="h"), + ], + dtype=object, + ) + result = idx.fillna(Period("2011-01-01", freq="D")) + tm.assert_index_equal(result, exp) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_shift.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_shift.py new file mode 100644 index 0000000000000000000000000000000000000000..fca3e3a559e1fe2e53571f5af919e9a0c49c4e68 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/test_shift.py @@ -0,0 +1,122 @@ +import numpy as np +import pytest + +from pandas import ( + PeriodIndex, + period_range, +) +import pandas._testing as tm + + +class TestPeriodIndexShift: + # --------------------------------------------------------------- + # PeriodIndex.shift is used by __add__ and __sub__ + + def test_pi_shift_ndarray(self): + idx = PeriodIndex( + ["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx" + ) + result = idx.shift(np.array([1, 2, 3, 4])) + expected = PeriodIndex( + ["2011-02", "2011-04", "NaT", "2011-08"], freq="M", name="idx" + ) + tm.assert_index_equal(result, expected) + + result = idx.shift(np.array([1, -2, 3, -4])) + expected = PeriodIndex( + ["2011-02", "2010-12", "NaT", "2010-12"], freq="M", name="idx" + ) + tm.assert_index_equal(result, expected) + + def test_shift(self): + pi1 = period_range(freq="Y", start="1/1/2001", end="12/1/2009") + pi2 = period_range(freq="Y", start="1/1/2002", end="12/1/2010") + + tm.assert_index_equal(pi1.shift(0), pi1) + + assert len(pi1) == len(pi2) + tm.assert_index_equal(pi1.shift(1), pi2) + + pi1 = period_range(freq="Y", start="1/1/2001", 
end="12/1/2009") + pi2 = period_range(freq="Y", start="1/1/2000", end="12/1/2008") + assert len(pi1) == len(pi2) + tm.assert_index_equal(pi1.shift(-1), pi2) + + pi1 = period_range(freq="M", start="1/1/2001", end="12/1/2009") + pi2 = period_range(freq="M", start="2/1/2001", end="1/1/2010") + assert len(pi1) == len(pi2) + tm.assert_index_equal(pi1.shift(1), pi2) + + pi1 = period_range(freq="M", start="1/1/2001", end="12/1/2009") + pi2 = period_range(freq="M", start="12/1/2000", end="11/1/2009") + assert len(pi1) == len(pi2) + tm.assert_index_equal(pi1.shift(-1), pi2) + + pi1 = period_range(freq="D", start="1/1/2001", end="12/1/2009") + pi2 = period_range(freq="D", start="1/2/2001", end="12/2/2009") + assert len(pi1) == len(pi2) + tm.assert_index_equal(pi1.shift(1), pi2) + + pi1 = period_range(freq="D", start="1/1/2001", end="12/1/2009") + pi2 = period_range(freq="D", start="12/31/2000", end="11/30/2009") + assert len(pi1) == len(pi2) + tm.assert_index_equal(pi1.shift(-1), pi2) + + def test_shift_corner_cases(self): + # GH#9903 + idx = PeriodIndex([], name="xxx", freq="h") + + msg = "`freq` argument is not supported for PeriodIndex.shift" + with pytest.raises(TypeError, match=msg): + # period shift doesn't accept freq + idx.shift(1, freq="h") + + tm.assert_index_equal(idx.shift(0), idx) + tm.assert_index_equal(idx.shift(3), idx) + + idx = PeriodIndex( + ["2011-01-01 10:00", "2011-01-01 11:00", "2011-01-01 12:00"], + name="xxx", + freq="h", + ) + tm.assert_index_equal(idx.shift(0), idx) + exp = PeriodIndex( + ["2011-01-01 13:00", "2011-01-01 14:00", "2011-01-01 15:00"], + name="xxx", + freq="h", + ) + tm.assert_index_equal(idx.shift(3), exp) + exp = PeriodIndex( + ["2011-01-01 07:00", "2011-01-01 08:00", "2011-01-01 09:00"], + name="xxx", + freq="h", + ) + tm.assert_index_equal(idx.shift(-3), exp) + + def test_shift_nat(self): + idx = PeriodIndex( + ["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx" + ) + result = idx.shift(1) + expected = PeriodIndex( + ["2011-02", "2011-03", "NaT", "2011-05"], freq="M", name="idx" + ) + tm.assert_index_equal(result, expected) + assert result.name == expected.name + + def test_shift_gh8083(self): + # test shift for PeriodIndex + # GH#8083 + drange = period_range("20130101", periods=5, freq="D") + result = drange.shift(1) + expected = PeriodIndex( + ["2013-01-02", "2013-01-03", "2013-01-04", "2013-01-05", "2013-01-06"], + freq="D", + ) + tm.assert_index_equal(result, expected) + + def test_shift_periods(self): + # GH #22458 : argument 'n' was deprecated in favor of 'periods' + idx = period_range(freq="Y", start="1/1/2001", end="12/1/2009") + tm.assert_index_equal(idx.shift(periods=0), idx) + tm.assert_index_equal(idx.shift(0), idx) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/test_constructors.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/test_constructors.py new file mode 100644 index 0000000000000000000000000000000000000000..892eb7b4a00d1ffbd9477194466bf9f2a2c522ff --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/period/test_constructors.py @@ -0,0 +1,691 @@ +import numpy as np +import pytest + +from pandas._libs.tslibs.period import IncompatibleFrequency + +from pandas.core.dtypes.dtypes import PeriodDtype + +from pandas import ( + Index, + NaT, + Period, + PeriodIndex, + Series, + date_range, + offsets, + period_range, +) +import pandas._testing as tm +from pandas.core.arrays import PeriodArray + + +class TestPeriodIndexDisallowedFreqs: + 
@pytest.mark.parametrize( + "freq,freq_depr", + [ + ("2M", "2ME"), + ("2Q-MAR", "2QE-MAR"), + ("2Y-FEB", "2YE-FEB"), + ("2M", "2me"), + ("2Q-MAR", "2qe-MAR"), + ("2Y-FEB", "2yE-feb"), + ], + ) + def test_period_index_offsets_frequency_error_message(self, freq, freq_depr): + # GH#52064 + msg = f"for Period, please use '{freq[1:]}' instead of '{freq_depr[1:]}'" + + with pytest.raises(ValueError, match=msg): + PeriodIndex(["2020-01-01", "2020-01-02"], freq=freq_depr) + + with pytest.raises(ValueError, match=msg): + period_range(start="2020-01-01", end="2020-01-02", freq=freq_depr) + + @pytest.mark.parametrize("freq_depr", ["2SME", "2sme", "2CBME", "2BYE", "2Bye"]) + def test_period_index_frequency_invalid_freq(self, freq_depr): + # GH#9586 + msg = f"Invalid frequency: {freq_depr[1:]}" + + with pytest.raises(ValueError, match=msg): + period_range("2020-01", "2020-05", freq=freq_depr) + with pytest.raises(ValueError, match=msg): + PeriodIndex(["2020-01", "2020-05"], freq=freq_depr) + + @pytest.mark.parametrize("freq", ["2BQE-SEP", "2BYE-MAR", "2BME"]) + def test_period_index_from_datetime_index_invalid_freq(self, freq): + # GH#56899 + msg = f"Invalid frequency: {freq[1:]}" + + rng = date_range("01-Jan-2012", periods=8, freq=freq) + with pytest.raises(ValueError, match=msg): + rng.to_period() + + +class TestPeriodIndex: + def test_from_ordinals(self): + Period(ordinal=-1000, freq="Y") + Period(ordinal=0, freq="Y") + + msg = "The 'ordinal' keyword in PeriodIndex is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + idx1 = PeriodIndex(ordinal=[-1, 0, 1], freq="Y") + with tm.assert_produces_warning(FutureWarning, match=msg): + idx2 = PeriodIndex(ordinal=np.array([-1, 0, 1]), freq="Y") + tm.assert_index_equal(idx1, idx2) + + alt1 = PeriodIndex.from_ordinals([-1, 0, 1], freq="Y") + tm.assert_index_equal(alt1, idx1) + + alt2 = PeriodIndex.from_ordinals(np.array([-1, 0, 1]), freq="Y") + tm.assert_index_equal(alt2, idx2) + + def test_keyword_mismatch(self): + # GH#55961 we should get exactly one of data/ordinals/**fields + per = Period("2016-01-01", "D") + depr_msg1 = "The 'ordinal' keyword in PeriodIndex is deprecated" + depr_msg2 = "Constructing PeriodIndex from fields is deprecated" + + err_msg1 = "Cannot pass both data and ordinal" + with pytest.raises(ValueError, match=err_msg1): + with tm.assert_produces_warning(FutureWarning, match=depr_msg1): + PeriodIndex(data=[per], ordinal=[per.ordinal], freq=per.freq) + + err_msg2 = "Cannot pass both data and fields" + with pytest.raises(ValueError, match=err_msg2): + with tm.assert_produces_warning(FutureWarning, match=depr_msg2): + PeriodIndex(data=[per], year=[per.year], freq=per.freq) + + err_msg3 = "Cannot pass both ordinal and fields" + with pytest.raises(ValueError, match=err_msg3): + with tm.assert_produces_warning(FutureWarning, match=depr_msg2): + PeriodIndex(ordinal=[per.ordinal], year=[per.year], freq=per.freq) + + def test_construction_base_constructor(self): + # GH 13664 + arr = [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")] + tm.assert_index_equal(Index(arr), PeriodIndex(arr)) + tm.assert_index_equal(Index(np.array(arr)), PeriodIndex(np.array(arr))) + + arr = [np.nan, NaT, Period("2011-03", freq="M")] + tm.assert_index_equal(Index(arr), PeriodIndex(arr)) + tm.assert_index_equal(Index(np.array(arr)), PeriodIndex(np.array(arr))) + + arr = [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="D")] + tm.assert_index_equal(Index(arr), Index(arr, dtype=object)) + + 
tm.assert_index_equal(Index(np.array(arr)), Index(np.array(arr), dtype=object)) + + def test_base_constructor_with_period_dtype(self): + dtype = PeriodDtype("D") + values = ["2011-01-01", "2012-03-04", "2014-05-01"] + result = Index(values, dtype=dtype) + + expected = PeriodIndex(values, dtype=dtype) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "values_constructor", [list, np.array, PeriodIndex, PeriodArray._from_sequence] + ) + def test_index_object_dtype(self, values_constructor): + # Index(periods, dtype=object) is an Index (not an PeriodIndex) + periods = [ + Period("2011-01", freq="M"), + NaT, + Period("2011-03", freq="M"), + ] + values = values_constructor(periods) + result = Index(values, dtype=object) + + assert type(result) is Index + tm.assert_numpy_array_equal(result.values, np.array(values)) + + def test_constructor_use_start_freq(self): + # GH #1118 + msg1 = "Period with BDay freq is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg1): + p = Period("4/2/2012", freq="B") + msg2 = r"PeriodDtype\[B\] is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg2): + expected = period_range(start="4/2/2012", periods=10, freq="B") + + with tm.assert_produces_warning(FutureWarning, match=msg2): + index = period_range(start=p, periods=10) + tm.assert_index_equal(index, expected) + + def test_constructor_field_arrays(self): + # GH #1264 + + years = np.arange(1990, 2010).repeat(4)[2:-2] + quarters = np.tile(np.arange(1, 5), 20)[2:-2] + + depr_msg = "Constructing PeriodIndex from fields is deprecated" + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + index = PeriodIndex(year=years, quarter=quarters, freq="Q-DEC") + expected = period_range("1990Q3", "2009Q2", freq="Q-DEC") + tm.assert_index_equal(index, expected) + + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + index2 = PeriodIndex(year=years, quarter=quarters, freq="2Q-DEC") + tm.assert_numpy_array_equal(index.asi8, index2.asi8) + + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + index = PeriodIndex(year=years, quarter=quarters) + tm.assert_index_equal(index, expected) + + years = [2007, 2007, 2007] + months = [1, 2] + + msg = "Mismatched Period array lengths" + with pytest.raises(ValueError, match=msg): + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + PeriodIndex(year=years, month=months, freq="M") + with pytest.raises(ValueError, match=msg): + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + PeriodIndex(year=years, month=months, freq="2M") + + years = [2007, 2007, 2007] + months = [1, 2, 3] + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + idx = PeriodIndex(year=years, month=months, freq="M") + exp = period_range("2007-01", periods=3, freq="M") + tm.assert_index_equal(idx, exp) + + def test_constructor_nano(self): + idx = period_range( + start=Period(ordinal=1, freq="ns"), + end=Period(ordinal=4, freq="ns"), + freq="ns", + ) + exp = PeriodIndex( + [ + Period(ordinal=1, freq="ns"), + Period(ordinal=2, freq="ns"), + Period(ordinal=3, freq="ns"), + Period(ordinal=4, freq="ns"), + ], + freq="ns", + ) + tm.assert_index_equal(idx, exp) + + def test_constructor_arrays_negative_year(self): + years = np.arange(1960, 2000, dtype=np.int64).repeat(4) + quarters = np.tile(np.array([1, 2, 3, 4], dtype=np.int64), 40) + + msg = "Constructing PeriodIndex from fields is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + pindex = PeriodIndex(year=years, 
quarter=quarters) + + tm.assert_index_equal(pindex.year, Index(years)) + tm.assert_index_equal(pindex.quarter, Index(quarters)) + + alt = PeriodIndex.from_fields(year=years, quarter=quarters) + tm.assert_index_equal(alt, pindex) + + def test_constructor_invalid_quarters(self): + depr_msg = "Constructing PeriodIndex from fields is deprecated" + msg = "Quarter must be 1 <= q <= 4" + with pytest.raises(ValueError, match=msg): + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + PeriodIndex( + year=range(2000, 2004), quarter=list(range(4)), freq="Q-DEC" + ) + + def test_period_range_fractional_period(self): + msg = "Non-integer 'periods' in pd.date_range, pd.timedelta_range" + with tm.assert_produces_warning(FutureWarning, match=msg): + result = period_range("2007-01", periods=10.5, freq="M") + exp = period_range("2007-01", periods=10, freq="M") + tm.assert_index_equal(result, exp) + + def test_constructor_with_without_freq(self): + # GH53687 + start = Period("2002-01-01 00:00", freq="30min") + exp = period_range(start=start, periods=5, freq=start.freq) + result = period_range(start=start, periods=5) + tm.assert_index_equal(exp, result) + + def test_constructor_fromarraylike(self): + idx = period_range("2007-01", periods=20, freq="M") + + # values is an array of Period, thus can retrieve freq + tm.assert_index_equal(PeriodIndex(idx.values), idx) + tm.assert_index_equal(PeriodIndex(list(idx.values)), idx) + + msg = "freq not specified and cannot be inferred" + with pytest.raises(ValueError, match=msg): + PeriodIndex(idx.asi8) + with pytest.raises(ValueError, match=msg): + PeriodIndex(list(idx.asi8)) + + msg = "'Period' object is not iterable" + with pytest.raises(TypeError, match=msg): + PeriodIndex(data=Period("2007", freq="Y")) + + result = PeriodIndex(iter(idx)) + tm.assert_index_equal(result, idx) + + result = PeriodIndex(idx) + tm.assert_index_equal(result, idx) + + result = PeriodIndex(idx, freq="M") + tm.assert_index_equal(result, idx) + + result = PeriodIndex(idx, freq=offsets.MonthEnd()) + tm.assert_index_equal(result, idx) + assert result.freq == "ME" + + result = PeriodIndex(idx, freq="2M") + tm.assert_index_equal(result, idx.asfreq("2M")) + assert result.freq == "2ME" + + result = PeriodIndex(idx, freq=offsets.MonthEnd(2)) + tm.assert_index_equal(result, idx.asfreq("2M")) + assert result.freq == "2ME" + + result = PeriodIndex(idx, freq="D") + exp = idx.asfreq("D", "e") + tm.assert_index_equal(result, exp) + + def test_constructor_datetime64arr(self): + vals = np.arange(100000, 100000 + 10000, 100, dtype=np.int64) + vals = vals.view(np.dtype("M8[us]")) + + pi = PeriodIndex(vals, freq="D") + + expected = PeriodIndex(vals.astype("M8[ns]"), freq="D") + tm.assert_index_equal(pi, expected) + + @pytest.mark.parametrize("box", [None, "series", "index"]) + def test_constructor_datetime64arr_ok(self, box): + # https://github.com/pandas-dev/pandas/issues/23438 + data = date_range("2017", periods=4, freq="ME") + if box is None: + data = data._values + elif box == "series": + data = Series(data) + + result = PeriodIndex(data, freq="D") + expected = PeriodIndex( + ["2017-01-31", "2017-02-28", "2017-03-31", "2017-04-30"], freq="D" + ) + tm.assert_index_equal(result, expected) + + def test_constructor_dtype(self): + # passing a dtype with a tz should localize + idx = PeriodIndex(["2013-01", "2013-03"], dtype="period[M]") + exp = PeriodIndex(["2013-01", "2013-03"], freq="M") + tm.assert_index_equal(idx, exp) + assert idx.dtype == "period[M]" + + idx = PeriodIndex(["2013-01-05", 
"2013-03-05"], dtype="period[3D]") + exp = PeriodIndex(["2013-01-05", "2013-03-05"], freq="3D") + tm.assert_index_equal(idx, exp) + assert idx.dtype == "period[3D]" + + # if we already have a freq and its not the same, then asfreq + # (not changed) + idx = PeriodIndex(["2013-01-01", "2013-01-02"], freq="D") + + res = PeriodIndex(idx, dtype="period[M]") + exp = PeriodIndex(["2013-01", "2013-01"], freq="M") + tm.assert_index_equal(res, exp) + assert res.dtype == "period[M]" + + res = PeriodIndex(idx, freq="M") + tm.assert_index_equal(res, exp) + assert res.dtype == "period[M]" + + msg = "specified freq and dtype are different" + with pytest.raises(IncompatibleFrequency, match=msg): + PeriodIndex(["2011-01"], freq="M", dtype="period[D]") + + def test_constructor_empty(self): + idx = PeriodIndex([], freq="M") + assert isinstance(idx, PeriodIndex) + assert len(idx) == 0 + assert idx.freq == "ME" + + with pytest.raises(ValueError, match="freq not specified"): + PeriodIndex([]) + + def test_constructor_pi_nat(self): + idx = PeriodIndex( + [Period("2011-01", freq="M"), NaT, Period("2011-01", freq="M")] + ) + exp = PeriodIndex(["2011-01", "NaT", "2011-01"], freq="M") + tm.assert_index_equal(idx, exp) + + idx = PeriodIndex( + np.array([Period("2011-01", freq="M"), NaT, Period("2011-01", freq="M")]) + ) + tm.assert_index_equal(idx, exp) + + idx = PeriodIndex( + [NaT, NaT, Period("2011-01", freq="M"), Period("2011-01", freq="M")] + ) + exp = PeriodIndex(["NaT", "NaT", "2011-01", "2011-01"], freq="M") + tm.assert_index_equal(idx, exp) + + idx = PeriodIndex( + np.array( + [NaT, NaT, Period("2011-01", freq="M"), Period("2011-01", freq="M")] + ) + ) + tm.assert_index_equal(idx, exp) + + idx = PeriodIndex([NaT, NaT, "2011-01", "2011-01"], freq="M") + tm.assert_index_equal(idx, exp) + + with pytest.raises(ValueError, match="freq not specified"): + PeriodIndex([NaT, NaT]) + + with pytest.raises(ValueError, match="freq not specified"): + PeriodIndex(np.array([NaT, NaT])) + + with pytest.raises(ValueError, match="freq not specified"): + PeriodIndex(["NaT", "NaT"]) + + with pytest.raises(ValueError, match="freq not specified"): + PeriodIndex(np.array(["NaT", "NaT"])) + + def test_constructor_incompat_freq(self): + msg = "Input has different freq=D from PeriodIndex\\(freq=M\\)" + + with pytest.raises(IncompatibleFrequency, match=msg): + PeriodIndex([Period("2011-01", freq="M"), NaT, Period("2011-01", freq="D")]) + + with pytest.raises(IncompatibleFrequency, match=msg): + PeriodIndex( + np.array( + [Period("2011-01", freq="M"), NaT, Period("2011-01", freq="D")] + ) + ) + + # first element is NaT + with pytest.raises(IncompatibleFrequency, match=msg): + PeriodIndex([NaT, Period("2011-01", freq="M"), Period("2011-01", freq="D")]) + + with pytest.raises(IncompatibleFrequency, match=msg): + PeriodIndex( + np.array( + [NaT, Period("2011-01", freq="M"), Period("2011-01", freq="D")] + ) + ) + + def test_constructor_mixed(self): + idx = PeriodIndex(["2011-01", NaT, Period("2011-01", freq="M")]) + exp = PeriodIndex(["2011-01", "NaT", "2011-01"], freq="M") + tm.assert_index_equal(idx, exp) + + idx = PeriodIndex(["NaT", NaT, Period("2011-01", freq="M")]) + exp = PeriodIndex(["NaT", "NaT", "2011-01"], freq="M") + tm.assert_index_equal(idx, exp) + + idx = PeriodIndex([Period("2011-01-01", freq="D"), NaT, "2012-01-01"]) + exp = PeriodIndex(["2011-01-01", "NaT", "2012-01-01"], freq="D") + tm.assert_index_equal(idx, exp) + + @pytest.mark.parametrize("floats", [[1.1, 2.1], np.array([1.1, 2.1])]) + def 
test_constructor_floats(self, floats): + msg = "PeriodIndex does not allow floating point in construction" + with pytest.raises(TypeError, match=msg): + PeriodIndex(floats) + + def test_constructor_year_and_quarter(self): + year = Series([2001, 2002, 2003]) + quarter = year - 2000 + msg = "Constructing PeriodIndex from fields is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + idx = PeriodIndex(year=year, quarter=quarter) + strs = [f"{t[0]:d}Q{t[1]:d}" for t in zip(quarter, year)] + lops = list(map(Period, strs)) + p = PeriodIndex(lops) + tm.assert_index_equal(p, idx) + + def test_constructor_freq_mult(self): + # GH #7811 + pidx = period_range(start="2014-01", freq="2M", periods=4) + expected = PeriodIndex(["2014-01", "2014-03", "2014-05", "2014-07"], freq="2M") + tm.assert_index_equal(pidx, expected) + + pidx = period_range(start="2014-01-02", end="2014-01-15", freq="3D") + expected = PeriodIndex( + ["2014-01-02", "2014-01-05", "2014-01-08", "2014-01-11", "2014-01-14"], + freq="3D", + ) + tm.assert_index_equal(pidx, expected) + + pidx = period_range(end="2014-01-01 17:00", freq="4h", periods=3) + expected = PeriodIndex( + ["2014-01-01 09:00", "2014-01-01 13:00", "2014-01-01 17:00"], freq="4h" + ) + tm.assert_index_equal(pidx, expected) + + msg = "Frequency must be positive, because it represents span: -1M" + with pytest.raises(ValueError, match=msg): + PeriodIndex(["2011-01"], freq="-1M") + + msg = "Frequency must be positive, because it represents span: 0M" + with pytest.raises(ValueError, match=msg): + PeriodIndex(["2011-01"], freq="0M") + + msg = "Frequency must be positive, because it represents span: 0M" + with pytest.raises(ValueError, match=msg): + period_range("2011-01", periods=3, freq="0M") + + @pytest.mark.parametrize( + "freq_offset, freq_period", + [ + ("YE", "Y"), + ("ME", "M"), + ("D", "D"), + ("min", "min"), + ("s", "s"), + ], + ) + @pytest.mark.parametrize("mult", [1, 2, 3, 4, 5]) + def test_constructor_freq_mult_dti_compat(self, mult, freq_offset, freq_period): + freqstr_offset = str(mult) + freq_offset + freqstr_period = str(mult) + freq_period + pidx = period_range(start="2014-04-01", freq=freqstr_period, periods=10) + expected = date_range( + start="2014-04-01", freq=freqstr_offset, periods=10 + ).to_period(freqstr_period) + tm.assert_index_equal(pidx, expected) + + @pytest.mark.parametrize("mult", [1, 2, 3, 4, 5]) + def test_constructor_freq_mult_dti_compat_month(self, mult): + pidx = period_range(start="2014-04-01", freq=f"{mult}M", periods=10) + expected = date_range( + start="2014-04-01", freq=f"{mult}ME", periods=10 + ).to_period(f"{mult}M") + tm.assert_index_equal(pidx, expected) + + def test_constructor_freq_combined(self): + for freq in ["1D1h", "1h1D"]: + pidx = PeriodIndex(["2016-01-01", "2016-01-02"], freq=freq) + expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 00:00"], freq="25h") + for freq in ["1D1h", "1h1D"]: + pidx = period_range(start="2016-01-01", periods=2, freq=freq) + expected = PeriodIndex(["2016-01-01 00:00", "2016-01-02 01:00"], freq="25h") + tm.assert_index_equal(pidx, expected) + + def test_period_range_length(self): + pi = period_range(freq="Y", start="1/1/2001", end="12/1/2009") + assert len(pi) == 9 + + pi = period_range(freq="Q", start="1/1/2001", end="12/1/2009") + assert len(pi) == 4 * 9 + + pi = period_range(freq="M", start="1/1/2001", end="12/1/2009") + assert len(pi) == 12 * 9 + + pi = period_range(freq="D", start="1/1/2001", end="12/31/2009") + assert len(pi) == 365 * 9 + 2 + + msg = "Period with 
BDay freq is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + pi = period_range(freq="B", start="1/1/2001", end="12/31/2009") + assert len(pi) == 261 * 9 + + pi = period_range(freq="h", start="1/1/2001", end="12/31/2001 23:00") + assert len(pi) == 365 * 24 + + pi = period_range(freq="Min", start="1/1/2001", end="1/1/2001 23:59") + assert len(pi) == 24 * 60 + + pi = period_range(freq="s", start="1/1/2001", end="1/1/2001 23:59:59") + assert len(pi) == 24 * 60 * 60 + + with tm.assert_produces_warning(FutureWarning, match=msg): + start = Period("02-Apr-2005", "B") + i1 = period_range(start=start, periods=20) + assert len(i1) == 20 + assert i1.freq == start.freq + assert i1[0] == start + + end_intv = Period("2006-12-31", "W") + i1 = period_range(end=end_intv, periods=10) + assert len(i1) == 10 + assert i1.freq == end_intv.freq + assert i1[-1] == end_intv + + msg = "'w' is deprecated and will be removed in a future version." + with tm.assert_produces_warning(FutureWarning, match=msg): + end_intv = Period("2006-12-31", "1w") + i2 = period_range(end=end_intv, periods=10) + assert len(i1) == len(i2) + assert (i1 == i2).all() + assert i1.freq == i2.freq + + def test_infer_freq_from_first_element(self): + msg = "Period with BDay freq is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + start = Period("02-Apr-2005", "B") + end_intv = Period("2005-05-01", "B") + period_range(start=start, end=end_intv) + + # infer freq from first element + i2 = PeriodIndex([end_intv, Period("2005-05-05", "B")]) + assert len(i2) == 2 + assert i2[0] == end_intv + + with tm.assert_produces_warning(FutureWarning, match=msg): + i2 = PeriodIndex(np.array([end_intv, Period("2005-05-05", "B")])) + assert len(i2) == 2 + assert i2[0] == end_intv + + def test_mixed_freq_raises(self): + # Mixed freq should fail + msg = "Period with BDay freq is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + end_intv = Period("2005-05-01", "B") + + msg = "'w' is deprecated and will be removed in a future version." 
+ with tm.assert_produces_warning(FutureWarning, match=msg): + vals = [end_intv, Period("2006-12-31", "w")] + msg = r"Input has different freq=W-SUN from PeriodIndex\(freq=B\)" + depr_msg = r"PeriodDtype\[B\] is deprecated" + with pytest.raises(IncompatibleFrequency, match=msg): + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + PeriodIndex(vals) + vals = np.array(vals) + with pytest.raises(IncompatibleFrequency, match=msg): + with tm.assert_produces_warning(FutureWarning, match=depr_msg): + PeriodIndex(vals) + + @pytest.mark.parametrize( + "freq", ["M", "Q", "Y", "D", "B", "min", "s", "ms", "us", "ns", "h"] + ) + @pytest.mark.filterwarnings( + r"ignore:Period with BDay freq is deprecated:FutureWarning" + ) + @pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning") + def test_recreate_from_data(self, freq): + org = period_range(start="2001/04/01", freq=freq, periods=1) + idx = PeriodIndex(org.values, freq=freq) + tm.assert_index_equal(idx, org) + + def test_map_with_string_constructor(self): + raw = [2005, 2007, 2009] + index = PeriodIndex(raw, freq="Y") + + expected = Index([str(num) for num in raw]) + res = index.map(str) + + # should return an Index + assert isinstance(res, Index) + + # preserve element types + assert all(isinstance(resi, str) for resi in res) + + # lastly, values should compare equal + tm.assert_index_equal(res, expected) + + +class TestSimpleNew: + def test_constructor_simple_new(self): + idx = period_range("2007-01", name="p", periods=2, freq="M") + + with pytest.raises(AssertionError, match=""): + idx._simple_new(idx, name="p") + + result = idx._simple_new(idx._data, name="p") + tm.assert_index_equal(result, idx) + + msg = "Should be numpy array of type i8" + with pytest.raises(AssertionError, match=msg): + # Need ndarray, not int64 Index + type(idx._data)._simple_new(Index(idx.asi8), dtype=idx.dtype) + + arr = type(idx._data)._simple_new(idx.asi8, dtype=idx.dtype) + result = idx._simple_new(arr, name="p") + tm.assert_index_equal(result, idx) + + def test_constructor_simple_new_empty(self): + # GH13079 + idx = PeriodIndex([], freq="M", name="p") + with pytest.raises(AssertionError, match=""): + idx._simple_new(idx, name="p") + + result = idx._simple_new(idx._data, name="p") + tm.assert_index_equal(result, idx) + + @pytest.mark.parametrize("floats", [[1.1, 2.1], np.array([1.1, 2.1])]) + def test_period_index_simple_new_disallows_floats(self, floats): + with pytest.raises(AssertionError, match=""): + PeriodIndex._simple_new(floats) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_indexing.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_indexing.py new file mode 100644 index 0000000000000000000000000000000000000000..6202074a11d7883c6f6aa984c23d7964e9042eb0 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_indexing.py @@ -0,0 +1,137 @@ +import numpy as np +import pytest + +from pandas import ( + Index, + RangeIndex, +) +import pandas._testing as tm + + +class TestGetIndexer: + def test_get_indexer(self): + index = RangeIndex(start=0, stop=20, step=2) + target = RangeIndex(10) + indexer = index.get_indexer(target) + expected = np.array([0, -1, 1, -1, 2, -1, 3,
-1, 4, -1], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + def test_get_indexer_pad(self): + index = RangeIndex(start=0, stop=20, step=2) + target = RangeIndex(10) + indexer = index.get_indexer(target, method="pad") + expected = np.array([0, 0, 1, 1, 2, 2, 3, 3, 4, 4], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + def test_get_indexer_backfill(self): + index = RangeIndex(start=0, stop=20, step=2) + target = RangeIndex(10) + indexer = index.get_indexer(target, method="backfill") + expected = np.array([0, 1, 1, 2, 2, 3, 3, 4, 4, 5], dtype=np.intp) + tm.assert_numpy_array_equal(indexer, expected) + + def test_get_indexer_limit(self): + # GH#28631 + idx = RangeIndex(4) + target = RangeIndex(6) + result = idx.get_indexer(target, method="pad", limit=1) + expected = np.array([0, 1, 2, 3, 3, -1], dtype=np.intp) + tm.assert_numpy_array_equal(result, expected) + + @pytest.mark.parametrize("stop", [0, -1, -2]) + def test_get_indexer_decreasing(self, stop): + # GH#28678 + index = RangeIndex(7, stop, -3) + result = index.get_indexer(range(9)) + expected = np.array([-1, 2, -1, -1, 1, -1, -1, 0, -1], dtype=np.intp) + tm.assert_numpy_array_equal(result, expected) + + +class TestTake: + def test_take_preserve_name(self): + index = RangeIndex(1, 5, name="foo") + taken = index.take([3, 0, 1]) + assert index.name == taken.name + + def test_take_fill_value(self): + # GH#12631 + idx = RangeIndex(1, 4, name="xxx") + result = idx.take(np.array([1, 0, -1])) + expected = Index([2, 1, 3], dtype=np.int64, name="xxx") + tm.assert_index_equal(result, expected) + + # fill_value + msg = "Unable to fill values because RangeIndex cannot contain NA" + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -1]), fill_value=True) + + # allow_fill=False + result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True) + expected = Index([2, 1, 3], dtype=np.int64, name="xxx") + tm.assert_index_equal(result, expected) + + msg = "Unable to fill values because RangeIndex cannot contain NA" + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -2]), fill_value=True) + with pytest.raises(ValueError, match=msg): + idx.take(np.array([1, 0, -5]), fill_value=True) + + def test_take_raises_index_error(self): + idx = RangeIndex(1, 4, name="xxx") + + msg = "index -5 is out of bounds for (axis 0 with )?size 3" + with pytest.raises(IndexError, match=msg): + idx.take(np.array([1, -5])) + + msg = "index -4 is out of bounds for (axis 0 with )?size 3" + with pytest.raises(IndexError, match=msg): + idx.take(np.array([1, -4])) + + # no errors + result = idx.take(np.array([1, -3])) + expected = Index([2, 1], dtype=np.int64, name="xxx") + tm.assert_index_equal(result, expected) + + def test_take_accepts_empty_array(self): + idx = RangeIndex(1, 4, name="foo") + result = idx.take(np.array([])) + expected = Index([], dtype=np.int64, name="foo") + tm.assert_index_equal(result, expected) + + # empty index + idx = RangeIndex(0, name="foo") + result = idx.take(np.array([])) + expected = Index([], dtype=np.int64, name="foo") + tm.assert_index_equal(result, expected) + + def test_take_accepts_non_int64_array(self): + idx = RangeIndex(1, 4, name="foo") + result = idx.take(np.array([2, 1], dtype=np.uint32)) + expected = Index([3, 2], dtype=np.int64, name="foo") + tm.assert_index_equal(result, expected) + + def test_take_when_index_has_step(self): + idx = RangeIndex(1, 11, 3, name="foo") # [1, 4, 7, 10] + result = idx.take(np.array([1, 0, -1, -4])) + expected = Index([4, 1, 
10, 1], dtype=np.int64, name="foo") + tm.assert_index_equal(result, expected) + + def test_take_when_index_has_negative_step(self): + idx = RangeIndex(11, -4, -2, name="foo") # [11, 9, 7, 5, 3, 1, -1, -3] + result = idx.take(np.array([1, 0, -1, -8])) + expected = Index([9, 11, -3, 11], dtype=np.int64, name="foo") + tm.assert_index_equal(result, expected) + + +class TestWhere: + def test_where_putmask_range_cast(self): + # GH#43240 + idx = RangeIndex(0, 5, name="test") + + mask = np.array([True, True, False, False, False]) + result = idx.putmask(mask, 10) + expected = Index([10, 10, 2, 3, 4], dtype=np.int64, name="test") + tm.assert_index_equal(result, expected) + + result = idx.where(~mask, 10) + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_join.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_join.py new file mode 100644 index 0000000000000000000000000000000000000000..682b5c8def9ff0e00b533610c1d45a093e7d7a8d --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_join.py @@ -0,0 +1,177 @@ +import numpy as np + +from pandas import ( + Index, + RangeIndex, +) +import pandas._testing as tm + + +class TestJoin: + def test_join_outer(self): + # join with Index[int64] + index = RangeIndex(start=0, stop=20, step=2) + other = Index(np.arange(25, 14, -1, dtype=np.int64)) + + res, lidx, ridx = index.join(other, how="outer", return_indexers=True) + noidx_res = index.join(other, how="outer") + tm.assert_index_equal(res, noidx_res) + + eres = Index( + [0, 2, 4, 6, 8, 10, 12, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25] + ) + elidx = np.array( + [0, 1, 2, 3, 4, 5, 6, 7, -1, 8, -1, 9, -1, -1, -1, -1, -1, -1, -1], + dtype=np.intp, + ) + eridx = np.array( + [-1, -1, -1, -1, -1, -1, -1, -1, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0], + dtype=np.intp, + ) + + assert isinstance(res, Index) and res.dtype == np.dtype(np.int64) + assert not isinstance(res, RangeIndex) + tm.assert_index_equal(res, eres, exact=True) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + # join with RangeIndex + other = RangeIndex(25, 14, -1) + + res, lidx, ridx = index.join(other, how="outer", return_indexers=True) + noidx_res = index.join(other, how="outer") + tm.assert_index_equal(res, noidx_res) + + assert isinstance(res, Index) and res.dtype == np.int64 + assert not isinstance(res, RangeIndex) + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + def test_join_inner(self): + # Join with non-RangeIndex + index = RangeIndex(start=0, stop=20, step=2) + other = Index(np.arange(25, 14, -1, dtype=np.int64)) + + res, lidx, ridx = index.join(other, how="inner", return_indexers=True) + + # no guarantee of sortedness, so sort for comparison purposes + ind = res.argsort() + res = res.take(ind) + lidx = lidx.take(ind) + ridx = ridx.take(ind) + + eres = Index([16, 18]) + elidx = np.array([8, 9], dtype=np.intp) + eridx = np.array([9, 7], dtype=np.intp) + + assert isinstance(res, Index) and res.dtype == np.int64 + tm.assert_index_equal(res, eres) + tm.assert_numpy_array_equal(lidx, elidx) + tm.assert_numpy_array_equal(ridx, eridx) + + # Join two RangeIndex + other = RangeIndex(25, 14, -1) + + res, lidx, ridx = index.join(other, how="inner", return_indexers=True) + + assert isinstance(res, RangeIndex) + tm.assert_index_equal(res, eres, exact="equiv") + tm.assert_numpy_array_equal(lidx, elidx) + 
tm.assert_numpy_array_equal(ridx, eridx)
+
+    def test_join_left(self):
+        # Join with Index[int64]
+        index = RangeIndex(start=0, stop=20, step=2)
+        other = Index(np.arange(25, 14, -1, dtype=np.int64))
+
+        res, lidx, ridx = index.join(other, how="left", return_indexers=True)
+        eres = index
+        eridx = np.array([-1, -1, -1, -1, -1, -1, -1, -1, 9, 7], dtype=np.intp)
+
+        assert isinstance(res, RangeIndex)
+        tm.assert_index_equal(res, eres)
+        assert lidx is None
+        tm.assert_numpy_array_equal(ridx, eridx)
+
+        # Join with RangeIndex
+        other = Index(np.arange(25, 14, -1, dtype=np.int64))
+
+        res, lidx, ridx = index.join(other, how="left", return_indexers=True)
+
+        assert isinstance(res, RangeIndex)
+        tm.assert_index_equal(res, eres)
+        assert lidx is None
+        tm.assert_numpy_array_equal(ridx, eridx)
+
+    def test_join_right(self):
+        # Join with Index[int64]
+        index = RangeIndex(start=0, stop=20, step=2)
+        other = Index(np.arange(25, 14, -1, dtype=np.int64))
+
+        res, lidx, ridx = index.join(other, how="right", return_indexers=True)
+        eres = other
+        elidx = np.array([-1, -1, -1, -1, -1, -1, -1, 9, -1, 8, -1], dtype=np.intp)
+
+        assert isinstance(other, Index) and other.dtype == np.int64
+        tm.assert_index_equal(res, eres)
+        tm.assert_numpy_array_equal(lidx, elidx)
+        assert ridx is None
+
+        # Join with RangeIndex
+        other = RangeIndex(25, 14, -1)
+
+        res, lidx, ridx = index.join(other, how="right", return_indexers=True)
+        eres = other
+
+        assert isinstance(other, RangeIndex)
+        tm.assert_index_equal(res, eres)
+        tm.assert_numpy_array_equal(lidx, elidx)
+        assert ridx is None
+
+    def test_join_non_int_index(self):
+        index = RangeIndex(start=0, stop=20, step=2)
+        other = Index([3, 6, 7, 8, 10], dtype=object)
+
+        outer = index.join(other, how="outer")
+        outer2 = other.join(index, how="outer")
+        expected = Index([0, 2, 3, 4, 6, 7, 8, 10, 12, 14, 16, 18])
+        tm.assert_index_equal(outer, outer2)
+        tm.assert_index_equal(outer, expected)
+
+        inner = index.join(other, how="inner")
+        inner2 = other.join(index, how="inner")
+        expected = Index([6, 8, 10])
+        tm.assert_index_equal(inner, inner2)
+        tm.assert_index_equal(inner, expected)
+
+        left = index.join(other, how="left")
+        tm.assert_index_equal(left, index.astype(object))
+
+        left2 = other.join(index, how="left")
+        tm.assert_index_equal(left2, other)
+
+        right = index.join(other, how="right")
+        tm.assert_index_equal(right, other)
+
+        right2 = other.join(index, how="right")
+        tm.assert_index_equal(right2, index.astype(object))
+
+    def test_join_non_unique(self):
+        index = RangeIndex(start=0, stop=20, step=2)
+        other = Index([4, 4, 3, 3])
+
+        res, lidx, ridx = index.join(other, return_indexers=True)
+
+        eres = Index([0, 2, 4, 4, 6, 8, 10, 12, 14, 16, 18])
+        elidx = np.array([0, 1, 2, 2, 3, 4, 5, 6, 7, 8, 9], dtype=np.intp)
+        eridx = np.array([-1, -1, 0, 1, -1, -1, -1, -1, -1, -1, -1], dtype=np.intp)
+
+        tm.assert_index_equal(res, eres)
+        tm.assert_numpy_array_equal(lidx, elidx)
+        tm.assert_numpy_array_equal(ridx, eridx)
+
+    def test_join_self(self, join_type):
+        index = RangeIndex(start=0, stop=20, step=2)
+        joined = index.join(index, how=join_type)
+        assert index is joined
diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_range.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_range.py
new file mode 100644
index 0000000000000000000000000000000000000000..06e19eeca67663318709772ff23f76675545e19b
--- /dev/null
+++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_range.py
@@ -0,0 +1,622
@@ +import numpy as np +import pytest + +from pandas.core.dtypes.common import ensure_platform_int + +import pandas as pd +from pandas import ( + Index, + RangeIndex, +) +import pandas._testing as tm + + +class TestRangeIndex: + @pytest.fixture + def simple_index(self): + return RangeIndex(start=0, stop=20, step=2) + + def test_constructor_unwraps_index(self): + result = RangeIndex(1, 3) + expected = np.array([1, 2], dtype=np.int64) + tm.assert_numpy_array_equal(result._data, expected) + + def test_can_hold_identifiers(self, simple_index): + idx = simple_index + key = idx[0] + assert idx._can_hold_identifiers_and_holds_name(key) is False + + def test_too_many_names(self, simple_index): + index = simple_index + with pytest.raises(ValueError, match="^Length"): + index.names = ["roger", "harold"] + + @pytest.mark.parametrize( + "index, start, stop, step", + [ + (RangeIndex(5), 0, 5, 1), + (RangeIndex(0, 5), 0, 5, 1), + (RangeIndex(5, step=2), 0, 5, 2), + (RangeIndex(1, 5, 2), 1, 5, 2), + ], + ) + def test_start_stop_step_attrs(self, index, start, stop, step): + # GH 25710 + assert index.start == start + assert index.stop == stop + assert index.step == step + + def test_copy(self): + i = RangeIndex(5, name="Foo") + i_copy = i.copy() + assert i_copy is not i + assert i_copy.identical(i) + assert i_copy._range == range(0, 5, 1) + assert i_copy.name == "Foo" + + def test_repr(self): + i = RangeIndex(5, name="Foo") + result = repr(i) + expected = "RangeIndex(start=0, stop=5, step=1, name='Foo')" + assert result == expected + + result = eval(result) + tm.assert_index_equal(result, i, exact=True) + + i = RangeIndex(5, 0, -1) + result = repr(i) + expected = "RangeIndex(start=5, stop=0, step=-1)" + assert result == expected + + result = eval(result) + tm.assert_index_equal(result, i, exact=True) + + def test_insert(self): + idx = RangeIndex(5, name="Foo") + result = idx[1:4] + + # test 0th element + tm.assert_index_equal(idx[0:4], result.insert(0, idx[0]), exact="equiv") + + # GH 18295 (test missing) + expected = Index([0, np.nan, 1, 2, 3, 4], dtype=np.float64) + for na in [np.nan, None, pd.NA]: + result = RangeIndex(5).insert(1, na) + tm.assert_index_equal(result, expected) + + result = RangeIndex(5).insert(1, pd.NaT) + expected = Index([0, pd.NaT, 1, 2, 3, 4], dtype=object) + tm.assert_index_equal(result, expected) + + def test_insert_edges_preserves_rangeindex(self): + idx = Index(range(4, 9, 2)) + + result = idx.insert(0, 2) + expected = Index(range(2, 9, 2)) + tm.assert_index_equal(result, expected, exact=True) + + result = idx.insert(3, 10) + expected = Index(range(4, 11, 2)) + tm.assert_index_equal(result, expected, exact=True) + + def test_insert_middle_preserves_rangeindex(self): + # insert in the middle + idx = Index(range(0, 3, 2)) + result = idx.insert(1, 1) + expected = Index(range(3)) + tm.assert_index_equal(result, expected, exact=True) + + idx = idx * 2 + result = idx.insert(1, 2) + expected = expected * 2 + tm.assert_index_equal(result, expected, exact=True) + + def test_delete(self): + idx = RangeIndex(5, name="Foo") + expected = idx[1:] + result = idx.delete(0) + tm.assert_index_equal(result, expected, exact=True) + assert result.name == expected.name + + expected = idx[:-1] + result = idx.delete(-1) + tm.assert_index_equal(result, expected, exact=True) + assert result.name == expected.name + + msg = "index 5 is out of bounds for axis 0 with size 5" + with pytest.raises((IndexError, ValueError), match=msg): + # either depending on numpy version + result = idx.delete(len(idx)) + + 
def test_delete_preserves_rangeindex(self): + idx = Index(range(2), name="foo") + + result = idx.delete([1]) + expected = Index(range(1), name="foo") + tm.assert_index_equal(result, expected, exact=True) + + result = idx.delete(1) + tm.assert_index_equal(result, expected, exact=True) + + def test_delete_preserves_rangeindex_middle(self): + idx = Index(range(3), name="foo") + result = idx.delete(1) + expected = idx[::2] + tm.assert_index_equal(result, expected, exact=True) + + result = idx.delete(-2) + tm.assert_index_equal(result, expected, exact=True) + + def test_delete_preserves_rangeindex_list_at_end(self): + idx = RangeIndex(0, 6, 1) + + loc = [2, 3, 4, 5] + result = idx.delete(loc) + expected = idx[:2] + tm.assert_index_equal(result, expected, exact=True) + + result = idx.delete(loc[::-1]) + tm.assert_index_equal(result, expected, exact=True) + + def test_delete_preserves_rangeindex_list_middle(self): + idx = RangeIndex(0, 6, 1) + + loc = [1, 2, 3, 4] + result = idx.delete(loc) + expected = RangeIndex(0, 6, 5) + tm.assert_index_equal(result, expected, exact=True) + + result = idx.delete(loc[::-1]) + tm.assert_index_equal(result, expected, exact=True) + + def test_delete_all_preserves_rangeindex(self): + idx = RangeIndex(0, 6, 1) + + loc = [0, 1, 2, 3, 4, 5] + result = idx.delete(loc) + expected = idx[:0] + tm.assert_index_equal(result, expected, exact=True) + + result = idx.delete(loc[::-1]) + tm.assert_index_equal(result, expected, exact=True) + + def test_delete_not_preserving_rangeindex(self): + idx = RangeIndex(0, 6, 1) + + loc = [0, 3, 5] + result = idx.delete(loc) + expected = Index([1, 2, 4]) + tm.assert_index_equal(result, expected, exact=True) + + result = idx.delete(loc[::-1]) + tm.assert_index_equal(result, expected, exact=True) + + def test_view(self): + i = RangeIndex(0, name="Foo") + i_view = i.view() + assert i_view.name == "Foo" + + i_view = i.view("i8") + tm.assert_numpy_array_equal(i.values, i_view) + + msg = "Passing a type in RangeIndex.view is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + i_view = i.view(RangeIndex) + tm.assert_index_equal(i, i_view) + + def test_dtype(self, simple_index): + index = simple_index + assert index.dtype == np.int64 + + def test_cache(self): + # GH 26565, GH26617, GH35432, GH53387 + # This test checks whether _cache has been set. + # Calling RangeIndex._cache["_data"] creates an int64 array of the same length + # as the RangeIndex and stores it in _cache. 
+ idx = RangeIndex(0, 100, 10) + + assert idx._cache == {} + + repr(idx) + assert idx._cache == {} + + str(idx) + assert idx._cache == {} + + idx.get_loc(20) + assert idx._cache == {} + + 90 in idx # True + assert idx._cache == {} + + 91 in idx # False + assert idx._cache == {} + + idx.all() + assert idx._cache == {} + + idx.any() + assert idx._cache == {} + + for _ in idx: + pass + assert idx._cache == {} + + msg = "RangeIndex.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + idx.format() + assert idx._cache == {} + + df = pd.DataFrame({"a": range(10)}, index=idx) + + # df.__repr__ should not populate index cache + str(df) + assert idx._cache == {} + + df.loc[50] + assert idx._cache == {} + + with pytest.raises(KeyError, match="51"): + df.loc[51] + assert idx._cache == {} + + df.loc[10:50] + assert idx._cache == {} + + df.iloc[5:10] + assert idx._cache == {} + + # after calling take, _cache may contain other keys, but not "_data" + idx.take([3, 0, 1]) + assert "_data" not in idx._cache + + df.loc[[50]] + assert "_data" not in idx._cache + + df.iloc[[5, 6, 7, 8, 9]] + assert "_data" not in idx._cache + + # idx._cache should contain a _data entry after call to idx._data + idx._data + assert isinstance(idx._data, np.ndarray) + assert idx._data is idx._data # check cached value is reused + assert "_data" in idx._cache + expected = np.arange(0, 100, 10, dtype="int64") + tm.assert_numpy_array_equal(idx._cache["_data"], expected) + + def test_is_monotonic(self): + index = RangeIndex(0, 20, 2) + assert index.is_monotonic_increasing is True + assert index.is_monotonic_increasing is True + assert index.is_monotonic_decreasing is False + assert index._is_strictly_monotonic_increasing is True + assert index._is_strictly_monotonic_decreasing is False + + index = RangeIndex(4, 0, -1) + assert index.is_monotonic_increasing is False + assert index._is_strictly_monotonic_increasing is False + assert index.is_monotonic_decreasing is True + assert index._is_strictly_monotonic_decreasing is True + + index = RangeIndex(1, 2) + assert index.is_monotonic_increasing is True + assert index.is_monotonic_increasing is True + assert index.is_monotonic_decreasing is True + assert index._is_strictly_monotonic_increasing is True + assert index._is_strictly_monotonic_decreasing is True + + index = RangeIndex(2, 1) + assert index.is_monotonic_increasing is True + assert index.is_monotonic_increasing is True + assert index.is_monotonic_decreasing is True + assert index._is_strictly_monotonic_increasing is True + assert index._is_strictly_monotonic_decreasing is True + + index = RangeIndex(1, 1) + assert index.is_monotonic_increasing is True + assert index.is_monotonic_increasing is True + assert index.is_monotonic_decreasing is True + assert index._is_strictly_monotonic_increasing is True + assert index._is_strictly_monotonic_decreasing is True + + @pytest.mark.parametrize( + "left,right", + [ + (RangeIndex(0, 9, 2), RangeIndex(0, 10, 2)), + (RangeIndex(0), RangeIndex(1, -1, 3)), + (RangeIndex(1, 2, 3), RangeIndex(1, 3, 4)), + (RangeIndex(0, -9, -2), RangeIndex(0, -10, -2)), + ], + ) + def test_equals_range(self, left, right): + assert left.equals(right) + assert right.equals(left) + + def test_logical_compat(self, simple_index): + idx = simple_index + assert idx.all() == idx.values.all() + assert idx.any() == idx.values.any() + + def test_identical(self, simple_index): + index = simple_index + i = Index(index.copy()) + assert i.identical(index) + + # we don't allow object dtype for 
RangeIndex + if isinstance(index, RangeIndex): + return + + same_values_different_type = Index(i, dtype=object) + assert not i.identical(same_values_different_type) + + i = index.copy(dtype=object) + i = i.rename("foo") + same_values = Index(i, dtype=object) + assert same_values.identical(index.copy(dtype=object)) + + assert not i.identical(index) + assert Index(same_values, name="foo", dtype=object).identical(i) + + assert not index.copy(dtype=object).identical(index.copy(dtype="int64")) + + def test_nbytes(self): + # memory savings vs int index + idx = RangeIndex(0, 1000) + assert idx.nbytes < Index(idx._values).nbytes / 10 + + # constant memory usage + i2 = RangeIndex(0, 10) + assert idx.nbytes == i2.nbytes + + @pytest.mark.parametrize( + "start,stop,step", + [ + # can't + ("foo", "bar", "baz"), + # shouldn't + ("0", "1", "2"), + ], + ) + def test_cant_or_shouldnt_cast(self, start, stop, step): + msg = f"Wrong type {type(start)} for value {start}" + with pytest.raises(TypeError, match=msg): + RangeIndex(start, stop, step) + + def test_view_index(self, simple_index): + index = simple_index + msg = "Passing a type in RangeIndex.view is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + index.view(Index) + + def test_prevent_casting(self, simple_index): + index = simple_index + result = index.astype("O") + assert result.dtype == np.object_ + + def test_repr_roundtrip(self, simple_index): + index = simple_index + tm.assert_index_equal(eval(repr(index)), index) + + def test_slice_keep_name(self): + idx = RangeIndex(1, 2, name="asdf") + assert idx.name == idx[1:].name + + @pytest.mark.parametrize( + "index", + [ + RangeIndex(start=0, stop=20, step=2, name="foo"), + RangeIndex(start=18, stop=-1, step=-2, name="bar"), + ], + ids=["index_inc", "index_dec"], + ) + def test_has_duplicates(self, index): + assert index.is_unique + assert not index.has_duplicates + + def test_extended_gcd(self, simple_index): + index = simple_index + result = index._extended_gcd(6, 10) + assert result[0] == result[1] * 6 + result[2] * 10 + assert 2 == result[0] + + result = index._extended_gcd(10, 6) + assert 2 == result[1] * 10 + result[2] * 6 + assert 2 == result[0] + + def test_min_fitting_element(self): + result = RangeIndex(0, 20, 2)._min_fitting_element(1) + assert 2 == result + + result = RangeIndex(1, 6)._min_fitting_element(1) + assert 1 == result + + result = RangeIndex(18, -2, -2)._min_fitting_element(1) + assert 2 == result + + result = RangeIndex(5, 0, -1)._min_fitting_element(1) + assert 1 == result + + big_num = 500000000000000000000000 + + result = RangeIndex(5, big_num * 2, 1)._min_fitting_element(big_num) + assert big_num == result + + def test_slice_specialised(self, simple_index): + index = simple_index + index.name = "foo" + + # scalar indexing + res = index[1] + expected = 2 + assert res == expected + + res = index[-1] + expected = 18 + assert res == expected + + # slicing + # slice value completion + index_slice = index[:] + expected = index + tm.assert_index_equal(index_slice, expected) + + # positive slice values + index_slice = index[7:10:2] + expected = Index([14, 18], name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + # negative slice values + index_slice = index[-1:-5:-2] + expected = Index([18, 14], name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + # stop overshoot + index_slice = index[2:100:4] + expected = Index([4, 12], name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + # reverse 
+ index_slice = index[::-1] + expected = Index(index.values[::-1], name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + index_slice = index[-8::-1] + expected = Index([4, 2, 0], name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + index_slice = index[-40::-1] + expected = Index(np.array([], dtype=np.int64), name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + index_slice = index[40::-1] + expected = Index(index.values[40::-1], name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + index_slice = index[10::-1] + expected = Index(index.values[::-1], name="foo") + tm.assert_index_equal(index_slice, expected, exact="equiv") + + @pytest.mark.parametrize("step", set(range(-5, 6)) - {0}) + def test_len_specialised(self, step): + # make sure that our len is the same as np.arange calc + start, stop = (0, 5) if step > 0 else (5, 0) + + arr = np.arange(start, stop, step) + index = RangeIndex(start, stop, step) + assert len(index) == len(arr) + + index = RangeIndex(stop, start, step) + assert len(index) == 0 + + @pytest.mark.parametrize( + "indices, expected", + [ + ([RangeIndex(1, 12, 5)], RangeIndex(1, 12, 5)), + ([RangeIndex(0, 6, 4)], RangeIndex(0, 6, 4)), + ([RangeIndex(1, 3), RangeIndex(3, 7)], RangeIndex(1, 7)), + ([RangeIndex(1, 5, 2), RangeIndex(5, 6)], RangeIndex(1, 6, 2)), + ([RangeIndex(1, 3, 2), RangeIndex(4, 7, 3)], RangeIndex(1, 7, 3)), + ([RangeIndex(-4, 3, 2), RangeIndex(4, 7, 2)], RangeIndex(-4, 7, 2)), + ([RangeIndex(-4, -8), RangeIndex(-8, -12)], RangeIndex(0, 0)), + ([RangeIndex(-4, -8), RangeIndex(3, -4)], RangeIndex(0, 0)), + ([RangeIndex(-4, -8), RangeIndex(3, 5)], RangeIndex(3, 5)), + ([RangeIndex(-4, -2), RangeIndex(3, 5)], Index([-4, -3, 3, 4])), + ([RangeIndex(-2), RangeIndex(3, 5)], RangeIndex(3, 5)), + ([RangeIndex(2), RangeIndex(2)], Index([0, 1, 0, 1])), + ([RangeIndex(2), RangeIndex(2, 5), RangeIndex(5, 8, 4)], RangeIndex(0, 6)), + ( + [RangeIndex(2), RangeIndex(3, 5), RangeIndex(5, 8, 4)], + Index([0, 1, 3, 4, 5]), + ), + ( + [RangeIndex(-2, 2), RangeIndex(2, 5), RangeIndex(5, 8, 4)], + RangeIndex(-2, 6), + ), + ([RangeIndex(3), Index([-1, 3, 15])], Index([0, 1, 2, -1, 3, 15])), + ([RangeIndex(3), Index([-1, 3.1, 15.0])], Index([0, 1, 2, -1, 3.1, 15.0])), + ([RangeIndex(3), Index(["a", None, 14])], Index([0, 1, 2, "a", None, 14])), + ([RangeIndex(3, 1), Index(["a", None, 14])], Index(["a", None, 14])), + ], + ) + def test_append(self, indices, expected): + # GH16212 + result = indices[0].append(indices[1:]) + tm.assert_index_equal(result, expected, exact=True) + + if len(indices) == 2: + # Append single item rather than list + result2 = indices[0].append(indices[1]) + tm.assert_index_equal(result2, expected, exact=True) + + def test_engineless_lookup(self): + # GH 16685 + # Standard lookup on RangeIndex should not require the engine to be + # created + idx = RangeIndex(2, 10, 3) + + assert idx.get_loc(5) == 1 + tm.assert_numpy_array_equal( + idx.get_indexer([2, 8]), ensure_platform_int(np.array([0, 2])) + ) + with pytest.raises(KeyError, match="3"): + idx.get_loc(3) + + assert "_engine" not in idx._cache + + # Different types of scalars can be excluded immediately, no need to + # use the _engine + with pytest.raises(KeyError, match="'a'"): + idx.get_loc("a") + + assert "_engine" not in idx._cache + + def test_format_empty(self): + # GH35712 + empty_idx = RangeIndex(0) + msg = r"RangeIndex\.format is deprecated" + with tm.assert_produces_warning(FutureWarning, match=msg): + 
assert empty_idx.format() == [] + with tm.assert_produces_warning(FutureWarning, match=msg): + assert empty_idx.format(name=True) == [""] + + @pytest.mark.parametrize( + "ri", + [ + RangeIndex(0, -1, -1), + RangeIndex(0, 1, 1), + RangeIndex(1, 3, 2), + RangeIndex(0, -1, -2), + RangeIndex(-3, -5, -2), + ], + ) + def test_append_len_one(self, ri): + # GH39401 + result = ri.append([]) + tm.assert_index_equal(result, ri, exact=True) + + @pytest.mark.parametrize("base", [RangeIndex(0, 2), Index([0, 1])]) + def test_isin_range(self, base): + # GH#41151 + values = RangeIndex(0, 1) + result = base.isin(values) + expected = np.array([True, False]) + tm.assert_numpy_array_equal(result, expected) + + def test_sort_values_key(self): + # GH#43666, GH#52764 + sort_order = {8: 2, 6: 0, 4: 8, 2: 10, 0: 12} + values = RangeIndex(0, 10, 2) + result = values.sort_values(key=lambda x: x.map(sort_order)) + expected = Index([6, 8, 4, 2, 0], dtype="int64") + tm.assert_index_equal(result, expected, check_exact=True) + + # check this matches the Series.sort_values behavior + ser = values.to_series() + result2 = ser.sort_values(key=lambda x: x.map(sort_order)) + tm.assert_series_equal(result2, expected.to_series(), check_exact=True) + + def test_range_index_rsub_by_const(self): + # GH#53255 + result = 3 - RangeIndex(0, 4, 1) + expected = RangeIndex(3, -1, -1) + tm.assert_index_equal(result, expected) diff --git a/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_setops.py b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_setops.py new file mode 100644 index 0000000000000000000000000000000000000000..d417b8b743dc589bdf9d6acf5bde396a129ece23 --- /dev/null +++ b/env-llmeval/lib/python3.10/site-packages/pandas/tests/indexes/ranges/test_setops.py @@ -0,0 +1,493 @@ +from datetime import ( + datetime, + timedelta, +) + +from hypothesis import ( + assume, + given, + strategies as st, +) +import numpy as np +import pytest + +from pandas import ( + Index, + RangeIndex, +) +import pandas._testing as tm + + +class TestRangeIndexSetOps: + @pytest.mark.parametrize("dtype", [None, "int64", "uint64"]) + def test_intersection_mismatched_dtype(self, dtype): + # check that we cast to float, not object + index = RangeIndex(start=0, stop=20, step=2, name="foo") + index = Index(index, dtype=dtype) + + flt = index.astype(np.float64) + + # bc index.equals(flt), we go through fastpath and get RangeIndex back + result = index.intersection(flt) + tm.assert_index_equal(result, index, exact=True) + + result = flt.intersection(index) + tm.assert_index_equal(result, flt, exact=True) + + # neither empty, not-equals + result = index.intersection(flt[1:]) + tm.assert_index_equal(result, flt[1:], exact=True) + + result = flt[1:].intersection(index) + tm.assert_index_equal(result, flt[1:], exact=True) + + # empty other + result = index.intersection(flt[:0]) + tm.assert_index_equal(result, flt[:0], exact=True) + + result = flt[:0].intersection(index) + tm.assert_index_equal(result, flt[:0], exact=True) + + def test_intersection_empty(self, sort, names): + # name retention on empty intersections + index = RangeIndex(start=0, stop=20, step=2, name=names[0]) + + # empty other + result = index.intersection(index[:0].rename(names[1]), sort=sort) + tm.assert_index_equal(result, index[:0].rename(names[2]), exact=True) + + # empty self + result = index[:0].intersection(index.rename(names[1]), sort=sort) + tm.assert_index_equal(result, index[:0].rename(names[2]), exact=True) + + def test_intersection(self, 
sort): + # intersect with Index with dtype int64 + index = RangeIndex(start=0, stop=20, step=2) + other = Index(np.arange(1, 6)) + result = index.intersection(other, sort=sort) + expected = Index(np.sort(np.intersect1d(index.values, other.values))) + tm.assert_index_equal(result, expected) + + result = other.intersection(index, sort=sort) + expected = Index( + np.sort(np.asarray(np.intersect1d(index.values, other.values))) + ) + tm.assert_index_equal(result, expected) + + # intersect with increasing RangeIndex + other = RangeIndex(1, 6) + result = index.intersection(other, sort=sort) + expected = Index(np.sort(np.intersect1d(index.values, other.values))) + tm.assert_index_equal(result, expected, exact="equiv") + + # intersect with decreasing RangeIndex + other = RangeIndex(5, 0, -1) + result = index.intersection(other, sort=sort) + expected = Index(np.sort(np.intersect1d(index.values, other.values))) + tm.assert_index_equal(result, expected, exact="equiv") + + # reversed (GH 17296) + result = other.intersection(index, sort=sort) + tm.assert_index_equal(result, expected, exact="equiv") + + # GH 17296: intersect two decreasing RangeIndexes + first = RangeIndex(10, -2, -2) + other = RangeIndex(5, -4, -1) + expected = first.astype(int).intersection(other.astype(int), sort=sort) + result = first.intersection(other, sort=sort).astype(int) + tm.assert_index_equal(result, expected) + + # reversed + result = other.intersection(first, sort=sort).astype(int) + tm.assert_index_equal(result, expected) + + index = RangeIndex(5, name="foo") + + # intersect of non-overlapping indices + other = RangeIndex(5, 10, 1, name="foo") + result = index.intersection(other, sort=sort) + expected = RangeIndex(0, 0, 1, name="foo") + tm.assert_index_equal(result, expected) + + other = RangeIndex(-1, -5, -1) + result = index.intersection(other, sort=sort) + expected = RangeIndex(0, 0, 1) + tm.assert_index_equal(result, expected) + + # intersection of empty indices + other = RangeIndex(0, 0, 1) + result = index.intersection(other, sort=sort) + expected = RangeIndex(0, 0, 1) + tm.assert_index_equal(result, expected) + + result = other.intersection(index, sort=sort) + tm.assert_index_equal(result, expected) + + def test_intersection_non_overlapping_gcd(self, sort, names): + # intersection of non-overlapping values based on start value and gcd + index = RangeIndex(1, 10, 2, name=names[0]) + other = RangeIndex(0, 10, 4, name=names[1]) + result = index.intersection(other, sort=sort) + expected = RangeIndex(0, 0, 1, name=names[2]) + tm.assert_index_equal(result, expected) + + def test_union_noncomparable(self, sort): + # corner case, Index with non-int64 dtype + index = RangeIndex(start=0, stop=20, step=2) + other = Index([datetime.now() + timedelta(i) for i in range(4)], dtype=object) + result = index.union(other, sort=sort) + expected = Index(np.concatenate((index, other))) + tm.assert_index_equal(result, expected) + + result = other.union(index, sort=sort) + expected = Index(np.concatenate((other, index))) + tm.assert_index_equal(result, expected) + + @pytest.mark.parametrize( + "idx1, idx2, expected_sorted, expected_notsorted", + [ + ( + RangeIndex(0, 10, 1), + RangeIndex(0, 10, 1), + RangeIndex(0, 10, 1), + RangeIndex(0, 10, 1), + ), + ( + RangeIndex(0, 10, 1), + RangeIndex(5, 20, 1), + RangeIndex(0, 20, 1), + RangeIndex(0, 20, 1), + ), + ( + RangeIndex(0, 10, 1), + RangeIndex(10, 20, 1), + RangeIndex(0, 20, 1), + RangeIndex(0, 20, 1), + ), + ( + RangeIndex(0, -10, -1), + RangeIndex(0, -10, -1), + RangeIndex(0, -10, -1), + 
RangeIndex(0, -10, -1), + ), + ( + RangeIndex(0, -10, -1), + RangeIndex(-10, -20, -1), + RangeIndex(-19, 1, 1), + RangeIndex(0, -20, -1), + ), + ( + RangeIndex(0, 10, 2), + RangeIndex(1, 10, 2), + RangeIndex(0, 10, 1), + Index(list(range(0, 10, 2)) + list(range(1, 10, 2))), + ), + ( + RangeIndex(0, 11, 2), + RangeIndex(1, 12, 2), + RangeIndex(0, 12, 1), + Index(list(range(0, 11, 2)) + list(range(1, 12, 2))), + ), + ( + RangeIndex(0, 21, 4), + RangeIndex(-2, 24, 4), + RangeIndex(-2, 24, 2), + Index(list(range(0, 21, 4)) + list(range(-2, 24, 4))), + ), + ( + RangeIndex(0, -20, -2), + RangeIndex(-1, -21, -2), + RangeIndex(-19, 1, 1), + Index(list(range(0, -20, -2)) + list(range(-1, -21, -2))), + ), + ( + RangeIndex(0, 100, 5), + RangeIndex(0, 100, 20), + RangeIndex(0, 100, 5), + RangeIndex(0, 100, 5), + ), + ( + RangeIndex(0, -100, -5), + RangeIndex(5, -100, -20), + RangeIndex(-95, 10, 5), + Index(list(range(0, -100, -5)) + [5]), + ), + ( + RangeIndex(0, -11, -1), + RangeIndex(1, -12, -4), + RangeIndex(-11, 2, 1), + Index(list(range(0, -11, -1)) + [1, -11]), + ), + (RangeIndex(0), RangeIndex(0), RangeIndex(0), RangeIndex(0)), + ( + RangeIndex(0, -10, -2), + RangeIndex(0), + RangeIndex(0, -10, -2), + RangeIndex(0, -10, -2), + ), + ( + RangeIndex(0, 100, 2), + RangeIndex(100, 150, 200), + RangeIndex(0, 102, 2), + RangeIndex(0, 102, 2), + ), + ( + RangeIndex(0, -100, -2), + RangeIndex(-100, 50, 102), + RangeIndex(-100, 4, 2), + Index(list(range(0, -100, -2)) + [-100, 2]), + ), + ( + RangeIndex(0, -100, -1), + RangeIndex(0, -50, -3), + RangeIndex(-99, 1, 1), + RangeIndex(0, -100, -1), + ), + ( + RangeIndex(0, 1, 1), + RangeIndex(5, 6, 10), + RangeIndex(0, 6, 5), + RangeIndex(0, 10, 5), + ), + ( + RangeIndex(0, 10, 5), + RangeIndex(-5, -6, -20), + RangeIndex(-5, 10, 5), + Index([0, 5, -5]), + ), + ( + RangeIndex(0, 3, 1), + RangeIndex(4, 5, 1), + Index([0, 1, 2, 4]), + Index([0, 1, 2, 4]), + ), + ( + RangeIndex(0, 10, 1), + Index([], dtype=np.int64), + RangeIndex(0, 10, 1), + RangeIndex(0, 10, 1), + ), + ( + RangeIndex(0), + Index([1, 5, 6]), + Index([1, 5, 6]), + Index([1, 5, 6]), + ), + # GH 43885 + ( + RangeIndex(0, 10), + RangeIndex(0, 5), + RangeIndex(0, 10), + RangeIndex(0, 10), + ), + ], + ids=lambda x: repr(x) if isinstance(x, RangeIndex) else x, + ) + def test_union_sorted(self, idx1, idx2, expected_sorted, expected_notsorted): + res1 = idx1.union(idx2, sort=None) + tm.assert_index_equal(res1, expected_sorted, exact=True) + + res1 = idx1.union(idx2, sort=False) + tm.assert_index_equal(res1, expected_notsorted, exact=True) + + res2 = idx2.union(idx1, sort=None) + res3 = Index(idx1._values, name=idx1.name).union(idx2, sort=None) + tm.assert_index_equal(res2, expected_sorted, exact=True) + tm.assert_index_equal(res3, expected_sorted, exact="equiv") + + def test_union_same_step_misaligned(self): + # GH#44019 + left = RangeIndex(range(0, 20, 4)) + right = RangeIndex(range(1, 21, 4)) + + result = left.union(right) + expected = Index([0, 1, 4, 5, 8, 9, 12, 13, 16, 17]) + tm.assert_index_equal(result, expected, exact=True) + + def test_difference(self): + # GH#12034 Cases where we operate against another RangeIndex and may + # get back another RangeIndex + obj = RangeIndex.from_range(range(1, 10), name="foo") + + result = obj.difference(obj) + expected = RangeIndex.from_range(range(0), name="foo") + tm.assert_index_equal(result, expected, exact=True) + + result = obj.difference(expected.rename("bar")) + tm.assert_index_equal(result, obj.rename(None), exact=True) + + result = 
obj.difference(obj[:3])
+        tm.assert_index_equal(result, obj[3:], exact=True)
+
+        result = obj.difference(obj[-3:])
+        tm.assert_index_equal(result, obj[:-3], exact=True)
+
+        # Flipping the step of 'other' doesn't affect the result, but
+        # flipping the step of 'self' does when sort=None
+        result = obj[::-1].difference(obj[-3:])
+        tm.assert_index_equal(result, obj[:-3], exact=True)
+
+        result = obj[::-1].difference(obj[-3:], sort=False)
+        tm.assert_index_equal(result, obj[:-3][::-1], exact=True)
+
+        result = obj[::-1].difference(obj[-3:][::-1])
+        tm.assert_index_equal(result, obj[:-3], exact=True)
+
+        result = obj[::-1].difference(obj[-3:][::-1], sort=False)
+        tm.assert_index_equal(result, obj[:-3][::-1], exact=True)
+
+        result = obj.difference(obj[2:6])
+        expected = Index([1, 2, 7, 8, 9], name="foo")
+        tm.assert_index_equal(result, expected, exact=True)
+
+    def test_difference_sort(self):
+        # GH#44085 ensure we respect the sort keyword
+
+        idx = Index(range(4))[::-1]
+        other = Index(range(3, 4))
+
+        result = idx.difference(other)
+        expected = Index(range(3))
+        tm.assert_index_equal(result, expected, exact=True)
+
+        result = idx.difference(other, sort=False)
+        expected = expected[::-1]
+        tm.assert_index_equal(result, expected, exact=True)
+
+        # case where the intersection is empty
+        other = range(10, 12)
+        result = idx.difference(other, sort=None)
+        expected = idx[::-1]
+        tm.assert_index_equal(result, expected, exact=True)
+
+    def test_difference_mismatched_step(self):
+        obj = RangeIndex.from_range(range(1, 10), name="foo")
+
+        result = obj.difference(obj[::2])
+        expected = obj[1::2]
+        tm.assert_index_equal(result, expected, exact=True)
+
+        result = obj[::-1].difference(obj[::2], sort=False)
+        tm.assert_index_equal(result, expected[::-1], exact=True)
+
+        result = obj.difference(obj[1::2])
+        expected = obj[::2]
+        tm.assert_index_equal(result, expected, exact=True)
+
+        result = obj[::-1].difference(obj[1::2], sort=False)
+        tm.assert_index_equal(result, expected[::-1], exact=True)
+
+    def test_difference_interior_overlap_endpoints_preserved(self):
+        left = RangeIndex(range(4))
+        right = RangeIndex(range(1, 3))
+
+        result = left.difference(right)
+        expected = RangeIndex(0, 4, 3)
+        assert expected.tolist() == [0, 3]
+        tm.assert_index_equal(result, expected, exact=True)
+
+    def test_difference_endpoints_overlap_interior_preserved(self):
+        left = RangeIndex(-8, 20, 7)
+        right = RangeIndex(13, -9, -3)
+
+        result = left.difference(right)
+        expected = RangeIndex(-1, 13, 7)
+        assert expected.tolist() == [-1, 6]
+        tm.assert_index_equal(result, expected, exact=True)
+
+    def test_difference_interior_non_preserving(self):
+        # case with intersection of length 1 but RangeIndex is not preserved
+        idx = Index(range(10))
+
+        other = idx[3:4]
+        result = idx.difference(other)
+        expected = Index([0, 1, 2, 4, 5, 6, 7, 8, 9])
+        tm.assert_index_equal(result, expected, exact=True)
+
+        # case with other.step / self.step > 2
+        other = idx[::3]
+        result = idx.difference(other)
+        expected = Index([1, 2, 4, 5, 7, 8])
+        tm.assert_index_equal(result, expected, exact=True)
+
+        # cases with only reaching one end of left
+        obj = Index(range(20))
+        other = obj[:10:2]
+        result = obj.difference(other)
+        expected = Index([1, 3, 5, 7, 9] + list(range(10, 20)))
+        tm.assert_index_equal(result, expected, exact=True)
+
+        other = obj[1:11:2]
+        result = obj.difference(other)
+        expected = Index([0, 2, 4, 6, 8, 10] + list(range(11, 20)))
+        tm.assert_index_equal(result, expected, exact=True)
+
+    def test_symmetric_difference(self):
+        #
GH#12034 Cases where we operate against another RangeIndex and may + # get back another RangeIndex + left = RangeIndex.from_range(range(1, 10), name="foo") + + result = left.symmetric_difference(left) + expected = RangeIndex.from_range(range(0), name="foo") + tm.assert_index_equal(result, expected) + + result = left.symmetric_difference(expected.rename("bar")) + tm.assert_index_equal(result, left.rename(None)) + + result = left[:-2].symmetric_difference(left[2:]) + expected = Index([1, 2, 8, 9], name="foo") + tm.assert_index_equal(result, expected, exact=True) + + right = RangeIndex.from_range(range(10, 15)) + + result = left.symmetric_difference(right) + expected = RangeIndex.from_range(range(1, 15)) + tm.assert_index_equal(result, expected) + + result = left.symmetric_difference(right[1:]) + expected = Index([1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14]) + tm.assert_index_equal(result, expected, exact=True) + + +def assert_range_or_not_is_rangelike(index): + """ + Check that we either have a RangeIndex or that this index *cannot* + be represented as a RangeIndex. + """ + if not isinstance(index, RangeIndex) and len(index) > 0: + diff = index[:-1] - index[1:] + assert not (diff == diff[0]).all() + + +@given( + st.integers(-20, 20), + st.integers(-20, 20), + st.integers(-20, 20), + st.integers(-20, 20), + st.integers(-20, 20), + st.integers(-20, 20), +) +def test_range_difference(start1, stop1, step1, start2, stop2, step2): + # test that + # a) we match Index[int64].difference and + # b) we return RangeIndex whenever it is possible to do so. + assume(step1 != 0) + assume(step2 != 0) + + left = RangeIndex(start1, stop1, step1) + right = RangeIndex(start2, stop2, step2) + + result = left.difference(right, sort=None) + assert_range_or_not_is_rangelike(result) + + left_int64 = Index(left.to_numpy()) + right_int64 = Index(right.to_numpy()) + + alt = left_int64.difference(right_int64, sort=None) + tm.assert_index_equal(result, alt, exact="equiv") + + result = left.difference(right, sort=False) + assert_range_or_not_is_rangelike(result) + + alt = left_int64.difference(right_int64, sort=False) + tm.assert_index_equal(result, alt, exact="equiv")