Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +1 -0
- llmeval-env/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so +3 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_array.py +478 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_datetimes.py +840 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_ndarray_backed.py +75 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_period.py +184 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_timedeltas.py +313 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_datetime.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_getitem.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_mask.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_set_value.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_setitem.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_datetime.py +499 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_get.py +238 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_getitem.py +735 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_indexing.py +518 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_setitem.py +1847 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_take.py +50 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_where.py +481 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_xs.py +82 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_freq_code.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_frequencies.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_inference.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_freq_code.py +69 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_frequencies.py +29 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_inference.py +558 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_calendar.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_federal.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_holiday.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_observance.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_calendar.py +119 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_federal.py +58 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_holiday.py +332 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_observance.py +105 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/common.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_hour.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_month.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_year.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_common.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_custom_business_day.cpython-310.pyc +0 -0
.gitattributes
CHANGED
@@ -195,3 +195,4 @@ llmeval-env/lib/python3.10/site-packages/pandas/_libs/interval.cpython-310-x86_6
|
|
195 |
llmeval-env/lib/python3.10/site-packages/pandas/_libs/join.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
196 |
llmeval-env/lib/python3.10/site-packages/pandas/_libs/algos.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
197 |
llmeval-env/lib/python3.10/site-packages/pandas/_libs/groupby.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
|
|
|
195 |
llmeval-env/lib/python3.10/site-packages/pandas/_libs/join.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
196 |
llmeval-env/lib/python3.10/site-packages/pandas/_libs/algos.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
197 |
llmeval-env/lib/python3.10/site-packages/pandas/_libs/groupby.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
198 |
+
llmeval-env/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
|
llmeval-env/lib/python3.10/site-packages/pandas/_libs/tslibs/offsets.cpython-310-x86_64-linux-gnu.so
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:2308b178030a6881fbe3c6ada5afd178c04270fabb04a6cd4c58e36bf322e466
|
3 |
+
size 1175424
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_array.py
ADDED
@@ -0,0 +1,478 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import datetime
|
2 |
+
import decimal
|
3 |
+
import re
|
4 |
+
|
5 |
+
import numpy as np
|
6 |
+
import pytest
|
7 |
+
import pytz
|
8 |
+
|
9 |
+
import pandas as pd
|
10 |
+
import pandas._testing as tm
|
11 |
+
from pandas.api.extensions import register_extension_dtype
|
12 |
+
from pandas.arrays import (
|
13 |
+
BooleanArray,
|
14 |
+
DatetimeArray,
|
15 |
+
FloatingArray,
|
16 |
+
IntegerArray,
|
17 |
+
IntervalArray,
|
18 |
+
SparseArray,
|
19 |
+
TimedeltaArray,
|
20 |
+
)
|
21 |
+
from pandas.core.arrays import (
|
22 |
+
NumpyExtensionArray,
|
23 |
+
period_array,
|
24 |
+
)
|
25 |
+
from pandas.tests.extension.decimal import (
|
26 |
+
DecimalArray,
|
27 |
+
DecimalDtype,
|
28 |
+
to_decimal,
|
29 |
+
)
|
30 |
+
|
31 |
+
|
32 |
+
@pytest.mark.parametrize("dtype_unit", ["M8[h]", "M8[m]", "m8[h]", "M8[m]"])
|
33 |
+
def test_dt64_array(dtype_unit):
|
34 |
+
# PR 53817
|
35 |
+
dtype_var = np.dtype(dtype_unit)
|
36 |
+
msg = (
|
37 |
+
r"datetime64 and timedelta64 dtype resolutions other than "
|
38 |
+
r"'s', 'ms', 'us', and 'ns' are deprecated. "
|
39 |
+
r"In future releases passing unsupported resolutions will "
|
40 |
+
r"raise an exception."
|
41 |
+
)
|
42 |
+
with tm.assert_produces_warning(FutureWarning, match=re.escape(msg)):
|
43 |
+
pd.array([], dtype=dtype_var)
|
44 |
+
|
45 |
+
|
46 |
+
@pytest.mark.parametrize(
|
47 |
+
"data, dtype, expected",
|
48 |
+
[
|
49 |
+
# Basic NumPy defaults.
|
50 |
+
([], None, FloatingArray._from_sequence([], dtype="Float64")),
|
51 |
+
([1, 2], None, IntegerArray._from_sequence([1, 2], dtype="Int64")),
|
52 |
+
([1, 2], object, NumpyExtensionArray(np.array([1, 2], dtype=object))),
|
53 |
+
(
|
54 |
+
[1, 2],
|
55 |
+
np.dtype("float32"),
|
56 |
+
NumpyExtensionArray(np.array([1.0, 2.0], dtype=np.dtype("float32"))),
|
57 |
+
),
|
58 |
+
(
|
59 |
+
np.array([], dtype=object),
|
60 |
+
None,
|
61 |
+
NumpyExtensionArray(np.array([], dtype=object)),
|
62 |
+
),
|
63 |
+
(
|
64 |
+
np.array([1, 2], dtype="int64"),
|
65 |
+
None,
|
66 |
+
IntegerArray._from_sequence([1, 2], dtype="Int64"),
|
67 |
+
),
|
68 |
+
(
|
69 |
+
np.array([1.0, 2.0], dtype="float64"),
|
70 |
+
None,
|
71 |
+
FloatingArray._from_sequence([1.0, 2.0], dtype="Float64"),
|
72 |
+
),
|
73 |
+
# String alias passes through to NumPy
|
74 |
+
([1, 2], "float32", NumpyExtensionArray(np.array([1, 2], dtype="float32"))),
|
75 |
+
([1, 2], "int64", NumpyExtensionArray(np.array([1, 2], dtype=np.int64))),
|
76 |
+
# GH#44715 FloatingArray does not support float16, so fall
|
77 |
+
# back to NumpyExtensionArray
|
78 |
+
(
|
79 |
+
np.array([1, 2], dtype=np.float16),
|
80 |
+
None,
|
81 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.float16)),
|
82 |
+
),
|
83 |
+
# idempotency with e.g. pd.array(pd.array([1, 2], dtype="int64"))
|
84 |
+
(
|
85 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.int32)),
|
86 |
+
None,
|
87 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.int32)),
|
88 |
+
),
|
89 |
+
# Period alias
|
90 |
+
(
|
91 |
+
[pd.Period("2000", "D"), pd.Period("2001", "D")],
|
92 |
+
"Period[D]",
|
93 |
+
period_array(["2000", "2001"], freq="D"),
|
94 |
+
),
|
95 |
+
# Period dtype
|
96 |
+
(
|
97 |
+
[pd.Period("2000", "D")],
|
98 |
+
pd.PeriodDtype("D"),
|
99 |
+
period_array(["2000"], freq="D"),
|
100 |
+
),
|
101 |
+
# Datetime (naive)
|
102 |
+
(
|
103 |
+
[1, 2],
|
104 |
+
np.dtype("datetime64[ns]"),
|
105 |
+
DatetimeArray._from_sequence(
|
106 |
+
np.array([1, 2], dtype="M8[ns]"), dtype="M8[ns]"
|
107 |
+
),
|
108 |
+
),
|
109 |
+
(
|
110 |
+
[1, 2],
|
111 |
+
np.dtype("datetime64[s]"),
|
112 |
+
DatetimeArray._from_sequence(
|
113 |
+
np.array([1, 2], dtype="M8[s]"), dtype="M8[s]"
|
114 |
+
),
|
115 |
+
),
|
116 |
+
(
|
117 |
+
np.array([1, 2], dtype="datetime64[ns]"),
|
118 |
+
None,
|
119 |
+
DatetimeArray._from_sequence(
|
120 |
+
np.array([1, 2], dtype="M8[ns]"), dtype="M8[ns]"
|
121 |
+
),
|
122 |
+
),
|
123 |
+
(
|
124 |
+
pd.DatetimeIndex(["2000", "2001"]),
|
125 |
+
np.dtype("datetime64[ns]"),
|
126 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
127 |
+
),
|
128 |
+
(
|
129 |
+
pd.DatetimeIndex(["2000", "2001"]),
|
130 |
+
None,
|
131 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
132 |
+
),
|
133 |
+
(
|
134 |
+
["2000", "2001"],
|
135 |
+
np.dtype("datetime64[ns]"),
|
136 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
137 |
+
),
|
138 |
+
# Datetime (tz-aware)
|
139 |
+
(
|
140 |
+
["2000", "2001"],
|
141 |
+
pd.DatetimeTZDtype(tz="CET"),
|
142 |
+
DatetimeArray._from_sequence(
|
143 |
+
["2000", "2001"], dtype=pd.DatetimeTZDtype(tz="CET")
|
144 |
+
),
|
145 |
+
),
|
146 |
+
# Timedelta
|
147 |
+
(
|
148 |
+
["1h", "2h"],
|
149 |
+
np.dtype("timedelta64[ns]"),
|
150 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
151 |
+
),
|
152 |
+
(
|
153 |
+
pd.TimedeltaIndex(["1h", "2h"]),
|
154 |
+
np.dtype("timedelta64[ns]"),
|
155 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
156 |
+
),
|
157 |
+
(
|
158 |
+
np.array([1, 2], dtype="m8[s]"),
|
159 |
+
np.dtype("timedelta64[s]"),
|
160 |
+
TimedeltaArray._from_sequence(
|
161 |
+
np.array([1, 2], dtype="m8[s]"), dtype="m8[s]"
|
162 |
+
),
|
163 |
+
),
|
164 |
+
(
|
165 |
+
pd.TimedeltaIndex(["1h", "2h"]),
|
166 |
+
None,
|
167 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
168 |
+
),
|
169 |
+
(
|
170 |
+
# preserve non-nano, i.e. don't cast to NumpyExtensionArray
|
171 |
+
TimedeltaArray._simple_new(
|
172 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
173 |
+
),
|
174 |
+
None,
|
175 |
+
TimedeltaArray._simple_new(
|
176 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
177 |
+
),
|
178 |
+
),
|
179 |
+
(
|
180 |
+
# preserve non-nano, i.e. don't cast to NumpyExtensionArray
|
181 |
+
TimedeltaArray._simple_new(
|
182 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
183 |
+
),
|
184 |
+
np.dtype("m8[s]"),
|
185 |
+
TimedeltaArray._simple_new(
|
186 |
+
np.arange(5, dtype=np.int64).view("m8[s]"), dtype=np.dtype("m8[s]")
|
187 |
+
),
|
188 |
+
),
|
189 |
+
# Category
|
190 |
+
(["a", "b"], "category", pd.Categorical(["a", "b"])),
|
191 |
+
(
|
192 |
+
["a", "b"],
|
193 |
+
pd.CategoricalDtype(None, ordered=True),
|
194 |
+
pd.Categorical(["a", "b"], ordered=True),
|
195 |
+
),
|
196 |
+
# Interval
|
197 |
+
(
|
198 |
+
[pd.Interval(1, 2), pd.Interval(3, 4)],
|
199 |
+
"interval",
|
200 |
+
IntervalArray.from_tuples([(1, 2), (3, 4)]),
|
201 |
+
),
|
202 |
+
# Sparse
|
203 |
+
([0, 1], "Sparse[int64]", SparseArray([0, 1], dtype="int64")),
|
204 |
+
# IntegerNA
|
205 |
+
([1, None], "Int16", pd.array([1, None], dtype="Int16")),
|
206 |
+
(
|
207 |
+
pd.Series([1, 2]),
|
208 |
+
None,
|
209 |
+
NumpyExtensionArray(np.array([1, 2], dtype=np.int64)),
|
210 |
+
),
|
211 |
+
# String
|
212 |
+
(
|
213 |
+
["a", None],
|
214 |
+
"string",
|
215 |
+
pd.StringDtype()
|
216 |
+
.construct_array_type()
|
217 |
+
._from_sequence(["a", None], dtype=pd.StringDtype()),
|
218 |
+
),
|
219 |
+
(
|
220 |
+
["a", None],
|
221 |
+
pd.StringDtype(),
|
222 |
+
pd.StringDtype()
|
223 |
+
.construct_array_type()
|
224 |
+
._from_sequence(["a", None], dtype=pd.StringDtype()),
|
225 |
+
),
|
226 |
+
# Boolean
|
227 |
+
(
|
228 |
+
[True, None],
|
229 |
+
"boolean",
|
230 |
+
BooleanArray._from_sequence([True, None], dtype="boolean"),
|
231 |
+
),
|
232 |
+
(
|
233 |
+
[True, None],
|
234 |
+
pd.BooleanDtype(),
|
235 |
+
BooleanArray._from_sequence([True, None], dtype="boolean"),
|
236 |
+
),
|
237 |
+
# Index
|
238 |
+
(pd.Index([1, 2]), None, NumpyExtensionArray(np.array([1, 2], dtype=np.int64))),
|
239 |
+
# Series[EA] returns the EA
|
240 |
+
(
|
241 |
+
pd.Series(pd.Categorical(["a", "b"], categories=["a", "b", "c"])),
|
242 |
+
None,
|
243 |
+
pd.Categorical(["a", "b"], categories=["a", "b", "c"]),
|
244 |
+
),
|
245 |
+
# "3rd party" EAs work
|
246 |
+
([decimal.Decimal(0), decimal.Decimal(1)], "decimal", to_decimal([0, 1])),
|
247 |
+
# pass an ExtensionArray, but a different dtype
|
248 |
+
(
|
249 |
+
period_array(["2000", "2001"], freq="D"),
|
250 |
+
"category",
|
251 |
+
pd.Categorical([pd.Period("2000", "D"), pd.Period("2001", "D")]),
|
252 |
+
),
|
253 |
+
],
|
254 |
+
)
|
255 |
+
def test_array(data, dtype, expected):
|
256 |
+
result = pd.array(data, dtype=dtype)
|
257 |
+
tm.assert_equal(result, expected)
|
258 |
+
|
259 |
+
|
260 |
+
def test_array_copy():
|
261 |
+
a = np.array([1, 2])
|
262 |
+
# default is to copy
|
263 |
+
b = pd.array(a, dtype=a.dtype)
|
264 |
+
assert not tm.shares_memory(a, b)
|
265 |
+
|
266 |
+
# copy=True
|
267 |
+
b = pd.array(a, dtype=a.dtype, copy=True)
|
268 |
+
assert not tm.shares_memory(a, b)
|
269 |
+
|
270 |
+
# copy=False
|
271 |
+
b = pd.array(a, dtype=a.dtype, copy=False)
|
272 |
+
assert tm.shares_memory(a, b)
|
273 |
+
|
274 |
+
|
275 |
+
cet = pytz.timezone("CET")
|
276 |
+
|
277 |
+
|
278 |
+
@pytest.mark.parametrize(
|
279 |
+
"data, expected",
|
280 |
+
[
|
281 |
+
# period
|
282 |
+
(
|
283 |
+
[pd.Period("2000", "D"), pd.Period("2001", "D")],
|
284 |
+
period_array(["2000", "2001"], freq="D"),
|
285 |
+
),
|
286 |
+
# interval
|
287 |
+
([pd.Interval(0, 1), pd.Interval(1, 2)], IntervalArray.from_breaks([0, 1, 2])),
|
288 |
+
# datetime
|
289 |
+
(
|
290 |
+
[pd.Timestamp("2000"), pd.Timestamp("2001")],
|
291 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
292 |
+
),
|
293 |
+
(
|
294 |
+
[datetime.datetime(2000, 1, 1), datetime.datetime(2001, 1, 1)],
|
295 |
+
DatetimeArray._from_sequence(["2000", "2001"], dtype="M8[ns]"),
|
296 |
+
),
|
297 |
+
(
|
298 |
+
np.array([1, 2], dtype="M8[ns]"),
|
299 |
+
DatetimeArray._from_sequence(np.array([1, 2], dtype="M8[ns]")),
|
300 |
+
),
|
301 |
+
(
|
302 |
+
np.array([1, 2], dtype="M8[us]"),
|
303 |
+
DatetimeArray._simple_new(
|
304 |
+
np.array([1, 2], dtype="M8[us]"), dtype=np.dtype("M8[us]")
|
305 |
+
),
|
306 |
+
),
|
307 |
+
# datetimetz
|
308 |
+
(
|
309 |
+
[pd.Timestamp("2000", tz="CET"), pd.Timestamp("2001", tz="CET")],
|
310 |
+
DatetimeArray._from_sequence(
|
311 |
+
["2000", "2001"], dtype=pd.DatetimeTZDtype(tz="CET", unit="ns")
|
312 |
+
),
|
313 |
+
),
|
314 |
+
(
|
315 |
+
[
|
316 |
+
datetime.datetime(2000, 1, 1, tzinfo=cet),
|
317 |
+
datetime.datetime(2001, 1, 1, tzinfo=cet),
|
318 |
+
],
|
319 |
+
DatetimeArray._from_sequence(
|
320 |
+
["2000", "2001"], dtype=pd.DatetimeTZDtype(tz=cet, unit="ns")
|
321 |
+
),
|
322 |
+
),
|
323 |
+
# timedelta
|
324 |
+
(
|
325 |
+
[pd.Timedelta("1h"), pd.Timedelta("2h")],
|
326 |
+
TimedeltaArray._from_sequence(["1h", "2h"], dtype="m8[ns]"),
|
327 |
+
),
|
328 |
+
(
|
329 |
+
np.array([1, 2], dtype="m8[ns]"),
|
330 |
+
TimedeltaArray._from_sequence(np.array([1, 2], dtype="m8[ns]")),
|
331 |
+
),
|
332 |
+
(
|
333 |
+
np.array([1, 2], dtype="m8[us]"),
|
334 |
+
TimedeltaArray._from_sequence(np.array([1, 2], dtype="m8[us]")),
|
335 |
+
),
|
336 |
+
# integer
|
337 |
+
([1, 2], IntegerArray._from_sequence([1, 2], dtype="Int64")),
|
338 |
+
([1, None], IntegerArray._from_sequence([1, None], dtype="Int64")),
|
339 |
+
([1, pd.NA], IntegerArray._from_sequence([1, pd.NA], dtype="Int64")),
|
340 |
+
([1, np.nan], IntegerArray._from_sequence([1, np.nan], dtype="Int64")),
|
341 |
+
# float
|
342 |
+
([0.1, 0.2], FloatingArray._from_sequence([0.1, 0.2], dtype="Float64")),
|
343 |
+
([0.1, None], FloatingArray._from_sequence([0.1, pd.NA], dtype="Float64")),
|
344 |
+
([0.1, np.nan], FloatingArray._from_sequence([0.1, pd.NA], dtype="Float64")),
|
345 |
+
([0.1, pd.NA], FloatingArray._from_sequence([0.1, pd.NA], dtype="Float64")),
|
346 |
+
# integer-like float
|
347 |
+
([1.0, 2.0], FloatingArray._from_sequence([1.0, 2.0], dtype="Float64")),
|
348 |
+
([1.0, None], FloatingArray._from_sequence([1.0, pd.NA], dtype="Float64")),
|
349 |
+
([1.0, np.nan], FloatingArray._from_sequence([1.0, pd.NA], dtype="Float64")),
|
350 |
+
([1.0, pd.NA], FloatingArray._from_sequence([1.0, pd.NA], dtype="Float64")),
|
351 |
+
# mixed-integer-float
|
352 |
+
([1, 2.0], FloatingArray._from_sequence([1.0, 2.0], dtype="Float64")),
|
353 |
+
(
|
354 |
+
[1, np.nan, 2.0],
|
355 |
+
FloatingArray._from_sequence([1.0, None, 2.0], dtype="Float64"),
|
356 |
+
),
|
357 |
+
# string
|
358 |
+
(
|
359 |
+
["a", "b"],
|
360 |
+
pd.StringDtype()
|
361 |
+
.construct_array_type()
|
362 |
+
._from_sequence(["a", "b"], dtype=pd.StringDtype()),
|
363 |
+
),
|
364 |
+
(
|
365 |
+
["a", None],
|
366 |
+
pd.StringDtype()
|
367 |
+
.construct_array_type()
|
368 |
+
._from_sequence(["a", None], dtype=pd.StringDtype()),
|
369 |
+
),
|
370 |
+
# Boolean
|
371 |
+
([True, False], BooleanArray._from_sequence([True, False], dtype="boolean")),
|
372 |
+
([True, None], BooleanArray._from_sequence([True, None], dtype="boolean")),
|
373 |
+
],
|
374 |
+
)
|
375 |
+
def test_array_inference(data, expected):
|
376 |
+
result = pd.array(data)
|
377 |
+
tm.assert_equal(result, expected)
|
378 |
+
|
379 |
+
|
380 |
+
@pytest.mark.parametrize(
|
381 |
+
"data",
|
382 |
+
[
|
383 |
+
# mix of frequencies
|
384 |
+
[pd.Period("2000", "D"), pd.Period("2001", "Y")],
|
385 |
+
# mix of closed
|
386 |
+
[pd.Interval(0, 1, closed="left"), pd.Interval(1, 2, closed="right")],
|
387 |
+
# Mix of timezones
|
388 |
+
[pd.Timestamp("2000", tz="CET"), pd.Timestamp("2000", tz="UTC")],
|
389 |
+
# Mix of tz-aware and tz-naive
|
390 |
+
[pd.Timestamp("2000", tz="CET"), pd.Timestamp("2000")],
|
391 |
+
np.array([pd.Timestamp("2000"), pd.Timestamp("2000", tz="CET")]),
|
392 |
+
],
|
393 |
+
)
|
394 |
+
def test_array_inference_fails(data):
|
395 |
+
result = pd.array(data)
|
396 |
+
expected = NumpyExtensionArray(np.array(data, dtype=object))
|
397 |
+
tm.assert_extension_array_equal(result, expected)
|
398 |
+
|
399 |
+
|
400 |
+
@pytest.mark.parametrize("data", [np.array(0)])
|
401 |
+
def test_nd_raises(data):
|
402 |
+
with pytest.raises(ValueError, match="NumpyExtensionArray must be 1-dimensional"):
|
403 |
+
pd.array(data, dtype="int64")
|
404 |
+
|
405 |
+
|
406 |
+
def test_scalar_raises():
|
407 |
+
with pytest.raises(ValueError, match="Cannot pass scalar '1'"):
|
408 |
+
pd.array(1)
|
409 |
+
|
410 |
+
|
411 |
+
def test_dataframe_raises():
|
412 |
+
# GH#51167 don't accidentally cast to StringArray by doing inference on columns
|
413 |
+
df = pd.DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
|
414 |
+
msg = "Cannot pass DataFrame to 'pandas.array'"
|
415 |
+
with pytest.raises(TypeError, match=msg):
|
416 |
+
pd.array(df)
|
417 |
+
|
418 |
+
|
419 |
+
def test_bounds_check():
|
420 |
+
# GH21796
|
421 |
+
with pytest.raises(
|
422 |
+
TypeError, match=r"cannot safely cast non-equivalent int(32|64) to uint16"
|
423 |
+
):
|
424 |
+
pd.array([-1, 2, 3], dtype="UInt16")
|
425 |
+
|
426 |
+
|
427 |
+
# ---------------------------------------------------------------------------
|
428 |
+
# A couple dummy classes to ensure that Series and Indexes are unboxed before
|
429 |
+
# getting to the EA classes.
|
430 |
+
|
431 |
+
|
432 |
+
@register_extension_dtype
|
433 |
+
class DecimalDtype2(DecimalDtype):
|
434 |
+
name = "decimal2"
|
435 |
+
|
436 |
+
@classmethod
|
437 |
+
def construct_array_type(cls):
|
438 |
+
"""
|
439 |
+
Return the array type associated with this dtype.
|
440 |
+
|
441 |
+
Returns
|
442 |
+
-------
|
443 |
+
type
|
444 |
+
"""
|
445 |
+
return DecimalArray2
|
446 |
+
|
447 |
+
|
448 |
+
class DecimalArray2(DecimalArray):
|
449 |
+
@classmethod
|
450 |
+
def _from_sequence(cls, scalars, *, dtype=None, copy=False):
|
451 |
+
if isinstance(scalars, (pd.Series, pd.Index)):
|
452 |
+
raise TypeError("scalars should not be of type pd.Series or pd.Index")
|
453 |
+
|
454 |
+
return super()._from_sequence(scalars, dtype=dtype, copy=copy)
|
455 |
+
|
456 |
+
|
457 |
+
def test_array_unboxes(index_or_series):
|
458 |
+
box = index_or_series
|
459 |
+
|
460 |
+
data = box([decimal.Decimal("1"), decimal.Decimal("2")])
|
461 |
+
dtype = DecimalDtype2()
|
462 |
+
# make sure it works
|
463 |
+
with pytest.raises(
|
464 |
+
TypeError, match="scalars should not be of type pd.Series or pd.Index"
|
465 |
+
):
|
466 |
+
DecimalArray2._from_sequence(data, dtype=dtype)
|
467 |
+
|
468 |
+
result = pd.array(data, dtype="decimal2")
|
469 |
+
expected = DecimalArray2._from_sequence(data.values, dtype=dtype)
|
470 |
+
tm.assert_equal(result, expected)
|
471 |
+
|
472 |
+
|
473 |
+
def test_array_to_numpy_na():
|
474 |
+
# GH#40638
|
475 |
+
arr = pd.array([pd.NA, 1], dtype="string[python]")
|
476 |
+
result = arr.to_numpy(na_value=True, dtype=bool)
|
477 |
+
expected = np.array([True, True])
|
478 |
+
tm.assert_numpy_array_equal(result, expected)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_datetimes.py
ADDED
@@ -0,0 +1,840 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Tests for DatetimeArray
|
3 |
+
"""
|
4 |
+
from __future__ import annotations
|
5 |
+
|
6 |
+
from datetime import timedelta
|
7 |
+
import operator
|
8 |
+
|
9 |
+
try:
|
10 |
+
from zoneinfo import ZoneInfo
|
11 |
+
except ImportError:
|
12 |
+
# Cannot assign to a type
|
13 |
+
ZoneInfo = None # type: ignore[misc, assignment]
|
14 |
+
|
15 |
+
import numpy as np
|
16 |
+
import pytest
|
17 |
+
|
18 |
+
from pandas._libs.tslibs import tz_compare
|
19 |
+
|
20 |
+
from pandas.core.dtypes.dtypes import DatetimeTZDtype
|
21 |
+
|
22 |
+
import pandas as pd
|
23 |
+
import pandas._testing as tm
|
24 |
+
from pandas.core.arrays import (
|
25 |
+
DatetimeArray,
|
26 |
+
TimedeltaArray,
|
27 |
+
)
|
28 |
+
|
29 |
+
|
30 |
+
class TestNonNano:
|
31 |
+
@pytest.fixture(params=["s", "ms", "us"])
|
32 |
+
def unit(self, request):
|
33 |
+
"""Fixture returning parametrized time units"""
|
34 |
+
return request.param
|
35 |
+
|
36 |
+
@pytest.fixture
|
37 |
+
def dtype(self, unit, tz_naive_fixture):
|
38 |
+
tz = tz_naive_fixture
|
39 |
+
if tz is None:
|
40 |
+
return np.dtype(f"datetime64[{unit}]")
|
41 |
+
else:
|
42 |
+
return DatetimeTZDtype(unit=unit, tz=tz)
|
43 |
+
|
44 |
+
@pytest.fixture
|
45 |
+
def dta_dti(self, unit, dtype):
|
46 |
+
tz = getattr(dtype, "tz", None)
|
47 |
+
|
48 |
+
dti = pd.date_range("2016-01-01", periods=55, freq="D", tz=tz)
|
49 |
+
if tz is None:
|
50 |
+
arr = np.asarray(dti).astype(f"M8[{unit}]")
|
51 |
+
else:
|
52 |
+
arr = np.asarray(dti.tz_convert("UTC").tz_localize(None)).astype(
|
53 |
+
f"M8[{unit}]"
|
54 |
+
)
|
55 |
+
|
56 |
+
dta = DatetimeArray._simple_new(arr, dtype=dtype)
|
57 |
+
return dta, dti
|
58 |
+
|
59 |
+
@pytest.fixture
|
60 |
+
def dta(self, dta_dti):
|
61 |
+
dta, dti = dta_dti
|
62 |
+
return dta
|
63 |
+
|
64 |
+
def test_non_nano(self, unit, dtype):
|
65 |
+
arr = np.arange(5, dtype=np.int64).view(f"M8[{unit}]")
|
66 |
+
dta = DatetimeArray._simple_new(arr, dtype=dtype)
|
67 |
+
|
68 |
+
assert dta.dtype == dtype
|
69 |
+
assert dta[0].unit == unit
|
70 |
+
assert tz_compare(dta.tz, dta[0].tz)
|
71 |
+
assert (dta[0] == dta[:1]).all()
|
72 |
+
|
73 |
+
@pytest.mark.parametrize(
|
74 |
+
"field", DatetimeArray._field_ops + DatetimeArray._bool_ops
|
75 |
+
)
|
76 |
+
def test_fields(self, unit, field, dtype, dta_dti):
|
77 |
+
dta, dti = dta_dti
|
78 |
+
|
79 |
+
assert (dti == dta).all()
|
80 |
+
|
81 |
+
res = getattr(dta, field)
|
82 |
+
expected = getattr(dti._data, field)
|
83 |
+
tm.assert_numpy_array_equal(res, expected)
|
84 |
+
|
85 |
+
def test_normalize(self, unit):
|
86 |
+
dti = pd.date_range("2016-01-01 06:00:00", periods=55, freq="D")
|
87 |
+
arr = np.asarray(dti).astype(f"M8[{unit}]")
|
88 |
+
|
89 |
+
dta = DatetimeArray._simple_new(arr, dtype=arr.dtype)
|
90 |
+
|
91 |
+
assert not dta.is_normalized
|
92 |
+
|
93 |
+
# TODO: simplify once we can just .astype to other unit
|
94 |
+
exp = np.asarray(dti.normalize()).astype(f"M8[{unit}]")
|
95 |
+
expected = DatetimeArray._simple_new(exp, dtype=exp.dtype)
|
96 |
+
|
97 |
+
res = dta.normalize()
|
98 |
+
tm.assert_extension_array_equal(res, expected)
|
99 |
+
|
100 |
+
def test_simple_new_requires_match(self, unit):
|
101 |
+
arr = np.arange(5, dtype=np.int64).view(f"M8[{unit}]")
|
102 |
+
dtype = DatetimeTZDtype(unit, "UTC")
|
103 |
+
|
104 |
+
dta = DatetimeArray._simple_new(arr, dtype=dtype)
|
105 |
+
assert dta.dtype == dtype
|
106 |
+
|
107 |
+
wrong = DatetimeTZDtype("ns", "UTC")
|
108 |
+
with pytest.raises(AssertionError, match=""):
|
109 |
+
DatetimeArray._simple_new(arr, dtype=wrong)
|
110 |
+
|
111 |
+
def test_std_non_nano(self, unit):
|
112 |
+
dti = pd.date_range("2016-01-01", periods=55, freq="D")
|
113 |
+
arr = np.asarray(dti).astype(f"M8[{unit}]")
|
114 |
+
|
115 |
+
dta = DatetimeArray._simple_new(arr, dtype=arr.dtype)
|
116 |
+
|
117 |
+
# we should match the nano-reso std, but floored to our reso.
|
118 |
+
res = dta.std()
|
119 |
+
assert res._creso == dta._creso
|
120 |
+
assert res == dti.std().floor(unit)
|
121 |
+
|
122 |
+
@pytest.mark.filterwarnings("ignore:Converting to PeriodArray.*:UserWarning")
|
123 |
+
def test_to_period(self, dta_dti):
|
124 |
+
dta, dti = dta_dti
|
125 |
+
result = dta.to_period("D")
|
126 |
+
expected = dti._data.to_period("D")
|
127 |
+
|
128 |
+
tm.assert_extension_array_equal(result, expected)
|
129 |
+
|
130 |
+
def test_iter(self, dta):
|
131 |
+
res = next(iter(dta))
|
132 |
+
expected = dta[0]
|
133 |
+
|
134 |
+
assert type(res) is pd.Timestamp
|
135 |
+
assert res._value == expected._value
|
136 |
+
assert res._creso == expected._creso
|
137 |
+
assert res == expected
|
138 |
+
|
139 |
+
def test_astype_object(self, dta):
|
140 |
+
result = dta.astype(object)
|
141 |
+
assert all(x._creso == dta._creso for x in result)
|
142 |
+
assert all(x == y for x, y in zip(result, dta))
|
143 |
+
|
144 |
+
def test_to_pydatetime(self, dta_dti):
|
145 |
+
dta, dti = dta_dti
|
146 |
+
|
147 |
+
result = dta.to_pydatetime()
|
148 |
+
expected = dti.to_pydatetime()
|
149 |
+
tm.assert_numpy_array_equal(result, expected)
|
150 |
+
|
151 |
+
@pytest.mark.parametrize("meth", ["time", "timetz", "date"])
|
152 |
+
def test_time_date(self, dta_dti, meth):
|
153 |
+
dta, dti = dta_dti
|
154 |
+
|
155 |
+
result = getattr(dta, meth)
|
156 |
+
expected = getattr(dti, meth)
|
157 |
+
tm.assert_numpy_array_equal(result, expected)
|
158 |
+
|
159 |
+
def test_format_native_types(self, unit, dtype, dta_dti):
|
160 |
+
# In this case we should get the same formatted values with our nano
|
161 |
+
# version dti._data as we do with the non-nano dta
|
162 |
+
dta, dti = dta_dti
|
163 |
+
|
164 |
+
res = dta._format_native_types()
|
165 |
+
exp = dti._data._format_native_types()
|
166 |
+
tm.assert_numpy_array_equal(res, exp)
|
167 |
+
|
168 |
+
def test_repr(self, dta_dti, unit):
|
169 |
+
dta, dti = dta_dti
|
170 |
+
|
171 |
+
assert repr(dta) == repr(dti._data).replace("[ns", f"[{unit}")
|
172 |
+
|
173 |
+
# TODO: tests with td64
|
174 |
+
def test_compare_mismatched_resolutions(self, comparison_op):
|
175 |
+
# comparison that numpy gets wrong bc of silent overflows
|
176 |
+
op = comparison_op
|
177 |
+
|
178 |
+
iinfo = np.iinfo(np.int64)
|
179 |
+
vals = np.array([iinfo.min, iinfo.min + 1, iinfo.max], dtype=np.int64)
|
180 |
+
|
181 |
+
# Construct so that arr2[1] < arr[1] < arr[2] < arr2[2]
|
182 |
+
arr = np.array(vals).view("M8[ns]")
|
183 |
+
arr2 = arr.view("M8[s]")
|
184 |
+
|
185 |
+
left = DatetimeArray._simple_new(arr, dtype=arr.dtype)
|
186 |
+
right = DatetimeArray._simple_new(arr2, dtype=arr2.dtype)
|
187 |
+
|
188 |
+
if comparison_op is operator.eq:
|
189 |
+
expected = np.array([False, False, False])
|
190 |
+
elif comparison_op is operator.ne:
|
191 |
+
expected = np.array([True, True, True])
|
192 |
+
elif comparison_op in [operator.lt, operator.le]:
|
193 |
+
expected = np.array([False, False, True])
|
194 |
+
else:
|
195 |
+
expected = np.array([False, True, False])
|
196 |
+
|
197 |
+
result = op(left, right)
|
198 |
+
tm.assert_numpy_array_equal(result, expected)
|
199 |
+
|
200 |
+
result = op(left[1], right)
|
201 |
+
tm.assert_numpy_array_equal(result, expected)
|
202 |
+
|
203 |
+
if op not in [operator.eq, operator.ne]:
|
204 |
+
# check that numpy still gets this wrong; if it is fixed we may be
|
205 |
+
# able to remove compare_mismatched_resolutions
|
206 |
+
np_res = op(left._ndarray, right._ndarray)
|
207 |
+
tm.assert_numpy_array_equal(np_res[1:], ~expected[1:])
|
208 |
+
|
209 |
+
def test_add_mismatched_reso_doesnt_downcast(self):
|
210 |
+
# https://github.com/pandas-dev/pandas/pull/48748#issuecomment-1260181008
|
211 |
+
td = pd.Timedelta(microseconds=1)
|
212 |
+
dti = pd.date_range("2016-01-01", periods=3) - td
|
213 |
+
dta = dti._data.as_unit("us")
|
214 |
+
|
215 |
+
res = dta + td.as_unit("us")
|
216 |
+
# even though the result is an even number of days
|
217 |
+
# (so we _could_ downcast to unit="s"), we do not.
|
218 |
+
assert res.unit == "us"
|
219 |
+
|
220 |
+
@pytest.mark.parametrize(
|
221 |
+
"scalar",
|
222 |
+
[
|
223 |
+
timedelta(hours=2),
|
224 |
+
pd.Timedelta(hours=2),
|
225 |
+
np.timedelta64(2, "h"),
|
226 |
+
np.timedelta64(2 * 3600 * 1000, "ms"),
|
227 |
+
pd.offsets.Minute(120),
|
228 |
+
pd.offsets.Hour(2),
|
229 |
+
],
|
230 |
+
)
|
231 |
+
def test_add_timedeltalike_scalar_mismatched_reso(self, dta_dti, scalar):
|
232 |
+
dta, dti = dta_dti
|
233 |
+
|
234 |
+
td = pd.Timedelta(scalar)
|
235 |
+
exp_unit = tm.get_finest_unit(dta.unit, td.unit)
|
236 |
+
|
237 |
+
expected = (dti + td)._data.as_unit(exp_unit)
|
238 |
+
result = dta + scalar
|
239 |
+
tm.assert_extension_array_equal(result, expected)
|
240 |
+
|
241 |
+
result = scalar + dta
|
242 |
+
tm.assert_extension_array_equal(result, expected)
|
243 |
+
|
244 |
+
expected = (dti - td)._data.as_unit(exp_unit)
|
245 |
+
result = dta - scalar
|
246 |
+
tm.assert_extension_array_equal(result, expected)
|
247 |
+
|
248 |
+
def test_sub_datetimelike_scalar_mismatch(self):
|
249 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
250 |
+
dta = dti._data.as_unit("us")
|
251 |
+
|
252 |
+
ts = dta[0].as_unit("s")
|
253 |
+
|
254 |
+
result = dta - ts
|
255 |
+
expected = (dti - dti[0])._data.as_unit("us")
|
256 |
+
assert result.dtype == "m8[us]"
|
257 |
+
tm.assert_extension_array_equal(result, expected)
|
258 |
+
|
259 |
+
def test_sub_datetime64_reso_mismatch(self):
|
260 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
261 |
+
left = dti._data.as_unit("s")
|
262 |
+
right = left.as_unit("ms")
|
263 |
+
|
264 |
+
result = left - right
|
265 |
+
exp_values = np.array([0, 0, 0], dtype="m8[ms]")
|
266 |
+
expected = TimedeltaArray._simple_new(
|
267 |
+
exp_values,
|
268 |
+
dtype=exp_values.dtype,
|
269 |
+
)
|
270 |
+
tm.assert_extension_array_equal(result, expected)
|
271 |
+
result2 = right - left
|
272 |
+
tm.assert_extension_array_equal(result2, expected)
|
273 |
+
|
274 |
+
|
275 |
+
class TestDatetimeArrayComparisons:
|
276 |
+
# TODO: merge this into tests/arithmetic/test_datetime64 once it is
|
277 |
+
# sufficiently robust
|
278 |
+
|
279 |
+
def test_cmp_dt64_arraylike_tznaive(self, comparison_op):
|
280 |
+
# arbitrary tz-naive DatetimeIndex
|
281 |
+
op = comparison_op
|
282 |
+
|
283 |
+
dti = pd.date_range("2016-01-1", freq="MS", periods=9, tz=None)
|
284 |
+
arr = dti._data
|
285 |
+
assert arr.freq == dti.freq
|
286 |
+
assert arr.tz == dti.tz
|
287 |
+
|
288 |
+
right = dti
|
289 |
+
|
290 |
+
expected = np.ones(len(arr), dtype=bool)
|
291 |
+
if comparison_op.__name__ in ["ne", "gt", "lt"]:
|
292 |
+
# for these the comparisons should be all-False
|
293 |
+
expected = ~expected
|
294 |
+
|
295 |
+
result = op(arr, arr)
|
296 |
+
tm.assert_numpy_array_equal(result, expected)
|
297 |
+
for other in [
|
298 |
+
right,
|
299 |
+
np.array(right),
|
300 |
+
list(right),
|
301 |
+
tuple(right),
|
302 |
+
right.astype(object),
|
303 |
+
]:
|
304 |
+
result = op(arr, other)
|
305 |
+
tm.assert_numpy_array_equal(result, expected)
|
306 |
+
|
307 |
+
result = op(other, arr)
|
308 |
+
tm.assert_numpy_array_equal(result, expected)
|
309 |
+
|
310 |
+
|
311 |
+
class TestDatetimeArray:
|
312 |
+
def test_astype_ns_to_ms_near_bounds(self):
|
313 |
+
# GH#55979
|
314 |
+
ts = pd.Timestamp("1677-09-21 00:12:43.145225")
|
315 |
+
target = ts.as_unit("ms")
|
316 |
+
|
317 |
+
dta = DatetimeArray._from_sequence([ts], dtype="M8[ns]")
|
318 |
+
assert (dta.view("i8") == ts.as_unit("ns").value).all()
|
319 |
+
|
320 |
+
result = dta.astype("M8[ms]")
|
321 |
+
assert result[0] == target
|
322 |
+
|
323 |
+
expected = DatetimeArray._from_sequence([ts], dtype="M8[ms]")
|
324 |
+
assert (expected.view("i8") == target._value).all()
|
325 |
+
|
326 |
+
tm.assert_datetime_array_equal(result, expected)
|
327 |
+
|
328 |
+
def test_astype_non_nano_tznaive(self):
|
329 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
330 |
+
|
331 |
+
res = dti.astype("M8[s]")
|
332 |
+
assert res.dtype == "M8[s]"
|
333 |
+
|
334 |
+
dta = dti._data
|
335 |
+
res = dta.astype("M8[s]")
|
336 |
+
assert res.dtype == "M8[s]"
|
337 |
+
assert isinstance(res, pd.core.arrays.DatetimeArray) # used to be ndarray
|
338 |
+
|
339 |
+
def test_astype_non_nano_tzaware(self):
|
340 |
+
dti = pd.date_range("2016-01-01", periods=3, tz="UTC")
|
341 |
+
|
342 |
+
res = dti.astype("M8[s, US/Pacific]")
|
343 |
+
assert res.dtype == "M8[s, US/Pacific]"
|
344 |
+
|
345 |
+
dta = dti._data
|
346 |
+
res = dta.astype("M8[s, US/Pacific]")
|
347 |
+
assert res.dtype == "M8[s, US/Pacific]"
|
348 |
+
|
349 |
+
# from non-nano to non-nano, preserving reso
|
350 |
+
res2 = res.astype("M8[s, UTC]")
|
351 |
+
assert res2.dtype == "M8[s, UTC]"
|
352 |
+
assert not tm.shares_memory(res2, res)
|
353 |
+
|
354 |
+
res3 = res.astype("M8[s, UTC]", copy=False)
|
355 |
+
assert res2.dtype == "M8[s, UTC]"
|
356 |
+
assert tm.shares_memory(res3, res)
|
357 |
+
|
358 |
+
def test_astype_to_same(self):
|
359 |
+
arr = DatetimeArray._from_sequence(
|
360 |
+
["2000"], dtype=DatetimeTZDtype(tz="US/Central")
|
361 |
+
)
|
362 |
+
result = arr.astype(DatetimeTZDtype(tz="US/Central"), copy=False)
|
363 |
+
assert result is arr
|
364 |
+
|
365 |
+
@pytest.mark.parametrize("dtype", ["datetime64[ns]", "datetime64[ns, UTC]"])
|
366 |
+
@pytest.mark.parametrize(
|
367 |
+
"other", ["datetime64[ns]", "datetime64[ns, UTC]", "datetime64[ns, CET]"]
|
368 |
+
)
|
369 |
+
def test_astype_copies(self, dtype, other):
|
370 |
+
# https://github.com/pandas-dev/pandas/pull/32490
|
371 |
+
ser = pd.Series([1, 2], dtype=dtype)
|
372 |
+
orig = ser.copy()
|
373 |
+
|
374 |
+
err = False
|
375 |
+
if (dtype == "datetime64[ns]") ^ (other == "datetime64[ns]"):
|
376 |
+
# deprecated in favor of tz_localize
|
377 |
+
err = True
|
378 |
+
|
379 |
+
if err:
|
380 |
+
if dtype == "datetime64[ns]":
|
381 |
+
msg = "Use obj.tz_localize instead or series.dt.tz_localize instead"
|
382 |
+
else:
|
383 |
+
msg = "from timezone-aware dtype to timezone-naive dtype"
|
384 |
+
with pytest.raises(TypeError, match=msg):
|
385 |
+
ser.astype(other)
|
386 |
+
else:
|
387 |
+
t = ser.astype(other)
|
388 |
+
t[:] = pd.NaT
|
389 |
+
tm.assert_series_equal(ser, orig)
|
390 |
+
|
391 |
+
@pytest.mark.parametrize("dtype", [int, np.int32, np.int64, "uint32", "uint64"])
|
392 |
+
def test_astype_int(self, dtype):
|
393 |
+
arr = DatetimeArray._from_sequence(
|
394 |
+
[pd.Timestamp("2000"), pd.Timestamp("2001")], dtype="M8[ns]"
|
395 |
+
)
|
396 |
+
|
397 |
+
if np.dtype(dtype) != np.int64:
|
398 |
+
with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"):
|
399 |
+
arr.astype(dtype)
|
400 |
+
return
|
401 |
+
|
402 |
+
result = arr.astype(dtype)
|
403 |
+
expected = arr._ndarray.view("i8")
|
404 |
+
tm.assert_numpy_array_equal(result, expected)
|
405 |
+
|
406 |
+
def test_astype_to_sparse_dt64(self):
|
407 |
+
# GH#50082
|
408 |
+
dti = pd.date_range("2016-01-01", periods=4)
|
409 |
+
dta = dti._data
|
410 |
+
result = dta.astype("Sparse[datetime64[ns]]")
|
411 |
+
|
412 |
+
assert result.dtype == "Sparse[datetime64[ns]]"
|
413 |
+
assert (result == dta).all()
|
414 |
+
|
415 |
+
def test_tz_setter_raises(self):
|
416 |
+
arr = DatetimeArray._from_sequence(
|
417 |
+
["2000"], dtype=DatetimeTZDtype(tz="US/Central")
|
418 |
+
)
|
419 |
+
with pytest.raises(AttributeError, match="tz_localize"):
|
420 |
+
arr.tz = "UTC"
|
421 |
+
|
422 |
+
def test_setitem_str_impute_tz(self, tz_naive_fixture):
|
423 |
+
# Like for getitem, if we are passed a naive-like string, we impute
|
424 |
+
# our own timezone.
|
425 |
+
tz = tz_naive_fixture
|
426 |
+
|
427 |
+
data = np.array([1, 2, 3], dtype="M8[ns]")
|
428 |
+
dtype = data.dtype if tz is None else DatetimeTZDtype(tz=tz)
|
429 |
+
arr = DatetimeArray._from_sequence(data, dtype=dtype)
|
430 |
+
expected = arr.copy()
|
431 |
+
|
432 |
+
ts = pd.Timestamp("2020-09-08 16:50").tz_localize(tz)
|
433 |
+
setter = str(ts.tz_localize(None))
|
434 |
+
|
435 |
+
# Setting a scalar tznaive string
|
436 |
+
expected[0] = ts
|
437 |
+
arr[0] = setter
|
438 |
+
tm.assert_equal(arr, expected)
|
439 |
+
|
440 |
+
# Setting a listlike of tznaive strings
|
441 |
+
expected[1] = ts
|
442 |
+
arr[:2] = [setter, setter]
|
443 |
+
tm.assert_equal(arr, expected)
|
444 |
+
|
445 |
+
def test_setitem_different_tz_raises(self):
|
446 |
+
# pre-2.0 we required exact tz match, in 2.0 we require only
|
447 |
+
# tzawareness-match
|
448 |
+
data = np.array([1, 2, 3], dtype="M8[ns]")
|
449 |
+
arr = DatetimeArray._from_sequence(
|
450 |
+
data, copy=False, dtype=DatetimeTZDtype(tz="US/Central")
|
451 |
+
)
|
452 |
+
with pytest.raises(TypeError, match="Cannot compare tz-naive and tz-aware"):
|
453 |
+
arr[0] = pd.Timestamp("2000")
|
454 |
+
|
455 |
+
ts = pd.Timestamp("2000", tz="US/Eastern")
|
456 |
+
arr[0] = ts
|
457 |
+
assert arr[0] == ts.tz_convert("US/Central")
|
458 |
+
|
459 |
+
def test_setitem_clears_freq(self):
|
460 |
+
a = pd.date_range("2000", periods=2, freq="D", tz="US/Central")._data
|
461 |
+
a[0] = pd.Timestamp("2000", tz="US/Central")
|
462 |
+
assert a.freq is None
|
463 |
+
|
464 |
+
@pytest.mark.parametrize(
|
465 |
+
"obj",
|
466 |
+
[
|
467 |
+
pd.Timestamp("2021-01-01"),
|
468 |
+
pd.Timestamp("2021-01-01").to_datetime64(),
|
469 |
+
pd.Timestamp("2021-01-01").to_pydatetime(),
|
470 |
+
],
|
471 |
+
)
|
472 |
+
def test_setitem_objects(self, obj):
|
473 |
+
# make sure we accept datetime64 and datetime in addition to Timestamp
|
474 |
+
dti = pd.date_range("2000", periods=2, freq="D")
|
475 |
+
arr = dti._data
|
476 |
+
|
477 |
+
arr[0] = obj
|
478 |
+
assert arr[0] == obj
|
479 |
+
|
480 |
+
def test_repeat_preserves_tz(self):
|
481 |
+
dti = pd.date_range("2000", periods=2, freq="D", tz="US/Central")
|
482 |
+
arr = dti._data
|
483 |
+
|
484 |
+
repeated = arr.repeat([1, 1])
|
485 |
+
|
486 |
+
# preserves tz and values, but not freq
|
487 |
+
expected = DatetimeArray._from_sequence(arr.asi8, dtype=arr.dtype)
|
488 |
+
tm.assert_equal(repeated, expected)
|
489 |
+
|
490 |
+
def test_value_counts_preserves_tz(self):
|
491 |
+
dti = pd.date_range("2000", periods=2, freq="D", tz="US/Central")
|
492 |
+
arr = dti._data.repeat([4, 3])
|
493 |
+
|
494 |
+
result = arr.value_counts()
|
495 |
+
|
496 |
+
# Note: not tm.assert_index_equal, since `freq`s do not match
|
497 |
+
assert result.index.equals(dti)
|
498 |
+
|
499 |
+
arr[-2] = pd.NaT
|
500 |
+
result = arr.value_counts(dropna=False)
|
501 |
+
expected = pd.Series([4, 2, 1], index=[dti[0], dti[1], pd.NaT], name="count")
|
502 |
+
tm.assert_series_equal(result, expected)
|
503 |
+
|
504 |
+
@pytest.mark.parametrize("method", ["pad", "backfill"])
|
505 |
+
def test_fillna_preserves_tz(self, method):
|
506 |
+
dti = pd.date_range("2000-01-01", periods=5, freq="D", tz="US/Central")
|
507 |
+
arr = DatetimeArray._from_sequence(dti, copy=True)
|
508 |
+
arr[2] = pd.NaT
|
509 |
+
|
510 |
+
fill_val = dti[1] if method == "pad" else dti[3]
|
511 |
+
expected = DatetimeArray._from_sequence(
|
512 |
+
[dti[0], dti[1], fill_val, dti[3], dti[4]],
|
513 |
+
dtype=DatetimeTZDtype(tz="US/Central"),
|
514 |
+
)
|
515 |
+
|
516 |
+
result = arr._pad_or_backfill(method=method)
|
517 |
+
tm.assert_extension_array_equal(result, expected)
|
518 |
+
|
519 |
+
# assert that arr and dti were not modified in-place
|
520 |
+
assert arr[2] is pd.NaT
|
521 |
+
assert dti[2] == pd.Timestamp("2000-01-03", tz="US/Central")
|
522 |
+
|
523 |
+
def test_fillna_2d(self):
|
524 |
+
dti = pd.date_range("2016-01-01", periods=6, tz="US/Pacific")
|
525 |
+
dta = dti._data.reshape(3, 2).copy()
|
526 |
+
dta[0, 1] = pd.NaT
|
527 |
+
dta[1, 0] = pd.NaT
|
528 |
+
|
529 |
+
res1 = dta._pad_or_backfill(method="pad")
|
530 |
+
expected1 = dta.copy()
|
531 |
+
expected1[1, 0] = dta[0, 0]
|
532 |
+
tm.assert_extension_array_equal(res1, expected1)
|
533 |
+
|
534 |
+
res2 = dta._pad_or_backfill(method="backfill")
|
535 |
+
expected2 = dta.copy()
|
536 |
+
expected2 = dta.copy()
|
537 |
+
expected2[1, 0] = dta[2, 0]
|
538 |
+
expected2[0, 1] = dta[1, 1]
|
539 |
+
tm.assert_extension_array_equal(res2, expected2)
|
540 |
+
|
541 |
+
# with different ordering for underlying ndarray; behavior should
|
542 |
+
# be unchanged
|
543 |
+
dta2 = dta._from_backing_data(dta._ndarray.copy(order="F"))
|
544 |
+
assert dta2._ndarray.flags["F_CONTIGUOUS"]
|
545 |
+
assert not dta2._ndarray.flags["C_CONTIGUOUS"]
|
546 |
+
tm.assert_extension_array_equal(dta, dta2)
|
547 |
+
|
548 |
+
res3 = dta2._pad_or_backfill(method="pad")
|
549 |
+
tm.assert_extension_array_equal(res3, expected1)
|
550 |
+
|
551 |
+
res4 = dta2._pad_or_backfill(method="backfill")
|
552 |
+
tm.assert_extension_array_equal(res4, expected2)
|
553 |
+
|
554 |
+
# test the DataFrame method while we're here
|
555 |
+
df = pd.DataFrame(dta)
|
556 |
+
res = df.ffill()
|
557 |
+
expected = pd.DataFrame(expected1)
|
558 |
+
tm.assert_frame_equal(res, expected)
|
559 |
+
|
560 |
+
res = df.bfill()
|
561 |
+
expected = pd.DataFrame(expected2)
|
562 |
+
tm.assert_frame_equal(res, expected)
|
563 |
+
|
564 |
+
def test_array_interface_tz(self):
|
565 |
+
tz = "US/Central"
|
566 |
+
data = pd.date_range("2017", periods=2, tz=tz)._data
|
567 |
+
result = np.asarray(data)
|
568 |
+
|
569 |
+
expected = np.array(
|
570 |
+
[
|
571 |
+
pd.Timestamp("2017-01-01T00:00:00", tz=tz),
|
572 |
+
pd.Timestamp("2017-01-02T00:00:00", tz=tz),
|
573 |
+
],
|
574 |
+
dtype=object,
|
575 |
+
)
|
576 |
+
tm.assert_numpy_array_equal(result, expected)
|
577 |
+
|
578 |
+
result = np.asarray(data, dtype=object)
|
579 |
+
tm.assert_numpy_array_equal(result, expected)
|
580 |
+
|
581 |
+
result = np.asarray(data, dtype="M8[ns]")
|
582 |
+
|
583 |
+
expected = np.array(
|
584 |
+
["2017-01-01T06:00:00", "2017-01-02T06:00:00"], dtype="M8[ns]"
|
585 |
+
)
|
586 |
+
tm.assert_numpy_array_equal(result, expected)
|
587 |
+
|
588 |
+
def test_array_interface(self):
|
589 |
+
data = pd.date_range("2017", periods=2)._data
|
590 |
+
expected = np.array(
|
591 |
+
["2017-01-01T00:00:00", "2017-01-02T00:00:00"], dtype="datetime64[ns]"
|
592 |
+
)
|
593 |
+
|
594 |
+
result = np.asarray(data)
|
595 |
+
tm.assert_numpy_array_equal(result, expected)
|
596 |
+
|
597 |
+
result = np.asarray(data, dtype=object)
|
598 |
+
expected = np.array(
|
599 |
+
[pd.Timestamp("2017-01-01T00:00:00"), pd.Timestamp("2017-01-02T00:00:00")],
|
600 |
+
dtype=object,
|
601 |
+
)
|
602 |
+
tm.assert_numpy_array_equal(result, expected)
|
603 |
+
|
604 |
+
@pytest.mark.parametrize("index", [True, False])
|
605 |
+
def test_searchsorted_different_tz(self, index):
|
606 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
607 |
+
arr = pd.DatetimeIndex(data, freq="D")._data.tz_localize("Asia/Tokyo")
|
608 |
+
if index:
|
609 |
+
arr = pd.Index(arr)
|
610 |
+
|
611 |
+
expected = arr.searchsorted(arr[2])
|
612 |
+
result = arr.searchsorted(arr[2].tz_convert("UTC"))
|
613 |
+
assert result == expected
|
614 |
+
|
615 |
+
expected = arr.searchsorted(arr[2:6])
|
616 |
+
result = arr.searchsorted(arr[2:6].tz_convert("UTC"))
|
617 |
+
tm.assert_equal(result, expected)
|
618 |
+
|
619 |
+
@pytest.mark.parametrize("index", [True, False])
|
620 |
+
def test_searchsorted_tzawareness_compat(self, index):
|
621 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
622 |
+
arr = pd.DatetimeIndex(data, freq="D")._data
|
623 |
+
if index:
|
624 |
+
arr = pd.Index(arr)
|
625 |
+
|
626 |
+
mismatch = arr.tz_localize("Asia/Tokyo")
|
627 |
+
|
628 |
+
msg = "Cannot compare tz-naive and tz-aware datetime-like objects"
|
629 |
+
with pytest.raises(TypeError, match=msg):
|
630 |
+
arr.searchsorted(mismatch[0])
|
631 |
+
with pytest.raises(TypeError, match=msg):
|
632 |
+
arr.searchsorted(mismatch)
|
633 |
+
|
634 |
+
with pytest.raises(TypeError, match=msg):
|
635 |
+
mismatch.searchsorted(arr[0])
|
636 |
+
with pytest.raises(TypeError, match=msg):
|
637 |
+
mismatch.searchsorted(arr)
|
638 |
+
|
639 |
+
@pytest.mark.parametrize(
|
640 |
+
"other",
|
641 |
+
[
|
642 |
+
1,
|
643 |
+
np.int64(1),
|
644 |
+
1.0,
|
645 |
+
np.timedelta64("NaT"),
|
646 |
+
pd.Timedelta(days=2),
|
647 |
+
"invalid",
|
648 |
+
np.arange(10, dtype="i8") * 24 * 3600 * 10**9,
|
649 |
+
np.arange(10).view("timedelta64[ns]") * 24 * 3600 * 10**9,
|
650 |
+
pd.Timestamp("2021-01-01").to_period("D"),
|
651 |
+
],
|
652 |
+
)
|
653 |
+
@pytest.mark.parametrize("index", [True, False])
|
654 |
+
def test_searchsorted_invalid_types(self, other, index):
|
655 |
+
data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
|
656 |
+
arr = pd.DatetimeIndex(data, freq="D")._data
|
657 |
+
if index:
|
658 |
+
arr = pd.Index(arr)
|
659 |
+
|
660 |
+
msg = "|".join(
|
661 |
+
[
|
662 |
+
"searchsorted requires compatible dtype or scalar",
|
663 |
+
"value should be a 'Timestamp', 'NaT', or array of those. Got",
|
664 |
+
]
|
665 |
+
)
|
666 |
+
with pytest.raises(TypeError, match=msg):
|
667 |
+
arr.searchsorted(other)
|
668 |
+
|
669 |
+
def test_shift_fill_value(self):
|
670 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
671 |
+
|
672 |
+
dta = dti._data
|
673 |
+
expected = DatetimeArray._from_sequence(np.roll(dta._ndarray, 1))
|
674 |
+
|
675 |
+
fv = dta[-1]
|
676 |
+
for fill_value in [fv, fv.to_pydatetime(), fv.to_datetime64()]:
|
677 |
+
result = dta.shift(1, fill_value=fill_value)
|
678 |
+
tm.assert_datetime_array_equal(result, expected)
|
679 |
+
|
680 |
+
dta = dta.tz_localize("UTC")
|
681 |
+
expected = expected.tz_localize("UTC")
|
682 |
+
fv = dta[-1]
|
683 |
+
for fill_value in [fv, fv.to_pydatetime()]:
|
684 |
+
result = dta.shift(1, fill_value=fill_value)
|
685 |
+
tm.assert_datetime_array_equal(result, expected)
|
686 |
+
|
687 |
+
def test_shift_value_tzawareness_mismatch(self):
|
688 |
+
dti = pd.date_range("2016-01-01", periods=3)
|
689 |
+
|
690 |
+
dta = dti._data
|
691 |
+
|
692 |
+
fv = dta[-1].tz_localize("UTC")
|
693 |
+
for invalid in [fv, fv.to_pydatetime()]:
|
694 |
+
with pytest.raises(TypeError, match="Cannot compare"):
|
695 |
+
dta.shift(1, fill_value=invalid)
|
696 |
+
|
697 |
+
dta = dta.tz_localize("UTC")
|
698 |
+
fv = dta[-1].tz_localize(None)
|
699 |
+
for invalid in [fv, fv.to_pydatetime(), fv.to_datetime64()]:
|
700 |
+
with pytest.raises(TypeError, match="Cannot compare"):
|
701 |
+
dta.shift(1, fill_value=invalid)
|
702 |
+
|
703 |
+
def test_shift_requires_tzmatch(self):
|
704 |
+
# pre-2.0 we required exact tz match, in 2.0 we require just
|
705 |
+
# matching tzawareness
|
706 |
+
dti = pd.date_range("2016-01-01", periods=3, tz="UTC")
|
707 |
+
dta = dti._data
|
708 |
+
|
709 |
+
fill_value = pd.Timestamp("2020-10-18 18:44", tz="US/Pacific")
|
710 |
+
|
711 |
+
result = dta.shift(1, fill_value=fill_value)
|
712 |
+
expected = dta.shift(1, fill_value=fill_value.tz_convert("UTC"))
|
713 |
+
tm.assert_equal(result, expected)
|
714 |
+
|
715 |
+
def test_tz_localize_t2d(self):
|
716 |
+
dti = pd.date_range("1994-05-12", periods=12, tz="US/Pacific")
|
717 |
+
dta = dti._data.reshape(3, 4)
|
718 |
+
result = dta.tz_localize(None)
|
719 |
+
|
720 |
+
expected = dta.ravel().tz_localize(None).reshape(dta.shape)
|
721 |
+
tm.assert_datetime_array_equal(result, expected)
|
722 |
+
|
723 |
+
roundtrip = expected.tz_localize("US/Pacific")
|
724 |
+
tm.assert_datetime_array_equal(roundtrip, dta)
|
725 |
+
|
726 |
+
easts = ["US/Eastern", "dateutil/US/Eastern"]
|
727 |
+
if ZoneInfo is not None:
|
728 |
+
try:
|
729 |
+
tz = ZoneInfo("US/Eastern")
|
730 |
+
except KeyError:
|
731 |
+
# no tzdata
|
732 |
+
pass
|
733 |
+
else:
|
734 |
+
# Argument 1 to "append" of "list" has incompatible type "ZoneInfo";
|
735 |
+
# expected "str"
|
736 |
+
easts.append(tz) # type: ignore[arg-type]
|
737 |
+
|
738 |
+
@pytest.mark.parametrize("tz", easts)
|
739 |
+
def test_iter_zoneinfo_fold(self, tz):
|
740 |
+
# GH#49684
|
741 |
+
utc_vals = np.array(
|
742 |
+
[1320552000, 1320555600, 1320559200, 1320562800], dtype=np.int64
|
743 |
+
)
|
744 |
+
utc_vals *= 1_000_000_000
|
745 |
+
|
746 |
+
dta = DatetimeArray._from_sequence(utc_vals).tz_localize("UTC").tz_convert(tz)
|
747 |
+
|
748 |
+
left = dta[2]
|
749 |
+
right = list(dta)[2]
|
750 |
+
assert str(left) == str(right)
|
751 |
+
# previously there was a bug where with non-pytz right would be
|
752 |
+
# Timestamp('2011-11-06 01:00:00-0400', tz='US/Eastern')
|
753 |
+
# while left would be
|
754 |
+
# Timestamp('2011-11-06 01:00:00-0500', tz='US/Eastern')
|
755 |
+
# The .value's would match (so they would compare as equal),
|
756 |
+
# but the folds would not
|
757 |
+
assert left.utcoffset() == right.utcoffset()
|
758 |
+
|
759 |
+
# The same bug in ints_to_pydatetime affected .astype, so we test
|
760 |
+
# that here.
|
761 |
+
right2 = dta.astype(object)[2]
|
762 |
+
assert str(left) == str(right2)
|
763 |
+
assert left.utcoffset() == right2.utcoffset()
|
764 |
+
|
765 |
+
@pytest.mark.parametrize(
|
766 |
+
"freq, freq_depr",
|
767 |
+
[
|
768 |
+
("2ME", "2M"),
|
769 |
+
("2SME", "2SM"),
|
770 |
+
("2SME", "2sm"),
|
771 |
+
("2QE", "2Q"),
|
772 |
+
("2QE-SEP", "2Q-SEP"),
|
773 |
+
("1YE", "1Y"),
|
774 |
+
("2YE-MAR", "2Y-MAR"),
|
775 |
+
("1YE", "1A"),
|
776 |
+
("2YE-MAR", "2A-MAR"),
|
777 |
+
("2ME", "2m"),
|
778 |
+
("2QE-SEP", "2q-sep"),
|
779 |
+
("2YE-MAR", "2a-mar"),
|
780 |
+
("2YE", "2y"),
|
781 |
+
],
|
782 |
+
)
|
783 |
+
def test_date_range_frequency_M_Q_Y_A_deprecated(self, freq, freq_depr):
|
784 |
+
# GH#9586, GH#54275
|
785 |
+
depr_msg = f"'{freq_depr[1:]}' is deprecated and will be removed "
|
786 |
+
f"in a future version, please use '{freq[1:]}' instead."
|
787 |
+
|
788 |
+
expected = pd.date_range("1/1/2000", periods=4, freq=freq)
|
789 |
+
with tm.assert_produces_warning(FutureWarning, match=depr_msg):
|
790 |
+
result = pd.date_range("1/1/2000", periods=4, freq=freq_depr)
|
791 |
+
tm.assert_index_equal(result, expected)
|
792 |
+
|
793 |
+
@pytest.mark.parametrize("freq_depr", ["2H", "2CBH", "2MIN", "2S", "2mS", "2Us"])
|
794 |
+
def test_date_range_uppercase_frequency_deprecated(self, freq_depr):
|
795 |
+
# GH#9586, GH#54939
|
796 |
+
depr_msg = f"'{freq_depr[1:]}' is deprecated and will be removed in a "
|
797 |
+
f"future version. Please use '{freq_depr.lower()[1:]}' instead."
|
798 |
+
|
799 |
+
expected = pd.date_range("1/1/2000", periods=4, freq=freq_depr.lower())
|
800 |
+
with tm.assert_produces_warning(FutureWarning, match=depr_msg):
|
801 |
+
result = pd.date_range("1/1/2000", periods=4, freq=freq_depr)
|
802 |
+
tm.assert_index_equal(result, expected)
|
803 |
+
|
804 |
+
@pytest.mark.parametrize(
|
805 |
+
"freq_depr",
|
806 |
+
[
|
807 |
+
"2ye-mar",
|
808 |
+
"2ys",
|
809 |
+
"2qe",
|
810 |
+
"2qs-feb",
|
811 |
+
"2bqs",
|
812 |
+
"2sms",
|
813 |
+
"2bms",
|
814 |
+
"2cbme",
|
815 |
+
"2me",
|
816 |
+
"2w",
|
817 |
+
],
|
818 |
+
)
|
819 |
+
def test_date_range_lowercase_frequency_deprecated(self, freq_depr):
|
820 |
+
# GH#9586, GH#54939
|
821 |
+
depr_msg = f"'{freq_depr[1:]}' is deprecated and will be removed in a "
|
822 |
+
f"future version, please use '{freq_depr.upper()[1:]}' instead."
|
823 |
+
|
824 |
+
expected = pd.date_range("1/1/2000", periods=4, freq=freq_depr.upper())
|
825 |
+
with tm.assert_produces_warning(FutureWarning, match=depr_msg):
|
826 |
+
result = pd.date_range("1/1/2000", periods=4, freq=freq_depr)
|
827 |
+
tm.assert_index_equal(result, expected)
|
828 |
+
|
829 |
+
|
830 |
+
def test_factorize_sort_without_freq():
|
831 |
+
dta = DatetimeArray._from_sequence([0, 2, 1], dtype="M8[ns]")
|
832 |
+
|
833 |
+
msg = r"call pd.factorize\(obj, sort=True\) instead"
|
834 |
+
with pytest.raises(NotImplementedError, match=msg):
|
835 |
+
dta.factorize(sort=True)
|
836 |
+
|
837 |
+
# Do TimedeltaArray while we're here
|
838 |
+
tda = dta - dta[0]
|
839 |
+
with pytest.raises(NotImplementedError, match=msg):
|
840 |
+
tda.factorize(sort=True)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_ndarray_backed.py
ADDED
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Tests for subclasses of NDArrayBackedExtensionArray
|
3 |
+
"""
|
4 |
+
import numpy as np
|
5 |
+
|
6 |
+
from pandas import (
|
7 |
+
CategoricalIndex,
|
8 |
+
date_range,
|
9 |
+
)
|
10 |
+
from pandas.core.arrays import (
|
11 |
+
Categorical,
|
12 |
+
DatetimeArray,
|
13 |
+
NumpyExtensionArray,
|
14 |
+
TimedeltaArray,
|
15 |
+
)
|
16 |
+
|
17 |
+
|
18 |
+
class TestEmpty:
|
19 |
+
def test_empty_categorical(self):
|
20 |
+
ci = CategoricalIndex(["a", "b", "c"], ordered=True)
|
21 |
+
dtype = ci.dtype
|
22 |
+
|
23 |
+
# case with int8 codes
|
24 |
+
shape = (4,)
|
25 |
+
result = Categorical._empty(shape, dtype=dtype)
|
26 |
+
assert isinstance(result, Categorical)
|
27 |
+
assert result.shape == shape
|
28 |
+
assert result._ndarray.dtype == np.int8
|
29 |
+
|
30 |
+
# case where repr would segfault if we didn't override base implementation
|
31 |
+
result = Categorical._empty((4096,), dtype=dtype)
|
32 |
+
assert isinstance(result, Categorical)
|
33 |
+
assert result.shape == (4096,)
|
34 |
+
assert result._ndarray.dtype == np.int8
|
35 |
+
repr(result)
|
36 |
+
|
37 |
+
# case with int16 codes
|
38 |
+
ci = CategoricalIndex(list(range(512)) * 4, ordered=False)
|
39 |
+
dtype = ci.dtype
|
40 |
+
result = Categorical._empty(shape, dtype=dtype)
|
41 |
+
assert isinstance(result, Categorical)
|
42 |
+
assert result.shape == shape
|
43 |
+
assert result._ndarray.dtype == np.int16
|
44 |
+
|
45 |
+
def test_empty_dt64tz(self):
|
46 |
+
dti = date_range("2016-01-01", periods=2, tz="Asia/Tokyo")
|
47 |
+
dtype = dti.dtype
|
48 |
+
|
49 |
+
shape = (0,)
|
50 |
+
result = DatetimeArray._empty(shape, dtype=dtype)
|
51 |
+
assert result.dtype == dtype
|
52 |
+
assert isinstance(result, DatetimeArray)
|
53 |
+
assert result.shape == shape
|
54 |
+
|
55 |
+
def test_empty_dt64(self):
|
56 |
+
shape = (3, 9)
|
57 |
+
result = DatetimeArray._empty(shape, dtype="datetime64[ns]")
|
58 |
+
assert isinstance(result, DatetimeArray)
|
59 |
+
assert result.shape == shape
|
60 |
+
|
61 |
+
def test_empty_td64(self):
|
62 |
+
shape = (3, 9)
|
63 |
+
result = TimedeltaArray._empty(shape, dtype="m8[ns]")
|
64 |
+
assert isinstance(result, TimedeltaArray)
|
65 |
+
assert result.shape == shape
|
66 |
+
|
67 |
+
def test_empty_pandas_array(self):
|
68 |
+
arr = NumpyExtensionArray(np.array([1, 2]))
|
69 |
+
dtype = arr.dtype
|
70 |
+
|
71 |
+
shape = (3, 9)
|
72 |
+
result = NumpyExtensionArray._empty(shape, dtype=dtype)
|
73 |
+
assert isinstance(result, NumpyExtensionArray)
|
74 |
+
assert result.dtype == dtype
|
75 |
+
assert result.shape == shape
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_period.py
ADDED
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas._libs.tslibs import iNaT
|
5 |
+
from pandas._libs.tslibs.period import IncompatibleFrequency
|
6 |
+
|
7 |
+
from pandas.core.dtypes.base import _registry as registry
|
8 |
+
from pandas.core.dtypes.dtypes import PeriodDtype
|
9 |
+
|
10 |
+
import pandas as pd
|
11 |
+
import pandas._testing as tm
|
12 |
+
from pandas.core.arrays import PeriodArray
|
13 |
+
|
14 |
+
# ----------------------------------------------------------------------------
|
15 |
+
# Dtype
|
16 |
+
|
17 |
+
|
18 |
+
def test_registered():
    """PeriodDtype is discoverable through the dtype registry."""
    assert PeriodDtype in registry.dtypes
    found = registry.find("Period[D]")
    assert found == PeriodDtype("D")
|
23 |
+
|
24 |
+
|
25 |
+
# ----------------------------------------------------------------------------
|
26 |
+
# period_array
|
27 |
+
|
28 |
+
|
29 |
+
def test_asi8():
|
30 |
+
result = PeriodArray._from_sequence(["2000", "2001", None], dtype="period[D]").asi8
|
31 |
+
expected = np.array([10957, 11323, iNaT])
|
32 |
+
tm.assert_numpy_array_equal(result, expected)
|
33 |
+
|
34 |
+
|
35 |
+
def test_take_raises():
|
36 |
+
arr = PeriodArray._from_sequence(["2000", "2001"], dtype="period[D]")
|
37 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
38 |
+
arr.take([0, -1], allow_fill=True, fill_value=pd.Period("2000", freq="W"))
|
39 |
+
|
40 |
+
msg = "value should be a 'Period' or 'NaT'. Got 'str' instead"
|
41 |
+
with pytest.raises(TypeError, match=msg):
|
42 |
+
arr.take([0, -1], allow_fill=True, fill_value="foo")
|
43 |
+
|
44 |
+
|
45 |
+
def test_fillna_raises():
|
46 |
+
arr = PeriodArray._from_sequence(["2000", "2001", "2002"], dtype="period[D]")
|
47 |
+
with pytest.raises(ValueError, match="Length"):
|
48 |
+
arr.fillna(arr[:2])
|
49 |
+
|
50 |
+
|
51 |
+
def test_fillna_copies():
|
52 |
+
arr = PeriodArray._from_sequence(["2000", "2001", "2002"], dtype="period[D]")
|
53 |
+
result = arr.fillna(pd.Period("2000", "D"))
|
54 |
+
assert result is not arr
|
55 |
+
|
56 |
+
|
57 |
+
# ----------------------------------------------------------------------------
|
58 |
+
# setitem
|
59 |
+
|
60 |
+
|
61 |
+
@pytest.mark.parametrize(
    "key, value, expected",
    [
        ([0], pd.Period("2000", "D"), [10957, 1, 2]),
        ([0], None, [iNaT, 1, 2]),
        ([0], np.nan, [iNaT, 1, 2]),
        ([0, 1, 2], pd.Period("2000", "D"), [10957] * 3),
        (
            [0, 1, 2],
            [pd.Period("2000", "D"), pd.Period("2001", "D"), pd.Period("2002", "D")],
            [10957, 11323, 11688],
        ),
    ],
)
def test_setitem(key, value, expected):
    """__setitem__ accepts Periods, NaT-likes, and listlikes of Periods."""
    target = PeriodArray(np.arange(3), dtype="period[D]")
    target[key] = value
    tm.assert_period_array_equal(target, PeriodArray(expected, dtype="period[D]"))
|
80 |
+
|
81 |
+
|
82 |
+
def test_setitem_raises_incompatible_freq():
|
83 |
+
arr = PeriodArray(np.arange(3), dtype="period[D]")
|
84 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
85 |
+
arr[0] = pd.Period("2000", freq="Y")
|
86 |
+
|
87 |
+
other = PeriodArray._from_sequence(["2000", "2001"], dtype="period[Y]")
|
88 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
89 |
+
arr[[0, 1]] = other
|
90 |
+
|
91 |
+
|
92 |
+
def test_setitem_raises_length():
    """__setitem__ rejects values whose length does not match the key."""
    target = PeriodArray(np.arange(3), dtype="period[D]")
    with pytest.raises(ValueError, match="length"):
        target[[0, 1]] = [pd.Period("2000", freq="D")]
|
96 |
+
|
97 |
+
|
98 |
+
def test_setitem_raises_type():
    """__setitem__ rejects plain integers as Period values."""
    target = PeriodArray(np.arange(3), dtype="period[D]")
    with pytest.raises(TypeError, match="int"):
        target[0] = 1
|
102 |
+
|
103 |
+
|
104 |
+
# ----------------------------------------------------------------------------
|
105 |
+
# Ops
|
106 |
+
|
107 |
+
|
108 |
+
def test_sub_period():
|
109 |
+
arr = PeriodArray._from_sequence(["2000", "2001"], dtype="period[D]")
|
110 |
+
other = pd.Period("2000", freq="M")
|
111 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
112 |
+
arr - other
|
113 |
+
|
114 |
+
|
115 |
+
def test_sub_period_overflow():
    # GH#47538: ordinal subtraction that overflows int64 must raise,
    # not silently wrap around.
    dti = pd.date_range("1677-09-22", periods=2, freq="D")
    pi = dti.to_period("ns")

    far = pd.Period._from_ordinal(10**14, pi.freq)

    msg = "Overflow in int64 addition"
    with pytest.raises(OverflowError, match=msg):
        pi - far
    with pytest.raises(OverflowError, match=msg):
        far - pi
|
127 |
+
|
128 |
+
|
129 |
+
# ----------------------------------------------------------------------------
|
130 |
+
# Methods
|
131 |
+
|
132 |
+
|
133 |
+
@pytest.mark.parametrize(
|
134 |
+
"other",
|
135 |
+
[
|
136 |
+
pd.Period("2000", freq="h"),
|
137 |
+
PeriodArray._from_sequence(["2000", "2001", "2000"], dtype="period[h]"),
|
138 |
+
],
|
139 |
+
)
|
140 |
+
def test_where_different_freq_raises(other):
|
141 |
+
# GH#45768 The PeriodArray method raises, the Series method coerces
|
142 |
+
ser = pd.Series(
|
143 |
+
PeriodArray._from_sequence(["2000", "2001", "2002"], dtype="period[D]")
|
144 |
+
)
|
145 |
+
cond = np.array([True, False, True])
|
146 |
+
|
147 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
148 |
+
ser.array._where(cond, other)
|
149 |
+
|
150 |
+
res = ser.where(cond, other)
|
151 |
+
expected = ser.astype(object).where(cond, other)
|
152 |
+
tm.assert_series_equal(res, expected)
|
153 |
+
|
154 |
+
|
155 |
+
# ----------------------------------------------------------------------------
|
156 |
+
# Printing
|
157 |
+
|
158 |
+
|
159 |
+
def test_repr_small():
|
160 |
+
arr = PeriodArray._from_sequence(["2000", "2001"], dtype="period[D]")
|
161 |
+
result = str(arr)
|
162 |
+
expected = (
|
163 |
+
"<PeriodArray>\n['2000-01-01', '2001-01-01']\nLength: 2, dtype: period[D]"
|
164 |
+
)
|
165 |
+
assert result == expected
|
166 |
+
|
167 |
+
|
168 |
+
def test_repr_large():
|
169 |
+
arr = PeriodArray._from_sequence(["2000", "2001"] * 500, dtype="period[D]")
|
170 |
+
result = str(arr)
|
171 |
+
expected = (
|
172 |
+
"<PeriodArray>\n"
|
173 |
+
"['2000-01-01', '2001-01-01', '2000-01-01', '2001-01-01', "
|
174 |
+
"'2000-01-01',\n"
|
175 |
+
" '2001-01-01', '2000-01-01', '2001-01-01', '2000-01-01', "
|
176 |
+
"'2001-01-01',\n"
|
177 |
+
" ...\n"
|
178 |
+
" '2000-01-01', '2001-01-01', '2000-01-01', '2001-01-01', "
|
179 |
+
"'2000-01-01',\n"
|
180 |
+
" '2001-01-01', '2000-01-01', '2001-01-01', '2000-01-01', "
|
181 |
+
"'2001-01-01']\n"
|
182 |
+
"Length: 1000, dtype: period[D]"
|
183 |
+
)
|
184 |
+
assert result == expected
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/test_timedeltas.py
ADDED
@@ -0,0 +1,313 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from datetime import timedelta
|
2 |
+
|
3 |
+
import numpy as np
|
4 |
+
import pytest
|
5 |
+
|
6 |
+
import pandas as pd
|
7 |
+
from pandas import Timedelta
|
8 |
+
import pandas._testing as tm
|
9 |
+
from pandas.core.arrays import (
|
10 |
+
DatetimeArray,
|
11 |
+
TimedeltaArray,
|
12 |
+
)
|
13 |
+
|
14 |
+
|
15 |
+
class TestNonNano:
|
16 |
+
@pytest.fixture(params=["s", "ms", "us"])
|
17 |
+
def unit(self, request):
|
18 |
+
return request.param
|
19 |
+
|
20 |
+
@pytest.fixture
|
21 |
+
def tda(self, unit):
|
22 |
+
arr = np.arange(5, dtype=np.int64).view(f"m8[{unit}]")
|
23 |
+
return TimedeltaArray._simple_new(arr, dtype=arr.dtype)
|
24 |
+
|
25 |
+
def test_non_nano(self, unit):
|
26 |
+
arr = np.arange(5, dtype=np.int64).view(f"m8[{unit}]")
|
27 |
+
tda = TimedeltaArray._simple_new(arr, dtype=arr.dtype)
|
28 |
+
|
29 |
+
assert tda.dtype == arr.dtype
|
30 |
+
assert tda[0].unit == unit
|
31 |
+
|
32 |
+
def test_as_unit_raises(self, tda):
|
33 |
+
# GH#50616
|
34 |
+
with pytest.raises(ValueError, match="Supported units"):
|
35 |
+
tda.as_unit("D")
|
36 |
+
|
37 |
+
tdi = pd.Index(tda)
|
38 |
+
with pytest.raises(ValueError, match="Supported units"):
|
39 |
+
tdi.as_unit("D")
|
40 |
+
|
41 |
+
@pytest.mark.parametrize("field", TimedeltaArray._field_ops)
|
42 |
+
def test_fields(self, tda, field):
|
43 |
+
as_nano = tda._ndarray.astype("m8[ns]")
|
44 |
+
tda_nano = TimedeltaArray._simple_new(as_nano, dtype=as_nano.dtype)
|
45 |
+
|
46 |
+
result = getattr(tda, field)
|
47 |
+
expected = getattr(tda_nano, field)
|
48 |
+
tm.assert_numpy_array_equal(result, expected)
|
49 |
+
|
50 |
+
def test_to_pytimedelta(self, tda):
|
51 |
+
as_nano = tda._ndarray.astype("m8[ns]")
|
52 |
+
tda_nano = TimedeltaArray._simple_new(as_nano, dtype=as_nano.dtype)
|
53 |
+
|
54 |
+
result = tda.to_pytimedelta()
|
55 |
+
expected = tda_nano.to_pytimedelta()
|
56 |
+
tm.assert_numpy_array_equal(result, expected)
|
57 |
+
|
58 |
+
def test_total_seconds(self, unit, tda):
|
59 |
+
as_nano = tda._ndarray.astype("m8[ns]")
|
60 |
+
tda_nano = TimedeltaArray._simple_new(as_nano, dtype=as_nano.dtype)
|
61 |
+
|
62 |
+
result = tda.total_seconds()
|
63 |
+
expected = tda_nano.total_seconds()
|
64 |
+
tm.assert_numpy_array_equal(result, expected)
|
65 |
+
|
66 |
+
def test_timedelta_array_total_seconds(self):
|
67 |
+
# GH34290
|
68 |
+
expected = Timedelta("2 min").total_seconds()
|
69 |
+
|
70 |
+
result = pd.array([Timedelta("2 min")]).total_seconds()[0]
|
71 |
+
assert result == expected
|
72 |
+
|
73 |
+
def test_total_seconds_nanoseconds(self):
|
74 |
+
# issue #48521
|
75 |
+
start_time = pd.Series(["2145-11-02 06:00:00"]).astype("datetime64[ns]")
|
76 |
+
end_time = pd.Series(["2145-11-02 07:06:00"]).astype("datetime64[ns]")
|
77 |
+
expected = (end_time - start_time).values / np.timedelta64(1, "s")
|
78 |
+
result = (end_time - start_time).dt.total_seconds().values
|
79 |
+
assert result == expected
|
80 |
+
|
81 |
+
@pytest.mark.parametrize(
|
82 |
+
"nat", [np.datetime64("NaT", "ns"), np.datetime64("NaT", "us")]
|
83 |
+
)
|
84 |
+
def test_add_nat_datetimelike_scalar(self, nat, tda):
|
85 |
+
result = tda + nat
|
86 |
+
assert isinstance(result, DatetimeArray)
|
87 |
+
assert result._creso == tda._creso
|
88 |
+
assert result.isna().all()
|
89 |
+
|
90 |
+
result = nat + tda
|
91 |
+
assert isinstance(result, DatetimeArray)
|
92 |
+
assert result._creso == tda._creso
|
93 |
+
assert result.isna().all()
|
94 |
+
|
95 |
+
def test_add_pdnat(self, tda):
|
96 |
+
result = tda + pd.NaT
|
97 |
+
assert isinstance(result, TimedeltaArray)
|
98 |
+
assert result._creso == tda._creso
|
99 |
+
assert result.isna().all()
|
100 |
+
|
101 |
+
result = pd.NaT + tda
|
102 |
+
assert isinstance(result, TimedeltaArray)
|
103 |
+
assert result._creso == tda._creso
|
104 |
+
assert result.isna().all()
|
105 |
+
|
106 |
+
# TODO: 2022-07-11 this is the only test that gets to DTA.tz_convert
|
107 |
+
# or tz_localize with non-nano; implement tests specific to that.
|
108 |
+
def test_add_datetimelike_scalar(self, tda, tz_naive_fixture):
|
109 |
+
ts = pd.Timestamp("2016-01-01", tz=tz_naive_fixture).as_unit("ns")
|
110 |
+
|
111 |
+
expected = tda.as_unit("ns") + ts
|
112 |
+
res = tda + ts
|
113 |
+
tm.assert_extension_array_equal(res, expected)
|
114 |
+
res = ts + tda
|
115 |
+
tm.assert_extension_array_equal(res, expected)
|
116 |
+
|
117 |
+
ts += Timedelta(1) # case where we can't cast losslessly
|
118 |
+
|
119 |
+
exp_values = tda._ndarray + ts.asm8
|
120 |
+
expected = (
|
121 |
+
DatetimeArray._simple_new(exp_values, dtype=exp_values.dtype)
|
122 |
+
.tz_localize("UTC")
|
123 |
+
.tz_convert(ts.tz)
|
124 |
+
)
|
125 |
+
|
126 |
+
result = tda + ts
|
127 |
+
tm.assert_extension_array_equal(result, expected)
|
128 |
+
|
129 |
+
result = ts + tda
|
130 |
+
tm.assert_extension_array_equal(result, expected)
|
131 |
+
|
132 |
+
def test_mul_scalar(self, tda):
|
133 |
+
other = 2
|
134 |
+
result = tda * other
|
135 |
+
expected = TimedeltaArray._simple_new(tda._ndarray * other, dtype=tda.dtype)
|
136 |
+
tm.assert_extension_array_equal(result, expected)
|
137 |
+
assert result._creso == tda._creso
|
138 |
+
|
139 |
+
def test_mul_listlike(self, tda):
|
140 |
+
other = np.arange(len(tda))
|
141 |
+
result = tda * other
|
142 |
+
expected = TimedeltaArray._simple_new(tda._ndarray * other, dtype=tda.dtype)
|
143 |
+
tm.assert_extension_array_equal(result, expected)
|
144 |
+
assert result._creso == tda._creso
|
145 |
+
|
146 |
+
def test_mul_listlike_object(self, tda):
|
147 |
+
other = np.arange(len(tda))
|
148 |
+
result = tda * other.astype(object)
|
149 |
+
expected = TimedeltaArray._simple_new(tda._ndarray * other, dtype=tda.dtype)
|
150 |
+
tm.assert_extension_array_equal(result, expected)
|
151 |
+
assert result._creso == tda._creso
|
152 |
+
|
153 |
+
def test_div_numeric_scalar(self, tda):
|
154 |
+
other = 2
|
155 |
+
result = tda / other
|
156 |
+
expected = TimedeltaArray._simple_new(tda._ndarray / other, dtype=tda.dtype)
|
157 |
+
tm.assert_extension_array_equal(result, expected)
|
158 |
+
assert result._creso == tda._creso
|
159 |
+
|
160 |
+
def test_div_td_scalar(self, tda):
|
161 |
+
other = timedelta(seconds=1)
|
162 |
+
result = tda / other
|
163 |
+
expected = tda._ndarray / np.timedelta64(1, "s")
|
164 |
+
tm.assert_numpy_array_equal(result, expected)
|
165 |
+
|
166 |
+
def test_div_numeric_array(self, tda):
|
167 |
+
other = np.arange(len(tda))
|
168 |
+
result = tda / other
|
169 |
+
expected = TimedeltaArray._simple_new(tda._ndarray / other, dtype=tda.dtype)
|
170 |
+
tm.assert_extension_array_equal(result, expected)
|
171 |
+
assert result._creso == tda._creso
|
172 |
+
|
173 |
+
def test_div_td_array(self, tda):
|
174 |
+
other = tda._ndarray + tda._ndarray[-1]
|
175 |
+
result = tda / other
|
176 |
+
expected = tda._ndarray / other
|
177 |
+
tm.assert_numpy_array_equal(result, expected)
|
178 |
+
|
179 |
+
def test_add_timedeltaarraylike(self, tda):
|
180 |
+
tda_nano = tda.astype("m8[ns]")
|
181 |
+
|
182 |
+
expected = tda_nano * 2
|
183 |
+
res = tda_nano + tda
|
184 |
+
tm.assert_extension_array_equal(res, expected)
|
185 |
+
res = tda + tda_nano
|
186 |
+
tm.assert_extension_array_equal(res, expected)
|
187 |
+
|
188 |
+
expected = tda_nano * 0
|
189 |
+
res = tda - tda_nano
|
190 |
+
tm.assert_extension_array_equal(res, expected)
|
191 |
+
|
192 |
+
res = tda_nano - tda
|
193 |
+
tm.assert_extension_array_equal(res, expected)
|
194 |
+
|
195 |
+
|
196 |
+
class TestTimedeltaArray:
    """Assorted TimedeltaArray behavior: astype, setitem, searchsorted."""

    @pytest.mark.parametrize("dtype", [int, np.int32, np.int64, "uint32", "uint64"])
    def test_astype_int(self, dtype):
        arr = TimedeltaArray._from_sequence(
            [Timedelta("1h"), Timedelta("2h")], dtype="m8[ns]"
        )

        if np.dtype(dtype) != np.int64:
            # anything other than int64 is rejected with a hint
            with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"):
                arr.astype(dtype)
            return

        tm.assert_numpy_array_equal(arr.astype(dtype), arr._ndarray.view("i8"))

    def test_setitem_clears_freq(self):
        tdarr = pd.timedelta_range("1h", periods=2, freq="h")._data
        tdarr[0] = Timedelta("1h")
        assert tdarr.freq is None

    @pytest.mark.parametrize(
        "obj",
        [
            Timedelta(seconds=1),
            Timedelta(seconds=1).to_timedelta64(),
            Timedelta(seconds=1).to_pytimedelta(),
        ],
    )
    def test_setitem_objects(self, obj):
        # make sure we accept timedelta64 and timedelta in addition to Timedelta
        tdarr = pd.timedelta_range("2 Days", periods=4, freq="h")._data

        tdarr[0] = obj
        assert tdarr[0] == Timedelta(seconds=1)

    @pytest.mark.parametrize(
        "other",
        [
            1,
            np.int64(1),
            1.0,
            np.datetime64("NaT"),
            pd.Timestamp("2021-01-01"),
            "invalid",
            np.arange(10, dtype="i8") * 24 * 3600 * 10**9,
            (np.arange(10) * 24 * 3600 * 10**9).view("datetime64[ns]"),
            pd.Timestamp("2021-01-01").to_period("D"),
        ],
    )
    @pytest.mark.parametrize("index", [True, False])
    def test_searchsorted_invalid_types(self, other, index):
        data = np.arange(10, dtype="i8") * 24 * 3600 * 10**9
        arr = pd.TimedeltaIndex(data, freq="D")._data
        if index:
            arr = pd.Index(arr)

        # either error message is acceptable depending on the input kind
        msg = "|".join(
            [
                "searchsorted requires compatible dtype or scalar",
                "value should be a 'Timedelta', 'NaT', or array of those. Got",
            ]
        )
        with pytest.raises(TypeError, match=msg):
            arr.searchsorted(other)
|
262 |
+
|
263 |
+
|
264 |
+
class TestUnaryOps:
    """Unary abs / pos / neg on TimedeltaArray, including NaT handling."""

    def test_abs(self):
        raw = np.array([-3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
        arr = TimedeltaArray._from_sequence(raw)

        expected = TimedeltaArray._from_sequence(
            np.array([3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
        )

        # builtin abs and the numpy ufunc must agree
        tm.assert_timedelta_array_equal(abs(arr), expected)
        tm.assert_timedelta_array_equal(np.abs(arr), expected)

    def test_pos(self):
        raw = np.array([-3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
        arr = TimedeltaArray._from_sequence(raw)

        # unary plus is a no-op value-wise but must return a copy
        out = +arr
        tm.assert_timedelta_array_equal(out, arr)
        assert not tm.shares_memory(out, arr)

        out2 = np.positive(arr)
        tm.assert_timedelta_array_equal(out2, arr)
        assert not tm.shares_memory(out2, arr)

    def test_neg(self):
        raw = np.array([-3600 * 10**9, "NaT", 7200 * 10**9], dtype="m8[ns]")
        arr = TimedeltaArray._from_sequence(raw)

        expected = TimedeltaArray._from_sequence(
            np.array([3600 * 10**9, "NaT", -7200 * 10**9], dtype="m8[ns]")
        )

        tm.assert_timedelta_array_equal(-arr, expected)
        tm.assert_timedelta_array_equal(np.negative(arr), expected)

    def test_neg_freq(self):
        tdi = pd.timedelta_range("2 Days", periods=4, freq="h")
        arr = tdi._data

        expected = -tdi._data

        tm.assert_timedelta_array_equal(-arr, expected)
        tm.assert_timedelta_array_equal(np.negative(arr), expected)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (201 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_datetime.cpython-310.pyc
ADDED
Binary file (11.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_getitem.cpython-310.pyc
ADDED
Binary file (26.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_mask.cpython-310.pyc
ADDED
Binary file (1.94 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_set_value.cpython-310.pyc
ADDED
Binary file (1.26 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/__pycache__/test_setitem.cpython-310.pyc
ADDED
Binary file (55.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_datetime.py
ADDED
@@ -0,0 +1,499 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Also test support for datetime64[ns] in Series / DataFrame
|
3 |
+
"""
|
4 |
+
from datetime import (
|
5 |
+
datetime,
|
6 |
+
timedelta,
|
7 |
+
)
|
8 |
+
import re
|
9 |
+
|
10 |
+
from dateutil.tz import (
|
11 |
+
gettz,
|
12 |
+
tzutc,
|
13 |
+
)
|
14 |
+
import numpy as np
|
15 |
+
import pytest
|
16 |
+
import pytz
|
17 |
+
|
18 |
+
from pandas._libs import index as libindex
|
19 |
+
|
20 |
+
import pandas as pd
|
21 |
+
from pandas import (
|
22 |
+
DataFrame,
|
23 |
+
Series,
|
24 |
+
Timestamp,
|
25 |
+
date_range,
|
26 |
+
period_range,
|
27 |
+
)
|
28 |
+
import pandas._testing as tm
|
29 |
+
|
30 |
+
|
31 |
+
def test_fancy_getitem():
|
32 |
+
dti = date_range(
|
33 |
+
freq="WOM-1FRI", start=datetime(2005, 1, 1), end=datetime(2010, 1, 1)
|
34 |
+
)
|
35 |
+
|
36 |
+
s = Series(np.arange(len(dti)), index=dti)
|
37 |
+
|
38 |
+
msg = "Series.__getitem__ treating keys as positions is deprecated"
|
39 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
40 |
+
assert s[48] == 48
|
41 |
+
assert s["1/2/2009"] == 48
|
42 |
+
assert s["2009-1-2"] == 48
|
43 |
+
assert s[datetime(2009, 1, 2)] == 48
|
44 |
+
assert s[Timestamp(datetime(2009, 1, 2))] == 48
|
45 |
+
with pytest.raises(KeyError, match=r"^'2009-1-3'$"):
|
46 |
+
s["2009-1-3"]
|
47 |
+
tm.assert_series_equal(
|
48 |
+
s["3/6/2009":"2009-06-05"], s[datetime(2009, 3, 6) : datetime(2009, 6, 5)]
|
49 |
+
)
|
50 |
+
|
51 |
+
|
52 |
+
def test_fancy_setitem():
|
53 |
+
dti = date_range(
|
54 |
+
freq="WOM-1FRI", start=datetime(2005, 1, 1), end=datetime(2010, 1, 1)
|
55 |
+
)
|
56 |
+
|
57 |
+
s = Series(np.arange(len(dti)), index=dti)
|
58 |
+
|
59 |
+
msg = "Series.__setitem__ treating keys as positions is deprecated"
|
60 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
61 |
+
s[48] = -1
|
62 |
+
assert s.iloc[48] == -1
|
63 |
+
s["1/2/2009"] = -2
|
64 |
+
assert s.iloc[48] == -2
|
65 |
+
s["1/2/2009":"2009-06-05"] = -3
|
66 |
+
assert (s[48:54] == -3).all()
|
67 |
+
|
68 |
+
|
69 |
+
@pytest.mark.parametrize("tz_source", ["pytz", "dateutil"])
def test_getitem_setitem_datetime_tz(tz_source):
    """tz-aware string and datetime keys resolve to the same positions."""
    if tz_source == "pytz":
        tzget = pytz.timezone
    else:
        # handle special case for utc in dateutil
        tzget = lambda x: tzutc() if x == "UTC" else gettz(x)

    N = 50
    # testing with timezone, GH #2785
    rng = date_range("1/1/1990", periods=N, freq="h", tz=tzget("US/Eastern"))
    ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)

    # also test Timestamp tz handling, GH #2789
    # UTC-offset string key: zero out, then restore the original value
    scratch = ts.copy()
    scratch["1990-01-01 09:00:00+00:00"] = 0
    scratch["1990-01-01 09:00:00+00:00"] = ts.iloc[4]
    tm.assert_series_equal(scratch, ts)

    # fixed -06:00 offset string key
    scratch = ts.copy()
    scratch["1990-01-01 03:00:00-06:00"] = 0
    scratch["1990-01-01 03:00:00-06:00"] = ts.iloc[4]
    tm.assert_series_equal(scratch, ts)

    # repeat with datetimes
    scratch = ts.copy()
    scratch[datetime(1990, 1, 1, 9, tzinfo=tzget("UTC"))] = 0
    scratch[datetime(1990, 1, 1, 9, tzinfo=tzget("UTC"))] = ts.iloc[4]
    tm.assert_series_equal(scratch, ts)

    scratch = ts.copy()
    dt = Timestamp(1990, 1, 1, 3).tz_localize(tzget("US/Central"))
    dt = dt.to_pydatetime()
    scratch[dt] = 0
    scratch[dt] = ts.iloc[4]
    tm.assert_series_equal(scratch, ts)
|
105 |
+
|
106 |
+
|
107 |
+
def test_getitem_setitem_datetimeindex():
    """Get/set on a tz-aware DatetimeIndex Series; tz-naive keys are rejected."""
    N = 50
    # testing with timezone, GH #2785
    rng = date_range("1/1/1990", periods=N, freq="h", tz="US/Eastern")
    ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)

    # scalar string lookup
    assert ts["1990-01-01 04:00:00"] == ts.iloc[4]

    res = ts.copy()
    res["1990-01-01 04:00:00"] = 0
    res["1990-01-01 04:00:00"] = ts.iloc[4]
    tm.assert_series_equal(res, ts)

    # string slice
    tm.assert_series_equal(ts["1990-01-01 04:00:00":"1990-01-01 07:00:00"], ts[4:8])

    res = ts.copy()
    res["1990-01-01 04:00:00":"1990-01-01 07:00:00"] = 0
    res["1990-01-01 04:00:00":"1990-01-01 07:00:00"] = ts[4:8]
    tm.assert_series_equal(res, ts)

    # GH#18435 strings get a pass from tzawareness compat
    lb = "1990-01-01 04:00:00"
    rb = "1990-01-01 07:00:00"
    tm.assert_series_equal(ts[(ts.index >= lb) & (ts.index <= rb)], ts[4:8])

    lb = "1990-01-01 04:00:00-0500"
    rb = "1990-01-01 07:00:00-0500"
    tm.assert_series_equal(ts[(ts.index >= lb) & (ts.index <= rb)], ts[4:8])

    # But we do not give datetimes a pass on tzawareness compat
    naive = datetime(1990, 1, 1, 4)
    for key in [naive, Timestamp(naive), np.datetime64(naive, "ns")]:
        with pytest.raises(KeyError, match=re.escape(repr(key))):
            # GH#36148 as of 2.0 we require tzawareness-compat
            ts[key]

    res = ts.copy()
    # GH#36148 as of 2.0 we do not ignore tzawareness mismatch in indexing,
    # so setting it as a new key casts to object rather than matching
    # rng[4]
    res[naive] = ts.iloc[4]
    assert res.index.dtype == object
    tm.assert_index_equal(res.index[:-1], rng.astype(object))
    assert res.index[-1] == naive

    msg = "Cannot compare tz-naive and tz-aware datetime-like objects"
    with pytest.raises(TypeError, match=msg):
        # GH#36148 require tzawareness compat as of 2.0
        ts[naive : datetime(1990, 1, 1, 7)]

    res = ts.copy()
    with pytest.raises(TypeError, match=msg):
        # GH#36148 require tzawareness compat as of 2.0
        res[naive : datetime(1990, 1, 1, 7)] = 0
    with pytest.raises(TypeError, match=msg):
        # GH#36148 require tzawareness compat as of 2.0
        res[naive : datetime(1990, 1, 1, 7)] = 99
    # the __setitem__ here failed, so res should still match ts
    tm.assert_series_equal(res, ts)

    lb = naive
    rb = datetime(1990, 1, 1, 7)
    cmp_msg = (
        r"Invalid comparison between dtype=datetime64\[ns, US/Eastern\] and datetime"
    )
    with pytest.raises(TypeError, match=cmp_msg):
        # tznaive vs tzaware comparison is invalid
        # see GH#18376, GH#18162
        ts[(ts.index >= lb) & (ts.index <= rb)]

    lb = Timestamp(naive).tz_localize(rng.tzinfo)
    rb = Timestamp(datetime(1990, 1, 1, 7)).tz_localize(rng.tzinfo)
    tm.assert_series_equal(ts[(ts.index >= lb) & (ts.index <= rb)], ts[4:8])

    # indexing with elements of the index itself
    assert ts[ts.index[4]] == ts.iloc[4]
    tm.assert_series_equal(ts[ts.index[4:8]], ts[4:8])

    res = ts.copy()
    res[ts.index[4:8]] = 0
    res.iloc[4:8] = ts.iloc[4:8]
    tm.assert_series_equal(res, ts)

    # also test partial date slicing
    tm.assert_series_equal(ts["1990-01-02"], ts[24:48])

    res = ts.copy()
    res["1990-01-02"] = 0
    res["1990-01-02"] = ts[24:48]
    tm.assert_series_equal(res, ts)
|
212 |
+
|
213 |
+
|
214 |
+
def test_getitem_setitem_periodindex():
    # Round-trip getitem/setitem on a PeriodIndex-backed Series using string
    # labels, string slices, boolean masks, and Period labels from the index.
    n_periods = 50
    prng = period_range("1/1/1990", periods=n_periods, freq="h")
    ts = Series(np.random.default_rng(2).standard_normal(n_periods), index=prng)

    # scalar string label maps to the positional row
    assert ts["1990-01-01 04"] == ts.iloc[4]

    # setting through the same string label round-trips
    cp = ts.copy()
    cp["1990-01-01 04"] = 0
    cp["1990-01-01 04"] = ts.iloc[4]
    tm.assert_series_equal(cp, ts)

    # string slice is label-inclusive on both ends
    tm.assert_series_equal(ts["1990-01-01 04":"1990-01-01 07"], ts[4:8])

    # setting through a string slice round-trips
    cp = ts.copy()
    cp["1990-01-01 04":"1990-01-01 07"] = 0
    cp["1990-01-01 04":"1990-01-01 07"] = ts[4:8]
    tm.assert_series_equal(cp, ts)

    # boolean mask built from string comparisons against the PeriodIndex
    lo = "1990-01-01 04"
    hi = "1990-01-01 07"
    tm.assert_series_equal(ts[(ts.index >= lo) & (ts.index <= hi)], ts[4:8])

    # GH 2782: indexing with Period labels taken from the index itself
    assert ts[ts.index[4]] == ts.iloc[4]
    tm.assert_series_equal(ts[ts.index[4:8]], ts[4:8])

    cp = ts.copy()
    cp[ts.index[4:8]] = 0
    cp.iloc[4:8] = ts.iloc[4:8]
    tm.assert_series_equal(cp, ts)
|
256 |
+
|
257 |
+
|
258 |
+
def test_datetime_indexing():
    # Looking up a Timestamp absent from the index raises KeyError;
    # assigning through it appends a new row.
    idx = date_range("1/1/2000", "1/7/2000").repeat(3)

    ser = Series(len(idx), index=idx)
    stamp = Timestamp("1/8/2000")

    with pytest.raises(KeyError, match=re.escape(repr(stamp))):
        ser[stamp]
    ser[stamp] = 0
    assert ser[stamp] == 0

    # not monotonic
    ser = Series(len(idx), index=idx)
    ser = ser[::-1]

    with pytest.raises(KeyError, match=re.escape(repr(stamp))):
        ser[stamp]
    ser[stamp] = 0
    assert ser[stamp] == 0
|
278 |
+
|
279 |
+
|
280 |
+
# test duplicates in time series
|
281 |
+
|
282 |
+
|
283 |
+
def test_indexing_with_duplicate_datetimeindex(
    rand_series_with_duplicate_datetimeindex,
):
    # Scalar lookup on a duplicated DatetimeIndex: duplicated labels return a
    # Series, unique labels return a scalar; setting hits every duplicate.
    ts = rand_series_with_duplicate_datetimeindex

    for date in ts.index.unique():
        result = ts[date]

        mask = ts.index == date
        total = (ts.index == date).sum()
        expected = ts[mask]
        if total > 1:
            tm.assert_series_equal(result, expected)
        else:
            tm.assert_almost_equal(result, expected.iloc[0])

        # assignment through the label zeroes every matching row
        cp = ts.copy()
        cp[date] = 0
        tm.assert_series_equal(cp, Series(np.where(mask, 0, ts), index=ts.index))

    key = datetime(2000, 1, 6)
    with pytest.raises(KeyError, match=re.escape(repr(key))):
        ts[key]

    # new index
    ts[datetime(2000, 1, 6)] = 0
    assert ts[datetime(2000, 1, 6)] == 0
|
312 |
+
|
313 |
+
|
314 |
+
def test_loc_getitem_over_size_cutoff(monkeypatch):
    # #1821
    # Force the hash-table engine path by shrinking the size cutoff.
    monkeypatch.setattr(libindex, "_SIZE_CUTOFF", 1000)

    # create large list of non periodic datetime
    one_sec = timedelta(seconds=1)
    half_sec = timedelta(microseconds=500000)
    cursor = datetime(2011, 12, 5, 20, 30)
    n = 1100
    dates = []
    for _ in range(n):
        dates.append(cursor)
        dates.append(cursor + one_sec)
        dates.append(cursor + one_sec + half_sec)
        dates.append(cursor + one_sec + one_sec + half_sec)
        cursor += 3 * one_sec

    # duplicate some values in the list
    for p in np.random.default_rng(2).integers(0, len(dates) - 1, 20):
        dates[p + 1] = dates[p]

    df = DataFrame(
        np.random.default_rng(2).standard_normal((len(dates), 4)),
        index=dates,
        columns=list("ABCD"),
    )

    timestamp = df.index[n * 3]
    assert timestamp in df.index

    # it works!
    df.loc[timestamp]
    assert len(df.loc[[timestamp]]) > 0
|
350 |
+
|
351 |
+
|
352 |
+
def test_indexing_over_size_cutoff_period_index(monkeypatch):
    # GH 27136
    # Force the over-size-threshold engine path for a PeriodIndex.
    monkeypatch.setattr(libindex, "_SIZE_CUTOFF", 1000)

    n = 1100
    idx = period_range("1/1/2000", freq="min", periods=n)
    assert idx._engine.over_size_threshold

    ser = Series(np.random.default_rng(2).standard_normal(len(idx)), index=idx)

    timestamp = idx[n - 1]
    assert timestamp in ser.index

    # it works!
    ser[timestamp]
    assert len(ser.loc[[timestamp]]) > 0
|
370 |
+
|
371 |
+
|
372 |
+
def test_indexing_unordered():
    # GH 2437
    rng = date_range(start="2011-01-01", end="2011-01-15")
    ts = Series(np.random.default_rng(2).random(len(rng)), index=rng)
    ts2 = pd.concat([ts[0:4], ts[-4:], ts[4:-4]])

    # scalar label lookups agree regardless of index order
    for t in ts.index:
        assert ts[t] == ts2[t]

    # GH 3448 (ranges)
    def compare(slobj):
        result = ts2[slobj].copy()
        result = result.sort_index()
        expected = ts[slobj]
        expected.index = expected.index._with_freq(None)
        tm.assert_series_equal(result, expected)

    # value-based slicing on a non-monotonic DatetimeIndex raises
    for key in [
        slice("2011-01-01", "2011-01-15"),
        slice("2010-12-30", "2011-01-15"),
        slice("2011-01-01", "2011-01-16"),
        # partial ranges
        slice("2011-01-01", "2011-01-6"),
        slice("2011-01-06", "2011-01-8"),
        slice("2011-01-06", "2011-01-12"),
    ]:
        with pytest.raises(
            KeyError, match="Value based partial slicing on non-monotonic"
        ):
            compare(key)

    # single values
    result = ts2["2011"].sort_index()
    expected = ts["2011"]
    expected.index = expected.index._with_freq(None)
    tm.assert_series_equal(result, expected)
|
410 |
+
|
411 |
+
|
412 |
+
def test_indexing_unordered2():
|
413 |
+
# diff freq
|
414 |
+
rng = date_range(datetime(2005, 1, 1), periods=20, freq="ME")
|
415 |
+
ts = Series(np.arange(len(rng)), index=rng)
|
416 |
+
ts = ts.take(np.random.default_rng(2).permutation(20))
|
417 |
+
|
418 |
+
result = ts["2005"]
|
419 |
+
for t in result.index:
|
420 |
+
assert t.year == 2005
|
421 |
+
|
422 |
+
|
423 |
+
def test_indexing():
    idx = date_range("2001-1-1", periods=20, freq="ME")
    ts = Series(np.random.default_rng(2).random(len(idx)), index=idx)

    # getting

    # GH 3070, make sure semantics work on Series/Frame
    tm.assert_series_equal(ts["2001"], ts.iloc[:12])

    df = DataFrame({"A": ts.copy()})

    # GH#36179 pre-2.0 df["2001"] operated as slicing on rows. in 2.0 it behaves
    # like any other key, so raises
    with pytest.raises(KeyError, match="2001"):
        df["2001"]

    # setting
    ts = Series(np.random.default_rng(2).random(len(idx)), index=idx)
    expected = ts.copy()
    expected.iloc[:12] = 1
    ts["2001"] = 1
    tm.assert_series_equal(ts, expected)

    expected = df.copy()
    expected.iloc[:12, 0] = 1
    df.loc["2001", "A"] = 1
    tm.assert_frame_equal(df, expected)
|
451 |
+
|
452 |
+
|
453 |
+
def test_getitem_str_month_with_datetimeindex():
    # GH3546 (not including times on the last day)
    # A month-resolution string must select every row, including the last
    # day's intraday timestamps, at both hourly and secondly resolution.
    for freq, end in [("h", "2013-05-31 23:00"), ("s", "2013-05-31 23:59")]:
        idx = date_range(start="2013-05-31 00:00", end=end, freq=freq)
        ts = Series(range(len(idx)), index=idx)
        tm.assert_series_equal(ts["2013-05"], ts)
|
464 |
+
|
465 |
+
|
466 |
+
def test_getitem_str_year_with_datetimeindex():
    # A year-resolution string selects everything within the year, down to
    # the final microsecond of its last day.
    labels = [
        Timestamp("2013-05-31 00:00"),
        Timestamp(datetime(2013, 5, 31, 23, 59, 59, 999999)),
    ]
    ser = Series(range(len(labels)), index=labels)
    tm.assert_series_equal(ser["2013"], ser)
|
474 |
+
|
475 |
+
|
476 |
+
def test_getitem_str_second_with_datetimeindex():
    # GH14826, indexing with a seconds resolution string / datetime object
    frame = DataFrame(
        np.random.default_rng(2).random((5, 5)),
        columns=["open", "high", "low", "close", "volume"],
        index=date_range("2012-01-02 18:01:00", periods=5, tz="US/Central", freq="s"),
    )

    # this is a single date, so will raise
    with pytest.raises(KeyError, match=r"^'2012-01-02 18:01:02'$"):
        frame["2012-01-02 18:01:02"]

    # a Timestamp key is likewise not a column label, so it raises too
    msg = r"Timestamp\('2012-01-02 18:01:02-0600', tz='US/Central'\)"
    with pytest.raises(KeyError, match=msg):
        frame[frame.index[2]]
|
491 |
+
|
492 |
+
|
493 |
+
def test_compare_datetime_with_all_none():
    # GH#54870
    # Comparing a datetime64 Series against an all-None object Series is
    # False everywhere rather than raising.
    left = Series(["2020-01-01", "2020-01-02"], dtype="datetime64[ns]")
    right = Series([None, None])
    tm.assert_series_equal(left > right, Series([False, False]))
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_get.py
ADDED
@@ -0,0 +1,238 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
import pandas as pd
|
5 |
+
from pandas import (
|
6 |
+
DatetimeIndex,
|
7 |
+
Index,
|
8 |
+
Series,
|
9 |
+
date_range,
|
10 |
+
)
|
11 |
+
import pandas._testing as tm
|
12 |
+
|
13 |
+
|
14 |
+
def test_get():
    # GH 6383
    values = np.array(
        [43, 48, 60, 48, 50, 51, 50, 45, 57, 48,
         56, 45, 51, 39, 55, 43, 54, 52, 51, 54]
    )
    ser = Series(values)

    # default RangeIndex: 25 is not a label, so the default comes back
    assert ser.get(25, 0) == 0

    float_index = Index(
        [25.0, 36.0, 49.0, 64.0, 81.0, 100.0, 121.0, 144.0, 169.0, 196.0,
         1225.0, 1296.0, 1369.0, 1444.0, 1521.0, 1600.0, 1681.0, 1764.0,
         1849.0, 1936.0],
        dtype=np.float64,
    )
    ser = Series(values, index=float_index)

    # 25 matches the float label 25.0, so the stored value wins over the default
    assert ser.get(25, 0) == 43

    # GH 7407
    # with a boolean accessor
    df = pd.DataFrame({"i": [0] * 3, "b": [False] * 3})
    vc = df.i.value_counts()
    assert vc.get(99, default="Missing") == "Missing"

    vc = df.b.value_counts()
    assert vc.get(False, default="Missing") == 3
    assert vc.get(True, default="Missing") == "Missing"
|
116 |
+
|
117 |
+
|
118 |
+
def test_get_nan(float_numpy_dtype):
|
119 |
+
# GH 8569
|
120 |
+
s = Index(range(10), dtype=float_numpy_dtype).to_series()
|
121 |
+
assert s.get(np.nan) is None
|
122 |
+
assert s.get(np.nan, default="Missing") == "Missing"
|
123 |
+
|
124 |
+
|
125 |
+
def test_get_nan_multiple(float_numpy_dtype):
|
126 |
+
# GH 8569
|
127 |
+
# ensure that fixing "test_get_nan" above hasn't broken get
|
128 |
+
# with multiple elements
|
129 |
+
s = Index(range(10), dtype=float_numpy_dtype).to_series()
|
130 |
+
|
131 |
+
idx = [2, 30]
|
132 |
+
assert s.get(idx) is None
|
133 |
+
|
134 |
+
idx = [2, np.nan]
|
135 |
+
assert s.get(idx) is None
|
136 |
+
|
137 |
+
# GH 17295 - all missing keys
|
138 |
+
idx = [20, 30]
|
139 |
+
assert s.get(idx) is None
|
140 |
+
|
141 |
+
idx = [np.nan, np.nan]
|
142 |
+
assert s.get(idx) is None
|
143 |
+
|
144 |
+
|
145 |
+
def test_get_with_default():
|
146 |
+
# GH#7725
|
147 |
+
d0 = ["a", "b", "c", "d"]
|
148 |
+
d1 = np.arange(4, dtype="int64")
|
149 |
+
|
150 |
+
for data, index in ((d0, d1), (d1, d0)):
|
151 |
+
s = Series(data, index=index)
|
152 |
+
for i, d in zip(index, data):
|
153 |
+
assert s.get(i) == d
|
154 |
+
assert s.get(i, d) == d
|
155 |
+
assert s.get(i, "z") == d
|
156 |
+
|
157 |
+
assert s.get("e", "z") == "z"
|
158 |
+
assert s.get("e", "e") == "e"
|
159 |
+
|
160 |
+
msg = "Series.__getitem__ treating keys as positions is deprecated"
|
161 |
+
warn = None
|
162 |
+
if index is d0:
|
163 |
+
warn = FutureWarning
|
164 |
+
with tm.assert_produces_warning(warn, match=msg):
|
165 |
+
assert s.get(10, "z") == "z"
|
166 |
+
assert s.get(10, 10) == 10
|
167 |
+
|
168 |
+
|
169 |
+
@pytest.mark.parametrize(
    "arr",
    [
        np.random.default_rng(2).standard_normal(10),
        DatetimeIndex(date_range("2020-01-01", periods=10), name="a").tz_localize(
            tz="US/Eastern"
        ),
    ],
)
def test_get_with_ea(arr):
    # GH#21260
    # get() on even-integer labels: label lookups, list keys, and slices.
    ser = Series(arr, index=[2 * i for i in range(len(arr))])
    assert ser.get(4) == ser.iloc[2]

    tm.assert_series_equal(ser.get([4, 6]), ser.iloc[[2, 3]])
    tm.assert_series_equal(ser.get(slice(2)), ser.iloc[[0, 1]])

    assert ser.get(-1) is None
    assert ser.get(ser.index.max() + 1) is None

    # string index: label and label-slice lookups
    ser = Series(arr[:6], index=list("abcdef"))
    assert ser.get("c") == ser.iloc[2]

    tm.assert_series_equal(ser.get(slice("b", "d")), ser.iloc[[1, 2, 3]])

    assert ser.get("Z") is None

    # integer keys on a non-integer index are positional (deprecated)
    msg = "Series.__getitem__ treating keys as positions is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=msg):
        assert ser.get(4) == ser.iloc[4]
    with tm.assert_produces_warning(FutureWarning, match=msg):
        assert ser.get(-1) == ser.iloc[-1]
    with tm.assert_produces_warning(FutureWarning, match=msg):
        assert ser.get(len(ser)) is None

    # GH#21257
    ser = Series(arr)
    ser2 = ser[::2]
    assert ser2.get(1) is None
|
216 |
+
|
217 |
+
|
218 |
+
def test_getitem_get(string_series, object_series):
    msg = "Series.__getitem__ treating keys as positions is deprecated"

    # label-based __getitem__ and get() agree, and match positional iloc
    for obj in (string_series, object_series):
        label = obj.index[5]

        assert obj[label] == obj.get(label)
        assert obj[label] == obj.iloc[5]

    # a negative int key on a string index is positional (deprecated)
    with tm.assert_produces_warning(FutureWarning, match=msg):
        assert string_series.get(-1) == string_series.get(string_series.index[-1])
    assert string_series.iloc[5] == string_series.get(string_series.index[5])
|
230 |
+
|
231 |
+
|
232 |
+
def test_get_none():
    # GH#5652
    # get(None) returns None on both empty and non-empty object Series.
    for ser in (Series(dtype=object), Series(dtype=object, index=list("abc"))):
        assert ser.get(None) is None
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_getitem.py
ADDED
@@ -0,0 +1,735 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Series.__getitem__ test classes are organized by the type of key passed.
|
3 |
+
"""
|
4 |
+
from datetime import (
|
5 |
+
date,
|
6 |
+
datetime,
|
7 |
+
time,
|
8 |
+
)
|
9 |
+
|
10 |
+
import numpy as np
|
11 |
+
import pytest
|
12 |
+
|
13 |
+
from pandas._libs.tslibs import (
|
14 |
+
conversion,
|
15 |
+
timezones,
|
16 |
+
)
|
17 |
+
|
18 |
+
from pandas.core.dtypes.common import is_scalar
|
19 |
+
|
20 |
+
import pandas as pd
|
21 |
+
from pandas import (
|
22 |
+
Categorical,
|
23 |
+
DataFrame,
|
24 |
+
DatetimeIndex,
|
25 |
+
Index,
|
26 |
+
Series,
|
27 |
+
Timestamp,
|
28 |
+
date_range,
|
29 |
+
period_range,
|
30 |
+
timedelta_range,
|
31 |
+
)
|
32 |
+
import pandas._testing as tm
|
33 |
+
from pandas.core.indexing import IndexingError
|
34 |
+
|
35 |
+
from pandas.tseries.offsets import BDay
|
36 |
+
|
37 |
+
|
38 |
+
class TestSeriesGetitemScalars:
    """Scalar-key ``Series.__getitem__`` behavior, organized by index type."""

    def test_getitem_object_index_float_string(self):
        # GH#17286: object index mixing strings and a float label
        s = Series([1] * 4, index=Index(["a", "b", "c", 1.0]))
        assert s["a"] == 1
        assert s[1.0] == 1

    def test_getitem_float_keys_tuple_values(self):
        # see GH#13509

        # unique Index -> the stored tuple comes back as a scalar
        s = Series([(1, 1), (2, 2), (3, 3)], index=[0.0, 0.1, 0.2], name="foo")
        assert s[0.0] == (1, 1)

        # non-unique Index -> all matching rows come back as a Series
        exp = Series([(1, 1), (2, 2)], index=[0.0, 0.0], name="foo")
        s = Series([(1, 1), (2, 2), (3, 3)], index=[0.0, 0.0, 0.2], name="foo")
        tm.assert_series_equal(s[0.0], exp)

    def test_getitem_unrecognized_scalar(self):
        # GH#32684 a scalar key that is not recognized by lib.is_scalar

        # a series that might be produced via `frame.dtypes`
        s = Series([1, 2], index=[np.dtype("O"), np.dtype("i8")])
        assert s[s.index[1]] == 2

    def test_getitem_negative_out_of_bounds(self):
        s = Series(["a"] * 10, index=["a"] * 10)

        msg = "index -11 is out of bounds for axis 0 with size 10|index out of bounds"
        warn_msg = "Series.__getitem__ treating keys as positions is deprecated"
        with pytest.raises(IndexError, match=msg):
            with tm.assert_produces_warning(FutureWarning, match=warn_msg):
                s[-11]

    def test_getitem_out_of_bounds_indexerror(self, datetime_series):
        # don't segfault, GH#495
        msg = r"index \d+ is out of bounds for axis 0 with size \d+"
        warn_msg = "Series.__getitem__ treating keys as positions is deprecated"
        with pytest.raises(IndexError, match=msg):
            with tm.assert_produces_warning(FutureWarning, match=warn_msg):
                datetime_series[len(datetime_series)]

    def test_getitem_out_of_bounds_empty_rangeindex_keyerror(self):
        # GH#917
        # With a RangeIndex, an int key gives a KeyError
        s = Series([], dtype=object)
        with pytest.raises(KeyError, match="-1"):
            s[-1]

    def test_getitem_keyerror_with_integer_index(self, any_int_numpy_dtype):
        dtype = any_int_numpy_dtype
        s = Series(
            np.random.default_rng(2).standard_normal(6),
            index=Index([0, 0, 1, 1, 2, 2], dtype=dtype),
        )

        with pytest.raises(KeyError, match=r"^5$"):
            s[5]

        with pytest.raises(KeyError, match=r"^'c'$"):
            s["c"]

        # not monotonic
        s = Series(
            np.random.default_rng(2).standard_normal(6), index=[2, 2, 0, 0, 1, 1]
        )

        with pytest.raises(KeyError, match=r"^5$"):
            s[5]

        with pytest.raises(KeyError, match=r"^'c'$"):
            s["c"]

    def test_getitem_int64(self, datetime_series):
        # np.int64 keys are treated positionally, which is deprecated
        key = np.int64(5)
        msg = "Series.__getitem__ treating keys as positions is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            out = datetime_series[key]
        assert out == datetime_series.iloc[5]

    def test_getitem_full_range(self):
        # github.com/pandas-dev/pandas/commit/4f433773141d2eb384325714a2776bcc5b2e20f7
        s = Series(range(5), index=list(range(5)))
        tm.assert_series_equal(s[list(range(5))], s)

    # ------------------------------------------------------------------
    # Series with DatetimeIndex

    @pytest.mark.parametrize("tzstr", ["Europe/Berlin", "dateutil/Europe/Berlin"])
    def test_getitem_pydatetime_tz(self, tzstr):
        tz = timezones.maybe_get_tz(tzstr)

        index = date_range(
            start="2012-12-24 16:00", end="2012-12-24 18:00", freq="h", tz=tzstr
        )
        ts = Series(index=index, data=index.hour)
        time_pandas = Timestamp("2012-12-24 17:00", tz=tzstr)

        # a localized stdlib datetime selects the same row as a Timestamp
        dt = datetime(2012, 12, 24, 17, 0)
        time_datetime = conversion.localize_pydatetime(dt, tz)
        assert ts[time_pandas] == ts[time_datetime]

    @pytest.mark.parametrize("tz", ["US/Eastern", "dateutil/US/Eastern"])
    def test_string_index_alias_tz_aware(self, tz):
        rng = date_range("1/1/2000", periods=10, tz=tz)
        s = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng)

        # date-string alias resolves within a tz-aware index
        tm.assert_almost_equal(s["1/3/2000"], s.iloc[2])

    def test_getitem_time_object(self):
        rng = date_range("1/1/2000", "1/5/2000", freq="5min")
        ts = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng)

        # a datetime.time key selects every row at that wall-clock time
        out = ts[time(9, 30)]
        exp = ts[(rng.hour == 9) & (rng.minute == 30)]
        out.index = out.index._with_freq(None)
        tm.assert_series_equal(out, exp)

    # ------------------------------------------------------------------
    # Series with CategoricalIndex

    def test_getitem_scalar_categorical_index(self):
        cats = Categorical([Timestamp("12-31-1999"), Timestamp("12-31-2000")])

        s = Series([1, 2], index=cats)
        assert s[cats[0]] == s.iloc[0]

    def test_getitem_numeric_categorical_listlike_matches_scalar(self):
        # GH#15470
        s = Series(["a", "b", "c"], index=pd.CategoricalIndex([2, 1, 0]))

        # 0 is treated as a label
        assert s[0] == "c"

        # the listlike analogue should also be treated as labels
        tm.assert_series_equal(s[[0]], s.iloc[-1:])
        tm.assert_series_equal(s[[0, 1, 2]], s.iloc[::-1])

    def test_getitem_integer_categorical_not_positional(self):
        # GH#14865
        s = Series(["a", "b", "c"], index=Index([1, 2, 3], dtype="category"))
        assert s.get(3) == "c"
        assert s[3] == "c"

    def test_getitem_str_with_timedeltaindex(self):
        rng = timedelta_range("1 day 10:11:12", freq="h", periods=500)
        s = Series(np.arange(len(rng)), index=rng)

        key = "6 days, 23:11:12"
        assert rng.get_loc(key) == 133
        assert s[key] == s.iloc[133]

        msg = r"^Timedelta\('50 days 00:00:00'\)$"
        with pytest.raises(KeyError, match=msg):
            rng.get_loc("50 days")
        with pytest.raises(KeyError, match=msg):
            s["50 days"]

    def test_getitem_bool_index_positional(self):
        # GH#48653: with a bool index, int keys are positional (deprecated)
        s = Series({True: 1, False: 0})
        msg = "Series.__getitem__ treating keys as positions is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            out = s[0]
        assert out == 1
|
224 |
+
|
225 |
+
|
226 |
+
class TestSeriesGetitemSlices:
    """Slice-key ``Series.__getitem__`` behavior for datetime/timedelta indexes."""

    def test_getitem_partial_str_slice_with_datetimeindex(self):
        # GH#34860: a year string selects the whole year
        ser = date_range("1/1/2008", "1/1/2009").to_series()
        out = ser["2008"]

        rng = date_range(start="2008-01-01", end="2008-12-31")
        tm.assert_series_equal(out, Series(rng, index=rng))

    def test_getitem_slice_strings_with_datetimeindex(self):
        idx = DatetimeIndex(
            ["1/1/2000", "1/2/2000", "1/2/2000", "1/3/2000", "1/4/2000"]
        )

        ts = Series(np.random.default_rng(2).standard_normal(len(idx)), index=idx)

        # open-ended string slice
        tm.assert_series_equal(ts["1/2/2000":], ts[1:])

        # bounded string slice is label-inclusive on both ends
        tm.assert_series_equal(ts["1/2/2000":"1/3/2000"], ts[1:4])

    def test_getitem_partial_str_slice_with_timedeltaindex(self):
        rng = timedelta_range("1 day 10:11:12", freq="h", periods=500)
        ser = Series(np.arange(len(rng)), index=rng)

        tm.assert_series_equal(ser["5 day":"6 day"], ser.iloc[86:134])
        tm.assert_series_equal(ser["5 day":], ser.iloc[86:])
        tm.assert_series_equal(ser[:"6 day"], ser.iloc[:134])

    def test_getitem_partial_str_slice_high_reso_with_timedeltaindex(self):
        # higher reso
        rng = timedelta_range("1 day 10:11:12", freq="us", periods=2000)
        ser = Series(np.arange(len(rng)), index=rng)

        tm.assert_series_equal(ser["1 day 10:11:12":], ser.iloc[0:])
        tm.assert_series_equal(ser["1 day 10:11:12.001":], ser.iloc[1000:])

        # a fully-specified string is a scalar lookup, not a slice
        assert ser["1 days, 10:11:12.001001"] == ser.iloc[1001]

    def test_getitem_slice_2d(self, datetime_series):
        # GH#30588 multi-dimensional indexing deprecated
        with pytest.raises(ValueError, match="Multi-dimensional indexing"):
            datetime_series[:, np.newaxis]

    def test_getitem_median_slice_bug(self):
        index = date_range("20090415", "20090519", freq="2B")
        ser = Series(np.random.default_rng(2).standard_normal(13), index=index)

        indexer = [slice(6, 7, None)]
        msg = "Indexing with a single-item list"
        with pytest.raises(ValueError, match=msg):
            # GH#31299
            ser[indexer]
        # but we're OK with a single-element tuple
        tm.assert_series_equal(ser[(indexer[0],)], ser[indexer[0]])

    @pytest.mark.parametrize(
        "slc, positions",
        [
            [slice(date(2018, 1, 1), None), [0, 1, 2]],
            [slice(date(2019, 1, 2), None), [2]],
            [slice(date(2020, 1, 1), None), []],
            [slice(None, date(2020, 1, 1)), [0, 1, 2]],
            [slice(None, date(2019, 1, 1)), [0]],
        ],
    )
    def test_getitem_slice_date(self, slc, positions):
        # https://github.com/pandas-dev/pandas/issues/31501
        ser = Series(
            [0, 1, 2],
            DatetimeIndex(["2019-01-01", "2019-01-01T06:00:00", "2019-01-02"]),
        )
        tm.assert_series_equal(ser[slc], ser.take(positions))

    def test_getitem_slice_float_raises(self, datetime_series):
        msg = (
            "cannot do slice indexing on DatetimeIndex with these indexers "
            r"\[{key}\] of type float"
        )
        with pytest.raises(TypeError, match=msg.format(key=r"4\.0")):
            datetime_series[4.0:10.0]

        with pytest.raises(TypeError, match=msg.format(key=r"4\.5")):
            datetime_series[4.5:10.0]

    def test_getitem_slice_bug(self):
        ser = Series(range(10), index=list(range(10)))

        # out-of-range slice bounds clip instead of raising
        tm.assert_series_equal(ser[-12:], ser)
        tm.assert_series_equal(ser[-7:], ser[3:])
        tm.assert_series_equal(ser[:-12], ser[:0])

    def test_getitem_slice_integers(self):
        ser = Series(
            np.random.default_rng(2).standard_normal(8),
            index=[2, 4, 6, 8, 10, 12, 14, 16],
        )

        # integer slice on a non-RangeIndex integer index is positional
        tm.assert_series_equal(ser[:4], Series(ser.values[:4], index=[2, 4, 6, 8]))
|
355 |
+
|
356 |
+
|
357 |
+
class TestSeriesGetitemListLike:
    """List-like keys (list/ndarray/Index/Series) against Series.__getitem__."""

    @pytest.mark.parametrize("box", [list, np.array, Index, Series])
    def test_getitem_no_matches(self, box):
        # GH#33462 we expect the same behavior for list/ndarray/Index/Series
        ser = Series(["A", "B"])

        key = Series(["C"], dtype=object)
        key = box(key)

        # message allows either object or string dtype repr of the key
        msg = (
            r"None of \[Index\(\['C'\], dtype='object|string'\)\] are in the \[index\]"
        )
        with pytest.raises(KeyError, match=msg):
            ser[key]

    def test_getitem_intlist_intindex_periodvalues(self):
        """Integer-list key on a default index keeps Period dtype."""
        ser = Series(period_range("2000-01-01", periods=10, freq="D"))

        result = ser[[2, 4]]
        exp = Series(
            [pd.Period("2000-01-03", freq="D"), pd.Period("2000-01-05", freq="D")],
            index=[2, 4],
            dtype="Period[D]",
        )
        tm.assert_series_equal(result, exp)
        assert result.dtype == "Period[D]"

    @pytest.mark.parametrize("box", [list, np.array, Index])
    def test_getitem_intlist_intervalindex_non_int(self, box):
        # GH#33404 fall back to positional since ints are unambiguous
        dti = date_range("2000-01-03", periods=3)._with_freq(None)
        ii = pd.IntervalIndex.from_breaks(dti)
        ser = Series(range(len(ii)), index=ii)

        expected = ser.iloc[:1]
        key = box([0])
        # positional fallback still emits the deprecation warning
        msg = "Series.__getitem__ treating keys as positions is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = ser[key]
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("box", [list, np.array, Index])
    @pytest.mark.parametrize("dtype", [np.int64, np.float64, np.uint64])
    def test_getitem_intlist_multiindex_numeric_level(self, dtype, box):
        # GH#33404 do _not_ fall back to positional since ints are ambiguous
        idx = Index(range(4)).astype(dtype)
        dti = date_range("2000-01-03", periods=3)
        mi = pd.MultiIndex.from_product([idx, dti])
        ser = Series(range(len(mi))[::-1], index=mi)

        key = box([5])
        with pytest.raises(KeyError, match="5"):
            ser[key]

    def test_getitem_uint_array_key(self, any_unsigned_int_numpy_dtype):
        # GH #37218: unsigned-int array keys raise KeyError, not overflow
        ser = Series([1, 2, 3])
        key = np.array([4], dtype=any_unsigned_int_numpy_dtype)

        with pytest.raises(KeyError, match="4"):
            ser[key]
        with pytest.raises(KeyError, match="4"):
            ser.loc[key]
|
420 |
+
|
421 |
+
|
422 |
+
class TestGetitemBooleanMask:
|
423 |
+
def test_getitem_boolean(self, string_series):
|
424 |
+
ser = string_series
|
425 |
+
mask = ser > ser.median()
|
426 |
+
|
427 |
+
# passing list is OK
|
428 |
+
result = ser[list(mask)]
|
429 |
+
expected = ser[mask]
|
430 |
+
tm.assert_series_equal(result, expected)
|
431 |
+
tm.assert_index_equal(result.index, ser.index[mask])
|
432 |
+
|
433 |
+
def test_getitem_boolean_empty(self):
|
434 |
+
ser = Series([], dtype=np.int64)
|
435 |
+
ser.index.name = "index_name"
|
436 |
+
ser = ser[ser.isna()]
|
437 |
+
assert ser.index.name == "index_name"
|
438 |
+
assert ser.dtype == np.int64
|
439 |
+
|
440 |
+
# GH#5877
|
441 |
+
# indexing with empty series
|
442 |
+
ser = Series(["A", "B"], dtype=object)
|
443 |
+
expected = Series(dtype=object, index=Index([], dtype="int64"))
|
444 |
+
result = ser[Series([], dtype=object)]
|
445 |
+
tm.assert_series_equal(result, expected)
|
446 |
+
|
447 |
+
# invalid because of the boolean indexer
|
448 |
+
# that's empty or not-aligned
|
449 |
+
msg = (
|
450 |
+
r"Unalignable boolean Series provided as indexer \(index of "
|
451 |
+
r"the boolean Series and of the indexed object do not match"
|
452 |
+
)
|
453 |
+
with pytest.raises(IndexingError, match=msg):
|
454 |
+
ser[Series([], dtype=bool)]
|
455 |
+
|
456 |
+
with pytest.raises(IndexingError, match=msg):
|
457 |
+
ser[Series([True], dtype=bool)]
|
458 |
+
|
459 |
+
def test_getitem_boolean_object(self, string_series):
|
460 |
+
# using column from DataFrame
|
461 |
+
|
462 |
+
ser = string_series
|
463 |
+
mask = ser > ser.median()
|
464 |
+
omask = mask.astype(object)
|
465 |
+
|
466 |
+
# getitem
|
467 |
+
result = ser[omask]
|
468 |
+
expected = ser[mask]
|
469 |
+
tm.assert_series_equal(result, expected)
|
470 |
+
|
471 |
+
# setitem
|
472 |
+
s2 = ser.copy()
|
473 |
+
cop = ser.copy()
|
474 |
+
cop[omask] = 5
|
475 |
+
s2[mask] = 5
|
476 |
+
tm.assert_series_equal(cop, s2)
|
477 |
+
|
478 |
+
# nans raise exception
|
479 |
+
omask[5:10] = np.nan
|
480 |
+
msg = "Cannot mask with non-boolean array containing NA / NaN values"
|
481 |
+
with pytest.raises(ValueError, match=msg):
|
482 |
+
ser[omask]
|
483 |
+
with pytest.raises(ValueError, match=msg):
|
484 |
+
ser[omask] = 5
|
485 |
+
|
486 |
+
def test_getitem_boolean_dt64_copies(self):
|
487 |
+
# GH#36210
|
488 |
+
dti = date_range("2016-01-01", periods=4, tz="US/Pacific")
|
489 |
+
key = np.array([True, True, False, False])
|
490 |
+
|
491 |
+
ser = Series(dti._data)
|
492 |
+
|
493 |
+
res = ser[key]
|
494 |
+
assert res._values._ndarray.base is None
|
495 |
+
|
496 |
+
# compare with numeric case for reference
|
497 |
+
ser2 = Series(range(4))
|
498 |
+
res2 = ser2[key]
|
499 |
+
assert res2._values.base is None
|
500 |
+
|
501 |
+
def test_getitem_boolean_corner(self, datetime_series):
|
502 |
+
ts = datetime_series
|
503 |
+
mask_shifted = ts.shift(1, freq=BDay()) > ts.median()
|
504 |
+
|
505 |
+
msg = (
|
506 |
+
r"Unalignable boolean Series provided as indexer \(index of "
|
507 |
+
r"the boolean Series and of the indexed object do not match"
|
508 |
+
)
|
509 |
+
with pytest.raises(IndexingError, match=msg):
|
510 |
+
ts[mask_shifted]
|
511 |
+
|
512 |
+
with pytest.raises(IndexingError, match=msg):
|
513 |
+
ts.loc[mask_shifted]
|
514 |
+
|
515 |
+
def test_getitem_boolean_different_order(self, string_series):
|
516 |
+
ordered = string_series.sort_values()
|
517 |
+
|
518 |
+
sel = string_series[ordered > 0]
|
519 |
+
exp = string_series[string_series > 0]
|
520 |
+
tm.assert_series_equal(sel, exp)
|
521 |
+
|
522 |
+
def test_getitem_boolean_contiguous_preserve_freq(self):
|
523 |
+
rng = date_range("1/1/2000", "3/1/2000", freq="B")
|
524 |
+
|
525 |
+
mask = np.zeros(len(rng), dtype=bool)
|
526 |
+
mask[10:20] = True
|
527 |
+
|
528 |
+
masked = rng[mask]
|
529 |
+
expected = rng[10:20]
|
530 |
+
assert expected.freq == rng.freq
|
531 |
+
tm.assert_index_equal(masked, expected)
|
532 |
+
|
533 |
+
mask[22] = True
|
534 |
+
masked = rng[mask]
|
535 |
+
assert masked.freq is None
|
536 |
+
|
537 |
+
|
538 |
+
class TestGetitemCallable:
    def test_getitem_callable(self):
        # GH#12533: a callable key is evaluated against the Series and the
        # value it returns is used as the actual indexer.
        obj = Series(4, index=list("ABCD"))

        # callable returning a scalar label
        assert obj[lambda x: "A"] == obj.loc["A"]

        # callable returning a list of labels
        tm.assert_series_equal(obj[lambda x: ["A", "B"]], obj.loc[["A", "B"]])

        # callable returning a boolean mask
        tm.assert_series_equal(
            obj[lambda x: [True, False, True, True]], obj.iloc[[0, 2, 3]]
        )
|
552 |
+
|
553 |
+
|
554 |
+
def test_getitem_generator(string_series):
|
555 |
+
gen = (x > 0 for x in string_series)
|
556 |
+
result = string_series[gen]
|
557 |
+
result2 = string_series[iter(string_series > 0)]
|
558 |
+
expected = string_series[string_series > 0]
|
559 |
+
tm.assert_series_equal(result, expected)
|
560 |
+
tm.assert_series_equal(result2, expected)
|
561 |
+
|
562 |
+
|
563 |
+
@pytest.mark.parametrize(
|
564 |
+
"series",
|
565 |
+
[
|
566 |
+
Series([0, 1]),
|
567 |
+
Series(date_range("2012-01-01", periods=2)),
|
568 |
+
Series(date_range("2012-01-01", periods=2, tz="CET")),
|
569 |
+
],
|
570 |
+
)
|
571 |
+
def test_getitem_ndim_deprecated(series):
|
572 |
+
with pytest.raises(ValueError, match="Multi-dimensional indexing"):
|
573 |
+
series[:, None]
|
574 |
+
|
575 |
+
|
576 |
+
def test_getitem_multilevel_scalar_slice_not_implemented(
|
577 |
+
multiindex_year_month_day_dataframe_random_data,
|
578 |
+
):
|
579 |
+
# not implementing this for now
|
580 |
+
df = multiindex_year_month_day_dataframe_random_data
|
581 |
+
ser = df["A"]
|
582 |
+
|
583 |
+
msg = r"\(2000, slice\(3, 4, None\)\)"
|
584 |
+
with pytest.raises(TypeError, match=msg):
|
585 |
+
ser[2000, 3:4]
|
586 |
+
|
587 |
+
|
588 |
+
def test_getitem_dataframe_raises():
    """Indexing a Series with a DataFrame key is rejected with a helpful message."""
    rng = list(range(10))
    ser = Series(10, index=rng)
    df = DataFrame(rng, index=rng)
    msg = (
        "Indexing a Series with DataFrame is not supported, "
        "use the appropriate DataFrame column"
    )
    with pytest.raises(TypeError, match=msg):
        # a boolean DataFrame is still a DataFrame key
        ser[df > 5]
|
598 |
+
|
599 |
+
|
600 |
+
def test_getitem_assignment_series_alignment():
    # https://github.com/pandas-dev/pandas/issues/37427
    # __setitem__ with an array key assigns the RHS Series values in order,
    # without aligning the RHS on its index first.
    ser = Series(range(10))
    positions = np.array([2, 4, 9])
    ser[positions] = Series([10, 11, 12])
    tm.assert_series_equal(ser, Series([0, 1, 10, 3, 11, 5, 6, 7, 8, 12]))
|
608 |
+
|
609 |
+
|
610 |
+
def test_getitem_duplicate_index_mistyped_key_raises_keyerror():
|
611 |
+
# GH#29189 float_index.get_loc(None) should raise KeyError, not TypeError
|
612 |
+
ser = Series([2, 5, 6, 8], index=[2.0, 4.0, 4.0, 5.0])
|
613 |
+
with pytest.raises(KeyError, match="None"):
|
614 |
+
ser[None]
|
615 |
+
|
616 |
+
with pytest.raises(KeyError, match="None"):
|
617 |
+
ser.index.get_loc(None)
|
618 |
+
|
619 |
+
with pytest.raises(KeyError, match="None"):
|
620 |
+
ser.index._engine.get_loc(None)
|
621 |
+
|
622 |
+
|
623 |
+
def test_getitem_1tuple_slice_without_multiindex():
    """A 1-tuple containing a slice is unwrapped and treated as that slice."""
    ser = Series(range(5))
    sl = slice(3)

    tm.assert_series_equal(ser[(sl,)], ser[sl])
|
630 |
+
|
631 |
+
|
632 |
+
def test_getitem_preserve_name(datetime_series):
|
633 |
+
result = datetime_series[datetime_series > 0]
|
634 |
+
assert result.name == datetime_series.name
|
635 |
+
|
636 |
+
msg = "Series.__getitem__ treating keys as positions is deprecated"
|
637 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
638 |
+
result = datetime_series[[0, 2, 4]]
|
639 |
+
assert result.name == datetime_series.name
|
640 |
+
|
641 |
+
result = datetime_series[5:10]
|
642 |
+
assert result.name == datetime_series.name
|
643 |
+
|
644 |
+
|
645 |
+
def test_getitem_with_integer_labels():
|
646 |
+
# integer indexes, be careful
|
647 |
+
ser = Series(
|
648 |
+
np.random.default_rng(2).standard_normal(10), index=list(range(0, 20, 2))
|
649 |
+
)
|
650 |
+
inds = [0, 2, 5, 7, 8]
|
651 |
+
arr_inds = np.array([0, 2, 5, 7, 8])
|
652 |
+
with pytest.raises(KeyError, match="not in index"):
|
653 |
+
ser[inds]
|
654 |
+
|
655 |
+
with pytest.raises(KeyError, match="not in index"):
|
656 |
+
ser[arr_inds]
|
657 |
+
|
658 |
+
|
659 |
+
def test_getitem_missing(datetime_series):
|
660 |
+
# missing
|
661 |
+
d = datetime_series.index[0] - BDay()
|
662 |
+
msg = r"Timestamp\('1999-12-31 00:00:00'\)"
|
663 |
+
with pytest.raises(KeyError, match=msg):
|
664 |
+
datetime_series[d]
|
665 |
+
|
666 |
+
|
667 |
+
def test_getitem_fancy(string_series, object_series):
|
668 |
+
msg = "Series.__getitem__ treating keys as positions is deprecated"
|
669 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
670 |
+
slice1 = string_series[[1, 2, 3]]
|
671 |
+
slice2 = object_series[[1, 2, 3]]
|
672 |
+
assert string_series.index[2] == slice1.index[1]
|
673 |
+
assert object_series.index[2] == slice2.index[1]
|
674 |
+
assert string_series.iloc[2] == slice1.iloc[1]
|
675 |
+
assert object_series.iloc[2] == slice2.iloc[1]
|
676 |
+
|
677 |
+
|
678 |
+
def test_getitem_box_float64(datetime_series):
|
679 |
+
msg = "Series.__getitem__ treating keys as positions is deprecated"
|
680 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
681 |
+
value = datetime_series[5]
|
682 |
+
assert isinstance(value, np.float64)
|
683 |
+
|
684 |
+
|
685 |
+
def test_getitem_unordered_dup():
|
686 |
+
obj = Series(range(5), index=["c", "a", "a", "b", "b"])
|
687 |
+
assert is_scalar(obj["c"])
|
688 |
+
assert obj["c"] == 0
|
689 |
+
|
690 |
+
|
691 |
+
def test_getitem_dups():
    """A duplicated label returns every matching row as a Series."""
    ser = Series(range(5), index=["A", "A", "B", "C", "C"], dtype=np.int64)
    result = ser["C"]
    tm.assert_series_equal(
        result, Series([3, 4], index=["C", "C"], dtype=np.int64)
    )
|
696 |
+
|
697 |
+
|
698 |
+
def test_getitem_categorical_str():
    # GH#31765: a string label against a categorical index selects every
    # occurrence of that category
    ser = Series(range(5), index=Categorical(["a", "b", "c", "a", "b"]))
    tm.assert_series_equal(ser["a"], ser.iloc[[0, 3]])
|
704 |
+
|
705 |
+
|
706 |
+
def test_slice_can_reorder_not_uniquely_indexed():
    """A negative-step slice must not raise on a non-unique index."""
    obj = Series(1, index=["a", "a", "b", "b", "c"])
    obj[::-1]  # smoke check: reordering with duplicate labels present
|
709 |
+
|
710 |
+
|
711 |
+
@pytest.mark.parametrize("index_vals", ["aabcd", "aadcb"])
|
712 |
+
def test_duplicated_index_getitem_positional_indexer(index_vals):
|
713 |
+
# GH 11747
|
714 |
+
s = Series(range(5), index=list(index_vals))
|
715 |
+
|
716 |
+
msg = "Series.__getitem__ treating keys as positions is deprecated"
|
717 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
718 |
+
result = s[3]
|
719 |
+
assert result == 3
|
720 |
+
|
721 |
+
|
722 |
+
class TestGetitemDeprecatedIndexers:
    """Dict and set keys are rejected outright (GH#42825, enforced in 2.0)."""

    @pytest.mark.parametrize("key", [{1}, {1: 1}])
    def test_getitem_dict_and_set_deprecated(self, key):
        # GH#42825 enforced in 2.0: reading through a dict/set key raises
        ser = Series([1, 2, 3])
        with pytest.raises(TypeError, match="as an indexer is not supported"):
            ser[key]

    @pytest.mark.parametrize("key", [{1}, {1: 1}])
    def test_setitem_dict_and_set_disallowed(self, key):
        # GH#42825 enforced in 2.0: writing through a dict/set key raises
        ser = Series([1, 2, 3])
        with pytest.raises(TypeError, match="as an indexer is not supported"):
            ser[key] = 1
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_indexing.py
ADDED
@@ -0,0 +1,518 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
""" test get/set & misc """
|
2 |
+
from datetime import timedelta
|
3 |
+
import re
|
4 |
+
|
5 |
+
import numpy as np
|
6 |
+
import pytest
|
7 |
+
|
8 |
+
from pandas.errors import IndexingError
|
9 |
+
|
10 |
+
from pandas import (
|
11 |
+
NA,
|
12 |
+
DataFrame,
|
13 |
+
Index,
|
14 |
+
IndexSlice,
|
15 |
+
MultiIndex,
|
16 |
+
NaT,
|
17 |
+
Series,
|
18 |
+
Timedelta,
|
19 |
+
Timestamp,
|
20 |
+
concat,
|
21 |
+
date_range,
|
22 |
+
isna,
|
23 |
+
period_range,
|
24 |
+
timedelta_range,
|
25 |
+
)
|
26 |
+
import pandas._testing as tm
|
27 |
+
|
28 |
+
|
29 |
+
def test_basic_indexing():
|
30 |
+
s = Series(
|
31 |
+
np.random.default_rng(2).standard_normal(5), index=["a", "b", "a", "a", "b"]
|
32 |
+
)
|
33 |
+
|
34 |
+
warn_msg = "Series.__[sg]etitem__ treating keys as positions is deprecated"
|
35 |
+
msg = "index 5 is out of bounds for axis 0 with size 5"
|
36 |
+
with pytest.raises(IndexError, match=msg):
|
37 |
+
with tm.assert_produces_warning(FutureWarning, match=warn_msg):
|
38 |
+
s[5]
|
39 |
+
with pytest.raises(IndexError, match=msg):
|
40 |
+
with tm.assert_produces_warning(FutureWarning, match=warn_msg):
|
41 |
+
s[5] = 0
|
42 |
+
|
43 |
+
with pytest.raises(KeyError, match=r"^'c'$"):
|
44 |
+
s["c"]
|
45 |
+
|
46 |
+
s = s.sort_index()
|
47 |
+
|
48 |
+
with pytest.raises(IndexError, match=msg):
|
49 |
+
with tm.assert_produces_warning(FutureWarning, match=warn_msg):
|
50 |
+
s[5]
|
51 |
+
msg = r"index 5 is out of bounds for axis (0|1) with size 5|^5$"
|
52 |
+
with pytest.raises(IndexError, match=msg):
|
53 |
+
with tm.assert_produces_warning(FutureWarning, match=warn_msg):
|
54 |
+
s[5] = 0
|
55 |
+
|
56 |
+
|
57 |
+
def test_getitem_numeric_should_not_fallback_to_positional(any_numeric_dtype):
|
58 |
+
# GH51053
|
59 |
+
dtype = any_numeric_dtype
|
60 |
+
idx = Index([1, 0, 1], dtype=dtype)
|
61 |
+
ser = Series(range(3), index=idx)
|
62 |
+
result = ser[1]
|
63 |
+
expected = Series([0, 2], index=Index([1, 1], dtype=dtype))
|
64 |
+
tm.assert_series_equal(result, expected, check_exact=True)
|
65 |
+
|
66 |
+
|
67 |
+
def test_setitem_numeric_should_not_fallback_to_positional(any_numeric_dtype):
|
68 |
+
# GH51053
|
69 |
+
dtype = any_numeric_dtype
|
70 |
+
idx = Index([1, 0, 1], dtype=dtype)
|
71 |
+
ser = Series(range(3), index=idx)
|
72 |
+
ser[1] = 10
|
73 |
+
expected = Series([10, 1, 10], index=idx)
|
74 |
+
tm.assert_series_equal(ser, expected, check_exact=True)
|
75 |
+
|
76 |
+
|
77 |
+
def test_basic_getitem_with_labels(datetime_series):
|
78 |
+
indices = datetime_series.index[[5, 10, 15]]
|
79 |
+
|
80 |
+
result = datetime_series[indices]
|
81 |
+
expected = datetime_series.reindex(indices)
|
82 |
+
tm.assert_series_equal(result, expected)
|
83 |
+
|
84 |
+
result = datetime_series[indices[0] : indices[2]]
|
85 |
+
expected = datetime_series.loc[indices[0] : indices[2]]
|
86 |
+
tm.assert_series_equal(result, expected)
|
87 |
+
|
88 |
+
|
89 |
+
def test_basic_getitem_dt64tz_values():
    # GH12089: tz-aware values; scalar lookup boxes to a tz-aware Timestamp
    ser = Series(
        date_range("2011-01-01", periods=3, tz="US/Eastern"), index=["a", "b", "c"]
    )
    expected = Timestamp("2011-01-01", tz="US/Eastern")

    # label-based, positional and plain [] access all agree
    assert ser.loc["a"] == expected
    assert ser.iloc[0] == expected
    assert ser["a"] == expected
|
102 |
+
|
103 |
+
|
104 |
+
def test_getitem_setitem_ellipsis(using_copy_on_write, warn_copy_on_write):
|
105 |
+
s = Series(np.random.default_rng(2).standard_normal(10))
|
106 |
+
|
107 |
+
result = s[...]
|
108 |
+
tm.assert_series_equal(result, s)
|
109 |
+
|
110 |
+
with tm.assert_cow_warning(warn_copy_on_write):
|
111 |
+
s[...] = 5
|
112 |
+
if not using_copy_on_write:
|
113 |
+
assert (result == 5).all()
|
114 |
+
|
115 |
+
|
116 |
+
@pytest.mark.parametrize(
|
117 |
+
"result_1, duplicate_item, expected_1",
|
118 |
+
[
|
119 |
+
[
|
120 |
+
Series({1: 12, 2: [1, 2, 2, 3]}),
|
121 |
+
Series({1: 313}),
|
122 |
+
Series({1: 12}, dtype=object),
|
123 |
+
],
|
124 |
+
[
|
125 |
+
Series({1: [1, 2, 3], 2: [1, 2, 2, 3]}),
|
126 |
+
Series({1: [1, 2, 3]}),
|
127 |
+
Series({1: [1, 2, 3]}),
|
128 |
+
],
|
129 |
+
],
|
130 |
+
)
|
131 |
+
def test_getitem_with_duplicates_indices(result_1, duplicate_item, expected_1):
|
132 |
+
# GH 17610
|
133 |
+
result = result_1._append(duplicate_item)
|
134 |
+
expected = expected_1._append(duplicate_item)
|
135 |
+
tm.assert_series_equal(result[1], expected)
|
136 |
+
assert result[2] == result_1[2]
|
137 |
+
|
138 |
+
|
139 |
+
def test_getitem_setitem_integers():
    # caused bug without test: positional write must be visible via the label
    obj = Series([1, 2, 3], ["a", "b", "c"])

    assert obj.iloc[0] == obj["a"]
    obj.iloc[0] = 5
    tm.assert_almost_equal(obj["a"], 5)
|
146 |
+
|
147 |
+
|
148 |
+
def test_series_box_timestamp():
|
149 |
+
rng = date_range("20090415", "20090519", freq="B")
|
150 |
+
ser = Series(rng)
|
151 |
+
assert isinstance(ser[0], Timestamp)
|
152 |
+
assert isinstance(ser.at[1], Timestamp)
|
153 |
+
assert isinstance(ser.iat[2], Timestamp)
|
154 |
+
assert isinstance(ser.loc[3], Timestamp)
|
155 |
+
assert isinstance(ser.iloc[4], Timestamp)
|
156 |
+
|
157 |
+
ser = Series(rng, index=rng)
|
158 |
+
msg = "Series.__getitem__ treating keys as positions is deprecated"
|
159 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
160 |
+
assert isinstance(ser[0], Timestamp)
|
161 |
+
assert isinstance(ser.at[rng[1]], Timestamp)
|
162 |
+
assert isinstance(ser.iat[2], Timestamp)
|
163 |
+
assert isinstance(ser.loc[rng[3]], Timestamp)
|
164 |
+
assert isinstance(ser.iloc[4], Timestamp)
|
165 |
+
|
166 |
+
|
167 |
+
def test_series_box_timedelta():
|
168 |
+
rng = timedelta_range("1 day 1 s", periods=5, freq="h")
|
169 |
+
ser = Series(rng)
|
170 |
+
assert isinstance(ser[0], Timedelta)
|
171 |
+
assert isinstance(ser.at[1], Timedelta)
|
172 |
+
assert isinstance(ser.iat[2], Timedelta)
|
173 |
+
assert isinstance(ser.loc[3], Timedelta)
|
174 |
+
assert isinstance(ser.iloc[4], Timedelta)
|
175 |
+
|
176 |
+
|
177 |
+
def test_getitem_ambiguous_keyerror(indexer_sl):
|
178 |
+
ser = Series(range(10), index=list(range(0, 20, 2)))
|
179 |
+
with pytest.raises(KeyError, match=r"^1$"):
|
180 |
+
indexer_sl(ser)[1]
|
181 |
+
|
182 |
+
|
183 |
+
def test_getitem_dups_with_missing(indexer_sl):
|
184 |
+
# breaks reindex, so need to use .loc internally
|
185 |
+
# GH 4246
|
186 |
+
ser = Series([1, 2, 3, 4], ["foo", "bar", "foo", "bah"])
|
187 |
+
with pytest.raises(KeyError, match=re.escape("['bam'] not in index")):
|
188 |
+
indexer_sl(ser)[["foo", "bar", "bah", "bam"]]
|
189 |
+
|
190 |
+
|
191 |
+
def test_setitem_ambiguous_keyerror(indexer_sl):
|
192 |
+
s = Series(range(10), index=list(range(0, 20, 2)))
|
193 |
+
|
194 |
+
# equivalent of an append
|
195 |
+
s2 = s.copy()
|
196 |
+
indexer_sl(s2)[1] = 5
|
197 |
+
expected = concat([s, Series([5], index=[1])])
|
198 |
+
tm.assert_series_equal(s2, expected)
|
199 |
+
|
200 |
+
|
201 |
+
def test_setitem(datetime_series):
|
202 |
+
datetime_series[datetime_series.index[5]] = np.nan
|
203 |
+
datetime_series.iloc[[1, 2, 17]] = np.nan
|
204 |
+
datetime_series.iloc[6] = np.nan
|
205 |
+
assert np.isnan(datetime_series.iloc[6])
|
206 |
+
assert np.isnan(datetime_series.iloc[2])
|
207 |
+
datetime_series[np.isnan(datetime_series)] = 5
|
208 |
+
assert not np.isnan(datetime_series.iloc[2])
|
209 |
+
|
210 |
+
|
211 |
+
def test_setslice(datetime_series):
|
212 |
+
sl = datetime_series[5:20]
|
213 |
+
assert len(sl) == len(sl.index)
|
214 |
+
assert sl.index.is_unique is True
|
215 |
+
|
216 |
+
|
217 |
+
def test_basic_getitem_setitem_corner(datetime_series):
|
218 |
+
# invalid tuples, e.g. td.ts[:, None] vs. td.ts[:, 2]
|
219 |
+
msg = "key of type tuple not found and not a MultiIndex"
|
220 |
+
with pytest.raises(KeyError, match=msg):
|
221 |
+
datetime_series[:, 2]
|
222 |
+
with pytest.raises(KeyError, match=msg):
|
223 |
+
datetime_series[:, 2] = 2
|
224 |
+
|
225 |
+
# weird lists. [slice(0, 5)] raises but not two slices
|
226 |
+
msg = "Indexing with a single-item list"
|
227 |
+
with pytest.raises(ValueError, match=msg):
|
228 |
+
# GH#31299
|
229 |
+
datetime_series[[slice(None, 5)]]
|
230 |
+
|
231 |
+
# but we're OK with a single-element tuple
|
232 |
+
result = datetime_series[(slice(None, 5),)]
|
233 |
+
expected = datetime_series[:5]
|
234 |
+
tm.assert_series_equal(result, expected)
|
235 |
+
|
236 |
+
# OK
|
237 |
+
msg = r"unhashable type(: 'slice')?"
|
238 |
+
with pytest.raises(TypeError, match=msg):
|
239 |
+
datetime_series[[5, [None, None]]]
|
240 |
+
with pytest.raises(TypeError, match=msg):
|
241 |
+
datetime_series[[5, [None, None]]] = 2
|
242 |
+
|
243 |
+
|
244 |
+
def test_slice(string_series, object_series, using_copy_on_write, warn_copy_on_write):
|
245 |
+
original = string_series.copy()
|
246 |
+
numSlice = string_series[10:20]
|
247 |
+
numSliceEnd = string_series[-10:]
|
248 |
+
objSlice = object_series[10:20]
|
249 |
+
|
250 |
+
assert string_series.index[9] not in numSlice.index
|
251 |
+
assert object_series.index[9] not in objSlice.index
|
252 |
+
|
253 |
+
assert len(numSlice) == len(numSlice.index)
|
254 |
+
assert string_series[numSlice.index[0]] == numSlice[numSlice.index[0]]
|
255 |
+
|
256 |
+
assert numSlice.index[1] == string_series.index[11]
|
257 |
+
tm.assert_numpy_array_equal(np.array(numSliceEnd), np.array(string_series)[-10:])
|
258 |
+
|
259 |
+
# Test return view.
|
260 |
+
sl = string_series[10:20]
|
261 |
+
with tm.assert_cow_warning(warn_copy_on_write):
|
262 |
+
sl[:] = 0
|
263 |
+
|
264 |
+
if using_copy_on_write:
|
265 |
+
# Doesn't modify parent (CoW)
|
266 |
+
tm.assert_series_equal(string_series, original)
|
267 |
+
else:
|
268 |
+
assert (string_series[10:20] == 0).all()
|
269 |
+
|
270 |
+
|
271 |
+
def test_timedelta_assignment():
    # GH 8209: enlarging .loc assignment with a timedelta coerces the
    # (initially object) Series to timedelta64
    ser = Series([], dtype=object)
    ser.loc["B"] = timedelta(1)
    tm.assert_series_equal(ser, Series(Timedelta("1 days"), index=["B"]))

    # reindex introduces a missing slot ahead of the existing value
    ser = ser.reindex(ser.index.insert(0, "A"))
    tm.assert_series_equal(
        ser, Series([np.nan, Timedelta("1 days")], index=["A", "B"])
    )

    # filling the hole restores a fully timedelta Series
    ser.loc["A"] = timedelta(1)
    tm.assert_series_equal(ser, Series(Timedelta("1 days"), index=["A", "B"]))
|
283 |
+
|
284 |
+
|
285 |
+
def test_underlying_data_conversion(using_copy_on_write):
|
286 |
+
# GH 4080
|
287 |
+
df = DataFrame({c: [1, 2, 3] for c in ["a", "b", "c"]})
|
288 |
+
return_value = df.set_index(["a", "b", "c"], inplace=True)
|
289 |
+
assert return_value is None
|
290 |
+
s = Series([1], index=[(2, 2, 2)])
|
291 |
+
df["val"] = 0
|
292 |
+
df_original = df.copy()
|
293 |
+
df
|
294 |
+
|
295 |
+
if using_copy_on_write:
|
296 |
+
with tm.raises_chained_assignment_error():
|
297 |
+
df["val"].update(s)
|
298 |
+
expected = df_original
|
299 |
+
else:
|
300 |
+
with tm.assert_produces_warning(FutureWarning, match="inplace method"):
|
301 |
+
df["val"].update(s)
|
302 |
+
expected = DataFrame(
|
303 |
+
{"a": [1, 2, 3], "b": [1, 2, 3], "c": [1, 2, 3], "val": [0, 1, 0]}
|
304 |
+
)
|
305 |
+
return_value = expected.set_index(["a", "b", "c"], inplace=True)
|
306 |
+
assert return_value is None
|
307 |
+
tm.assert_frame_equal(df, expected)
|
308 |
+
|
309 |
+
|
310 |
+
def test_preserve_refs(datetime_series):
|
311 |
+
seq = datetime_series.iloc[[5, 10, 15]]
|
312 |
+
seq.iloc[1] = np.nan
|
313 |
+
assert not np.isnan(datetime_series.iloc[10])
|
314 |
+
|
315 |
+
|
316 |
+
def test_multilevel_preserve_name(lexsorted_two_level_string_multiindex, indexer_sl):
|
317 |
+
index = lexsorted_two_level_string_multiindex
|
318 |
+
ser = Series(
|
319 |
+
np.random.default_rng(2).standard_normal(len(index)), index=index, name="sth"
|
320 |
+
)
|
321 |
+
|
322 |
+
result = indexer_sl(ser)["foo"]
|
323 |
+
assert result.name == ser.name
|
324 |
+
|
325 |
+
|
326 |
+
# miscellaneous methods
|
327 |
+
|
328 |
+
|
329 |
+
@pytest.mark.parametrize(
|
330 |
+
"index",
|
331 |
+
[
|
332 |
+
date_range("2014-01-01", periods=20, freq="MS"),
|
333 |
+
period_range("2014-01", periods=20, freq="M"),
|
334 |
+
timedelta_range("0", periods=20, freq="h"),
|
335 |
+
],
|
336 |
+
)
|
337 |
+
def test_slice_with_negative_step(index):
|
338 |
+
keystr1 = str(index[9])
|
339 |
+
keystr2 = str(index[13])
|
340 |
+
|
341 |
+
ser = Series(np.arange(20), index)
|
342 |
+
SLC = IndexSlice
|
343 |
+
|
344 |
+
for key in [keystr1, index[9]]:
|
345 |
+
tm.assert_indexing_slices_equivalent(ser, SLC[key::-1], SLC[9::-1])
|
346 |
+
tm.assert_indexing_slices_equivalent(ser, SLC[:key:-1], SLC[:8:-1])
|
347 |
+
|
348 |
+
for key2 in [keystr2, index[13]]:
|
349 |
+
tm.assert_indexing_slices_equivalent(ser, SLC[key2:key:-1], SLC[13:8:-1])
|
350 |
+
tm.assert_indexing_slices_equivalent(ser, SLC[key:key2:-1], SLC[0:0:-1])
|
351 |
+
|
352 |
+
|
353 |
+
def test_tuple_index():
    # GH 35534 - Selecting values when a Series has an Index of tuples
    ser = Series([1, 2], index=[("a",), ("b",)])
    assert ser[("a",)] == 1
    assert ser[("b",)] == 2

    # writing through a tuple label round-trips
    ser[("b",)] = 3
    assert ser[("b",)] == 3
|
360 |
+
|
361 |
+
|
362 |
+
def test_frozenset_index():
    # GH35747 - Selecting values when a Series has an Index of frozenset
    key_a, key_b = frozenset("a"), frozenset("b")
    ser = Series([1, 2], index=[key_a, key_b])
    assert ser[key_a] == 1
    assert ser[key_b] == 2

    # writing through a frozenset label round-trips
    ser[key_b] = 3
    assert ser[key_b] == 3
|
370 |
+
|
371 |
+
|
372 |
+
def test_loc_setitem_all_false_indexer():
    # GH#45778: an all-False boolean .loc assignment is a no-op
    ser = Series([1, 2], index=["a", "b"])
    before = ser.copy()
    ser.loc[ser > 100] = Series([6, 7], index=["a", "b"])
    tm.assert_series_equal(ser, before)
|
379 |
+
|
380 |
+
|
381 |
+
def test_loc_boolean_indexer_non_matching_index():
    """GH#46551: NA entries in an alignable boolean mask act as False."""
    ser = Series([1])
    mask = Series([NA, False], dtype="boolean")
    result = ser.loc[mask]
    tm.assert_series_equal(result, Series([], dtype="int64"))
|
387 |
+
|
388 |
+
|
389 |
+
def test_loc_boolean_indexer_miss_matching_index():
    """GH#46551: a boolean mask whose index cannot be aligned must raise."""
    ser = Series([1])
    mask = Series([NA, False], dtype="boolean", index=[1, 2])
    with pytest.raises(IndexingError, match="Unalignable"):
        ser.loc[mask]
|
395 |
+
|
396 |
+
|
397 |
+
def test_loc_setitem_nested_data_enlargement():
    """GH#48614: enlarging .loc setitem works with a DataFrame as the value."""
    inner = DataFrame({"a": [1]})
    ser = Series({"label": inner})
    ser.loc["new_label"] = inner
    tm.assert_series_equal(ser, Series({"label": inner, "new_label": inner}))
|
404 |
+
|
405 |
+
|
406 |
+
def test_loc_ea_numeric_index_oob_slice_end():
    """GH#50161: .loc slice whose end lies past the last EA-index label."""
    ser = Series(1, index=Index([0, 1, 2], dtype="Int64"))
    result = ser.loc[2:3]  # 3 is out of bounds; slice clips at the end
    tm.assert_series_equal(result, Series(1, index=Index([2], dtype="Int64")))
|
412 |
+
|
413 |
+
|
414 |
+
def test_getitem_bool_int_key():
    """GH#48653: integer 0 is not treated as the label False by .loc."""
    ser = Series({True: 1, False: 0})
    with pytest.raises(KeyError, match="0"):
        ser.loc[0]
|
419 |
+
|
420 |
+
|
421 |
+
@pytest.mark.parametrize("val", [{}, {"b": "x"}])
@pytest.mark.parametrize("indexer", [[], [False, False], slice(0, -1), np.array([])])
def test_setitem_empty_indexer(indexer, val):
    """GH#45981: assignment through an empty .loc indexer leaves data intact."""
    df = DataFrame({"a": [1, 2], **val})
    unchanged = df.copy()
    df.loc[indexer] = 1.5
    tm.assert_frame_equal(df, unchanged)
|
429 |
+
|
430 |
+
|
431 |
+
class TestDeprecatedIndexers:
    """GH#42825 (enforced in 2.0): dict and set keys are invalid .loc indexers."""

    @staticmethod
    def _flat_ser():
        # fresh flat-indexed Series for each case
        return Series([1, 2])

    @staticmethod
    def _mi_ser():
        # fresh MultiIndex-backed Series for each case
        return Series([1, 2], index=MultiIndex.from_tuples([(1, 2), (3, 4)]))

    @pytest.mark.parametrize("key", [{1}, {1: 1}])
    def test_getitem_dict_and_set_deprecated(self, key):
        # GH#42825 enforced in 2.0
        with pytest.raises(TypeError, match="as an indexer is not supported"):
            self._flat_ser().loc[key]

    @pytest.mark.parametrize("key", [{1}, {1: 1}, ({1}, 2), ({1: 1}, 2)])
    def test_getitem_dict_and_set_deprecated_multiindex(self, key):
        # GH#42825 enforced in 2.0
        with pytest.raises(TypeError, match="as an indexer is not supported"):
            self._mi_ser().loc[key]

    @pytest.mark.parametrize("key", [{1}, {1: 1}])
    def test_setitem_dict_and_set_disallowed(self, key):
        # GH#42825 enforced in 2.0
        with pytest.raises(TypeError, match="as an indexer is not supported"):
            self._flat_ser().loc[key] = 1

    @pytest.mark.parametrize("key", [{1}, {1: 1}, ({1}, 2), ({1: 1}, 2)])
    def test_setitem_dict_and_set_disallowed_multiindex(self, key):
        # GH#42825 enforced in 2.0
        with pytest.raises(TypeError, match="as an indexer is not supported"):
            self._mi_ser().loc[key] = 1
|
459 |
+
|
460 |
+
|
461 |
+
class TestSetitemValidation:
    """Setting type-incompatible scalars must warn (not raise) before upcasting."""

    # This is adapted from pandas/tests/arrays/masked/test_indexing.py
    # but checks for warnings instead of errors.
    def _check_setitem_invalid(self, ser, invalid, indexer, warn):
        """Assert that assigning ``invalid`` through every setitem path emits ``warn``.

        Exercises ``ser[indexer]``, ``ser.iloc[indexer]``, ``ser.loc[indexer]``
        and full-slice assignment, restoring ``ser`` from a copy between paths.
        """
        msg = "Setting an item of incompatible dtype is deprecated"
        msg = re.escape(msg)

        orig_ser = ser.copy()

        with tm.assert_produces_warning(warn, match=msg):
            ser[indexer] = invalid
        ser = orig_ser.copy()

        with tm.assert_produces_warning(warn, match=msg):
            ser.iloc[indexer] = invalid
        ser = orig_ser.copy()

        with tm.assert_produces_warning(warn, match=msg):
            ser.loc[indexer] = invalid
        ser = orig_ser.copy()

        with tm.assert_produces_warning(warn, match=msg):
            ser[:] = invalid

    # scalars that never fit the tested numeric/bool dtypes
    _invalid_scalars = [
        1 + 2j,
        "True",
        "1",
        "1.0",
        NaT,
        np.datetime64("NaT"),
        np.timedelta64("NaT"),
    ]
    # positional / list / slice / mask / full-slice indexer shapes
    _indexers = [0, [0], slice(0, 1), [True, False, False], slice(None, None, None)]

    @pytest.mark.parametrize(
        "invalid", _invalid_scalars + [1, 1.0, np.int64(1), np.float64(1)]
    )
    @pytest.mark.parametrize("indexer", _indexers)
    def test_setitem_validation_scalar_bool(self, invalid, indexer):
        ser = Series([True, False, False], dtype="bool")
        self._check_setitem_invalid(ser, invalid, indexer, FutureWarning)

    @pytest.mark.parametrize("invalid", _invalid_scalars + [True, 1.5, np.float64(1.5)])
    @pytest.mark.parametrize("indexer", _indexers)
    def test_setitem_validation_scalar_int(self, invalid, any_int_numpy_dtype, indexer):
        ser = Series([1, 2, 3], dtype=any_int_numpy_dtype)
        # NaN-like values that are neither NaT nor a numpy NaT are set without
        # warning; the `is not NaT` check must precede np.isnat, which only
        # accepts datetime64/timedelta64 input.  TODO confirm against caller set.
        if isna(invalid) and invalid is not NaT and not np.isnat(invalid):
            warn = None
        else:
            warn = FutureWarning
        self._check_setitem_invalid(ser, invalid, indexer, warn)

    @pytest.mark.parametrize("invalid", _invalid_scalars + [True])
    @pytest.mark.parametrize("indexer", _indexers)
    def test_setitem_validation_scalar_float(self, invalid, float_numpy_dtype, indexer):
        ser = Series([1, 2, None], dtype=float_numpy_dtype)
        self._check_setitem_invalid(ser, invalid, indexer, FutureWarning)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_setitem.py
ADDED
@@ -0,0 +1,1847 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from datetime import (
|
2 |
+
date,
|
3 |
+
datetime,
|
4 |
+
)
|
5 |
+
from decimal import Decimal
|
6 |
+
|
7 |
+
import numpy as np
|
8 |
+
import pytest
|
9 |
+
|
10 |
+
from pandas.compat.numpy import np_version_gte1p24
|
11 |
+
from pandas.errors import IndexingError
|
12 |
+
|
13 |
+
from pandas.core.dtypes.common import is_list_like
|
14 |
+
|
15 |
+
from pandas import (
|
16 |
+
NA,
|
17 |
+
Categorical,
|
18 |
+
DataFrame,
|
19 |
+
DatetimeIndex,
|
20 |
+
Index,
|
21 |
+
Interval,
|
22 |
+
IntervalIndex,
|
23 |
+
MultiIndex,
|
24 |
+
NaT,
|
25 |
+
Period,
|
26 |
+
Series,
|
27 |
+
Timedelta,
|
28 |
+
Timestamp,
|
29 |
+
array,
|
30 |
+
concat,
|
31 |
+
date_range,
|
32 |
+
interval_range,
|
33 |
+
period_range,
|
34 |
+
timedelta_range,
|
35 |
+
)
|
36 |
+
import pandas._testing as tm
|
37 |
+
|
38 |
+
from pandas.tseries.offsets import BDay
|
39 |
+
|
40 |
+
|
41 |
+
class TestSetitemDT64Values:
    """Setitem behavior for datetime64 / datetime64tz-valued Series."""

    def test_setitem_none_nan(self):
        """None and np.nan both coerce to NaT in a datetime64 Series."""
        series = Series(date_range("1/1/2000", periods=10))
        series[3] = None
        assert series[3] is NaT

        series[3:5] = None
        assert series[4] is NaT

        series[5] = np.nan
        assert series[5] is NaT

        series[5:7] = np.nan
        assert series[6] is NaT

    def test_setitem_multiindex_empty_slice(self):
        # https://github.com/pandas-dev/pandas/issues/35878
        # setting via an empty list indexer must be a no-op
        idx = MultiIndex.from_tuples([("a", 1), ("b", 2)])
        result = Series([1, 2], index=idx)
        expected = result.copy()
        result.loc[[]] = 0
        tm.assert_series_equal(result, expected)

    def test_setitem_with_string_index(self):
        # GH#23451
        # Set object dtype to avoid upcast when setting date.today()
        ser = Series([1, 2, 3], index=["Date", "b", "other"], dtype=object)
        ser["Date"] = date.today()
        assert ser.Date == date.today()
        assert ser["Date"] == date.today()

    def test_setitem_tuple_with_datetimetz_values(self):
        # GH#20441: tuple key must hit the single label, not be unpacked
        arr = date_range("2017", periods=4, tz="US/Eastern")
        index = [(0, 1), (0, 2), (0, 3), (0, 4)]
        result = Series(arr, index=index)
        expected = result.copy()
        result[(0, 1)] = np.nan
        expected.iloc[0] = np.nan
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("tz", ["US/Eastern", "UTC", "Asia/Tokyo"])
    def test_setitem_with_tz(self, tz, indexer_sli):
        """Scalar and vector tz-aware assignment preserves the tz dtype."""
        orig = Series(date_range("2016-01-01", freq="h", periods=3, tz=tz))
        assert orig.dtype == f"datetime64[ns, {tz}]"

        exp = Series(
            [
                Timestamp("2016-01-01 00:00", tz=tz),
                Timestamp("2011-01-01 00:00", tz=tz),
                Timestamp("2016-01-01 02:00", tz=tz),
            ],
            dtype=orig.dtype,
        )

        # scalar
        ser = orig.copy()
        indexer_sli(ser)[1] = Timestamp("2011-01-01", tz=tz)
        tm.assert_series_equal(ser, exp)

        # vector
        vals = Series(
            [Timestamp("2011-01-01", tz=tz), Timestamp("2012-01-01", tz=tz)],
            index=[1, 2],
            dtype=orig.dtype,
        )
        assert vals.dtype == f"datetime64[ns, {tz}]"

        exp = Series(
            [
                Timestamp("2016-01-01 00:00", tz=tz),
                Timestamp("2011-01-01 00:00", tz=tz),
                Timestamp("2012-01-01 00:00", tz=tz),
            ],
            dtype=orig.dtype,
        )

        ser = orig.copy()
        indexer_sli(ser)[[1, 2]] = vals
        tm.assert_series_equal(ser, exp)

    def test_setitem_with_tz_dst(self, indexer_sli):
        # GH#14146 trouble setting values near DST boundary
        tz = "US/Eastern"
        orig = Series(date_range("2016-11-06", freq="h", periods=3, tz=tz))
        assert orig.dtype == f"datetime64[ns, {tz}]"

        exp = Series(
            [
                Timestamp("2016-11-06 00:00-04:00", tz=tz),
                Timestamp("2011-01-01 00:00-05:00", tz=tz),
                Timestamp("2016-11-06 01:00-05:00", tz=tz),
            ],
            dtype=orig.dtype,
        )

        # scalar
        ser = orig.copy()
        indexer_sli(ser)[1] = Timestamp("2011-01-01", tz=tz)
        tm.assert_series_equal(ser, exp)

        # vector
        vals = Series(
            [Timestamp("2011-01-01", tz=tz), Timestamp("2012-01-01", tz=tz)],
            index=[1, 2],
            dtype=orig.dtype,
        )
        assert vals.dtype == f"datetime64[ns, {tz}]"

        exp = Series(
            [
                Timestamp("2016-11-06 00:00", tz=tz),
                Timestamp("2011-01-01 00:00", tz=tz),
                Timestamp("2012-01-01 00:00", tz=tz),
            ],
            dtype=orig.dtype,
        )

        ser = orig.copy()
        indexer_sli(ser)[[1, 2]] = vals
        tm.assert_series_equal(ser, exp)

    def test_object_series_setitem_dt64array_exact_match(self):
        # make sure the dt64 isn't cast by numpy to integers
        # https://github.com/numpy/numpy/issues/12550

        ser = Series({"X": np.nan}, dtype=object)

        indexer = [True]

        # "exact_match" -> size of array being set matches size of ser
        value = np.array([4], dtype="M8[ns]")

        ser.iloc[indexer] = value

        expected = Series([value[0]], index=["X"], dtype=object)
        assert all(isinstance(x, np.datetime64) for x in expected.values)

        tm.assert_series_equal(ser, expected)
|
180 |
+
|
181 |
+
|
182 |
+
class TestSetitemScalarIndexer:
    """Setting single elements via scalar positional/label indexers."""

    def test_setitem_negative_out_of_bounds(self):
        """Out-of-bounds negative positional key must raise IndexError."""
        ser = Series(["a"] * 10, index=["a"] * 10)

        # string index falls back to positional
        msg = "index -11|-1 is out of bounds for axis 0 with size 10"
        warn_msg = "Series.__setitem__ treating keys as positions is deprecated"
        with pytest.raises(IndexError, match=msg):
            with tm.assert_produces_warning(FutureWarning, match=warn_msg):
                ser[-11] = "foo"

    @pytest.mark.parametrize("indexer", [tm.loc, tm.at])
    @pytest.mark.parametrize("ser_index", [0, 1])
    def test_setitem_series_object_dtype(self, indexer, ser_index):
        # GH#38303: into an object Series the whole Series value is stored
        ser = Series([0, 0], dtype="object")
        idxr = indexer(ser)
        idxr[0] = Series([42], index=[ser_index])
        expected = Series([Series([42], index=[ser_index]), 0], dtype="object")
        tm.assert_series_equal(ser, expected)

    @pytest.mark.parametrize("index, exp_value", [(0, 42), (1, np.nan)])
    def test_setitem_series(self, index, exp_value):
        # GH#38303: RHS Series aligns on the index; non-matching label -> NaN
        ser = Series([0, 0])
        ser.loc[0] = Series([42], index=[index])
        expected = Series([exp_value, 0])
        tm.assert_series_equal(ser, expected)
|
210 |
+
|
211 |
+
|
212 |
+
class TestSetitemSlices:
    """Slice-based __setitem__ behavior."""

    def test_setitem_slice_float_raises(self, datetime_series):
        """Float slice bounds on a DatetimeIndex must raise TypeError."""
        msg = (
            "cannot do slice indexing on DatetimeIndex with these indexers "
            r"\[{key}\] of type float"
        )
        with pytest.raises(TypeError, match=msg.format(key=r"4\.0")):
            datetime_series[4.0:10.0] = 0

        with pytest.raises(TypeError, match=msg.format(key=r"4\.5")):
            datetime_series[4.5:10.0] = 0

    def test_setitem_slice(self):
        ser = Series(range(10), index=list(range(10)))
        # out-of-bounds start clips to the full Series
        ser[-12:] = 0
        assert (ser == 0).all()

        # ser[:-12] is empty (start clips to 0), so nothing changes
        ser[:-12] = 5
        assert (ser == 0).all()

    def test_setitem_slice_integers(self):
        # positional slice on a non-default integer index
        ser = Series(
            np.random.default_rng(2).standard_normal(8),
            index=[2, 4, 6, 8, 10, 12, 14, 16],
        )

        ser[:4] = 0
        assert (ser[:4] == 0).all()
        assert not (ser[4:] == 0).any()

    def test_setitem_slicestep(self):
        # caught this bug when writing tests
        series = Series(
            np.arange(20, dtype=np.float64), index=np.arange(20, dtype=np.int64)
        )

        series[::2] = 0
        assert (series[::2] == 0).all()

    def test_setitem_multiindex_slice(self, indexer_sli):
        # GH 8856: stepped slice assignment on a MultiIndex-backed Series
        mi = MultiIndex.from_product(([0, 1], list("abcde")))
        result = Series(np.arange(10, dtype=np.int64), mi)
        indexer_sli(result)[::4] = 100
        expected = Series([100, 1, 2, 3, 100, 5, 6, 7, 100, 9], mi)
        tm.assert_series_equal(result, expected)
|
258 |
+
|
259 |
+
|
260 |
+
class TestSetitemBooleanMask:
    """Boolean-mask __setitem__: alignment, upcasting, and warnings."""

    def test_setitem_mask_cast(self):
        # GH#2746
        # need to upcast
        ser = Series([1, 2], index=[1, 2], dtype="int64")
        ser[[True, False]] = Series([0], index=[1], dtype="int64")
        expected = Series([0, 2], index=[1, 2], dtype="int64")

        tm.assert_series_equal(ser, expected)

    def test_setitem_mask_align_and_promote(self):
        # GH#8387: test that changing types does not break alignment
        ts = Series(
            np.random.default_rng(2).standard_normal(100), index=np.arange(100, 0, -1)
        ).round(5)
        mask = ts > 0
        left = ts.copy()
        right = ts[mask].copy().map(str)
        # float -> object promotion currently warns (deprecation)
        with tm.assert_produces_warning(
            FutureWarning, match="item of incompatible dtype"
        ):
            left[mask] = right
        expected = ts.map(lambda t: str(t) if t > 0 else t)
        tm.assert_series_equal(left, expected)

    def test_setitem_mask_promote_strs(self):
        ser = Series([0, 1, 2, 0])
        mask = ser > 0
        ser2 = ser[mask].map(str)
        with tm.assert_produces_warning(
            FutureWarning, match="item of incompatible dtype"
        ):
            ser[mask] = ser2

        expected = Series([0, "1", "2", 0])
        tm.assert_series_equal(ser, expected)

    def test_setitem_mask_promote(self):
        # same-dtype values: no warning, round-trips unchanged
        ser = Series([0, "foo", "bar", 0])
        mask = Series([False, True, True, False])
        ser2 = ser[mask]
        ser[mask] = ser2

        expected = Series([0, "foo", "bar", 0])
        tm.assert_series_equal(ser, expected)

    def test_setitem_boolean(self, string_series):
        mask = string_series > string_series.median()

        # similar indexed series
        result = string_series.copy()
        result[mask] = string_series * 2
        expected = string_series * 2
        tm.assert_series_equal(result[mask], expected[mask])

        # needs alignment
        result = string_series.copy()
        result[mask] = (string_series * 2)[0:5]
        expected = (string_series * 2)[0:5].reindex_like(string_series)
        expected[-mask] = string_series[mask]
        tm.assert_series_equal(result[mask], expected[mask])

    def test_setitem_boolean_corner(self, datetime_series):
        """A mask whose index cannot be aligned must raise IndexingError."""
        ts = datetime_series
        mask_shifted = ts.shift(1, freq=BDay()) > ts.median()

        msg = (
            r"Unalignable boolean Series provided as indexer \(index of "
            r"the boolean Series and of the indexed object do not match"
        )
        with pytest.raises(IndexingError, match=msg):
            ts[mask_shifted] = 1

        with pytest.raises(IndexingError, match=msg):
            ts.loc[mask_shifted] = 1

    def test_setitem_boolean_different_order(self, string_series):
        # the mask aligns on labels, so its ordering must not matter
        ordered = string_series.sort_values()

        copy = string_series.copy()
        copy[ordered > 0] = 0

        expected = string_series.copy()
        expected[expected > 0] = 0

        tm.assert_series_equal(copy, expected)

    @pytest.mark.parametrize("func", [list, np.array, Series])
    def test_setitem_boolean_python_list(self, func):
        # GH19406: mask accepted as list, ndarray, or Series alike
        ser = Series([None, "b", None])
        mask = func([True, False, True])
        ser[mask] = ["a", "c"]
        expected = Series(["a", "b", "c"])
        tm.assert_series_equal(ser, expected)

    def test_setitem_boolean_nullable_int_types(self, any_numeric_ea_dtype):
        # GH: 26468
        ser = Series([5, 6, 7, 8], dtype=any_numeric_ea_dtype)
        ser[ser > 6] = Series(range(4), dtype=any_numeric_ea_dtype)
        expected = Series([5, 6, 2, 3], dtype=any_numeric_ea_dtype)
        tm.assert_series_equal(ser, expected)

        ser = Series([5, 6, 7, 8], dtype=any_numeric_ea_dtype)
        ser.loc[ser > 6] = Series(range(4), dtype=any_numeric_ea_dtype)
        tm.assert_series_equal(ser, expected)

        ser = Series([5, 6, 7, 8], dtype=any_numeric_ea_dtype)
        loc_ser = Series(range(4), dtype=any_numeric_ea_dtype)
        ser.loc[ser > 6] = loc_ser.loc[loc_ser > 1]
        tm.assert_series_equal(ser, expected)

    def test_setitem_with_bool_mask_and_values_matching_n_trues_in_length(self):
        # GH#30567: value length equal to the number of True mask entries
        ser = Series([None] * 10)
        mask = [False] * 3 + [True] * 5 + [False] * 2
        ser[mask] = range(5)
        result = ser
        expected = Series([None] * 3 + list(range(5)) + [None] * 2, dtype=object)
        tm.assert_series_equal(result, expected)

    def test_setitem_nan_with_bool(self):
        # GH 13034: NaN into bool upcasts to object (with a warning)
        result = Series([True, False, True])
        with tm.assert_produces_warning(
            FutureWarning, match="item of incompatible dtype"
        ):
            result[0] = np.nan
        expected = Series([np.nan, False, True], dtype=object)
        tm.assert_series_equal(result, expected)

    def test_setitem_mask_smallint_upcast(self):
        """Values that do not fit int8 force an upcast (warned)."""
        orig = Series([1, 2, 3], dtype="int8")
        alt = np.array([999, 1000, 1001], dtype=np.int64)

        mask = np.array([True, False, True])

        ser = orig.copy()
        with tm.assert_produces_warning(
            FutureWarning, match="item of incompatible dtype"
        ):
            ser[mask] = Series(alt)
        expected = Series([999, 2, 1001])
        tm.assert_series_equal(ser, expected)

        ser2 = orig.copy()
        with tm.assert_produces_warning(
            FutureWarning, match="item of incompatible dtype"
        ):
            ser2.mask(mask, alt, inplace=True)
        tm.assert_series_equal(ser2, expected)

        ser3 = orig.copy()
        res = ser3.where(~mask, Series(alt))
        tm.assert_series_equal(res, expected)

    def test_setitem_mask_smallint_no_upcast(self):
        # like test_setitem_mask_smallint_upcast, but while we can't hold 'alt',
        # we *can* hold alt[mask] without casting
        orig = Series([1, 2, 3], dtype="uint8")
        alt = Series([245, 1000, 246], dtype=np.int64)

        mask = np.array([True, False, True])

        ser = orig.copy()
        ser[mask] = alt
        expected = Series([245, 2, 246], dtype="uint8")
        tm.assert_series_equal(ser, expected)

        ser2 = orig.copy()
        ser2.mask(mask, alt, inplace=True)
        tm.assert_series_equal(ser2, expected)

        # TODO: ser.where(~mask, alt) unnecessarily upcasts to int64
        ser3 = orig.copy()
        res = ser3.where(~mask, alt)
        tm.assert_series_equal(res, expected, check_dtype=False)
|
437 |
+
|
438 |
+
|
439 |
+
class TestSetitemViewCopySemantics:
    """Setitem must not mutate the index/array a Series was built from."""

    def test_setitem_invalidates_datetime_index_freq(self, using_copy_on_write):
        # GH#24096 altering a datetime64tz Series inplace invalidates the
        # `freq` attribute on the underlying DatetimeIndex

        dti = date_range("20130101", periods=3, tz="US/Eastern")
        ts = dti[1]
        ser = Series(dti)
        assert ser._values is not dti
        if using_copy_on_write:
            # under CoW the buffer is shared until first write
            assert ser._values._ndarray.base is dti._data._ndarray.base
        else:
            assert ser._values._ndarray.base is not dti._data._ndarray.base
        assert dti.freq == "D"
        ser.iloc[1] = NaT
        assert ser._values.freq is None

        # check that the DatetimeIndex was not altered in place
        assert ser._values is not dti
        assert ser._values._ndarray.base is not dti._data._ndarray.base
        assert dti[1] == ts
        assert dti.freq == "D"

    def test_dt64tz_setitem_does_not_mutate_dti(self, using_copy_on_write):
        # GH#21907, GH#24096
        dti = date_range("2016-01-01", periods=10, tz="US/Pacific")
        ts = dti[0]
        ser = Series(dti)
        assert ser._values is not dti
        if using_copy_on_write:
            assert ser._values._ndarray.base is dti._data._ndarray.base
            assert ser._mgr.arrays[0]._ndarray.base is dti._data._ndarray.base
        else:
            assert ser._values._ndarray.base is not dti._data._ndarray.base
            assert ser._mgr.arrays[0]._ndarray.base is not dti._data._ndarray.base

        assert ser._mgr.arrays[0] is not dti

        # write through the Series; the original index must stay intact
        ser[::3] = NaT
        assert ser[0] is NaT
        assert dti[0] == ts
|
480 |
+
|
481 |
+
|
482 |
+
class TestSetitemCallable:
    """Callable keys are evaluated; callable values are stored, not invoked."""

    def test_setitem_callable_key(self):
        # GH#12533: a callable key is called with the Series and its result
        # ("A") is used as the label to set.
        result = Series([1, 2, 3, 4], index=list("ABCD"))
        result[lambda obj: "A"] = -1
        tm.assert_series_equal(result, Series([-1, 2, 3, 4], index=list("ABCD")))

    def test_setitem_callable_other(self):
        # GH#13299: a callable on the RHS is stored as a value, not invoked.
        inc = lambda x: x + 1

        # set object dtype to avoid upcast when setting inc
        result = Series([1, 2, -1, 4], dtype=object)
        result[result < 0] = inc
        tm.assert_series_equal(result, Series([1, 2, inc, 4]))
|
501 |
+
|
502 |
+
|
503 |
+
class TestSetitemWithExpansion:
    """Setitem with a key not present in the index, i.e. enlargement."""

    def test_setitem_empty_series(self):
        # GH#10193
        key = Timestamp("2012-01-01")
        series = Series(dtype=object)
        series[key] = 47
        expected = Series(47, [key])
        tm.assert_series_equal(series, expected)

    def test_setitem_empty_series_datetimeindex_preserves_freq(self):
        # GH#33573 our index should retain its freq
        dti = DatetimeIndex([], freq="D", dtype="M8[ns]")
        series = Series([], index=dti, dtype=object)
        key = Timestamp("2012-01-01")
        series[key] = 47
        expected = Series(47, DatetimeIndex([key], freq="D").as_unit("ns"))
        tm.assert_series_equal(series, expected)
        assert series.index.freq == expected.index.freq

    def test_setitem_empty_series_timestamp_preserves_dtype(self):
        # GH 21881
        timestamp = Timestamp(1412526600000000000)
        series = Series([timestamp], index=["timestamp"], dtype=object)
        expected = series["timestamp"]

        series = Series([], dtype=object)
        series["anything"] = 300.0
        series["timestamp"] = timestamp
        result = series["timestamp"]
        assert result == expected

    @pytest.mark.parametrize(
        "td",
        [
            Timedelta("9 days"),
            Timedelta("9 days").to_timedelta64(),
            Timedelta("9 days").to_pytimedelta(),
        ],
    )
    def test_append_timedelta_does_not_cast(self, td, using_infer_string, request):
        # GH#22717 inserting a Timedelta should _not_ cast to int64
        if using_infer_string and not isinstance(td, Timedelta):
            # TODO: GH#56010
            request.applymarker(pytest.mark.xfail(reason="inferred as string"))

        expected = Series(["x", td], index=[0, "td"], dtype=object)

        ser = Series(["x"])
        ser["td"] = td
        tm.assert_series_equal(ser, expected)
        assert isinstance(ser["td"], Timedelta)

        ser = Series(["x"])
        ser.loc["td"] = Timedelta("9 days")
        tm.assert_series_equal(ser, expected)
        assert isinstance(ser["td"], Timedelta)

    def test_setitem_with_expansion_type_promotion(self):
        # GH#12599
        # successive expanding setitems of mixed types end up as object dtype
        ser = Series(dtype=object)
        ser["a"] = Timestamp("2016-01-01")
        ser["b"] = 3.0
        ser["c"] = "foo"
        expected = Series([Timestamp("2016-01-01"), 3.0, "foo"], index=["a", "b", "c"])
        tm.assert_series_equal(ser, expected)

    def test_setitem_not_contained(self, string_series):
        # set item that's not contained
        ser = string_series.copy()
        assert "foobar" not in ser.index
        ser["foobar"] = 1

        app = Series([1], index=["foobar"], name="series")
        expected = concat([string_series, app])
        tm.assert_series_equal(ser, expected)

    def test_setitem_keep_precision(self, any_numeric_ea_dtype):
        # GH#32346
        # enlargement must not degrade the extension dtype
        ser = Series([1, 2], dtype=any_numeric_ea_dtype)
        ser[2] = 10
        expected = Series([1, 2, 10], dtype=any_numeric_ea_dtype)
        tm.assert_series_equal(ser, expected)

    @pytest.mark.parametrize(
        "na, target_na, dtype, target_dtype, indexer, warn",
        [
            (NA, NA, "Int64", "Int64", 1, None),
            (NA, NA, "Int64", "Int64", 2, None),
            (NA, np.nan, "int64", "float64", 1, None),
            (NA, np.nan, "int64", "float64", 2, None),
            (NaT, NaT, "int64", "object", 1, FutureWarning),
            (NaT, NaT, "int64", "object", 2, None),
            (np.nan, NA, "Int64", "Int64", 1, None),
            (np.nan, NA, "Int64", "Int64", 2, None),
            (np.nan, NA, "Float64", "Float64", 1, None),
            (np.nan, NA, "Float64", "Float64", 2, None),
            (np.nan, np.nan, "int64", "float64", 1, None),
            (np.nan, np.nan, "int64", "float64", 2, None),
        ],
    )
    def test_setitem_enlarge_with_na(
        self, na, target_na, dtype, target_dtype, indexer, warn
    ):
        # GH#32346
        # indexer==1 overwrites in place; indexer==2 enlarges
        ser = Series([1, 2], dtype=dtype)
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            ser[indexer] = na
        expected_values = [1, target_na] if indexer == 1 else [1, 2, target_na]
        expected = Series(expected_values, dtype=target_dtype)
        tm.assert_series_equal(ser, expected)

    def test_setitem_enlargement_object_none(self, nulls_fixture, using_infer_string):
        # GH#48665
        ser = Series(["a", "b"])
        ser[3] = nulls_fixture
        dtype = (
            "string[pyarrow_numpy]"
            if using_infer_string and not isinstance(nulls_fixture, Decimal)
            else object
        )
        expected = Series(["a", "b", nulls_fixture], index=[0, 1, 3], dtype=dtype)
        tm.assert_series_equal(ser, expected)
        if using_infer_string:
            # BUGFIX: this line was a bare expression (`ser[3] is np.nan`)
            # with no effect; it must actually assert.
            assert ser[3] is np.nan
        else:
            assert ser[3] is nulls_fixture
|
629 |
+
|
630 |
+
|
631 |
+
def test_setitem_scalar_into_readonly_backing_data():
    # GH#14359: test that you cannot mutate a read only buffer
    # Scalar setitem into a Series over a read-only ndarray must raise and
    # leave the buffer untouched.

    array = np.zeros(5)
    array.flags.writeable = False  # make the array immutable
    series = Series(array, copy=False)

    for n in series.index:
        msg = "assignment destination is read-only"
        with pytest.raises(ValueError, match=msg):
            series[n] = 1

        # the underlying buffer is unchanged
        assert array[n] == 0
|
644 |
+
|
645 |
+
|
646 |
+
def test_setitem_slice_into_readonly_backing_data():
    # GH#14359: test that you cannot mutate a read only buffer
    # Same as the scalar case above, but with a slice key.

    array = np.zeros(5)
    array.flags.writeable = False  # make the array immutable
    series = Series(array, copy=False)

    msg = "assignment destination is read-only"
    with pytest.raises(ValueError, match=msg):
        series[1:3] = 1

    # no element of the buffer was modified
    assert not array.any()
|
658 |
+
|
659 |
+
|
660 |
+
def test_setitem_categorical_assigning_ops():
    # Setting an existing category via several indexer styles keeps the
    # categorical dtype and the full category set.
    orig = Series(Categorical(["b", "b"], categories=["a", "b"]))

    # full-slice assignment
    ser = orig.copy()
    ser[:] = "a"
    exp = Series(Categorical(["a", "a"], categories=["a", "b"]))
    tm.assert_series_equal(ser, exp)

    # positional scalar assignment
    ser = orig.copy()
    ser[1] = "a"
    exp = Series(Categorical(["b", "a"], categories=["a", "b"]))
    tm.assert_series_equal(ser, exp)

    # boolean mask derived from the index
    ser = orig.copy()
    ser[ser.index > 0] = "a"
    exp = Series(Categorical(["b", "a"], categories=["a", "b"]))
    tm.assert_series_equal(ser, exp)

    # explicit boolean list
    ser = orig.copy()
    ser[[False, True]] = "a"
    exp = Series(Categorical(["b", "a"], categories=["a", "b"]))
    tm.assert_series_equal(ser, exp)

    # label-based assignment after relabeling the index
    ser = orig.copy()
    ser.index = ["x", "y"]
    ser["y"] = "a"
    exp = Series(Categorical(["b", "a"], categories=["a", "b"]), index=["x", "y"])
    tm.assert_series_equal(ser, exp)
|
687 |
+
|
688 |
+
|
689 |
+
def test_setitem_nan_into_categorical():
    # ensure that one can set something to np.nan
    # NaN is always a valid value for a Categorical regardless of categories
    ser = Series(Categorical([1, 2, 3]))
    exp = Series(Categorical([1, np.nan, 3], categories=[1, 2, 3]))
    ser[1] = np.nan
    tm.assert_series_equal(ser, exp)
|
695 |
+
|
696 |
+
|
697 |
+
class TestSetitemCasting:
    """Casting behavior when setting incompatible values into bool Series."""

    @pytest.mark.parametrize("unique", [True, False])
    @pytest.mark.parametrize("val", [3, 3.0, "3"], ids=type)
    def test_setitem_non_bool_into_bool(self, val, indexer_sli, unique):
        # dont cast these 3-like values to bool
        ser = Series([True, False])
        if not unique:
            # duplicate labels: label-based setitem hits both positions
            ser.index = [1, 1]

        with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
            indexer_sli(ser)[1] = val
        # the stored value keeps its original type (no bool coercion)
        assert type(ser.iloc[1]) == type(val)

        expected = Series([True, val], dtype=object, index=ser.index)
        if not unique and indexer_sli is not tm.iloc:
            # label-based indexers set every matching label
            expected = Series([val, val], dtype=object, index=[1, 1])
        tm.assert_series_equal(ser, expected)

    def test_setitem_boolean_array_into_npbool(self):
        # GH#45462
        ser = Series([True, False, True])
        values = ser._values
        arr = array([True, False, None])

        ser[:2] = arr[:2]  # no NAs -> can set inplace
        assert ser._values is values

        with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
            ser[1:] = arr[1:]  # has an NA -> cast to boolean dtype
        expected = Series(arr)
        tm.assert_series_equal(ser, expected)
|
728 |
+
|
729 |
+
|
730 |
+
class SetitemCastingEquivalents:
    """
    Check each of several methods that _should_ be equivalent to `obj[key] = val`

    We assume that
        - obj.index is the default Index(range(len(obj)))
        - the setitem does not expand the obj

    Subclasses supply the ``obj``, ``key``, ``val``, ``expected`` and ``warn``
    fixtures; the test methods here exercise every equivalent indexer form.
    """

    @pytest.fixture
    def is_inplace(self, obj, expected):
        """
        Whether we expect the setting to be in-place or not.
        """
        # same dtype after the set -> no cast was needed -> in-place
        return expected.dtype == obj.dtype

    def check_indexer(self, obj, key, expected, val, indexer, is_inplace):
        # Perform ``indexer(obj)[key] = val`` on a copy and verify both the
        # result and the in-place-ness against the original values.
        orig = obj
        obj = obj.copy()
        arr = obj._values

        indexer(obj)[key] = val
        tm.assert_series_equal(obj, expected)

        self._check_inplace(is_inplace, orig, arr, obj)

    def _check_inplace(self, is_inplace, orig, arr, obj):
        if is_inplace is None:
            # We are not (yet) checking whether setting is inplace or not
            pass
        elif is_inplace:
            if arr.dtype.kind in ["m", "M"]:
                # We may not have the same DTA/TDA, but will have the same
                # underlying data
                assert arr._ndarray is obj._values._ndarray
            else:
                assert obj._values is arr
        else:
            # otherwise original array should be unchanged
            tm.assert_equal(arr, orig._values)

    def test_int_key(self, obj, key, expected, warn, val, indexer_sli, is_inplace):
        # An integer key and all of its equivalents: at/iat, range, slice,
        # list, ndarray, and generator keys must behave identically.
        if not isinstance(key, int):
            pytest.skip("Not relevant for int key")

        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, key, expected, val, indexer_sli, is_inplace)

        if indexer_sli is tm.loc:
            with tm.assert_produces_warning(warn, match="incompatible dtype"):
                self.check_indexer(obj, key, expected, val, tm.at, is_inplace)
        elif indexer_sli is tm.iloc:
            with tm.assert_produces_warning(warn, match="incompatible dtype"):
                self.check_indexer(obj, key, expected, val, tm.iat, is_inplace)

        rng = range(key, key + 1)
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, rng, expected, val, indexer_sli, is_inplace)

        if indexer_sli is not tm.loc:
            # Note: no .loc because that handles slice edges differently
            slc = slice(key, key + 1)
            with tm.assert_produces_warning(warn, match="incompatible dtype"):
                self.check_indexer(obj, slc, expected, val, indexer_sli, is_inplace)

        ilkey = [key]
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, ilkey, expected, val, indexer_sli, is_inplace)

        indkey = np.array(ilkey)
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, indkey, expected, val, indexer_sli, is_inplace)

        genkey = (x for x in [key])
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, genkey, expected, val, indexer_sli, is_inplace)

    def test_slice_key(self, obj, key, expected, warn, val, indexer_sli, is_inplace):
        # A slice key and its list/ndarray/generator equivalents.
        if not isinstance(key, slice):
            pytest.skip("Not relevant for slice key")

        if indexer_sli is not tm.loc:
            # Note: no .loc because that handles slice edges differently
            with tm.assert_produces_warning(warn, match="incompatible dtype"):
                self.check_indexer(obj, key, expected, val, indexer_sli, is_inplace)

        ilkey = list(range(len(obj)))[key]
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, ilkey, expected, val, indexer_sli, is_inplace)

        indkey = np.array(ilkey)
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, indkey, expected, val, indexer_sli, is_inplace)

        genkey = (x for x in indkey)
        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            self.check_indexer(obj, genkey, expected, val, indexer_sli, is_inplace)

    def test_mask_key(self, obj, key, expected, warn, val, indexer_sli):
        # setitem with boolean mask
        mask = np.zeros(obj.shape, dtype=bool)
        mask[key] = True

        obj = obj.copy()

        if is_list_like(val) and len(val) < mask.sum():
            # a too-short listlike value cannot be broadcast into the mask
            msg = "boolean index did not match indexed array along dimension"
            with pytest.raises(IndexError, match=msg):
                indexer_sli(obj)[mask] = val
            return

        with tm.assert_produces_warning(warn, match="incompatible dtype"):
            indexer_sli(obj)[mask] = val
        tm.assert_series_equal(obj, expected)

    def test_series_where(self, obj, key, expected, warn, val, is_inplace):
        # ``obj.where(~mask, val)`` should be equivalent to the setitem.
        mask = np.zeros(obj.shape, dtype=bool)
        mask[key] = True

        if is_list_like(val) and len(val) < len(obj):
            # Series.where is not valid here
            msg = "operands could not be broadcast together with shapes"
            with pytest.raises(ValueError, match=msg):
                obj.where(~mask, val)
            return

        orig = obj
        obj = obj.copy()
        arr = obj._values

        res = obj.where(~mask, val)

        # where() preserves the NA value used, so align expected accordingly
        if val is NA and res.dtype == object:
            expected = expected.fillna(NA)
        elif val is None and res.dtype == object:
            assert expected.dtype == object
            expected = expected.copy()
            expected[expected.isna()] = None
        tm.assert_series_equal(res, expected)

        self._check_inplace(is_inplace, orig, arr, obj)

    def test_index_where(self, obj, key, expected, warn, val, using_infer_string):
        # Index.where should cast the same way as Series setitem.
        mask = np.zeros(obj.shape, dtype=bool)
        mask[key] = True

        if using_infer_string and obj.dtype == object:
            with pytest.raises(TypeError, match="Scalar must"):
                Index(obj).where(~mask, val)
        else:
            res = Index(obj).where(~mask, val)
            expected_idx = Index(expected, dtype=expected.dtype)
            tm.assert_index_equal(res, expected_idx)

    def test_index_putmask(self, obj, key, expected, warn, val, using_infer_string):
        # Index.putmask should cast the same way as Series setitem.
        mask = np.zeros(obj.shape, dtype=bool)
        mask[key] = True

        if using_infer_string and obj.dtype == object:
            with pytest.raises(TypeError, match="Scalar must"):
                Index(obj).putmask(mask, val)
        else:
            res = Index(obj).putmask(mask, val)
            tm.assert_index_equal(res, Index(expected, dtype=expected.dtype))
|
894 |
+
|
895 |
+
|
896 |
+
@pytest.mark.parametrize(
    "obj,expected,key,warn",
    [
        pytest.param(
            # GH#45568 setting a valid NA value into IntervalDtype[int] should
            # cast to IntervalDtype[float]
            Series(interval_range(1, 5)),
            Series(
                [Interval(1, 2), np.nan, Interval(3, 4), Interval(4, 5)],
                dtype="interval[float64]",
            ),
            1,
            FutureWarning,
            id="interval_int_na_value",
        ),
        pytest.param(
            # these induce dtype changes
            Series([2, 3, 4, 5, 6, 7, 8, 9, 10]),
            Series([np.nan, 3, np.nan, 5, np.nan, 7, np.nan, 9, np.nan]),
            slice(None, None, 2),
            None,
            id="int_series_slice_key_step",
        ),
        pytest.param(
            Series([True, True, False, False]),
            Series([np.nan, True, np.nan, False], dtype=object),
            slice(None, None, 2),
            FutureWarning,
            id="bool_series_slice_key_step",
        ),
        pytest.param(
            # these induce dtype changes
            Series(np.arange(10)),
            Series([np.nan, np.nan, np.nan, np.nan, np.nan, 5, 6, 7, 8, 9]),
            slice(None, 5),
            None,
            id="int_series_slice_key",
        ),
        pytest.param(
            # changes dtype GH#4463
            Series([1, 2, 3]),
            Series([np.nan, 2, 3]),
            0,
            None,
            id="int_series_int_key",
        ),
        pytest.param(
            # changes dtype GH#4463
            Series([False]),
            Series([np.nan], dtype=object),
            # TODO: maybe go to float64 since we are changing the _whole_ Series?
            0,
            FutureWarning,
            id="bool_series_int_key_change_all",
        ),
        pytest.param(
            # changes dtype GH#4463
            Series([False, True]),
            Series([np.nan, True], dtype=object),
            0,
            FutureWarning,
            id="bool_series_int_key",
        ),
    ],
)
class TestSetitemCastingEquivalents(SetitemCastingEquivalents):
    """Concrete cases: setting NA-like scalars into various dtypes."""

    @pytest.fixture(params=[np.nan, np.float64("NaN"), None, NA])
    def val(self, request):
        """
        NA values that should generally be valid_na for *all* dtypes.

        Include both python float NaN and np.float64; only np.float64 has a
        `dtype` attribute.
        """
        return request.param
|
971 |
+
|
972 |
+
|
973 |
+
class TestSetitemTimedelta64IntoNumeric(SetitemCastingEquivalents):
    # timedelta64 should not be treated as integers when setting into
    # numeric Series

    @pytest.fixture
    def val(self):
        td = np.timedelta64(4, "ns")
        return td
        # TODO: could also try np.full((1,), td)

    @pytest.fixture(params=[complex, int, float])
    def dtype(self, request):
        # numeric dtypes the td64 scalar is set into
        return request.param

    @pytest.fixture
    def obj(self, dtype):
        arr = np.arange(5).astype(dtype)
        ser = Series(arr)
        return ser

    @pytest.fixture
    def expected(self, dtype):
        # setting td64 forces a cast to object; the scalar is kept as td64
        arr = np.arange(5).astype(dtype)
        ser = Series(arr)
        ser = ser.astype(object)
        ser.iloc[0] = np.timedelta64(4, "ns")
        return ser

    @pytest.fixture
    def key(self):
        return 0

    @pytest.fixture
    def warn(self):
        # the object cast is an incompatible-dtype change -> warns
        return FutureWarning
|
1008 |
+
|
1009 |
+
|
1010 |
+
class TestSetitemDT64IntoInt(SetitemCastingEquivalents):
    # GH#39619 dont cast dt64 to int when doing this setitem

    @pytest.fixture(params=["M8[ns]", "m8[ns]"])
    def dtype(self, request):
        # datetime64 and timedelta64 scalar dtypes
        return request.param

    @pytest.fixture
    def scalar(self, dtype):
        val = np.datetime64("2021-01-18 13:25:00", "ns")
        if dtype == "m8[ns]":
            # subtracting from itself yields a timedelta64 of the same unit
            val = val - val
        return val

    @pytest.fixture
    def expected(self, scalar):
        # the int Series is cast to object; the scalar's type is preserved
        expected = Series([scalar, scalar, 3], dtype=object)
        assert isinstance(expected[0], type(scalar))
        return expected

    @pytest.fixture
    def obj(self):
        return Series([1, 2, 3])

    @pytest.fixture
    def key(self):
        return slice(None, -1)

    @pytest.fixture(params=[None, list, np.array])
    def val(self, scalar, request):
        # scalar, or the scalar boxed in a list/ndarray of matching length
        box = request.param
        if box is None:
            return scalar
        return box([scalar, scalar])

    @pytest.fixture
    def warn(self):
        return FutureWarning
|
1048 |
+
|
1049 |
+
|
1050 |
+
class TestSetitemNAPeriodDtype(SetitemCastingEquivalents):
    # Setting compatible NA values into Series with PeriodDtype

    @pytest.fixture
    def expected(self, key):
        exp = Series(period_range("2000-01-01", periods=10, freq="D"))
        # write NaT's i8 sentinel directly into the backing array
        exp._values.view("i8")[key] = NaT._value
        assert exp[key] is NaT or all(x is NaT for x in exp[key])
        return exp

    @pytest.fixture
    def obj(self):
        return Series(period_range("2000-01-01", periods=10, freq="D"))

    @pytest.fixture(params=[3, slice(3, 5)])
    def key(self, request):
        return request.param

    @pytest.fixture(params=[None, np.nan])
    def val(self, request):
        return request.param

    @pytest.fixture
    def warn(self):
        # NA values are valid for PeriodDtype -> no cast, no warning
        return None
|
1075 |
+
|
1076 |
+
|
1077 |
+
class TestSetitemNADatetimeLikeDtype(SetitemCastingEquivalents):
    # some nat-like values should be cast to datetime64/timedelta64 when
    # inserting into a datetime64/timedelta64 series. Others should coerce
    # to object and retain their dtypes.
    # GH#18586 for td64 and boolean mask case

    @pytest.fixture(
        params=["m8[ns]", "M8[ns]", "datetime64[ns, UTC]", "datetime64[ns, US/Central]"]
    )
    def dtype(self, request):
        return request.param

    @pytest.fixture
    def obj(self, dtype):
        # build from i8 values so the same data works for every dtype param
        i8vals = date_range("2016-01-01", periods=3).asi8
        idx = Index(i8vals, dtype=dtype)
        assert idx.dtype == dtype
        return Series(idx)

    @pytest.fixture(
        params=[
            None,
            np.nan,
            NaT,
            np.timedelta64("NaT", "ns"),
            np.datetime64("NaT", "ns"),
        ]
    )
    def val(self, request):
        return request.param

    @pytest.fixture
    def is_inplace(self, val, obj):
        # td64 -> cast to object iff val is datetime64("NaT")
        # dt64 -> cast to object iff val is timedelta64("NaT")
        # dt64tz -> cast to object with anything _but_ NaT
        return val is NaT or val is None or val is np.nan or obj.dtype == val.dtype

    @pytest.fixture
    def expected(self, obj, val, is_inplace):
        dtype = obj.dtype if is_inplace else object
        expected = Series([val] + list(obj[1:]), dtype=dtype)
        return expected

    @pytest.fixture
    def key(self):
        return 0

    @pytest.fixture
    def warn(self, is_inplace):
        # warn only when the set requires a cast to object
        return None if is_inplace else FutureWarning
|
1128 |
+
|
1129 |
+
|
1130 |
+
class TestSetitemMismatchedTZCastsToObject(SetitemCastingEquivalents):
    # GH#24024
    # NOTE: despite the class name, since 2.0 the value is converted to the
    # Series' tz instead of casting to object (see ``expected`` below).
    @pytest.fixture
    def obj(self):
        return Series(date_range("2000", periods=2, tz="US/Central"))

    @pytest.fixture
    def val(self):
        # a Timestamp in a *different* timezone than obj
        return Timestamp("2000", tz="US/Eastern")

    @pytest.fixture
    def key(self):
        return 0

    @pytest.fixture
    def expected(self, obj, val):
        # pre-2.0 this would cast to object, in 2.0 we cast the val to
        # the target tz
        expected = Series(
            [
                val.tz_convert("US/Central"),
                Timestamp("2000-01-02 00:00:00-06:00", tz="US/Central"),
            ],
            dtype=obj.dtype,
        )
        return expected

    @pytest.fixture
    def warn(self):
        return None
|
1160 |
+
|
1161 |
+
|
1162 |
+
@pytest.mark.parametrize(
    "obj,expected,warn",
    [
        # For numeric series, we should coerce to NaN.
        (Series([1, 2, 3]), Series([np.nan, 2, 3]), None),
        (Series([1.0, 2.0, 3.0]), Series([np.nan, 2.0, 3.0]), None),
        # For datetime series, we should coerce to NaT.
        (
            Series([datetime(2000, 1, 1), datetime(2000, 1, 2), datetime(2000, 1, 3)]),
            Series([NaT, datetime(2000, 1, 2), datetime(2000, 1, 3)]),
            None,
        ),
        # For objects, we should preserve the None value.
        (Series(["foo", "bar", "baz"]), Series([None, "bar", "baz"]), None),
    ],
)
class TestSeriesNoneCoercion(SetitemCastingEquivalents):
    """How ``None`` coerces when set into Series of various dtypes."""

    @pytest.fixture
    def key(self):
        return 0

    @pytest.fixture
    def val(self):
        return None
|
1186 |
+
|
1187 |
+
|
1188 |
+
class TestSetitemFloatIntervalWithIntIntervalValues(SetitemCastingEquivalents):
    # GH#44201 Cast to shared IntervalDtype rather than object

    def test_setitem_example(self):
        # Just a case here to make obvious what this test class is aimed at
        idx = IntervalIndex.from_breaks(range(4))
        obj = Series(idx)
        val = Interval(0.5, 1.5)

        with tm.assert_produces_warning(
            FutureWarning, match="Setting an item of incompatible dtype"
        ):
            obj[0] = val
        # int intervals + float interval -> shared float IntervalDtype
        assert obj.dtype == "Interval[float64, right]"

    @pytest.fixture
    def obj(self):
        idx = IntervalIndex.from_breaks(range(4))
        return Series(idx)

    @pytest.fixture
    def val(self):
        return Interval(0.5, 1.5)

    @pytest.fixture
    def key(self):
        return 0

    @pytest.fixture
    def expected(self, obj, val):
        data = [val] + list(obj[1:])
        idx = IntervalIndex(data, dtype="Interval[float64]")
        return Series(idx)

    @pytest.fixture
    def warn(self):
        return FutureWarning
|
1225 |
+
|
1226 |
+
|
1227 |
+
class TestSetitemRangeIntoIntegerSeries(SetitemCastingEquivalents):
    # GH#44261 Setting a range with sufficiently-small integers into
    # small-itemsize integer dtypes should not need to upcast

    @pytest.fixture
    def obj(self, any_int_numpy_dtype):
        dtype = np.dtype(any_int_numpy_dtype)
        ser = Series(range(5), dtype=dtype)
        return ser

    @pytest.fixture
    def val(self):
        # values fit in every integer dtype, including int8/uint8
        return range(2, 4)

    @pytest.fixture
    def key(self):
        return slice(0, 2)

    @pytest.fixture
    def expected(self, any_int_numpy_dtype):
        # dtype unchanged: no upcast needed
        dtype = np.dtype(any_int_numpy_dtype)
        exp = Series([2, 3, 2, 3, 4], dtype=dtype)
        return exp

    @pytest.fixture
    def warn(self):
        return None
|
1254 |
+
|
1255 |
+
|
1256 |
+
@pytest.mark.parametrize(
    "val, warn",
    [
        (np.array([2.0, 3.0]), None),
        (np.array([2.5, 3.5]), FutureWarning),
        (
            np.array([2**65, 2**65 + 1], dtype=np.float64),
            FutureWarning,
        ),  # all ints, but can't cast
    ],
)
class TestSetitemFloatNDarrayIntoIntegerSeries(SetitemCastingEquivalents):
    """Float ndarrays only keep int64 dtype when losslessly castable."""

    @pytest.fixture
    def obj(self):
        return Series(range(5), dtype=np.int64)

    @pytest.fixture
    def key(self):
        return slice(0, 2)

    @pytest.fixture
    def expected(self, val):
        if val[0] == 2:
            # NB: this condition is based on currently-hardcoded "val" cases
            dtype = np.int64
        else:
            dtype = np.float64
        res_values = np.array(range(5), dtype=dtype)
        res_values[:2] = val
        return Series(res_values)
|
1286 |
+
|
1287 |
+
|
1288 |
+
@pytest.mark.parametrize("val", [512, np.int16(512)])
class TestSetitemIntoIntegerSeriesNeedsUpcast(SetitemCastingEquivalents):
    """512 does not fit in int8, so setting it upcasts to int16."""

    @pytest.fixture
    def obj(self):
        return Series([1, 2, 3], dtype=np.int8)

    @pytest.fixture
    def key(self):
        return 1

    @pytest.fixture
    def expected(self):
        return Series([1, 512, 3], dtype=np.int16)

    @pytest.fixture
    def warn(self):
        return FutureWarning
|
1305 |
+
|
1306 |
+
|
1307 |
+
@pytest.mark.parametrize("val", [2**33 + 1.0, 2**33 + 1.1, 2**62])
class TestSmallIntegerSetitemUpcast(SetitemCastingEquivalents):
    # https://github.com/pandas-dev/pandas/issues/39584#issuecomment-941212124
    @pytest.fixture
    def obj(self):
        return Series([1, 2, 3], dtype="i4")

    @pytest.fixture
    def key(self):
        return 0

    @pytest.fixture
    def expected(self, val):
        # fractional values force float64; large integers force int64
        if val % 1 != 0:
            dtype = "f8"
        else:
            dtype = "i8"
        return Series([val, 2, 3], dtype=dtype)

    @pytest.fixture
    def warn(self):
        return FutureWarning
|
1329 |
+
|
1330 |
+
|
1331 |
+
class CoercionTest(SetitemCastingEquivalents):
    # Tests ported from tests.indexing.test_coercion
    # Subclasses supply ``obj`` plus parametrized ``val``/``exp_dtype``/``warn``.

    @pytest.fixture
    def key(self):
        return 1

    @pytest.fixture
    def expected(self, obj, key, val, exp_dtype):
        # expected result: obj with obj[key] replaced by val, in exp_dtype
        vals = list(obj)
        vals[key] = val
        return Series(vals, dtype=exp_dtype)
|
1343 |
+
|
1344 |
+
|
1345 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [(np.int32(1), np.int8, None), (np.int16(2**9), np.int16, FutureWarning)],
)
class TestCoercionInt8(CoercionTest):
    # previously test_setitem_series_int8 in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        return Series([1, 2, 3, 4], dtype=np.int8)
|
1354 |
+
|
1355 |
+
|
1356 |
+
@pytest.mark.parametrize("val", [1, 1.1, 1 + 1j, True])
@pytest.mark.parametrize("exp_dtype", [object])
class TestCoercionObject(CoercionTest):
    # previously test_setitem_series_object in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        return Series(["a", "b", "c", "d"], dtype=object)

    @pytest.fixture
    def warn(self):
        # object dtype accepts anything -> never warns
        return None
|
1367 |
+
|
1368 |
+
|
1369 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        (1, np.complex128, None),
        (1.1, np.complex128, None),
        (1 + 1j, np.complex128, None),
        (True, object, FutureWarning),
    ],
)
class TestCoercionComplex(CoercionTest):
    # previously test_setitem_series_complex128 in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        return Series([1 + 1j, 2 + 2j, 3 + 3j, 4 + 4j])
|
1383 |
+
|
1384 |
+
|
1385 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        # Only an actual bool can be stored in a bool Series; any other
        # scalar (even numeric 1/3/1.1) upcasts to object with a warning.
        (1, object, FutureWarning),
        ("3", object, FutureWarning),
        (3, object, FutureWarning),
        (1.1, object, FutureWarning),
        (1 + 1j, object, FutureWarning),
        (True, bool, None),
    ],
)
class TestCoercionBool(CoercionTest):
    # previously test_setitem_series_bool in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        return Series([True, False, True, False], dtype=bool)
|
1401 |
+
|
1402 |
+
|
1403 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        # int into int64 is lossless and silent; float/complex/bool all
        # force an upcast and the incompatible-dtype FutureWarning.
        (1, np.int64, None),
        (1.1, np.float64, FutureWarning),
        (1 + 1j, np.complex128, FutureWarning),
        (True, object, FutureWarning),
    ],
)
class TestCoercionInt64(CoercionTest):
    # previously test_setitem_series_int64 in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        return Series([1, 2, 3, 4])
|
1417 |
+
|
1418 |
+
|
1419 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        # ints and floats fit in float64 silently; complex and bool
        # upcast with the incompatible-dtype FutureWarning.
        (1, np.float64, None),
        (1.1, np.float64, None),
        (1 + 1j, np.complex128, FutureWarning),
        (True, object, FutureWarning),
    ],
)
class TestCoercionFloat64(CoercionTest):
    # previously test_setitem_series_float64 in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        return Series([1.1, 2.2, 3.3, 4.4])
|
1433 |
+
|
1434 |
+
|
1435 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        (1, np.float32, None),
        pytest.param(
            1.1,
            np.float32,
            None,
            # Whether float32 "holds" 1.1 depends on numpy's promotion
            # behavior: only numpy >= 1.24 with weak promotion keeps
            # float32; otherwise we fall through to float64.
            marks=pytest.mark.xfail(
                (
                    not np_version_gte1p24
                    or (np_version_gte1p24 and np._get_promotion_state() != "weak")
                ),
                reason="np.float32(1.1) ends up as 1.100000023841858, so "
                "np_can_hold_element raises and we cast to float64",
            ),
        ),
        (1 + 1j, np.complex128, FutureWarning),
        (True, object, FutureWarning),
        (np.uint8(2), np.float32, None),
        (np.uint32(2), np.float32, None),
        # float32 cannot hold np.iinfo(np.uint32).max exactly
        # (closest it can hold is 4294967300.0 which off by 5.0), so
        # we cast to float64
        (np.uint32(np.iinfo(np.uint32).max), np.float64, FutureWarning),
        (np.uint64(2), np.float32, None),
        (np.int64(2), np.float32, None),
    ],
)
class TestCoercionFloat32(CoercionTest):
    @pytest.fixture
    def obj(self):
        return Series([1.1, 2.2, 3.3, 4.4], dtype=np.float32)

    def test_slice_key(self, obj, key, expected, warn, val, indexer_sli, is_inplace):
        # Run the inherited check first, then fail loudly for float vals
        # so the xfail marked above cannot silently xpass.
        super().test_slice_key(obj, key, expected, warn, val, indexer_sli, is_inplace)

        if isinstance(val, float):
            # the xfail would xpass bc test_slice_key short-circuits
            raise AssertionError("xfail not relevant for this test.")
|
1475 |
+
|
1476 |
+
|
1477 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        # a Timestamp keeps datetime64[ns]; non-datetime scalars upcast
        # to object with the incompatible-dtype FutureWarning
        (Timestamp("2012-01-01"), "datetime64[ns]", None),
        (1, object, FutureWarning),
        ("x", object, FutureWarning),
    ],
)
class TestCoercionDatetime64(CoercionTest):
    # previously test_setitem_series_datetime64 in tests.indexing.test_coercion

    @pytest.fixture
    def obj(self):
        return Series(date_range("2011-01-01", freq="D", periods=4))

    @pytest.fixture
    def warn(self):
        # NOTE(review): the class-level parametrization also supplies
        # "warn" and takes precedence over this fixture.
        return None
|
1495 |
+
|
1496 |
+
|
1497 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        (Timestamp("2012-01-01", tz="US/Eastern"), "datetime64[ns, US/Eastern]", None),
        # pre-2.0, a mis-matched tz would end up casting to object
        (Timestamp("2012-01-01", tz="US/Pacific"), "datetime64[ns, US/Eastern]", None),
        # a tz-naive Timestamp or an int cannot be held -> object + warn
        (Timestamp("2012-01-01"), object, FutureWarning),
        (1, object, FutureWarning),
    ],
)
class TestCoercionDatetime64TZ(CoercionTest):
    # previously test_setitem_series_datetime64tz in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        tz = "US/Eastern"
        return Series(date_range("2011-01-01", freq="D", periods=4, tz=tz))

    @pytest.fixture
    def warn(self):
        # NOTE(review): the class-level parametrization also supplies
        # "warn" and takes precedence over this fixture.
        return None
|
1517 |
+
|
1518 |
+
|
1519 |
+
@pytest.mark.parametrize(
    "val,exp_dtype,warn",
    [
        # a Timedelta keeps timedelta64[ns]; other scalars upcast to
        # object with the incompatible-dtype FutureWarning
        (Timedelta("12 day"), "timedelta64[ns]", None),
        (1, object, FutureWarning),
        ("x", object, FutureWarning),
    ],
)
class TestCoercionTimedelta64(CoercionTest):
    # previously test_setitem_series_timedelta64 in tests.indexing.test_coercion
    @pytest.fixture
    def obj(self):
        return Series(timedelta_range("1 day", periods=4))

    @pytest.fixture
    def warn(self):
        # NOTE(review): the class-level parametrization also supplies
        # "warn" and takes precedence over this fixture.
        return None
|
1536 |
+
|
1537 |
+
|
1538 |
+
@pytest.mark.parametrize(
    "val", ["foo", Period("2016", freq="Y"), Interval(1, 2, closed="both")]
)
@pytest.mark.parametrize("exp_dtype", [object])
class TestPeriodIntervalCoercion(CoercionTest):
    # GH#45768
    @pytest.fixture(
        params=[
            period_range("2016-01-01", periods=3, freq="D"),
            interval_range(1, 5),
        ]
    )
    def obj(self, request):
        # Run every scenario against both a PeriodIndex-backed and an
        # IntervalIndex-backed Series.
        return Series(request.param)

    @pytest.fixture
    def warn(self):
        # Each parametrized value is incompatible with the backing dtype,
        # so setting always upcasts to object and warns.
        return FutureWarning
|
1556 |
+
|
1557 |
+
|
1558 |
+
def test_20643():
    # closed by GH#45121
    orig = Series([0, 1, 2], index=["a", "b", "c"])

    expected = Series([0, 2.7, 2], index=["a", "b", "c"])

    # Every Series setitem flavor must upcast int64 -> float64 for the
    # incompatible float value, emitting the deprecation FutureWarning.
    series_setters = [
        lambda s: s.at.__setitem__("b", 2.7),
        lambda s: s.loc.__setitem__("b", 2.7),
        lambda s: s.__setitem__("b", 2.7),
        lambda s: s.iat.__setitem__(1, 2.7),
        lambda s: s.iloc.__setitem__(1, 2.7),
    ]
    for do_set in series_setters:
        ser = orig.copy()
        with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
            do_set(ser)
        tm.assert_series_equal(ser, expected)

    orig_df = orig.to_frame("A")
    expected_df = expected.to_frame("A")

    # Same contract through the DataFrame accessors.
    frame_setters = [
        lambda f: f.at.__setitem__(("b", "A"), 2.7),
        lambda f: f.loc.__setitem__(("b", "A"), 2.7),
        lambda f: f.iloc.__setitem__((1, 0), 2.7),
        lambda f: f.iat.__setitem__((1, 0), 2.7),
    ]
    for do_set in frame_setters:
        df = orig_df.copy()
        with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
            do_set(df)
        tm.assert_frame_equal(df, expected_df)
|
1611 |
+
|
1612 |
+
|
1613 |
+
def test_20643_comment():
    # https://github.com/pandas-dev/pandas/issues/20643#issuecomment-431244590
    # fixed sometime prior to GH#45121
    orig = Series([0, 1, 2], index=["a", "b", "c"])
    expected = Series([np.nan, 1, 2], index=["a", "b", "c"])

    # Setting None positionally converts the slot to NaN (int64 -> float64)
    # through both scalar positional setters.
    for do_set in (
        lambda s: s.iat.__setitem__(0, None),
        lambda s: s.iloc.__setitem__(0, None),
    ):
        ser = orig.copy()
        do_set(ser)
        tm.assert_series_equal(ser, expected)
|
1626 |
+
|
1627 |
+
|
1628 |
+
def test_15413():
    # fixed by GH#45121
    expected = Series([1, 2.5, 3])

    def check(attr, key_maker):
        # Augmented assignment of +0.5 must upcast int64 -> float64 and
        # emit the incompatible-dtype FutureWarning, for every indexer.
        ser = Series([1, 2, 3])
        target = ser if attr is None else getattr(ser, attr)
        key = key_maker(ser)
        with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
            target[key] += 0.5
        tm.assert_series_equal(ser, expected)

    check(None, lambda s: s == 2)
    check(None, lambda s: 1)
    check("loc", lambda s: 1)
    check("iloc", lambda s: 1)
    check("iat", lambda s: 1)
    check("at", lambda s: 1)
|
1661 |
+
|
1662 |
+
|
1663 |
+
def test_32878_int_itemsize():
    # Fixed by GH#45121
    # int64-max cannot be stored losslessly in an int32 Series, so the
    # assignment must upcast to int64 (with the deprecation warning)
    # rather than silently truncate.
    too_big = np.int64(np.iinfo(np.int64).max)
    ser = Series(np.arange(5).astype("i4"))
    with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
        ser[0] = too_big
    tm.assert_series_equal(ser, Series([too_big, 1, 2, 3, 4], dtype=np.int64))
|
1672 |
+
|
1673 |
+
|
1674 |
+
def test_32878_complex_itemsize():
    # complex64 cannot represent float64-max, so setting it must upcast
    # the Series to complex128 instead of overflowing to inf.
    ser = Series(np.arange(5).astype("c8"))
    val = np.finfo(np.float64).max.astype("c16")

    # GH#32878 used to coerce val to inf+0.000000e+00j
    with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
        ser[0] = val
    assert ser[0] == val
    tm.assert_series_equal(ser, Series([val, 1, 2, 3, 4], dtype="c16"))
|
1686 |
+
|
1687 |
+
|
1688 |
+
def test_37692(indexer_al):
    # GH#37692
    # Setting a string into an int64 Series via the label-based indexer
    # supplied by the fixture must upcast to object (with the
    # incompatible-dtype FutureWarning), not raise.
    ser = Series([1, 2, 3], index=["a", "b", "c"])
    with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
        indexer_al(ser)["b"] = "test"
    expected = Series([1, "test", 3], index=["a", "b", "c"], dtype=object)
    tm.assert_series_equal(ser, expected)
|
1695 |
+
|
1696 |
+
|
1697 |
+
def test_setitem_bool_int_float_consistency(indexer_sli):
    # GH#21513
    # bool-with-int and bool-with-float both upcast to object
    # int-with-float and float-with-int are both non-casting so long
    # as the setitem can be done losslessly
    for dtype in [np.float64, np.int64]:
        # bool value into a numeric Series -> object, with warning
        ser = Series(0, index=range(3), dtype=dtype)
        with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
            indexer_sli(ser)[0] = True
        assert ser.dtype == object

        # numeric value into a bool Series -> object, with warning
        ser = Series(0, index=range(3), dtype=bool)
        with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
            ser[0] = dtype(1)
        assert ser.dtype == object

    # 1.0 can be held losslessly, so no casting
    ser = Series(0, index=range(3), dtype=np.int64)
    indexer_sli(ser)[0] = np.float64(1.0)
    assert ser.dtype == np.int64

    # 1 can be held losslessly, so no casting
    ser = Series(0, index=range(3), dtype=np.float64)
    indexer_sli(ser)[0] = np.int64(1)
    # BUG FIX: this assertion was missing, so the final "no casting"
    # claim was never actually checked.
    assert ser.dtype == np.float64
|
1721 |
+
|
1722 |
+
|
1723 |
+
def test_setitem_positional_with_casting():
    # GH#45070 case where in __setitem__ we get a KeyError, then when
    # we fallback we *also* get a ValueError if we try to set inplace.
    ser = Series([1, 2, 3], index=["a", "b", "c"])

    # 0 is not a label here; the positional fallback is deprecated, and
    # the string value additionally forces an upcast to object.
    warn_msg = "Series.__setitem__ treating keys as positions is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=warn_msg):
        ser[0] = "X"
    expected = Series(["X", 2, 3], index=["a", "b", "c"], dtype=object)
    tm.assert_series_equal(ser, expected)
|
1733 |
+
|
1734 |
+
|
1735 |
+
def test_setitem_positional_float_into_int_coerces():
    # Case where we hit a KeyError and then trying to set in-place incorrectly
    # casts a float to an int
    ser = Series([1, 2, 3], index=["a", "b", "c"])

    warn_msg = "Series.__setitem__ treating keys as positions is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=warn_msg):
        ser[0] = 1.5
    # The float must survive intact: result upcasts to float64 rather
    # than truncating 1.5 to 1 in the int64 Series.
    expected = Series([1.5, 2, 3], index=["a", "b", "c"])
    tm.assert_series_equal(ser, expected)
|
1745 |
+
|
1746 |
+
|
1747 |
+
def test_setitem_int_not_positional():
    # GH#42215 deprecated falling back to positional on __setitem__ with an
    # int not contained in the index; enforced in 2.0
    ser = Series([1, 2, 3, 4], index=[1.1, 2.1, 3.0, 4.1])
    assert not ser.index._should_fallback_to_positional
    # assert not ser.index.astype(object)._should_fallback_to_positional

    # 3.0 is in our index, so post-enforcement behavior is unchanged
    ser[3] = 10
    tm.assert_series_equal(ser, Series([1, 2, 10, 4], index=ser.index))

    # pre-enforcement `ser[5] = 5` raised IndexError
    ser[5] = 5
    tm.assert_series_equal(
        ser, Series([1, 2, 10, 4, 5], index=[1.1, 2.1, 3.0, 4.1, 5.0])
    )

    # An int key absent from an IntervalIndex enlarges by label as well.
    ii = IntervalIndex.from_breaks(range(10))[::2]
    ser2 = Series(range(len(ii)), index=ii)
    enlarged = ii.astype(object).append(Index([4]))
    expected2 = Series([0, 1, 2, 3, 4, 9], index=enlarged)
    # pre-enforcement `ser2[4] = 9` interpreted 4 as positional
    ser2[4] = 9
    tm.assert_series_equal(ser2, expected2)

    # Same for a MultiIndex: 4 is treated as a level-0 label.
    mi = MultiIndex.from_product([ser.index, ["A", "B"]])
    ser3 = Series(range(len(mi)), index=mi)
    expected3 = ser3.copy()
    expected3.loc[4] = 99
    # pre-enforcement `ser3[4] = 99` interpreted 4 as positional
    ser3[4] = 99
    tm.assert_series_equal(ser3, expected3)
|
1779 |
+
|
1780 |
+
|
1781 |
+
def test_setitem_with_bool_indexer():
    # GH#42530

    frame = DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})

    # Popping yields an independent Series; a boolean-mask setitem on it
    # must modify only the selected row.
    popped = frame.pop("b").copy()
    popped[[True, False, False]] = 9
    tm.assert_series_equal(popped, Series(data=[9, 5, 6], name="b"))

    # Same through DataFrame.loc on the remaining column.
    frame.loc[[True, False, False], "a"] = 10
    tm.assert_frame_equal(frame, DataFrame({"a": [10, 2, 3]}))
|
1793 |
+
|
1794 |
+
|
1795 |
+
@pytest.mark.parametrize("size", range(2, 6))
@pytest.mark.parametrize(
    "mask", [[True, False, False, False, False], [True, False], [False]]
)
@pytest.mark.parametrize(
    "item", [2.0, np.nan, np.finfo(float).max, np.finfo(float).min]
)
# Test numpy arrays, lists and tuples as the input to be
# broadcast
@pytest.mark.parametrize(
    "box", [lambda x: np.array([x]), lambda x: [x], lambda x: (x,)]
)
def test_setitem_bool_indexer_dont_broadcast_length1_values(size, mask, item, box):
    # GH#44265
    # see also tests.series.indexing.test_where.test_broadcast

    # Tile/truncate the mask to match the Series length.
    selection = np.resize(mask, size)

    data = np.arange(size, dtype=float)

    ser = Series(data)

    if selection.sum() != 1:
        # A length-1 value must NOT be broadcast when the mask selects a
        # different number of positions.
        msg = (
            "cannot set using a list-like indexer with a different "
            "length than the value"
        )
        with pytest.raises(ValueError, match=msg):
            # GH#44265
            ser[selection] = box(item)
    else:
        # In this corner case setting is equivalent to setting with the unboxed
        # item
        ser[selection] = box(item)

        expected = Series(np.arange(size, dtype=float))
        expected[selection] = item
        tm.assert_series_equal(ser, expected)
|
1833 |
+
|
1834 |
+
|
1835 |
+
def test_setitem_empty_mask_dont_upcast_dt64():
    # An all-False mask selects nothing, so assigning an incompatible
    # value must leave the datetime64 Series completely untouched
    # (no upcast to object).
    dti = date_range("2016-01-01", periods=3)
    ser = Series(dti)
    baseline = ser.copy()
    nothing_selected = np.zeros(3, dtype=bool)

    ser[nothing_selected] = "foo"
    assert ser.dtype == dti.dtype  # no-op -> dont upcast
    tm.assert_series_equal(ser, baseline)

    ser.mask(nothing_selected, "foo", inplace=True)
    assert ser.dtype == dti.dtype  # no-op -> dont upcast
    tm.assert_series_equal(ser, baseline)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_take.py
ADDED
@@ -0,0 +1,50 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
import pandas as pd
|
4 |
+
from pandas import Series
|
5 |
+
import pandas._testing as tm
|
6 |
+
|
7 |
+
|
8 |
+
def test_take_validate_axis():
    # GH#51022
    ser = Series([-1, 5, 6, 2, 4])

    # Series only has axis 0; any other axis label must raise ValueError.
    msg = "No axis named foo for object type Series"
    with pytest.raises(ValueError, match=msg):
        ser.take([1, 2], axis="foo")
|
15 |
+
|
16 |
+
|
17 |
+
def test_take():
    ser = Series([-1, 5, 6, 2, 4])

    # take is positional and keeps the original labels of the positions.
    actual = ser.take([1, 3, 4])
    expected = Series([5, 2, 4], index=[1, 3, 4])
    tm.assert_series_equal(actual, expected)

    # Negative indices count from the end, numpy-style.
    actual = ser.take([-1, 3, 4])
    expected = Series([4, 2, 4], index=[4, 3, 4])
    tm.assert_series_equal(actual, expected)

    # Out-of-bounds indices raise rather than wrap or clip.
    msg = "indices are out-of-bounds"
    with pytest.raises(IndexError, match=msg):
        ser.take([1, 10])
    with pytest.raises(IndexError, match=msg):
        ser.take([2, 5])
|
33 |
+
|
34 |
+
|
35 |
+
def test_take_categorical():
    # https://github.com/pandas-dev/pandas/issues/20664
    # take on a categorical Series: negative indices count from the end
    # and the full category set must be preserved.
    cat = pd.Categorical(["a", "b", "c"])
    ser = Series(cat)
    result = ser.take([-2, -2, 0])
    expected = Series(
        pd.Categorical(["b", "b", "a"], categories=["a", "b", "c"]), index=[1, 1, 0]
    )
    tm.assert_series_equal(result, expected)
|
43 |
+
|
44 |
+
|
45 |
+
def test_take_slice_raises():
    ser = Series([-1, 5, 6, 2, 4])

    # take requires a list-like of integers; a slice is rejected up front.
    msg = "Series.take requires a sequence of integers, not slice"
    with pytest.raises(TypeError, match=msg):
        ser.take(slice(0, 3, 1))
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_where.py
ADDED
@@ -0,0 +1,481 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas._config import using_pyarrow_string_dtype
|
5 |
+
|
6 |
+
from pandas.core.dtypes.common import is_integer
|
7 |
+
|
8 |
+
import pandas as pd
|
9 |
+
from pandas import (
|
10 |
+
Series,
|
11 |
+
Timestamp,
|
12 |
+
date_range,
|
13 |
+
isna,
|
14 |
+
)
|
15 |
+
import pandas._testing as tm
|
16 |
+
|
17 |
+
|
18 |
+
def test_where_unsafe_int(any_signed_int_numpy_dtype):
|
19 |
+
s = Series(np.arange(10), dtype=any_signed_int_numpy_dtype)
|
20 |
+
mask = s < 5
|
21 |
+
|
22 |
+
s[mask] = range(2, 7)
|
23 |
+
expected = Series(
|
24 |
+
list(range(2, 7)) + list(range(5, 10)),
|
25 |
+
dtype=any_signed_int_numpy_dtype,
|
26 |
+
)
|
27 |
+
|
28 |
+
tm.assert_series_equal(s, expected)
|
29 |
+
|
30 |
+
|
31 |
+
def test_where_unsafe_float(float_numpy_dtype):
|
32 |
+
s = Series(np.arange(10), dtype=float_numpy_dtype)
|
33 |
+
mask = s < 5
|
34 |
+
|
35 |
+
s[mask] = range(2, 7)
|
36 |
+
data = list(range(2, 7)) + list(range(5, 10))
|
37 |
+
expected = Series(data, dtype=float_numpy_dtype)
|
38 |
+
|
39 |
+
tm.assert_series_equal(s, expected)
|
40 |
+
|
41 |
+
|
42 |
+
@pytest.mark.parametrize(
    "dtype,expected_dtype",
    [
        (np.int8, np.float64),
        (np.int16, np.float64),
        (np.int32, np.float64),
        (np.int64, np.float64),
        (np.float32, np.float32),
        (np.float64, np.float64),
    ],
)
def test_where_unsafe_upcast(dtype, expected_dtype):
    # see gh-9743
    ser = Series(np.arange(10), dtype=dtype)
    replacements = [2.5, 3.5, 4.5, 5.5, 6.5]
    below_five = ser < 5
    expected = Series(replacements + list(range(5, 10)), dtype=expected_dtype)
    # Float-into-float is silent; int-into-float upcasts emit the
    # incompatible-dtype FutureWarning.
    if np.dtype(dtype).kind == np.dtype(expected_dtype).kind == "f":
        warn = None
    else:
        warn = FutureWarning
    with tm.assert_produces_warning(warn, match="incompatible dtype"):
        ser[below_five] = replacements
    tm.assert_series_equal(ser, expected)
|
67 |
+
|
68 |
+
|
69 |
+
def test_where_unsafe():
    # see gh-9731
    s = Series(np.arange(10), dtype="int64")
    values = [2.5, 3.5, 4.5, 5.5]

    mask = s > 5
    expected = Series(list(range(6)) + values, dtype="float64")

    # floats into an int Series upcast to float64 and warn
    with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
        s[mask] = values
    tm.assert_series_equal(s, expected)

    # see gh-3235
    s = Series(np.arange(10), dtype="int64")
    mask = s < 5
    # ints into an int Series set in place without casting
    s[mask] = range(2, 7)
    expected = Series(list(range(2, 7)) + list(range(5, 10)), dtype="int64")
    tm.assert_series_equal(s, expected)
    assert s.dtype == expected.dtype

    s = Series(np.arange(10), dtype="int64")
    mask = s > 5
    s[mask] = [0] * 4
    expected = Series([0, 1, 2, 3, 4, 5] + [0] * 4, dtype="int64")
    tm.assert_series_equal(s, expected)

    s = Series(np.arange(10))
    mask = s > 5

    # the value length must match the number of selected positions
    msg = "cannot set using a list-like indexer with a different length than the value"
    with pytest.raises(ValueError, match=msg):
        s[mask] = [5, 4, 3, 2, 1]

    with pytest.raises(ValueError, match=msg):
        s[mask] = [0] * 5

    # dtype changes
    s = Series([1, 2, 3, 4])
    result = s.where(s > 2, np.nan)
    expected = Series([np.nan, np.nan, 3, 4])
    tm.assert_series_equal(result, expected)

    # GH 4667
    # setting with None changes dtype
    s = Series(range(10)).astype(float)
    s[8] = None
    result = s[8]
    assert isna(result)

    s = Series(range(10)).astype(float)
    s[s > 8] = None
    result = s[isna(s)]
    expected = Series(np.nan, index=[9])
    tm.assert_series_equal(result, expected)
|
123 |
+
|
124 |
+
|
125 |
+
def test_where():
    ser = Series(np.random.default_rng(2).standard_normal(5))
    positive = ser > 0

    # where(cond).dropna() is equivalent to boolean selection
    kept = ser.where(positive).dropna()
    tm.assert_series_equal(kept, ser[positive])

    # with an `other`, negatives are flipped -> absolute value
    flipped = ser.where(positive, -ser)
    tm.assert_series_equal(flipped, ser.abs())

    # where returns a new object with the same shape
    masked = ser.where(positive)
    assert ser.shape == masked.shape
    assert masked is not ser

    # test alignment
    cond = Series([True, False, False, True, False], index=ser.index)
    neg = -(ser.abs())

    expected = neg[cond].reindex(neg.index[:3]).reindex(neg.index)
    tm.assert_series_equal(neg.where(cond[:3]), expected)

    expected = neg.abs()
    expected.iloc[0] = neg[0]
    tm.assert_series_equal(neg.where(cond[:3], -neg), expected)
|
152 |
+
|
153 |
+
|
154 |
+
def test_where_error():
    s = Series(np.random.default_rng(2).standard_normal(5))
    cond = s > 0

    # a scalar or wrong-length condition must raise
    msg = "Array conditional must be same shape as self"
    with pytest.raises(ValueError, match=msg):
        s.where(1)
    with pytest.raises(ValueError, match=msg):
        s.where(cond[:3].values, -s)

    # GH 2745
    s = Series([1, 2])
    s[[True, False]] = [0, 1]
    expected = Series([0, 2])
    tm.assert_series_equal(s, expected)

    # failures
    # value lengths that match neither the mask nor the selection raise
    msg = "cannot set using a list-like indexer with a different length than the value"
    with pytest.raises(ValueError, match=msg):
        s[[True, False]] = [0, 2, 3]

    with pytest.raises(ValueError, match=msg):
        s[[True, False]] = []
|
177 |
+
|
178 |
+
|
179 |
+
@pytest.mark.parametrize("klass", [list, tuple, np.array, Series])
def test_where_array_like(klass):
    # see gh-15414
    # the condition may be any boolean list-like, not just an ndarray
    s = Series([1, 2, 3])
    cond = [False, True, True]
    expected = Series([np.nan, 2, 3])

    result = s.where(klass(cond))
    tm.assert_series_equal(result, expected)
|
188 |
+
|
189 |
+
|
190 |
+
@pytest.mark.parametrize(
    "cond",
    [
        # non-boolean values must be rejected even if "truthy-looking"
        [1, 0, 1],
        Series([2, 5, 7]),
        ["True", "False", "True"],
        [Timestamp("2017-01-01"), pd.NaT, Timestamp("2017-01-02")],
    ],
)
def test_where_invalid_input(cond):
    # see gh-15414: only boolean arrays accepted
    s = Series([1, 2, 3])
    msg = "Boolean array expected for the condition"

    with pytest.raises(ValueError, match=msg):
        s.where(cond)

    # a boolean list of the wrong length fails the shape check instead
    msg = "Array conditional must be same shape as self"
    with pytest.raises(ValueError, match=msg):
        s.where([True])
|
210 |
+
|
211 |
+
|
212 |
+
def test_where_ndframe_align():
    msg = "Array conditional must be same shape as self"
    s = Series([1, 2, 3])

    # a plain list condition is NOT aligned -> length mismatch raises
    cond = [True]
    with pytest.raises(ValueError, match=msg):
        s.where(cond)

    expected = Series([1, np.nan, np.nan])

    # ...but a Series condition is aligned on the index first, with
    # missing positions treated as False
    out = s.where(Series(cond))
    tm.assert_series_equal(out, expected)

    cond = np.array([False, True, False, True])
    with pytest.raises(ValueError, match=msg):
        s.where(cond)

    expected = Series([np.nan, 2, np.nan])

    out = s.where(Series(cond))
    tm.assert_series_equal(out, expected)
|
233 |
+
|
234 |
+
|
235 |
+
@pytest.mark.xfail(using_pyarrow_string_dtype(), reason="can't set ints into string")
def test_where_setitem_invalid():
    # GH 2702
    # make sure correct exceptions are raised on invalid list assignment

    # error message varies with the kind of indexer used
    msg = (
        lambda x: f"cannot set using a {x} indexer with a "
        "different length than the value"
    )
    # slice
    s = Series(list("abc"))

    with pytest.raises(ValueError, match=msg("slice")):
        s[0:3] = list(range(27))

    # exact-length slice assignment is fine
    s[0:3] = list(range(3))
    expected = Series([0, 1, 2])
    tm.assert_series_equal(s.astype(np.int64), expected)

    # slice with step
    s = Series(list("abcdef"))

    with pytest.raises(ValueError, match=msg("slice")):
        s[0:4:2] = list(range(27))

    s = Series(list("abcdef"))
    s[0:4:2] = list(range(2))
    expected = Series([0, "b", 1, "d", "e", "f"])
    tm.assert_series_equal(s, expected)

    # neg slices
    s = Series(list("abcdef"))

    with pytest.raises(ValueError, match=msg("slice")):
        s[:-1] = list(range(27))

    s[-3:-1] = list(range(2))
    expected = Series(["a", "b", "c", 0, 1, "f"])
    tm.assert_series_equal(s, expected)

    # list
    s = Series(list("abc"))

    with pytest.raises(ValueError, match=msg("list-like")):
        s[[0, 1, 2]] = list(range(27))

    s = Series(list("abc"))

    with pytest.raises(ValueError, match=msg("list-like")):
        s[[0, 1, 2]] = list(range(2))

    # scalar
    # a list set at a single position is stored as-is, not broadcast
    s = Series(list("abc"))
    s[0] = list(range(10))
    expected = Series([list(range(10)), "b", "c"])
    tm.assert_series_equal(s, expected)
|
291 |
+
|
292 |
+
|
293 |
+
@pytest.mark.parametrize("size", range(2, 6))
@pytest.mark.parametrize(
    "mask", [[True, False, False, False, False], [True, False], [False]]
)
@pytest.mark.parametrize(
    "item", [2.0, np.nan, np.finfo(float).max, np.finfo(float).min]
)
# Test numpy arrays, lists and tuples as the input to be
# broadcast
@pytest.mark.parametrize(
    "box", [lambda x: np.array([x]), lambda x: [x], lambda x: (x,)]
)
def test_broadcast(size, mask, item, box):
    # GH#8801, GH#4195
    # Tile/truncate the mask to the Series length.
    selection = np.resize(mask, size)

    data = np.arange(size, dtype=float)

    # Construct the expected series by taking the source
    # data or item based on the selection
    expected = Series(
        [item if use_item else data[i] for i, use_item in enumerate(selection)]
    )

    # scalar setitem through the mask
    s = Series(data)

    s[selection] = item
    tm.assert_series_equal(s, expected)

    # where/mask broadcast a length-1 boxed value the same way
    s = Series(data)
    result = s.where(~selection, box(item))
    tm.assert_series_equal(result, expected)

    s = Series(data)
    result = s.mask(selection, box(item))
    tm.assert_series_equal(result, expected)
|
329 |
+
|
330 |
+
|
331 |
+
def test_where_inplace():
|
332 |
+
s = Series(np.random.default_rng(2).standard_normal(5))
|
333 |
+
cond = s > 0
|
334 |
+
|
335 |
+
rs = s.copy()
|
336 |
+
|
337 |
+
rs.where(cond, inplace=True)
|
338 |
+
tm.assert_series_equal(rs.dropna(), s[cond])
|
339 |
+
tm.assert_series_equal(rs, s.where(cond))
|
340 |
+
|
341 |
+
rs = s.copy()
|
342 |
+
rs.where(cond, -s, inplace=True)
|
343 |
+
tm.assert_series_equal(rs, s.where(cond, -s))
|
344 |
+
|
345 |
+
|
346 |
+
def test_where_dups():
|
347 |
+
# GH 4550
|
348 |
+
# where crashes with dups in index
|
349 |
+
s1 = Series(list(range(3)))
|
350 |
+
s2 = Series(list(range(3)))
|
351 |
+
comb = pd.concat([s1, s2])
|
352 |
+
result = comb.where(comb < 2)
|
353 |
+
expected = Series([0, 1, np.nan, 0, 1, np.nan], index=[0, 1, 2, 0, 1, 2])
|
354 |
+
tm.assert_series_equal(result, expected)
|
355 |
+
|
356 |
+
# GH 4548
|
357 |
+
# inplace updating not working with dups
|
358 |
+
comb[comb < 1] = 5
|
359 |
+
expected = Series([5, 1, 2, 5, 1, 2], index=[0, 1, 2, 0, 1, 2])
|
360 |
+
tm.assert_series_equal(comb, expected)
|
361 |
+
|
362 |
+
comb[comb < 2] += 10
|
363 |
+
expected = Series([5, 11, 2, 5, 11, 2], index=[0, 1, 2, 0, 1, 2])
|
364 |
+
tm.assert_series_equal(comb, expected)
|
365 |
+
|
366 |
+
|
367 |
+
def test_where_numeric_with_string():
|
368 |
+
# GH 9280
|
369 |
+
s = Series([1, 2, 3])
|
370 |
+
w = s.where(s > 1, "X")
|
371 |
+
|
372 |
+
assert not is_integer(w[0])
|
373 |
+
assert is_integer(w[1])
|
374 |
+
assert is_integer(w[2])
|
375 |
+
assert isinstance(w[0], str)
|
376 |
+
assert w.dtype == "object"
|
377 |
+
|
378 |
+
w = s.where(s > 1, ["X", "Y", "Z"])
|
379 |
+
assert not is_integer(w[0])
|
380 |
+
assert is_integer(w[1])
|
381 |
+
assert is_integer(w[2])
|
382 |
+
assert isinstance(w[0], str)
|
383 |
+
assert w.dtype == "object"
|
384 |
+
|
385 |
+
w = s.where(s > 1, np.array(["X", "Y", "Z"]))
|
386 |
+
assert not is_integer(w[0])
|
387 |
+
assert is_integer(w[1])
|
388 |
+
assert is_integer(w[2])
|
389 |
+
assert isinstance(w[0], str)
|
390 |
+
assert w.dtype == "object"
|
391 |
+
|
392 |
+
|
393 |
+
@pytest.mark.parametrize("dtype", ["timedelta64[ns]", "datetime64[ns]"])
|
394 |
+
def test_where_datetimelike_coerce(dtype):
|
395 |
+
ser = Series([1, 2], dtype=dtype)
|
396 |
+
expected = Series([10, 10])
|
397 |
+
mask = np.array([False, False])
|
398 |
+
|
399 |
+
msg = "Downcasting behavior in Series and DataFrame methods 'where'"
|
400 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
401 |
+
rs = ser.where(mask, [10, 10])
|
402 |
+
tm.assert_series_equal(rs, expected)
|
403 |
+
|
404 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
405 |
+
rs = ser.where(mask, 10)
|
406 |
+
tm.assert_series_equal(rs, expected)
|
407 |
+
|
408 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
409 |
+
rs = ser.where(mask, 10.0)
|
410 |
+
tm.assert_series_equal(rs, expected)
|
411 |
+
|
412 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
413 |
+
rs = ser.where(mask, [10.0, 10.0])
|
414 |
+
tm.assert_series_equal(rs, expected)
|
415 |
+
|
416 |
+
rs = ser.where(mask, [10.0, np.nan])
|
417 |
+
expected = Series([10, np.nan], dtype="object")
|
418 |
+
tm.assert_series_equal(rs, expected)
|
419 |
+
|
420 |
+
|
421 |
+
def test_where_datetimetz():
|
422 |
+
# GH 15701
|
423 |
+
timestamps = ["2016-12-31 12:00:04+00:00", "2016-12-31 12:00:04.010000+00:00"]
|
424 |
+
ser = Series([Timestamp(t) for t in timestamps], dtype="datetime64[ns, UTC]")
|
425 |
+
rs = ser.where(Series([False, True]))
|
426 |
+
expected = Series([pd.NaT, ser[1]], dtype="datetime64[ns, UTC]")
|
427 |
+
tm.assert_series_equal(rs, expected)
|
428 |
+
|
429 |
+
|
430 |
+
def test_where_sparse():
|
431 |
+
# GH#17198 make sure we dont get an AttributeError for sp_index
|
432 |
+
ser = Series(pd.arrays.SparseArray([1, 2]))
|
433 |
+
result = ser.where(ser >= 2, 0)
|
434 |
+
expected = Series(pd.arrays.SparseArray([0, 2]))
|
435 |
+
tm.assert_series_equal(result, expected)
|
436 |
+
|
437 |
+
|
438 |
+
def test_where_empty_series_and_empty_cond_having_non_bool_dtypes():
|
439 |
+
# https://github.com/pandas-dev/pandas/issues/34592
|
440 |
+
ser = Series([], dtype=float)
|
441 |
+
result = ser.where([])
|
442 |
+
tm.assert_series_equal(result, ser)
|
443 |
+
|
444 |
+
|
445 |
+
def test_where_categorical(frame_or_series):
|
446 |
+
# https://github.com/pandas-dev/pandas/issues/18888
|
447 |
+
exp = frame_or_series(
|
448 |
+
pd.Categorical(["A", "A", "B", "B", np.nan], categories=["A", "B", "C"]),
|
449 |
+
dtype="category",
|
450 |
+
)
|
451 |
+
df = frame_or_series(["A", "A", "B", "B", "C"], dtype="category")
|
452 |
+
res = df.where(df != "C")
|
453 |
+
tm.assert_equal(exp, res)
|
454 |
+
|
455 |
+
|
456 |
+
def test_where_datetimelike_categorical(tz_naive_fixture):
|
457 |
+
# GH#37682
|
458 |
+
tz = tz_naive_fixture
|
459 |
+
|
460 |
+
dr = date_range("2001-01-01", periods=3, tz=tz)._with_freq(None)
|
461 |
+
lvals = pd.DatetimeIndex([dr[0], dr[1], pd.NaT])
|
462 |
+
rvals = pd.Categorical([dr[0], pd.NaT, dr[2]])
|
463 |
+
|
464 |
+
mask = np.array([True, True, False])
|
465 |
+
|
466 |
+
# DatetimeIndex.where
|
467 |
+
res = lvals.where(mask, rvals)
|
468 |
+
tm.assert_index_equal(res, dr)
|
469 |
+
|
470 |
+
# DatetimeArray.where
|
471 |
+
res = lvals._data._where(mask, rvals)
|
472 |
+
tm.assert_datetime_array_equal(res, dr._data)
|
473 |
+
|
474 |
+
# Series.where
|
475 |
+
res = Series(lvals).where(mask, rvals)
|
476 |
+
tm.assert_series_equal(res, Series(dr))
|
477 |
+
|
478 |
+
# DataFrame.where
|
479 |
+
res = pd.DataFrame(lvals).where(mask[:, None], pd.DataFrame(rvals))
|
480 |
+
|
481 |
+
tm.assert_frame_equal(res, pd.DataFrame(dr))
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/indexing/test_xs.py
ADDED
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas import (
|
5 |
+
MultiIndex,
|
6 |
+
Series,
|
7 |
+
date_range,
|
8 |
+
)
|
9 |
+
import pandas._testing as tm
|
10 |
+
|
11 |
+
|
12 |
+
def test_xs_datetimelike_wrapping():
|
13 |
+
# GH#31630 a case where we shouldn't wrap datetime64 in Timestamp
|
14 |
+
arr = date_range("2016-01-01", periods=3)._data._ndarray
|
15 |
+
|
16 |
+
ser = Series(arr, dtype=object)
|
17 |
+
for i in range(len(ser)):
|
18 |
+
ser.iloc[i] = arr[i]
|
19 |
+
assert ser.dtype == object
|
20 |
+
assert isinstance(ser[0], np.datetime64)
|
21 |
+
|
22 |
+
result = ser.xs(0)
|
23 |
+
assert isinstance(result, np.datetime64)
|
24 |
+
|
25 |
+
|
26 |
+
class TestXSWithMultiIndex:
|
27 |
+
def test_xs_level_series(self, multiindex_dataframe_random_data):
|
28 |
+
df = multiindex_dataframe_random_data
|
29 |
+
ser = df["A"]
|
30 |
+
expected = ser[:, "two"]
|
31 |
+
result = df.xs("two", level=1)["A"]
|
32 |
+
tm.assert_series_equal(result, expected)
|
33 |
+
|
34 |
+
def test_series_getitem_multiindex_xs_by_label(self):
|
35 |
+
# GH#5684
|
36 |
+
idx = MultiIndex.from_tuples(
|
37 |
+
[("a", "one"), ("a", "two"), ("b", "one"), ("b", "two")]
|
38 |
+
)
|
39 |
+
ser = Series([1, 2, 3, 4], index=idx)
|
40 |
+
return_value = ser.index.set_names(["L1", "L2"], inplace=True)
|
41 |
+
assert return_value is None
|
42 |
+
expected = Series([1, 3], index=["a", "b"])
|
43 |
+
return_value = expected.index.set_names(["L1"], inplace=True)
|
44 |
+
assert return_value is None
|
45 |
+
|
46 |
+
result = ser.xs("one", level="L2")
|
47 |
+
tm.assert_series_equal(result, expected)
|
48 |
+
|
49 |
+
def test_series_getitem_multiindex_xs(self):
|
50 |
+
# GH#6258
|
51 |
+
dt = list(date_range("20130903", periods=3))
|
52 |
+
idx = MultiIndex.from_product([list("AB"), dt])
|
53 |
+
ser = Series([1, 3, 4, 1, 3, 4], index=idx)
|
54 |
+
expected = Series([1, 1], index=list("AB"))
|
55 |
+
|
56 |
+
result = ser.xs("20130903", level=1)
|
57 |
+
tm.assert_series_equal(result, expected)
|
58 |
+
|
59 |
+
def test_series_xs_droplevel_false(self):
|
60 |
+
# GH: 19056
|
61 |
+
mi = MultiIndex.from_tuples(
|
62 |
+
[("a", "x"), ("a", "y"), ("b", "x")], names=["level1", "level2"]
|
63 |
+
)
|
64 |
+
ser = Series([1, 1, 1], index=mi)
|
65 |
+
result = ser.xs("a", axis=0, drop_level=False)
|
66 |
+
expected = Series(
|
67 |
+
[1, 1],
|
68 |
+
index=MultiIndex.from_tuples(
|
69 |
+
[("a", "x"), ("a", "y")], names=["level1", "level2"]
|
70 |
+
),
|
71 |
+
)
|
72 |
+
tm.assert_series_equal(result, expected)
|
73 |
+
|
74 |
+
def test_xs_key_as_list(self):
|
75 |
+
# GH#41760
|
76 |
+
mi = MultiIndex.from_tuples([("a", "x")], names=["level1", "level2"])
|
77 |
+
ser = Series([1], index=mi)
|
78 |
+
with pytest.raises(TypeError, match="list keys are not supported"):
|
79 |
+
ser.xs(["a", "x"], axis=0, drop_level=False)
|
80 |
+
|
81 |
+
with pytest.raises(TypeError, match="list keys are not supported"):
|
82 |
+
ser.xs(["a"], axis=0, drop_level=False)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (193 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (205 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_freq_code.cpython-310.pyc
ADDED
Binary file (1.92 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_frequencies.cpython-310.pyc
ADDED
Binary file (823 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/__pycache__/test_inference.cpython-310.pyc
ADDED
Binary file (13.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_freq_code.py
ADDED
@@ -0,0 +1,69 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas._libs.tslibs import (
|
5 |
+
Period,
|
6 |
+
to_offset,
|
7 |
+
)
|
8 |
+
|
9 |
+
|
10 |
+
@pytest.mark.parametrize(
|
11 |
+
"freqstr,exp_freqstr",
|
12 |
+
[("D", "D"), ("W", "D"), ("ME", "D"), ("s", "s"), ("min", "s"), ("h", "s")],
|
13 |
+
)
|
14 |
+
def test_get_to_timestamp_base(freqstr, exp_freqstr):
|
15 |
+
off = to_offset(freqstr)
|
16 |
+
per = Period._from_ordinal(1, off)
|
17 |
+
exp_code = to_offset(exp_freqstr)._period_dtype_code
|
18 |
+
|
19 |
+
result_code = per._dtype._get_to_timestamp_base()
|
20 |
+
assert result_code == exp_code
|
21 |
+
|
22 |
+
|
23 |
+
@pytest.mark.parametrize(
|
24 |
+
"args,expected",
|
25 |
+
[
|
26 |
+
((1.5, "min"), (90, "s")),
|
27 |
+
((62.4, "min"), (3744, "s")),
|
28 |
+
((1.04, "h"), (3744, "s")),
|
29 |
+
((1, "D"), (1, "D")),
|
30 |
+
((0.342931, "h"), (1234551600, "us")),
|
31 |
+
((1.2345, "D"), (106660800, "ms")),
|
32 |
+
],
|
33 |
+
)
|
34 |
+
def test_resolution_bumping(args, expected):
|
35 |
+
# see gh-14378
|
36 |
+
off = to_offset(str(args[0]) + args[1])
|
37 |
+
assert off.n == expected[0]
|
38 |
+
assert off._prefix == expected[1]
|
39 |
+
|
40 |
+
|
41 |
+
@pytest.mark.parametrize(
|
42 |
+
"args",
|
43 |
+
[
|
44 |
+
(0.5, "ns"),
|
45 |
+
# Too much precision in the input can prevent.
|
46 |
+
(0.3429324798798269273987982, "h"),
|
47 |
+
],
|
48 |
+
)
|
49 |
+
def test_cat(args):
|
50 |
+
msg = "Invalid frequency"
|
51 |
+
|
52 |
+
with pytest.raises(ValueError, match=msg):
|
53 |
+
to_offset(str(args[0]) + args[1])
|
54 |
+
|
55 |
+
|
56 |
+
@pytest.mark.parametrize(
|
57 |
+
"freqstr,expected",
|
58 |
+
[
|
59 |
+
("1h", "2021-01-01T09:00:00"),
|
60 |
+
("1D", "2021-01-02T08:00:00"),
|
61 |
+
("1W", "2021-01-03T08:00:00"),
|
62 |
+
("1ME", "2021-01-31T08:00:00"),
|
63 |
+
("1YE", "2021-12-31T08:00:00"),
|
64 |
+
],
|
65 |
+
)
|
66 |
+
def test_compatibility(freqstr, expected):
|
67 |
+
ts_np = np.datetime64("2021-01-01T08:00:00.00")
|
68 |
+
do = to_offset(freqstr)
|
69 |
+
assert ts_np + do == np.datetime64(expected)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_frequencies.py
ADDED
@@ -0,0 +1,29 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
from pandas._libs.tslibs import offsets
|
4 |
+
|
5 |
+
from pandas.tseries.frequencies import (
|
6 |
+
is_subperiod,
|
7 |
+
is_superperiod,
|
8 |
+
)
|
9 |
+
|
10 |
+
|
11 |
+
@pytest.mark.parametrize(
|
12 |
+
"p1,p2,expected",
|
13 |
+
[
|
14 |
+
# Input validation.
|
15 |
+
(offsets.MonthEnd(), None, False),
|
16 |
+
(offsets.YearEnd(), None, False),
|
17 |
+
(None, offsets.YearEnd(), False),
|
18 |
+
(None, offsets.MonthEnd(), False),
|
19 |
+
(None, None, False),
|
20 |
+
(offsets.YearEnd(), offsets.MonthEnd(), True),
|
21 |
+
(offsets.Hour(), offsets.Minute(), True),
|
22 |
+
(offsets.Second(), offsets.Milli(), True),
|
23 |
+
(offsets.Milli(), offsets.Micro(), True),
|
24 |
+
(offsets.Micro(), offsets.Nano(), True),
|
25 |
+
],
|
26 |
+
)
|
27 |
+
def test_super_sub_symmetry(p1, p2, expected):
|
28 |
+
assert is_superperiod(p1, p2) is expected
|
29 |
+
assert is_subperiod(p2, p1) is expected
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/frequencies/test_inference.py
ADDED
@@ -0,0 +1,558 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from datetime import (
|
2 |
+
datetime,
|
3 |
+
timedelta,
|
4 |
+
)
|
5 |
+
|
6 |
+
import numpy as np
|
7 |
+
import pytest
|
8 |
+
|
9 |
+
from pandas._libs.tslibs.ccalendar import (
|
10 |
+
DAYS,
|
11 |
+
MONTHS,
|
12 |
+
)
|
13 |
+
from pandas._libs.tslibs.offsets import _get_offset
|
14 |
+
from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
|
15 |
+
from pandas.compat import is_platform_windows
|
16 |
+
|
17 |
+
from pandas import (
|
18 |
+
DatetimeIndex,
|
19 |
+
Index,
|
20 |
+
RangeIndex,
|
21 |
+
Series,
|
22 |
+
Timestamp,
|
23 |
+
date_range,
|
24 |
+
period_range,
|
25 |
+
)
|
26 |
+
import pandas._testing as tm
|
27 |
+
from pandas.core.arrays import (
|
28 |
+
DatetimeArray,
|
29 |
+
TimedeltaArray,
|
30 |
+
)
|
31 |
+
from pandas.core.tools.datetimes import to_datetime
|
32 |
+
|
33 |
+
from pandas.tseries import (
|
34 |
+
frequencies,
|
35 |
+
offsets,
|
36 |
+
)
|
37 |
+
|
38 |
+
|
39 |
+
@pytest.fixture(
|
40 |
+
params=[
|
41 |
+
(timedelta(1), "D"),
|
42 |
+
(timedelta(hours=1), "h"),
|
43 |
+
(timedelta(minutes=1), "min"),
|
44 |
+
(timedelta(seconds=1), "s"),
|
45 |
+
(np.timedelta64(1, "ns"), "ns"),
|
46 |
+
(timedelta(microseconds=1), "us"),
|
47 |
+
(timedelta(microseconds=1000), "ms"),
|
48 |
+
]
|
49 |
+
)
|
50 |
+
def base_delta_code_pair(request):
|
51 |
+
return request.param
|
52 |
+
|
53 |
+
|
54 |
+
freqs = (
|
55 |
+
[f"QE-{month}" for month in MONTHS]
|
56 |
+
+ [f"{annual}-{month}" for annual in ["YE", "BYE"] for month in MONTHS]
|
57 |
+
+ ["ME", "BME", "BMS"]
|
58 |
+
+ [f"WOM-{count}{day}" for count in range(1, 5) for day in DAYS]
|
59 |
+
+ [f"W-{day}" for day in DAYS]
|
60 |
+
)
|
61 |
+
|
62 |
+
|
63 |
+
@pytest.mark.parametrize("freq", freqs)
|
64 |
+
@pytest.mark.parametrize("periods", [5, 7])
|
65 |
+
def test_infer_freq_range(periods, freq):
|
66 |
+
freq = freq.upper()
|
67 |
+
|
68 |
+
gen = date_range("1/1/2000", periods=periods, freq=freq)
|
69 |
+
index = DatetimeIndex(gen.values)
|
70 |
+
|
71 |
+
if not freq.startswith("QE-"):
|
72 |
+
assert frequencies.infer_freq(index) == gen.freqstr
|
73 |
+
else:
|
74 |
+
inf_freq = frequencies.infer_freq(index)
|
75 |
+
is_dec_range = inf_freq == "QE-DEC" and gen.freqstr in (
|
76 |
+
"QE",
|
77 |
+
"QE-DEC",
|
78 |
+
"QE-SEP",
|
79 |
+
"QE-JUN",
|
80 |
+
"QE-MAR",
|
81 |
+
)
|
82 |
+
is_nov_range = inf_freq == "QE-NOV" and gen.freqstr in (
|
83 |
+
"QE-NOV",
|
84 |
+
"QE-AUG",
|
85 |
+
"QE-MAY",
|
86 |
+
"QE-FEB",
|
87 |
+
)
|
88 |
+
is_oct_range = inf_freq == "QE-OCT" and gen.freqstr in (
|
89 |
+
"QE-OCT",
|
90 |
+
"QE-JUL",
|
91 |
+
"QE-APR",
|
92 |
+
"QE-JAN",
|
93 |
+
)
|
94 |
+
assert is_dec_range or is_nov_range or is_oct_range
|
95 |
+
|
96 |
+
|
97 |
+
def test_raise_if_period_index():
|
98 |
+
index = period_range(start="1/1/1990", periods=20, freq="M")
|
99 |
+
msg = "Check the `freq` attribute instead of using infer_freq"
|
100 |
+
|
101 |
+
with pytest.raises(TypeError, match=msg):
|
102 |
+
frequencies.infer_freq(index)
|
103 |
+
|
104 |
+
|
105 |
+
def test_raise_if_too_few():
|
106 |
+
index = DatetimeIndex(["12/31/1998", "1/3/1999"])
|
107 |
+
msg = "Need at least 3 dates to infer frequency"
|
108 |
+
|
109 |
+
with pytest.raises(ValueError, match=msg):
|
110 |
+
frequencies.infer_freq(index)
|
111 |
+
|
112 |
+
|
113 |
+
def test_business_daily():
|
114 |
+
index = DatetimeIndex(["01/01/1999", "1/4/1999", "1/5/1999"])
|
115 |
+
assert frequencies.infer_freq(index) == "B"
|
116 |
+
|
117 |
+
|
118 |
+
def test_business_daily_look_alike():
|
119 |
+
# see gh-16624
|
120 |
+
#
|
121 |
+
# Do not infer "B when "weekend" (2-day gap) in wrong place.
|
122 |
+
index = DatetimeIndex(["12/31/1998", "1/3/1999", "1/4/1999"])
|
123 |
+
assert frequencies.infer_freq(index) is None
|
124 |
+
|
125 |
+
|
126 |
+
def test_day_corner():
|
127 |
+
index = DatetimeIndex(["1/1/2000", "1/2/2000", "1/3/2000"])
|
128 |
+
assert frequencies.infer_freq(index) == "D"
|
129 |
+
|
130 |
+
|
131 |
+
def test_non_datetime_index():
|
132 |
+
dates = to_datetime(["1/1/2000", "1/2/2000", "1/3/2000"])
|
133 |
+
assert frequencies.infer_freq(dates) == "D"
|
134 |
+
|
135 |
+
|
136 |
+
def test_fifth_week_of_month_infer():
|
137 |
+
# see gh-9425
|
138 |
+
#
|
139 |
+
# Only attempt to infer up to WOM-4.
|
140 |
+
index = DatetimeIndex(["2014-03-31", "2014-06-30", "2015-03-30"])
|
141 |
+
assert frequencies.infer_freq(index) is None
|
142 |
+
|
143 |
+
|
144 |
+
def test_week_of_month_fake():
|
145 |
+
# All of these dates are on same day
|
146 |
+
# of week and are 4 or 5 weeks apart.
|
147 |
+
index = DatetimeIndex(["2013-08-27", "2013-10-01", "2013-10-29", "2013-11-26"])
|
148 |
+
assert frequencies.infer_freq(index) != "WOM-4TUE"
|
149 |
+
|
150 |
+
|
151 |
+
def test_fifth_week_of_month():
|
152 |
+
# see gh-9425
|
153 |
+
#
|
154 |
+
# Only supports freq up to WOM-4.
|
155 |
+
msg = (
|
156 |
+
"Of the four parameters: start, end, periods, "
|
157 |
+
"and freq, exactly three must be specified"
|
158 |
+
)
|
159 |
+
|
160 |
+
with pytest.raises(ValueError, match=msg):
|
161 |
+
date_range("2014-01-01", freq="WOM-5MON")
|
162 |
+
|
163 |
+
|
164 |
+
def test_monthly_ambiguous():
|
165 |
+
rng = DatetimeIndex(["1/31/2000", "2/29/2000", "3/31/2000"])
|
166 |
+
assert rng.inferred_freq == "ME"
|
167 |
+
|
168 |
+
|
169 |
+
def test_annual_ambiguous():
|
170 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
171 |
+
assert rng.inferred_freq == "YE-JAN"
|
172 |
+
|
173 |
+
|
174 |
+
@pytest.mark.parametrize("count", range(1, 5))
|
175 |
+
def test_infer_freq_delta(base_delta_code_pair, count):
|
176 |
+
b = Timestamp(datetime.now())
|
177 |
+
base_delta, code = base_delta_code_pair
|
178 |
+
|
179 |
+
inc = base_delta * count
|
180 |
+
index = DatetimeIndex([b + inc * j for j in range(3)])
|
181 |
+
|
182 |
+
exp_freq = f"{count:d}{code}" if count > 1 else code
|
183 |
+
assert frequencies.infer_freq(index) == exp_freq
|
184 |
+
|
185 |
+
|
186 |
+
@pytest.mark.parametrize(
|
187 |
+
"constructor",
|
188 |
+
[
|
189 |
+
lambda now, delta: DatetimeIndex(
|
190 |
+
[now + delta * 7] + [now + delta * j for j in range(3)]
|
191 |
+
),
|
192 |
+
lambda now, delta: DatetimeIndex(
|
193 |
+
[now + delta * j for j in range(3)] + [now + delta * 7]
|
194 |
+
),
|
195 |
+
],
|
196 |
+
)
|
197 |
+
def test_infer_freq_custom(base_delta_code_pair, constructor):
|
198 |
+
b = Timestamp(datetime.now())
|
199 |
+
base_delta, _ = base_delta_code_pair
|
200 |
+
|
201 |
+
index = constructor(b, base_delta)
|
202 |
+
assert frequencies.infer_freq(index) is None
|
203 |
+
|
204 |
+
|
205 |
+
@pytest.mark.parametrize(
|
206 |
+
"freq,expected", [("Q", "QE-DEC"), ("Q-NOV", "QE-NOV"), ("Q-OCT", "QE-OCT")]
|
207 |
+
)
|
208 |
+
def test_infer_freq_index(freq, expected):
|
209 |
+
rng = period_range("1959Q2", "2009Q3", freq=freq)
|
210 |
+
with tm.assert_produces_warning(FutureWarning, match="Dtype inference"):
|
211 |
+
rng = Index(rng.to_timestamp("D", how="e").astype(object))
|
212 |
+
|
213 |
+
assert rng.inferred_freq == expected
|
214 |
+
|
215 |
+
|
216 |
+
@pytest.mark.parametrize(
|
217 |
+
"expected,dates",
|
218 |
+
list(
|
219 |
+
{
|
220 |
+
"YS-JAN": ["2009-01-01", "2010-01-01", "2011-01-01", "2012-01-01"],
|
221 |
+
"QE-OCT": ["2009-01-31", "2009-04-30", "2009-07-31", "2009-10-31"],
|
222 |
+
"ME": ["2010-11-30", "2010-12-31", "2011-01-31", "2011-02-28"],
|
223 |
+
"W-SAT": ["2010-12-25", "2011-01-01", "2011-01-08", "2011-01-15"],
|
224 |
+
"D": ["2011-01-01", "2011-01-02", "2011-01-03", "2011-01-04"],
|
225 |
+
"h": [
|
226 |
+
"2011-12-31 22:00",
|
227 |
+
"2011-12-31 23:00",
|
228 |
+
"2012-01-01 00:00",
|
229 |
+
"2012-01-01 01:00",
|
230 |
+
],
|
231 |
+
}.items()
|
232 |
+
),
|
233 |
+
)
|
234 |
+
@pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
|
235 |
+
def test_infer_freq_tz(tz_naive_fixture, expected, dates, unit):
|
236 |
+
# see gh-7310, GH#55609
|
237 |
+
tz = tz_naive_fixture
|
238 |
+
idx = DatetimeIndex(dates, tz=tz).as_unit(unit)
|
239 |
+
assert idx.inferred_freq == expected
|
240 |
+
|
241 |
+
|
242 |
+
def test_infer_freq_tz_series(tz_naive_fixture):
|
243 |
+
# infer_freq should work with both tz-naive and tz-aware series. See gh-52456
|
244 |
+
tz = tz_naive_fixture
|
245 |
+
idx = date_range("2021-01-01", "2021-01-04", tz=tz)
|
246 |
+
series = idx.to_series().reset_index(drop=True)
|
247 |
+
inferred_freq = frequencies.infer_freq(series)
|
248 |
+
assert inferred_freq == "D"
|
249 |
+
|
250 |
+
|
251 |
+
@pytest.mark.parametrize(
|
252 |
+
"date_pair",
|
253 |
+
[
|
254 |
+
["2013-11-02", "2013-11-5"], # Fall DST
|
255 |
+
["2014-03-08", "2014-03-11"], # Spring DST
|
256 |
+
["2014-01-01", "2014-01-03"], # Regular Time
|
257 |
+
],
|
258 |
+
)
|
259 |
+
@pytest.mark.parametrize(
|
260 |
+
"freq",
|
261 |
+
["h", "3h", "10min", "3601s", "3600001ms", "3600000001us", "3600000000001ns"],
|
262 |
+
)
|
263 |
+
def test_infer_freq_tz_transition(tz_naive_fixture, date_pair, freq):
|
264 |
+
# see gh-8772
|
265 |
+
tz = tz_naive_fixture
|
266 |
+
idx = date_range(date_pair[0], date_pair[1], freq=freq, tz=tz)
|
267 |
+
assert idx.inferred_freq == freq
|
268 |
+
|
269 |
+
|
270 |
+
def test_infer_freq_tz_transition_custom():
|
271 |
+
index = date_range("2013-11-03", periods=5, freq="3h").tz_localize(
|
272 |
+
"America/Chicago"
|
273 |
+
)
|
274 |
+
assert index.inferred_freq is None
|
275 |
+
|
276 |
+
|
277 |
+
@pytest.mark.parametrize(
|
278 |
+
"data,expected",
|
279 |
+
[
|
280 |
+
# Hourly freq in a day must result in "h"
|
281 |
+
(
|
282 |
+
[
|
283 |
+
"2014-07-01 09:00",
|
284 |
+
"2014-07-01 10:00",
|
285 |
+
"2014-07-01 11:00",
|
286 |
+
"2014-07-01 12:00",
|
287 |
+
"2014-07-01 13:00",
|
288 |
+
"2014-07-01 14:00",
|
289 |
+
],
|
290 |
+
"h",
|
291 |
+
),
|
292 |
+
(
|
293 |
+
[
|
294 |
+
"2014-07-01 09:00",
|
295 |
+
"2014-07-01 10:00",
|
296 |
+
"2014-07-01 11:00",
|
297 |
+
"2014-07-01 12:00",
|
298 |
+
"2014-07-01 13:00",
|
299 |
+
"2014-07-01 14:00",
|
300 |
+
"2014-07-01 15:00",
|
301 |
+
"2014-07-01 16:00",
|
302 |
+
"2014-07-02 09:00",
|
303 |
+
"2014-07-02 10:00",
|
304 |
+
"2014-07-02 11:00",
|
305 |
+
],
|
306 |
+
"bh",
|
307 |
+
),
|
308 |
+
(
|
309 |
+
[
|
310 |
+
"2014-07-04 09:00",
|
311 |
+
"2014-07-04 10:00",
|
312 |
+
"2014-07-04 11:00",
|
313 |
+
"2014-07-04 12:00",
|
314 |
+
"2014-07-04 13:00",
|
315 |
+
"2014-07-04 14:00",
|
316 |
+
"2014-07-04 15:00",
|
317 |
+
"2014-07-04 16:00",
|
318 |
+
"2014-07-07 09:00",
|
319 |
+
"2014-07-07 10:00",
|
320 |
+
"2014-07-07 11:00",
|
321 |
+
],
|
322 |
+
"bh",
|
323 |
+
),
|
324 |
+
(
|
325 |
+
[
|
326 |
+
"2014-07-04 09:00",
|
327 |
+
"2014-07-04 10:00",
|
328 |
+
"2014-07-04 11:00",
|
329 |
+
"2014-07-04 12:00",
|
330 |
+
"2014-07-04 13:00",
|
331 |
+
"2014-07-04 14:00",
|
332 |
+
"2014-07-04 15:00",
|
333 |
+
"2014-07-04 16:00",
|
334 |
+
"2014-07-07 09:00",
|
335 |
+
"2014-07-07 10:00",
|
336 |
+
"2014-07-07 11:00",
|
337 |
+
"2014-07-07 12:00",
|
338 |
+
"2014-07-07 13:00",
|
339 |
+
"2014-07-07 14:00",
|
340 |
+
"2014-07-07 15:00",
|
341 |
+
"2014-07-07 16:00",
|
342 |
+
"2014-07-08 09:00",
|
343 |
+
"2014-07-08 10:00",
|
344 |
+
"2014-07-08 11:00",
|
345 |
+
"2014-07-08 12:00",
|
346 |
+
"2014-07-08 13:00",
|
347 |
+
"2014-07-08 14:00",
|
348 |
+
"2014-07-08 15:00",
|
349 |
+
"2014-07-08 16:00",
|
350 |
+
],
|
351 |
+
"bh",
|
352 |
+
),
|
353 |
+
],
|
354 |
+
)
|
355 |
+
def test_infer_freq_business_hour(data, expected):
|
356 |
+
# see gh-7905
|
357 |
+
idx = DatetimeIndex(data)
|
358 |
+
assert idx.inferred_freq == expected
|
359 |
+
|
360 |
+
|
361 |
+
def test_not_monotonic():
|
362 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
363 |
+
rng = rng[::-1]
|
364 |
+
|
365 |
+
assert rng.inferred_freq == "-1YE-JAN"
|
366 |
+
|
367 |
+
|
368 |
+
def test_non_datetime_index2():
|
369 |
+
rng = DatetimeIndex(["1/31/2000", "1/31/2001", "1/31/2002"])
|
370 |
+
vals = rng.to_pydatetime()
|
371 |
+
|
372 |
+
result = frequencies.infer_freq(vals)
|
373 |
+
assert result == rng.inferred_freq
|
374 |
+
|
375 |
+
|
376 |
+
@pytest.mark.parametrize(
|
377 |
+
"idx",
|
378 |
+
[
|
379 |
+
Index(np.arange(5), dtype=np.int64),
|
380 |
+
Index(np.arange(5), dtype=np.float64),
|
381 |
+
period_range("2020-01-01", periods=5),
|
382 |
+
RangeIndex(5),
|
383 |
+
],
|
384 |
+
)
|
385 |
+
def test_invalid_index_types(idx):
|
386 |
+
# see gh-48439
|
387 |
+
msg = "|".join(
|
388 |
+
[
|
389 |
+
"cannot infer freq from a non-convertible",
|
390 |
+
"Check the `freq` attribute instead of using infer_freq",
|
391 |
+
]
|
392 |
+
)
|
393 |
+
|
394 |
+
with pytest.raises(TypeError, match=msg):
|
395 |
+
frequencies.infer_freq(idx)
|
396 |
+
|
397 |
+
|
398 |
+
@pytest.mark.skipif(is_platform_windows(), reason="see gh-10822: Windows issue")
|
399 |
+
def test_invalid_index_types_unicode():
|
400 |
+
# see gh-10822
|
401 |
+
#
|
402 |
+
# Odd error message on conversions to datetime for unicode.
|
403 |
+
msg = "Unknown datetime string format"
|
404 |
+
|
405 |
+
with pytest.raises(ValueError, match=msg):
|
406 |
+
frequencies.infer_freq(Index(["ZqgszYBfuL"]))
|
407 |
+
|
408 |
+
|
409 |
+
def test_string_datetime_like_compat():
|
410 |
+
# see gh-6463
|
411 |
+
data = ["2004-01", "2004-02", "2004-03", "2004-04"]
|
412 |
+
|
413 |
+
expected = frequencies.infer_freq(data)
|
414 |
+
result = frequencies.infer_freq(Index(data))
|
415 |
+
|
416 |
+
assert result == expected
|
417 |
+
|
418 |
+
|
419 |
+
def test_series():
|
420 |
+
# see gh-6407
|
421 |
+
s = Series(date_range("20130101", "20130110"))
|
422 |
+
inferred = frequencies.infer_freq(s)
|
423 |
+
assert inferred == "D"
|
424 |
+
|
425 |
+
|
426 |
+
@pytest.mark.parametrize("end", [10, 10.0])
|
427 |
+
def test_series_invalid_type(end):
|
428 |
+
# see gh-6407
|
429 |
+
msg = "cannot infer freq from a non-convertible dtype on a Series"
|
430 |
+
s = Series(np.arange(end))
|
431 |
+
|
432 |
+
with pytest.raises(TypeError, match=msg):
|
433 |
+
frequencies.infer_freq(s)
|
434 |
+
|
435 |
+
|
436 |
+
def test_series_inconvertible_string(using_infer_string):
|
437 |
+
# see gh-6407
|
438 |
+
if using_infer_string:
|
439 |
+
msg = "cannot infer freq from"
|
440 |
+
|
441 |
+
with pytest.raises(TypeError, match=msg):
|
442 |
+
frequencies.infer_freq(Series(["foo", "bar"]))
|
443 |
+
else:
|
444 |
+
msg = "Unknown datetime string format"
|
445 |
+
|
446 |
+
with pytest.raises(ValueError, match=msg):
|
447 |
+
frequencies.infer_freq(Series(["foo", "bar"]))
|
448 |
+
|
449 |
+
|
450 |
+
@pytest.mark.parametrize("freq", [None, "ms"])
|
451 |
+
def test_series_period_index(freq):
|
452 |
+
# see gh-6407
|
453 |
+
#
|
454 |
+
# Cannot infer on PeriodIndex
|
455 |
+
msg = "cannot infer freq from a non-convertible dtype on a Series"
|
456 |
+
s = Series(period_range("2013", periods=10, freq=freq))
|
457 |
+
|
458 |
+
with pytest.raises(TypeError, match=msg):
|
459 |
+
frequencies.infer_freq(s)
|
460 |
+
|
461 |
+
|
462 |
+
@pytest.mark.parametrize("freq", ["ME", "ms", "s"])
|
463 |
+
def test_series_datetime_index(freq):
|
464 |
+
s = Series(date_range("20130101", periods=10, freq=freq))
|
465 |
+
inferred = frequencies.infer_freq(s)
|
466 |
+
assert inferred == freq
|
467 |
+
|
468 |
+
|
469 |
+
@pytest.mark.parametrize(
|
470 |
+
"offset_func",
|
471 |
+
[
|
472 |
+
_get_offset,
|
473 |
+
lambda freq: date_range("2011-01-01", periods=5, freq=freq),
|
474 |
+
],
|
475 |
+
)
|
476 |
+
@pytest.mark.parametrize(
|
477 |
+
"freq",
|
478 |
+
[
|
479 |
+
"WEEKDAY",
|
480 |
+
"EOM",
|
481 |
+
"W@MON",
|
482 |
+
"W@TUE",
|
483 |
+
"W@WED",
|
484 |
+
"W@THU",
|
485 |
+
"W@FRI",
|
486 |
+
"W@SAT",
|
487 |
+
"W@SUN",
|
488 |
+
"QE@JAN",
|
489 |
+
"QE@FEB",
|
490 |
+
"QE@MAR",
|
491 |
+
"YE@JAN",
|
492 |
+
"YE@FEB",
|
493 |
+
"YE@MAR",
|
494 |
+
"YE@APR",
|
495 |
+
"YE@MAY",
|
496 |
+
"YE@JUN",
|
497 |
+
"YE@JUL",
|
498 |
+
"YE@AUG",
|
499 |
+
"YE@SEP",
|
500 |
+
"YE@OCT",
|
501 |
+
"YE@NOV",
|
502 |
+
"YE@DEC",
|
503 |
+
"YE@JAN",
|
504 |
+
"WOM@1MON",
|
505 |
+
"WOM@2MON",
|
506 |
+
"WOM@3MON",
|
507 |
+
"WOM@4MON",
|
508 |
+
"WOM@1TUE",
|
509 |
+
"WOM@2TUE",
|
510 |
+
"WOM@3TUE",
|
511 |
+
"WOM@4TUE",
|
512 |
+
"WOM@1WED",
|
513 |
+
"WOM@2WED",
|
514 |
+
"WOM@3WED",
|
515 |
+
"WOM@4WED",
|
516 |
+
"WOM@1THU",
|
517 |
+
"WOM@2THU",
|
518 |
+
"WOM@3THU",
|
519 |
+
"WOM@4THU",
|
520 |
+
"WOM@1FRI",
|
521 |
+
"WOM@2FRI",
|
522 |
+
"WOM@3FRI",
|
523 |
+
"WOM@4FRI",
|
524 |
+
],
|
525 |
+
)
|
526 |
+
def test_legacy_offset_warnings(offset_func, freq):
|
527 |
+
with pytest.raises(ValueError, match=INVALID_FREQ_ERR_MSG):
|
528 |
+
offset_func(freq)
|
529 |
+
|
530 |
+
|
531 |
+
def test_ms_vs_capital_ms():
|
532 |
+
left = _get_offset("ms")
|
533 |
+
right = _get_offset("MS")
|
534 |
+
|
535 |
+
assert left == offsets.Milli()
|
536 |
+
assert right == offsets.MonthBegin()
|
537 |
+
|
538 |
+
|
539 |
+
def test_infer_freq_non_nano():
|
540 |
+
arr = np.arange(10).astype(np.int64).view("M8[s]")
|
541 |
+
dta = DatetimeArray._simple_new(arr, dtype=arr.dtype)
|
542 |
+
res = frequencies.infer_freq(dta)
|
543 |
+
assert res == "s"
|
544 |
+
|
545 |
+
arr2 = arr.view("m8[ms]")
|
546 |
+
tda = TimedeltaArray._simple_new(arr2, dtype=arr2.dtype)
|
547 |
+
res2 = frequencies.infer_freq(tda)
|
548 |
+
assert res2 == "ms"
|
549 |
+
|
550 |
+
|
551 |
+
def test_infer_freq_non_nano_tzaware(tz_aware_fixture):
|
552 |
+
tz = tz_aware_fixture
|
553 |
+
|
554 |
+
dti = date_range("2016-01-01", periods=365, freq="B", tz=tz)
|
555 |
+
dta = dti._data.as_unit("s")
|
556 |
+
|
557 |
+
res = frequencies.infer_freq(dta)
|
558 |
+
assert res == "B"
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (201 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_calendar.cpython-310.pyc
ADDED
Binary file (3.77 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_federal.cpython-310.pyc
ADDED
Binary file (1.98 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_holiday.cpython-310.pyc
ADDED
Binary file (7.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/__pycache__/test_observance.cpython-310.pyc
ADDED
Binary file (2.49 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_calendar.py
ADDED
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from datetime import datetime
|
2 |
+
|
3 |
+
import pytest
|
4 |
+
|
5 |
+
from pandas import (
|
6 |
+
DatetimeIndex,
|
7 |
+
offsets,
|
8 |
+
to_datetime,
|
9 |
+
)
|
10 |
+
import pandas._testing as tm
|
11 |
+
|
12 |
+
from pandas.tseries.holiday import (
|
13 |
+
AbstractHolidayCalendar,
|
14 |
+
Holiday,
|
15 |
+
Timestamp,
|
16 |
+
USFederalHolidayCalendar,
|
17 |
+
USLaborDay,
|
18 |
+
USThanksgivingDay,
|
19 |
+
get_calendar,
|
20 |
+
)
|
21 |
+
|
22 |
+
|
23 |
+
@pytest.mark.parametrize(
|
24 |
+
"transform", [lambda x: x, lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
|
25 |
+
)
|
26 |
+
def test_calendar(transform):
|
27 |
+
start_date = datetime(2012, 1, 1)
|
28 |
+
end_date = datetime(2012, 12, 31)
|
29 |
+
|
30 |
+
calendar = USFederalHolidayCalendar()
|
31 |
+
holidays = calendar.holidays(transform(start_date), transform(end_date))
|
32 |
+
|
33 |
+
expected = [
|
34 |
+
datetime(2012, 1, 2),
|
35 |
+
datetime(2012, 1, 16),
|
36 |
+
datetime(2012, 2, 20),
|
37 |
+
datetime(2012, 5, 28),
|
38 |
+
datetime(2012, 7, 4),
|
39 |
+
datetime(2012, 9, 3),
|
40 |
+
datetime(2012, 10, 8),
|
41 |
+
datetime(2012, 11, 12),
|
42 |
+
datetime(2012, 11, 22),
|
43 |
+
datetime(2012, 12, 25),
|
44 |
+
]
|
45 |
+
|
46 |
+
assert list(holidays.to_pydatetime()) == expected
|
47 |
+
|
48 |
+
|
49 |
+
def test_calendar_caching():
|
50 |
+
# see gh-9552.
|
51 |
+
|
52 |
+
class TestCalendar(AbstractHolidayCalendar):
|
53 |
+
def __init__(self, name=None, rules=None) -> None:
|
54 |
+
super().__init__(name=name, rules=rules)
|
55 |
+
|
56 |
+
jan1 = TestCalendar(rules=[Holiday("jan1", year=2015, month=1, day=1)])
|
57 |
+
jan2 = TestCalendar(rules=[Holiday("jan2", year=2015, month=1, day=2)])
|
58 |
+
|
59 |
+
# Getting holidays for Jan 1 should not alter results for Jan 2.
|
60 |
+
expected = DatetimeIndex(["01-Jan-2015"]).as_unit("ns")
|
61 |
+
tm.assert_index_equal(jan1.holidays(), expected)
|
62 |
+
|
63 |
+
expected2 = DatetimeIndex(["02-Jan-2015"]).as_unit("ns")
|
64 |
+
tm.assert_index_equal(jan2.holidays(), expected2)
|
65 |
+
|
66 |
+
|
67 |
+
def test_calendar_observance_dates():
|
68 |
+
# see gh-11477
|
69 |
+
us_fed_cal = get_calendar("USFederalHolidayCalendar")
|
70 |
+
holidays0 = us_fed_cal.holidays(
|
71 |
+
datetime(2015, 7, 3), datetime(2015, 7, 3)
|
72 |
+
) # <-- same start and end dates
|
73 |
+
holidays1 = us_fed_cal.holidays(
|
74 |
+
datetime(2015, 7, 3), datetime(2015, 7, 6)
|
75 |
+
) # <-- different start and end dates
|
76 |
+
holidays2 = us_fed_cal.holidays(
|
77 |
+
datetime(2015, 7, 3), datetime(2015, 7, 3)
|
78 |
+
) # <-- same start and end dates
|
79 |
+
|
80 |
+
# These should all produce the same result.
|
81 |
+
#
|
82 |
+
# In addition, calling with different start and end
|
83 |
+
# dates should not alter the output if we call the
|
84 |
+
# function again with the same start and end date.
|
85 |
+
tm.assert_index_equal(holidays0, holidays1)
|
86 |
+
tm.assert_index_equal(holidays0, holidays2)
|
87 |
+
|
88 |
+
|
89 |
+
def test_rule_from_name():
|
90 |
+
us_fed_cal = get_calendar("USFederalHolidayCalendar")
|
91 |
+
assert us_fed_cal.rule_from_name("Thanksgiving Day") == USThanksgivingDay
|
92 |
+
|
93 |
+
|
94 |
+
def test_calendar_2031():
|
95 |
+
# See gh-27790
|
96 |
+
#
|
97 |
+
# Labor Day 2031 is on September 1. Saturday before is August 30.
|
98 |
+
# Next working day after August 30 ought to be Tuesday, September 2.
|
99 |
+
|
100 |
+
class testCalendar(AbstractHolidayCalendar):
|
101 |
+
rules = [USLaborDay]
|
102 |
+
|
103 |
+
cal = testCalendar()
|
104 |
+
workDay = offsets.CustomBusinessDay(calendar=cal)
|
105 |
+
Sat_before_Labor_Day_2031 = to_datetime("2031-08-30")
|
106 |
+
next_working_day = Sat_before_Labor_Day_2031 + 0 * workDay
|
107 |
+
assert next_working_day == to_datetime("2031-09-02")
|
108 |
+
|
109 |
+
|
110 |
+
def test_no_holidays_calendar():
|
111 |
+
# Test for issue #31415
|
112 |
+
|
113 |
+
class NoHolidaysCalendar(AbstractHolidayCalendar):
|
114 |
+
pass
|
115 |
+
|
116 |
+
cal = NoHolidaysCalendar()
|
117 |
+
holidays = cal.holidays(Timestamp("01-Jan-2020"), Timestamp("01-Jan-2021"))
|
118 |
+
empty_index = DatetimeIndex([]) # Type is DatetimeIndex since return_name=False
|
119 |
+
tm.assert_index_equal(holidays, empty_index)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_federal.py
ADDED
@@ -0,0 +1,58 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from datetime import datetime
|
2 |
+
|
3 |
+
from pandas import DatetimeIndex
|
4 |
+
import pandas._testing as tm
|
5 |
+
|
6 |
+
from pandas.tseries.holiday import (
|
7 |
+
AbstractHolidayCalendar,
|
8 |
+
USFederalHolidayCalendar,
|
9 |
+
USMartinLutherKingJr,
|
10 |
+
USMemorialDay,
|
11 |
+
)
|
12 |
+
|
13 |
+
|
14 |
+
def test_no_mlk_before_1986():
|
15 |
+
# see gh-10278
|
16 |
+
class MLKCalendar(AbstractHolidayCalendar):
|
17 |
+
rules = [USMartinLutherKingJr]
|
18 |
+
|
19 |
+
holidays = MLKCalendar().holidays(start="1984", end="1988").to_pydatetime().tolist()
|
20 |
+
|
21 |
+
# Testing to make sure holiday is not incorrectly observed before 1986.
|
22 |
+
assert holidays == [datetime(1986, 1, 20, 0, 0), datetime(1987, 1, 19, 0, 0)]
|
23 |
+
|
24 |
+
|
25 |
+
def test_memorial_day():
|
26 |
+
class MemorialDay(AbstractHolidayCalendar):
|
27 |
+
rules = [USMemorialDay]
|
28 |
+
|
29 |
+
holidays = MemorialDay().holidays(start="1971", end="1980").to_pydatetime().tolist()
|
30 |
+
|
31 |
+
# Fixes 5/31 error and checked manually against Wikipedia.
|
32 |
+
assert holidays == [
|
33 |
+
datetime(1971, 5, 31, 0, 0),
|
34 |
+
datetime(1972, 5, 29, 0, 0),
|
35 |
+
datetime(1973, 5, 28, 0, 0),
|
36 |
+
datetime(1974, 5, 27, 0, 0),
|
37 |
+
datetime(1975, 5, 26, 0, 0),
|
38 |
+
datetime(1976, 5, 31, 0, 0),
|
39 |
+
datetime(1977, 5, 30, 0, 0),
|
40 |
+
datetime(1978, 5, 29, 0, 0),
|
41 |
+
datetime(1979, 5, 28, 0, 0),
|
42 |
+
]
|
43 |
+
|
44 |
+
|
45 |
+
def test_federal_holiday_inconsistent_returntype():
|
46 |
+
# GH 49075 test case
|
47 |
+
# Instantiate two calendars to rule out _cache
|
48 |
+
cal1 = USFederalHolidayCalendar()
|
49 |
+
cal2 = USFederalHolidayCalendar()
|
50 |
+
|
51 |
+
results_2018 = cal1.holidays(start=datetime(2018, 8, 1), end=datetime(2018, 8, 31))
|
52 |
+
results_2019 = cal2.holidays(start=datetime(2019, 8, 1), end=datetime(2019, 8, 31))
|
53 |
+
expected_results = DatetimeIndex([], dtype="datetime64[ns]", freq=None)
|
54 |
+
|
55 |
+
# Check against expected results to ensure both date
|
56 |
+
# ranges generate expected results as per GH49075 submission
|
57 |
+
tm.assert_index_equal(results_2018, expected_results)
|
58 |
+
tm.assert_index_equal(results_2019, expected_results)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_holiday.py
ADDED
@@ -0,0 +1,332 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from datetime import datetime
|
2 |
+
|
3 |
+
import pytest
|
4 |
+
from pytz import utc
|
5 |
+
|
6 |
+
from pandas import (
|
7 |
+
DatetimeIndex,
|
8 |
+
Series,
|
9 |
+
)
|
10 |
+
import pandas._testing as tm
|
11 |
+
|
12 |
+
from pandas.tseries.holiday import (
|
13 |
+
MO,
|
14 |
+
SA,
|
15 |
+
AbstractHolidayCalendar,
|
16 |
+
DateOffset,
|
17 |
+
EasterMonday,
|
18 |
+
GoodFriday,
|
19 |
+
Holiday,
|
20 |
+
HolidayCalendarFactory,
|
21 |
+
Timestamp,
|
22 |
+
USColumbusDay,
|
23 |
+
USFederalHolidayCalendar,
|
24 |
+
USLaborDay,
|
25 |
+
USMartinLutherKingJr,
|
26 |
+
USMemorialDay,
|
27 |
+
USPresidentsDay,
|
28 |
+
USThanksgivingDay,
|
29 |
+
get_calendar,
|
30 |
+
next_monday,
|
31 |
+
)
|
32 |
+
|
33 |
+
|
34 |
+
@pytest.mark.parametrize(
|
35 |
+
"holiday,start_date,end_date,expected",
|
36 |
+
[
|
37 |
+
(
|
38 |
+
USMemorialDay,
|
39 |
+
datetime(2011, 1, 1),
|
40 |
+
datetime(2020, 12, 31),
|
41 |
+
[
|
42 |
+
datetime(2011, 5, 30),
|
43 |
+
datetime(2012, 5, 28),
|
44 |
+
datetime(2013, 5, 27),
|
45 |
+
datetime(2014, 5, 26),
|
46 |
+
datetime(2015, 5, 25),
|
47 |
+
datetime(2016, 5, 30),
|
48 |
+
datetime(2017, 5, 29),
|
49 |
+
datetime(2018, 5, 28),
|
50 |
+
datetime(2019, 5, 27),
|
51 |
+
datetime(2020, 5, 25),
|
52 |
+
],
|
53 |
+
),
|
54 |
+
(
|
55 |
+
Holiday("July 4th Eve", month=7, day=3),
|
56 |
+
"2001-01-01",
|
57 |
+
"2003-03-03",
|
58 |
+
[Timestamp("2001-07-03 00:00:00"), Timestamp("2002-07-03 00:00:00")],
|
59 |
+
),
|
60 |
+
(
|
61 |
+
Holiday("July 4th Eve", month=7, day=3, days_of_week=(0, 1, 2, 3)),
|
62 |
+
"2001-01-01",
|
63 |
+
"2008-03-03",
|
64 |
+
[
|
65 |
+
Timestamp("2001-07-03 00:00:00"),
|
66 |
+
Timestamp("2002-07-03 00:00:00"),
|
67 |
+
Timestamp("2003-07-03 00:00:00"),
|
68 |
+
Timestamp("2006-07-03 00:00:00"),
|
69 |
+
Timestamp("2007-07-03 00:00:00"),
|
70 |
+
],
|
71 |
+
),
|
72 |
+
(
|
73 |
+
EasterMonday,
|
74 |
+
datetime(2011, 1, 1),
|
75 |
+
datetime(2020, 12, 31),
|
76 |
+
[
|
77 |
+
Timestamp("2011-04-25 00:00:00"),
|
78 |
+
Timestamp("2012-04-09 00:00:00"),
|
79 |
+
Timestamp("2013-04-01 00:00:00"),
|
80 |
+
Timestamp("2014-04-21 00:00:00"),
|
81 |
+
Timestamp("2015-04-06 00:00:00"),
|
82 |
+
Timestamp("2016-03-28 00:00:00"),
|
83 |
+
Timestamp("2017-04-17 00:00:00"),
|
84 |
+
Timestamp("2018-04-02 00:00:00"),
|
85 |
+
Timestamp("2019-04-22 00:00:00"),
|
86 |
+
Timestamp("2020-04-13 00:00:00"),
|
87 |
+
],
|
88 |
+
),
|
89 |
+
(
|
90 |
+
GoodFriday,
|
91 |
+
datetime(2011, 1, 1),
|
92 |
+
datetime(2020, 12, 31),
|
93 |
+
[
|
94 |
+
Timestamp("2011-04-22 00:00:00"),
|
95 |
+
Timestamp("2012-04-06 00:00:00"),
|
96 |
+
Timestamp("2013-03-29 00:00:00"),
|
97 |
+
Timestamp("2014-04-18 00:00:00"),
|
98 |
+
Timestamp("2015-04-03 00:00:00"),
|
99 |
+
Timestamp("2016-03-25 00:00:00"),
|
100 |
+
Timestamp("2017-04-14 00:00:00"),
|
101 |
+
Timestamp("2018-03-30 00:00:00"),
|
102 |
+
Timestamp("2019-04-19 00:00:00"),
|
103 |
+
Timestamp("2020-04-10 00:00:00"),
|
104 |
+
],
|
105 |
+
),
|
106 |
+
(
|
107 |
+
USThanksgivingDay,
|
108 |
+
datetime(2011, 1, 1),
|
109 |
+
datetime(2020, 12, 31),
|
110 |
+
[
|
111 |
+
datetime(2011, 11, 24),
|
112 |
+
datetime(2012, 11, 22),
|
113 |
+
datetime(2013, 11, 28),
|
114 |
+
datetime(2014, 11, 27),
|
115 |
+
datetime(2015, 11, 26),
|
116 |
+
datetime(2016, 11, 24),
|
117 |
+
datetime(2017, 11, 23),
|
118 |
+
datetime(2018, 11, 22),
|
119 |
+
datetime(2019, 11, 28),
|
120 |
+
datetime(2020, 11, 26),
|
121 |
+
],
|
122 |
+
),
|
123 |
+
],
|
124 |
+
)
|
125 |
+
def test_holiday_dates(holiday, start_date, end_date, expected):
|
126 |
+
assert list(holiday.dates(start_date, end_date)) == expected
|
127 |
+
|
128 |
+
# Verify that timezone info is preserved.
|
129 |
+
assert list(
|
130 |
+
holiday.dates(
|
131 |
+
utc.localize(Timestamp(start_date)), utc.localize(Timestamp(end_date))
|
132 |
+
)
|
133 |
+
) == [utc.localize(dt) for dt in expected]
|
134 |
+
|
135 |
+
|
136 |
+
@pytest.mark.parametrize(
|
137 |
+
"holiday,start,expected",
|
138 |
+
[
|
139 |
+
(USMemorialDay, datetime(2015, 7, 1), []),
|
140 |
+
(USMemorialDay, "2015-05-25", [Timestamp("2015-05-25")]),
|
141 |
+
(USLaborDay, datetime(2015, 7, 1), []),
|
142 |
+
(USLaborDay, "2015-09-07", [Timestamp("2015-09-07")]),
|
143 |
+
(USColumbusDay, datetime(2015, 7, 1), []),
|
144 |
+
(USColumbusDay, "2015-10-12", [Timestamp("2015-10-12")]),
|
145 |
+
(USThanksgivingDay, datetime(2015, 7, 1), []),
|
146 |
+
(USThanksgivingDay, "2015-11-26", [Timestamp("2015-11-26")]),
|
147 |
+
(USMartinLutherKingJr, datetime(2015, 7, 1), []),
|
148 |
+
(USMartinLutherKingJr, "2015-01-19", [Timestamp("2015-01-19")]),
|
149 |
+
(USPresidentsDay, datetime(2015, 7, 1), []),
|
150 |
+
(USPresidentsDay, "2015-02-16", [Timestamp("2015-02-16")]),
|
151 |
+
(GoodFriday, datetime(2015, 7, 1), []),
|
152 |
+
(GoodFriday, "2015-04-03", [Timestamp("2015-04-03")]),
|
153 |
+
(EasterMonday, "2015-04-06", [Timestamp("2015-04-06")]),
|
154 |
+
(EasterMonday, datetime(2015, 7, 1), []),
|
155 |
+
(EasterMonday, "2015-04-05", []),
|
156 |
+
("New Year's Day", "2015-01-01", [Timestamp("2015-01-01")]),
|
157 |
+
("New Year's Day", "2010-12-31", [Timestamp("2010-12-31")]),
|
158 |
+
("New Year's Day", datetime(2015, 7, 1), []),
|
159 |
+
("New Year's Day", "2011-01-01", []),
|
160 |
+
("Independence Day", "2015-07-03", [Timestamp("2015-07-03")]),
|
161 |
+
("Independence Day", datetime(2015, 7, 1), []),
|
162 |
+
("Independence Day", "2015-07-04", []),
|
163 |
+
("Veterans Day", "2012-11-12", [Timestamp("2012-11-12")]),
|
164 |
+
("Veterans Day", datetime(2015, 7, 1), []),
|
165 |
+
("Veterans Day", "2012-11-11", []),
|
166 |
+
("Christmas Day", "2011-12-26", [Timestamp("2011-12-26")]),
|
167 |
+
("Christmas Day", datetime(2015, 7, 1), []),
|
168 |
+
("Christmas Day", "2011-12-25", []),
|
169 |
+
("Juneteenth National Independence Day", "2020-06-19", []),
|
170 |
+
(
|
171 |
+
"Juneteenth National Independence Day",
|
172 |
+
"2021-06-18",
|
173 |
+
[Timestamp("2021-06-18")],
|
174 |
+
),
|
175 |
+
("Juneteenth National Independence Day", "2022-06-19", []),
|
176 |
+
(
|
177 |
+
"Juneteenth National Independence Day",
|
178 |
+
"2022-06-20",
|
179 |
+
[Timestamp("2022-06-20")],
|
180 |
+
),
|
181 |
+
],
|
182 |
+
)
|
183 |
+
def test_holidays_within_dates(holiday, start, expected):
|
184 |
+
# see gh-11477
|
185 |
+
#
|
186 |
+
# Fix holiday behavior where holiday.dates returned dates outside
|
187 |
+
# start/end date, or observed rules could not be applied because the
|
188 |
+
# holiday was not in the original date range (e.g., 7/4/2015 -> 7/3/2015).
|
189 |
+
if isinstance(holiday, str):
|
190 |
+
calendar = get_calendar("USFederalHolidayCalendar")
|
191 |
+
holiday = calendar.rule_from_name(holiday)
|
192 |
+
|
193 |
+
assert list(holiday.dates(start, start)) == expected
|
194 |
+
|
195 |
+
# Verify that timezone info is preserved.
|
196 |
+
assert list(
|
197 |
+
holiday.dates(utc.localize(Timestamp(start)), utc.localize(Timestamp(start)))
|
198 |
+
) == [utc.localize(dt) for dt in expected]
|
199 |
+
|
200 |
+
|
201 |
+
@pytest.mark.parametrize(
|
202 |
+
"transform", [lambda x: x.strftime("%Y-%m-%d"), lambda x: Timestamp(x)]
|
203 |
+
)
|
204 |
+
def test_argument_types(transform):
|
205 |
+
start_date = datetime(2011, 1, 1)
|
206 |
+
end_date = datetime(2020, 12, 31)
|
207 |
+
|
208 |
+
holidays = USThanksgivingDay.dates(start_date, end_date)
|
209 |
+
holidays2 = USThanksgivingDay.dates(transform(start_date), transform(end_date))
|
210 |
+
tm.assert_index_equal(holidays, holidays2)
|
211 |
+
|
212 |
+
|
213 |
+
@pytest.mark.parametrize(
|
214 |
+
"name,kwargs",
|
215 |
+
[
|
216 |
+
("One-Time", {"year": 2012, "month": 5, "day": 28}),
|
217 |
+
(
|
218 |
+
"Range",
|
219 |
+
{
|
220 |
+
"month": 5,
|
221 |
+
"day": 28,
|
222 |
+
"start_date": datetime(2012, 1, 1),
|
223 |
+
"end_date": datetime(2012, 12, 31),
|
224 |
+
"offset": DateOffset(weekday=MO(1)),
|
225 |
+
},
|
226 |
+
),
|
227 |
+
],
|
228 |
+
)
|
229 |
+
def test_special_holidays(name, kwargs):
|
230 |
+
base_date = [datetime(2012, 5, 28)]
|
231 |
+
holiday = Holiday(name, **kwargs)
|
232 |
+
|
233 |
+
start_date = datetime(2011, 1, 1)
|
234 |
+
end_date = datetime(2020, 12, 31)
|
235 |
+
|
236 |
+
assert base_date == holiday.dates(start_date, end_date)
|
237 |
+
|
238 |
+
|
239 |
+
def test_get_calendar():
|
240 |
+
class TestCalendar(AbstractHolidayCalendar):
|
241 |
+
rules = []
|
242 |
+
|
243 |
+
calendar = get_calendar("TestCalendar")
|
244 |
+
assert TestCalendar == type(calendar)
|
245 |
+
|
246 |
+
|
247 |
+
def test_factory():
|
248 |
+
class_1 = HolidayCalendarFactory(
|
249 |
+
"MemorialDay", AbstractHolidayCalendar, USMemorialDay
|
250 |
+
)
|
251 |
+
class_2 = HolidayCalendarFactory(
|
252 |
+
"Thanksgiving", AbstractHolidayCalendar, USThanksgivingDay
|
253 |
+
)
|
254 |
+
class_3 = HolidayCalendarFactory("Combined", class_1, class_2)
|
255 |
+
|
256 |
+
assert len(class_1.rules) == 1
|
257 |
+
assert len(class_2.rules) == 1
|
258 |
+
assert len(class_3.rules) == 2
|
259 |
+
|
260 |
+
|
261 |
+
def test_both_offset_observance_raises():
|
262 |
+
# see gh-10217
|
263 |
+
msg = "Cannot use both offset and observance"
|
264 |
+
with pytest.raises(NotImplementedError, match=msg):
|
265 |
+
Holiday(
|
266 |
+
"Cyber Monday",
|
267 |
+
month=11,
|
268 |
+
day=1,
|
269 |
+
offset=[DateOffset(weekday=SA(4))],
|
270 |
+
observance=next_monday,
|
271 |
+
)
|
272 |
+
|
273 |
+
|
274 |
+
def test_half_open_interval_with_observance():
|
275 |
+
# Prompted by GH 49075
|
276 |
+
# Check for holidays that have a half-open date interval where
|
277 |
+
# they have either a start_date or end_date defined along
|
278 |
+
# with a defined observance pattern to make sure that the return type
|
279 |
+
# for Holiday.dates() remains consistent before & after the year that
|
280 |
+
# marks the 'edge' of the half-open date interval.
|
281 |
+
|
282 |
+
holiday_1 = Holiday(
|
283 |
+
"Arbitrary Holiday - start 2022-03-14",
|
284 |
+
start_date=datetime(2022, 3, 14),
|
285 |
+
month=3,
|
286 |
+
day=14,
|
287 |
+
observance=next_monday,
|
288 |
+
)
|
289 |
+
holiday_2 = Holiday(
|
290 |
+
"Arbitrary Holiday 2 - end 2022-03-20",
|
291 |
+
end_date=datetime(2022, 3, 20),
|
292 |
+
month=3,
|
293 |
+
day=20,
|
294 |
+
observance=next_monday,
|
295 |
+
)
|
296 |
+
|
297 |
+
class TestHolidayCalendar(AbstractHolidayCalendar):
|
298 |
+
rules = [
|
299 |
+
USMartinLutherKingJr,
|
300 |
+
holiday_1,
|
301 |
+
holiday_2,
|
302 |
+
USLaborDay,
|
303 |
+
]
|
304 |
+
|
305 |
+
start = Timestamp("2022-08-01")
|
306 |
+
end = Timestamp("2022-08-31")
|
307 |
+
year_offset = DateOffset(years=5)
|
308 |
+
expected_results = DatetimeIndex([], dtype="datetime64[ns]", freq=None)
|
309 |
+
test_cal = TestHolidayCalendar()
|
310 |
+
|
311 |
+
date_interval_low = test_cal.holidays(start - year_offset, end - year_offset)
|
312 |
+
date_window_edge = test_cal.holidays(start, end)
|
313 |
+
date_interval_high = test_cal.holidays(start + year_offset, end + year_offset)
|
314 |
+
|
315 |
+
tm.assert_index_equal(date_interval_low, expected_results)
|
316 |
+
tm.assert_index_equal(date_window_edge, expected_results)
|
317 |
+
tm.assert_index_equal(date_interval_high, expected_results)
|
318 |
+
|
319 |
+
|
320 |
+
def test_holidays_with_timezone_specified_but_no_occurences():
|
321 |
+
# GH 54580
|
322 |
+
# _apply_rule() in holiday.py was silently dropping timezones if you passed it
|
323 |
+
# an empty list of holiday dates that had timezone information
|
324 |
+
start_date = Timestamp("2018-01-01", tz="America/Chicago")
|
325 |
+
end_date = Timestamp("2018-01-11", tz="America/Chicago")
|
326 |
+
test_case = USFederalHolidayCalendar().holidays(
|
327 |
+
start_date, end_date, return_name=True
|
328 |
+
)
|
329 |
+
expected_results = Series("New Year's Day", index=[start_date])
|
330 |
+
expected_results.index = expected_results.index.as_unit("ns")
|
331 |
+
|
332 |
+
tm.assert_equal(test_case, expected_results)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/holiday/test_observance.py
ADDED
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from datetime import datetime
|
2 |
+
|
3 |
+
import pytest
|
4 |
+
|
5 |
+
from pandas.tseries.holiday import (
|
6 |
+
after_nearest_workday,
|
7 |
+
before_nearest_workday,
|
8 |
+
nearest_workday,
|
9 |
+
next_monday,
|
10 |
+
next_monday_or_tuesday,
|
11 |
+
next_workday,
|
12 |
+
previous_friday,
|
13 |
+
previous_workday,
|
14 |
+
sunday_to_monday,
|
15 |
+
weekend_to_monday,
|
16 |
+
)
|
17 |
+
|
18 |
+
_WEDNESDAY = datetime(2014, 4, 9)
|
19 |
+
_THURSDAY = datetime(2014, 4, 10)
|
20 |
+
_FRIDAY = datetime(2014, 4, 11)
|
21 |
+
_SATURDAY = datetime(2014, 4, 12)
|
22 |
+
_SUNDAY = datetime(2014, 4, 13)
|
23 |
+
_MONDAY = datetime(2014, 4, 14)
|
24 |
+
_TUESDAY = datetime(2014, 4, 15)
|
25 |
+
_NEXT_WEDNESDAY = datetime(2014, 4, 16)
|
26 |
+
|
27 |
+
|
28 |
+
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
|
29 |
+
def test_next_monday(day):
|
30 |
+
assert next_monday(day) == _MONDAY
|
31 |
+
|
32 |
+
|
33 |
+
@pytest.mark.parametrize(
|
34 |
+
"day,expected", [(_SATURDAY, _MONDAY), (_SUNDAY, _TUESDAY), (_MONDAY, _TUESDAY)]
|
35 |
+
)
|
36 |
+
def test_next_monday_or_tuesday(day, expected):
|
37 |
+
assert next_monday_or_tuesday(day) == expected
|
38 |
+
|
39 |
+
|
40 |
+
@pytest.mark.parametrize("day", [_SATURDAY, _SUNDAY])
|
41 |
+
def test_previous_friday(day):
|
42 |
+
assert previous_friday(day) == _FRIDAY
|
43 |
+
|
44 |
+
|
45 |
+
def test_sunday_to_monday():
|
46 |
+
assert sunday_to_monday(_SUNDAY) == _MONDAY
|
47 |
+
|
48 |
+
|
49 |
+
@pytest.mark.parametrize(
|
50 |
+
"day,expected", [(_SATURDAY, _FRIDAY), (_SUNDAY, _MONDAY), (_MONDAY, _MONDAY)]
|
51 |
+
)
|
52 |
+
def test_nearest_workday(day, expected):
|
53 |
+
assert nearest_workday(day) == expected
|
54 |
+
|
55 |
+
|
56 |
+
@pytest.mark.parametrize(
|
57 |
+
"day,expected", [(_SATURDAY, _MONDAY), (_SUNDAY, _MONDAY), (_MONDAY, _MONDAY)]
|
58 |
+
)
|
59 |
+
def test_weekend_to_monday(day, expected):
|
60 |
+
assert weekend_to_monday(day) == expected
|
61 |
+
|
62 |
+
|
63 |
+
@pytest.mark.parametrize(
|
64 |
+
"day,expected",
|
65 |
+
[
|
66 |
+
(_WEDNESDAY, _THURSDAY),
|
67 |
+
(_THURSDAY, _FRIDAY),
|
68 |
+
(_SATURDAY, _MONDAY),
|
69 |
+
(_SUNDAY, _MONDAY),
|
70 |
+
(_MONDAY, _TUESDAY),
|
71 |
+
(_TUESDAY, _NEXT_WEDNESDAY), # WED is same week as TUE
|
72 |
+
],
|
73 |
+
)
|
74 |
+
def test_next_workday(day, expected):
|
75 |
+
assert next_workday(day) == expected
|
76 |
+
|
77 |
+
|
78 |
+
@pytest.mark.parametrize(
|
79 |
+
"day,expected", [(_SATURDAY, _FRIDAY), (_SUNDAY, _FRIDAY), (_TUESDAY, _MONDAY)]
|
80 |
+
)
|
81 |
+
def test_previous_workday(day, expected):
|
82 |
+
assert previous_workday(day) == expected
|
83 |
+
|
84 |
+
|
85 |
+
@pytest.mark.parametrize(
|
86 |
+
"day,expected",
|
87 |
+
[
|
88 |
+
(_THURSDAY, _WEDNESDAY),
|
89 |
+
(_FRIDAY, _THURSDAY),
|
90 |
+
(_SATURDAY, _THURSDAY),
|
91 |
+
(_SUNDAY, _FRIDAY),
|
92 |
+
(_MONDAY, _FRIDAY), # last week Friday
|
93 |
+
(_TUESDAY, _MONDAY),
|
94 |
+
(_NEXT_WEDNESDAY, _TUESDAY), # WED is same week as TUE
|
95 |
+
],
|
96 |
+
)
|
97 |
+
def test_before_nearest_workday(day, expected):
|
98 |
+
assert before_nearest_workday(day) == expected
|
99 |
+
|
100 |
+
|
101 |
+
@pytest.mark.parametrize(
|
102 |
+
"day,expected", [(_SATURDAY, _MONDAY), (_SUNDAY, _TUESDAY), (_FRIDAY, _MONDAY)]
|
103 |
+
)
|
104 |
+
def test_after_nearest_workday(day, expected):
|
105 |
+
assert after_nearest_workday(day) == expected
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (201 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/common.cpython-310.pyc
ADDED
Binary file (1.19 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_hour.cpython-310.pyc
ADDED
Binary file (27.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_month.cpython-310.pyc
ADDED
Binary file (4.51 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_business_year.cpython-310.pyc
ADDED
Binary file (4.63 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_common.cpython-310.pyc
ADDED
Binary file (4.96 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/tseries/offsets/__pycache__/test_custom_business_day.cpython-310.pyc
ADDED
Binary file (4.07 kB). View file
|
|