Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llmeval-env/lib/python3.10/site-packages/pandas/tests/api/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/api/__pycache__/test_api.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/api/__pycache__/test_types.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/generic/test_duplicate_labels.py +413 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/generic/test_finalize.py +767 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/generic/test_series.py +159 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__pycache__/test_drop_duplicates.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__pycache__/test_nat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_drop_duplicates.py +89 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_equals.py +181 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_indexing.py +45 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_is_monotonic.py +46 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_nat.py +53 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_sort_values.py +315 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_value_counts.py +103 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_arithmetic.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_constructors.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_date_range.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_datetime.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_freq_attr.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_iter.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_join.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_npfuncs.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_ops.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_partial_slicing.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_pickle.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_reindex.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_scalar_compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_arithmetic.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_constructors.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_delete.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_formats.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_freq_attr.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_indexing.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_join.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_ops.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_pickle.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_scalar_compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_searchsorted.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_setops.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_timedelta.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_timedelta_range.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/methods/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/methods/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/methods/__pycache__/test_astype.cpython-310.pyc +0 -0
llmeval-env/lib/python3.10/site-packages/pandas/tests/api/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (189 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/api/__pycache__/test_api.cpython-310.pyc
ADDED
Binary file (7.85 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/api/__pycache__/test_types.cpython-310.pyc
ADDED
Binary file (1.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/generic/test_duplicate_labels.py
ADDED
@@ -0,0 +1,413 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Tests dealing with the NDFrame.allows_duplicates."""
|
2 |
+
import operator
|
3 |
+
|
4 |
+
import numpy as np
|
5 |
+
import pytest
|
6 |
+
|
7 |
+
import pandas as pd
|
8 |
+
import pandas._testing as tm
|
9 |
+
|
10 |
+
not_implemented = pytest.mark.xfail(reason="Not implemented.")
|
11 |
+
|
12 |
+
# ----------------------------------------------------------------------------
|
13 |
+
# Preservation
|
14 |
+
|
15 |
+
|
16 |
+
class TestPreserves:
|
17 |
+
@pytest.mark.parametrize(
|
18 |
+
"cls, data",
|
19 |
+
[
|
20 |
+
(pd.Series, np.array([])),
|
21 |
+
(pd.Series, [1, 2]),
|
22 |
+
(pd.DataFrame, {}),
|
23 |
+
(pd.DataFrame, {"A": [1, 2]}),
|
24 |
+
],
|
25 |
+
)
|
26 |
+
def test_construction_ok(self, cls, data):
|
27 |
+
result = cls(data)
|
28 |
+
assert result.flags.allows_duplicate_labels is True
|
29 |
+
|
30 |
+
result = cls(data).set_flags(allows_duplicate_labels=False)
|
31 |
+
assert result.flags.allows_duplicate_labels is False
|
32 |
+
|
33 |
+
@pytest.mark.parametrize(
|
34 |
+
"func",
|
35 |
+
[
|
36 |
+
operator.itemgetter(["a"]),
|
37 |
+
operator.methodcaller("add", 1),
|
38 |
+
operator.methodcaller("rename", str.upper),
|
39 |
+
operator.methodcaller("rename", "name"),
|
40 |
+
operator.methodcaller("abs"),
|
41 |
+
np.abs,
|
42 |
+
],
|
43 |
+
)
|
44 |
+
def test_preserved_series(self, func):
|
45 |
+
s = pd.Series([0, 1], index=["a", "b"]).set_flags(allows_duplicate_labels=False)
|
46 |
+
assert func(s).flags.allows_duplicate_labels is False
|
47 |
+
|
48 |
+
@pytest.mark.parametrize(
|
49 |
+
"other", [pd.Series(0, index=["a", "b", "c"]), pd.Series(0, index=["a", "b"])]
|
50 |
+
)
|
51 |
+
# TODO: frame
|
52 |
+
@not_implemented
|
53 |
+
def test_align(self, other):
|
54 |
+
s = pd.Series([0, 1], index=["a", "b"]).set_flags(allows_duplicate_labels=False)
|
55 |
+
a, b = s.align(other)
|
56 |
+
assert a.flags.allows_duplicate_labels is False
|
57 |
+
assert b.flags.allows_duplicate_labels is False
|
58 |
+
|
59 |
+
def test_preserved_frame(self):
|
60 |
+
df = pd.DataFrame({"A": [1, 2], "B": [3, 4]}, index=["a", "b"]).set_flags(
|
61 |
+
allows_duplicate_labels=False
|
62 |
+
)
|
63 |
+
assert df.loc[["a"]].flags.allows_duplicate_labels is False
|
64 |
+
assert df.loc[:, ["A", "B"]].flags.allows_duplicate_labels is False
|
65 |
+
|
66 |
+
def test_to_frame(self):
|
67 |
+
ser = pd.Series(dtype=float).set_flags(allows_duplicate_labels=False)
|
68 |
+
assert ser.to_frame().flags.allows_duplicate_labels is False
|
69 |
+
|
70 |
+
@pytest.mark.parametrize("func", ["add", "sub"])
|
71 |
+
@pytest.mark.parametrize("frame", [False, True])
|
72 |
+
@pytest.mark.parametrize("other", [1, pd.Series([1, 2], name="A")])
|
73 |
+
def test_binops(self, func, other, frame):
|
74 |
+
df = pd.Series([1, 2], name="A", index=["a", "b"]).set_flags(
|
75 |
+
allows_duplicate_labels=False
|
76 |
+
)
|
77 |
+
if frame:
|
78 |
+
df = df.to_frame()
|
79 |
+
if isinstance(other, pd.Series) and frame:
|
80 |
+
other = other.to_frame()
|
81 |
+
func = operator.methodcaller(func, other)
|
82 |
+
assert df.flags.allows_duplicate_labels is False
|
83 |
+
assert func(df).flags.allows_duplicate_labels is False
|
84 |
+
|
85 |
+
def test_preserve_getitem(self):
|
86 |
+
df = pd.DataFrame({"A": [1, 2]}).set_flags(allows_duplicate_labels=False)
|
87 |
+
assert df[["A"]].flags.allows_duplicate_labels is False
|
88 |
+
assert df["A"].flags.allows_duplicate_labels is False
|
89 |
+
assert df.loc[0].flags.allows_duplicate_labels is False
|
90 |
+
assert df.loc[[0]].flags.allows_duplicate_labels is False
|
91 |
+
assert df.loc[0, ["A"]].flags.allows_duplicate_labels is False
|
92 |
+
|
93 |
+
def test_ndframe_getitem_caching_issue(
|
94 |
+
self, request, using_copy_on_write, warn_copy_on_write
|
95 |
+
):
|
96 |
+
if not (using_copy_on_write or warn_copy_on_write):
|
97 |
+
request.applymarker(pytest.mark.xfail(reason="Unclear behavior."))
|
98 |
+
# NDFrame.__getitem__ will cache the first df['A']. May need to
|
99 |
+
# invalidate that cache? Update the cached entries?
|
100 |
+
df = pd.DataFrame({"A": [0]}).set_flags(allows_duplicate_labels=False)
|
101 |
+
assert df["A"].flags.allows_duplicate_labels is False
|
102 |
+
df.flags.allows_duplicate_labels = True
|
103 |
+
assert df["A"].flags.allows_duplicate_labels is True
|
104 |
+
|
105 |
+
@pytest.mark.parametrize(
|
106 |
+
"objs, kwargs",
|
107 |
+
[
|
108 |
+
# Series
|
109 |
+
(
|
110 |
+
[
|
111 |
+
pd.Series(1, index=["a", "b"]),
|
112 |
+
pd.Series(2, index=["c", "d"]),
|
113 |
+
],
|
114 |
+
{},
|
115 |
+
),
|
116 |
+
(
|
117 |
+
[
|
118 |
+
pd.Series(1, index=["a", "b"]),
|
119 |
+
pd.Series(2, index=["a", "b"]),
|
120 |
+
],
|
121 |
+
{"ignore_index": True},
|
122 |
+
),
|
123 |
+
(
|
124 |
+
[
|
125 |
+
pd.Series(1, index=["a", "b"]),
|
126 |
+
pd.Series(2, index=["a", "b"]),
|
127 |
+
],
|
128 |
+
{"axis": 1},
|
129 |
+
),
|
130 |
+
# Frame
|
131 |
+
(
|
132 |
+
[
|
133 |
+
pd.DataFrame({"A": [1, 2]}, index=["a", "b"]),
|
134 |
+
pd.DataFrame({"A": [1, 2]}, index=["c", "d"]),
|
135 |
+
],
|
136 |
+
{},
|
137 |
+
),
|
138 |
+
(
|
139 |
+
[
|
140 |
+
pd.DataFrame({"A": [1, 2]}, index=["a", "b"]),
|
141 |
+
pd.DataFrame({"A": [1, 2]}, index=["a", "b"]),
|
142 |
+
],
|
143 |
+
{"ignore_index": True},
|
144 |
+
),
|
145 |
+
(
|
146 |
+
[
|
147 |
+
pd.DataFrame({"A": [1, 2]}, index=["a", "b"]),
|
148 |
+
pd.DataFrame({"B": [1, 2]}, index=["a", "b"]),
|
149 |
+
],
|
150 |
+
{"axis": 1},
|
151 |
+
),
|
152 |
+
# Series / Frame
|
153 |
+
(
|
154 |
+
[
|
155 |
+
pd.DataFrame({"A": [1, 2]}, index=["a", "b"]),
|
156 |
+
pd.Series([1, 2], index=["a", "b"], name="B"),
|
157 |
+
],
|
158 |
+
{"axis": 1},
|
159 |
+
),
|
160 |
+
],
|
161 |
+
)
|
162 |
+
def test_concat(self, objs, kwargs):
|
163 |
+
objs = [x.set_flags(allows_duplicate_labels=False) for x in objs]
|
164 |
+
result = pd.concat(objs, **kwargs)
|
165 |
+
assert result.flags.allows_duplicate_labels is False
|
166 |
+
|
167 |
+
@pytest.mark.parametrize(
|
168 |
+
"left, right, expected",
|
169 |
+
[
|
170 |
+
# false false false
|
171 |
+
pytest.param(
|
172 |
+
pd.DataFrame({"A": [0, 1]}, index=["a", "b"]).set_flags(
|
173 |
+
allows_duplicate_labels=False
|
174 |
+
),
|
175 |
+
pd.DataFrame({"B": [0, 1]}, index=["a", "d"]).set_flags(
|
176 |
+
allows_duplicate_labels=False
|
177 |
+
),
|
178 |
+
False,
|
179 |
+
marks=not_implemented,
|
180 |
+
),
|
181 |
+
# false true false
|
182 |
+
pytest.param(
|
183 |
+
pd.DataFrame({"A": [0, 1]}, index=["a", "b"]).set_flags(
|
184 |
+
allows_duplicate_labels=False
|
185 |
+
),
|
186 |
+
pd.DataFrame({"B": [0, 1]}, index=["a", "d"]),
|
187 |
+
False,
|
188 |
+
marks=not_implemented,
|
189 |
+
),
|
190 |
+
# true true true
|
191 |
+
(
|
192 |
+
pd.DataFrame({"A": [0, 1]}, index=["a", "b"]),
|
193 |
+
pd.DataFrame({"B": [0, 1]}, index=["a", "d"]),
|
194 |
+
True,
|
195 |
+
),
|
196 |
+
],
|
197 |
+
)
|
198 |
+
def test_merge(self, left, right, expected):
|
199 |
+
result = pd.merge(left, right, left_index=True, right_index=True)
|
200 |
+
assert result.flags.allows_duplicate_labels is expected
|
201 |
+
|
202 |
+
@not_implemented
|
203 |
+
def test_groupby(self):
|
204 |
+
# XXX: This is under tested
|
205 |
+
# TODO:
|
206 |
+
# - apply
|
207 |
+
# - transform
|
208 |
+
# - Should passing a grouper that disallows duplicates propagate?
|
209 |
+
df = pd.DataFrame({"A": [1, 2, 3]}).set_flags(allows_duplicate_labels=False)
|
210 |
+
result = df.groupby([0, 0, 1]).agg("count")
|
211 |
+
assert result.flags.allows_duplicate_labels is False
|
212 |
+
|
213 |
+
@pytest.mark.parametrize("frame", [True, False])
|
214 |
+
@not_implemented
|
215 |
+
def test_window(self, frame):
|
216 |
+
df = pd.Series(
|
217 |
+
1,
|
218 |
+
index=pd.date_range("2000", periods=12),
|
219 |
+
name="A",
|
220 |
+
allows_duplicate_labels=False,
|
221 |
+
)
|
222 |
+
if frame:
|
223 |
+
df = df.to_frame()
|
224 |
+
assert df.rolling(3).mean().flags.allows_duplicate_labels is False
|
225 |
+
assert df.ewm(3).mean().flags.allows_duplicate_labels is False
|
226 |
+
assert df.expanding(3).mean().flags.allows_duplicate_labels is False
|
227 |
+
|
228 |
+
|
229 |
+
# ----------------------------------------------------------------------------
|
230 |
+
# Raises
|
231 |
+
|
232 |
+
|
233 |
+
class TestRaises:
|
234 |
+
@pytest.mark.parametrize(
|
235 |
+
"cls, axes",
|
236 |
+
[
|
237 |
+
(pd.Series, {"index": ["a", "a"], "dtype": float}),
|
238 |
+
(pd.DataFrame, {"index": ["a", "a"]}),
|
239 |
+
(pd.DataFrame, {"index": ["a", "a"], "columns": ["b", "b"]}),
|
240 |
+
(pd.DataFrame, {"columns": ["b", "b"]}),
|
241 |
+
],
|
242 |
+
)
|
243 |
+
def test_set_flags_with_duplicates(self, cls, axes):
|
244 |
+
result = cls(**axes)
|
245 |
+
assert result.flags.allows_duplicate_labels is True
|
246 |
+
|
247 |
+
msg = "Index has duplicates."
|
248 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
249 |
+
cls(**axes).set_flags(allows_duplicate_labels=False)
|
250 |
+
|
251 |
+
@pytest.mark.parametrize(
|
252 |
+
"data",
|
253 |
+
[
|
254 |
+
pd.Series(index=[0, 0], dtype=float),
|
255 |
+
pd.DataFrame(index=[0, 0]),
|
256 |
+
pd.DataFrame(columns=[0, 0]),
|
257 |
+
],
|
258 |
+
)
|
259 |
+
def test_setting_allows_duplicate_labels_raises(self, data):
|
260 |
+
msg = "Index has duplicates."
|
261 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
262 |
+
data.flags.allows_duplicate_labels = False
|
263 |
+
|
264 |
+
assert data.flags.allows_duplicate_labels is True
|
265 |
+
|
266 |
+
def test_series_raises(self):
|
267 |
+
a = pd.Series(0, index=["a", "b"])
|
268 |
+
b = pd.Series([0, 1], index=["a", "b"]).set_flags(allows_duplicate_labels=False)
|
269 |
+
msg = "Index has duplicates."
|
270 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
271 |
+
pd.concat([a, b])
|
272 |
+
|
273 |
+
@pytest.mark.parametrize(
|
274 |
+
"getter, target",
|
275 |
+
[
|
276 |
+
(operator.itemgetter(["A", "A"]), None),
|
277 |
+
# loc
|
278 |
+
(operator.itemgetter(["a", "a"]), "loc"),
|
279 |
+
pytest.param(operator.itemgetter(("a", ["A", "A"])), "loc"),
|
280 |
+
(operator.itemgetter((["a", "a"], "A")), "loc"),
|
281 |
+
# iloc
|
282 |
+
(operator.itemgetter([0, 0]), "iloc"),
|
283 |
+
pytest.param(operator.itemgetter((0, [0, 0])), "iloc"),
|
284 |
+
pytest.param(operator.itemgetter(([0, 0], 0)), "iloc"),
|
285 |
+
],
|
286 |
+
)
|
287 |
+
def test_getitem_raises(self, getter, target):
|
288 |
+
df = pd.DataFrame({"A": [1, 2], "B": [3, 4]}, index=["a", "b"]).set_flags(
|
289 |
+
allows_duplicate_labels=False
|
290 |
+
)
|
291 |
+
if target:
|
292 |
+
# df, df.loc, or df.iloc
|
293 |
+
target = getattr(df, target)
|
294 |
+
else:
|
295 |
+
target = df
|
296 |
+
|
297 |
+
msg = "Index has duplicates."
|
298 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
299 |
+
getter(target)
|
300 |
+
|
301 |
+
@pytest.mark.parametrize(
|
302 |
+
"objs, kwargs",
|
303 |
+
[
|
304 |
+
(
|
305 |
+
[
|
306 |
+
pd.Series(1, index=[0, 1], name="a"),
|
307 |
+
pd.Series(2, index=[0, 1], name="a"),
|
308 |
+
],
|
309 |
+
{"axis": 1},
|
310 |
+
)
|
311 |
+
],
|
312 |
+
)
|
313 |
+
def test_concat_raises(self, objs, kwargs):
|
314 |
+
objs = [x.set_flags(allows_duplicate_labels=False) for x in objs]
|
315 |
+
msg = "Index has duplicates."
|
316 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
317 |
+
pd.concat(objs, **kwargs)
|
318 |
+
|
319 |
+
@not_implemented
|
320 |
+
def test_merge_raises(self):
|
321 |
+
a = pd.DataFrame({"A": [0, 1, 2]}, index=["a", "b", "c"]).set_flags(
|
322 |
+
allows_duplicate_labels=False
|
323 |
+
)
|
324 |
+
b = pd.DataFrame({"B": [0, 1, 2]}, index=["a", "b", "b"])
|
325 |
+
msg = "Index has duplicates."
|
326 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
327 |
+
pd.merge(a, b, left_index=True, right_index=True)
|
328 |
+
|
329 |
+
|
330 |
+
@pytest.mark.parametrize(
|
331 |
+
"idx",
|
332 |
+
[
|
333 |
+
pd.Index([1, 1]),
|
334 |
+
pd.Index(["a", "a"]),
|
335 |
+
pd.Index([1.1, 1.1]),
|
336 |
+
pd.PeriodIndex([pd.Period("2000", "D")] * 2),
|
337 |
+
pd.DatetimeIndex([pd.Timestamp("2000")] * 2),
|
338 |
+
pd.TimedeltaIndex([pd.Timedelta("1D")] * 2),
|
339 |
+
pd.CategoricalIndex(["a", "a"]),
|
340 |
+
pd.IntervalIndex([pd.Interval(0, 1)] * 2),
|
341 |
+
pd.MultiIndex.from_tuples([("a", 1), ("a", 1)]),
|
342 |
+
],
|
343 |
+
ids=lambda x: type(x).__name__,
|
344 |
+
)
|
345 |
+
def test_raises_basic(idx):
|
346 |
+
msg = "Index has duplicates."
|
347 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
348 |
+
pd.Series(1, index=idx).set_flags(allows_duplicate_labels=False)
|
349 |
+
|
350 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
351 |
+
pd.DataFrame({"A": [1, 1]}, index=idx).set_flags(allows_duplicate_labels=False)
|
352 |
+
|
353 |
+
with pytest.raises(pd.errors.DuplicateLabelError, match=msg):
|
354 |
+
pd.DataFrame([[1, 2]], columns=idx).set_flags(allows_duplicate_labels=False)
|
355 |
+
|
356 |
+
|
357 |
+
def test_format_duplicate_labels_message():
|
358 |
+
idx = pd.Index(["a", "b", "a", "b", "c"])
|
359 |
+
result = idx._format_duplicate_message()
|
360 |
+
expected = pd.DataFrame(
|
361 |
+
{"positions": [[0, 2], [1, 3]]}, index=pd.Index(["a", "b"], name="label")
|
362 |
+
)
|
363 |
+
tm.assert_frame_equal(result, expected)
|
364 |
+
|
365 |
+
|
366 |
+
def test_format_duplicate_labels_message_multi():
|
367 |
+
idx = pd.MultiIndex.from_product([["A"], ["a", "b", "a", "b", "c"]])
|
368 |
+
result = idx._format_duplicate_message()
|
369 |
+
expected = pd.DataFrame(
|
370 |
+
{"positions": [[0, 2], [1, 3]]},
|
371 |
+
index=pd.MultiIndex.from_product([["A"], ["a", "b"]]),
|
372 |
+
)
|
373 |
+
tm.assert_frame_equal(result, expected)
|
374 |
+
|
375 |
+
|
376 |
+
def test_dataframe_insert_raises():
|
377 |
+
df = pd.DataFrame({"A": [1, 2]}).set_flags(allows_duplicate_labels=False)
|
378 |
+
msg = "Cannot specify"
|
379 |
+
with pytest.raises(ValueError, match=msg):
|
380 |
+
df.insert(0, "A", [3, 4], allow_duplicates=True)
|
381 |
+
|
382 |
+
|
383 |
+
@pytest.mark.parametrize(
|
384 |
+
"method, frame_only",
|
385 |
+
[
|
386 |
+
(operator.methodcaller("set_index", "A", inplace=True), True),
|
387 |
+
(operator.methodcaller("reset_index", inplace=True), True),
|
388 |
+
(operator.methodcaller("rename", lambda x: x, inplace=True), False),
|
389 |
+
],
|
390 |
+
)
|
391 |
+
def test_inplace_raises(method, frame_only):
|
392 |
+
df = pd.DataFrame({"A": [0, 0], "B": [1, 2]}).set_flags(
|
393 |
+
allows_duplicate_labels=False
|
394 |
+
)
|
395 |
+
s = df["A"]
|
396 |
+
s.flags.allows_duplicate_labels = False
|
397 |
+
msg = "Cannot specify"
|
398 |
+
|
399 |
+
with pytest.raises(ValueError, match=msg):
|
400 |
+
method(df)
|
401 |
+
if not frame_only:
|
402 |
+
with pytest.raises(ValueError, match=msg):
|
403 |
+
method(s)
|
404 |
+
|
405 |
+
|
406 |
+
def test_pickle():
|
407 |
+
a = pd.Series([1, 2]).set_flags(allows_duplicate_labels=False)
|
408 |
+
b = tm.round_trip_pickle(a)
|
409 |
+
tm.assert_series_equal(a, b)
|
410 |
+
|
411 |
+
a = pd.DataFrame({"A": []}).set_flags(allows_duplicate_labels=False)
|
412 |
+
b = tm.round_trip_pickle(a)
|
413 |
+
tm.assert_frame_equal(a, b)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/generic/test_finalize.py
ADDED
@@ -0,0 +1,767 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
An exhaustive list of pandas methods exercising NDFrame.__finalize__.
|
3 |
+
"""
|
4 |
+
import operator
|
5 |
+
import re
|
6 |
+
|
7 |
+
import numpy as np
|
8 |
+
import pytest
|
9 |
+
|
10 |
+
import pandas as pd
|
11 |
+
import pandas._testing as tm
|
12 |
+
|
13 |
+
# TODO:
|
14 |
+
# * Binary methods (mul, div, etc.)
|
15 |
+
# * Binary outputs (align, etc.)
|
16 |
+
# * top-level methods (concat, merge, get_dummies, etc.)
|
17 |
+
# * window
|
18 |
+
# * cumulative reductions
|
19 |
+
|
20 |
+
not_implemented_mark = pytest.mark.xfail(reason="not implemented")
|
21 |
+
|
22 |
+
mi = pd.MultiIndex.from_product([["a", "b"], [0, 1]], names=["A", "B"])
|
23 |
+
|
24 |
+
frame_data = ({"A": [1]},)
|
25 |
+
frame_mi_data = ({"A": [1, 2, 3, 4]}, mi)
|
26 |
+
|
27 |
+
|
28 |
+
# Tuple of
|
29 |
+
# - Callable: Constructor (Series, DataFrame)
|
30 |
+
# - Tuple: Constructor args
|
31 |
+
# - Callable: pass the constructed value with attrs set to this.
|
32 |
+
|
33 |
+
_all_methods = [
|
34 |
+
(pd.Series, ([0],), operator.methodcaller("take", [])),
|
35 |
+
(pd.Series, ([0],), operator.methodcaller("__getitem__", [True])),
|
36 |
+
(pd.Series, ([0],), operator.methodcaller("repeat", 2)),
|
37 |
+
(pd.Series, ([0],), operator.methodcaller("reset_index")),
|
38 |
+
(pd.Series, ([0],), operator.methodcaller("reset_index", drop=True)),
|
39 |
+
(pd.Series, ([0],), operator.methodcaller("to_frame")),
|
40 |
+
(pd.Series, ([0, 0],), operator.methodcaller("drop_duplicates")),
|
41 |
+
(pd.Series, ([0, 0],), operator.methodcaller("duplicated")),
|
42 |
+
(pd.Series, ([0, 0],), operator.methodcaller("round")),
|
43 |
+
(pd.Series, ([0, 0],), operator.methodcaller("rename", lambda x: x + 1)),
|
44 |
+
(pd.Series, ([0, 0],), operator.methodcaller("rename", "name")),
|
45 |
+
(pd.Series, ([0, 0],), operator.methodcaller("set_axis", ["a", "b"])),
|
46 |
+
(pd.Series, ([0, 0],), operator.methodcaller("reindex", [1, 0])),
|
47 |
+
(pd.Series, ([0, 0],), operator.methodcaller("drop", [0])),
|
48 |
+
(pd.Series, (pd.array([0, pd.NA]),), operator.methodcaller("fillna", 0)),
|
49 |
+
(pd.Series, ([0, 0],), operator.methodcaller("replace", {0: 1})),
|
50 |
+
(pd.Series, ([0, 0],), operator.methodcaller("shift")),
|
51 |
+
(pd.Series, ([0, 0],), operator.methodcaller("isin", [0, 1])),
|
52 |
+
(pd.Series, ([0, 0],), operator.methodcaller("between", 0, 2)),
|
53 |
+
(pd.Series, ([0, 0],), operator.methodcaller("isna")),
|
54 |
+
(pd.Series, ([0, 0],), operator.methodcaller("isnull")),
|
55 |
+
(pd.Series, ([0, 0],), operator.methodcaller("notna")),
|
56 |
+
(pd.Series, ([0, 0],), operator.methodcaller("notnull")),
|
57 |
+
(pd.Series, ([1],), operator.methodcaller("add", pd.Series([1]))),
|
58 |
+
# TODO: mul, div, etc.
|
59 |
+
(
|
60 |
+
pd.Series,
|
61 |
+
([0], pd.period_range("2000", periods=1)),
|
62 |
+
operator.methodcaller("to_timestamp"),
|
63 |
+
),
|
64 |
+
(
|
65 |
+
pd.Series,
|
66 |
+
([0], pd.date_range("2000", periods=1)),
|
67 |
+
operator.methodcaller("to_period"),
|
68 |
+
),
|
69 |
+
pytest.param(
|
70 |
+
(
|
71 |
+
pd.DataFrame,
|
72 |
+
frame_data,
|
73 |
+
operator.methodcaller("dot", pd.DataFrame(index=["A"])),
|
74 |
+
),
|
75 |
+
marks=pytest.mark.xfail(reason="Implement binary finalize"),
|
76 |
+
),
|
77 |
+
(pd.DataFrame, frame_data, operator.methodcaller("transpose")),
|
78 |
+
(pd.DataFrame, frame_data, operator.methodcaller("__getitem__", "A")),
|
79 |
+
(pd.DataFrame, frame_data, operator.methodcaller("__getitem__", ["A"])),
|
80 |
+
(pd.DataFrame, frame_data, operator.methodcaller("__getitem__", np.array([True]))),
|
81 |
+
(pd.DataFrame, ({("A", "a"): [1]},), operator.methodcaller("__getitem__", ["A"])),
|
82 |
+
(pd.DataFrame, frame_data, operator.methodcaller("query", "A == 1")),
|
83 |
+
(pd.DataFrame, frame_data, operator.methodcaller("eval", "A + 1", engine="python")),
|
84 |
+
(pd.DataFrame, frame_data, operator.methodcaller("select_dtypes", include="int")),
|
85 |
+
(pd.DataFrame, frame_data, operator.methodcaller("assign", b=1)),
|
86 |
+
(pd.DataFrame, frame_data, operator.methodcaller("set_axis", ["A"])),
|
87 |
+
(pd.DataFrame, frame_data, operator.methodcaller("reindex", [0, 1])),
|
88 |
+
(pd.DataFrame, frame_data, operator.methodcaller("drop", columns=["A"])),
|
89 |
+
(pd.DataFrame, frame_data, operator.methodcaller("drop", index=[0])),
|
90 |
+
(pd.DataFrame, frame_data, operator.methodcaller("rename", columns={"A": "a"})),
|
91 |
+
(pd.DataFrame, frame_data, operator.methodcaller("rename", index=lambda x: x)),
|
92 |
+
(pd.DataFrame, frame_data, operator.methodcaller("fillna", "A")),
|
93 |
+
(pd.DataFrame, frame_data, operator.methodcaller("fillna", method="ffill")),
|
94 |
+
(pd.DataFrame, frame_data, operator.methodcaller("set_index", "A")),
|
95 |
+
(pd.DataFrame, frame_data, operator.methodcaller("reset_index")),
|
96 |
+
(pd.DataFrame, frame_data, operator.methodcaller("isna")),
|
97 |
+
(pd.DataFrame, frame_data, operator.methodcaller("isnull")),
|
98 |
+
(pd.DataFrame, frame_data, operator.methodcaller("notna")),
|
99 |
+
(pd.DataFrame, frame_data, operator.methodcaller("notnull")),
|
100 |
+
(pd.DataFrame, frame_data, operator.methodcaller("dropna")),
|
101 |
+
(pd.DataFrame, frame_data, operator.methodcaller("drop_duplicates")),
|
102 |
+
(pd.DataFrame, frame_data, operator.methodcaller("duplicated")),
|
103 |
+
(pd.DataFrame, frame_data, operator.methodcaller("sort_values", by="A")),
|
104 |
+
(pd.DataFrame, frame_data, operator.methodcaller("sort_index")),
|
105 |
+
(pd.DataFrame, frame_data, operator.methodcaller("nlargest", 1, "A")),
|
106 |
+
(pd.DataFrame, frame_data, operator.methodcaller("nsmallest", 1, "A")),
|
107 |
+
(pd.DataFrame, frame_mi_data, operator.methodcaller("swaplevel")),
|
108 |
+
(
|
109 |
+
pd.DataFrame,
|
110 |
+
frame_data,
|
111 |
+
operator.methodcaller("add", pd.DataFrame(*frame_data)),
|
112 |
+
),
|
113 |
+
# TODO: div, mul, etc.
|
114 |
+
(
|
115 |
+
pd.DataFrame,
|
116 |
+
frame_data,
|
117 |
+
operator.methodcaller("combine", pd.DataFrame(*frame_data), operator.add),
|
118 |
+
),
|
119 |
+
(
|
120 |
+
pd.DataFrame,
|
121 |
+
frame_data,
|
122 |
+
operator.methodcaller("combine_first", pd.DataFrame(*frame_data)),
|
123 |
+
),
|
124 |
+
pytest.param(
|
125 |
+
(
|
126 |
+
pd.DataFrame,
|
127 |
+
frame_data,
|
128 |
+
operator.methodcaller("update", pd.DataFrame(*frame_data)),
|
129 |
+
),
|
130 |
+
marks=not_implemented_mark,
|
131 |
+
),
|
132 |
+
(pd.DataFrame, frame_data, operator.methodcaller("pivot", columns="A")),
|
133 |
+
(
|
134 |
+
pd.DataFrame,
|
135 |
+
({"A": [1], "B": [1]},),
|
136 |
+
operator.methodcaller("pivot_table", columns="A"),
|
137 |
+
),
|
138 |
+
(
|
139 |
+
pd.DataFrame,
|
140 |
+
({"A": [1], "B": [1]},),
|
141 |
+
operator.methodcaller("pivot_table", columns="A", aggfunc=["mean", "sum"]),
|
142 |
+
),
|
143 |
+
(pd.DataFrame, frame_data, operator.methodcaller("stack")),
|
144 |
+
(pd.DataFrame, frame_data, operator.methodcaller("explode", "A")),
|
145 |
+
(pd.DataFrame, frame_mi_data, operator.methodcaller("unstack")),
|
146 |
+
(
|
147 |
+
pd.DataFrame,
|
148 |
+
({"A": ["a", "b", "c"], "B": [1, 3, 5], "C": [2, 4, 6]},),
|
149 |
+
operator.methodcaller("melt", id_vars=["A"], value_vars=["B"]),
|
150 |
+
),
|
151 |
+
(pd.DataFrame, frame_data, operator.methodcaller("map", lambda x: x)),
|
152 |
+
pytest.param(
|
153 |
+
(
|
154 |
+
pd.DataFrame,
|
155 |
+
frame_data,
|
156 |
+
operator.methodcaller("merge", pd.DataFrame({"A": [1]})),
|
157 |
+
),
|
158 |
+
marks=not_implemented_mark,
|
159 |
+
),
|
160 |
+
(pd.DataFrame, frame_data, operator.methodcaller("round", 2)),
|
161 |
+
(pd.DataFrame, frame_data, operator.methodcaller("corr")),
|
162 |
+
pytest.param(
|
163 |
+
(pd.DataFrame, frame_data, operator.methodcaller("cov")),
|
164 |
+
marks=[
|
165 |
+
pytest.mark.filterwarnings("ignore::RuntimeWarning"),
|
166 |
+
],
|
167 |
+
),
|
168 |
+
(
|
169 |
+
pd.DataFrame,
|
170 |
+
frame_data,
|
171 |
+
operator.methodcaller("corrwith", pd.DataFrame(*frame_data)),
|
172 |
+
),
|
173 |
+
(pd.DataFrame, frame_data, operator.methodcaller("count")),
|
174 |
+
(pd.DataFrame, frame_data, operator.methodcaller("nunique")),
|
175 |
+
(pd.DataFrame, frame_data, operator.methodcaller("idxmin")),
|
176 |
+
(pd.DataFrame, frame_data, operator.methodcaller("idxmax")),
|
177 |
+
(pd.DataFrame, frame_data, operator.methodcaller("mode")),
|
178 |
+
(pd.Series, [0], operator.methodcaller("mode")),
|
179 |
+
(pd.DataFrame, frame_data, operator.methodcaller("median")),
|
180 |
+
(
|
181 |
+
pd.DataFrame,
|
182 |
+
frame_data,
|
183 |
+
operator.methodcaller("quantile", numeric_only=True),
|
184 |
+
),
|
185 |
+
(
|
186 |
+
pd.DataFrame,
|
187 |
+
frame_data,
|
188 |
+
operator.methodcaller("quantile", q=[0.25, 0.75], numeric_only=True),
|
189 |
+
),
|
190 |
+
(
|
191 |
+
pd.DataFrame,
|
192 |
+
({"A": [pd.Timedelta(days=1), pd.Timedelta(days=2)]},),
|
193 |
+
operator.methodcaller("quantile", numeric_only=False),
|
194 |
+
),
|
195 |
+
(
|
196 |
+
pd.DataFrame,
|
197 |
+
({"A": [np.datetime64("2022-01-01"), np.datetime64("2022-01-02")]},),
|
198 |
+
operator.methodcaller("quantile", numeric_only=True),
|
199 |
+
),
|
200 |
+
(
|
201 |
+
pd.DataFrame,
|
202 |
+
({"A": [1]}, [pd.Period("2000", "D")]),
|
203 |
+
operator.methodcaller("to_timestamp"),
|
204 |
+
),
|
205 |
+
(
|
206 |
+
pd.DataFrame,
|
207 |
+
({"A": [1]}, [pd.Timestamp("2000")]),
|
208 |
+
operator.methodcaller("to_period", freq="D"),
|
209 |
+
),
|
210 |
+
(pd.DataFrame, frame_mi_data, operator.methodcaller("isin", [1])),
|
211 |
+
(pd.DataFrame, frame_mi_data, operator.methodcaller("isin", pd.Series([1]))),
|
212 |
+
(
|
213 |
+
pd.DataFrame,
|
214 |
+
frame_mi_data,
|
215 |
+
operator.methodcaller("isin", pd.DataFrame({"A": [1]})),
|
216 |
+
),
|
217 |
+
(pd.DataFrame, frame_mi_data, operator.methodcaller("droplevel", "A")),
|
218 |
+
(pd.DataFrame, frame_data, operator.methodcaller("pop", "A")),
|
219 |
+
# Squeeze on columns, otherwise we'll end up with a scalar
|
220 |
+
(pd.DataFrame, frame_data, operator.methodcaller("squeeze", axis="columns")),
|
221 |
+
(pd.Series, ([1, 2],), operator.methodcaller("squeeze")),
|
222 |
+
(pd.Series, ([1, 2],), operator.methodcaller("rename_axis", index="a")),
|
223 |
+
(pd.DataFrame, frame_data, operator.methodcaller("rename_axis", columns="a")),
|
224 |
+
# Unary ops
|
225 |
+
(pd.DataFrame, frame_data, operator.neg),
|
226 |
+
(pd.Series, [1], operator.neg),
|
227 |
+
(pd.DataFrame, frame_data, operator.pos),
|
228 |
+
(pd.Series, [1], operator.pos),
|
229 |
+
(pd.DataFrame, frame_data, operator.inv),
|
230 |
+
(pd.Series, [1], operator.inv),
|
231 |
+
(pd.DataFrame, frame_data, abs),
|
232 |
+
(pd.Series, [1], abs),
|
233 |
+
(pd.DataFrame, frame_data, round),
|
234 |
+
(pd.Series, [1], round),
|
235 |
+
(pd.DataFrame, frame_data, operator.methodcaller("take", [0, 0])),
|
236 |
+
(pd.DataFrame, frame_mi_data, operator.methodcaller("xs", "a")),
|
237 |
+
(pd.Series, (1, mi), operator.methodcaller("xs", "a")),
|
238 |
+
(pd.DataFrame, frame_data, operator.methodcaller("get", "A")),
|
239 |
+
(
|
240 |
+
pd.DataFrame,
|
241 |
+
frame_data,
|
242 |
+
operator.methodcaller("reindex_like", pd.DataFrame({"A": [1, 2, 3]})),
|
243 |
+
),
|
244 |
+
(
|
245 |
+
pd.Series,
|
246 |
+
frame_data,
|
247 |
+
operator.methodcaller("reindex_like", pd.Series([0, 1, 2])),
|
248 |
+
),
|
249 |
+
(pd.DataFrame, frame_data, operator.methodcaller("add_prefix", "_")),
|
250 |
+
(pd.DataFrame, frame_data, operator.methodcaller("add_suffix", "_")),
|
251 |
+
(pd.Series, (1, ["a", "b"]), operator.methodcaller("add_prefix", "_")),
|
252 |
+
(pd.Series, (1, ["a", "b"]), operator.methodcaller("add_suffix", "_")),
|
253 |
+
(pd.Series, ([3, 2],), operator.methodcaller("sort_values")),
|
254 |
+
(pd.Series, ([1] * 10,), operator.methodcaller("head")),
|
255 |
+
(pd.DataFrame, ({"A": [1] * 10},), operator.methodcaller("head")),
|
256 |
+
(pd.Series, ([1] * 10,), operator.methodcaller("tail")),
|
257 |
+
(pd.DataFrame, ({"A": [1] * 10},), operator.methodcaller("tail")),
|
258 |
+
(pd.Series, ([1, 2],), operator.methodcaller("sample", n=2, replace=True)),
|
259 |
+
(pd.DataFrame, (frame_data,), operator.methodcaller("sample", n=2, replace=True)),
|
260 |
+
(pd.Series, ([1, 2],), operator.methodcaller("astype", float)),
|
261 |
+
(pd.DataFrame, frame_data, operator.methodcaller("astype", float)),
|
262 |
+
(pd.Series, ([1, 2],), operator.methodcaller("copy")),
|
263 |
+
(pd.DataFrame, frame_data, operator.methodcaller("copy")),
|
264 |
+
(pd.Series, ([1, 2], None, object), operator.methodcaller("infer_objects")),
|
265 |
+
(
|
266 |
+
pd.DataFrame,
|
267 |
+
({"A": np.array([1, 2], dtype=object)},),
|
268 |
+
operator.methodcaller("infer_objects"),
|
269 |
+
),
|
270 |
+
(pd.Series, ([1, 2],), operator.methodcaller("convert_dtypes")),
|
271 |
+
(pd.DataFrame, frame_data, operator.methodcaller("convert_dtypes")),
|
272 |
+
(pd.Series, ([1, None, 3],), operator.methodcaller("interpolate")),
|
273 |
+
(pd.DataFrame, ({"A": [1, None, 3]},), operator.methodcaller("interpolate")),
|
274 |
+
(pd.Series, ([1, 2],), operator.methodcaller("clip", lower=1)),
|
275 |
+
(pd.DataFrame, frame_data, operator.methodcaller("clip", lower=1)),
|
276 |
+
(
|
277 |
+
pd.Series,
|
278 |
+
(1, pd.date_range("2000", periods=4)),
|
279 |
+
operator.methodcaller("asfreq", "h"),
|
280 |
+
),
|
281 |
+
(
|
282 |
+
pd.DataFrame,
|
283 |
+
({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)),
|
284 |
+
operator.methodcaller("asfreq", "h"),
|
285 |
+
),
|
286 |
+
(
|
287 |
+
pd.Series,
|
288 |
+
(1, pd.date_range("2000", periods=4)),
|
289 |
+
operator.methodcaller("at_time", "12:00"),
|
290 |
+
),
|
291 |
+
(
|
292 |
+
pd.DataFrame,
|
293 |
+
({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)),
|
294 |
+
operator.methodcaller("at_time", "12:00"),
|
295 |
+
),
|
296 |
+
(
|
297 |
+
pd.Series,
|
298 |
+
(1, pd.date_range("2000", periods=4)),
|
299 |
+
operator.methodcaller("between_time", "12:00", "13:00"),
|
300 |
+
),
|
301 |
+
(
|
302 |
+
pd.DataFrame,
|
303 |
+
({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)),
|
304 |
+
operator.methodcaller("between_time", "12:00", "13:00"),
|
305 |
+
),
|
306 |
+
(
|
307 |
+
pd.Series,
|
308 |
+
(1, pd.date_range("2000", periods=4)),
|
309 |
+
operator.methodcaller("last", "3D"),
|
310 |
+
),
|
311 |
+
(
|
312 |
+
pd.DataFrame,
|
313 |
+
({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)),
|
314 |
+
operator.methodcaller("last", "3D"),
|
315 |
+
),
|
316 |
+
(pd.Series, ([1, 2],), operator.methodcaller("rank")),
|
317 |
+
(pd.DataFrame, frame_data, operator.methodcaller("rank")),
|
318 |
+
(pd.Series, ([1, 2],), operator.methodcaller("where", np.array([True, False]))),
|
319 |
+
(pd.DataFrame, frame_data, operator.methodcaller("where", np.array([[True]]))),
|
320 |
+
(pd.Series, ([1, 2],), operator.methodcaller("mask", np.array([True, False]))),
|
321 |
+
(pd.DataFrame, frame_data, operator.methodcaller("mask", np.array([[True]]))),
|
322 |
+
(pd.Series, ([1, 2],), operator.methodcaller("truncate", before=0)),
|
323 |
+
(pd.DataFrame, frame_data, operator.methodcaller("truncate", before=0)),
|
324 |
+
(
|
325 |
+
pd.Series,
|
326 |
+
(1, pd.date_range("2000", periods=4, tz="UTC")),
|
327 |
+
operator.methodcaller("tz_convert", "CET"),
|
328 |
+
),
|
329 |
+
(
|
330 |
+
pd.DataFrame,
|
331 |
+
({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4, tz="UTC")),
|
332 |
+
operator.methodcaller("tz_convert", "CET"),
|
333 |
+
),
|
334 |
+
(
|
335 |
+
pd.Series,
|
336 |
+
(1, pd.date_range("2000", periods=4)),
|
337 |
+
operator.methodcaller("tz_localize", "CET"),
|
338 |
+
),
|
339 |
+
(
|
340 |
+
pd.DataFrame,
|
341 |
+
({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)),
|
342 |
+
operator.methodcaller("tz_localize", "CET"),
|
343 |
+
),
|
344 |
+
(pd.Series, ([1, 2],), operator.methodcaller("describe")),
|
345 |
+
(pd.DataFrame, frame_data, operator.methodcaller("describe")),
|
346 |
+
(pd.Series, ([1, 2],), operator.methodcaller("pct_change")),
|
347 |
+
(pd.DataFrame, frame_data, operator.methodcaller("pct_change")),
|
348 |
+
(pd.Series, ([1],), operator.methodcaller("transform", lambda x: x - x.min())),
|
349 |
+
(
|
350 |
+
pd.DataFrame,
|
351 |
+
frame_mi_data,
|
352 |
+
operator.methodcaller("transform", lambda x: x - x.min()),
|
353 |
+
),
|
354 |
+
(pd.Series, ([1],), operator.methodcaller("apply", lambda x: x)),
|
355 |
+
(pd.DataFrame, frame_mi_data, operator.methodcaller("apply", lambda x: x)),
|
356 |
+
# Cumulative reductions
|
357 |
+
(pd.Series, ([1],), operator.methodcaller("cumsum")),
|
358 |
+
(pd.DataFrame, frame_data, operator.methodcaller("cumsum")),
|
359 |
+
(pd.Series, ([1],), operator.methodcaller("cummin")),
|
360 |
+
(pd.DataFrame, frame_data, operator.methodcaller("cummin")),
|
361 |
+
(pd.Series, ([1],), operator.methodcaller("cummax")),
|
362 |
+
(pd.DataFrame, frame_data, operator.methodcaller("cummax")),
|
363 |
+
(pd.Series, ([1],), operator.methodcaller("cumprod")),
|
364 |
+
(pd.DataFrame, frame_data, operator.methodcaller("cumprod")),
|
365 |
+
# Reductions
|
366 |
+
(pd.DataFrame, frame_data, operator.methodcaller("any")),
|
367 |
+
(pd.DataFrame, frame_data, operator.methodcaller("all")),
|
368 |
+
(pd.DataFrame, frame_data, operator.methodcaller("min")),
|
369 |
+
(pd.DataFrame, frame_data, operator.methodcaller("max")),
|
370 |
+
(pd.DataFrame, frame_data, operator.methodcaller("sum")),
|
371 |
+
(pd.DataFrame, frame_data, operator.methodcaller("std")),
|
372 |
+
(pd.DataFrame, frame_data, operator.methodcaller("mean")),
|
373 |
+
(pd.DataFrame, frame_data, operator.methodcaller("prod")),
|
374 |
+
(pd.DataFrame, frame_data, operator.methodcaller("sem")),
|
375 |
+
(pd.DataFrame, frame_data, operator.methodcaller("skew")),
|
376 |
+
(pd.DataFrame, frame_data, operator.methodcaller("kurt")),
|
377 |
+
]
|
378 |
+
|
379 |
+
|
380 |
+
def idfn(x):
    """Build a short pytest id for *x*.

    Returns the first single-quoted token in ``str(x)`` (e.g. the method
    name inside ``operator.methodcaller('take', [])``) when present,
    otherwise the full string form of *x*.
    """
    found = re.search(r"'(.*)?'", str(x))
    return found.group(1) if found else str(x)
|
387 |
+
|
388 |
+
|
389 |
+
# Parametrized over every entry of _all_methods; each param is a triple of
# (constructor, constructor-args, callable applied to the constructed object).
# The id is derived from the callable (typically a methodcaller) via idfn.
@pytest.fixture(params=_all_methods, ids=lambda x: idfn(x[-1]))
def ndframe_method(request):
    """
    An NDFrame method returning an NDFrame.
    """
    return request.param
|
395 |
+
|
396 |
+
|
397 |
+
@pytest.mark.filterwarnings(
    "ignore:DataFrame.fillna with 'method' is deprecated:FutureWarning",
    "ignore:last is deprecated:FutureWarning",
)
def test_finalize_called(ndframe_method):
    """``attrs`` set on an NDFrame survive the method under test.

    Builds the object from the fixture's constructor/args, tags it with
    ``attrs``, applies the method, and checks __finalize__ propagated attrs.
    """
    constructor, init_args, call = ndframe_method
    obj = constructor(*init_args)

    obj.attrs = {"a": 1}
    outcome = call(obj)

    assert outcome.attrs == {"a": 1}
|
409 |
+
|
410 |
+
|
411 |
+
@pytest.mark.parametrize(
    "data",
    [
        pd.Series(1, pd.date_range("2000", periods=4)),
        pd.DataFrame({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)),
    ],
)
def test_finalize_first(data):
    """``first`` (deprecated) still propagates ``attrs`` to its result."""
    data.attrs = {"a": 1}
    with tm.assert_produces_warning(FutureWarning, match="first is deprecated"):
        trimmed = data.first("3D")
    assert trimmed.attrs == {"a": 1}
|
425 |
+
|
426 |
+
|
427 |
+
@pytest.mark.parametrize(
    "data",
    [
        pd.Series(1, pd.date_range("2000", periods=4)),
        pd.DataFrame({"A": [1, 1, 1, 1]}, pd.date_range("2000", periods=4)),
    ],
)
def test_finalize_last(data):
    """``last`` (deprecated) still propagates ``attrs`` to its result.

    GH 53710
    """
    data.attrs = {"a": 1}
    with tm.assert_produces_warning(FutureWarning, match="last is deprecated"):
        trimmed = data.last("3D")
    assert trimmed.attrs == {"a": 1}
|
442 |
+
|
443 |
+
|
444 |
+
@not_implemented_mark
def test_finalize_called_eval_numexpr():
    """eval with the numexpr engine should carry attrs (not implemented yet)."""
    pytest.importorskip("numexpr")
    frame = pd.DataFrame({"A": [1, 2]})
    frame.attrs["A"] = 1
    evaluated = frame.eval("A + 1", engine="numexpr")
    assert evaluated.attrs == {"A": 1}
|
451 |
+
|
452 |
+
|
453 |
+
# ----------------------------------------------------------------------------
|
454 |
+
# Binary operations
|
455 |
+
|
456 |
+
|
457 |
+
@pytest.mark.parametrize("annotate", ["left", "right", "both"])
@pytest.mark.parametrize(
    "args",
    [
        (1, pd.Series([1])),
        (1, pd.DataFrame({"A": [1]})),
        (pd.Series([1]), 1),
        (pd.DataFrame({"A": [1]}), 1),
        (pd.Series([1]), pd.Series([1])),
        (pd.DataFrame({"A": [1]}), pd.DataFrame({"A": [1]})),
        (pd.Series([1]), pd.DataFrame({"A": [1]})),
        (pd.DataFrame({"A": [1]}), pd.Series([1])),
    ],
    ids=lambda x: f"({type(x[0]).__name__},{type(x[1]).__name__})",
)
def test_binops(request, args, annotate, all_binary_operators):
    # Check that attrs set on one (or both) operands propagate through every
    # binary operator.  ``annotate`` selects which operand carries attrs.
    # This generates 624 tests... Is that needed?
    left, right = args
    # Start from a clean slate; the parametrized objects are shared across cases.
    if isinstance(left, (pd.DataFrame, pd.Series)):
        left.attrs = {}
    if isinstance(right, (pd.DataFrame, pd.Series)):
        right.attrs = {}

    # Plain ints cannot carry .attrs, so those combinations are meaningless.
    if annotate == "left" and isinstance(left, int):
        pytest.skip("left is an int and doesn't support .attrs")
    if annotate == "right" and isinstance(right, int):
        pytest.skip("right is an int and doesn't support .attrs")

    # Known-broken combinations: when only one operand is annotated,
    # propagation currently fails in the cases marked below.  The branches
    # differ for forward ops vs reflected ("r"-prefixed) ops.
    if not (isinstance(left, int) or isinstance(right, int)) and annotate != "both":
        if not all_binary_operators.__name__.startswith("r"):
            # Forward op: attrs on the right operand are not picked up when
            # both operands are the same type.
            if annotate == "right" and isinstance(left, type(right)):
                request.applymarker(
                    pytest.mark.xfail(
                        reason=f"{all_binary_operators} doesn't work when right has "
                        f"attrs and both are {type(left)}"
                    )
                )
            if not isinstance(left, type(right)):
                # Mixed Series/DataFrame operands: attrs on the Series side
                # are dropped regardless of which side it is.
                if annotate == "left" and isinstance(left, pd.Series):
                    request.applymarker(
                        pytest.mark.xfail(
                            reason=f"{all_binary_operators} doesn't work when the "
                            "objects are different Series has attrs"
                        )
                    )
                elif annotate == "right" and isinstance(right, pd.Series):
                    request.applymarker(
                        pytest.mark.xfail(
                            reason=f"{all_binary_operators} doesn't work when the "
                            "objects are different Series has attrs"
                        )
                    )
        else:
            # Reflected op: mirror image of the forward-op failures.
            if annotate == "left" and isinstance(left, type(right)):
                request.applymarker(
                    pytest.mark.xfail(
                        reason=f"{all_binary_operators} doesn't work when left has "
                        f"attrs and both are {type(left)}"
                    )
                )
            if not isinstance(left, type(right)):
                if annotate == "right" and isinstance(right, pd.Series):
                    request.applymarker(
                        pytest.mark.xfail(
                            reason=f"{all_binary_operators} doesn't work when the "
                            "objects are different Series has attrs"
                        )
                    )
                elif annotate == "left" and isinstance(left, pd.Series):
                    request.applymarker(
                        pytest.mark.xfail(
                            reason=f"{all_binary_operators} doesn't work when the "
                            "objects are different Series has attrs"
                        )
                    )
    # Annotate the operand(s) selected by the ``annotate`` parameter.
    if annotate in {"left", "both"} and not isinstance(left, int):
        left.attrs = {"a": 1}
    if annotate in {"right", "both"} and not isinstance(right, int):
        right.attrs = {"a": 1}

    is_cmp = all_binary_operators in [
        operator.eq,
        operator.ne,
        operator.gt,
        operator.ge,
        operator.lt,
        operator.le,
    ]
    if is_cmp and isinstance(left, pd.DataFrame) and isinstance(right, pd.Series):
        # in 2.0 silent alignment on comparisons was removed xref GH#28759
        left, right = left.align(right, axis=1, copy=False)
    elif is_cmp and isinstance(left, pd.Series) and isinstance(right, pd.DataFrame):
        right, left = right.align(left, axis=1, copy=False)

    result = all_binary_operators(left, right)
    assert result.attrs == {"a": 1}
|
553 |
+
|
554 |
+
|
555 |
+
# ----------------------------------------------------------------------------
|
556 |
+
# Accessors
|
557 |
+
|
558 |
+
|
559 |
+
# Every .str accessor method that returns a Series/DataFrame/Index-backed
# result should propagate attrs from the parent Series.
@pytest.mark.parametrize(
    "method",
    [
        operator.methodcaller("capitalize"),
        operator.methodcaller("casefold"),
        operator.methodcaller("cat", ["a"]),
        operator.methodcaller("contains", "a"),
        operator.methodcaller("count", "a"),
        operator.methodcaller("encode", "utf-8"),
        operator.methodcaller("endswith", "a"),
        operator.methodcaller("extract", r"(\w)(\d)"),
        operator.methodcaller("extract", r"(\w)(\d)", expand=False),
        operator.methodcaller("find", "a"),
        operator.methodcaller("findall", "a"),
        operator.methodcaller("get", 0),
        operator.methodcaller("index", "a"),
        operator.methodcaller("len"),
        operator.methodcaller("ljust", 4),
        operator.methodcaller("lower"),
        operator.methodcaller("lstrip"),
        operator.methodcaller("match", r"\w"),
        operator.methodcaller("normalize", "NFC"),
        operator.methodcaller("pad", 4),
        operator.methodcaller("partition", "a"),
        operator.methodcaller("repeat", 2),
        operator.methodcaller("replace", "a", "b"),
        operator.methodcaller("rfind", "a"),
        operator.methodcaller("rindex", "a"),
        operator.methodcaller("rjust", 4),
        operator.methodcaller("rpartition", "a"),
        operator.methodcaller("rstrip"),
        operator.methodcaller("slice", 4),
        operator.methodcaller("slice_replace", 1, repl="a"),
        operator.methodcaller("startswith", "a"),
        operator.methodcaller("strip"),
        operator.methodcaller("swapcase"),
        operator.methodcaller("translate", {"a": "b"}),
        operator.methodcaller("upper"),
        operator.methodcaller("wrap", 4),
        operator.methodcaller("zfill", 4),
        operator.methodcaller("isalnum"),
        operator.methodcaller("isalpha"),
        operator.methodcaller("isdigit"),
        operator.methodcaller("isspace"),
        operator.methodcaller("islower"),
        operator.methodcaller("isupper"),
        operator.methodcaller("istitle"),
        operator.methodcaller("isnumeric"),
        operator.methodcaller("isdecimal"),
        operator.methodcaller("get_dummies"),
    ],
    ids=idfn,
)
def test_string_method(method):
    # "a1" satisfies both letter- and digit-based methods (e.g. extract).
    s = pd.Series(["a1"])
    s.attrs = {"a": 1}
    result = method(s.str)
    assert result.attrs == {"a": 1}
|
617 |
+
|
618 |
+
|
619 |
+
# .dt accessor methods on a datetime Series should propagate attrs.
@pytest.mark.parametrize(
    "method",
    [
        operator.methodcaller("to_period"),
        operator.methodcaller("tz_localize", "CET"),
        operator.methodcaller("normalize"),
        operator.methodcaller("strftime", "%Y"),
        operator.methodcaller("round", "h"),
        operator.methodcaller("floor", "h"),
        operator.methodcaller("ceil", "h"),
        operator.methodcaller("month_name"),
        operator.methodcaller("day_name"),
    ],
    ids=idfn,
)
def test_datetime_method(method):
    s = pd.Series(pd.date_range("2000", periods=4))
    s.attrs = {"a": 1}
    result = method(s.dt)
    assert result.attrs == {"a": 1}
|
639 |
+
|
640 |
+
|
641 |
+
# .dt accessor *properties* (as opposed to methods) should also carry attrs.
@pytest.mark.parametrize(
    "attr",
    [
        "date",
        "time",
        "timetz",
        "year",
        "month",
        "day",
        "hour",
        "minute",
        "second",
        "microsecond",
        "nanosecond",
        "dayofweek",
        "day_of_week",
        "dayofyear",
        "day_of_year",
        "quarter",
        "is_month_start",
        "is_month_end",
        "is_quarter_start",
        "is_quarter_end",
        "is_year_start",
        "is_year_end",
        "is_leap_year",
        "daysinmonth",
        "days_in_month",
    ],
)
def test_datetime_property(attr):
    s = pd.Series(pd.date_range("2000", periods=4))
    s.attrs = {"a": 1}
    result = getattr(s.dt, attr)
    assert result.attrs == {"a": 1}
|
676 |
+
|
677 |
+
|
678 |
+
@pytest.mark.parametrize(
    "attr", ["days", "seconds", "microseconds", "nanoseconds", "components"]
)
def test_timedelta_property(attr):
    """Timedelta ``.dt`` property access keeps attrs on the result."""
    ser = pd.Series(pd.timedelta_range("2000", periods=4))
    ser.attrs = {"a": 1}
    out = getattr(ser.dt, attr)
    assert out.attrs == {"a": 1}
|
686 |
+
|
687 |
+
|
688 |
+
@pytest.mark.parametrize("method", [operator.methodcaller("total_seconds")])
def test_timedelta_methods(method):
    """Timedelta ``.dt`` methods keep attrs on the result."""
    ser = pd.Series(pd.timedelta_range("2000", periods=4))
    ser.attrs = {"a": 1}
    assert method(ser.dt).attrs == {"a": 1}
|
694 |
+
|
695 |
+
|
696 |
+
# .cat accessor operations do not yet propagate attrs; the whole group is
# expected to fail via not_implemented_mark.
@pytest.mark.parametrize(
    "method",
    [
        operator.methodcaller("add_categories", ["c"]),
        operator.methodcaller("as_ordered"),
        operator.methodcaller("as_unordered"),
        lambda x: getattr(x, "codes"),
        operator.methodcaller("remove_categories", "a"),
        operator.methodcaller("remove_unused_categories"),
        operator.methodcaller("rename_categories", {"a": "A", "b": "B"}),
        operator.methodcaller("reorder_categories", ["b", "a"]),
        operator.methodcaller("set_categories", ["A", "B"]),
    ],
)
@not_implemented_mark
def test_categorical_accessor(method):
    s = pd.Series(["a", "b"], dtype="category")
    s.attrs = {"a": 1}
    result = method(s.cat)
    assert result.attrs == {"a": 1}
|
716 |
+
|
717 |
+
|
718 |
+
# ----------------------------------------------------------------------------
|
719 |
+
# Groupby
|
720 |
+
|
721 |
+
|
722 |
+
# Groupby reductions/applications that DO currently propagate attrs.
@pytest.mark.parametrize(
    "obj", [pd.Series([0, 0]), pd.DataFrame({"A": [0, 1], "B": [1, 2]})]
)
@pytest.mark.parametrize(
    "method",
    [
        operator.methodcaller("sum"),
        lambda x: x.apply(lambda y: y),
        lambda x: x.agg("sum"),
        lambda x: x.agg("mean"),
        lambda x: x.agg("median"),
    ],
)
def test_groupby_finalize(obj, method):
    obj.attrs = {"a": 1}
    result = method(obj.groupby([0, 0], group_keys=False))
    assert result.attrs == {"a": 1}
|
739 |
+
|
740 |
+
|
741 |
+
# Groupby aggregations that do NOT yet propagate attrs; marked xfail as a
# group via not_implemented_mark.
@pytest.mark.parametrize(
    "obj", [pd.Series([0, 0]), pd.DataFrame({"A": [0, 1], "B": [1, 2]})]
)
@pytest.mark.parametrize(
    "method",
    [
        lambda x: x.agg(["sum", "count"]),
        lambda x: x.agg("std"),
        lambda x: x.agg("var"),
        lambda x: x.agg("sem"),
        lambda x: x.agg("size"),
        lambda x: x.agg("ohlc"),
    ],
)
@not_implemented_mark
def test_groupby_finalize_not_implemented(obj, method):
    obj.attrs = {"a": 1}
    result = method(obj.groupby([0, 0]))
    assert result.attrs == {"a": 1}
|
760 |
+
|
761 |
+
|
762 |
+
def test_finalize_frame_series_name():
    """__finalize__ from a DataFrame must not copy a column label named
    ``name`` into the Series ``name`` attribute.

    https://github.com/pandas-dev/pandas/pull/37186/files#r506978889
    """
    frame = pd.DataFrame({"name": [1, 2]})
    finalized = pd.Series([1, 2]).__finalize__(frame)
    assert finalized.name is None
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/generic/test_series.py
ADDED
@@ -0,0 +1,159 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from operator import methodcaller
|
2 |
+
|
3 |
+
import numpy as np
|
4 |
+
import pytest
|
5 |
+
|
6 |
+
import pandas as pd
|
7 |
+
from pandas import (
|
8 |
+
MultiIndex,
|
9 |
+
Series,
|
10 |
+
date_range,
|
11 |
+
)
|
12 |
+
import pandas._testing as tm
|
13 |
+
|
14 |
+
|
15 |
+
class TestSeries:
|
16 |
+
@pytest.mark.parametrize("func", ["rename_axis", "_set_axis_name"])
|
17 |
+
def test_set_axis_name_mi(self, func):
|
18 |
+
ser = Series(
|
19 |
+
[11, 21, 31],
|
20 |
+
index=MultiIndex.from_tuples(
|
21 |
+
[("A", x) for x in ["a", "B", "c"]], names=["l1", "l2"]
|
22 |
+
),
|
23 |
+
)
|
24 |
+
|
25 |
+
result = methodcaller(func, ["L1", "L2"])(ser)
|
26 |
+
assert ser.index.name is None
|
27 |
+
assert ser.index.names == ["l1", "l2"]
|
28 |
+
assert result.index.name is None
|
29 |
+
assert result.index.names, ["L1", "L2"]
|
30 |
+
|
31 |
+
def test_set_axis_name_raises(self):
|
32 |
+
ser = Series([1])
|
33 |
+
msg = "No axis named 1 for object type Series"
|
34 |
+
with pytest.raises(ValueError, match=msg):
|
35 |
+
ser._set_axis_name(name="a", axis=1)
|
36 |
+
|
37 |
+
def test_get_bool_data_preserve_dtype(self):
|
38 |
+
ser = Series([True, False, True])
|
39 |
+
result = ser._get_bool_data()
|
40 |
+
tm.assert_series_equal(result, ser)
|
41 |
+
|
42 |
+
def test_nonzero_single_element(self):
|
43 |
+
# allow single item via bool method
|
44 |
+
msg_warn = (
|
45 |
+
"Series.bool is now deprecated and will be removed "
|
46 |
+
"in future version of pandas"
|
47 |
+
)
|
48 |
+
ser = Series([True])
|
49 |
+
ser1 = Series([False])
|
50 |
+
with tm.assert_produces_warning(FutureWarning, match=msg_warn):
|
51 |
+
assert ser.bool()
|
52 |
+
with tm.assert_produces_warning(FutureWarning, match=msg_warn):
|
53 |
+
assert not ser1.bool()
|
54 |
+
|
55 |
+
@pytest.mark.parametrize("data", [np.nan, pd.NaT, True, False])
|
56 |
+
def test_nonzero_single_element_raise_1(self, data):
|
57 |
+
# single item nan to raise
|
58 |
+
series = Series([data])
|
59 |
+
|
60 |
+
msg = "The truth value of a Series is ambiguous"
|
61 |
+
with pytest.raises(ValueError, match=msg):
|
62 |
+
bool(series)
|
63 |
+
|
64 |
+
@pytest.mark.parametrize("data", [np.nan, pd.NaT])
|
65 |
+
def test_nonzero_single_element_raise_2(self, data):
|
66 |
+
msg_warn = (
|
67 |
+
"Series.bool is now deprecated and will be removed "
|
68 |
+
"in future version of pandas"
|
69 |
+
)
|
70 |
+
msg_err = "bool cannot act on a non-boolean single element Series"
|
71 |
+
series = Series([data])
|
72 |
+
with tm.assert_produces_warning(FutureWarning, match=msg_warn):
|
73 |
+
with pytest.raises(ValueError, match=msg_err):
|
74 |
+
series.bool()
|
75 |
+
|
76 |
+
@pytest.mark.parametrize("data", [(True, True), (False, False)])
|
77 |
+
def test_nonzero_multiple_element_raise(self, data):
|
78 |
+
# multiple bool are still an error
|
79 |
+
msg_warn = (
|
80 |
+
"Series.bool is now deprecated and will be removed "
|
81 |
+
"in future version of pandas"
|
82 |
+
)
|
83 |
+
msg_err = "The truth value of a Series is ambiguous"
|
84 |
+
series = Series([data])
|
85 |
+
with pytest.raises(ValueError, match=msg_err):
|
86 |
+
bool(series)
|
87 |
+
with tm.assert_produces_warning(FutureWarning, match=msg_warn):
|
88 |
+
with pytest.raises(ValueError, match=msg_err):
|
89 |
+
series.bool()
|
90 |
+
|
91 |
+
@pytest.mark.parametrize("data", [1, 0, "a", 0.0])
|
92 |
+
def test_nonbool_single_element_raise(self, data):
|
93 |
+
# single non-bool are an error
|
94 |
+
msg_warn = (
|
95 |
+
"Series.bool is now deprecated and will be removed "
|
96 |
+
"in future version of pandas"
|
97 |
+
)
|
98 |
+
msg_err1 = "The truth value of a Series is ambiguous"
|
99 |
+
msg_err2 = "bool cannot act on a non-boolean single element Series"
|
100 |
+
series = Series([data])
|
101 |
+
with pytest.raises(ValueError, match=msg_err1):
|
102 |
+
bool(series)
|
103 |
+
with tm.assert_produces_warning(FutureWarning, match=msg_warn):
|
104 |
+
with pytest.raises(ValueError, match=msg_err2):
|
105 |
+
series.bool()
|
106 |
+
|
107 |
+
def test_metadata_propagation_indiv_resample(self):
|
108 |
+
# resample
|
109 |
+
ts = Series(
|
110 |
+
np.random.default_rng(2).random(1000),
|
111 |
+
index=date_range("20130101", periods=1000, freq="s"),
|
112 |
+
name="foo",
|
113 |
+
)
|
114 |
+
result = ts.resample("1min").mean()
|
115 |
+
tm.assert_metadata_equivalent(ts, result)
|
116 |
+
|
117 |
+
result = ts.resample("1min").min()
|
118 |
+
tm.assert_metadata_equivalent(ts, result)
|
119 |
+
|
120 |
+
result = ts.resample("1min").apply(lambda x: x.sum())
|
121 |
+
tm.assert_metadata_equivalent(ts, result)
|
122 |
+
|
123 |
+
def test_metadata_propagation_indiv(self, monkeypatch):
|
124 |
+
# check that the metadata matches up on the resulting ops
|
125 |
+
|
126 |
+
ser = Series(range(3), range(3))
|
127 |
+
ser.name = "foo"
|
128 |
+
ser2 = Series(range(3), range(3))
|
129 |
+
ser2.name = "bar"
|
130 |
+
|
131 |
+
result = ser.T
|
132 |
+
tm.assert_metadata_equivalent(ser, result)
|
133 |
+
|
134 |
+
def finalize(self, other, method=None, **kwargs):
|
135 |
+
for name in self._metadata:
|
136 |
+
if method == "concat" and name == "filename":
|
137 |
+
value = "+".join(
|
138 |
+
[
|
139 |
+
getattr(obj, name)
|
140 |
+
for obj in other.objs
|
141 |
+
if getattr(obj, name, None)
|
142 |
+
]
|
143 |
+
)
|
144 |
+
object.__setattr__(self, name, value)
|
145 |
+
else:
|
146 |
+
object.__setattr__(self, name, getattr(other, name, None))
|
147 |
+
|
148 |
+
return self
|
149 |
+
|
150 |
+
with monkeypatch.context() as m:
|
151 |
+
m.setattr(Series, "_metadata", ["name", "filename"])
|
152 |
+
m.setattr(Series, "__finalize__", finalize)
|
153 |
+
|
154 |
+
ser.filename = "foo"
|
155 |
+
ser2.filename = "bar"
|
156 |
+
|
157 |
+
result = pd.concat([ser, ser2])
|
158 |
+
assert result.filename == "foo+bar"
|
159 |
+
assert result.name is None
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (207 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__pycache__/test_drop_duplicates.cpython-310.pyc
ADDED
Binary file (2.96 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/__pycache__/test_nat.cpython-310.pyc
ADDED
Binary file (1.96 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_drop_duplicates.py
ADDED
@@ -0,0 +1,89 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas import (
|
5 |
+
PeriodIndex,
|
6 |
+
Series,
|
7 |
+
date_range,
|
8 |
+
period_range,
|
9 |
+
timedelta_range,
|
10 |
+
)
|
11 |
+
import pandas._testing as tm
|
12 |
+
|
13 |
+
|
14 |
+
class DropDuplicates:
    """Shared drop_duplicates/duplicated tests; subclasses supply the
    ``idx`` fixture (a 10-element datetimelike index named "idx")."""

    def test_drop_duplicates_metadata(self, idx):
        # GH#10115 -- name and freq must survive drop_duplicates
        deduped = idx.drop_duplicates()
        tm.assert_index_equal(idx, deduped)
        assert idx.freq == deduped.freq

        doubled = idx.append(idx)
        deduped = doubled.drop_duplicates()

        expected = idx
        if isinstance(idx, PeriodIndex):
            # PeriodIndex keeps freq (it is part of the dtype)
            assert deduped.freq == expected.freq
        else:
            # appending duplicates resets freq for the other flavors
            assert doubled.freq is None
            assert deduped.freq is None
            expected = idx._with_freq(None)

        tm.assert_index_equal(deduped, expected)

    @pytest.mark.parametrize(
        "keep, expected, index",
        [
            (
                "first",
                np.concatenate(([False] * 10, [True] * 5)),
                np.arange(0, 10, dtype=np.int64),
            ),
            (
                "last",
                np.concatenate(([True] * 5, [False] * 10)),
                np.arange(5, 15, dtype=np.int64),
            ),
            (
                False,
                np.concatenate(([True] * 5, [False] * 5, [True] * 5)),
                np.arange(5, 10, dtype=np.int64),
            ),
        ],
    )
    def test_drop_duplicates(self, keep, expected, index, idx):
        # to check Index/Series compat
        idx = idx.append(idx[:5])

        tm.assert_numpy_array_equal(idx.duplicated(keep=keep), expected)
        uniques = idx[~expected]

        tm.assert_index_equal(idx.drop_duplicates(keep=keep), uniques)

        ser_result = Series(idx).drop_duplicates(keep=keep)
        tm.assert_series_equal(ser_result, Series(uniques, index=index))
|
68 |
+
|
69 |
+
|
70 |
+
class TestDropDuplicatesPeriodIndex(DropDuplicates):
    # Runs the shared DropDuplicates suite against PeriodIndex, over a
    # spread of daily and sub-daily frequencies.

    @pytest.fixture(params=["D", "3D", "h", "2h", "min", "2min", "s", "3s"])
    def freq(self, request):
        # Frequency string for the PeriodIndex under test.
        return request.param

    @pytest.fixture
    def idx(self, freq):
        # 10-period index starting 2011-01-01 at the parametrized freq.
        return period_range("2011-01-01", periods=10, freq=freq, name="idx")
|
78 |
+
|
79 |
+
|
80 |
+
class TestDropDuplicatesDatetimeIndex(DropDuplicates):
    # Runs the shared DropDuplicates suite against DatetimeIndex.

    @pytest.fixture
    def idx(self, freq_sample):
        # NOTE(review): `freq_sample` is a fixture defined outside this
        # module (conftest) -- presumably a frequency string; confirm there.
        return date_range("2011-01-01", freq=freq_sample, periods=10, name="idx")
|
84 |
+
|
85 |
+
|
86 |
+
class TestDropDuplicatesTimedeltaIndex(DropDuplicates):
    # Runs the shared DropDuplicates suite against TimedeltaIndex.

    @pytest.fixture
    def idx(self, freq_sample):
        # NOTE(review): `freq_sample` comes from conftest; presumably a
        # frequency string usable by timedelta_range -- confirm there.
        return timedelta_range("1 day", periods=10, freq=freq_sample, name="idx")
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_equals.py
ADDED
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Tests shared for DatetimeIndex/TimedeltaIndex/PeriodIndex
|
3 |
+
"""
|
4 |
+
from datetime import (
|
5 |
+
datetime,
|
6 |
+
timedelta,
|
7 |
+
)
|
8 |
+
|
9 |
+
import numpy as np
|
10 |
+
import pytest
|
11 |
+
|
12 |
+
import pandas as pd
|
13 |
+
from pandas import (
|
14 |
+
CategoricalIndex,
|
15 |
+
DatetimeIndex,
|
16 |
+
Index,
|
17 |
+
PeriodIndex,
|
18 |
+
TimedeltaIndex,
|
19 |
+
date_range,
|
20 |
+
period_range,
|
21 |
+
timedelta_range,
|
22 |
+
)
|
23 |
+
import pandas._testing as tm
|
24 |
+
|
25 |
+
|
26 |
+
class EqualsTests:
    """Shared ``.equals`` tests; subclasses provide the ``index`` fixture
    (a DatetimeIndex/TimedeltaIndex/PeriodIndex)."""

    def test_not_equals_numeric(self, index):
        # the raw i8 values under a numeric dtype never compare equal
        for numeric in (
            Index(index.asi8),
            Index(index.asi8.astype("u8")),
            Index(index.asi8).astype("f8"),
        ):
            assert not index.equals(numeric)

    def test_equals(self, index):
        # equal to itself and to object/categorical recastings of itself
        for same in (
            index,
            index.astype(object),
            CategoricalIndex(index),
            CategoricalIndex(index.astype(object)),
        ):
            assert index.equals(same)

    def test_not_equals_non_arraylike(self, index):
        # a plain Python list is never "equal" to an Index
        assert not index.equals(list(index))

    def test_not_equals_strings(self, index):
        # string representations of the values do not compare equal
        as_strs = Index([str(x) for x in index], dtype=object)
        assert not index.equals(as_strs)
        assert not index.equals(CategoricalIndex(as_strs))

    def test_not_equals_misc_strs(self, index):
        # arbitrary unrelated strings
        assert not index.equals(Index(list("abc")))
|
49 |
+
|
50 |
+
|
51 |
+
class TestPeriodIndexEquals(EqualsTests):
    @pytest.fixture
    def index(self):
        return period_range("2013-01-01", periods=5, freq="D")

    # TODO: de-duplicate with other test_equals2 methods
    @pytest.mark.parametrize("freq", ["D", "M"])
    def test_equals2(self, freq):
        # GH#13107
        idx = PeriodIndex(["2011-01-01", "2011-01-02", "NaT"], freq=freq)
        for same in (idx, idx.copy(), idx.astype(object)):
            assert idx.equals(same)
        assert idx.astype(object).equals(idx)
        assert idx.astype(object).equals(idx.astype(object))
        assert not idx.equals(list(idx))
        assert not idx.equals(pd.Series(idx))

        # same strings parsed under a different freq are NOT equal
        idx2 = PeriodIndex(["2011-01-01", "2011-01-02", "NaT"], freq="h")
        for other in (idx2, idx2.copy(), idx2.astype(object)):
            assert not idx.equals(other)
        assert not idx.astype(object).equals(idx2)
        assert not idx.equals(list(idx2))
        assert not idx.equals(pd.Series(idx2))

        # same internal i8 values, different freq -- the PeriodIndex
        # analogue of "same internal, different tz"
        idx3 = PeriodIndex._simple_new(
            idx._values._simple_new(idx._values.asi8, dtype=pd.PeriodDtype("h"))
        )
        tm.assert_numpy_array_equal(idx.asi8, idx3.asi8)
        for other in (idx3, idx3.copy(), idx3.astype(object)):
            assert not idx.equals(other)
        assert not idx.astype(object).equals(idx3)
        assert not idx.equals(list(idx3))
        assert not idx.equals(pd.Series(idx3))
|
88 |
+
|
89 |
+
|
90 |
+
class TestDatetimeIndexEquals(EqualsTests):
    @pytest.fixture
    def index(self):
        return date_range("2013-01-01", periods=5)

    def test_equals2(self):
        # GH#13107
        idx = DatetimeIndex(["2011-01-01", "2011-01-02", "NaT"])
        for same in (idx, idx.copy(), idx.astype(object)):
            assert idx.equals(same)
        assert idx.astype(object).equals(idx)
        assert idx.astype(object).equals(idx.astype(object))
        assert not idx.equals(list(idx))
        assert not idx.equals(pd.Series(idx))

        # tz-aware vs naive: never equal
        idx2 = DatetimeIndex(["2011-01-01", "2011-01-02", "NaT"], tz="US/Pacific")
        for other in (idx2, idx2.copy(), idx2.astype(object)):
            assert not idx.equals(other)
        assert not idx.astype(object).equals(idx2)
        assert not idx.equals(list(idx2))
        assert not idx.equals(pd.Series(idx2))

        # same internal i8 values, different tz
        idx3 = DatetimeIndex(idx.asi8, tz="US/Pacific")
        tm.assert_numpy_array_equal(idx.asi8, idx3.asi8)
        for other in (idx3, idx3.copy(), idx3.astype(object)):
            assert not idx.equals(other)
        assert not idx.astype(object).equals(idx3)
        assert not idx.equals(list(idx3))
        assert not idx.equals(pd.Series(idx3))

        # check that we do not raise when comparing with OutOfBounds objects
        oob = Index([datetime(2500, 1, 1)] * 3, dtype=object)
        for left in (idx, idx2, idx3):
            assert not left.equals(oob)

        # check that we do not raise when comparing with OutOfBounds dt64
        oob2 = oob.map(np.datetime64)
        for left in (idx, idx2, idx3):
            assert not left.equals(oob2)

    @pytest.mark.parametrize("freq", ["B", "C"])
    def test_not_equals_bday(self, freq):
        rng = date_range("2009-01-01", "2010-01-01", freq=freq)
        assert not rng.equals(list(rng))
|
140 |
+
|
141 |
+
|
142 |
+
class TestTimedeltaIndexEquals(EqualsTests):
    @pytest.fixture
    def index(self):
        return timedelta_range("1 day", periods=10)

    def test_equals2(self):
        # GH#13107
        idx = TimedeltaIndex(["1 days", "2 days", "NaT"])
        for same in (idx, idx.copy(), idx.astype(object)):
            assert idx.equals(same)
        assert idx.astype(object).equals(idx)
        assert idx.astype(object).equals(idx.astype(object))
        assert not idx.equals(list(idx))
        assert not idx.equals(pd.Series(idx))

        # same values in a different order: not equal
        idx2 = TimedeltaIndex(["2 days", "1 days", "NaT"])
        for other in (idx2, idx2.copy(), idx2.astype(object)):
            assert not idx.equals(other)
        assert not idx.astype(object).equals(idx2)
        assert not idx.astype(object).equals(idx2.astype(object))
        assert not idx.equals(list(idx2))
        assert not idx.equals(pd.Series(idx2))

        # Check that we dont raise OverflowError on comparisons outside the
        # implementation range GH#28532
        oob = Index([timedelta(days=10**6)] * 3, dtype=object)
        for left in (idx, idx2):
            assert not left.equals(oob)

        oob2 = Index([np.timedelta64(x) for x in oob], dtype=object)
        assert (oob == oob2).all()
        for left in (idx, idx2):
            assert not left.equals(oob2)

        oob3 = oob.map(np.timedelta64)
        assert (oob3 == oob).all()
        for left in (idx, idx2):
            assert not left.equals(oob3)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_indexing.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
import pandas as pd
|
5 |
+
from pandas import (
|
6 |
+
DatetimeIndex,
|
7 |
+
Index,
|
8 |
+
)
|
9 |
+
import pandas._testing as tm
|
10 |
+
|
11 |
+
dtlike_dtypes = [
    np.dtype("timedelta64[ns]"),
    np.dtype("datetime64[ns]"),
    pd.DatetimeTZDtype("ns", "Asia/Tokyo"),
    pd.PeriodDtype("ns"),
]


@pytest.mark.parametrize("ldtype", dtlike_dtypes)
@pytest.mark.parametrize("rdtype", dtlike_dtypes)
def test_get_indexer_non_unique_wrong_dtype(ldtype, rdtype):
    # get_indexer_non_unique between datetimelike indexes of mismatched
    # dtype must report "no matches" rather than raising or casting.
    hourly_i8 = np.tile(3600 * 10**9 * np.arange(3), 2)

    def _build(dtype):
        if dtype is dtlike_dtypes[-1]:
            # PeriodArray will try to cast ints to strings
            return DatetimeIndex(hourly_i8).astype(dtype)
        return Index(hourly_i8, dtype=dtype)

    left = _build(ldtype)
    right = _build(rdtype)

    indexer, missing = left.get_indexer_non_unique(right)

    if ldtype is rdtype:
        # identical dtypes: each target hits both duplicate positions
        expected_indexer = np.array([0, 3, 1, 4, 2, 5] * 2, dtype=np.intp)
        tm.assert_numpy_array_equal(indexer, expected_indexer)
        tm.assert_numpy_array_equal(missing, np.array([], dtype=np.intp))
    else:
        # mismatched dtypes: nothing matches, everything is missing
        tm.assert_numpy_array_equal(indexer, np.array([-1] * 6, dtype=np.intp))
        tm.assert_numpy_array_equal(missing, np.arange(6, dtype=np.intp))
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_is_monotonic.py
ADDED
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pandas import (
|
2 |
+
Index,
|
3 |
+
NaT,
|
4 |
+
date_range,
|
5 |
+
)
|
6 |
+
|
7 |
+
|
8 |
+
def test_is_monotonic_with_nat():
    # GH#31437
    # PeriodIndex.is_monotonic_increasing should behave analogously to DatetimeIndex,
    # in particular never be monotonic when we have NaT
    dti = date_range("2016-01-01", periods=3)
    pi = dti.to_period("D")
    tdi = Index(dti.view("timedelta64[ns]"))

    # Without NaT: all three index flavors and their engines are
    # monotonic increasing and unique.
    for obj in [pi, pi._engine, dti, dti._engine, tdi, tdi._engine]:
        if isinstance(obj, Index):
            # i.e. not Engines
            # NOTE(review): this repeats the unconditional assert below for
            # Index objects; the one outside the `if` also covers Engines.
            assert obj.is_monotonic_increasing
        assert obj.is_monotonic_increasing
        assert not obj.is_monotonic_decreasing
        assert obj.is_unique

    # NaT inserted at the front: monotonic in neither direction.
    dti1 = dti.insert(0, NaT)
    pi1 = dti1.to_period("D")
    tdi1 = Index(dti1.view("timedelta64[ns]"))

    for obj in [pi1, pi1._engine, dti1, dti1._engine, tdi1, tdi1._engine]:
        if isinstance(obj, Index):
            # i.e. not Engines
            assert not obj.is_monotonic_increasing
        assert not obj.is_monotonic_increasing
        assert not obj.is_monotonic_decreasing
        assert obj.is_unique

    # NaT appended at the end: same conclusion.
    dti2 = dti.insert(3, NaT)
    pi2 = dti2.to_period("h")
    tdi2 = Index(dti2.view("timedelta64[ns]"))

    for obj in [pi2, pi2._engine, dti2, dti2._engine, tdi2, tdi2._engine]:
        if isinstance(obj, Index):
            # i.e. not Engines
            assert not obj.is_monotonic_increasing
        assert not obj.is_monotonic_increasing
        assert not obj.is_monotonic_decreasing
        assert obj.is_unique
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_nat.py
ADDED
@@ -0,0 +1,53 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas import (
|
5 |
+
DatetimeIndex,
|
6 |
+
NaT,
|
7 |
+
PeriodIndex,
|
8 |
+
TimedeltaIndex,
|
9 |
+
)
|
10 |
+
import pandas._testing as tm
|
11 |
+
|
12 |
+
|
13 |
+
class NATests:
    """Shared NaT-handling tests; subclasses provide the two-element
    ``index_without_na`` fixture."""

    def test_nat(self, index_without_na):
        empty = index_without_na[:0]

        with_na = index_without_na.copy(deep=True)
        with_na._data[1] = NaT

        # every flavor (empty, with NaT, without NaT) reports NaT as NA
        for obj in (empty, with_na, index_without_na):
            assert obj._na_value is NaT

        clean = index_without_na
        assert clean._can_hold_na
        tm.assert_numpy_array_equal(clean._isnan, np.array([False, False]))
        assert clean.hasnans is False

        assert with_na._can_hold_na
        tm.assert_numpy_array_equal(with_na._isnan, np.array([False, True]))
        assert with_na.hasnans is True
|
35 |
+
|
36 |
+
|
37 |
+
class TestDatetimeIndexNA(NATests):
    # Runs the shared NaT tests against DatetimeIndex.

    @pytest.fixture
    def index_without_na(self, tz_naive_fixture):
        # NOTE(review): `tz_naive_fixture` comes from conftest --
        # presumably None plus a range of timezones; confirm there.
        tz = tz_naive_fixture
        return DatetimeIndex(["2011-01-01", "2011-01-02"], tz=tz)
|
42 |
+
|
43 |
+
|
44 |
+
class TestTimedeltaIndexNA(NATests):
    # Runs the shared NaT tests against TimedeltaIndex.

    @pytest.fixture
    def index_without_na(self):
        return TimedeltaIndex(["1 days", "2 days"])
|
48 |
+
|
49 |
+
|
50 |
+
class TestPeriodIndexNA(NATests):
    # Runs the shared NaT tests against PeriodIndex.

    @pytest.fixture
    def index_without_na(self):
        return PeriodIndex(["2011-01-01", "2011-01-02"], freq="D")
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_sort_values.py
ADDED
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas import (
|
5 |
+
DatetimeIndex,
|
6 |
+
Index,
|
7 |
+
NaT,
|
8 |
+
PeriodIndex,
|
9 |
+
TimedeltaIndex,
|
10 |
+
timedelta_range,
|
11 |
+
)
|
12 |
+
import pandas._testing as tm
|
13 |
+
|
14 |
+
|
15 |
+
def check_freq_ascending(ordered, orig, ascending):
    """
    Check the expected freq on a PeriodIndex/DatetimeIndex/TimedeltaIndex
    when the original index is generated (or generate-able) with
    period_range/date_range/timedelta_range.
    """
    if isinstance(ordered, PeriodIndex):
        # PeriodIndex freq is part of the dtype and never flips sign
        assert ordered.freq == orig.freq
    elif isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
        # a descending sort negates the inferred step
        sign = 1 if ascending else -1
        assert ordered.freq.n == sign * orig.freq.n
|
28 |
+
|
29 |
+
|
30 |
+
def check_freq_nonmonotonic(ordered, orig):
    """
    Check the expected freq on a PeriodIndex/DatetimeIndex/TimedeltaIndex
    when the original index is _not_ generated (or generate-able) with
    period_range/date_range//timedelta_range.
    """
    if isinstance(ordered, PeriodIndex):
        # freq lives in the dtype, so it survives regardless of order
        assert ordered.freq == orig.freq
        return
    if isinstance(ordered, (DatetimeIndex, TimedeltaIndex)):
        # no regular step can be inferred from a non-range-like index
        assert ordered.freq is None
|
40 |
+
|
41 |
+
|
42 |
+
class TestSortValues:
|
43 |
+
@pytest.fixture(params=[DatetimeIndex, TimedeltaIndex, PeriodIndex])
|
44 |
+
def non_monotonic_idx(self, request):
|
45 |
+
if request.param is DatetimeIndex:
|
46 |
+
return DatetimeIndex(["2000-01-04", "2000-01-01", "2000-01-02"])
|
47 |
+
elif request.param is PeriodIndex:
|
48 |
+
dti = DatetimeIndex(["2000-01-04", "2000-01-01", "2000-01-02"])
|
49 |
+
return dti.to_period("D")
|
50 |
+
else:
|
51 |
+
return TimedeltaIndex(
|
52 |
+
["1 day 00:00:05", "1 day 00:00:01", "1 day 00:00:02"]
|
53 |
+
)
|
54 |
+
|
55 |
+
def test_argmin_argmax(self, non_monotonic_idx):
|
56 |
+
assert non_monotonic_idx.argmin() == 1
|
57 |
+
assert non_monotonic_idx.argmax() == 0
|
58 |
+
|
59 |
+
def test_sort_values(self, non_monotonic_idx):
|
60 |
+
idx = non_monotonic_idx
|
61 |
+
ordered = idx.sort_values()
|
62 |
+
assert ordered.is_monotonic_increasing
|
63 |
+
ordered = idx.sort_values(ascending=False)
|
64 |
+
assert ordered[::-1].is_monotonic_increasing
|
65 |
+
|
66 |
+
ordered, dexer = idx.sort_values(return_indexer=True)
|
67 |
+
assert ordered.is_monotonic_increasing
|
68 |
+
tm.assert_numpy_array_equal(dexer, np.array([1, 2, 0], dtype=np.intp))
|
69 |
+
|
70 |
+
ordered, dexer = idx.sort_values(return_indexer=True, ascending=False)
|
71 |
+
assert ordered[::-1].is_monotonic_increasing
|
72 |
+
tm.assert_numpy_array_equal(dexer, np.array([0, 2, 1], dtype=np.intp))
|
73 |
+
|
74 |
+
def check_sort_values_with_freq(self, idx):
|
75 |
+
ordered = idx.sort_values()
|
76 |
+
tm.assert_index_equal(ordered, idx)
|
77 |
+
check_freq_ascending(ordered, idx, True)
|
78 |
+
|
79 |
+
ordered = idx.sort_values(ascending=False)
|
80 |
+
expected = idx[::-1]
|
81 |
+
tm.assert_index_equal(ordered, expected)
|
82 |
+
check_freq_ascending(ordered, idx, False)
|
83 |
+
|
84 |
+
ordered, indexer = idx.sort_values(return_indexer=True)
|
85 |
+
tm.assert_index_equal(ordered, idx)
|
86 |
+
tm.assert_numpy_array_equal(indexer, np.array([0, 1, 2], dtype=np.intp))
|
87 |
+
check_freq_ascending(ordered, idx, True)
|
88 |
+
|
89 |
+
ordered, indexer = idx.sort_values(return_indexer=True, ascending=False)
|
90 |
+
expected = idx[::-1]
|
91 |
+
tm.assert_index_equal(ordered, expected)
|
92 |
+
tm.assert_numpy_array_equal(indexer, np.array([2, 1, 0], dtype=np.intp))
|
93 |
+
check_freq_ascending(ordered, idx, False)
|
94 |
+
|
95 |
+
@pytest.mark.parametrize("freq", ["D", "h"])
|
96 |
+
def test_sort_values_with_freq_timedeltaindex(self, freq):
|
97 |
+
# GH#10295
|
98 |
+
idx = timedelta_range(start=f"1{freq}", periods=3, freq=freq).rename("idx")
|
99 |
+
|
100 |
+
self.check_sort_values_with_freq(idx)
|
101 |
+
|
102 |
+
@pytest.mark.parametrize(
|
103 |
+
"idx",
|
104 |
+
[
|
105 |
+
DatetimeIndex(
|
106 |
+
["2011-01-01", "2011-01-02", "2011-01-03"], freq="D", name="idx"
|
107 |
+
),
|
108 |
+
DatetimeIndex(
|
109 |
+
["2011-01-01 09:00", "2011-01-01 10:00", "2011-01-01 11:00"],
|
110 |
+
freq="h",
|
111 |
+
name="tzidx",
|
112 |
+
tz="Asia/Tokyo",
|
113 |
+
),
|
114 |
+
],
|
115 |
+
)
|
116 |
+
def test_sort_values_with_freq_datetimeindex(self, idx):
|
117 |
+
self.check_sort_values_with_freq(idx)
|
118 |
+
|
119 |
+
@pytest.mark.parametrize("freq", ["D", "2D", "4D"])
|
120 |
+
def test_sort_values_with_freq_periodindex(self, freq):
|
121 |
+
# here with_freq refers to being period_range-like
|
122 |
+
idx = PeriodIndex(
|
123 |
+
["2011-01-01", "2011-01-02", "2011-01-03"], freq=freq, name="idx"
|
124 |
+
)
|
125 |
+
self.check_sort_values_with_freq(idx)
|
126 |
+
|
127 |
+
@pytest.mark.parametrize(
|
128 |
+
"idx",
|
129 |
+
[
|
130 |
+
PeriodIndex(["2011", "2012", "2013"], name="pidx", freq="Y"),
|
131 |
+
Index([2011, 2012, 2013], name="idx"), # for compatibility check
|
132 |
+
],
|
133 |
+
)
|
134 |
+
def test_sort_values_with_freq_periodindex2(self, idx):
|
135 |
+
# here with_freq indicates this is period_range-like
|
136 |
+
self.check_sort_values_with_freq(idx)
|
137 |
+
|
138 |
+
def check_sort_values_without_freq(self, idx, expected):
|
139 |
+
ordered = idx.sort_values(na_position="first")
|
140 |
+
tm.assert_index_equal(ordered, expected)
|
141 |
+
check_freq_nonmonotonic(ordered, idx)
|
142 |
+
|
143 |
+
if not idx.isna().any():
|
144 |
+
ordered = idx.sort_values()
|
145 |
+
tm.assert_index_equal(ordered, expected)
|
146 |
+
check_freq_nonmonotonic(ordered, idx)
|
147 |
+
|
148 |
+
ordered = idx.sort_values(ascending=False)
|
149 |
+
tm.assert_index_equal(ordered, expected[::-1])
|
150 |
+
check_freq_nonmonotonic(ordered, idx)
|
151 |
+
|
152 |
+
ordered, indexer = idx.sort_values(return_indexer=True, na_position="first")
|
153 |
+
tm.assert_index_equal(ordered, expected)
|
154 |
+
|
155 |
+
exp = np.array([0, 4, 3, 1, 2], dtype=np.intp)
|
156 |
+
tm.assert_numpy_array_equal(indexer, exp)
|
157 |
+
check_freq_nonmonotonic(ordered, idx)
|
158 |
+
|
159 |
+
if not idx.isna().any():
|
160 |
+
ordered, indexer = idx.sort_values(return_indexer=True)
|
161 |
+
tm.assert_index_equal(ordered, expected)
|
162 |
+
|
163 |
+
exp = np.array([0, 4, 3, 1, 2], dtype=np.intp)
|
164 |
+
tm.assert_numpy_array_equal(indexer, exp)
|
165 |
+
check_freq_nonmonotonic(ordered, idx)
|
166 |
+
|
167 |
+
ordered, indexer = idx.sort_values(return_indexer=True, ascending=False)
|
168 |
+
tm.assert_index_equal(ordered, expected[::-1])
|
169 |
+
|
170 |
+
exp = np.array([2, 1, 3, 0, 4], dtype=np.intp)
|
171 |
+
tm.assert_numpy_array_equal(indexer, exp)
|
172 |
+
check_freq_nonmonotonic(ordered, idx)
|
173 |
+
|
174 |
+
def test_sort_values_without_freq_timedeltaindex(self):
|
175 |
+
# GH#10295
|
176 |
+
|
177 |
+
idx = TimedeltaIndex(
|
178 |
+
["1 hour", "3 hour", "5 hour", "2 hour ", "1 hour"], name="idx1"
|
179 |
+
)
|
180 |
+
expected = TimedeltaIndex(
|
181 |
+
["1 hour", "1 hour", "2 hour", "3 hour", "5 hour"], name="idx1"
|
182 |
+
)
|
183 |
+
self.check_sort_values_without_freq(idx, expected)
|
184 |
+
|
185 |
+
@pytest.mark.parametrize(
|
186 |
+
"index_dates,expected_dates",
|
187 |
+
[
|
188 |
+
(
|
189 |
+
["2011-01-01", "2011-01-03", "2011-01-05", "2011-01-02", "2011-01-01"],
|
190 |
+
["2011-01-01", "2011-01-01", "2011-01-02", "2011-01-03", "2011-01-05"],
|
191 |
+
),
|
192 |
+
(
|
193 |
+
["2011-01-01", "2011-01-03", "2011-01-05", "2011-01-02", "2011-01-01"],
|
194 |
+
["2011-01-01", "2011-01-01", "2011-01-02", "2011-01-03", "2011-01-05"],
|
195 |
+
),
|
196 |
+
(
|
197 |
+
[NaT, "2011-01-03", "2011-01-05", "2011-01-02", NaT],
|
198 |
+
[NaT, NaT, "2011-01-02", "2011-01-03", "2011-01-05"],
|
199 |
+
),
|
200 |
+
],
|
201 |
+
)
|
202 |
+
def test_sort_values_without_freq_datetimeindex(
|
203 |
+
self, index_dates, expected_dates, tz_naive_fixture
|
204 |
+
):
|
205 |
+
tz = tz_naive_fixture
|
206 |
+
|
207 |
+
# without freq
|
208 |
+
idx = DatetimeIndex(index_dates, tz=tz, name="idx")
|
209 |
+
expected = DatetimeIndex(expected_dates, tz=tz, name="idx")
|
210 |
+
|
211 |
+
self.check_sort_values_without_freq(idx, expected)
|
212 |
+
|
213 |
+
@pytest.mark.parametrize(
|
214 |
+
"idx,expected",
|
215 |
+
[
|
216 |
+
(
|
217 |
+
PeriodIndex(
|
218 |
+
[
|
219 |
+
"2011-01-01",
|
220 |
+
"2011-01-03",
|
221 |
+
"2011-01-05",
|
222 |
+
"2011-01-02",
|
223 |
+
"2011-01-01",
|
224 |
+
],
|
225 |
+
freq="D",
|
226 |
+
name="idx1",
|
227 |
+
),
|
228 |
+
PeriodIndex(
|
229 |
+
[
|
230 |
+
"2011-01-01",
|
231 |
+
"2011-01-01",
|
232 |
+
"2011-01-02",
|
233 |
+
"2011-01-03",
|
234 |
+
"2011-01-05",
|
235 |
+
],
|
236 |
+
freq="D",
|
237 |
+
name="idx1",
|
238 |
+
),
|
239 |
+
),
|
240 |
+
(
|
241 |
+
PeriodIndex(
|
242 |
+
[
|
243 |
+
"2011-01-01",
|
244 |
+
"2011-01-03",
|
245 |
+
"2011-01-05",
|
246 |
+
"2011-01-02",
|
247 |
+
"2011-01-01",
|
248 |
+
],
|
249 |
+
freq="D",
|
250 |
+
name="idx2",
|
251 |
+
),
|
252 |
+
PeriodIndex(
|
253 |
+
[
|
254 |
+
"2011-01-01",
|
255 |
+
"2011-01-01",
|
256 |
+
"2011-01-02",
|
257 |
+
"2011-01-03",
|
258 |
+
"2011-01-05",
|
259 |
+
],
|
260 |
+
freq="D",
|
261 |
+
name="idx2",
|
262 |
+
),
|
263 |
+
),
|
264 |
+
(
|
265 |
+
PeriodIndex(
|
266 |
+
[NaT, "2011-01-03", "2011-01-05", "2011-01-02", NaT],
|
267 |
+
freq="D",
|
268 |
+
name="idx3",
|
269 |
+
),
|
270 |
+
PeriodIndex(
|
271 |
+
[NaT, NaT, "2011-01-02", "2011-01-03", "2011-01-05"],
|
272 |
+
freq="D",
|
273 |
+
name="idx3",
|
274 |
+
),
|
275 |
+
),
|
276 |
+
(
|
277 |
+
PeriodIndex(
|
278 |
+
["2011", "2013", "2015", "2012", "2011"], name="pidx", freq="Y"
|
279 |
+
),
|
280 |
+
PeriodIndex(
|
281 |
+
["2011", "2011", "2012", "2013", "2015"], name="pidx", freq="Y"
|
282 |
+
),
|
283 |
+
),
|
284 |
+
(
|
285 |
+
# For compatibility check
|
286 |
+
Index([2011, 2013, 2015, 2012, 2011], name="idx"),
|
287 |
+
Index([2011, 2011, 2012, 2013, 2015], name="idx"),
|
288 |
+
),
|
289 |
+
],
|
290 |
+
)
|
291 |
+
def test_sort_values_without_freq_periodindex(self, idx, expected):
|
292 |
+
# here without_freq means not generateable by period_range
|
293 |
+
self.check_sort_values_without_freq(idx, expected)
|
294 |
+
|
295 |
+
def test_sort_values_without_freq_periodindex_nat(self):
|
296 |
+
# doesn't quite fit into check_sort_values_without_freq
|
297 |
+
idx = PeriodIndex(["2011", "2013", "NaT", "2011"], name="pidx", freq="D")
|
298 |
+
expected = PeriodIndex(["NaT", "2011", "2011", "2013"], name="pidx", freq="D")
|
299 |
+
|
300 |
+
ordered = idx.sort_values(na_position="first")
|
301 |
+
tm.assert_index_equal(ordered, expected)
|
302 |
+
check_freq_nonmonotonic(ordered, idx)
|
303 |
+
|
304 |
+
ordered = idx.sort_values(ascending=False)
|
305 |
+
tm.assert_index_equal(ordered, expected[::-1])
|
306 |
+
check_freq_nonmonotonic(ordered, idx)
|
307 |
+
|
308 |
+
|
309 |
+
def test_order_stability_compat():
|
310 |
+
# GH#35922. sort_values is stable both for normal and datetime-like Index
|
311 |
+
pidx = PeriodIndex(["2011", "2013", "2015", "2012", "2011"], name="pidx", freq="Y")
|
312 |
+
iidx = Index([2011, 2013, 2015, 2012, 2011], name="idx")
|
313 |
+
ordered1, indexer1 = pidx.sort_values(return_indexer=True, ascending=False)
|
314 |
+
ordered2, indexer2 = iidx.sort_values(return_indexer=True, ascending=False)
|
315 |
+
tm.assert_numpy_array_equal(indexer1, indexer2)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimelike_/test_value_counts.py
ADDED
@@ -0,0 +1,103 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
|
3 |
+
from pandas import (
|
4 |
+
DatetimeIndex,
|
5 |
+
NaT,
|
6 |
+
PeriodIndex,
|
7 |
+
Series,
|
8 |
+
TimedeltaIndex,
|
9 |
+
date_range,
|
10 |
+
period_range,
|
11 |
+
timedelta_range,
|
12 |
+
)
|
13 |
+
import pandas._testing as tm
|
14 |
+
|
15 |
+
|
16 |
+
class TestValueCounts:
|
17 |
+
# GH#7735
|
18 |
+
|
19 |
+
def test_value_counts_unique_datetimeindex(self, tz_naive_fixture):
|
20 |
+
tz = tz_naive_fixture
|
21 |
+
orig = date_range("2011-01-01 09:00", freq="h", periods=10, tz=tz)
|
22 |
+
self._check_value_counts_with_repeats(orig)
|
23 |
+
|
24 |
+
def test_value_counts_unique_timedeltaindex(self):
|
25 |
+
orig = timedelta_range("1 days 09:00:00", freq="h", periods=10)
|
26 |
+
self._check_value_counts_with_repeats(orig)
|
27 |
+
|
28 |
+
def test_value_counts_unique_periodindex(self):
|
29 |
+
orig = period_range("2011-01-01 09:00", freq="h", periods=10)
|
30 |
+
self._check_value_counts_with_repeats(orig)
|
31 |
+
|
32 |
+
def _check_value_counts_with_repeats(self, orig):
|
33 |
+
# create repeated values, 'n'th element is repeated by n+1 times
|
34 |
+
idx = type(orig)(
|
35 |
+
np.repeat(orig._values, range(1, len(orig) + 1)), dtype=orig.dtype
|
36 |
+
)
|
37 |
+
|
38 |
+
exp_idx = orig[::-1]
|
39 |
+
if not isinstance(exp_idx, PeriodIndex):
|
40 |
+
exp_idx = exp_idx._with_freq(None)
|
41 |
+
expected = Series(range(10, 0, -1), index=exp_idx, dtype="int64", name="count")
|
42 |
+
|
43 |
+
for obj in [idx, Series(idx)]:
|
44 |
+
tm.assert_series_equal(obj.value_counts(), expected)
|
45 |
+
|
46 |
+
tm.assert_index_equal(idx.unique(), orig)
|
47 |
+
|
48 |
+
def test_value_counts_unique_datetimeindex2(self, tz_naive_fixture):
|
49 |
+
tz = tz_naive_fixture
|
50 |
+
idx = DatetimeIndex(
|
51 |
+
[
|
52 |
+
"2013-01-01 09:00",
|
53 |
+
"2013-01-01 09:00",
|
54 |
+
"2013-01-01 09:00",
|
55 |
+
"2013-01-01 08:00",
|
56 |
+
"2013-01-01 08:00",
|
57 |
+
NaT,
|
58 |
+
],
|
59 |
+
tz=tz,
|
60 |
+
)
|
61 |
+
self._check_value_counts_dropna(idx)
|
62 |
+
|
63 |
+
def test_value_counts_unique_timedeltaindex2(self):
|
64 |
+
idx = TimedeltaIndex(
|
65 |
+
[
|
66 |
+
"1 days 09:00:00",
|
67 |
+
"1 days 09:00:00",
|
68 |
+
"1 days 09:00:00",
|
69 |
+
"1 days 08:00:00",
|
70 |
+
"1 days 08:00:00",
|
71 |
+
NaT,
|
72 |
+
]
|
73 |
+
)
|
74 |
+
self._check_value_counts_dropna(idx)
|
75 |
+
|
76 |
+
def test_value_counts_unique_periodindex2(self):
|
77 |
+
idx = PeriodIndex(
|
78 |
+
[
|
79 |
+
"2013-01-01 09:00",
|
80 |
+
"2013-01-01 09:00",
|
81 |
+
"2013-01-01 09:00",
|
82 |
+
"2013-01-01 08:00",
|
83 |
+
"2013-01-01 08:00",
|
84 |
+
NaT,
|
85 |
+
],
|
86 |
+
freq="h",
|
87 |
+
)
|
88 |
+
self._check_value_counts_dropna(idx)
|
89 |
+
|
90 |
+
def _check_value_counts_dropna(self, idx):
|
91 |
+
exp_idx = idx[[2, 3]]
|
92 |
+
expected = Series([3, 2], index=exp_idx, name="count")
|
93 |
+
|
94 |
+
for obj in [idx, Series(idx)]:
|
95 |
+
tm.assert_series_equal(obj.value_counts(), expected)
|
96 |
+
|
97 |
+
exp_idx = idx[[2, 3, -1]]
|
98 |
+
expected = Series([3, 2, 1], index=exp_idx, name="count")
|
99 |
+
|
100 |
+
for obj in [idx, Series(idx)]:
|
101 |
+
tm.assert_series_equal(obj.value_counts(dropna=False), expected)
|
102 |
+
|
103 |
+
tm.assert_index_equal(idx.unique(), exp_idx)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (203 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_arithmetic.cpython-310.pyc
ADDED
Binary file (2.02 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_constructors.cpython-310.pyc
ADDED
Binary file (32.7 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_date_range.cpython-310.pyc
ADDED
Binary file (50.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_datetime.cpython-310.pyc
ADDED
Binary file (6.93 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_freq_attr.cpython-310.pyc
ADDED
Binary file (1.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_iter.cpython-310.pyc
ADDED
Binary file (2.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_join.cpython-310.pyc
ADDED
Binary file (4.88 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_npfuncs.cpython-310.pyc
ADDED
Binary file (828 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_ops.cpython-310.pyc
ADDED
Binary file (2.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_partial_slicing.cpython-310.pyc
ADDED
Binary file (12.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_pickle.cpython-310.pyc
ADDED
Binary file (1.89 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_reindex.cpython-310.pyc
ADDED
Binary file (2.3 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/datetimes/__pycache__/test_scalar_compat.cpython-310.pyc
ADDED
Binary file (9.39 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (204 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_arithmetic.cpython-310.pyc
ADDED
Binary file (1.53 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_constructors.cpython-310.pyc
ADDED
Binary file (10.4 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_delete.cpython-310.pyc
ADDED
Binary file (2.14 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_formats.cpython-310.pyc
ADDED
Binary file (3.42 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_freq_attr.cpython-310.pyc
ADDED
Binary file (2.23 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_indexing.cpython-310.pyc
ADDED
Binary file (11.7 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_join.cpython-310.pyc
ADDED
Binary file (1.93 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_ops.cpython-310.pyc
ADDED
Binary file (803 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_pickle.cpython-310.pyc
ADDED
Binary file (765 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_scalar_compat.cpython-310.pyc
ADDED
Binary file (4.01 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_searchsorted.cpython-310.pyc
ADDED
Binary file (1.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_setops.cpython-310.pyc
ADDED
Binary file (7.11 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_timedelta.cpython-310.pyc
ADDED
Binary file (2.42 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/__pycache__/test_timedelta_range.cpython-310.pyc
ADDED
Binary file (5.36 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/methods/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/methods/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (212 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/timedeltas/methods/__pycache__/test_astype.cpython-310.pyc
ADDED
Binary file (5.94 kB). View file
|
|