Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/floating/__pycache__/conftest.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/floating/__pycache__/test_comparison.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__pycache__/test_formats.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__pycache__/test_interval.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_astype.py +28 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_formats.py +13 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_interval.py +231 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_interval_pyarrow.py +160 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_overlaps.py +93 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_arithmetic.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_arrow_compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_function.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_indexing.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/test_arithmetic.py +248 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/test_arrow_compat.py +209 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/test_indexing.py +60 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__pycache__/test_indexing.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__pycache__/test_numpy.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/test_indexing.py +41 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/test_numpy.py +324 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_arrow_compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_astype.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_constructors.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_reductions.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_arrow_compat.py +130 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_astype.py +67 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_constructors.py +156 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_reductions.py +42 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__pycache__/test_string.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__pycache__/test_string_arrow.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/test_string.py +703 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/test_string_arrow.py +265 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/test_constructors.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/test_cumulative.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/test_reductions.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/test_constructors.py +103 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/test_cumulative.py +20 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/test_reductions.py +218 -0
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/floating/__pycache__/conftest.cpython-310.pyc
ADDED
Binary file (1.48 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/floating/__pycache__/test_comparison.cpython-310.pyc
ADDED
Binary file (2.13 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (201 Bytes)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__pycache__/test_formats.cpython-310.pyc
ADDED
Binary file (551 Bytes)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/__pycache__/test_interval.cpython-310.pyc
ADDED
Binary file (7.53 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_astype.py
ADDED
@@ -0,0 +1,28 @@
import pytest

from pandas import (
    Categorical,
    CategoricalDtype,
    Index,
    IntervalIndex,
)
import pandas._testing as tm


class TestAstype:
    @pytest.mark.parametrize("ordered", [True, False])
    def test_astype_categorical_retains_ordered(self, ordered):
        index = IntervalIndex.from_breaks(range(5))
        arr = index._data

        dtype = CategoricalDtype(None, ordered=ordered)

        expected = Categorical(list(arr), ordered=ordered)
        result = arr.astype(dtype)
        assert result.ordered is ordered
        tm.assert_categorical_equal(result, expected)

        # test IntervalIndex.astype while we're at it.
        result = index.astype(dtype)
        expected = Index(expected)
        tm.assert_index_equal(result, expected)
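Not part of the diff: a minimal usage sketch of the behaviour exercised by test_astype_categorical_retains_ordered, assuming a recent pandas; the names are illustrative.

import pandas as pd

# Casting interval data to a categorical dtype keeps the requested orderedness.
idx = pd.IntervalIndex.from_breaks(range(5))
cat = idx.astype(pd.CategoricalDtype(ordered=True))
print(cat.dtype.ordered)  # True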
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_formats.py
ADDED
@@ -0,0 +1,13 @@
from pandas.core.arrays import IntervalArray


def test_repr():
    # GH#25022
    arr = IntervalArray.from_tuples([(0, 1), (1, 2)])
    result = repr(arr)
    expected = (
        "<IntervalArray>\n"
        "[(0, 1], (1, 2]]\n"
        "Length: 2, dtype: interval[int64, right]"
    )
    assert result == expected
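For quick reference, the repr asserted above can be reproduced like this (illustrative sketch, not part of the diff):

from pandas.core.arrays import IntervalArray

arr = IntervalArray.from_tuples([(0, 1), (1, 2)])
print(repr(arr))
# <IntervalArray>
# [(0, 1], (1, 2]]
# Length: 2, dtype: interval[int64, right]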
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_interval.py
ADDED
@@ -0,0 +1,231 @@
import numpy as np
import pytest

import pandas as pd
from pandas import (
    Index,
    Interval,
    IntervalIndex,
    Timedelta,
    Timestamp,
    date_range,
    timedelta_range,
)
import pandas._testing as tm
from pandas.core.arrays import IntervalArray


@pytest.fixture(
    params=[
        (Index([0, 2, 4]), Index([1, 3, 5])),
        (Index([0.0, 1.0, 2.0]), Index([1.0, 2.0, 3.0])),
        (timedelta_range("0 days", periods=3), timedelta_range("1 day", periods=3)),
        (date_range("20170101", periods=3), date_range("20170102", periods=3)),
        (
            date_range("20170101", periods=3, tz="US/Eastern"),
            date_range("20170102", periods=3, tz="US/Eastern"),
        ),
    ],
    ids=lambda x: str(x[0].dtype),
)
def left_right_dtypes(request):
    """
    Fixture for building an IntervalArray from various dtypes
    """
    return request.param


class TestAttributes:
    @pytest.mark.parametrize(
        "left, right",
        [
            (0, 1),
            (Timedelta("0 days"), Timedelta("1 day")),
            (Timestamp("2018-01-01"), Timestamp("2018-01-02")),
            (
                Timestamp("2018-01-01", tz="US/Eastern"),
                Timestamp("2018-01-02", tz="US/Eastern"),
            ),
        ],
    )
    @pytest.mark.parametrize("constructor", [IntervalArray, IntervalIndex])
    def test_is_empty(self, constructor, left, right, closed):
        # GH27219
        tuples = [(left, left), (left, right), np.nan]
        expected = np.array([closed != "both", False, False])
        result = constructor.from_tuples(tuples, closed=closed).is_empty
        tm.assert_numpy_array_equal(result, expected)


class TestMethods:
    @pytest.mark.parametrize("new_closed", ["left", "right", "both", "neither"])
    def test_set_closed(self, closed, new_closed):
        # GH 21670
        array = IntervalArray.from_breaks(range(10), closed=closed)
        result = array.set_closed(new_closed)
        expected = IntervalArray.from_breaks(range(10), closed=new_closed)
        tm.assert_extension_array_equal(result, expected)

    @pytest.mark.parametrize(
        "other",
        [
            Interval(0, 1, closed="right"),
            IntervalArray.from_breaks([1, 2, 3, 4], closed="right"),
        ],
    )
    def test_where_raises(self, other):
        # GH#45768 The IntervalArray methods raises; the Series method coerces
        ser = pd.Series(IntervalArray.from_breaks([1, 2, 3, 4], closed="left"))
        mask = np.array([True, False, True])
        match = "'value.closed' is 'right', expected 'left'."
        with pytest.raises(ValueError, match=match):
            ser.array._where(mask, other)

        res = ser.where(mask, other=other)
        expected = ser.astype(object).where(mask, other)
        tm.assert_series_equal(res, expected)

    def test_shift(self):
        # https://github.com/pandas-dev/pandas/issues/31495, GH#22428, GH#31502
        a = IntervalArray.from_breaks([1, 2, 3])
        result = a.shift()
        # int -> float
        expected = IntervalArray.from_tuples([(np.nan, np.nan), (1.0, 2.0)])
        tm.assert_interval_array_equal(result, expected)

        msg = "can only insert Interval objects and NA into an IntervalArray"
        with pytest.raises(TypeError, match=msg):
            a.shift(1, fill_value=pd.NaT)

    def test_shift_datetime(self):
        # GH#31502, GH#31504
        a = IntervalArray.from_breaks(date_range("2000", periods=4))
        result = a.shift(2)
        expected = a.take([-1, -1, 0], allow_fill=True)
        tm.assert_interval_array_equal(result, expected)

        result = a.shift(-1)
        expected = a.take([1, 2, -1], allow_fill=True)
        tm.assert_interval_array_equal(result, expected)

        msg = "can only insert Interval objects and NA into an IntervalArray"
        with pytest.raises(TypeError, match=msg):
            a.shift(1, fill_value=np.timedelta64("NaT", "ns"))


class TestSetitem:
    def test_set_na(self, left_right_dtypes):
        left, right = left_right_dtypes
        left = left.copy(deep=True)
        right = right.copy(deep=True)
        result = IntervalArray.from_arrays(left, right)

        if result.dtype.subtype.kind not in ["m", "M"]:
            msg = "'value' should be an interval type, got <.*NaTType'> instead."
            with pytest.raises(TypeError, match=msg):
                result[0] = pd.NaT
        if result.dtype.subtype.kind in ["i", "u"]:
            msg = "Cannot set float NaN to integer-backed IntervalArray"
            # GH#45484 TypeError, not ValueError, matches what we get with
            # non-NA un-holdable value.
            with pytest.raises(TypeError, match=msg):
                result[0] = np.nan
            return

        result[0] = np.nan

        expected_left = Index([left._na_value] + list(left[1:]))
        expected_right = Index([right._na_value] + list(right[1:]))
        expected = IntervalArray.from_arrays(expected_left, expected_right)

        tm.assert_extension_array_equal(result, expected)

    def test_setitem_mismatched_closed(self):
        arr = IntervalArray.from_breaks(range(4))
        orig = arr.copy()
        other = arr.set_closed("both")

        msg = "'value.closed' is 'both', expected 'right'"
        with pytest.raises(ValueError, match=msg):
            arr[0] = other[0]
        with pytest.raises(ValueError, match=msg):
            arr[:1] = other[:1]
        with pytest.raises(ValueError, match=msg):
            arr[:0] = other[:0]
        with pytest.raises(ValueError, match=msg):
            arr[:] = other[::-1]
        with pytest.raises(ValueError, match=msg):
            arr[:] = list(other[::-1])
        with pytest.raises(ValueError, match=msg):
            arr[:] = other[::-1].astype(object)
        with pytest.raises(ValueError, match=msg):
            arr[:] = other[::-1].astype("category")

        # empty list should be no-op
        arr[:0] = []
        tm.assert_interval_array_equal(arr, orig)


class TestReductions:
    def test_min_max_invalid_axis(self, left_right_dtypes):
        left, right = left_right_dtypes
        left = left.copy(deep=True)
        right = right.copy(deep=True)
        arr = IntervalArray.from_arrays(left, right)

        msg = "`axis` must be fewer than the number of dimensions"
        for axis in [-2, 1]:
            with pytest.raises(ValueError, match=msg):
                arr.min(axis=axis)
            with pytest.raises(ValueError, match=msg):
                arr.max(axis=axis)

        msg = "'>=' not supported between"
        with pytest.raises(TypeError, match=msg):
            arr.min(axis="foo")
        with pytest.raises(TypeError, match=msg):
            arr.max(axis="foo")

    def test_min_max(self, left_right_dtypes, index_or_series_or_array):
        # GH#44746
        left, right = left_right_dtypes
        left = left.copy(deep=True)
        right = right.copy(deep=True)
        arr = IntervalArray.from_arrays(left, right)

        # The expected results below are only valid if monotonic
        assert left.is_monotonic_increasing
        assert Index(arr).is_monotonic_increasing

        MIN = arr[0]
        MAX = arr[-1]

        indexer = np.arange(len(arr))
        np.random.default_rng(2).shuffle(indexer)
        arr = arr.take(indexer)

        arr_na = arr.insert(2, np.nan)

        arr = index_or_series_or_array(arr)
        arr_na = index_or_series_or_array(arr_na)

        for skipna in [True, False]:
            res = arr.min(skipna=skipna)
            assert res == MIN
            assert type(res) == type(MIN)

            res = arr.max(skipna=skipna)
            assert res == MAX
            assert type(res) == type(MAX)

        res = arr_na.min(skipna=False)
        assert np.isnan(res)
        res = arr_na.max(skipna=False)
        assert np.isnan(res)

        res = arr_na.min(skipna=True)
        assert res == MIN
        assert type(res) == type(MIN)
        res = arr_na.max(skipna=True)
        assert res == MAX
        assert type(res) == type(MAX)
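A brief, hedged sketch of two of the IntervalArray behaviours covered above (set_closed returns a new array with the same breaks; is_empty flags zero-width intervals); illustrative only, not part of the diff:

from pandas.core.arrays import IntervalArray

arr = IntervalArray.from_breaks(range(4), closed="right")       # (0, 1], (1, 2], (2, 3]
print(arr.set_closed("both").closed)                            # "both": same breaks, new closedness

zero_width = IntervalArray.from_tuples([(0, 0), (0, 1)], closed="right")
print(zero_width.is_empty)                                      # [ True False]: (0, 0] holds no points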
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_interval_pyarrow.py
ADDED
@@ -0,0 +1,160 @@
import numpy as np
import pytest

import pandas as pd
import pandas._testing as tm
from pandas.core.arrays import IntervalArray


def test_arrow_extension_type():
    pa = pytest.importorskip("pyarrow")

    from pandas.core.arrays.arrow.extension_types import ArrowIntervalType

    p1 = ArrowIntervalType(pa.int64(), "left")
    p2 = ArrowIntervalType(pa.int64(), "left")
    p3 = ArrowIntervalType(pa.int64(), "right")

    assert p1.closed == "left"
    assert p1 == p2
    assert p1 != p3
    assert hash(p1) == hash(p2)
    assert hash(p1) != hash(p3)


def test_arrow_array():
    pa = pytest.importorskip("pyarrow")

    from pandas.core.arrays.arrow.extension_types import ArrowIntervalType

    intervals = pd.interval_range(1, 5, freq=1).array

    result = pa.array(intervals)
    assert isinstance(result.type, ArrowIntervalType)
    assert result.type.closed == intervals.closed
    assert result.type.subtype == pa.int64()
    assert result.storage.field("left").equals(pa.array([1, 2, 3, 4], type="int64"))
    assert result.storage.field("right").equals(pa.array([2, 3, 4, 5], type="int64"))

    expected = pa.array([{"left": i, "right": i + 1} for i in range(1, 5)])
    assert result.storage.equals(expected)

    # convert to its storage type
    result = pa.array(intervals, type=expected.type)
    assert result.equals(expected)

    # unsupported conversions
    with pytest.raises(TypeError, match="Not supported to convert IntervalArray"):
        pa.array(intervals, type="float64")

    with pytest.raises(TypeError, match="Not supported to convert IntervalArray"):
        pa.array(intervals, type=ArrowIntervalType(pa.float64(), "left"))


def test_arrow_array_missing():
    pa = pytest.importorskip("pyarrow")

    from pandas.core.arrays.arrow.extension_types import ArrowIntervalType

    arr = IntervalArray.from_breaks([0.0, 1.0, 2.0, 3.0])
    arr[1] = None

    result = pa.array(arr)
    assert isinstance(result.type, ArrowIntervalType)
    assert result.type.closed == arr.closed
    assert result.type.subtype == pa.float64()

    # fields have missing values (not NaN)
    left = pa.array([0.0, None, 2.0], type="float64")
    right = pa.array([1.0, None, 3.0], type="float64")
    assert result.storage.field("left").equals(left)
    assert result.storage.field("right").equals(right)

    # structarray itself also has missing values on the array level
    vals = [
        {"left": 0.0, "right": 1.0},
        {"left": None, "right": None},
        {"left": 2.0, "right": 3.0},
    ]
    expected = pa.StructArray.from_pandas(vals, mask=np.array([False, True, False]))
    assert result.storage.equals(expected)


@pytest.mark.filterwarnings(
    "ignore:Passing a BlockManager to DataFrame:DeprecationWarning"
)
@pytest.mark.parametrize(
    "breaks",
    [[0.0, 1.0, 2.0, 3.0], pd.date_range("2017", periods=4, freq="D")],
    ids=["float", "datetime64[ns]"],
)
def test_arrow_table_roundtrip(breaks):
    pa = pytest.importorskip("pyarrow")

    from pandas.core.arrays.arrow.extension_types import ArrowIntervalType

    arr = IntervalArray.from_breaks(breaks)
    arr[1] = None
    df = pd.DataFrame({"a": arr})

    table = pa.table(df)
    assert isinstance(table.field("a").type, ArrowIntervalType)
    result = table.to_pandas()
    assert isinstance(result["a"].dtype, pd.IntervalDtype)
    tm.assert_frame_equal(result, df)

    table2 = pa.concat_tables([table, table])
    result = table2.to_pandas()
    expected = pd.concat([df, df], ignore_index=True)
    tm.assert_frame_equal(result, expected)

    # GH#41040
    table = pa.table(
        [pa.chunked_array([], type=table.column(0).type)], schema=table.schema
    )
    result = table.to_pandas()
    tm.assert_frame_equal(result, expected[0:0])


@pytest.mark.filterwarnings(
    "ignore:Passing a BlockManager to DataFrame:DeprecationWarning"
)
@pytest.mark.parametrize(
    "breaks",
    [[0.0, 1.0, 2.0, 3.0], pd.date_range("2017", periods=4, freq="D")],
    ids=["float", "datetime64[ns]"],
)
def test_arrow_table_roundtrip_without_metadata(breaks):
    pa = pytest.importorskip("pyarrow")

    arr = IntervalArray.from_breaks(breaks)
    arr[1] = None
    df = pd.DataFrame({"a": arr})

    table = pa.table(df)
    # remove the metadata
    table = table.replace_schema_metadata()
    assert table.schema.metadata is None

    result = table.to_pandas()
    assert isinstance(result["a"].dtype, pd.IntervalDtype)
    tm.assert_frame_equal(result, df)


def test_from_arrow_from_raw_struct_array():
    # in case pyarrow lost the Interval extension type (eg on parquet roundtrip
    # with datetime64[ns] subtype, see GH-45881), still allow conversion
    # from arrow to IntervalArray
    pa = pytest.importorskip("pyarrow")

    arr = pa.array([{"left": 0, "right": 1}, {"left": 1, "right": 2}])
    dtype = pd.IntervalDtype(np.dtype("int64"), closed="neither")

    result = dtype.__from_arrow__(arr)
    expected = IntervalArray.from_breaks(
        np.array([0, 1, 2], dtype="int64"), closed="neither"
    )
    tm.assert_extension_array_equal(result, expected)

    result = dtype.__from_arrow__(pa.chunked_array([arr]))
    tm.assert_extension_array_equal(result, expected)
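A minimal sketch of the pandas-to-pyarrow roundtrip these tests rely on (requires pyarrow; illustrative, not part of the diff):

import pandas as pd
import pyarrow as pa

df = pd.DataFrame({"a": pd.interval_range(0, 3).array})
table = pa.table(df)        # stored through pandas' Arrow interval extension type
back = table.to_pandas()
print(back["a"].dtype)      # interval[int64, right], recovered from the schema metadata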
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/interval/test_overlaps.py
ADDED
@@ -0,0 +1,93 @@
"""Tests for Interval-Interval operations, such as overlaps, contains, etc."""
import numpy as np
import pytest

from pandas import (
    Interval,
    IntervalIndex,
    Timedelta,
    Timestamp,
)
import pandas._testing as tm
from pandas.core.arrays import IntervalArray


@pytest.fixture(params=[IntervalArray, IntervalIndex])
def constructor(request):
    """
    Fixture for testing both interval container classes.
    """
    return request.param


@pytest.fixture(
    params=[
        (Timedelta("0 days"), Timedelta("1 day")),
        (Timestamp("2018-01-01"), Timedelta("1 day")),
        (0, 1),
    ],
    ids=lambda x: type(x[0]).__name__,
)
def start_shift(request):
    """
    Fixture for generating intervals of different types from a start value
    and a shift value that can be added to start to generate an endpoint.
    """
    return request.param


class TestOverlaps:
    def test_overlaps_interval(self, constructor, start_shift, closed, other_closed):
        start, shift = start_shift
        interval = Interval(start, start + 3 * shift, other_closed)

        # intervals: identical, nested, spanning, partial, adjacent, disjoint
        tuples = [
            (start, start + 3 * shift),
            (start + shift, start + 2 * shift),
            (start - shift, start + 4 * shift),
            (start + 2 * shift, start + 4 * shift),
            (start + 3 * shift, start + 4 * shift),
            (start + 4 * shift, start + 5 * shift),
        ]
        interval_container = constructor.from_tuples(tuples, closed)

        adjacent = interval.closed_right and interval_container.closed_left
        expected = np.array([True, True, True, True, adjacent, False])
        result = interval_container.overlaps(interval)
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize("other_constructor", [IntervalArray, IntervalIndex])
    def test_overlaps_interval_container(self, constructor, other_constructor):
        # TODO: modify this test when implemented
        interval_container = constructor.from_breaks(range(5))
        other_container = other_constructor.from_breaks(range(5))
        with pytest.raises(NotImplementedError, match="^$"):
            interval_container.overlaps(other_container)

    def test_overlaps_na(self, constructor, start_shift):
        """NA values are marked as False"""
        start, shift = start_shift
        interval = Interval(start, start + shift)

        tuples = [
            (start, start + shift),
            np.nan,
            (start + 2 * shift, start + 3 * shift),
        ]
        interval_container = constructor.from_tuples(tuples)

        expected = np.array([True, False, False])
        result = interval_container.overlaps(interval)
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize(
        "other",
        [10, True, "foo", Timedelta("1 day"), Timestamp("2018-01-01")],
        ids=lambda x: type(x).__name__,
    )
    def test_overlaps_invalid_type(self, constructor, other):
        interval_container = constructor.from_breaks(range(5))
        msg = f"`other` must be Interval-like, got {type(other).__name__}"
        with pytest.raises(TypeError, match=msg):
            interval_container.overlaps(other)
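For orientation, an illustrative overlaps() call under the semantics asserted above (shared endpoints only count when both intervals are closed there); a sketch, not part of the diff:

import pandas as pd

ii = pd.IntervalIndex.from_breaks(range(5))    # (0, 1], (1, 2], (2, 3], (3, 4]
print(ii.overlaps(pd.Interval(1, 3)))          # [False  True  True False]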
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (199 Bytes)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_arithmetic.cpython-310.pyc
ADDED
Binary file (6.49 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_arrow_compat.cpython-310.pyc
ADDED
Binary file (6.36 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_function.cpython-310.pyc
ADDED
Binary file (2.54 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/__pycache__/test_indexing.cpython-310.pyc
ADDED
Binary file (2.07 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/test_arithmetic.py
ADDED
@@ -0,0 +1,248 @@
from __future__ import annotations

from typing import Any

import numpy as np
import pytest

import pandas as pd
import pandas._testing as tm

# integer dtypes
arrays = [pd.array([1, 2, 3, None], dtype=dtype) for dtype in tm.ALL_INT_EA_DTYPES]
scalars: list[Any] = [2] * len(arrays)
# floating dtypes
arrays += [pd.array([0.1, 0.2, 0.3, None], dtype=dtype) for dtype in tm.FLOAT_EA_DTYPES]
scalars += [0.2, 0.2]
# boolean
arrays += [pd.array([True, False, True, None], dtype="boolean")]
scalars += [False]


@pytest.fixture(params=zip(arrays, scalars), ids=[a.dtype.name for a in arrays])
def data(request):
    """Fixture returning parametrized (array, scalar) tuple.

    Used to test equivalence of scalars, numpy arrays with array ops, and the
    equivalence of DataFrame and Series ops.
    """
    return request.param


def check_skip(data, op_name):
    if isinstance(data.dtype, pd.BooleanDtype) and "sub" in op_name:
        pytest.skip("subtract not implemented for boolean")


def is_bool_not_implemented(data, op_name):
    # match non-masked behavior
    return data.dtype.kind == "b" and op_name.strip("_").lstrip("r") in [
        "pow",
        "truediv",
        "floordiv",
    ]


# Test equivalence of scalars, numpy arrays with array ops
# -----------------------------------------------------------------------------


def test_array_scalar_like_equivalence(data, all_arithmetic_operators):
    data, scalar = data
    op = tm.get_op_from_name(all_arithmetic_operators)
    check_skip(data, all_arithmetic_operators)

    scalar_array = pd.array([scalar] * len(data), dtype=data.dtype)

    # TODO also add len-1 array (np.array([scalar], dtype=data.dtype.numpy_dtype))
    for scalar in [scalar, data.dtype.type(scalar)]:
        if is_bool_not_implemented(data, all_arithmetic_operators):
            msg = "operator '.*' not implemented for bool dtypes"
            with pytest.raises(NotImplementedError, match=msg):
                op(data, scalar)
            with pytest.raises(NotImplementedError, match=msg):
                op(data, scalar_array)
        else:
            result = op(data, scalar)
            expected = op(data, scalar_array)
            tm.assert_extension_array_equal(result, expected)


def test_array_NA(data, all_arithmetic_operators):
    data, _ = data
    op = tm.get_op_from_name(all_arithmetic_operators)
    check_skip(data, all_arithmetic_operators)

    scalar = pd.NA
    scalar_array = pd.array([pd.NA] * len(data), dtype=data.dtype)

    mask = data._mask.copy()

    if is_bool_not_implemented(data, all_arithmetic_operators):
        msg = "operator '.*' not implemented for bool dtypes"
        with pytest.raises(NotImplementedError, match=msg):
            op(data, scalar)
        # GH#45421 check op doesn't alter data._mask inplace
        tm.assert_numpy_array_equal(mask, data._mask)
        return

    result = op(data, scalar)
    # GH#45421 check op doesn't alter data._mask inplace
    tm.assert_numpy_array_equal(mask, data._mask)

    expected = op(data, scalar_array)
    tm.assert_numpy_array_equal(mask, data._mask)

    tm.assert_extension_array_equal(result, expected)


def test_numpy_array_equivalence(data, all_arithmetic_operators):
    data, scalar = data
    op = tm.get_op_from_name(all_arithmetic_operators)
    check_skip(data, all_arithmetic_operators)

    numpy_array = np.array([scalar] * len(data), dtype=data.dtype.numpy_dtype)
    pd_array = pd.array(numpy_array, dtype=data.dtype)

    if is_bool_not_implemented(data, all_arithmetic_operators):
        msg = "operator '.*' not implemented for bool dtypes"
        with pytest.raises(NotImplementedError, match=msg):
            op(data, numpy_array)
        with pytest.raises(NotImplementedError, match=msg):
            op(data, pd_array)
        return

    result = op(data, numpy_array)
    expected = op(data, pd_array)
    tm.assert_extension_array_equal(result, expected)


# Test equivalence with Series and DataFrame ops
# -----------------------------------------------------------------------------


def test_frame(data, all_arithmetic_operators):
    data, scalar = data
    op = tm.get_op_from_name(all_arithmetic_operators)
    check_skip(data, all_arithmetic_operators)

    # DataFrame with scalar
    df = pd.DataFrame({"A": data})

    if is_bool_not_implemented(data, all_arithmetic_operators):
        msg = "operator '.*' not implemented for bool dtypes"
        with pytest.raises(NotImplementedError, match=msg):
            op(df, scalar)
        with pytest.raises(NotImplementedError, match=msg):
            op(data, scalar)
        return

    result = op(df, scalar)
    expected = pd.DataFrame({"A": op(data, scalar)})
    tm.assert_frame_equal(result, expected)


def test_series(data, all_arithmetic_operators):
    data, scalar = data
    op = tm.get_op_from_name(all_arithmetic_operators)
    check_skip(data, all_arithmetic_operators)

    ser = pd.Series(data)

    others = [
        scalar,
        np.array([scalar] * len(data), dtype=data.dtype.numpy_dtype),
        pd.array([scalar] * len(data), dtype=data.dtype),
        pd.Series([scalar] * len(data), dtype=data.dtype),
    ]

    for other in others:
        if is_bool_not_implemented(data, all_arithmetic_operators):
            msg = "operator '.*' not implemented for bool dtypes"
            with pytest.raises(NotImplementedError, match=msg):
                op(ser, other)

        else:
            result = op(ser, other)
            expected = pd.Series(op(data, other))
            tm.assert_series_equal(result, expected)


# Test generic characteristics / errors
# -----------------------------------------------------------------------------


def test_error_invalid_object(data, all_arithmetic_operators):
    data, _ = data

    op = all_arithmetic_operators
    opa = getattr(data, op)

    # 2d -> return NotImplemented
    result = opa(pd.DataFrame({"A": data}))
    assert result is NotImplemented

    msg = r"can only perform ops with 1-d structures"
    with pytest.raises(NotImplementedError, match=msg):
        opa(np.arange(len(data)).reshape(-1, len(data)))


def test_error_len_mismatch(data, all_arithmetic_operators):
    # operating with a list-like with non-matching length raises
    data, scalar = data
    op = tm.get_op_from_name(all_arithmetic_operators)

    other = [scalar] * (len(data) - 1)

    err = ValueError
    msg = "|".join(
        [
            r"operands could not be broadcast together with shapes \(3,\) \(4,\)",
            r"operands could not be broadcast together with shapes \(4,\) \(3,\)",
        ]
    )
    if data.dtype.kind == "b" and all_arithmetic_operators.strip("_") in [
        "sub",
        "rsub",
    ]:
        err = TypeError
        msg = (
            r"numpy boolean subtract, the `\-` operator, is not supported, use "
            r"the bitwise_xor, the `\^` operator, or the logical_xor function instead"
        )
    elif is_bool_not_implemented(data, all_arithmetic_operators):
        msg = "operator '.*' not implemented for bool dtypes"
        err = NotImplementedError

    for other in [other, np.array(other)]:
        with pytest.raises(err, match=msg):
            op(data, other)

        s = pd.Series(data)
        with pytest.raises(err, match=msg):
            op(s, other)


@pytest.mark.parametrize("op", ["__neg__", "__abs__", "__invert__"])
def test_unary_op_does_not_propagate_mask(data, op):
    # https://github.com/pandas-dev/pandas/issues/39943
    data, _ = data
    ser = pd.Series(data)

    if op == "__invert__" and data.dtype.kind == "f":
        # we follow numpy in raising
        msg = "ufunc 'invert' not supported for the input types"
        with pytest.raises(TypeError, match=msg):
            getattr(ser, op)()
        with pytest.raises(TypeError, match=msg):
            getattr(data, op)()
        with pytest.raises(TypeError, match=msg):
            # Check that this is still the numpy behavior
            getattr(data._data, op)()

        return

    result = getattr(ser, op)()
    expected = result.copy(deep=True)
    ser[0] = None
    tm.assert_series_equal(result, expected)
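A short sketch of the masked-array arithmetic semantics under test (pd.NA propagates and the original mask is not mutated); illustrative only, not part of the diff:

import pandas as pd

a = pd.array([1, 2, None], dtype="Int64")
print(a + 1)      # [2, 3, <NA>]: the missing slot stays missing
print(a + pd.NA)  # [<NA>, <NA>, <NA>]: NA propagates without altering a's mask in place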
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/test_arrow_compat.py
ADDED
@@ -0,0 +1,209 @@
import numpy as np
import pytest

import pandas as pd
import pandas._testing as tm

pytestmark = pytest.mark.filterwarnings(
    "ignore:Passing a BlockManager to DataFrame:DeprecationWarning"
)

pa = pytest.importorskip("pyarrow")

from pandas.core.arrays.arrow._arrow_utils import pyarrow_array_to_numpy_and_mask

arrays = [pd.array([1, 2, 3, None], dtype=dtype) for dtype in tm.ALL_INT_EA_DTYPES]
arrays += [pd.array([0.1, 0.2, 0.3, None], dtype=dtype) for dtype in tm.FLOAT_EA_DTYPES]
arrays += [pd.array([True, False, True, None], dtype="boolean")]


@pytest.fixture(params=arrays, ids=[a.dtype.name for a in arrays])
def data(request):
    """
    Fixture returning parametrized array from given dtype, including integer,
    float and boolean
    """
    return request.param


def test_arrow_array(data):
    arr = pa.array(data)
    expected = pa.array(
        data.to_numpy(object, na_value=None),
        type=pa.from_numpy_dtype(data.dtype.numpy_dtype),
    )
    assert arr.equals(expected)


def test_arrow_roundtrip(data):
    df = pd.DataFrame({"a": data})
    table = pa.table(df)
    assert table.field("a").type == str(data.dtype.numpy_dtype)

    result = table.to_pandas()
    assert result["a"].dtype == data.dtype
    tm.assert_frame_equal(result, df)


def test_dataframe_from_arrow_types_mapper():
    def types_mapper(arrow_type):
        if pa.types.is_boolean(arrow_type):
            return pd.BooleanDtype()
        elif pa.types.is_integer(arrow_type):
            return pd.Int64Dtype()

    bools_array = pa.array([True, None, False], type=pa.bool_())
    ints_array = pa.array([1, None, 2], type=pa.int64())
    small_ints_array = pa.array([-1, 0, 7], type=pa.int8())
    record_batch = pa.RecordBatch.from_arrays(
        [bools_array, ints_array, small_ints_array], ["bools", "ints", "small_ints"]
    )
    result = record_batch.to_pandas(types_mapper=types_mapper)
    bools = pd.Series([True, None, False], dtype="boolean")
    ints = pd.Series([1, None, 2], dtype="Int64")
    small_ints = pd.Series([-1, 0, 7], dtype="Int64")
    expected = pd.DataFrame({"bools": bools, "ints": ints, "small_ints": small_ints})
    tm.assert_frame_equal(result, expected)


def test_arrow_load_from_zero_chunks(data):
    # GH-41040

    df = pd.DataFrame({"a": data[0:0]})
    table = pa.table(df)
    assert table.field("a").type == str(data.dtype.numpy_dtype)
    table = pa.table(
        [pa.chunked_array([], type=table.field("a").type)], schema=table.schema
    )
    result = table.to_pandas()
    assert result["a"].dtype == data.dtype
    tm.assert_frame_equal(result, df)


def test_arrow_from_arrow_uint():
    # https://github.com/pandas-dev/pandas/issues/31896
    # possible mismatch in types

    dtype = pd.UInt32Dtype()
    result = dtype.__from_arrow__(pa.array([1, 2, 3, 4, None], type="int64"))
    expected = pd.array([1, 2, 3, 4, None], dtype="UInt32")

    tm.assert_extension_array_equal(result, expected)


def test_arrow_sliced(data):
    # https://github.com/pandas-dev/pandas/issues/38525

    df = pd.DataFrame({"a": data})
    table = pa.table(df)
    result = table.slice(2, None).to_pandas()
    expected = df.iloc[2:].reset_index(drop=True)
    tm.assert_frame_equal(result, expected)

    # no missing values
    df2 = df.fillna(data[0])
    table = pa.table(df2)
    result = table.slice(2, None).to_pandas()
    expected = df2.iloc[2:].reset_index(drop=True)
    tm.assert_frame_equal(result, expected)


@pytest.fixture
def np_dtype_to_arrays(any_real_numpy_dtype):
    """
    Fixture returning actual and expected dtype, pandas and numpy arrays and
    mask from a given numpy dtype
    """
    np_dtype = np.dtype(any_real_numpy_dtype)
    pa_type = pa.from_numpy_dtype(np_dtype)

    # None ensures the creation of a bitmask buffer.
    pa_array = pa.array([0, 1, 2, None], type=pa_type)
    # Since masked Arrow buffer slots are not required to contain a specific
    # value, assert only the first three values of the created np.array
    np_expected = np.array([0, 1, 2], dtype=np_dtype)
    mask_expected = np.array([True, True, True, False])
    return np_dtype, pa_array, np_expected, mask_expected


def test_pyarrow_array_to_numpy_and_mask(np_dtype_to_arrays):
    """
    Test conversion from pyarrow array to numpy array.

    Modifies the pyarrow buffer to contain padding and offset, which are
    considered valid buffers by pyarrow.

    Also tests empty pyarrow arrays with non empty buffers.
    See https://github.com/pandas-dev/pandas/issues/40896
    """
    np_dtype, pa_array, np_expected, mask_expected = np_dtype_to_arrays
    data, mask = pyarrow_array_to_numpy_and_mask(pa_array, np_dtype)
    tm.assert_numpy_array_equal(data[:3], np_expected)
    tm.assert_numpy_array_equal(mask, mask_expected)

    mask_buffer = pa_array.buffers()[0]
    data_buffer = pa_array.buffers()[1]
    data_buffer_bytes = pa_array.buffers()[1].to_pybytes()

    # Add trailing padding to the buffer.
    data_buffer_trail = pa.py_buffer(data_buffer_bytes + b"\x00")
    pa_array_trail = pa.Array.from_buffers(
        type=pa_array.type,
        length=len(pa_array),
        buffers=[mask_buffer, data_buffer_trail],
        offset=pa_array.offset,
    )
    pa_array_trail.validate()
    data, mask = pyarrow_array_to_numpy_and_mask(pa_array_trail, np_dtype)
    tm.assert_numpy_array_equal(data[:3], np_expected)
    tm.assert_numpy_array_equal(mask, mask_expected)

    # Add offset to the buffer.
    offset = b"\x00" * (pa_array.type.bit_width // 8)
    data_buffer_offset = pa.py_buffer(offset + data_buffer_bytes)
    mask_buffer_offset = pa.py_buffer(b"\x0E")
    pa_array_offset = pa.Array.from_buffers(
        type=pa_array.type,
        length=len(pa_array),
        buffers=[mask_buffer_offset, data_buffer_offset],
        offset=pa_array.offset + 1,
    )
    pa_array_offset.validate()
    data, mask = pyarrow_array_to_numpy_and_mask(pa_array_offset, np_dtype)
    tm.assert_numpy_array_equal(data[:3], np_expected)
    tm.assert_numpy_array_equal(mask, mask_expected)

    # Empty array
    np_expected_empty = np.array([], dtype=np_dtype)
    mask_expected_empty = np.array([], dtype=np.bool_)

    pa_array_offset = pa.Array.from_buffers(
        type=pa_array.type,
        length=0,
        buffers=[mask_buffer, data_buffer],
        offset=pa_array.offset,
    )
    pa_array_offset.validate()
    data, mask = pyarrow_array_to_numpy_and_mask(pa_array_offset, np_dtype)
    tm.assert_numpy_array_equal(data[:3], np_expected_empty)
    tm.assert_numpy_array_equal(mask, mask_expected_empty)


@pytest.mark.parametrize(
    "arr", [pa.nulls(10), pa.chunked_array([pa.nulls(4), pa.nulls(6)])]
)
def test_from_arrow_null(data, arr):
    res = data.dtype.__from_arrow__(arr)
    assert res.isna().all()
    assert len(res) == 10


def test_from_arrow_type_error(data):
    # ensure that __from_arrow__ returns a TypeError when getting a wrong
    # array type

    arr = pa.array(data).cast("string")
    with pytest.raises(TypeError, match=None):
        # we don't test the exact error message, only the fact that it raises
        # a TypeError is relevant
        data.dtype.__from_arrow__(arr)
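An illustrative Arrow roundtrip for the nullable dtypes covered above (requires pyarrow; a sketch, not part of the diff):

import pandas as pd
import pyarrow as pa

df = pd.DataFrame({"a": pd.array([1, 2, None], dtype="Int64")})
table = pa.table(df)
print(table.schema.field("a").type)  # int64: the Arrow storage type
back = table.to_pandas()
print(back["a"].dtype)               # Int64: restored from the pandas metadata on the schema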
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/masked/test_indexing.py
ADDED
@@ -0,0 +1,60 @@
import re

import numpy as np
import pytest

import pandas as pd


class TestSetitemValidation:
    def _check_setitem_invalid(self, arr, invalid):
        msg = f"Invalid value '{str(invalid)}' for dtype {arr.dtype}"
        msg = re.escape(msg)
        with pytest.raises(TypeError, match=msg):
            arr[0] = invalid

        with pytest.raises(TypeError, match=msg):
            arr[:] = invalid

        with pytest.raises(TypeError, match=msg):
            arr[[0]] = invalid

        # FIXME: don't leave commented-out
        # with pytest.raises(TypeError):
        #    arr[[0]] = [invalid]

        # with pytest.raises(TypeError):
        #    arr[[0]] = np.array([invalid], dtype=object)

        # Series non-coercion, behavior subject to change
        ser = pd.Series(arr)
        with pytest.raises(TypeError, match=msg):
            ser[0] = invalid
            # TODO: so, so many other variants of this...

    _invalid_scalars = [
        1 + 2j,
        "True",
        "1",
        "1.0",
        pd.NaT,
        np.datetime64("NaT"),
        np.timedelta64("NaT"),
    ]

    @pytest.mark.parametrize(
        "invalid", _invalid_scalars + [1, 1.0, np.int64(1), np.float64(1)]
    )
    def test_setitem_validation_scalar_bool(self, invalid):
        arr = pd.array([True, False, None], dtype="boolean")
        self._check_setitem_invalid(arr, invalid)

    @pytest.mark.parametrize("invalid", _invalid_scalars + [True, 1.5, np.float64(1.5)])
    def test_setitem_validation_scalar_int(self, invalid, any_int_ea_dtype):
        arr = pd.array([1, 2, None], dtype=any_int_ea_dtype)
        self._check_setitem_invalid(arr, invalid)

    @pytest.mark.parametrize("invalid", _invalid_scalars + [True])
    def test_setitem_validation_scalar_float(self, invalid, float_ea_dtype):
        arr = pd.array([1, 2, None], dtype=float_ea_dtype)
        self._check_setitem_invalid(arr, invalid)
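A hedged sketch of the setitem validation exercised above: values the masked dtype cannot hold are rejected with a TypeError (illustrative, not part of the diff):

import pandas as pd

arr = pd.array([1, 2, None], dtype="Int64")
arr[0] = 5                     # fine: an integer fits an Int64 masked array
try:
    arr[0] = "not a number"    # rejected, mirroring _check_setitem_invalid above
except TypeError as exc:
    print(exc)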
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (199 Bytes)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__pycache__/test_indexing.cpython-310.pyc
ADDED
Binary file (1.96 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/__pycache__/test_numpy.cpython-310.pyc
ADDED
Binary file (8.24 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/test_indexing.py
ADDED
@@ -0,0 +1,41 @@
import numpy as np

from pandas.core.dtypes.common import is_scalar

import pandas as pd
import pandas._testing as tm


class TestSearchsorted:
    def test_searchsorted_string(self, string_dtype):
        arr = pd.array(["a", "b", "c"], dtype=string_dtype)

        result = arr.searchsorted("a", side="left")
        assert is_scalar(result)
        assert result == 0

        result = arr.searchsorted("a", side="right")
        assert is_scalar(result)
        assert result == 1

    def test_searchsorted_numeric_dtypes_scalar(self, any_real_numpy_dtype):
        arr = pd.array([1, 3, 90], dtype=any_real_numpy_dtype)
        result = arr.searchsorted(30)
        assert is_scalar(result)
        assert result == 2

        result = arr.searchsorted([30])
        expected = np.array([2], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    def test_searchsorted_numeric_dtypes_vector(self, any_real_numpy_dtype):
        arr = pd.array([1, 3, 90], dtype=any_real_numpy_dtype)
        result = arr.searchsorted([2, 30])
        expected = np.array([1, 2], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    def test_searchsorted_sorter(self, any_real_numpy_dtype):
        arr = pd.array([3, 1, 2], dtype=any_real_numpy_dtype)
        result = arr.searchsorted([0, 3], sorter=np.argsort(arr))
        expected = np.array([0, 2], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)
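For reference, searchsorted on a NumPy-backed pandas array returns a scalar for scalar input and an intp array for list-like input; a sketch mirroring the fixture's numpy dtypes (not part of the diff):

import numpy as np
import pandas as pd

arr = pd.array([1, 3, 90], dtype=np.dtype("int64"))  # NumPy-backed extension array
print(arr.searchsorted(30))                          # 2, a scalar
print(arr.searchsorted([2, 30]))                     # [1 2], dtype=intp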
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/numpy_/test_numpy.py
ADDED
@@ -0,0 +1,324 @@
"""
Additional tests for NumpyExtensionArray that aren't covered by
the interface tests.
"""
import numpy as np
import pytest

from pandas.core.dtypes.dtypes import NumpyEADtype

import pandas as pd
import pandas._testing as tm
from pandas.arrays import NumpyExtensionArray


@pytest.fixture(
    params=[
        np.array(["a", "b"], dtype=object),
        np.array([0, 1], dtype=float),
        np.array([0, 1], dtype=int),
        np.array([0, 1 + 2j], dtype=complex),
        np.array([True, False], dtype=bool),
        np.array([0, 1], dtype="datetime64[ns]"),
        np.array([0, 1], dtype="timedelta64[ns]"),
    ]
)
def any_numpy_array(request):
    """
    Parametrized fixture for NumPy arrays with different dtypes.

    This excludes string and bytes.
    """
    return request.param


# ----------------------------------------------------------------------------
# NumpyEADtype


@pytest.mark.parametrize(
    "dtype, expected",
    [
        ("bool", True),
        ("int", True),
        ("uint", True),
        ("float", True),
        ("complex", True),
        ("str", False),
        ("bytes", False),
        ("datetime64[ns]", False),
        ("object", False),
        ("void", False),
    ],
)
def test_is_numeric(dtype, expected):
    dtype = NumpyEADtype(dtype)
    assert dtype._is_numeric is expected


@pytest.mark.parametrize(
    "dtype, expected",
    [
        ("bool", True),
        ("int", False),
        ("uint", False),
        ("float", False),
        ("complex", False),
        ("str", False),
        ("bytes", False),
        ("datetime64[ns]", False),
        ("object", False),
        ("void", False),
    ],
)
def test_is_boolean(dtype, expected):
    dtype = NumpyEADtype(dtype)
    assert dtype._is_boolean is expected


def test_repr():
    dtype = NumpyEADtype(np.dtype("int64"))
    assert repr(dtype) == "NumpyEADtype('int64')"


def test_constructor_from_string():
    result = NumpyEADtype.construct_from_string("int64")
    expected = NumpyEADtype(np.dtype("int64"))
    assert result == expected


def test_dtype_idempotent(any_numpy_dtype):
    dtype = NumpyEADtype(any_numpy_dtype)

    result = NumpyEADtype(dtype)
    assert result == dtype


# ----------------------------------------------------------------------------
# Construction


def test_constructor_no_coercion():
    with pytest.raises(ValueError, match="NumPy array"):
        NumpyExtensionArray([1, 2, 3])


def test_series_constructor_with_copy():
    ndarray = np.array([1, 2, 3])
    ser = pd.Series(NumpyExtensionArray(ndarray), copy=True)

    assert ser.values is not ndarray


def test_series_constructor_with_astype():
    ndarray = np.array([1, 2, 3])
    result = pd.Series(NumpyExtensionArray(ndarray), dtype="float64")
    expected = pd.Series([1.0, 2.0, 3.0], dtype="float64")
    tm.assert_series_equal(result, expected)


def test_from_sequence_dtype():
    arr = np.array([1, 2, 3], dtype="int64")
    result = NumpyExtensionArray._from_sequence(arr, dtype="uint64")
    expected = NumpyExtensionArray(np.array([1, 2, 3], dtype="uint64"))
    tm.assert_extension_array_equal(result, expected)


def test_constructor_copy():
    arr = np.array([0, 1])
    result = NumpyExtensionArray(arr, copy=True)

    assert not tm.shares_memory(result, arr)


def test_constructor_with_data(any_numpy_array):
    nparr = any_numpy_array
    arr = NumpyExtensionArray(nparr)
    assert arr.dtype.numpy_dtype == nparr.dtype


# ----------------------------------------------------------------------------
from pandas.core.dtypes.dtypes import NumpyEADtype
|
9 |
+
|
10 |
+
import pandas as pd
|
11 |
+
import pandas._testing as tm
|
12 |
+
from pandas.arrays import NumpyExtensionArray
|
13 |
+
|
14 |
+
|
15 |
+
@pytest.fixture(
|
16 |
+
params=[
|
17 |
+
np.array(["a", "b"], dtype=object),
|
18 |
+
np.array([0, 1], dtype=float),
|
19 |
+
np.array([0, 1], dtype=int),
|
20 |
+
np.array([0, 1 + 2j], dtype=complex),
|
21 |
+
np.array([True, False], dtype=bool),
|
22 |
+
np.array([0, 1], dtype="datetime64[ns]"),
|
23 |
+
np.array([0, 1], dtype="timedelta64[ns]"),
|
24 |
+
]
|
25 |
+
)
|
26 |
+
def any_numpy_array(request):
|
27 |
+
"""
|
28 |
+
Parametrized fixture for NumPy arrays with different dtypes.
|
29 |
+
|
30 |
+
This excludes string and bytes.
|
31 |
+
"""
|
32 |
+
return request.param
|
33 |
+
|
34 |
+
|
35 |
+
# ----------------------------------------------------------------------------
|
36 |
+
# NumpyEADtype
|
37 |
+
|
38 |
+
|
39 |
+
@pytest.mark.parametrize(
|
40 |
+
"dtype, expected",
|
41 |
+
[
|
42 |
+
("bool", True),
|
43 |
+
("int", True),
|
44 |
+
("uint", True),
|
45 |
+
("float", True),
|
46 |
+
("complex", True),
|
47 |
+
("str", False),
|
48 |
+
("bytes", False),
|
49 |
+
("datetime64[ns]", False),
|
50 |
+
("object", False),
|
51 |
+
("void", False),
|
52 |
+
],
|
53 |
+
)
|
54 |
+
def test_is_numeric(dtype, expected):
|
55 |
+
dtype = NumpyEADtype(dtype)
|
56 |
+
assert dtype._is_numeric is expected
|
57 |
+
|
58 |
+
|
59 |
+
@pytest.mark.parametrize(
|
60 |
+
"dtype, expected",
|
61 |
+
[
|
62 |
+
("bool", True),
|
63 |
+
("int", False),
|
64 |
+
("uint", False),
|
65 |
+
("float", False),
|
66 |
+
("complex", False),
|
67 |
+
("str", False),
|
68 |
+
("bytes", False),
|
69 |
+
("datetime64[ns]", False),
|
70 |
+
("object", False),
|
71 |
+
("void", False),
|
72 |
+
],
|
73 |
+
)
|
74 |
+
def test_is_boolean(dtype, expected):
|
75 |
+
dtype = NumpyEADtype(dtype)
|
76 |
+
assert dtype._is_boolean is expected
|
77 |
+
|
78 |
+
|
79 |
+
def test_repr():
|
80 |
+
dtype = NumpyEADtype(np.dtype("int64"))
|
81 |
+
assert repr(dtype) == "NumpyEADtype('int64')"
|
82 |
+
|
83 |
+
|
84 |
+
def test_constructor_from_string():
|
85 |
+
result = NumpyEADtype.construct_from_string("int64")
|
86 |
+
expected = NumpyEADtype(np.dtype("int64"))
|
87 |
+
assert result == expected
|
88 |
+
|
89 |
+
|
90 |
+
def test_dtype_idempotent(any_numpy_dtype):
|
91 |
+
dtype = NumpyEADtype(any_numpy_dtype)
|
92 |
+
|
93 |
+
result = NumpyEADtype(dtype)
|
94 |
+
assert result == dtype
|
95 |
+
|
96 |
+
|
97 |
+
# ----------------------------------------------------------------------------
|
98 |
+
# Construction
|
99 |
+
|
100 |
+
|
101 |
+
def test_constructor_no_coercion():
|
102 |
+
with pytest.raises(ValueError, match="NumPy array"):
|
103 |
+
NumpyExtensionArray([1, 2, 3])
|
104 |
+
|
105 |
+
|
106 |
+
def test_series_constructor_with_copy():
|
107 |
+
ndarray = np.array([1, 2, 3])
|
108 |
+
ser = pd.Series(NumpyExtensionArray(ndarray), copy=True)
|
109 |
+
|
110 |
+
assert ser.values is not ndarray
|
111 |
+
|
112 |
+
|
113 |
+
def test_series_constructor_with_astype():
|
114 |
+
ndarray = np.array([1, 2, 3])
|
115 |
+
result = pd.Series(NumpyExtensionArray(ndarray), dtype="float64")
|
116 |
+
expected = pd.Series([1.0, 2.0, 3.0], dtype="float64")
|
117 |
+
tm.assert_series_equal(result, expected)
|
118 |
+
|
119 |
+
|
120 |
+
def test_from_sequence_dtype():
|
121 |
+
arr = np.array([1, 2, 3], dtype="int64")
|
122 |
+
result = NumpyExtensionArray._from_sequence(arr, dtype="uint64")
|
123 |
+
expected = NumpyExtensionArray(np.array([1, 2, 3], dtype="uint64"))
|
124 |
+
tm.assert_extension_array_equal(result, expected)
|
125 |
+
|
126 |
+
|
127 |
+
def test_constructor_copy():
|
128 |
+
arr = np.array([0, 1])
|
129 |
+
result = NumpyExtensionArray(arr, copy=True)
|
130 |
+
|
131 |
+
assert not tm.shares_memory(result, arr)
|
132 |
+
|
133 |
+
|
134 |
+
def test_constructor_with_data(any_numpy_array):
|
135 |
+
nparr = any_numpy_array
|
136 |
+
arr = NumpyExtensionArray(nparr)
|
137 |
+
assert arr.dtype.numpy_dtype == nparr.dtype
|
138 |
+
|
139 |
+
|
140 |
+
# ----------------------------------------------------------------------------
|
141 |
+
# Conversion
|
142 |
+
|
143 |
+
|
144 |
+
def test_to_numpy():
|
145 |
+
arr = NumpyExtensionArray(np.array([1, 2, 3]))
|
146 |
+
result = arr.to_numpy()
|
147 |
+
assert result is arr._ndarray
|
148 |
+
|
149 |
+
result = arr.to_numpy(copy=True)
|
150 |
+
assert result is not arr._ndarray
|
151 |
+
|
152 |
+
result = arr.to_numpy(dtype="f8")
|
153 |
+
expected = np.array([1, 2, 3], dtype="f8")
|
154 |
+
tm.assert_numpy_array_equal(result, expected)
|
155 |
+
|
156 |
+
|
157 |
+
# ----------------------------------------------------------------------------
|
158 |
+
# Setitem
|
159 |
+
|
160 |
+
|
161 |
+
def test_setitem_series():
|
162 |
+
ser = pd.Series([1, 2, 3])
|
163 |
+
ser.array[0] = 10
|
164 |
+
expected = pd.Series([10, 2, 3])
|
165 |
+
tm.assert_series_equal(ser, expected)
|
166 |
+
|
167 |
+
|
168 |
+
def test_setitem(any_numpy_array):
|
169 |
+
nparr = any_numpy_array
|
170 |
+
arr = NumpyExtensionArray(nparr, copy=True)
|
171 |
+
|
172 |
+
arr[0] = arr[1]
|
173 |
+
nparr[0] = nparr[1]
|
174 |
+
|
175 |
+
tm.assert_numpy_array_equal(arr.to_numpy(), nparr)
|
176 |
+
|
177 |
+
|
178 |
+
# ----------------------------------------------------------------------------
|
179 |
+
# Reductions
|
180 |
+
|
181 |
+
|
182 |
+
def test_bad_reduce_raises():
|
183 |
+
arr = np.array([1, 2, 3], dtype="int64")
|
184 |
+
arr = NumpyExtensionArray(arr)
|
185 |
+
msg = "cannot perform not_a_method with type int"
|
186 |
+
with pytest.raises(TypeError, match=msg):
|
187 |
+
arr._reduce(msg)
|
188 |
+
|
189 |
+
|
190 |
+
def test_validate_reduction_keyword_args():
|
191 |
+
arr = NumpyExtensionArray(np.array([1, 2, 3]))
|
192 |
+
msg = "the 'keepdims' parameter is not supported .*all"
|
193 |
+
with pytest.raises(ValueError, match=msg):
|
194 |
+
arr.all(keepdims=True)
|
195 |
+
|
196 |
+
|
197 |
+
def test_np_max_nested_tuples():
|
198 |
+
# case where checking in ufunc.nout works while checking for tuples
|
199 |
+
# does not
|
200 |
+
vals = [
|
201 |
+
(("j", "k"), ("l", "m")),
|
202 |
+
(("l", "m"), ("o", "p")),
|
203 |
+
(("o", "p"), ("j", "k")),
|
204 |
+
]
|
205 |
+
ser = pd.Series(vals)
|
206 |
+
arr = ser.array
|
207 |
+
|
208 |
+
assert arr.max() is arr[2]
|
209 |
+
assert ser.max() is arr[2]
|
210 |
+
|
211 |
+
result = np.maximum.reduce(arr)
|
212 |
+
assert result == arr[2]
|
213 |
+
|
214 |
+
result = np.maximum.reduce(ser)
|
215 |
+
assert result == arr[2]
|
216 |
+
|
217 |
+
|
218 |
+
def test_np_reduce_2d():
|
219 |
+
raw = np.arange(12).reshape(4, 3)
|
220 |
+
arr = NumpyExtensionArray(raw)
|
221 |
+
|
222 |
+
res = np.maximum.reduce(arr, axis=0)
|
223 |
+
tm.assert_extension_array_equal(res, arr[-1])
|
224 |
+
|
225 |
+
alt = arr.max(axis=0)
|
226 |
+
tm.assert_extension_array_equal(alt, arr[-1])
|
227 |
+
|
228 |
+
|
229 |
+
# ----------------------------------------------------------------------------
|
230 |
+
# Ops
|
231 |
+
|
232 |
+
|
233 |
+
@pytest.mark.parametrize("ufunc", [np.abs, np.negative, np.positive])
|
234 |
+
def test_ufunc_unary(ufunc):
|
235 |
+
arr = NumpyExtensionArray(np.array([-1.0, 0.0, 1.0]))
|
236 |
+
result = ufunc(arr)
|
237 |
+
expected = NumpyExtensionArray(ufunc(arr._ndarray))
|
238 |
+
tm.assert_extension_array_equal(result, expected)
|
239 |
+
|
240 |
+
# same thing but with the 'out' keyword
|
241 |
+
out = NumpyExtensionArray(np.array([-9.0, -9.0, -9.0]))
|
242 |
+
ufunc(arr, out=out)
|
243 |
+
tm.assert_extension_array_equal(out, expected)
|
244 |
+
|
245 |
+
|
246 |
+
def test_ufunc():
|
247 |
+
arr = NumpyExtensionArray(np.array([-1.0, 0.0, 1.0]))
|
248 |
+
|
249 |
+
r1, r2 = np.divmod(arr, np.add(arr, 2))
|
250 |
+
e1, e2 = np.divmod(arr._ndarray, np.add(arr._ndarray, 2))
|
251 |
+
e1 = NumpyExtensionArray(e1)
|
252 |
+
e2 = NumpyExtensionArray(e2)
|
253 |
+
tm.assert_extension_array_equal(r1, e1)
|
254 |
+
tm.assert_extension_array_equal(r2, e2)
|
255 |
+
|
256 |
+
|
257 |
+
def test_basic_binop():
|
258 |
+
# Just a basic smoke test. The EA interface tests exercise this
|
259 |
+
# more thoroughly.
|
260 |
+
x = NumpyExtensionArray(np.array([1, 2, 3]))
|
261 |
+
result = x + x
|
262 |
+
expected = NumpyExtensionArray(np.array([2, 4, 6]))
|
263 |
+
tm.assert_extension_array_equal(result, expected)
|
264 |
+
|
265 |
+
|
266 |
+
@pytest.mark.parametrize("dtype", [None, object])
|
267 |
+
def test_setitem_object_typecode(dtype):
|
268 |
+
arr = NumpyExtensionArray(np.array(["a", "b", "c"], dtype=dtype))
|
269 |
+
arr[0] = "t"
|
270 |
+
expected = NumpyExtensionArray(np.array(["t", "b", "c"], dtype=dtype))
|
271 |
+
tm.assert_extension_array_equal(arr, expected)
|
272 |
+
|
273 |
+
|
274 |
+
def test_setitem_no_coercion():
|
275 |
+
# https://github.com/pandas-dev/pandas/issues/28150
|
276 |
+
arr = NumpyExtensionArray(np.array([1, 2, 3]))
|
277 |
+
with pytest.raises(ValueError, match="int"):
|
278 |
+
arr[0] = "a"
|
279 |
+
|
280 |
+
# With a value that we do coerce, check that we coerce the value
|
281 |
+
# and not the underlying array.
|
282 |
+
arr[0] = 2.5
|
283 |
+
assert isinstance(arr[0], (int, np.integer)), type(arr[0])
|
284 |
+
|
285 |
+
|
286 |
+
def test_setitem_preserves_views():
|
287 |
+
# GH#28150, see also extension test of the same name
|
288 |
+
arr = NumpyExtensionArray(np.array([1, 2, 3]))
|
289 |
+
view1 = arr.view()
|
290 |
+
view2 = arr[:]
|
291 |
+
view3 = np.asarray(arr)
|
292 |
+
|
293 |
+
arr[0] = 9
|
294 |
+
assert view1[0] == 9
|
295 |
+
assert view2[0] == 9
|
296 |
+
assert view3[0] == 9
|
297 |
+
|
298 |
+
arr[-1] = 2.5
|
299 |
+
view1[-1] = 5
|
300 |
+
assert arr[-1] == 5
|
301 |
+
|
302 |
+
|
303 |
+
@pytest.mark.parametrize("dtype", [np.int64, np.uint64])
|
304 |
+
def test_quantile_empty(dtype):
|
305 |
+
# we should get back np.nans, not -1s
|
306 |
+
arr = NumpyExtensionArray(np.array([], dtype=dtype))
|
307 |
+
idx = pd.Index([0.0, 0.5])
|
308 |
+
|
309 |
+
result = arr._quantile(idx, interpolation="linear")
|
310 |
+
expected = NumpyExtensionArray(np.array([np.nan, np.nan]))
|
311 |
+
tm.assert_extension_array_equal(result, expected)
|
312 |
+
|
313 |
+
|
314 |
+
def test_factorize_unsigned():
|
315 |
+
# don't raise when calling factorize on unsigned int NumpyExtensionArray
|
316 |
+
arr = np.array([1, 2, 3], dtype=np.uint64)
|
317 |
+
obj = NumpyExtensionArray(arr)
|
318 |
+
|
319 |
+
res_codes, res_unique = obj.factorize()
|
320 |
+
exp_codes, exp_unique = pd.factorize(arr)
|
321 |
+
|
322 |
+
tm.assert_numpy_array_equal(res_codes, exp_codes)
|
323 |
+
|
324 |
+
tm.assert_extension_array_equal(res_unique, NumpyExtensionArray(exp_unique))
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (199 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_arrow_compat.cpython-310.pyc
ADDED
Binary file (3.69 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_astype.cpython-310.pyc
ADDED
Binary file (2.35 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_constructors.cpython-310.pyc
ADDED
Binary file (5.38 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/__pycache__/test_reductions.cpython-310.pyc
ADDED
Binary file (1.22 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_arrow_compat.py
ADDED
@@ -0,0 +1,130 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
from pandas.compat.pyarrow import pa_version_under10p1
|
4 |
+
|
5 |
+
from pandas.core.dtypes.dtypes import PeriodDtype
|
6 |
+
|
7 |
+
import pandas as pd
|
8 |
+
import pandas._testing as tm
|
9 |
+
from pandas.core.arrays import (
|
10 |
+
PeriodArray,
|
11 |
+
period_array,
|
12 |
+
)
|
13 |
+
|
14 |
+
pytestmark = pytest.mark.filterwarnings(
|
15 |
+
"ignore:Passing a BlockManager to DataFrame:DeprecationWarning"
|
16 |
+
)
|
17 |
+
|
18 |
+
|
19 |
+
pa = pytest.importorskip("pyarrow")
|
20 |
+
|
21 |
+
|
22 |
+
def test_arrow_extension_type():
|
23 |
+
from pandas.core.arrays.arrow.extension_types import ArrowPeriodType
|
24 |
+
|
25 |
+
p1 = ArrowPeriodType("D")
|
26 |
+
p2 = ArrowPeriodType("D")
|
27 |
+
p3 = ArrowPeriodType("M")
|
28 |
+
|
29 |
+
assert p1.freq == "D"
|
30 |
+
assert p1 == p2
|
31 |
+
assert p1 != p3
|
32 |
+
assert hash(p1) == hash(p2)
|
33 |
+
assert hash(p1) != hash(p3)
|
34 |
+
|
35 |
+
|
36 |
+
@pytest.mark.xfail(not pa_version_under10p1, reason="Wrong behavior with pyarrow 10")
|
37 |
+
@pytest.mark.parametrize(
|
38 |
+
"data, freq",
|
39 |
+
[
|
40 |
+
(pd.date_range("2017", periods=3), "D"),
|
41 |
+
(pd.date_range("2017", periods=3, freq="YE"), "Y-DEC"),
|
42 |
+
],
|
43 |
+
)
|
44 |
+
def test_arrow_array(data, freq):
|
45 |
+
from pandas.core.arrays.arrow.extension_types import ArrowPeriodType
|
46 |
+
|
47 |
+
periods = period_array(data, freq=freq)
|
48 |
+
result = pa.array(periods)
|
49 |
+
assert isinstance(result.type, ArrowPeriodType)
|
50 |
+
assert result.type.freq == freq
|
51 |
+
expected = pa.array(periods.asi8, type="int64")
|
52 |
+
assert result.storage.equals(expected)
|
53 |
+
|
54 |
+
# convert to its storage type
|
55 |
+
result = pa.array(periods, type=pa.int64())
|
56 |
+
assert result.equals(expected)
|
57 |
+
|
58 |
+
# unsupported conversions
|
59 |
+
msg = "Not supported to convert PeriodArray to 'double' type"
|
60 |
+
with pytest.raises(TypeError, match=msg):
|
61 |
+
pa.array(periods, type="float64")
|
62 |
+
|
63 |
+
with pytest.raises(TypeError, match="different 'freq'"):
|
64 |
+
pa.array(periods, type=ArrowPeriodType("T"))
|
65 |
+
|
66 |
+
|
67 |
+
def test_arrow_array_missing():
|
68 |
+
from pandas.core.arrays.arrow.extension_types import ArrowPeriodType
|
69 |
+
|
70 |
+
arr = PeriodArray([1, 2, 3], dtype="period[D]")
|
71 |
+
arr[1] = pd.NaT
|
72 |
+
|
73 |
+
result = pa.array(arr)
|
74 |
+
assert isinstance(result.type, ArrowPeriodType)
|
75 |
+
assert result.type.freq == "D"
|
76 |
+
expected = pa.array([1, None, 3], type="int64")
|
77 |
+
assert result.storage.equals(expected)
|
78 |
+
|
79 |
+
|
80 |
+
def test_arrow_table_roundtrip():
|
81 |
+
from pandas.core.arrays.arrow.extension_types import ArrowPeriodType
|
82 |
+
|
83 |
+
arr = PeriodArray([1, 2, 3], dtype="period[D]")
|
84 |
+
arr[1] = pd.NaT
|
85 |
+
df = pd.DataFrame({"a": arr})
|
86 |
+
|
87 |
+
table = pa.table(df)
|
88 |
+
assert isinstance(table.field("a").type, ArrowPeriodType)
|
89 |
+
result = table.to_pandas()
|
90 |
+
assert isinstance(result["a"].dtype, PeriodDtype)
|
91 |
+
tm.assert_frame_equal(result, df)
|
92 |
+
|
93 |
+
table2 = pa.concat_tables([table, table])
|
94 |
+
result = table2.to_pandas()
|
95 |
+
expected = pd.concat([df, df], ignore_index=True)
|
96 |
+
tm.assert_frame_equal(result, expected)
|
97 |
+
|
98 |
+
|
99 |
+
def test_arrow_load_from_zero_chunks():
|
100 |
+
# GH-41040
|
101 |
+
|
102 |
+
from pandas.core.arrays.arrow.extension_types import ArrowPeriodType
|
103 |
+
|
104 |
+
arr = PeriodArray([], dtype="period[D]")
|
105 |
+
df = pd.DataFrame({"a": arr})
|
106 |
+
|
107 |
+
table = pa.table(df)
|
108 |
+
assert isinstance(table.field("a").type, ArrowPeriodType)
|
109 |
+
table = pa.table(
|
110 |
+
[pa.chunked_array([], type=table.column(0).type)], schema=table.schema
|
111 |
+
)
|
112 |
+
|
113 |
+
result = table.to_pandas()
|
114 |
+
assert isinstance(result["a"].dtype, PeriodDtype)
|
115 |
+
tm.assert_frame_equal(result, df)
|
116 |
+
|
117 |
+
|
118 |
+
def test_arrow_table_roundtrip_without_metadata():
|
119 |
+
arr = PeriodArray([1, 2, 3], dtype="period[h]")
|
120 |
+
arr[1] = pd.NaT
|
121 |
+
df = pd.DataFrame({"a": arr})
|
122 |
+
|
123 |
+
table = pa.table(df)
|
124 |
+
# remove the metadata
|
125 |
+
table = table.replace_schema_metadata()
|
126 |
+
assert table.schema.metadata is None
|
127 |
+
|
128 |
+
result = table.to_pandas()
|
129 |
+
assert isinstance(result["a"].dtype, PeriodDtype)
|
130 |
+
tm.assert_frame_equal(result, df)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_astype.py
ADDED
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas.core.dtypes.dtypes import PeriodDtype
|
5 |
+
|
6 |
+
import pandas as pd
|
7 |
+
import pandas._testing as tm
|
8 |
+
from pandas.core.arrays import period_array
|
9 |
+
|
10 |
+
|
11 |
+
@pytest.mark.parametrize("dtype", [int, np.int32, np.int64, "uint32", "uint64"])
|
12 |
+
def test_astype_int(dtype):
|
13 |
+
# We choose to ignore the sign and size of integers for
|
14 |
+
# Period/Datetime/Timedelta astype
|
15 |
+
arr = period_array(["2000", "2001", None], freq="D")
|
16 |
+
|
17 |
+
if np.dtype(dtype) != np.int64:
|
18 |
+
with pytest.raises(TypeError, match=r"Do obj.astype\('int64'\)"):
|
19 |
+
arr.astype(dtype)
|
20 |
+
return
|
21 |
+
|
22 |
+
result = arr.astype(dtype)
|
23 |
+
expected = arr._ndarray.view("i8")
|
24 |
+
tm.assert_numpy_array_equal(result, expected)
|
25 |
+
|
26 |
+
|
27 |
+
def test_astype_copies():
|
28 |
+
arr = period_array(["2000", "2001", None], freq="D")
|
29 |
+
result = arr.astype(np.int64, copy=False)
|
30 |
+
|
31 |
+
# Add the `.base`, since we now use `.asi8` which returns a view.
|
32 |
+
# We could maybe override it in PeriodArray to return ._ndarray directly.
|
33 |
+
assert result.base is arr._ndarray
|
34 |
+
|
35 |
+
result = arr.astype(np.int64, copy=True)
|
36 |
+
assert result is not arr._ndarray
|
37 |
+
tm.assert_numpy_array_equal(result, arr._ndarray.view("i8"))
|
38 |
+
|
39 |
+
|
40 |
+
def test_astype_categorical():
|
41 |
+
arr = period_array(["2000", "2001", "2001", None], freq="D")
|
42 |
+
result = arr.astype("category")
|
43 |
+
categories = pd.PeriodIndex(["2000", "2001"], freq="D")
|
44 |
+
expected = pd.Categorical.from_codes([0, 1, 1, -1], categories=categories)
|
45 |
+
tm.assert_categorical_equal(result, expected)
|
46 |
+
|
47 |
+
|
48 |
+
def test_astype_period():
|
49 |
+
arr = period_array(["2000", "2001", None], freq="D")
|
50 |
+
result = arr.astype(PeriodDtype("M"))
|
51 |
+
expected = period_array(["2000", "2001", None], freq="M")
|
52 |
+
tm.assert_period_array_equal(result, expected)
|
53 |
+
|
54 |
+
|
55 |
+
@pytest.mark.parametrize("dtype", ["datetime64[ns]", "timedelta64[ns]"])
|
56 |
+
def test_astype_datetime(dtype):
|
57 |
+
arr = period_array(["2000", "2001", None], freq="D")
|
58 |
+
# slice off the [ns] so that the regex matches.
|
59 |
+
if dtype == "timedelta64[ns]":
|
60 |
+
with pytest.raises(TypeError, match=dtype[:-4]):
|
61 |
+
arr.astype(dtype)
|
62 |
+
|
63 |
+
else:
|
64 |
+
# GH#45038 allow period->dt64 because we allow dt64->period
|
65 |
+
result = arr.astype(dtype)
|
66 |
+
expected = pd.DatetimeIndex(["2000", "2001", pd.NaT], dtype=dtype)._data
|
67 |
+
tm.assert_datetime_array_equal(result, expected)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_constructors.py
ADDED
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas._libs.tslibs import iNaT
|
5 |
+
from pandas._libs.tslibs.offsets import MonthEnd
|
6 |
+
from pandas._libs.tslibs.period import IncompatibleFrequency
|
7 |
+
|
8 |
+
import pandas as pd
|
9 |
+
import pandas._testing as tm
|
10 |
+
from pandas.core.arrays import (
|
11 |
+
PeriodArray,
|
12 |
+
period_array,
|
13 |
+
)
|
14 |
+
|
15 |
+
|
16 |
+
@pytest.mark.parametrize(
|
17 |
+
"data, freq, expected",
|
18 |
+
[
|
19 |
+
([pd.Period("2017", "D")], None, [17167]),
|
20 |
+
([pd.Period("2017", "D")], "D", [17167]),
|
21 |
+
([2017], "D", [17167]),
|
22 |
+
(["2017"], "D", [17167]),
|
23 |
+
([pd.Period("2017", "D")], pd.tseries.offsets.Day(), [17167]),
|
24 |
+
([pd.Period("2017", "D"), None], None, [17167, iNaT]),
|
25 |
+
(pd.Series(pd.date_range("2017", periods=3)), None, [17167, 17168, 17169]),
|
26 |
+
(pd.date_range("2017", periods=3), None, [17167, 17168, 17169]),
|
27 |
+
(pd.period_range("2017", periods=4, freq="Q"), None, [188, 189, 190, 191]),
|
28 |
+
],
|
29 |
+
)
|
30 |
+
def test_period_array_ok(data, freq, expected):
|
31 |
+
result = period_array(data, freq=freq).asi8
|
32 |
+
expected = np.asarray(expected, dtype=np.int64)
|
33 |
+
tm.assert_numpy_array_equal(result, expected)
|
34 |
+
|
35 |
+
|
36 |
+
def test_period_array_readonly_object():
|
37 |
+
# https://github.com/pandas-dev/pandas/issues/25403
|
38 |
+
pa = period_array([pd.Period("2019-01-01")])
|
39 |
+
arr = np.asarray(pa, dtype="object")
|
40 |
+
arr.setflags(write=False)
|
41 |
+
|
42 |
+
result = period_array(arr)
|
43 |
+
tm.assert_period_array_equal(result, pa)
|
44 |
+
|
45 |
+
result = pd.Series(arr)
|
46 |
+
tm.assert_series_equal(result, pd.Series(pa))
|
47 |
+
|
48 |
+
result = pd.DataFrame({"A": arr})
|
49 |
+
tm.assert_frame_equal(result, pd.DataFrame({"A": pa}))
|
50 |
+
|
51 |
+
|
52 |
+
def test_from_datetime64_freq_changes():
|
53 |
+
# https://github.com/pandas-dev/pandas/issues/23438
|
54 |
+
arr = pd.date_range("2017", periods=3, freq="D")
|
55 |
+
result = PeriodArray._from_datetime64(arr, freq="M")
|
56 |
+
expected = period_array(["2017-01-01", "2017-01-01", "2017-01-01"], freq="M")
|
57 |
+
tm.assert_period_array_equal(result, expected)
|
58 |
+
|
59 |
+
|
60 |
+
@pytest.mark.parametrize("freq", ["2M", MonthEnd(2)])
|
61 |
+
def test_from_datetime64_freq_2M(freq):
|
62 |
+
arr = np.array(
|
63 |
+
["2020-01-01T00:00:00", "2020-01-02T00:00:00"], dtype="datetime64[ns]"
|
64 |
+
)
|
65 |
+
result = PeriodArray._from_datetime64(arr, freq)
|
66 |
+
expected = period_array(["2020-01", "2020-01"], freq=freq)
|
67 |
+
tm.assert_period_array_equal(result, expected)
|
68 |
+
|
69 |
+
|
70 |
+
@pytest.mark.parametrize(
|
71 |
+
"data, freq, msg",
|
72 |
+
[
|
73 |
+
(
|
74 |
+
[pd.Period("2017", "D"), pd.Period("2017", "Y")],
|
75 |
+
None,
|
76 |
+
"Input has different freq",
|
77 |
+
),
|
78 |
+
([pd.Period("2017", "D")], "Y", "Input has different freq"),
|
79 |
+
],
|
80 |
+
)
|
81 |
+
def test_period_array_raises(data, freq, msg):
|
82 |
+
with pytest.raises(IncompatibleFrequency, match=msg):
|
83 |
+
period_array(data, freq)
|
84 |
+
|
85 |
+
|
86 |
+
def test_period_array_non_period_series_raies():
|
87 |
+
ser = pd.Series([1, 2, 3])
|
88 |
+
with pytest.raises(TypeError, match="dtype"):
|
89 |
+
PeriodArray(ser, dtype="period[D]")
|
90 |
+
|
91 |
+
|
92 |
+
def test_period_array_freq_mismatch():
|
93 |
+
arr = period_array(["2000", "2001"], freq="D")
|
94 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
95 |
+
PeriodArray(arr, dtype="period[M]")
|
96 |
+
|
97 |
+
dtype = pd.PeriodDtype(pd.tseries.offsets.MonthEnd())
|
98 |
+
with pytest.raises(IncompatibleFrequency, match="freq"):
|
99 |
+
PeriodArray(arr, dtype=dtype)
|
100 |
+
|
101 |
+
|
102 |
+
def test_from_sequence_disallows_i8():
|
103 |
+
arr = period_array(["2000", "2001"], freq="D")
|
104 |
+
|
105 |
+
msg = str(arr[0].ordinal)
|
106 |
+
with pytest.raises(TypeError, match=msg):
|
107 |
+
PeriodArray._from_sequence(arr.asi8, dtype=arr.dtype)
|
108 |
+
|
109 |
+
with pytest.raises(TypeError, match=msg):
|
110 |
+
PeriodArray._from_sequence(list(arr.asi8), dtype=arr.dtype)
|
111 |
+
|
112 |
+
|
113 |
+
def test_from_td64nat_sequence_raises():
|
114 |
+
# GH#44507
|
115 |
+
td = pd.NaT.to_numpy("m8[ns]")
|
116 |
+
|
117 |
+
dtype = pd.period_range("2005-01-01", periods=3, freq="D").dtype
|
118 |
+
|
119 |
+
arr = np.array([None], dtype=object)
|
120 |
+
arr[0] = td
|
121 |
+
|
122 |
+
msg = "Value must be Period, string, integer, or datetime"
|
123 |
+
with pytest.raises(ValueError, match=msg):
|
124 |
+
PeriodArray._from_sequence(arr, dtype=dtype)
|
125 |
+
|
126 |
+
with pytest.raises(ValueError, match=msg):
|
127 |
+
pd.PeriodIndex(arr, dtype=dtype)
|
128 |
+
with pytest.raises(ValueError, match=msg):
|
129 |
+
pd.Index(arr, dtype=dtype)
|
130 |
+
with pytest.raises(ValueError, match=msg):
|
131 |
+
pd.array(arr, dtype=dtype)
|
132 |
+
with pytest.raises(ValueError, match=msg):
|
133 |
+
pd.Series(arr, dtype=dtype)
|
134 |
+
with pytest.raises(ValueError, match=msg):
|
135 |
+
pd.DataFrame(arr, dtype=dtype)
|
136 |
+
|
137 |
+
|
138 |
+
def test_freq_deprecated():
|
139 |
+
# GH#52462
|
140 |
+
data = np.arange(5).astype(np.int64)
|
141 |
+
msg = "The 'freq' keyword in the PeriodArray constructor is deprecated"
|
142 |
+
with tm.assert_produces_warning(FutureWarning, match=msg):
|
143 |
+
res = PeriodArray(data, freq="M")
|
144 |
+
|
145 |
+
expected = PeriodArray(data, dtype="period[M]")
|
146 |
+
tm.assert_equal(res, expected)
|
147 |
+
|
148 |
+
|
149 |
+
def test_period_array_from_datetime64():
|
150 |
+
arr = np.array(
|
151 |
+
["2020-01-01T00:00:00", "2020-02-02T00:00:00"], dtype="datetime64[ns]"
|
152 |
+
)
|
153 |
+
result = PeriodArray._from_datetime64(arr, freq=MonthEnd(2))
|
154 |
+
|
155 |
+
expected = period_array(["2020-01-01", "2020-02-01"], freq=MonthEnd(2))
|
156 |
+
tm.assert_period_array_equal(result, expected)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/period/test_reductions.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import pytest
|
2 |
+
|
3 |
+
import pandas as pd
|
4 |
+
from pandas.core.arrays import period_array
|
5 |
+
|
6 |
+
|
7 |
+
class TestReductions:
|
8 |
+
def test_min_max(self):
|
9 |
+
arr = period_array(
|
10 |
+
[
|
11 |
+
"2000-01-03",
|
12 |
+
"2000-01-03",
|
13 |
+
"NaT",
|
14 |
+
"2000-01-02",
|
15 |
+
"2000-01-05",
|
16 |
+
"2000-01-04",
|
17 |
+
],
|
18 |
+
freq="D",
|
19 |
+
)
|
20 |
+
|
21 |
+
result = arr.min()
|
22 |
+
expected = pd.Period("2000-01-02", freq="D")
|
23 |
+
assert result == expected
|
24 |
+
|
25 |
+
result = arr.max()
|
26 |
+
expected = pd.Period("2000-01-05", freq="D")
|
27 |
+
assert result == expected
|
28 |
+
|
29 |
+
result = arr.min(skipna=False)
|
30 |
+
assert result is pd.NaT
|
31 |
+
|
32 |
+
result = arr.max(skipna=False)
|
33 |
+
assert result is pd.NaT
|
34 |
+
|
35 |
+
@pytest.mark.parametrize("skipna", [True, False])
|
36 |
+
def test_min_max_empty(self, skipna):
|
37 |
+
arr = period_array([], freq="D")
|
38 |
+
result = arr.min(skipna=skipna)
|
39 |
+
assert result is pd.NaT
|
40 |
+
|
41 |
+
result = arr.max(skipna=skipna)
|
42 |
+
assert result is pd.NaT
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (200 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__pycache__/test_string.cpython-310.pyc
ADDED
Binary file (19.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/__pycache__/test_string_arrow.cpython-310.pyc
ADDED
Binary file (8.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/test_string.py
ADDED
@@ -0,0 +1,703 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
This module tests the functionality of StringArray and ArrowStringArray.
|
3 |
+
Tests for the str accessors are in pandas/tests/strings/test_string_array.py
|
4 |
+
"""
|
5 |
+
import operator
|
6 |
+
|
7 |
+
import numpy as np
|
8 |
+
import pytest
|
9 |
+
|
10 |
+
from pandas.compat.pyarrow import pa_version_under12p0
|
11 |
+
|
12 |
+
from pandas.core.dtypes.common import is_dtype_equal
|
13 |
+
|
14 |
+
import pandas as pd
|
15 |
+
import pandas._testing as tm
|
16 |
+
from pandas.core.arrays.string_arrow import (
|
17 |
+
ArrowStringArray,
|
18 |
+
ArrowStringArrayNumpySemantics,
|
19 |
+
)
|
20 |
+
|
21 |
+
|
22 |
+
def na_val(dtype):
|
23 |
+
if dtype.storage == "pyarrow_numpy":
|
24 |
+
return np.nan
|
25 |
+
else:
|
26 |
+
return pd.NA
|
27 |
+
|
28 |
+
|
29 |
+
@pytest.fixture
|
30 |
+
def dtype(string_storage):
|
31 |
+
"""Fixture giving StringDtype from parametrized 'string_storage'"""
|
32 |
+
return pd.StringDtype(storage=string_storage)
|
33 |
+
|
34 |
+
|
35 |
+
@pytest.fixture
|
36 |
+
def cls(dtype):
|
37 |
+
"""Fixture giving array type from parametrized 'dtype'"""
|
38 |
+
return dtype.construct_array_type()
|
39 |
+
|
40 |
+
|
41 |
+
def test_repr(dtype):
|
42 |
+
df = pd.DataFrame({"A": pd.array(["a", pd.NA, "b"], dtype=dtype)})
|
43 |
+
if dtype.storage == "pyarrow_numpy":
|
44 |
+
expected = " A\n0 a\n1 NaN\n2 b"
|
45 |
+
else:
|
46 |
+
expected = " A\n0 a\n1 <NA>\n2 b"
|
47 |
+
assert repr(df) == expected
|
48 |
+
|
49 |
+
if dtype.storage == "pyarrow_numpy":
|
50 |
+
expected = "0 a\n1 NaN\n2 b\nName: A, dtype: string"
|
51 |
+
else:
|
52 |
+
expected = "0 a\n1 <NA>\n2 b\nName: A, dtype: string"
|
53 |
+
assert repr(df.A) == expected
|
54 |
+
|
55 |
+
if dtype.storage == "pyarrow":
|
56 |
+
arr_name = "ArrowStringArray"
|
57 |
+
expected = f"<{arr_name}>\n['a', <NA>, 'b']\nLength: 3, dtype: string"
|
58 |
+
elif dtype.storage == "pyarrow_numpy":
|
59 |
+
arr_name = "ArrowStringArrayNumpySemantics"
|
60 |
+
expected = f"<{arr_name}>\n['a', nan, 'b']\nLength: 3, dtype: string"
|
61 |
+
else:
|
62 |
+
arr_name = "StringArray"
|
63 |
+
expected = f"<{arr_name}>\n['a', <NA>, 'b']\nLength: 3, dtype: string"
|
64 |
+
assert repr(df.A.array) == expected
|
65 |
+
|
66 |
+
|
67 |
+
def test_none_to_nan(cls, dtype):
|
68 |
+
a = cls._from_sequence(["a", None, "b"], dtype=dtype)
|
69 |
+
assert a[1] is not None
|
70 |
+
assert a[1] is na_val(a.dtype)
|
71 |
+
|
72 |
+
|
73 |
+
def test_setitem_validates(cls, dtype):
|
74 |
+
arr = cls._from_sequence(["a", "b"], dtype=dtype)
|
75 |
+
|
76 |
+
if cls is pd.arrays.StringArray:
|
77 |
+
msg = "Cannot set non-string value '10' into a StringArray."
|
78 |
+
else:
|
79 |
+
msg = "Scalar must be NA or str"
|
80 |
+
with pytest.raises(TypeError, match=msg):
|
81 |
+
arr[0] = 10
|
82 |
+
|
83 |
+
if cls is pd.arrays.StringArray:
|
84 |
+
msg = "Must provide strings."
|
85 |
+
else:
|
86 |
+
msg = "Scalar must be NA or str"
|
87 |
+
with pytest.raises(TypeError, match=msg):
|
88 |
+
arr[:] = np.array([1, 2])
|
89 |
+
|
90 |
+
|
91 |
+
def test_setitem_with_scalar_string(dtype):
|
92 |
+
# is_float_dtype considers some strings, like 'd', to be floats
|
93 |
+
# which can cause issues.
|
94 |
+
arr = pd.array(["a", "c"], dtype=dtype)
|
95 |
+
arr[0] = "d"
|
96 |
+
expected = pd.array(["d", "c"], dtype=dtype)
|
97 |
+
tm.assert_extension_array_equal(arr, expected)
|
98 |
+
|
99 |
+
|
100 |
+
def test_setitem_with_array_with_missing(dtype):
|
101 |
+
# ensure that when setting with an array of values, we don't mutate the
|
102 |
+
# array `value` in __setitem__(self, key, value)
|
103 |
+
arr = pd.array(["a", "b", "c"], dtype=dtype)
|
104 |
+
value = np.array(["A", None])
|
105 |
+
value_orig = value.copy()
|
106 |
+
arr[[0, 1]] = value
|
107 |
+
|
108 |
+
expected = pd.array(["A", pd.NA, "c"], dtype=dtype)
|
109 |
+
tm.assert_extension_array_equal(arr, expected)
|
110 |
+
tm.assert_numpy_array_equal(value, value_orig)
|
111 |
+
|
112 |
+
|
113 |
+
def test_astype_roundtrip(dtype):
|
114 |
+
ser = pd.Series(pd.date_range("2000", periods=12))
|
115 |
+
ser[0] = None
|
116 |
+
|
117 |
+
casted = ser.astype(dtype)
|
118 |
+
assert is_dtype_equal(casted.dtype, dtype)
|
119 |
+
|
120 |
+
result = casted.astype("datetime64[ns]")
|
121 |
+
tm.assert_series_equal(result, ser)
|
122 |
+
|
123 |
+
# GH#38509 same thing for timedelta64
|
124 |
+
ser2 = ser - ser.iloc[-1]
|
125 |
+
casted2 = ser2.astype(dtype)
|
126 |
+
assert is_dtype_equal(casted2.dtype, dtype)
|
127 |
+
|
128 |
+
result2 = casted2.astype(ser2.dtype)
|
129 |
+
tm.assert_series_equal(result2, ser2)
|
130 |
+
|
131 |
+
|
132 |
+
def test_add(dtype):
|
133 |
+
a = pd.Series(["a", "b", "c", None, None], dtype=dtype)
|
134 |
+
b = pd.Series(["x", "y", None, "z", None], dtype=dtype)
|
135 |
+
|
136 |
+
result = a + b
|
137 |
+
expected = pd.Series(["ax", "by", None, None, None], dtype=dtype)
|
138 |
+
tm.assert_series_equal(result, expected)
|
139 |
+
|
140 |
+
result = a.add(b)
|
141 |
+
tm.assert_series_equal(result, expected)
|
142 |
+
|
143 |
+
result = a.radd(b)
|
144 |
+
expected = pd.Series(["xa", "yb", None, None, None], dtype=dtype)
|
145 |
+
tm.assert_series_equal(result, expected)
|
146 |
+
|
147 |
+
result = a.add(b, fill_value="-")
|
148 |
+
expected = pd.Series(["ax", "by", "c-", "-z", None], dtype=dtype)
|
149 |
+
tm.assert_series_equal(result, expected)
|
150 |
+
|
151 |
+
|
152 |
+
def test_add_2d(dtype, request, arrow_string_storage):
|
153 |
+
if dtype.storage in arrow_string_storage:
|
154 |
+
reason = "Failed: DID NOT RAISE <class 'ValueError'>"
|
155 |
+
mark = pytest.mark.xfail(raises=None, reason=reason)
|
156 |
+
request.applymarker(mark)
|
157 |
+
|
158 |
+
a = pd.array(["a", "b", "c"], dtype=dtype)
|
159 |
+
b = np.array([["a", "b", "c"]], dtype=object)
|
160 |
+
with pytest.raises(ValueError, match="3 != 1"):
|
161 |
+
a + b
|
162 |
+
|
163 |
+
s = pd.Series(a)
|
164 |
+
with pytest.raises(ValueError, match="3 != 1"):
|
165 |
+
s + b
|
166 |
+
|
167 |
+
|
168 |
+
def test_add_sequence(dtype):
|
169 |
+
a = pd.array(["a", "b", None, None], dtype=dtype)
|
170 |
+
other = ["x", None, "y", None]
|
171 |
+
|
172 |
+
result = a + other
|
173 |
+
expected = pd.array(["ax", None, None, None], dtype=dtype)
|
174 |
+
tm.assert_extension_array_equal(result, expected)
|
175 |
+
|
176 |
+
result = other + a
|
177 |
+
expected = pd.array(["xa", None, None, None], dtype=dtype)
|
178 |
+
tm.assert_extension_array_equal(result, expected)
|
179 |
+
|
180 |
+
|
181 |
+
def test_mul(dtype):
|
182 |
+
a = pd.array(["a", "b", None], dtype=dtype)
|
183 |
+
result = a * 2
|
184 |
+
expected = pd.array(["aa", "bb", None], dtype=dtype)
|
185 |
+
tm.assert_extension_array_equal(result, expected)
|
186 |
+
|
187 |
+
result = 2 * a
|
188 |
+
tm.assert_extension_array_equal(result, expected)
|
189 |
+
|
190 |
+
|
191 |
+
@pytest.mark.xfail(reason="GH-28527")
|
192 |
+
def test_add_strings(dtype):
|
193 |
+
arr = pd.array(["a", "b", "c", "d"], dtype=dtype)
|
194 |
+
df = pd.DataFrame([["t", "y", "v", "w"]], dtype=object)
|
195 |
+
assert arr.__add__(df) is NotImplemented
|
196 |
+
|
197 |
+
result = arr + df
|
198 |
+
expected = pd.DataFrame([["at", "by", "cv", "dw"]]).astype(dtype)
|
199 |
+
tm.assert_frame_equal(result, expected)
|
200 |
+
|
201 |
+
result = df + arr
|
202 |
+
expected = pd.DataFrame([["ta", "yb", "vc", "wd"]]).astype(dtype)
|
203 |
+
tm.assert_frame_equal(result, expected)
|
204 |
+
|
205 |
+
|
206 |
+
@pytest.mark.xfail(reason="GH-28527")
|
207 |
+
def test_add_frame(dtype):
|
208 |
+
arr = pd.array(["a", "b", np.nan, np.nan], dtype=dtype)
|
209 |
+
df = pd.DataFrame([["x", np.nan, "y", np.nan]])
|
210 |
+
|
211 |
+
assert arr.__add__(df) is NotImplemented
|
212 |
+
|
213 |
+
result = arr + df
|
214 |
+
expected = pd.DataFrame([["ax", np.nan, np.nan, np.nan]]).astype(dtype)
|
215 |
+
tm.assert_frame_equal(result, expected)
|
216 |
+
|
217 |
+
result = df + arr
|
218 |
+
expected = pd.DataFrame([["xa", np.nan, np.nan, np.nan]]).astype(dtype)
|
219 |
+
tm.assert_frame_equal(result, expected)
|
220 |
+
|
221 |
+
|
222 |
+
def test_comparison_methods_scalar(comparison_op, dtype):
|
223 |
+
op_name = f"__{comparison_op.__name__}__"
|
224 |
+
a = pd.array(["a", None, "c"], dtype=dtype)
|
225 |
+
other = "a"
|
226 |
+
result = getattr(a, op_name)(other)
|
227 |
+
if dtype.storage == "pyarrow_numpy":
|
228 |
+
expected = np.array([getattr(item, op_name)(other) for item in a])
|
229 |
+
if comparison_op == operator.ne:
|
230 |
+
expected[1] = True
|
231 |
+
else:
|
232 |
+
expected[1] = False
|
233 |
+
tm.assert_numpy_array_equal(result, expected.astype(np.bool_))
|
234 |
+
else:
|
235 |
+
expected_dtype = "boolean[pyarrow]" if dtype.storage == "pyarrow" else "boolean"
|
236 |
+
expected = np.array([getattr(item, op_name)(other) for item in a], dtype=object)
|
237 |
+
expected = pd.array(expected, dtype=expected_dtype)
|
238 |
+
tm.assert_extension_array_equal(result, expected)
|
239 |
+
|
240 |
+
|
241 |
+
def test_comparison_methods_scalar_pd_na(comparison_op, dtype):
|
242 |
+
op_name = f"__{comparison_op.__name__}__"
|
243 |
+
a = pd.array(["a", None, "c"], dtype=dtype)
|
244 |
+
result = getattr(a, op_name)(pd.NA)
|
245 |
+
|
246 |
+
if dtype.storage == "pyarrow_numpy":
|
247 |
+
if operator.ne == comparison_op:
|
248 |
+
expected = np.array([True, True, True])
|
249 |
+
else:
|
250 |
+
expected = np.array([False, False, False])
|
251 |
+
tm.assert_numpy_array_equal(result, expected)
|
252 |
+
else:
|
253 |
+
expected_dtype = "boolean[pyarrow]" if dtype.storage == "pyarrow" else "boolean"
|
254 |
+
expected = pd.array([None, None, None], dtype=expected_dtype)
|
255 |
+
tm.assert_extension_array_equal(result, expected)
|
256 |
+
tm.assert_extension_array_equal(result, expected)
|
257 |
+
|
258 |
+
|
259 |
+
def test_comparison_methods_scalar_not_string(comparison_op, dtype):
|
260 |
+
op_name = f"__{comparison_op.__name__}__"
|
261 |
+
|
262 |
+
a = pd.array(["a", None, "c"], dtype=dtype)
|
263 |
+
other = 42
|
264 |
+
|
265 |
+
if op_name not in ["__eq__", "__ne__"]:
|
266 |
+
with pytest.raises(TypeError, match="Invalid comparison|not supported between"):
|
267 |
+
getattr(a, op_name)(other)
|
268 |
+
|
269 |
+
return
|
270 |
+
|
271 |
+
result = getattr(a, op_name)(other)
|
272 |
+
|
273 |
+
if dtype.storage == "pyarrow_numpy":
|
274 |
+
expected_data = {
|
275 |
+
"__eq__": [False, False, False],
|
276 |
+
"__ne__": [True, True, True],
|
277 |
+
}[op_name]
|
278 |
+
expected = np.array(expected_data)
|
279 |
+
tm.assert_numpy_array_equal(result, expected)
|
280 |
+
else:
|
281 |
+
expected_data = {"__eq__": [False, None, False], "__ne__": [True, None, True]}[
|
282 |
+
op_name
|
283 |
+
]
|
284 |
+
expected_dtype = "boolean[pyarrow]" if dtype.storage == "pyarrow" else "boolean"
|
285 |
+
expected = pd.array(expected_data, dtype=expected_dtype)
|
286 |
+
tm.assert_extension_array_equal(result, expected)
|
287 |
+
|
288 |
+
|
289 |
+
def test_comparison_methods_array(comparison_op, dtype):
|
290 |
+
op_name = f"__{comparison_op.__name__}__"
|
291 |
+
|
292 |
+
a = pd.array(["a", None, "c"], dtype=dtype)
|
293 |
+
other = [None, None, "c"]
|
294 |
+
result = getattr(a, op_name)(other)
|
295 |
+
if dtype.storage == "pyarrow_numpy":
|
296 |
+
if operator.ne == comparison_op:
|
297 |
+
expected = np.array([True, True, False])
|
298 |
+
else:
|
299 |
+
expected = np.array([False, False, False])
|
300 |
+
expected[-1] = getattr(other[-1], op_name)(a[-1])
|
301 |
+
tm.assert_numpy_array_equal(result, expected)
|
302 |
+
|
303 |
+
result = getattr(a, op_name)(pd.NA)
|
304 |
+
if operator.ne == comparison_op:
|
305 |
+
expected = np.array([True, True, True])
|
306 |
+
else:
|
307 |
+
expected = np.array([False, False, False])
|
308 |
+
tm.assert_numpy_array_equal(result, expected)
|
309 |
+
|
310 |
+
else:
|
311 |
+
expected_dtype = "boolean[pyarrow]" if dtype.storage == "pyarrow" else "boolean"
|
312 |
+
expected = np.full(len(a), fill_value=None, dtype="object")
|
313 |
+
expected[-1] = getattr(other[-1], op_name)(a[-1])
|
314 |
+
expected = pd.array(expected, dtype=expected_dtype)
|
315 |
+
tm.assert_extension_array_equal(result, expected)
|
316 |
+
|
317 |
+
result = getattr(a, op_name)(pd.NA)
|
318 |
+
expected = pd.array([None, None, None], dtype=expected_dtype)
|
319 |
+
tm.assert_extension_array_equal(result, expected)
|
320 |
+
|
321 |
+
|
322 |
+
def test_constructor_raises(cls):
|
323 |
+
if cls is pd.arrays.StringArray:
|
324 |
+
msg = "StringArray requires a sequence of strings or pandas.NA"
|
325 |
+
else:
|
326 |
+
msg = "Unsupported type '<class 'numpy.ndarray'>' for ArrowExtensionArray"
|
327 |
+
|
328 |
+
with pytest.raises(ValueError, match=msg):
|
329 |
+
cls(np.array(["a", "b"], dtype="S1"))
|
330 |
+
|
331 |
+
with pytest.raises(ValueError, match=msg):
|
332 |
+
cls(np.array([]))
|
333 |
+
|
334 |
+
if cls is pd.arrays.StringArray:
|
335 |
+
# GH#45057 np.nan and None do NOT raise, as they are considered valid NAs
|
336 |
+
# for string dtype
|
337 |
+
cls(np.array(["a", np.nan], dtype=object))
|
338 |
+
cls(np.array(["a", None], dtype=object))
|
339 |
+
else:
|
340 |
+
with pytest.raises(ValueError, match=msg):
|
341 |
+
cls(np.array(["a", np.nan], dtype=object))
|
342 |
+
with pytest.raises(ValueError, match=msg):
|
343 |
+
cls(np.array(["a", None], dtype=object))
|
344 |
+
|
345 |
+
with pytest.raises(ValueError, match=msg):
|
346 |
+
cls(np.array(["a", pd.NaT], dtype=object))
|
347 |
+
|
348 |
+
with pytest.raises(ValueError, match=msg):
|
349 |
+
cls(np.array(["a", np.datetime64("NaT", "ns")], dtype=object))
|
350 |
+
|
351 |
+
with pytest.raises(ValueError, match=msg):
|
352 |
+
cls(np.array(["a", np.timedelta64("NaT", "ns")], dtype=object))
|
353 |
+
|
354 |
+
|
355 |
+
@pytest.mark.parametrize("na", [np.nan, np.float64("nan"), float("nan"), None, pd.NA])
|
356 |
+
def test_constructor_nan_like(na):
|
357 |
+
expected = pd.arrays.StringArray(np.array(["a", pd.NA]))
|
358 |
+
tm.assert_extension_array_equal(
|
359 |
+
pd.arrays.StringArray(np.array(["a", na], dtype="object")), expected
|
360 |
+
)
|
361 |
+
|
362 |
+
|
363 |
+
@pytest.mark.parametrize("copy", [True, False])
|
364 |
+
def test_from_sequence_no_mutate(copy, cls, dtype):
|
365 |
+
nan_arr = np.array(["a", np.nan], dtype=object)
|
366 |
+
expected_input = nan_arr.copy()
|
367 |
+
na_arr = np.array(["a", pd.NA], dtype=object)
|
368 |
+
|
369 |
+
result = cls._from_sequence(nan_arr, dtype=dtype, copy=copy)
|
370 |
+
|
371 |
+
if cls in (ArrowStringArray, ArrowStringArrayNumpySemantics):
|
372 |
+
import pyarrow as pa
|
373 |
+
|
374 |
+
expected = cls(pa.array(na_arr, type=pa.string(), from_pandas=True))
|
375 |
+
else:
|
376 |
+
expected = cls(na_arr)
|
377 |
+
|
378 |
+
tm.assert_extension_array_equal(result, expected)
|
379 |
+
tm.assert_numpy_array_equal(nan_arr, expected_input)
|
380 |
+
|
381 |
+
|
382 |
+
def test_astype_int(dtype):
|
383 |
+
arr = pd.array(["1", "2", "3"], dtype=dtype)
|
384 |
+
result = arr.astype("int64")
|
385 |
+
expected = np.array([1, 2, 3], dtype="int64")
|
386 |
+
tm.assert_numpy_array_equal(result, expected)
|
387 |
+
|
388 |
+
arr = pd.array(["1", pd.NA, "3"], dtype=dtype)
|
389 |
+
if dtype.storage == "pyarrow_numpy":
|
390 |
+
err = ValueError
|
391 |
+
msg = "cannot convert float NaN to integer"
|
392 |
+
else:
|
393 |
+
err = TypeError
|
394 |
+
msg = (
|
395 |
+
r"int\(\) argument must be a string, a bytes-like "
|
396 |
+
r"object or a( real)? number"
|
397 |
+
)
|
398 |
+
with pytest.raises(err, match=msg):
|
399 |
+
arr.astype("int64")
|
400 |
+
|
401 |
+
|
402 |
+
def test_astype_nullable_int(dtype):
|
403 |
+
arr = pd.array(["1", pd.NA, "3"], dtype=dtype)
|
404 |
+
|
405 |
+
result = arr.astype("Int64")
|
406 |
+
expected = pd.array([1, pd.NA, 3], dtype="Int64")
|
407 |
+
tm.assert_extension_array_equal(result, expected)
|
408 |
+
|
409 |
+
|
410 |
+
def test_astype_float(dtype, any_float_dtype):
|
411 |
+
# Don't compare arrays (37974)
|
412 |
+
ser = pd.Series(["1.1", pd.NA, "3.3"], dtype=dtype)
|
413 |
+
result = ser.astype(any_float_dtype)
|
414 |
+
expected = pd.Series([1.1, np.nan, 3.3], dtype=any_float_dtype)
|
415 |
+
tm.assert_series_equal(result, expected)
|
416 |
+
|
417 |
+
|
418 |
+
@pytest.mark.parametrize("skipna", [True, False])
|
419 |
+
@pytest.mark.xfail(reason="Not implemented StringArray.sum")
|
420 |
+
def test_reduce(skipna, dtype):
|
421 |
+
arr = pd.Series(["a", "b", "c"], dtype=dtype)
|
422 |
+
result = arr.sum(skipna=skipna)
|
423 |
+
assert result == "abc"
|
424 |
+
|
425 |
+
|
426 |
+
@pytest.mark.parametrize("skipna", [True, False])
|
427 |
+
@pytest.mark.xfail(reason="Not implemented StringArray.sum")
|
428 |
+
def test_reduce_missing(skipna, dtype):
|
429 |
+
arr = pd.Series([None, "a", None, "b", "c", None], dtype=dtype)
|
430 |
+
result = arr.sum(skipna=skipna)
|
431 |
+
if skipna:
|
432 |
+
assert result == "abc"
|
433 |
+
else:
|
434 |
+
assert pd.isna(result)
|
435 |
+
|
436 |
+
|
437 |
+
@pytest.mark.parametrize("method", ["min", "max"])
|
438 |
+
@pytest.mark.parametrize("skipna", [True, False])
|
439 |
+
def test_min_max(method, skipna, dtype):
|
440 |
+
arr = pd.Series(["a", "b", "c", None], dtype=dtype)
|
441 |
+
result = getattr(arr, method)(skipna=skipna)
|
442 |
+
if skipna:
|
443 |
+
expected = "a" if method == "min" else "c"
|
444 |
+
assert result == expected
|
445 |
+
else:
|
446 |
+
assert result is na_val(arr.dtype)
|
447 |
+
|
448 |
+
|
449 |
+
@pytest.mark.parametrize("method", ["min", "max"])
|
450 |
+
@pytest.mark.parametrize("box", [pd.Series, pd.array])
|
451 |
+
def test_min_max_numpy(method, box, dtype, request, arrow_string_storage):
|
452 |
+
if dtype.storage in arrow_string_storage and box is pd.array:
|
453 |
+
if box is pd.array:
|
454 |
+
reason = "'<=' not supported between instances of 'str' and 'NoneType'"
|
455 |
+
else:
|
456 |
+
reason = "'ArrowStringArray' object has no attribute 'max'"
|
457 |
+
mark = pytest.mark.xfail(raises=TypeError, reason=reason)
|
458 |
+
request.applymarker(mark)
|
459 |
+
|
460 |
+
arr = box(["a", "b", "c", None], dtype=dtype)
|
461 |
+
result = getattr(np, method)(arr)
|
462 |
+
expected = "a" if method == "min" else "c"
|
463 |
+
assert result == expected
|
464 |
+
|
465 |
+
|
466 |
+
def test_fillna_args(dtype, arrow_string_storage):
|
467 |
+
# GH 37987
|
468 |
+
|
469 |
+
arr = pd.array(["a", pd.NA], dtype=dtype)
|
470 |
+
|
471 |
+
res = arr.fillna(value="b")
|
472 |
+
expected = pd.array(["a", "b"], dtype=dtype)
|
473 |
+
tm.assert_extension_array_equal(res, expected)
|
474 |
+
|
475 |
+
res = arr.fillna(value=np.str_("b"))
|
476 |
+
expected = pd.array(["a", "b"], dtype=dtype)
|
477 |
+
tm.assert_extension_array_equal(res, expected)
|
478 |
+
|
479 |
+
if dtype.storage in arrow_string_storage:
|
480 |
+
msg = "Invalid value '1' for dtype string"
|
481 |
+
else:
|
482 |
+
msg = "Cannot set non-string value '1' into a StringArray."
|
483 |
+
with pytest.raises(TypeError, match=msg):
|
484 |
+
arr.fillna(value=1)
|
485 |
+
|
486 |
+
|
487 |
+
def test_arrow_array(dtype):
|
488 |
+
# protocol added in 0.15.0
|
489 |
+
pa = pytest.importorskip("pyarrow")
|
490 |
+
import pyarrow.compute as pc
|
491 |
+
|
492 |
+
data = pd.array(["a", "b", "c"], dtype=dtype)
|
493 |
+
arr = pa.array(data)
|
494 |
+
expected = pa.array(list(data), type=pa.large_string(), from_pandas=True)
|
495 |
+
if dtype.storage in ("pyarrow", "pyarrow_numpy") and pa_version_under12p0:
|
496 |
+
expected = pa.chunked_array(expected)
|
497 |
+
if dtype.storage == "python":
|
498 |
+
expected = pc.cast(expected, pa.string())
|
499 |
+
assert arr.equals(expected)
|
500 |
+
|
501 |
+
|
502 |
+
@pytest.mark.filterwarnings("ignore:Passing a BlockManager:DeprecationWarning")
|
503 |
+
def test_arrow_roundtrip(dtype, string_storage2, request, using_infer_string):
|
504 |
+
# roundtrip possible from arrow 1.0.0
|
505 |
+
pa = pytest.importorskip("pyarrow")
|
506 |
+
|
507 |
+
if using_infer_string and string_storage2 != "pyarrow_numpy":
|
508 |
+
request.applymarker(
|
509 |
+
pytest.mark.xfail(
|
510 |
+
reason="infer_string takes precedence over string storage"
|
511 |
+
)
|
512 |
+
)
|
513 |
+
|
514 |
+
data = pd.array(["a", "b", None], dtype=dtype)
|
515 |
+
df = pd.DataFrame({"a": data})
|
516 |
+
table = pa.table(df)
|
517 |
+
if dtype.storage == "python":
|
518 |
+
assert table.field("a").type == "string"
|
519 |
+
else:
|
520 |
+
assert table.field("a").type == "large_string"
|
521 |
+
with pd.option_context("string_storage", string_storage2):
|
522 |
+
result = table.to_pandas()
|
523 |
+
assert isinstance(result["a"].dtype, pd.StringDtype)
|
524 |
+
expected = df.astype(f"string[{string_storage2}]")
|
525 |
+
tm.assert_frame_equal(result, expected)
|
526 |
+
# ensure the missing value is represented by NA and not np.nan or None
|
527 |
+
assert result.loc[2, "a"] is na_val(result["a"].dtype)
|
528 |
+
|
529 |
+
|
530 |
+
@pytest.mark.filterwarnings("ignore:Passing a BlockManager:DeprecationWarning")
|
531 |
+
def test_arrow_load_from_zero_chunks(
|
532 |
+
dtype, string_storage2, request, using_infer_string
|
533 |
+
):
|
534 |
+
# GH-41040
|
535 |
+
pa = pytest.importorskip("pyarrow")
|
536 |
+
|
537 |
+
if using_infer_string and string_storage2 != "pyarrow_numpy":
|
538 |
+
request.applymarker(
|
539 |
+
pytest.mark.xfail(
|
540 |
+
reason="infer_string takes precedence over string storage"
|
541 |
+
)
|
542 |
+
)
|
543 |
+
|
544 |
+
data = pd.array([], dtype=dtype)
|
545 |
+
df = pd.DataFrame({"a": data})
|
546 |
+
table = pa.table(df)
|
547 |
+
if dtype.storage == "python":
|
548 |
+
assert table.field("a").type == "string"
|
549 |
+
else:
|
550 |
+
assert table.field("a").type == "large_string"
|
551 |
+
# Instantiate the same table with no chunks at all
|
552 |
+
table = pa.table([pa.chunked_array([], type=pa.string())], schema=table.schema)
|
553 |
+
with pd.option_context("string_storage", string_storage2):
|
554 |
+
result = table.to_pandas()
|
555 |
+
assert isinstance(result["a"].dtype, pd.StringDtype)
|
556 |
+
expected = df.astype(f"string[{string_storage2}]")
|
557 |
+
tm.assert_frame_equal(result, expected)
|
558 |
+
|
559 |
+
|
560 |
+
def test_value_counts_na(dtype):
|
561 |
+
if getattr(dtype, "storage", "") == "pyarrow":
|
562 |
+
exp_dtype = "int64[pyarrow]"
|
563 |
+
elif getattr(dtype, "storage", "") == "pyarrow_numpy":
|
+            exp_dtype = "int64"
+        else:
+            exp_dtype = "Int64"
+    arr = pd.array(["a", "b", "a", pd.NA], dtype=dtype)
+    result = arr.value_counts(dropna=False)
+    expected = pd.Series([2, 1, 1], index=arr[[0, 1, 3]], dtype=exp_dtype, name="count")
+    tm.assert_series_equal(result, expected)
+
+    result = arr.value_counts(dropna=True)
+    expected = pd.Series([2, 1], index=arr[:2], dtype=exp_dtype, name="count")
+    tm.assert_series_equal(result, expected)
+
+
+def test_value_counts_with_normalize(dtype):
+    if getattr(dtype, "storage", "") == "pyarrow":
+        exp_dtype = "double[pyarrow]"
+    elif getattr(dtype, "storage", "") == "pyarrow_numpy":
+        exp_dtype = np.float64
+    else:
+        exp_dtype = "Float64"
+    ser = pd.Series(["a", "b", "a", pd.NA], dtype=dtype)
+    result = ser.value_counts(normalize=True)
+    expected = pd.Series([2, 1], index=ser[:2], dtype=exp_dtype, name="proportion") / 3
+    tm.assert_series_equal(result, expected)
+
+
+@pytest.mark.parametrize(
+    "values, expected",
+    [
+        (["a", "b", "c"], np.array([False, False, False])),
+        (["a", "b", None], np.array([False, False, True])),
+    ],
+)
+def test_use_inf_as_na(values, expected, dtype):
+    # https://github.com/pandas-dev/pandas/issues/33655
+    values = pd.array(values, dtype=dtype)
+    msg = "use_inf_as_na option is deprecated"
+    with tm.assert_produces_warning(FutureWarning, match=msg):
+        with pd.option_context("mode.use_inf_as_na", True):
+            result = values.isna()
+            tm.assert_numpy_array_equal(result, expected)
+
+            result = pd.Series(values).isna()
+            expected = pd.Series(expected)
+            tm.assert_series_equal(result, expected)
+
+            result = pd.DataFrame(values).isna()
+            expected = pd.DataFrame(expected)
+            tm.assert_frame_equal(result, expected)
+
+
+def test_memory_usage(dtype, arrow_string_storage):
+    # GH 33963
+
+    if dtype.storage in arrow_string_storage:
+        pytest.skip(f"not applicable for {dtype.storage}")
+
+    series = pd.Series(["a", "b", "c"], dtype=dtype)
+
+    assert 0 < series.nbytes <= series.memory_usage() < series.memory_usage(deep=True)
+
+
+@pytest.mark.parametrize("float_dtype", [np.float16, np.float32, np.float64])
+def test_astype_from_float_dtype(float_dtype, dtype):
+    # https://github.com/pandas-dev/pandas/issues/36451
+    ser = pd.Series([0.1], dtype=float_dtype)
+    result = ser.astype(dtype)
+    expected = pd.Series(["0.1"], dtype=dtype)
+    tm.assert_series_equal(result, expected)
+
+
+def test_to_numpy_returns_pdna_default(dtype):
+    arr = pd.array(["a", pd.NA, "b"], dtype=dtype)
+    result = np.array(arr)
+    expected = np.array(["a", na_val(dtype), "b"], dtype=object)
+    tm.assert_numpy_array_equal(result, expected)
+
+
+def test_to_numpy_na_value(dtype, nulls_fixture):
+    na_value = nulls_fixture
+    arr = pd.array(["a", pd.NA, "b"], dtype=dtype)
+    result = arr.to_numpy(na_value=na_value)
+    expected = np.array(["a", na_value, "b"], dtype=object)
+    tm.assert_numpy_array_equal(result, expected)
+
+
+def test_isin(dtype, fixed_now_ts):
+    s = pd.Series(["a", "b", None], dtype=dtype)
+
+    result = s.isin(["a", "c"])
+    expected = pd.Series([True, False, False])
+    tm.assert_series_equal(result, expected)
+
+    result = s.isin(["a", pd.NA])
+    expected = pd.Series([True, False, True])
+    tm.assert_series_equal(result, expected)
+
+    result = s.isin([])
+    expected = pd.Series([False, False, False])
+    tm.assert_series_equal(result, expected)
+
+    result = s.isin(["a", fixed_now_ts])
+    expected = pd.Series([True, False, False])
+    tm.assert_series_equal(result, expected)
+
+
+def test_setitem_scalar_with_mask_validation(dtype):
+    # https://github.com/pandas-dev/pandas/issues/47628
+    # setting None with a boolean mask (through _putmask) should still result
+    # in pd.NA values in the underlying array
+    ser = pd.Series(["a", "b", "c"], dtype=dtype)
+    mask = np.array([False, True, False])
+
+    ser[mask] = None
+    assert ser.array[1] is na_val(ser.dtype)
+
+    # for other non-string we should also raise an error
+    ser = pd.Series(["a", "b", "c"], dtype=dtype)
+    if type(ser.array) is pd.arrays.StringArray:
+        msg = "Cannot set non-string value"
+    else:
+        msg = "Scalar must be NA or str"
+    with pytest.raises(TypeError, match=msg):
+        ser[mask] = 1
+
+
+def test_from_numpy_str(dtype):
+    vals = ["a", "b", "c"]
+    arr = np.array(vals, dtype=np.str_)
+    result = pd.array(arr, dtype=dtype)
+    expected = pd.array(vals, dtype=dtype)
+    tm.assert_extension_array_equal(result, expected)
+
+
+def test_tolist(dtype):
+    vals = ["a", "b", "c"]
+    arr = pd.array(vals, dtype=dtype)
+    result = arr.tolist()
+    expected = vals
+    tm.assert_equal(result, expected)
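
For orientation, the value_counts and to_numpy tests above exercise the nullable string dtype; a minimal sketch of the behaviour they pin down (assuming the default "python" storage), not part of the diff itself:

    import pandas as pd

    # nullable string array; pd.NA marks the missing entry
    arr = pd.array(["a", "b", "a", pd.NA], dtype="string")

    # counts include pd.NA when dropna=False; with python storage the
    # result comes back with the nullable Int64 dtype
    print(arr.value_counts(dropna=False))

    # to_numpy keeps pd.NA by default, or substitutes the given na_value
    print(arr.to_numpy(na_value=None))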
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/string_/test_string_arrow.py
ADDED
@@ -0,0 +1,265 @@
+import pickle
+import re
+
+import numpy as np
+import pytest
+
+import pandas.util._test_decorators as td
+
+import pandas as pd
+import pandas._testing as tm
+from pandas.core.arrays.string_ import (
+    StringArray,
+    StringDtype,
+)
+from pandas.core.arrays.string_arrow import (
+    ArrowStringArray,
+    ArrowStringArrayNumpySemantics,
+)
+
+
+def test_eq_all_na():
+    pytest.importorskip("pyarrow")
+    a = pd.array([pd.NA, pd.NA], dtype=StringDtype("pyarrow"))
+    result = a == a
+    expected = pd.array([pd.NA, pd.NA], dtype="boolean[pyarrow]")
+    tm.assert_extension_array_equal(result, expected)
+
+
+def test_config(string_storage, request, using_infer_string):
+    if using_infer_string and string_storage != "pyarrow_numpy":
+        request.applymarker(pytest.mark.xfail(reason="infer string takes precedence"))
+    with pd.option_context("string_storage", string_storage):
+        assert StringDtype().storage == string_storage
+        result = pd.array(["a", "b"])
+        assert result.dtype.storage == string_storage
+
+    dtype = StringDtype(string_storage)
+    expected = dtype.construct_array_type()._from_sequence(["a", "b"], dtype=dtype)
+    tm.assert_equal(result, expected)
+
+
+def test_config_bad_storage_raises():
+    msg = re.escape("Value must be one of python|pyarrow")
+    with pytest.raises(ValueError, match=msg):
+        pd.options.mode.string_storage = "foo"
+
+
+@pytest.mark.parametrize("chunked", [True, False])
+@pytest.mark.parametrize("array", ["numpy", "pyarrow"])
+def test_constructor_not_string_type_raises(array, chunked, arrow_string_storage):
+    pa = pytest.importorskip("pyarrow")
+
+    array = pa if array in arrow_string_storage else np
+
+    arr = array.array([1, 2, 3])
+    if chunked:
+        if array is np:
+            pytest.skip("chunked not applicable to numpy array")
+        arr = pa.chunked_array(arr)
+    if array is np:
+        msg = "Unsupported type '<class 'numpy.ndarray'>' for ArrowExtensionArray"
+    else:
+        msg = re.escape(
+            "ArrowStringArray requires a PyArrow (chunked) array of large_string type"
+        )
+    with pytest.raises(ValueError, match=msg):
+        ArrowStringArray(arr)
+
+
+@pytest.mark.parametrize("chunked", [True, False])
+def test_constructor_not_string_type_value_dictionary_raises(chunked):
+    pa = pytest.importorskip("pyarrow")
+
+    arr = pa.array([1, 2, 3], pa.dictionary(pa.int32(), pa.int32()))
+    if chunked:
+        arr = pa.chunked_array(arr)
+
+    msg = re.escape(
+        "ArrowStringArray requires a PyArrow (chunked) array of large_string type"
+    )
+    with pytest.raises(ValueError, match=msg):
+        ArrowStringArray(arr)
+
+
+@pytest.mark.xfail(
+    reason="dict conversion does not seem to be implemented for large string in arrow"
+)
+@pytest.mark.parametrize("chunked", [True, False])
+def test_constructor_valid_string_type_value_dictionary(chunked):
+    pa = pytest.importorskip("pyarrow")
+
+    arr = pa.array(["1", "2", "3"], pa.large_string()).dictionary_encode()
+    if chunked:
+        arr = pa.chunked_array(arr)
+
+    arr = ArrowStringArray(arr)
+    assert pa.types.is_string(arr._pa_array.type.value_type)
+
+
+def test_constructor_from_list():
+    # GH#27673
+    pytest.importorskip("pyarrow")
+    result = pd.Series(["E"], dtype=StringDtype(storage="pyarrow"))
+    assert isinstance(result.dtype, StringDtype)
+    assert result.dtype.storage == "pyarrow"
+
+
+def test_from_sequence_wrong_dtype_raises(using_infer_string):
+    pytest.importorskip("pyarrow")
+    with pd.option_context("string_storage", "python"):
+        ArrowStringArray._from_sequence(["a", None, "c"], dtype="string")
+
+    with pd.option_context("string_storage", "pyarrow"):
+        ArrowStringArray._from_sequence(["a", None, "c"], dtype="string")
+
+    with pytest.raises(AssertionError, match=None):
+        ArrowStringArray._from_sequence(["a", None, "c"], dtype="string[python]")
+
+    ArrowStringArray._from_sequence(["a", None, "c"], dtype="string[pyarrow]")
+
+    if not using_infer_string:
+        with pytest.raises(AssertionError, match=None):
+            with pd.option_context("string_storage", "python"):
+                ArrowStringArray._from_sequence(["a", None, "c"], dtype=StringDtype())
+
+    with pd.option_context("string_storage", "pyarrow"):
+        ArrowStringArray._from_sequence(["a", None, "c"], dtype=StringDtype())
+
+    if not using_infer_string:
+        with pytest.raises(AssertionError, match=None):
+            ArrowStringArray._from_sequence(
+                ["a", None, "c"], dtype=StringDtype("python")
+            )
+
+    ArrowStringArray._from_sequence(["a", None, "c"], dtype=StringDtype("pyarrow"))
+
+    with pd.option_context("string_storage", "python"):
+        StringArray._from_sequence(["a", None, "c"], dtype="string")
+
+    with pd.option_context("string_storage", "pyarrow"):
+        StringArray._from_sequence(["a", None, "c"], dtype="string")
+
+    StringArray._from_sequence(["a", None, "c"], dtype="string[python]")
+
+    with pytest.raises(AssertionError, match=None):
+        StringArray._from_sequence(["a", None, "c"], dtype="string[pyarrow]")
+
+    if not using_infer_string:
+        with pd.option_context("string_storage", "python"):
+            StringArray._from_sequence(["a", None, "c"], dtype=StringDtype())
+
+    if not using_infer_string:
+        with pytest.raises(AssertionError, match=None):
+            with pd.option_context("string_storage", "pyarrow"):
+                StringArray._from_sequence(["a", None, "c"], dtype=StringDtype())
+
+    StringArray._from_sequence(["a", None, "c"], dtype=StringDtype("python"))
+
+    with pytest.raises(AssertionError, match=None):
+        StringArray._from_sequence(["a", None, "c"], dtype=StringDtype("pyarrow"))
+
+
+@td.skip_if_installed("pyarrow")
+def test_pyarrow_not_installed_raises():
+    msg = re.escape("pyarrow>=10.0.1 is required for PyArrow backed")
+
+    with pytest.raises(ImportError, match=msg):
+        StringDtype(storage="pyarrow")
+
+    with pytest.raises(ImportError, match=msg):
+        ArrowStringArray([])
+
+    with pytest.raises(ImportError, match=msg):
+        ArrowStringArrayNumpySemantics([])
+
+    with pytest.raises(ImportError, match=msg):
+        ArrowStringArray._from_sequence(["a", None, "b"])
+
+
+@pytest.mark.parametrize("multiple_chunks", [False, True])
+@pytest.mark.parametrize(
+    "key, value, expected",
+    [
+        (-1, "XX", ["a", "b", "c", "d", "XX"]),
+        (1, "XX", ["a", "XX", "c", "d", "e"]),
+        (1, None, ["a", None, "c", "d", "e"]),
+        (1, pd.NA, ["a", None, "c", "d", "e"]),
+        ([1, 3], "XX", ["a", "XX", "c", "XX", "e"]),
+        ([1, 3], ["XX", "YY"], ["a", "XX", "c", "YY", "e"]),
+        ([1, 3], ["XX", None], ["a", "XX", "c", None, "e"]),
+        ([1, 3], ["XX", pd.NA], ["a", "XX", "c", None, "e"]),
+        ([0, -1], ["XX", "YY"], ["XX", "b", "c", "d", "YY"]),
+        ([-1, 0], ["XX", "YY"], ["YY", "b", "c", "d", "XX"]),
+        (slice(3, None), "XX", ["a", "b", "c", "XX", "XX"]),
+        (slice(2, 4), ["XX", "YY"], ["a", "b", "XX", "YY", "e"]),
+        (slice(3, 1, -1), ["XX", "YY"], ["a", "b", "YY", "XX", "e"]),
+        (slice(None), "XX", ["XX", "XX", "XX", "XX", "XX"]),
+        ([False, True, False, True, False], ["XX", "YY"], ["a", "XX", "c", "YY", "e"]),
+    ],
+)
+def test_setitem(multiple_chunks, key, value, expected):
+    pa = pytest.importorskip("pyarrow")
+
+    result = pa.array(list("abcde"))
+    expected = pa.array(expected)
+
+    if multiple_chunks:
+        result = pa.chunked_array([result[:3], result[3:]])
+        expected = pa.chunked_array([expected[:3], expected[3:]])
+
+    result = ArrowStringArray(result)
+    expected = ArrowStringArray(expected)
+
+    result[key] = value
+    tm.assert_equal(result, expected)
+
+
+def test_setitem_invalid_indexer_raises():
+    pa = pytest.importorskip("pyarrow")
+
+    arr = ArrowStringArray(pa.array(list("abcde")))
+
+    with pytest.raises(IndexError, match=None):
+        arr[5] = "foo"
+
+    with pytest.raises(IndexError, match=None):
+        arr[-6] = "foo"
+
+    with pytest.raises(IndexError, match=None):
+        arr[[0, 5]] = "foo"
+
+    with pytest.raises(IndexError, match=None):
+        arr[[0, -6]] = "foo"
+
+    with pytest.raises(IndexError, match=None):
+        arr[[True, True, False]] = "foo"
+
+    with pytest.raises(ValueError, match=None):
+        arr[[0, 1]] = ["foo", "bar", "baz"]
+
+
+@pytest.mark.parametrize("dtype", ["string[pyarrow]", "string[pyarrow_numpy]"])
+def test_pickle_roundtrip(dtype):
+    # GH 42600
+    pytest.importorskip("pyarrow")
+    expected = pd.Series(range(10), dtype=dtype)
+    expected_sliced = expected.head(2)
+    full_pickled = pickle.dumps(expected)
+    sliced_pickled = pickle.dumps(expected_sliced)
+
+    assert len(full_pickled) > len(sliced_pickled)
+
+    result = pickle.loads(full_pickled)
+    tm.assert_series_equal(result, expected)
+
+    result_sliced = pickle.loads(sliced_pickled)
+    tm.assert_series_equal(result_sliced, expected_sliced)
+
+
+def test_string_dtype_error_message():
+    # GH#55051
+    pytest.importorskip("pyarrow")
+    msg = "Storage must be 'python', 'pyarrow' or 'pyarrow_numpy'."
+    with pytest.raises(ValueError, match=msg):
+        StringDtype("bla")
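
For orientation, the construction and config tests above revolve around choosing the string storage backend; a minimal sketch of that selection (assuming pyarrow is installed), not part of the diff itself:

    import pandas as pd

    # the global option controls which backend the bare "string" dtype uses
    with pd.option_context("string_storage", "pyarrow"):
        s = pd.Series(["a", None, "b"], dtype="string")
        print(s.dtype.storage)  # -> "pyarrow"

    # the backend can also be requested explicitly via StringDtype
    s2 = pd.Series(["a", None, "b"], dtype=pd.StringDtype("pyarrow"))
    print(type(s2.array).__name__)  # -> "ArrowStringArray"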
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (203 Bytes).
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/test_constructors.cpython-310.pyc
ADDED
Binary file (4.8 kB).
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/test_cumulative.cpython-310.pyc
ADDED
Binary file (1.19 kB).
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/__pycache__/test_reductions.cpython-310.pyc
ADDED
Binary file (5.25 kB).
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/test_constructors.py
ADDED
@@ -0,0 +1,103 @@
+import numpy as np
+import pytest
+
+import pandas._testing as tm
+from pandas.core.arrays import TimedeltaArray
+
+
+class TestTimedeltaArrayConstructor:
+    def test_only_1dim_accepted(self):
+        # GH#25282
+        arr = np.array([0, 1, 2, 3], dtype="m8[h]").astype("m8[ns]")
+
+        depr_msg = "TimedeltaArray.__init__ is deprecated"
+        with tm.assert_produces_warning(FutureWarning, match=depr_msg):
+            with pytest.raises(ValueError, match="Only 1-dimensional"):
+                # 3-dim, we allow 2D to sneak in for ops purposes GH#29853
+                TimedeltaArray(arr.reshape(2, 2, 1))
+
+        with tm.assert_produces_warning(FutureWarning, match=depr_msg):
+            with pytest.raises(ValueError, match="Only 1-dimensional"):
+                # 0-dim
+                TimedeltaArray(arr[[0]].squeeze())
+
+    def test_freq_validation(self):
+        # ensure that the public constructor cannot create an invalid instance
+        arr = np.array([0, 0, 1], dtype=np.int64) * 3600 * 10**9
+
+        msg = (
+            "Inferred frequency None from passed values does not "
+            "conform to passed frequency D"
+        )
+        depr_msg = "TimedeltaArray.__init__ is deprecated"
+        with tm.assert_produces_warning(FutureWarning, match=depr_msg):
+            with pytest.raises(ValueError, match=msg):
+                TimedeltaArray(arr.view("timedelta64[ns]"), freq="D")
+
+    def test_non_array_raises(self):
+        depr_msg = "TimedeltaArray.__init__ is deprecated"
+        with tm.assert_produces_warning(FutureWarning, match=depr_msg):
+            with pytest.raises(ValueError, match="list"):
+                TimedeltaArray([1, 2, 3])
+
+    def test_other_type_raises(self):
+        msg = r"dtype bool cannot be converted to timedelta64\[ns\]"
+        with pytest.raises(TypeError, match=msg):
+            TimedeltaArray._from_sequence(np.array([1, 2, 3], dtype="bool"))
+
+    def test_incorrect_dtype_raises(self):
+        msg = "dtype 'category' is invalid, should be np.timedelta64 dtype"
+        with pytest.raises(ValueError, match=msg):
+            TimedeltaArray._from_sequence(
+                np.array([1, 2, 3], dtype="i8"), dtype="category"
+            )
+
+        msg = "dtype 'int64' is invalid, should be np.timedelta64 dtype"
+        with pytest.raises(ValueError, match=msg):
+            TimedeltaArray._from_sequence(
+                np.array([1, 2, 3], dtype="i8"), dtype=np.dtype("int64")
+            )
+
+        msg = r"dtype 'datetime64\[ns\]' is invalid, should be np.timedelta64 dtype"
+        with pytest.raises(ValueError, match=msg):
+            TimedeltaArray._from_sequence(
+                np.array([1, 2, 3], dtype="i8"), dtype=np.dtype("M8[ns]")
+            )
+
+        msg = (
+            r"dtype 'datetime64\[us, UTC\]' is invalid, should be np.timedelta64 dtype"
+        )
+        with pytest.raises(ValueError, match=msg):
+            TimedeltaArray._from_sequence(
+                np.array([1, 2, 3], dtype="i8"), dtype="M8[us, UTC]"
+            )
+
+        msg = "Supported timedelta64 resolutions are 's', 'ms', 'us', 'ns'"
+        with pytest.raises(ValueError, match=msg):
+            TimedeltaArray._from_sequence(
+                np.array([1, 2, 3], dtype="i8"), dtype=np.dtype("m8[Y]")
+            )
+
+    def test_mismatched_values_dtype_units(self):
+        arr = np.array([1, 2, 3], dtype="m8[s]")
+        dtype = np.dtype("m8[ns]")
+        msg = r"Values resolution does not match dtype"
+        depr_msg = "TimedeltaArray.__init__ is deprecated"
+
+        with tm.assert_produces_warning(FutureWarning, match=depr_msg):
+            with pytest.raises(ValueError, match=msg):
+                TimedeltaArray(arr, dtype=dtype)
+
+    def test_copy(self):
+        data = np.array([1, 2, 3], dtype="m8[ns]")
+        arr = TimedeltaArray._from_sequence(data, copy=False)
+        assert arr._ndarray is data
+
+        arr = TimedeltaArray._from_sequence(data, copy=True)
+        assert arr._ndarray is not data
+        assert arr._ndarray.base is not data
+
+    def test_from_sequence_dtype(self):
+        msg = "dtype 'object' is invalid, should be np.timedelta64 dtype"
+        with pytest.raises(ValueError, match=msg):
+            TimedeltaArray._from_sequence([], dtype=object)
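
The constructor tests above go through the private TimedeltaArray paths (the public __init__ is deprecated); for everyday use the same data is more naturally built through the public API. A sketch, not part of the diff:

    import numpy as np
    import pandas as pd

    # public entry points that end up backed by a TimedeltaArray
    tdi = pd.to_timedelta(["1D", "2D", "3D"])             # TimedeltaIndex
    arr = pd.array(np.array([1, 2, 3], dtype="m8[ns]"))   # timedelta extension array
    print(type(arr).__name__)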
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/test_cumulative.py
ADDED
@@ -0,0 +1,20 @@
+import pytest
+
+import pandas._testing as tm
+from pandas.core.arrays import TimedeltaArray
+
+
+class TestAccumulator:
+    def test_accumulators_disallowed(self):
+        # GH#50297
+        arr = TimedeltaArray._from_sequence(["1D", "2D"], dtype="m8[ns]")
+        with pytest.raises(TypeError, match="cumprod not supported"):
+            arr._accumulate("cumprod")
+
+    def test_cumsum(self, unit):
+        # GH#50297
+        dtype = f"m8[{unit}]"
+        arr = TimedeltaArray._from_sequence(["1D", "2D"], dtype=dtype)
+        result = arr._accumulate("cumsum")
+        expected = TimedeltaArray._from_sequence(["1D", "3D"], dtype=dtype)
+        tm.assert_timedelta_array_equal(result, expected)
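
What the accumulation tests above assert is also visible through the public Series interface; a short sketch, not part of the diff:

    import pandas as pd

    ser = pd.Series(pd.to_timedelta(["1D", "2D"]))
    print(ser.cumsum())  # 1 days, then 3 days
    # cumprod is not defined for timedelta data and raises TypeError,
    # matching the "cumprod not supported" check above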
llmeval-env/lib/python3.10/site-packages/pandas/tests/arrays/timedeltas/test_reductions.py
ADDED
@@ -0,0 +1,218 @@
+import numpy as np
+import pytest
+
+import pandas as pd
+from pandas import Timedelta
+import pandas._testing as tm
+from pandas.core import nanops
+from pandas.core.arrays import TimedeltaArray
+
+
+class TestReductions:
+    @pytest.mark.parametrize("name", ["std", "min", "max", "median", "mean"])
+    @pytest.mark.parametrize("skipna", [True, False])
+    def test_reductions_empty(self, name, skipna):
+        tdi = pd.TimedeltaIndex([])
+        arr = tdi.array
+
+        result = getattr(tdi, name)(skipna=skipna)
+        assert result is pd.NaT
+
+        result = getattr(arr, name)(skipna=skipna)
+        assert result is pd.NaT
+
+    @pytest.mark.parametrize("skipna", [True, False])
+    def test_sum_empty(self, skipna):
+        tdi = pd.TimedeltaIndex([])
+        arr = tdi.array
+
+        result = tdi.sum(skipna=skipna)
+        assert isinstance(result, Timedelta)
+        assert result == Timedelta(0)
+
+        result = arr.sum(skipna=skipna)
+        assert isinstance(result, Timedelta)
+        assert result == Timedelta(0)
+
+    def test_min_max(self, unit):
+        dtype = f"m8[{unit}]"
+        arr = TimedeltaArray._from_sequence(
+            ["3h", "3h", "NaT", "2h", "5h", "4h"], dtype=dtype
+        )
+
+        result = arr.min()
+        expected = Timedelta("2h")
+        assert result == expected
+
+        result = arr.max()
+        expected = Timedelta("5h")
+        assert result == expected
+
+        result = arr.min(skipna=False)
+        assert result is pd.NaT
+
+        result = arr.max(skipna=False)
+        assert result is pd.NaT
+
+    def test_sum(self):
+        tdi = pd.TimedeltaIndex(["3h", "3h", "NaT", "2h", "5h", "4h"])
+        arr = tdi.array
+
+        result = arr.sum(skipna=True)
+        expected = Timedelta(hours=17)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        result = tdi.sum(skipna=True)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        result = arr.sum(skipna=False)
+        assert result is pd.NaT
+
+        result = tdi.sum(skipna=False)
+        assert result is pd.NaT
+
+        result = arr.sum(min_count=9)
+        assert result is pd.NaT
+
+        result = tdi.sum(min_count=9)
+        assert result is pd.NaT
+
+        result = arr.sum(min_count=1)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        result = tdi.sum(min_count=1)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+    def test_npsum(self):
+        # GH#25282, GH#25335 np.sum should return a Timedelta, not timedelta64
+        tdi = pd.TimedeltaIndex(["3h", "3h", "2h", "5h", "4h"])
+        arr = tdi.array
+
+        result = np.sum(tdi)
+        expected = Timedelta(hours=17)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        result = np.sum(arr)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+    def test_sum_2d_skipna_false(self):
+        arr = np.arange(8).astype(np.int64).view("m8[s]").astype("m8[ns]").reshape(4, 2)
+        arr[-1, -1] = "Nat"
+
+        tda = TimedeltaArray._from_sequence(arr)
+
+        result = tda.sum(skipna=False)
+        assert result is pd.NaT
+
+        result = tda.sum(axis=0, skipna=False)
+        expected = pd.TimedeltaIndex([Timedelta(seconds=12), pd.NaT])._values
+        tm.assert_timedelta_array_equal(result, expected)
+
+        result = tda.sum(axis=1, skipna=False)
+        expected = pd.TimedeltaIndex(
+            [
+                Timedelta(seconds=1),
+                Timedelta(seconds=5),
+                Timedelta(seconds=9),
+                pd.NaT,
+            ]
+        )._values
+        tm.assert_timedelta_array_equal(result, expected)
+
+    # Adding a Timestamp makes this a test for DatetimeArray.std
+    @pytest.mark.parametrize(
+        "add",
+        [
+            Timedelta(0),
+            pd.Timestamp("2021-01-01"),
+            pd.Timestamp("2021-01-01", tz="UTC"),
+            pd.Timestamp("2021-01-01", tz="Asia/Tokyo"),
+        ],
+    )
+    def test_std(self, add):
+        tdi = pd.TimedeltaIndex(["0h", "4h", "NaT", "4h", "0h", "2h"]) + add
+        arr = tdi.array
+
+        result = arr.std(skipna=True)
+        expected = Timedelta(hours=2)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        result = tdi.std(skipna=True)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        if getattr(arr, "tz", None) is None:
+            result = nanops.nanstd(np.asarray(arr), skipna=True)
+            assert isinstance(result, np.timedelta64)
+            assert result == expected
+
+        result = arr.std(skipna=False)
+        assert result is pd.NaT
+
+        result = tdi.std(skipna=False)
+        assert result is pd.NaT
+
+        if getattr(arr, "tz", None) is None:
+            result = nanops.nanstd(np.asarray(arr), skipna=False)
+            assert isinstance(result, np.timedelta64)
+            assert np.isnat(result)
+
+    def test_median(self):
+        tdi = pd.TimedeltaIndex(["0h", "3h", "NaT", "5h06m", "0h", "2h"])
+        arr = tdi.array
+
+        result = arr.median(skipna=True)
+        expected = Timedelta(hours=2)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        result = tdi.median(skipna=True)
+        assert isinstance(result, Timedelta)
+        assert result == expected
+
+        result = arr.median(skipna=False)
+        assert result is pd.NaT
+
+        result = tdi.median(skipna=False)
+        assert result is pd.NaT
+
+    def test_mean(self):
+        tdi = pd.TimedeltaIndex(["0h", "3h", "NaT", "5h06m", "0h", "2h"])
+        arr = tdi._data
+
+        # manually verified result
+        expected = Timedelta(arr.dropna()._ndarray.mean())
+
+        result = arr.mean()
+        assert result == expected
+        result = arr.mean(skipna=False)
+        assert result is pd.NaT
+
+        result = arr.dropna().mean(skipna=False)
+        assert result == expected
+
+        result = arr.mean(axis=0)
+        assert result == expected
+
+    def test_mean_2d(self):
+        tdi = pd.timedelta_range("14 days", periods=6)
+        tda = tdi._data.reshape(3, 2)
+
+        result = tda.mean(axis=0)
+        expected = tda[1]
+        tm.assert_timedelta_array_equal(result, expected)
+
+        result = tda.mean(axis=1)
+        expected = tda[:, 0] + Timedelta(hours=12)
+        tm.assert_timedelta_array_equal(result, expected)
+
+        result = tda.mean(axis=None)
+        expected = tdi.mean()
+        assert result == expected
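
A short sketch of the skipna/min_count behaviour the reduction tests above pin down, not part of the diff:

    import pandas as pd

    tdi = pd.TimedeltaIndex(["3h", "3h", "NaT", "2h", "5h", "4h"])

    print(tdi.sum())              # 0 days 17:00:00 (NaT skipped by default)
    print(tdi.sum(skipna=False))  # NaT, because one element is missing
    print(tdi.sum(min_count=9))   # NaT, fewer than 9 valid values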