applied-ai-018 committed
Commit 651d7ec · verified · 1 Parent(s): fc0c20b

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc +0 -0
  2. env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc +0 -0
  3. env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/__init__.py +0 -0
  9. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/__init__.py +7 -0
  10. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_explode.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_values.cpython-310.pyc +0 -0
  12. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_add_prefix_suffix.py +41 -0
  13. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_align.py +249 -0
  14. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_argsort.py +84 -0
  15. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_asof.py +205 -0
  16. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_astype.py +683 -0
  17. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_autocorr.py +30 -0
  18. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_between.py +75 -0
  19. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_case_when.py +148 -0
  20. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_combine.py +17 -0
  21. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_compare.py +141 -0
  22. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_convert_dtypes.py +306 -0
  23. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_copy.py +91 -0
  24. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_count.py +34 -0
  25. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_describe.py +203 -0
  26. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_diff.py +88 -0
  27. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop_duplicates.py +267 -0
  28. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_dropna.py +117 -0
  29. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_dtypes.py +7 -0
  30. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_explode.py +175 -0
  31. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_fillna.py +1155 -0
  32. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_infer_objects.py +56 -0
  33. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_info.py +181 -0
  34. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_is_monotonic.py +26 -0
  35. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_isna.py +35 -0
  36. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_item.py +59 -0
  37. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_nlargest.py +248 -0
  38. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_nunique.py +24 -0
  39. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_pct_change.py +128 -0
  40. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_pop.py +13 -0
  41. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_quantile.py +247 -0
  42. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_rank.py +519 -0
  43. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_reindex_like.py +41 -0
  44. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_rename_axis.py +47 -0
  45. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_replace.py +813 -0
  46. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_reset_index.py +225 -0
  47. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_round.py +74 -0
  48. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_searchsorted.py +77 -0
  49. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_set_name.py +21 -0
  50. env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_size.py +22 -0
env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (183 Bytes).
env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc ADDED
Binary file (3.3 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc ADDED
Binary file (8.03 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc ADDED
Binary file (11.8 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc ADDED
Binary file (23.7 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc ADDED
Binary file (1.44 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc ADDED
Binary file (3.23 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/__init__.py ADDED
@@ -0,0 +1,7 @@
"""
Test files dedicated to individual (stand-alone) Series methods

Ideally these files/tests should correspond 1-to-1 with tests.frame.methods

These may also present opportunities for sharing/de-duplicating test code.
"""
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_explode.cpython-310.pyc ADDED
Binary file (5.66 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_values.cpython-310.pyc ADDED
Binary file (1.2 kB).
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_add_prefix_suffix.py ADDED
@@ -0,0 +1,41 @@
import pytest

from pandas import Index
import pandas._testing as tm


def test_add_prefix_suffix(string_series):
    with_prefix = string_series.add_prefix("foo#")
    expected = Index([f"foo#{c}" for c in string_series.index])
    tm.assert_index_equal(with_prefix.index, expected)

    with_suffix = string_series.add_suffix("#foo")
    expected = Index([f"{c}#foo" for c in string_series.index])
    tm.assert_index_equal(with_suffix.index, expected)

    with_pct_prefix = string_series.add_prefix("%")
    expected = Index([f"%{c}" for c in string_series.index])
    tm.assert_index_equal(with_pct_prefix.index, expected)

    with_pct_suffix = string_series.add_suffix("%")
    expected = Index([f"{c}%" for c in string_series.index])
    tm.assert_index_equal(with_pct_suffix.index, expected)


def test_add_prefix_suffix_axis(string_series):
    # GH 47819
    with_prefix = string_series.add_prefix("foo#", axis=0)
    expected = Index([f"foo#{c}" for c in string_series.index])
    tm.assert_index_equal(with_prefix.index, expected)

    with_pct_suffix = string_series.add_suffix("#foo", axis=0)
    expected = Index([f"{c}#foo" for c in string_series.index])
    tm.assert_index_equal(with_pct_suffix.index, expected)


def test_add_prefix_suffix_invalid_axis(string_series):
    with pytest.raises(ValueError, match="No axis named 1 for object type Series"):
        string_series.add_prefix("foo#", axis=1)

    with pytest.raises(ValueError, match="No axis named 1 for object type Series"):
        string_series.add_suffix("foo#", axis=1)
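For reference, a minimal stand-alone sketch (illustrative only, not part of the committed files) of the Series.add_prefix/add_suffix behavior these tests exercise; it assumes pandas 2.x, where the axis keyword is accepted:

import pandas as pd

ser = pd.Series([1, 2, 3], index=["a", "b", "c"])
print(ser.add_prefix("foo#").index.tolist())  # ['foo#a', 'foo#b', 'foo#c']
print(ser.add_suffix("#foo").index.tolist())  # ['a#foo', 'b#foo', 'c#foo']

# axis=0 (the index) is the only valid axis for a Series; axis=1 raises
try:
    ser.add_prefix("foo#", axis=1)
except ValueError as err:
    print(err)  # No axis named 1 for object type Series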
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_align.py ADDED
@@ -0,0 +1,249 @@
1
+ from datetime import timezone
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas as pd
7
+ from pandas import (
8
+ Series,
9
+ date_range,
10
+ period_range,
11
+ )
12
+ import pandas._testing as tm
13
+
14
+
15
+ @pytest.mark.parametrize(
16
+ "first_slice,second_slice",
17
+ [
18
+ [[2, None], [None, -5]],
19
+ [[None, 0], [None, -5]],
20
+ [[None, -5], [None, 0]],
21
+ [[None, 0], [None, 0]],
22
+ ],
23
+ )
24
+ @pytest.mark.parametrize("fill", [None, -1])
25
+ def test_align(datetime_series, first_slice, second_slice, join_type, fill):
26
+ a = datetime_series[slice(*first_slice)]
27
+ b = datetime_series[slice(*second_slice)]
28
+
29
+ aa, ab = a.align(b, join=join_type, fill_value=fill)
30
+
31
+ join_index = a.index.join(b.index, how=join_type)
32
+ if fill is not None:
33
+ diff_a = aa.index.difference(join_index)
34
+ diff_b = ab.index.difference(join_index)
35
+ if len(diff_a) > 0:
36
+ assert (aa.reindex(diff_a) == fill).all()
37
+ if len(diff_b) > 0:
38
+ assert (ab.reindex(diff_b) == fill).all()
39
+
40
+ ea = a.reindex(join_index)
41
+ eb = b.reindex(join_index)
42
+
43
+ if fill is not None:
44
+ ea = ea.fillna(fill)
45
+ eb = eb.fillna(fill)
46
+
47
+ tm.assert_series_equal(aa, ea)
48
+ tm.assert_series_equal(ab, eb)
49
+ assert aa.name == "ts"
50
+ assert ea.name == "ts"
51
+ assert ab.name == "ts"
52
+ assert eb.name == "ts"
53
+
54
+
55
+ @pytest.mark.parametrize(
56
+ "first_slice,second_slice",
57
+ [
58
+ [[2, None], [None, -5]],
59
+ [[None, 0], [None, -5]],
60
+ [[None, -5], [None, 0]],
61
+ [[None, 0], [None, 0]],
62
+ ],
63
+ )
64
+ @pytest.mark.parametrize("method", ["pad", "bfill"])
65
+ @pytest.mark.parametrize("limit", [None, 1])
66
+ def test_align_fill_method(
67
+ datetime_series, first_slice, second_slice, join_type, method, limit
68
+ ):
69
+ a = datetime_series[slice(*first_slice)]
70
+ b = datetime_series[slice(*second_slice)]
71
+
72
+ msg = (
73
+ "The 'method', 'limit', and 'fill_axis' keywords in Series.align "
74
+ "are deprecated"
75
+ )
76
+ with tm.assert_produces_warning(FutureWarning, match=msg):
77
+ aa, ab = a.align(b, join=join_type, method=method, limit=limit)
78
+
79
+ join_index = a.index.join(b.index, how=join_type)
80
+ ea = a.reindex(join_index)
81
+ eb = b.reindex(join_index)
82
+
83
+ msg2 = "Series.fillna with 'method' is deprecated"
84
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
85
+ ea = ea.fillna(method=method, limit=limit)
86
+ eb = eb.fillna(method=method, limit=limit)
87
+
88
+ tm.assert_series_equal(aa, ea)
89
+ tm.assert_series_equal(ab, eb)
90
+
91
+
92
+ def test_align_nocopy(datetime_series, using_copy_on_write):
93
+ b = datetime_series[:5].copy()
94
+
95
+ # do copy
96
+ a = datetime_series.copy()
97
+ ra, _ = a.align(b, join="left")
98
+ ra[:5] = 5
99
+ assert not (a[:5] == 5).any()
100
+
101
+ # do not copy
102
+ a = datetime_series.copy()
103
+ ra, _ = a.align(b, join="left", copy=False)
104
+ ra[:5] = 5
105
+ if using_copy_on_write:
106
+ assert not (a[:5] == 5).any()
107
+ else:
108
+ assert (a[:5] == 5).all()
109
+
110
+ # do copy
111
+ a = datetime_series.copy()
112
+ b = datetime_series[:5].copy()
113
+ _, rb = a.align(b, join="right")
114
+ rb[:3] = 5
115
+ assert not (b[:3] == 5).any()
116
+
117
+ # do not copy
118
+ a = datetime_series.copy()
119
+ b = datetime_series[:5].copy()
120
+ _, rb = a.align(b, join="right", copy=False)
121
+ rb[:2] = 5
122
+ if using_copy_on_write:
123
+ assert not (b[:2] == 5).any()
124
+ else:
125
+ assert (b[:2] == 5).all()
126
+
127
+
128
+ def test_align_same_index(datetime_series, using_copy_on_write):
129
+ a, b = datetime_series.align(datetime_series, copy=False)
130
+ if not using_copy_on_write:
131
+ assert a.index is datetime_series.index
132
+ assert b.index is datetime_series.index
133
+ else:
134
+ assert a.index.is_(datetime_series.index)
135
+ assert b.index.is_(datetime_series.index)
136
+
137
+ a, b = datetime_series.align(datetime_series, copy=True)
138
+ assert a.index is not datetime_series.index
139
+ assert b.index is not datetime_series.index
140
+ assert a.index.is_(datetime_series.index)
141
+ assert b.index.is_(datetime_series.index)
142
+
143
+
144
+ def test_align_multiindex():
145
+ # GH 10665
146
+
147
+ midx = pd.MultiIndex.from_product(
148
+ [range(2), range(3), range(2)], names=("a", "b", "c")
149
+ )
150
+ idx = pd.Index(range(2), name="b")
151
+ s1 = Series(np.arange(12, dtype="int64"), index=midx)
152
+ s2 = Series(np.arange(2, dtype="int64"), index=idx)
153
+
154
+ # these must be the same results (but flipped)
155
+ res1l, res1r = s1.align(s2, join="left")
156
+ res2l, res2r = s2.align(s1, join="right")
157
+
158
+ expl = s1
159
+ tm.assert_series_equal(expl, res1l)
160
+ tm.assert_series_equal(expl, res2r)
161
+ expr = Series([0, 0, 1, 1, np.nan, np.nan] * 2, index=midx)
162
+ tm.assert_series_equal(expr, res1r)
163
+ tm.assert_series_equal(expr, res2l)
164
+
165
+ res1l, res1r = s1.align(s2, join="right")
166
+ res2l, res2r = s2.align(s1, join="left")
167
+
168
+ exp_idx = pd.MultiIndex.from_product(
169
+ [range(2), range(2), range(2)], names=("a", "b", "c")
170
+ )
171
+ expl = Series([0, 1, 2, 3, 6, 7, 8, 9], index=exp_idx)
172
+ tm.assert_series_equal(expl, res1l)
173
+ tm.assert_series_equal(expl, res2r)
174
+ expr = Series([0, 0, 1, 1] * 2, index=exp_idx)
175
+ tm.assert_series_equal(expr, res1r)
176
+ tm.assert_series_equal(expr, res2l)
177
+
178
+
179
+ @pytest.mark.parametrize("method", ["backfill", "bfill", "pad", "ffill", None])
180
+ def test_align_with_dataframe_method(method):
181
+ # GH31788
182
+ ser = Series(range(3), index=range(3))
183
+ df = pd.DataFrame(0.0, index=range(3), columns=range(3))
184
+
185
+ msg = (
186
+ "The 'method', 'limit', and 'fill_axis' keywords in Series.align "
187
+ "are deprecated"
188
+ )
189
+ with tm.assert_produces_warning(FutureWarning, match=msg):
190
+ result_ser, result_df = ser.align(df, method=method)
191
+ tm.assert_series_equal(result_ser, ser)
192
+ tm.assert_frame_equal(result_df, df)
193
+
194
+
195
+ def test_align_dt64tzindex_mismatched_tzs():
196
+ idx1 = date_range("2001", periods=5, freq="h", tz="US/Eastern")
197
+ ser = Series(np.random.default_rng(2).standard_normal(len(idx1)), index=idx1)
198
+ ser_central = ser.tz_convert("US/Central")
199
+ # different timezones convert to UTC
200
+
201
+ new1, new2 = ser.align(ser_central)
202
+ assert new1.index.tz is timezone.utc
203
+ assert new2.index.tz is timezone.utc
204
+
205
+
206
+ def test_align_periodindex(join_type):
207
+ rng = period_range("1/1/2000", "1/1/2010", freq="Y")
208
+ ts = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng)
209
+
210
+ # TODO: assert something?
211
+ ts.align(ts[::2], join=join_type)
212
+
213
+
214
+ def test_align_left_fewer_levels():
215
+ # GH#45224
216
+ left = Series([2], index=pd.MultiIndex.from_tuples([(1, 3)], names=["a", "c"]))
217
+ right = Series(
218
+ [1], index=pd.MultiIndex.from_tuples([(1, 2, 3)], names=["a", "b", "c"])
219
+ )
220
+ result_left, result_right = left.align(right)
221
+
222
+ expected_right = Series(
223
+ [1], index=pd.MultiIndex.from_tuples([(1, 3, 2)], names=["a", "c", "b"])
224
+ )
225
+ expected_left = Series(
226
+ [2], index=pd.MultiIndex.from_tuples([(1, 3, 2)], names=["a", "c", "b"])
227
+ )
228
+ tm.assert_series_equal(result_left, expected_left)
229
+ tm.assert_series_equal(result_right, expected_right)
230
+
231
+
232
+ def test_align_left_different_named_levels():
233
+ # GH#45224
234
+ left = Series(
235
+ [2], index=pd.MultiIndex.from_tuples([(1, 4, 3)], names=["a", "d", "c"])
236
+ )
237
+ right = Series(
238
+ [1], index=pd.MultiIndex.from_tuples([(1, 2, 3)], names=["a", "b", "c"])
239
+ )
240
+ result_left, result_right = left.align(right)
241
+
242
+ expected_left = Series(
243
+ [2], index=pd.MultiIndex.from_tuples([(1, 4, 3, 2)], names=["a", "d", "c", "b"])
244
+ )
245
+ expected_right = Series(
246
+ [1], index=pd.MultiIndex.from_tuples([(1, 4, 3, 2)], names=["a", "d", "c", "b"])
247
+ )
248
+ tm.assert_series_equal(result_left, expected_left)
249
+ tm.assert_series_equal(result_right, expected_right)
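For context, an illustrative sketch (not part of this commit) of the basic Series.align contract checked above: both outputs are reindexed to the joined index, and fill_value replaces the positions that are missing after reindexing.

import pandas as pd

a = pd.Series([1, 2, 3], index=["a", "b", "c"])
b = pd.Series([10, 20], index=["b", "d"])

left, right = a.align(b, join="outer", fill_value=0)
# Union index ['a', 'b', 'c', 'd']:
#   left:  a=1, b=2, c=3, d=0
#   right: a=0, b=10, c=0, d=20
print(left.to_dict())
print(right.to_dict())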
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_argsort.py ADDED
@@ -0,0 +1,84 @@
import numpy as np
import pytest

from pandas import (
    Series,
    Timestamp,
    isna,
)
import pandas._testing as tm


class TestSeriesArgsort:
    def test_argsort_axis(self):
        # GH#54257
        ser = Series(range(3))

        msg = "No axis named 2 for object type Series"
        with pytest.raises(ValueError, match=msg):
            ser.argsort(axis=2)

    def test_argsort_numpy(self, datetime_series):
        ser = datetime_series

        res = np.argsort(ser).values
        expected = np.argsort(np.array(ser))
        tm.assert_numpy_array_equal(res, expected)

        # with missing values
        ts = ser.copy()
        ts[::2] = np.nan

        msg = "The behavior of Series.argsort in the presence of NA values"
        with tm.assert_produces_warning(
            FutureWarning, match=msg, check_stacklevel=False
        ):
            result = np.argsort(ts)[1::2]
        expected = np.argsort(np.array(ts.dropna()))

        tm.assert_numpy_array_equal(result.values, expected)

    def test_argsort(self, datetime_series):
        argsorted = datetime_series.argsort()
        assert issubclass(argsorted.dtype.type, np.integer)

    def test_argsort_dt64(self, unit):
        # GH#2967 (introduced bug in 0.11-dev I think)
        ser = Series(
            [Timestamp(f"201301{i:02d}") for i in range(1, 6)], dtype=f"M8[{unit}]"
        )
        assert ser.dtype == f"datetime64[{unit}]"
        shifted = ser.shift(-1)
        assert shifted.dtype == f"datetime64[{unit}]"
        assert isna(shifted[4])

        result = ser.argsort()
        expected = Series(range(5), dtype=np.intp)
        tm.assert_series_equal(result, expected)

        msg = "The behavior of Series.argsort in the presence of NA values"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = shifted.argsort()
        expected = Series(list(range(4)) + [-1], dtype=np.intp)
        tm.assert_series_equal(result, expected)

    def test_argsort_stable(self):
        ser = Series(np.random.default_rng(2).integers(0, 100, size=10000))
        mindexer = ser.argsort(kind="mergesort")
        qindexer = ser.argsort()

        mexpected = np.argsort(ser.values, kind="mergesort")
        qexpected = np.argsort(ser.values, kind="quicksort")

        tm.assert_series_equal(mindexer.astype(np.intp), Series(mexpected))
        tm.assert_series_equal(qindexer.astype(np.intp), Series(qexpected))
        msg = (
            r"ndarray Expected type <class 'numpy\.ndarray'>, "
            r"found <class 'pandas\.core\.series\.Series'> instead"
        )
        with pytest.raises(AssertionError, match=msg):
            tm.assert_numpy_array_equal(qindexer, mindexer)

    def test_argsort_preserve_name(self, datetime_series):
        result = datetime_series.argsort()
        assert result.name == datetime_series.name
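As a quick illustration (not part of the commit) of what Series.argsort returns: integer positions, aligned to the original index, such that taking them with .iloc yields a sorted Series.

import pandas as pd

ser = pd.Series([30, 10, 20], index=["x", "y", "z"])
indexer = ser.argsort()            # positions, not labels
print(indexer.tolist())            # [1, 2, 0]
print(ser.iloc[indexer].tolist())  # [10, 20, 30]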
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_asof.py ADDED
@@ -0,0 +1,205 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas._libs.tslibs import IncompatibleFrequency
5
+
6
+ from pandas import (
7
+ DatetimeIndex,
8
+ PeriodIndex,
9
+ Series,
10
+ Timestamp,
11
+ date_range,
12
+ isna,
13
+ notna,
14
+ offsets,
15
+ period_range,
16
+ )
17
+ import pandas._testing as tm
18
+
19
+
20
+ class TestSeriesAsof:
21
+ def test_asof_nanosecond_index_access(self):
22
+ ts = Timestamp("20130101").as_unit("ns")._value
23
+ dti = DatetimeIndex([ts + 50 + i for i in range(100)])
24
+ ser = Series(np.random.default_rng(2).standard_normal(100), index=dti)
25
+
26
+ first_value = ser.asof(ser.index[0])
27
+
28
+ # GH#46903 previously incorrectly was "day"
29
+ assert dti.resolution == "nanosecond"
30
+
31
+ # this used to not work bc parsing was done by dateutil that didn't
32
+ # handle nanoseconds
33
+ assert first_value == ser["2013-01-01 00:00:00.000000050"]
34
+
35
+ expected_ts = np.datetime64("2013-01-01 00:00:00.000000050", "ns")
36
+ assert first_value == ser[Timestamp(expected_ts)]
37
+
38
+ def test_basic(self):
39
+ # array or list or dates
40
+ N = 50
41
+ rng = date_range("1/1/1990", periods=N, freq="53s")
42
+ ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)
43
+ ts.iloc[15:30] = np.nan
44
+ dates = date_range("1/1/1990", periods=N * 3, freq="25s")
45
+
46
+ result = ts.asof(dates)
47
+ assert notna(result).all()
48
+ lb = ts.index[14]
49
+ ub = ts.index[30]
50
+
51
+ result = ts.asof(list(dates))
52
+ assert notna(result).all()
53
+ lb = ts.index[14]
54
+ ub = ts.index[30]
55
+
56
+ mask = (result.index >= lb) & (result.index < ub)
57
+ rs = result[mask]
58
+ assert (rs == ts[lb]).all()
59
+
60
+ val = result[result.index[result.index >= ub][0]]
61
+ assert ts[ub] == val
62
+
63
+ def test_scalar(self):
64
+ N = 30
65
+ rng = date_range("1/1/1990", periods=N, freq="53s")
66
+ # Explicit cast to float avoid implicit cast when setting nan
67
+ ts = Series(np.arange(N), index=rng, dtype="float")
68
+ ts.iloc[5:10] = np.nan
69
+ ts.iloc[15:20] = np.nan
70
+
71
+ val1 = ts.asof(ts.index[7])
72
+ val2 = ts.asof(ts.index[19])
73
+
74
+ assert val1 == ts.iloc[4]
75
+ assert val2 == ts.iloc[14]
76
+
77
+ # accepts strings
78
+ val1 = ts.asof(str(ts.index[7]))
79
+ assert val1 == ts.iloc[4]
80
+
81
+ # in there
82
+ result = ts.asof(ts.index[3])
83
+ assert result == ts.iloc[3]
84
+
85
+ # no as of value
86
+ d = ts.index[0] - offsets.BDay()
87
+ assert np.isnan(ts.asof(d))
88
+
89
+ def test_with_nan(self):
90
+ # basic asof test
91
+ rng = date_range("1/1/2000", "1/2/2000", freq="4h")
92
+ s = Series(np.arange(len(rng)), index=rng)
93
+ r = s.resample("2h").mean()
94
+
95
+ result = r.asof(r.index)
96
+ expected = Series(
97
+ [0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6.0],
98
+ index=date_range("1/1/2000", "1/2/2000", freq="2h"),
99
+ )
100
+ tm.assert_series_equal(result, expected)
101
+
102
+ r.iloc[3:5] = np.nan
103
+ result = r.asof(r.index)
104
+ expected = Series(
105
+ [0, 0, 1, 1, 1, 1, 3, 3, 4, 4, 5, 5, 6.0],
106
+ index=date_range("1/1/2000", "1/2/2000", freq="2h"),
107
+ )
108
+ tm.assert_series_equal(result, expected)
109
+
110
+ r.iloc[-3:] = np.nan
111
+ result = r.asof(r.index)
112
+ expected = Series(
113
+ [0, 0, 1, 1, 1, 1, 3, 3, 4, 4, 4, 4, 4.0],
114
+ index=date_range("1/1/2000", "1/2/2000", freq="2h"),
115
+ )
116
+ tm.assert_series_equal(result, expected)
117
+
118
+ def test_periodindex(self):
119
+ # array or list or dates
120
+ N = 50
121
+ rng = period_range("1/1/1990", periods=N, freq="h")
122
+ ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)
123
+ ts.iloc[15:30] = np.nan
124
+ dates = date_range("1/1/1990", periods=N * 3, freq="37min")
125
+
126
+ result = ts.asof(dates)
127
+ assert notna(result).all()
128
+ lb = ts.index[14]
129
+ ub = ts.index[30]
130
+
131
+ result = ts.asof(list(dates))
132
+ assert notna(result).all()
133
+ lb = ts.index[14]
134
+ ub = ts.index[30]
135
+
136
+ pix = PeriodIndex(result.index.values, freq="h")
137
+ mask = (pix >= lb) & (pix < ub)
138
+ rs = result[mask]
139
+ assert (rs == ts[lb]).all()
140
+
141
+ ts.iloc[5:10] = np.nan
142
+ ts.iloc[15:20] = np.nan
143
+
144
+ val1 = ts.asof(ts.index[7])
145
+ val2 = ts.asof(ts.index[19])
146
+
147
+ assert val1 == ts.iloc[4]
148
+ assert val2 == ts.iloc[14]
149
+
150
+ # accepts strings
151
+ val1 = ts.asof(str(ts.index[7]))
152
+ assert val1 == ts.iloc[4]
153
+
154
+ # in there
155
+ assert ts.asof(ts.index[3]) == ts.iloc[3]
156
+
157
+ # no as of value
158
+ d = ts.index[0].to_timestamp() - offsets.BDay()
159
+ assert isna(ts.asof(d))
160
+
161
+ # Mismatched freq
162
+ msg = "Input has different freq"
163
+ with pytest.raises(IncompatibleFrequency, match=msg):
164
+ ts.asof(rng.asfreq("D"))
165
+
166
+ def test_errors(self):
167
+ s = Series(
168
+ [1, 2, 3],
169
+ index=[Timestamp("20130101"), Timestamp("20130103"), Timestamp("20130102")],
170
+ )
171
+
172
+ # non-monotonic
173
+ assert not s.index.is_monotonic_increasing
174
+ with pytest.raises(ValueError, match="requires a sorted index"):
175
+ s.asof(s.index[0])
176
+
177
+ # subset with Series
178
+ N = 10
179
+ rng = date_range("1/1/1990", periods=N, freq="53s")
180
+ s = Series(np.random.default_rng(2).standard_normal(N), index=rng)
181
+ with pytest.raises(ValueError, match="not valid for Series"):
182
+ s.asof(s.index[0], subset="foo")
183
+
184
+ def test_all_nans(self):
185
+ # GH 15713
186
+ # series is all nans
187
+
188
+ # testing non-default indexes
189
+ N = 50
190
+ rng = date_range("1/1/1990", periods=N, freq="53s")
191
+
192
+ dates = date_range("1/1/1990", periods=N * 3, freq="25s")
193
+ result = Series(np.nan, index=rng).asof(dates)
194
+ expected = Series(np.nan, index=dates)
195
+ tm.assert_series_equal(result, expected)
196
+
197
+ # testing scalar input
198
+ date = date_range("1/1/1990", periods=N * 3, freq="25s")[0]
199
+ result = Series(np.nan, index=rng).asof(date)
200
+ assert isna(result)
201
+
202
+ # test name is propagated
203
+ result = Series(np.nan, index=[1, 2, 3, 4], name="test").asof([4, 5])
204
+ expected = Series(np.nan, index=[4, 5], name="test")
205
+ tm.assert_series_equal(result, expected)
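A minimal sketch (not part of the committed files) of the Series.asof semantics tested above: it returns the last non-NaN value at or before the requested label, and it requires a sorted index.

import numpy as np
import pandas as pd

idx = pd.date_range("2000-01-01", periods=5, freq="D")
ser = pd.Series([1.0, 2.0, np.nan, 4.0, 5.0], index=idx)

print(ser.asof(pd.Timestamp("2000-01-03")))  # 2.0 -- the NaN on Jan 3 is skipped
print(ser.asof(pd.Timestamp("1999-12-31")))  # nan -- nothing at or before this label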
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_astype.py ADDED
@@ -0,0 +1,683 @@
1
+ from datetime import (
2
+ datetime,
3
+ timedelta,
4
+ )
5
+ from importlib import reload
6
+ import string
7
+ import sys
8
+
9
+ import numpy as np
10
+ import pytest
11
+
12
+ from pandas._libs.tslibs import iNaT
13
+ import pandas.util._test_decorators as td
14
+
15
+ from pandas import (
16
+ NA,
17
+ Categorical,
18
+ CategoricalDtype,
19
+ DatetimeTZDtype,
20
+ Index,
21
+ Interval,
22
+ NaT,
23
+ Series,
24
+ Timedelta,
25
+ Timestamp,
26
+ cut,
27
+ date_range,
28
+ to_datetime,
29
+ )
30
+ import pandas._testing as tm
31
+
32
+
33
+ def rand_str(nchars: int) -> str:
34
+ """
35
+ Generate one random byte string.
36
+ """
37
+ RANDS_CHARS = np.array(
38
+ list(string.ascii_letters + string.digits), dtype=(np.str_, 1)
39
+ )
40
+ return "".join(np.random.default_rng(2).choice(RANDS_CHARS, nchars))
41
+
42
+
43
+ class TestAstypeAPI:
44
+ def test_astype_unitless_dt64_raises(self):
45
+ # GH#47844
46
+ ser = Series(["1970-01-01", "1970-01-01", "1970-01-01"], dtype="datetime64[ns]")
47
+ df = ser.to_frame()
48
+
49
+ msg = "Casting to unit-less dtype 'datetime64' is not supported"
50
+ with pytest.raises(TypeError, match=msg):
51
+ ser.astype(np.datetime64)
52
+ with pytest.raises(TypeError, match=msg):
53
+ df.astype(np.datetime64)
54
+ with pytest.raises(TypeError, match=msg):
55
+ ser.astype("datetime64")
56
+ with pytest.raises(TypeError, match=msg):
57
+ df.astype("datetime64")
58
+
59
+ def test_arg_for_errors_in_astype(self):
60
+ # see GH#14878
61
+ ser = Series([1, 2, 3])
62
+
63
+ msg = (
64
+ r"Expected value of kwarg 'errors' to be one of \['raise', "
65
+ r"'ignore'\]\. Supplied value is 'False'"
66
+ )
67
+ with pytest.raises(ValueError, match=msg):
68
+ ser.astype(np.float64, errors=False)
69
+
70
+ ser.astype(np.int8, errors="raise")
71
+
72
+ @pytest.mark.parametrize("dtype_class", [dict, Series])
73
+ def test_astype_dict_like(self, dtype_class):
74
+ # see GH#7271
75
+ ser = Series(range(0, 10, 2), name="abc")
76
+
77
+ dt1 = dtype_class({"abc": str})
78
+ result = ser.astype(dt1)
79
+ expected = Series(["0", "2", "4", "6", "8"], name="abc", dtype=object)
80
+ tm.assert_series_equal(result, expected)
81
+
82
+ dt2 = dtype_class({"abc": "float64"})
83
+ result = ser.astype(dt2)
84
+ expected = Series([0.0, 2.0, 4.0, 6.0, 8.0], dtype="float64", name="abc")
85
+ tm.assert_series_equal(result, expected)
86
+
87
+ dt3 = dtype_class({"abc": str, "def": str})
88
+ msg = (
89
+ "Only the Series name can be used for the key in Series dtype "
90
+ r"mappings\."
91
+ )
92
+ with pytest.raises(KeyError, match=msg):
93
+ ser.astype(dt3)
94
+
95
+ dt4 = dtype_class({0: str})
96
+ with pytest.raises(KeyError, match=msg):
97
+ ser.astype(dt4)
98
+
99
+ # GH#16717
100
+ # if dtypes provided is empty, it should error
101
+ if dtype_class is Series:
102
+ dt5 = dtype_class({}, dtype=object)
103
+ else:
104
+ dt5 = dtype_class({})
105
+
106
+ with pytest.raises(KeyError, match=msg):
107
+ ser.astype(dt5)
108
+
109
+
110
+ class TestAstype:
111
+ @pytest.mark.parametrize("tz", [None, "UTC", "US/Pacific"])
112
+ def test_astype_object_to_dt64_non_nano(self, tz):
113
+ # GH#55756, GH#54620
114
+ ts = Timestamp("2999-01-01")
115
+ dtype = "M8[us]"
116
+ if tz is not None:
117
+ dtype = f"M8[us, {tz}]"
118
+ vals = [ts, "2999-01-02 03:04:05.678910", 2500]
119
+ ser = Series(vals, dtype=object)
120
+ result = ser.astype(dtype)
121
+
122
+ # The 2500 is interpreted as microseconds, consistent with what
123
+ # we would get if we created DatetimeIndexes from vals[:2] and vals[2:]
124
+ # and concated the results.
125
+ pointwise = [
126
+ vals[0].tz_localize(tz),
127
+ Timestamp(vals[1], tz=tz),
128
+ to_datetime(vals[2], unit="us", utc=True).tz_convert(tz),
129
+ ]
130
+ exp_vals = [x.as_unit("us").asm8 for x in pointwise]
131
+ exp_arr = np.array(exp_vals, dtype="M8[us]")
132
+ expected = Series(exp_arr, dtype="M8[us]")
133
+ if tz is not None:
134
+ expected = expected.dt.tz_localize("UTC").dt.tz_convert(tz)
135
+ tm.assert_series_equal(result, expected)
136
+
137
+ def test_astype_mixed_object_to_dt64tz(self):
138
+ # pre-2.0 this raised ValueError bc of tz mismatch
139
+ # xref GH#32581
140
+ ts = Timestamp("2016-01-04 05:06:07", tz="US/Pacific")
141
+ ts2 = ts.tz_convert("Asia/Tokyo")
142
+
143
+ ser = Series([ts, ts2], dtype=object)
144
+ res = ser.astype("datetime64[ns, Europe/Brussels]")
145
+ expected = Series(
146
+ [ts.tz_convert("Europe/Brussels"), ts2.tz_convert("Europe/Brussels")],
147
+ dtype="datetime64[ns, Europe/Brussels]",
148
+ )
149
+ tm.assert_series_equal(res, expected)
150
+
151
+ @pytest.mark.parametrize("dtype", np.typecodes["All"])
152
+ def test_astype_empty_constructor_equality(self, dtype):
153
+ # see GH#15524
154
+
155
+ if dtype not in (
156
+ "S",
157
+ "V", # poor support (if any) currently
158
+ "M",
159
+ "m", # Generic timestamps raise a ValueError. Already tested.
160
+ ):
161
+ init_empty = Series([], dtype=dtype)
162
+ as_type_empty = Series([]).astype(dtype)
163
+ tm.assert_series_equal(init_empty, as_type_empty)
164
+
165
+ @pytest.mark.parametrize("dtype", [str, np.str_])
166
+ @pytest.mark.parametrize(
167
+ "series",
168
+ [
169
+ Series([string.digits * 10, rand_str(63), rand_str(64), rand_str(1000)]),
170
+ Series([string.digits * 10, rand_str(63), rand_str(64), np.nan, 1.0]),
171
+ ],
172
+ )
173
+ def test_astype_str_map(self, dtype, series, using_infer_string):
174
+ # see GH#4405
175
+ result = series.astype(dtype)
176
+ expected = series.map(str)
177
+ if using_infer_string:
178
+ expected = expected.astype(object)
179
+ tm.assert_series_equal(result, expected)
180
+
181
+ def test_astype_float_to_period(self):
182
+ result = Series([np.nan]).astype("period[D]")
183
+ expected = Series([NaT], dtype="period[D]")
184
+ tm.assert_series_equal(result, expected)
185
+
186
+ def test_astype_no_pandas_dtype(self):
187
+ # https://github.com/pandas-dev/pandas/pull/24866
188
+ ser = Series([1, 2], dtype="int64")
189
+ # Don't have NumpyEADtype in the public API, so we use `.array.dtype`,
190
+ # which is a NumpyEADtype.
191
+ result = ser.astype(ser.array.dtype)
192
+ tm.assert_series_equal(result, ser)
193
+
194
+ @pytest.mark.parametrize("dtype", [np.datetime64, np.timedelta64])
195
+ def test_astype_generic_timestamp_no_frequency(self, dtype, request):
196
+ # see GH#15524, GH#15987
197
+ data = [1]
198
+ ser = Series(data)
199
+
200
+ if np.dtype(dtype).name not in ["timedelta64", "datetime64"]:
201
+ mark = pytest.mark.xfail(reason="GH#33890 Is assigned ns unit")
202
+ request.applymarker(mark)
203
+
204
+ msg = (
205
+ rf"The '{dtype.__name__}' dtype has no unit\. "
206
+ rf"Please pass in '{dtype.__name__}\[ns\]' instead."
207
+ )
208
+ with pytest.raises(ValueError, match=msg):
209
+ ser.astype(dtype)
210
+
211
+ def test_astype_dt64_to_str(self):
212
+ # GH#10442 : testing astype(str) is correct for Series/DatetimeIndex
213
+ dti = date_range("2012-01-01", periods=3)
214
+ result = Series(dti).astype(str)
215
+ expected = Series(["2012-01-01", "2012-01-02", "2012-01-03"], dtype=object)
216
+ tm.assert_series_equal(result, expected)
217
+
218
+ def test_astype_dt64tz_to_str(self):
219
+ # GH#10442 : testing astype(str) is correct for Series/DatetimeIndex
220
+ dti_tz = date_range("2012-01-01", periods=3, tz="US/Eastern")
221
+ result = Series(dti_tz).astype(str)
222
+ expected = Series(
223
+ [
224
+ "2012-01-01 00:00:00-05:00",
225
+ "2012-01-02 00:00:00-05:00",
226
+ "2012-01-03 00:00:00-05:00",
227
+ ],
228
+ dtype=object,
229
+ )
230
+ tm.assert_series_equal(result, expected)
231
+
232
+ def test_astype_datetime(self, unit):
233
+ ser = Series(iNaT, dtype=f"M8[{unit}]", index=range(5))
234
+
235
+ ser = ser.astype("O")
236
+ assert ser.dtype == np.object_
237
+
238
+ ser = Series([datetime(2001, 1, 2, 0, 0)])
239
+
240
+ ser = ser.astype("O")
241
+ assert ser.dtype == np.object_
242
+
243
+ ser = Series(
244
+ [datetime(2001, 1, 2, 0, 0) for i in range(3)], dtype=f"M8[{unit}]"
245
+ )
246
+
247
+ ser[1] = np.nan
248
+ assert ser.dtype == f"M8[{unit}]"
249
+
250
+ ser = ser.astype("O")
251
+ assert ser.dtype == np.object_
252
+
253
+ def test_astype_datetime64tz(self):
254
+ ser = Series(date_range("20130101", periods=3, tz="US/Eastern"))
255
+
256
+ # astype
257
+ result = ser.astype(object)
258
+ expected = Series(ser.astype(object), dtype=object)
259
+ tm.assert_series_equal(result, expected)
260
+
261
+ result = Series(ser.values).dt.tz_localize("UTC").dt.tz_convert(ser.dt.tz)
262
+ tm.assert_series_equal(result, ser)
263
+
264
+ # astype - object, preserves on construction
265
+ result = Series(ser.astype(object))
266
+ expected = ser.astype(object)
267
+ tm.assert_series_equal(result, expected)
268
+
269
+ # astype - datetime64[ns, tz]
270
+ msg = "Cannot use .astype to convert from timezone-naive"
271
+ with pytest.raises(TypeError, match=msg):
272
+ # dt64->dt64tz astype deprecated
273
+ Series(ser.values).astype("datetime64[ns, US/Eastern]")
274
+
275
+ with pytest.raises(TypeError, match=msg):
276
+ # dt64->dt64tz astype deprecated
277
+ Series(ser.values).astype(ser.dtype)
278
+
279
+ result = ser.astype("datetime64[ns, CET]")
280
+ expected = Series(date_range("20130101 06:00:00", periods=3, tz="CET"))
281
+ tm.assert_series_equal(result, expected)
282
+
283
+ def test_astype_str_cast_dt64(self):
284
+ # see GH#9757
285
+ ts = Series([Timestamp("2010-01-04 00:00:00")])
286
+ res = ts.astype(str)
287
+
288
+ expected = Series(["2010-01-04"], dtype=object)
289
+ tm.assert_series_equal(res, expected)
290
+
291
+ ts = Series([Timestamp("2010-01-04 00:00:00", tz="US/Eastern")])
292
+ res = ts.astype(str)
293
+
294
+ expected = Series(["2010-01-04 00:00:00-05:00"], dtype=object)
295
+ tm.assert_series_equal(res, expected)
296
+
297
+ def test_astype_str_cast_td64(self):
298
+ # see GH#9757
299
+
300
+ td = Series([Timedelta(1, unit="d")])
301
+ ser = td.astype(str)
302
+
303
+ expected = Series(["1 days"], dtype=object)
304
+ tm.assert_series_equal(ser, expected)
305
+
306
+ def test_dt64_series_astype_object(self):
307
+ dt64ser = Series(date_range("20130101", periods=3))
308
+ result = dt64ser.astype(object)
309
+ assert isinstance(result.iloc[0], datetime)
310
+ assert result.dtype == np.object_
311
+
312
+ def test_td64_series_astype_object(self):
313
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="timedelta64[ns]")
314
+ result = tdser.astype(object)
315
+ assert isinstance(result.iloc[0], timedelta)
316
+ assert result.dtype == np.object_
317
+
318
+ @pytest.mark.parametrize(
319
+ "data, dtype",
320
+ [
321
+ (["x", "y", "z"], "string[python]"),
322
+ pytest.param(
323
+ ["x", "y", "z"],
324
+ "string[pyarrow]",
325
+ marks=td.skip_if_no("pyarrow"),
326
+ ),
327
+ (["x", "y", "z"], "category"),
328
+ (3 * [Timestamp("2020-01-01", tz="UTC")], None),
329
+ (3 * [Interval(0, 1)], None),
330
+ ],
331
+ )
332
+ @pytest.mark.parametrize("errors", ["raise", "ignore"])
333
+ def test_astype_ignores_errors_for_extension_dtypes(self, data, dtype, errors):
334
+ # https://github.com/pandas-dev/pandas/issues/35471
335
+ ser = Series(data, dtype=dtype)
336
+ if errors == "ignore":
337
+ expected = ser
338
+ result = ser.astype(float, errors="ignore")
339
+ tm.assert_series_equal(result, expected)
340
+ else:
341
+ msg = "(Cannot cast)|(could not convert)"
342
+ with pytest.raises((ValueError, TypeError), match=msg):
343
+ ser.astype(float, errors=errors)
344
+
345
+ @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64])
346
+ def test_astype_from_float_to_str(self, dtype):
347
+ # https://github.com/pandas-dev/pandas/issues/36451
348
+ ser = Series([0.1], dtype=dtype)
349
+ result = ser.astype(str)
350
+ expected = Series(["0.1"], dtype=object)
351
+ tm.assert_series_equal(result, expected)
352
+
353
+ @pytest.mark.parametrize(
354
+ "value, string_value",
355
+ [
356
+ (None, "None"),
357
+ (np.nan, "nan"),
358
+ (NA, "<NA>"),
359
+ ],
360
+ )
361
+ def test_astype_to_str_preserves_na(self, value, string_value):
362
+ # https://github.com/pandas-dev/pandas/issues/36904
363
+ ser = Series(["a", "b", value], dtype=object)
364
+ result = ser.astype(str)
365
+ expected = Series(["a", "b", string_value], dtype=object)
366
+ tm.assert_series_equal(result, expected)
367
+
368
+ @pytest.mark.parametrize("dtype", ["float32", "float64", "int64", "int32"])
369
+ def test_astype(self, dtype):
370
+ ser = Series(np.random.default_rng(2).standard_normal(5), name="foo")
371
+ as_typed = ser.astype(dtype)
372
+
373
+ assert as_typed.dtype == dtype
374
+ assert as_typed.name == ser.name
375
+
376
+ @pytest.mark.parametrize("value", [np.nan, np.inf])
377
+ @pytest.mark.parametrize("dtype", [np.int32, np.int64])
378
+ def test_astype_cast_nan_inf_int(self, dtype, value):
379
+ # gh-14265: check NaN and inf raise error when converting to int
380
+ msg = "Cannot convert non-finite values \\(NA or inf\\) to integer"
381
+ ser = Series([value])
382
+
383
+ with pytest.raises(ValueError, match=msg):
384
+ ser.astype(dtype)
385
+
386
+ @pytest.mark.parametrize("dtype", [int, np.int8, np.int64])
387
+ def test_astype_cast_object_int_fail(self, dtype):
388
+ arr = Series(["car", "house", "tree", "1"])
389
+ msg = r"invalid literal for int\(\) with base 10: 'car'"
390
+ with pytest.raises(ValueError, match=msg):
391
+ arr.astype(dtype)
392
+
393
+ def test_astype_float_to_uint_negatives_raise(
394
+ self, float_numpy_dtype, any_unsigned_int_numpy_dtype
395
+ ):
396
+ # GH#45151 We don't cast negative numbers to nonsense values
397
+ # TODO: same for EA float/uint dtypes, signed integers?
398
+ arr = np.arange(5).astype(float_numpy_dtype) - 3 # includes negatives
399
+ ser = Series(arr)
400
+
401
+ msg = "Cannot losslessly cast from .* to .*"
402
+ with pytest.raises(ValueError, match=msg):
403
+ ser.astype(any_unsigned_int_numpy_dtype)
404
+
405
+ with pytest.raises(ValueError, match=msg):
406
+ ser.to_frame().astype(any_unsigned_int_numpy_dtype)
407
+
408
+ with pytest.raises(ValueError, match=msg):
409
+ # We currently catch and re-raise in Index.astype
410
+ Index(ser).astype(any_unsigned_int_numpy_dtype)
411
+
412
+ with pytest.raises(ValueError, match=msg):
413
+ ser.array.astype(any_unsigned_int_numpy_dtype)
414
+
415
+ def test_astype_cast_object_int(self):
416
+ arr = Series(["1", "2", "3", "4"], dtype=object)
417
+ result = arr.astype(int)
418
+
419
+ tm.assert_series_equal(result, Series(np.arange(1, 5)))
420
+
421
+ def test_astype_unicode(self, using_infer_string):
422
+ # see GH#7758: A bit of magic is required to set
423
+ # default encoding to utf-8
424
+ digits = string.digits
425
+ test_series = [
426
+ Series([digits * 10, rand_str(63), rand_str(64), rand_str(1000)]),
427
+ Series(["データーサイエンス、お前はもう死んでいる"]),
428
+ ]
429
+
430
+ former_encoding = None
431
+
432
+ if sys.getdefaultencoding() == "utf-8":
433
+ # GH#45326 as of 2.0 Series.astype matches Index.astype by handling
434
+ # bytes with obj.decode() instead of str(obj)
435
+ item = "野菜食べないとやばい"
436
+ ser = Series([item.encode()])
437
+ result = ser.astype(np.str_)
438
+ expected = Series([item], dtype=object)
439
+ tm.assert_series_equal(result, expected)
440
+
441
+ for ser in test_series:
442
+ res = ser.astype(np.str_)
443
+ expec = ser.map(str)
444
+ if using_infer_string:
445
+ expec = expec.astype(object)
446
+ tm.assert_series_equal(res, expec)
447
+
448
+ # Restore the former encoding
449
+ if former_encoding is not None and former_encoding != "utf-8":
450
+ reload(sys)
451
+ sys.setdefaultencoding(former_encoding)
452
+
453
+ def test_astype_bytes(self):
454
+ # GH#39474
455
+ result = Series(["foo", "bar", "baz"]).astype(bytes)
456
+ assert result.dtypes == np.dtype("S3")
457
+
458
+ def test_astype_nan_to_bool(self):
459
+ # GH#43018
460
+ ser = Series(np.nan, dtype="object")
461
+ result = ser.astype("bool")
462
+ expected = Series(True, dtype="bool")
463
+ tm.assert_series_equal(result, expected)
464
+
465
+ @pytest.mark.parametrize(
466
+ "dtype",
467
+ tm.ALL_INT_EA_DTYPES + tm.FLOAT_EA_DTYPES,
468
+ )
469
+ def test_astype_ea_to_datetimetzdtype(self, dtype):
470
+ # GH37553
471
+ ser = Series([4, 0, 9], dtype=dtype)
472
+ result = ser.astype(DatetimeTZDtype(tz="US/Pacific"))
473
+
474
+ expected = Series(
475
+ {
476
+ 0: Timestamp("1969-12-31 16:00:00.000000004-08:00", tz="US/Pacific"),
477
+ 1: Timestamp("1969-12-31 16:00:00.000000000-08:00", tz="US/Pacific"),
478
+ 2: Timestamp("1969-12-31 16:00:00.000000009-08:00", tz="US/Pacific"),
479
+ }
480
+ )
481
+
482
+ tm.assert_series_equal(result, expected)
483
+
484
+ def test_astype_retain_attrs(self, any_numpy_dtype):
485
+ # GH#44414
486
+ ser = Series([0, 1, 2, 3])
487
+ ser.attrs["Location"] = "Michigan"
488
+
489
+ result = ser.astype(any_numpy_dtype).attrs
490
+ expected = ser.attrs
491
+
492
+ tm.assert_dict_equal(expected, result)
493
+
494
+
495
+ class TestAstypeString:
496
+ @pytest.mark.parametrize(
497
+ "data, dtype",
498
+ [
499
+ ([True, NA], "boolean"),
500
+ (["A", NA], "category"),
501
+ (["2020-10-10", "2020-10-10"], "datetime64[ns]"),
502
+ (["2020-10-10", "2020-10-10", NaT], "datetime64[ns]"),
503
+ (
504
+ ["2012-01-01 00:00:00-05:00", NaT],
505
+ "datetime64[ns, US/Eastern]",
506
+ ),
507
+ ([1, None], "UInt16"),
508
+ (["1/1/2021", "2/1/2021"], "period[M]"),
509
+ (["1/1/2021", "2/1/2021", NaT], "period[M]"),
510
+ (["1 Day", "59 Days", NaT], "timedelta64[ns]"),
511
+ # currently no way to parse IntervalArray from a list of strings
512
+ ],
513
+ )
514
+ def test_astype_string_to_extension_dtype_roundtrip(
515
+ self, data, dtype, request, nullable_string_dtype
516
+ ):
517
+ if dtype == "boolean":
518
+ mark = pytest.mark.xfail(
519
+ reason="TODO StringArray.astype() with missing values #GH40566"
520
+ )
521
+ request.applymarker(mark)
522
+ # GH-40351
523
+ ser = Series(data, dtype=dtype)
524
+
525
+ # Note: just passing .astype(dtype) fails for dtype="category"
526
+ # with bc ser.dtype.categories will be object dtype whereas
527
+ # result.dtype.categories will have string dtype
528
+ result = ser.astype(nullable_string_dtype).astype(ser.dtype)
529
+ tm.assert_series_equal(result, ser)
530
+
531
+
532
+ class TestAstypeCategorical:
533
+ def test_astype_categorical_to_other(self):
534
+ cat = Categorical([f"{i} - {i + 499}" for i in range(0, 10000, 500)])
535
+ ser = Series(np.random.default_rng(2).integers(0, 10000, 100)).sort_values()
536
+ ser = cut(ser, range(0, 10500, 500), right=False, labels=cat)
537
+
538
+ expected = ser
539
+ tm.assert_series_equal(ser.astype("category"), expected)
540
+ tm.assert_series_equal(ser.astype(CategoricalDtype()), expected)
541
+ msg = r"Cannot cast object|string dtype to float64"
542
+ with pytest.raises(ValueError, match=msg):
543
+ ser.astype("float64")
544
+
545
+ cat = Series(Categorical(["a", "b", "b", "a", "a", "c", "c", "c"]))
546
+ exp = Series(["a", "b", "b", "a", "a", "c", "c", "c"], dtype=object)
547
+ tm.assert_series_equal(cat.astype("str"), exp)
548
+ s2 = Series(Categorical(["1", "2", "3", "4"]))
549
+ exp2 = Series([1, 2, 3, 4]).astype("int")
550
+ tm.assert_series_equal(s2.astype("int"), exp2)
551
+
552
+ # object don't sort correctly, so just compare that we have the same
553
+ # values
554
+ def cmp(a, b):
555
+ tm.assert_almost_equal(np.sort(np.unique(a)), np.sort(np.unique(b)))
556
+
557
+ expected = Series(np.array(ser.values), name="value_group")
558
+ cmp(ser.astype("object"), expected)
559
+ cmp(ser.astype(np.object_), expected)
560
+
561
+ # array conversion
562
+ tm.assert_almost_equal(np.array(ser), np.array(ser.values))
563
+
564
+ tm.assert_series_equal(ser.astype("category"), ser)
565
+ tm.assert_series_equal(ser.astype(CategoricalDtype()), ser)
566
+
567
+ roundtrip_expected = ser.cat.set_categories(
568
+ ser.cat.categories.sort_values()
569
+ ).cat.remove_unused_categories()
570
+ result = ser.astype("object").astype("category")
571
+ tm.assert_series_equal(result, roundtrip_expected)
572
+ result = ser.astype("object").astype(CategoricalDtype())
573
+ tm.assert_series_equal(result, roundtrip_expected)
574
+
575
+ def test_astype_categorical_invalid_conversions(self):
576
+ # invalid conversion (these are NOT a dtype)
577
+ cat = Categorical([f"{i} - {i + 499}" for i in range(0, 10000, 500)])
578
+ ser = Series(np.random.default_rng(2).integers(0, 10000, 100)).sort_values()
579
+ ser = cut(ser, range(0, 10500, 500), right=False, labels=cat)
580
+
581
+ msg = (
582
+ "dtype '<class 'pandas.core.arrays.categorical.Categorical'>' "
583
+ "not understood"
584
+ )
585
+ with pytest.raises(TypeError, match=msg):
586
+ ser.astype(Categorical)
587
+ with pytest.raises(TypeError, match=msg):
588
+ ser.astype("object").astype(Categorical)
589
+
590
+ def test_astype_categoricaldtype(self):
591
+ ser = Series(["a", "b", "a"])
592
+ result = ser.astype(CategoricalDtype(["a", "b"], ordered=True))
593
+ expected = Series(Categorical(["a", "b", "a"], ordered=True))
594
+ tm.assert_series_equal(result, expected)
595
+
596
+ result = ser.astype(CategoricalDtype(["a", "b"], ordered=False))
597
+ expected = Series(Categorical(["a", "b", "a"], ordered=False))
598
+ tm.assert_series_equal(result, expected)
599
+
600
+ result = ser.astype(CategoricalDtype(["a", "b", "c"], ordered=False))
601
+ expected = Series(
602
+ Categorical(["a", "b", "a"], categories=["a", "b", "c"], ordered=False)
603
+ )
604
+ tm.assert_series_equal(result, expected)
605
+ tm.assert_index_equal(result.cat.categories, Index(["a", "b", "c"]))
606
+
607
+ @pytest.mark.parametrize("name", [None, "foo"])
608
+ @pytest.mark.parametrize("dtype_ordered", [True, False])
609
+ @pytest.mark.parametrize("series_ordered", [True, False])
610
+ def test_astype_categorical_to_categorical(
611
+ self, name, dtype_ordered, series_ordered
612
+ ):
613
+ # GH#10696, GH#18593
614
+ s_data = list("abcaacbab")
615
+ s_dtype = CategoricalDtype(list("bac"), ordered=series_ordered)
616
+ ser = Series(s_data, dtype=s_dtype, name=name)
617
+
618
+ # unspecified categories
619
+ dtype = CategoricalDtype(ordered=dtype_ordered)
620
+ result = ser.astype(dtype)
621
+ exp_dtype = CategoricalDtype(s_dtype.categories, dtype_ordered)
622
+ expected = Series(s_data, name=name, dtype=exp_dtype)
623
+ tm.assert_series_equal(result, expected)
624
+
625
+ # different categories
626
+ dtype = CategoricalDtype(list("adc"), dtype_ordered)
627
+ result = ser.astype(dtype)
628
+ expected = Series(s_data, name=name, dtype=dtype)
629
+ tm.assert_series_equal(result, expected)
630
+
631
+ if dtype_ordered is False:
632
+ # not specifying ordered, so only test once
633
+ expected = ser
634
+ result = ser.astype("category")
635
+ tm.assert_series_equal(result, expected)
636
+
637
+ def test_astype_bool_missing_to_categorical(self):
638
+ # GH-19182
639
+ ser = Series([True, False, np.nan])
640
+ assert ser.dtypes == np.object_
641
+
642
+ result = ser.astype(CategoricalDtype(categories=[True, False]))
643
+ expected = Series(Categorical([True, False, np.nan], categories=[True, False]))
644
+ tm.assert_series_equal(result, expected)
645
+
646
+ def test_astype_categories_raises(self):
647
+ # deprecated GH#17636, removed in GH#27141
648
+ ser = Series(["a", "b", "a"])
649
+ with pytest.raises(TypeError, match="got an unexpected"):
650
+ ser.astype("category", categories=["a", "b"], ordered=True)
651
+
652
+ @pytest.mark.parametrize("items", [["a", "b", "c", "a"], [1, 2, 3, 1]])
653
+ def test_astype_from_categorical(self, items):
654
+ ser = Series(items)
655
+ exp = Series(Categorical(items))
656
+ res = ser.astype("category")
657
+ tm.assert_series_equal(res, exp)
658
+
659
+ def test_astype_from_categorical_with_keywords(self):
660
+ # with keywords
661
+ lst = ["a", "b", "c", "a"]
662
+ ser = Series(lst)
663
+ exp = Series(Categorical(lst, ordered=True))
664
+ res = ser.astype(CategoricalDtype(None, ordered=True))
665
+ tm.assert_series_equal(res, exp)
666
+
667
+ exp = Series(Categorical(lst, categories=list("abcdef"), ordered=True))
668
+ res = ser.astype(CategoricalDtype(list("abcdef"), ordered=True))
669
+ tm.assert_series_equal(res, exp)
670
+
671
+ def test_astype_timedelta64_with_np_nan(self):
672
+ # GH45798
673
+ result = Series([Timedelta(1), np.nan], dtype="timedelta64[ns]")
674
+ expected = Series([Timedelta(1), NaT], dtype="timedelta64[ns]")
675
+ tm.assert_series_equal(result, expected)
676
+
677
+ @td.skip_if_no("pyarrow")
678
+ def test_astype_int_na_string(self):
679
+ # GH#57418
680
+ ser = Series([12, NA], dtype="Int64[pyarrow]")
681
+ result = ser.astype("string[pyarrow]")
682
+ expected = Series(["12", NA], dtype="string[pyarrow]")
683
+ tm.assert_series_equal(result, expected)
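For orientation, an illustrative sketch (not part of this commit) of the core Series.astype behaviors the file above exercises:

import numpy as np
import pandas as pd

ser = pd.Series(["1", "2", "3"], dtype=object)
print(ser.astype("int64").tolist())  # [1, 2, 3]
print(ser.astype("category").dtype)  # category

# Non-finite floats cannot be cast to an integer dtype
try:
    pd.Series([1.5, np.nan]).astype("int64")
except ValueError as err:
    print(err)  # Cannot convert non-finite values (NA or inf) to integer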
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_autocorr.py ADDED
@@ -0,0 +1,30 @@
import numpy as np


class TestAutoCorr:
    def test_autocorr(self, datetime_series):
        # Just run the function
        corr1 = datetime_series.autocorr()

        # Now run it with the lag parameter
        corr2 = datetime_series.autocorr(lag=1)

        # corr() with lag needs Series of at least length 2
        if len(datetime_series) <= 2:
            assert np.isnan(corr1)
            assert np.isnan(corr2)
        else:
            assert corr1 == corr2

        # Choose a random lag between 1 and length of Series - 2
        # and compare the result with the Series corr() function
        n = 1 + np.random.default_rng(2).integers(max(1, len(datetime_series) - 2))
        corr1 = datetime_series.corr(datetime_series.shift(n))
        corr2 = datetime_series.autocorr(lag=n)

        # corr() with lag needs Series of at least length 2
        if len(datetime_series) <= 2:
            assert np.isnan(corr1)
            assert np.isnan(corr2)
        else:
            assert corr1 == corr2
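A short sketch (not part of the commit) of the equivalence the autocorr test relies on: Series.autocorr(lag=n) is the Pearson correlation of the Series with itself shifted by n.

import pandas as pd

ser = pd.Series([0.25, 0.5, 0.2, -0.05])
print(ser.autocorr(lag=1))     # correlation with the series shifted by 1
print(ser.corr(ser.shift(1)))  # the same computation spelled out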
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_between.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ bdate_range,
7
+ date_range,
8
+ period_range,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ class TestBetween:
14
+ def test_between(self):
15
+ series = Series(date_range("1/1/2000", periods=10))
16
+ left, right = series[[2, 7]]
17
+
18
+ result = series.between(left, right)
19
+ expected = (series >= left) & (series <= right)
20
+ tm.assert_series_equal(result, expected)
21
+
22
+ def test_between_datetime_object_dtype(self):
23
+ ser = Series(bdate_range("1/1/2000", periods=20), dtype=object)
24
+ ser[::2] = np.nan
25
+
26
+ result = ser[ser.between(ser[3], ser[17])]
27
+ expected = ser[3:18].dropna()
28
+ tm.assert_series_equal(result, expected)
29
+
30
+ result = ser[ser.between(ser[3], ser[17], inclusive="neither")]
31
+ expected = ser[5:16].dropna()
32
+ tm.assert_series_equal(result, expected)
33
+
34
+ def test_between_period_values(self):
35
+ ser = Series(period_range("2000-01-01", periods=10, freq="D"))
36
+ left, right = ser[[2, 7]]
37
+ result = ser.between(left, right)
38
+ expected = (ser >= left) & (ser <= right)
39
+ tm.assert_series_equal(result, expected)
40
+
41
+ def test_between_inclusive_string(self):
42
+ # GH 40628
43
+ series = Series(date_range("1/1/2000", periods=10))
44
+ left, right = series[[2, 7]]
45
+
46
+ result = series.between(left, right, inclusive="both")
47
+ expected = (series >= left) & (series <= right)
48
+ tm.assert_series_equal(result, expected)
49
+
50
+ result = series.between(left, right, inclusive="left")
51
+ expected = (series >= left) & (series < right)
52
+ tm.assert_series_equal(result, expected)
53
+
54
+ result = series.between(left, right, inclusive="right")
55
+ expected = (series > left) & (series <= right)
56
+ tm.assert_series_equal(result, expected)
57
+
58
+ result = series.between(left, right, inclusive="neither")
59
+ expected = (series > left) & (series < right)
60
+ tm.assert_series_equal(result, expected)
61
+
62
+ @pytest.mark.parametrize("inclusive", ["yes", True, False])
63
+ def test_between_error_args(self, inclusive):
64
+ # GH 40628
65
+ series = Series(date_range("1/1/2000", periods=10))
66
+ left, right = series[[2, 7]]
67
+
68
+ value_error_msg = (
69
+ "Inclusive has to be either string of 'both',"
70
+ "'left', 'right', or 'neither'."
71
+ )
72
+
73
+ with pytest.raises(ValueError, match=value_error_msg):
74
+ series = Series(date_range("1/1/2000", periods=10))
75
+ series.between(left, right, inclusive=inclusive)
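
(A small usage sketch, illustrative only, of the inclusive keyword covered by the tests above.)

    import pandas as pd

    s = pd.Series([1, 2, 3, 4, 5])
    print(s.between(2, 4))                       # inclusive="both" is the default
    print(s.between(2, 4, inclusive="neither"))  # strict comparison on both bounds
    # any value other than "both"/"left"/"right"/"neither" raises ValueError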
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_case_when.py ADDED
@@ -0,0 +1,148 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DataFrame,
6
+ Series,
7
+ array as pd_array,
8
+ date_range,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ @pytest.fixture
14
+ def df():
15
+ """
16
+ base dataframe for testing
17
+ """
18
+ return DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
19
+
20
+
21
+ def test_case_when_caselist_is_not_a_list(df):
22
+ """
23
+ Raise TypeError if caselist is not a list.
24
+ """
25
+ msg = "The caselist argument should be a list; "
26
+ msg += "instead got.+"
27
+ with pytest.raises(TypeError, match=msg): # GH39154
28
+ df["a"].case_when(caselist=())
29
+
30
+
31
+ def test_case_when_no_caselist(df):
32
+ """
33
+ Raise ValueError if no caselist is provided.
34
+ """
35
+ msg = "provide at least one boolean condition, "
36
+ msg += "with a corresponding replacement."
37
+ with pytest.raises(ValueError, match=msg): # GH39154
38
+ df["a"].case_when([])
39
+
40
+
41
+ def test_case_when_odd_caselist(df):
42
+ """
43
+ Raise ValueError if a caselist entry does not have length 2 (condition, replacement).
44
+ """
45
+ msg = "Argument 0 must have length 2; "
46
+ msg += "a condition and replacement; instead got length 3."
47
+
48
+ with pytest.raises(ValueError, match=msg):
49
+ df["a"].case_when([(df["a"].eq(1), 1, df.a.gt(1))])
50
+
51
+
52
+ def test_case_when_raise_error_from_mask(df):
53
+ """
54
+ Raise Error from within Series.mask
55
+ """
56
+ msg = "Failed to apply condition0 and replacement0."
57
+ with pytest.raises(ValueError, match=msg):
58
+ df["a"].case_when([(df["a"].eq(1), [1, 2])])
59
+
60
+
61
+ def test_case_when_single_condition(df):
62
+ """
63
+ Test output on a single condition.
64
+ """
65
+ result = Series([np.nan, np.nan, np.nan]).case_when([(df.a.eq(1), 1)])
66
+ expected = Series([1, np.nan, np.nan])
67
+ tm.assert_series_equal(result, expected)
68
+
69
+
70
+ def test_case_when_multiple_conditions(df):
71
+ """
72
+ Test output when booleans are derived from a computation
73
+ """
74
+ result = Series([np.nan, np.nan, np.nan]).case_when(
75
+ [(df.a.eq(1), 1), (Series([False, True, False]), 2)]
76
+ )
77
+ expected = Series([1, 2, np.nan])
78
+ tm.assert_series_equal(result, expected)
79
+
80
+
81
+ def test_case_when_multiple_conditions_replacement_list(df):
82
+ """
83
+ Test output when replacement is a list
84
+ """
85
+ result = Series([np.nan, np.nan, np.nan]).case_when(
86
+ [([True, False, False], 1), (df["a"].gt(1) & df["b"].eq(5), [1, 2, 3])]
87
+ )
88
+ expected = Series([1, 2, np.nan])
89
+ tm.assert_series_equal(result, expected)
90
+
91
+
92
+ def test_case_when_multiple_conditions_replacement_extension_dtype(df):
93
+ """
94
+ Test output when replacement has an extension dtype
95
+ """
96
+ result = Series([np.nan, np.nan, np.nan]).case_when(
97
+ [
98
+ ([True, False, False], 1),
99
+ (df["a"].gt(1) & df["b"].eq(5), pd_array([1, 2, 3], dtype="Int64")),
100
+ ],
101
+ )
102
+ expected = Series([1, 2, np.nan], dtype="Float64")
103
+ tm.assert_series_equal(result, expected)
104
+
105
+
106
+ def test_case_when_multiple_conditions_replacement_series(df):
107
+ """
108
+ Test output when replacement is a Series
109
+ """
110
+ result = Series([np.nan, np.nan, np.nan]).case_when(
111
+ [
112
+ (np.array([True, False, False]), 1),
113
+ (df["a"].gt(1) & df["b"].eq(5), Series([1, 2, 3])),
114
+ ],
115
+ )
116
+ expected = Series([1, 2, np.nan])
117
+ tm.assert_series_equal(result, expected)
118
+
119
+
120
+ def test_case_when_non_range_index():
121
+ """
122
+ Test output if index is not RangeIndex
123
+ """
124
+ rng = np.random.default_rng(seed=123)
125
+ dates = date_range("1/1/2000", periods=8)
126
+ df = DataFrame(
127
+ rng.standard_normal(size=(8, 4)), index=dates, columns=["A", "B", "C", "D"]
128
+ )
129
+ result = Series(5, index=df.index, name="A").case_when([(df.A.gt(0), df.B)])
130
+ expected = df.A.mask(df.A.gt(0), df.B).where(df.A.gt(0), 5)
131
+ tm.assert_series_equal(result, expected)
132
+
133
+
134
+ def test_case_when_callable():
135
+ """
136
+ Test output on a callable
137
+ """
138
+ # https://numpy.org/doc/stable/reference/generated/numpy.piecewise.html
139
+ x = np.linspace(-2.5, 2.5, 6)
140
+ ser = Series(x)
141
+ result = ser.case_when(
142
+ caselist=[
143
+ (lambda df: df < 0, lambda df: -df),
144
+ (lambda df: df >= 0, lambda df: df),
145
+ ]
146
+ )
147
+ expected = np.piecewise(x, [x < 0, x >= 0], [lambda x: -x, lambda x: x])
148
+ tm.assert_series_equal(result, Series(expected))
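
(For context, not part of the diff: Series.case_when was added in pandas 2.2. It takes a list of (condition, replacement) pairs, applies them with earlier conditions taking precedence, and leaves unmatched positions at the caller's original values.)

    import pandas as pd

    s = pd.Series([6, 7, 8, 9], dtype="object")
    out = s.case_when(
        caselist=[
            (s.gt(8), "high"),    # first matching condition wins
            (s.gt(6), "medium"),
        ]
    )
    print(out)  # [6, "medium", "medium", "high"]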
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_combine.py ADDED
@@ -0,0 +1,17 @@
+ from pandas import Series
+ import pandas._testing as tm
+
+
+ class TestCombine:
+     def test_combine_scalar(self):
+         # GH#21248
+         # Note - combine() with another Series is tested elsewhere because
+         # it is used when testing operators
+         ser = Series([i * 10 for i in range(5)])
+         result = ser.combine(3, lambda x, y: x + y)
+         expected = Series([i * 10 + 3 for i in range(5)])
+         tm.assert_series_equal(result, expected)
+
+         result = ser.combine(22, lambda x, y: min(x, y))
+         expected = Series([min(i * 10, 22) for i in range(5)])
+         tm.assert_series_equal(result, expected)
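
(Illustrative sketch, not from the commit: Series.combine with a scalar passes that scalar as the second argument to the function for every element.)

    import pandas as pd

    ser = pd.Series([0, 10, 20, 30])
    print(ser.combine(22, min))                # element-wise min against 22
    print(ser.combine(3, lambda x, y: x + y))  # add 3 to every element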
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_compare.py ADDED
@@ -0,0 +1,141 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+
7
+
8
+ @pytest.mark.parametrize("align_axis", [0, 1, "index", "columns"])
9
+ def test_compare_axis(align_axis):
10
+ # GH#30429
11
+ s1 = pd.Series(["a", "b", "c"])
12
+ s2 = pd.Series(["x", "b", "z"])
13
+
14
+ result = s1.compare(s2, align_axis=align_axis)
15
+
16
+ if align_axis in (1, "columns"):
17
+ indices = pd.Index([0, 2])
18
+ columns = pd.Index(["self", "other"])
19
+ expected = pd.DataFrame(
20
+ [["a", "x"], ["c", "z"]], index=indices, columns=columns
21
+ )
22
+ tm.assert_frame_equal(result, expected)
23
+ else:
24
+ indices = pd.MultiIndex.from_product([[0, 2], ["self", "other"]])
25
+ expected = pd.Series(["a", "x", "c", "z"], index=indices)
26
+ tm.assert_series_equal(result, expected)
27
+
28
+
29
+ @pytest.mark.parametrize(
30
+ "keep_shape, keep_equal",
31
+ [
32
+ (True, False),
33
+ (False, True),
34
+ (True, True),
35
+ # False, False case is already covered in test_compare_axis
36
+ ],
37
+ )
38
+ def test_compare_various_formats(keep_shape, keep_equal):
39
+ s1 = pd.Series(["a", "b", "c"])
40
+ s2 = pd.Series(["x", "b", "z"])
41
+
42
+ result = s1.compare(s2, keep_shape=keep_shape, keep_equal=keep_equal)
43
+
44
+ if keep_shape:
45
+ indices = pd.Index([0, 1, 2])
46
+ columns = pd.Index(["self", "other"])
47
+ if keep_equal:
48
+ expected = pd.DataFrame(
49
+ [["a", "x"], ["b", "b"], ["c", "z"]], index=indices, columns=columns
50
+ )
51
+ else:
52
+ expected = pd.DataFrame(
53
+ [["a", "x"], [np.nan, np.nan], ["c", "z"]],
54
+ index=indices,
55
+ columns=columns,
56
+ )
57
+ else:
58
+ indices = pd.Index([0, 2])
59
+ columns = pd.Index(["self", "other"])
60
+ expected = pd.DataFrame(
61
+ [["a", "x"], ["c", "z"]], index=indices, columns=columns
62
+ )
63
+ tm.assert_frame_equal(result, expected)
64
+
65
+
66
+ def test_compare_with_equal_nulls():
67
+ # We want to make sure two NaNs are considered the same
68
+ # and dropped where applicable
69
+ s1 = pd.Series(["a", "b", np.nan])
70
+ s2 = pd.Series(["x", "b", np.nan])
71
+
72
+ result = s1.compare(s2)
73
+ expected = pd.DataFrame([["a", "x"]], columns=["self", "other"])
74
+ tm.assert_frame_equal(result, expected)
75
+
76
+
77
+ def test_compare_with_non_equal_nulls():
78
+ # We want to make sure the relevant NaNs do not get dropped
79
+ s1 = pd.Series(["a", "b", "c"])
80
+ s2 = pd.Series(["x", "b", np.nan])
81
+
82
+ result = s1.compare(s2, align_axis=0)
83
+
84
+ indices = pd.MultiIndex.from_product([[0, 2], ["self", "other"]])
85
+ expected = pd.Series(["a", "x", "c", np.nan], index=indices)
86
+ tm.assert_series_equal(result, expected)
87
+
88
+
89
+ def test_compare_multi_index():
90
+ index = pd.MultiIndex.from_arrays([[0, 0, 1], [0, 1, 2]])
91
+ s1 = pd.Series(["a", "b", "c"], index=index)
92
+ s2 = pd.Series(["x", "b", "z"], index=index)
93
+
94
+ result = s1.compare(s2, align_axis=0)
95
+
96
+ indices = pd.MultiIndex.from_arrays(
97
+ [[0, 0, 1, 1], [0, 0, 2, 2], ["self", "other", "self", "other"]]
98
+ )
99
+ expected = pd.Series(["a", "x", "c", "z"], index=indices)
100
+ tm.assert_series_equal(result, expected)
101
+
102
+
103
+ def test_compare_unaligned_objects():
104
+ # test Series with different indices
105
+ msg = "Can only compare identically-labeled Series objects"
106
+ with pytest.raises(ValueError, match=msg):
107
+ ser1 = pd.Series([1, 2, 3], index=["a", "b", "c"])
108
+ ser2 = pd.Series([1, 2, 3], index=["a", "b", "d"])
109
+ ser1.compare(ser2)
110
+
111
+ # test Series with different lengths
112
+ msg = "Can only compare identically-labeled Series objects"
113
+ with pytest.raises(ValueError, match=msg):
114
+ ser1 = pd.Series([1, 2, 3])
115
+ ser2 = pd.Series([1, 2, 3, 4])
116
+ ser1.compare(ser2)
117
+
118
+
119
+ def test_compare_datetime64_and_string():
120
+ # Issue https://github.com/pandas-dev/pandas/issues/45506
121
+ # Catch OverflowError when comparing datetime64 and string
122
+ data = [
123
+ {"a": "2015-07-01", "b": "08335394550"},
124
+ {"a": "2015-07-02", "b": "+49 (0) 0345 300033"},
125
+ {"a": "2015-07-03", "b": "+49(0)2598 04457"},
126
+ {"a": "2015-07-04", "b": "0741470003"},
127
+ {"a": "2015-07-05", "b": "04181 83668"},
128
+ ]
129
+ dtypes = {"a": "datetime64[ns]", "b": "string"}
130
+ df = pd.DataFrame(data=data).astype(dtypes)
131
+
132
+ result_eq1 = df["a"].eq(df["b"])
133
+ result_eq2 = df["a"] == df["b"]
134
+ result_neq = df["a"] != df["b"]
135
+
136
+ expected_eq = pd.Series([False] * 5) # For .eq and ==
137
+ expected_neq = pd.Series([True] * 5) # For !=
138
+
139
+ tm.assert_series_equal(result_eq1, expected_eq)
140
+ tm.assert_series_equal(result_eq2, expected_eq)
141
+ tm.assert_series_equal(result_neq, expected_neq)
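
(For reference, not part of the diff: the output shapes Series.compare can produce, matching the cases parametrized above.)

    import pandas as pd

    s1 = pd.Series(["a", "b", "c"])
    s2 = pd.Series(["x", "b", "z"])
    print(s1.compare(s2))                # DataFrame with "self"/"other" columns, equal rows dropped
    print(s1.compare(s2, align_axis=0))  # stacked Series with a (position, self/other) MultiIndex
    print(s1.compare(s2, keep_shape=True, keep_equal=True))  # keep every row and the equal values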
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_convert_dtypes.py ADDED
@@ -0,0 +1,306 @@
1
+ from itertools import product
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas._libs import lib
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+
11
+ # Each test case consists of a tuple with the data and dtype to create the
12
+ # test Series, the default dtype for the expected result (which is valid
13
+ # for most cases), and the specific cases where the result deviates from
14
+ # this default. Those overrides are defined as a dict with (keyword, val) as
15
+ # dictionary key. In case of multiple items, the last override takes precedence.
16
+
17
+
18
+ @pytest.fixture(
19
+ params=[
20
+ (
21
+ # data
22
+ [1, 2, 3],
23
+ # original dtype
24
+ np.dtype("int32"),
25
+ # default expected dtype
26
+ "Int32",
27
+ # exceptions on expected dtype
28
+ {("convert_integer", False): np.dtype("int32")},
29
+ ),
30
+ (
31
+ [1, 2, 3],
32
+ np.dtype("int64"),
33
+ "Int64",
34
+ {("convert_integer", False): np.dtype("int64")},
35
+ ),
36
+ (
37
+ ["x", "y", "z"],
38
+ np.dtype("O"),
39
+ pd.StringDtype(),
40
+ {("convert_string", False): np.dtype("O")},
41
+ ),
42
+ (
43
+ [True, False, np.nan],
44
+ np.dtype("O"),
45
+ pd.BooleanDtype(),
46
+ {("convert_boolean", False): np.dtype("O")},
47
+ ),
48
+ (
49
+ ["h", "i", np.nan],
50
+ np.dtype("O"),
51
+ pd.StringDtype(),
52
+ {("convert_string", False): np.dtype("O")},
53
+ ),
54
+ ( # GH32117
55
+ ["h", "i", 1],
56
+ np.dtype("O"),
57
+ np.dtype("O"),
58
+ {},
59
+ ),
60
+ (
61
+ [10, np.nan, 20],
62
+ np.dtype("float"),
63
+ "Int64",
64
+ {
65
+ ("convert_integer", False, "convert_floating", True): "Float64",
66
+ ("convert_integer", False, "convert_floating", False): np.dtype(
67
+ "float"
68
+ ),
69
+ },
70
+ ),
71
+ (
72
+ [np.nan, 100.5, 200],
73
+ np.dtype("float"),
74
+ "Float64",
75
+ {("convert_floating", False): np.dtype("float")},
76
+ ),
77
+ (
78
+ [3, 4, 5],
79
+ "Int8",
80
+ "Int8",
81
+ {},
82
+ ),
83
+ (
84
+ [[1, 2], [3, 4], [5]],
85
+ None,
86
+ np.dtype("O"),
87
+ {},
88
+ ),
89
+ (
90
+ [4, 5, 6],
91
+ np.dtype("uint32"),
92
+ "UInt32",
93
+ {("convert_integer", False): np.dtype("uint32")},
94
+ ),
95
+ (
96
+ [-10, 12, 13],
97
+ np.dtype("i1"),
98
+ "Int8",
99
+ {("convert_integer", False): np.dtype("i1")},
100
+ ),
101
+ (
102
+ [1.2, 1.3],
103
+ np.dtype("float32"),
104
+ "Float32",
105
+ {("convert_floating", False): np.dtype("float32")},
106
+ ),
107
+ (
108
+ [1, 2.0],
109
+ object,
110
+ "Int64",
111
+ {
112
+ ("convert_integer", False): "Float64",
113
+ ("convert_integer", False, "convert_floating", False): np.dtype(
114
+ "float"
115
+ ),
116
+ ("infer_objects", False): np.dtype("object"),
117
+ },
118
+ ),
119
+ (
120
+ [1, 2.5],
121
+ object,
122
+ "Float64",
123
+ {
124
+ ("convert_floating", False): np.dtype("float"),
125
+ ("infer_objects", False): np.dtype("object"),
126
+ },
127
+ ),
128
+ (["a", "b"], pd.CategoricalDtype(), pd.CategoricalDtype(), {}),
129
+ (
130
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("s"),
131
+ pd.DatetimeTZDtype(tz="UTC"),
132
+ pd.DatetimeTZDtype(tz="UTC"),
133
+ {},
134
+ ),
135
+ (
136
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ms"),
137
+ pd.DatetimeTZDtype(tz="UTC"),
138
+ pd.DatetimeTZDtype(tz="UTC"),
139
+ {},
140
+ ),
141
+ (
142
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("us"),
143
+ pd.DatetimeTZDtype(tz="UTC"),
144
+ pd.DatetimeTZDtype(tz="UTC"),
145
+ {},
146
+ ),
147
+ (
148
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ns"),
149
+ pd.DatetimeTZDtype(tz="UTC"),
150
+ pd.DatetimeTZDtype(tz="UTC"),
151
+ {},
152
+ ),
153
+ (
154
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ns"),
155
+ "datetime64[ns]",
156
+ np.dtype("datetime64[ns]"),
157
+ {},
158
+ ),
159
+ (
160
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ns"),
161
+ object,
162
+ np.dtype("datetime64[ns]"),
163
+ {("infer_objects", False): np.dtype("object")},
164
+ ),
165
+ (
166
+ pd.period_range("1/1/2011", freq="M", periods=3),
167
+ None,
168
+ pd.PeriodDtype("M"),
169
+ {},
170
+ ),
171
+ (
172
+ pd.arrays.IntervalArray([pd.Interval(0, 1), pd.Interval(1, 5)]),
173
+ None,
174
+ pd.IntervalDtype("int64", "right"),
175
+ {},
176
+ ),
177
+ ]
178
+ )
179
+ def test_cases(request):
180
+ return request.param
181
+
182
+
183
+ class TestSeriesConvertDtypes:
184
+ @pytest.mark.parametrize("params", product(*[(True, False)] * 5))
185
+ def test_convert_dtypes(
186
+ self,
187
+ test_cases,
188
+ params,
189
+ using_infer_string,
190
+ ):
191
+ data, maindtype, expected_default, expected_other = test_cases
192
+ if (
193
+ hasattr(data, "dtype")
194
+ and lib.is_np_dtype(data.dtype, "M")
195
+ and isinstance(maindtype, pd.DatetimeTZDtype)
196
+ ):
197
+ # this astype is deprecated in favor of tz_localize
198
+ msg = "Cannot use .astype to convert from timezone-naive dtype"
199
+ with pytest.raises(TypeError, match=msg):
200
+ pd.Series(data, dtype=maindtype)
201
+ return
202
+
203
+ if maindtype is not None:
204
+ series = pd.Series(data, dtype=maindtype)
205
+ else:
206
+ series = pd.Series(data)
207
+
208
+ result = series.convert_dtypes(*params)
209
+
210
+ param_names = [
211
+ "infer_objects",
212
+ "convert_string",
213
+ "convert_integer",
214
+ "convert_boolean",
215
+ "convert_floating",
216
+ ]
217
+ params_dict = dict(zip(param_names, params))
218
+
219
+ expected_dtype = expected_default
220
+ for spec, dtype in expected_other.items():
221
+ if all(params_dict[key] is val for key, val in zip(spec[::2], spec[1::2])):
222
+ expected_dtype = dtype
223
+ if (
224
+ using_infer_string
225
+ and expected_default == "string"
226
+ and expected_dtype == object
227
+ and params[0]
228
+ and not params[1]
229
+ ):
230
+ # If we would convert with convert strings then infer_objects converts
231
+ # with the option
232
+ expected_dtype = "string[pyarrow_numpy]"
233
+
234
+ expected = pd.Series(data, dtype=expected_dtype)
235
+ tm.assert_series_equal(result, expected)
236
+
237
+ # Test that it is a copy
238
+ copy = series.copy(deep=True)
239
+
240
+ if result.notna().sum() > 0 and result.dtype in ["interval[int64, right]"]:
241
+ with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
242
+ result[result.notna()] = np.nan
243
+ else:
244
+ result[result.notna()] = np.nan
245
+
246
+ # Make sure original not changed
247
+ tm.assert_series_equal(series, copy)
248
+
249
+ def test_convert_string_dtype(self, nullable_string_dtype):
250
+ # https://github.com/pandas-dev/pandas/issues/31731 -> converting columns
251
+ # that are already string dtype
252
+ df = pd.DataFrame(
253
+ {"A": ["a", "b", pd.NA], "B": ["ä", "ö", "ü"]}, dtype=nullable_string_dtype
254
+ )
255
+ result = df.convert_dtypes()
256
+ tm.assert_frame_equal(df, result)
257
+
258
+ def test_convert_bool_dtype(self):
259
+ # GH32287
260
+ df = pd.DataFrame({"A": pd.array([True])})
261
+ tm.assert_frame_equal(df, df.convert_dtypes())
262
+
263
+ def test_convert_byte_string_dtype(self):
264
+ # GH-43183
265
+ byte_str = b"binary-string"
266
+
267
+ df = pd.DataFrame(data={"A": byte_str}, index=[0])
268
+ result = df.convert_dtypes()
269
+ expected = df
270
+ tm.assert_frame_equal(result, expected)
271
+
272
+ @pytest.mark.parametrize(
273
+ "infer_objects, dtype", [(True, "Int64"), (False, "object")]
274
+ )
275
+ def test_convert_dtype_object_with_na(self, infer_objects, dtype):
276
+ # GH#48791
277
+ ser = pd.Series([1, pd.NA])
278
+ result = ser.convert_dtypes(infer_objects=infer_objects)
279
+ expected = pd.Series([1, pd.NA], dtype=dtype)
280
+ tm.assert_series_equal(result, expected)
281
+
282
+ @pytest.mark.parametrize(
283
+ "infer_objects, dtype", [(True, "Float64"), (False, "object")]
284
+ )
285
+ def test_convert_dtype_object_with_na_float(self, infer_objects, dtype):
286
+ # GH#48791
287
+ ser = pd.Series([1.5, pd.NA])
288
+ result = ser.convert_dtypes(infer_objects=infer_objects)
289
+ expected = pd.Series([1.5, pd.NA], dtype=dtype)
290
+ tm.assert_series_equal(result, expected)
291
+
292
+ def test_convert_dtypes_pyarrow_to_np_nullable(self):
293
+ # GH 53648
294
+ pytest.importorskip("pyarrow")
295
+ ser = pd.Series(range(2), dtype="int32[pyarrow]")
296
+ result = ser.convert_dtypes(dtype_backend="numpy_nullable")
297
+ expected = pd.Series(range(2), dtype="Int32")
298
+ tm.assert_series_equal(result, expected)
299
+
300
+ def test_convert_dtypes_pyarrow_null(self):
301
+ # GH#55346
302
+ pa = pytest.importorskip("pyarrow")
303
+ ser = pd.Series([None, None])
304
+ result = ser.convert_dtypes(dtype_backend="pyarrow")
305
+ expected = pd.Series([None, None], dtype=pd.ArrowDtype(pa.null()))
306
+ tm.assert_series_equal(result, expected)
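
(Illustrative sketch, not part of the commit: what convert_dtypes does in the simplest cases from the fixture above.)

    import numpy as np
    import pandas as pd

    print(pd.Series([1, 2, 3], dtype="int64").convert_dtypes().dtype)  # Int64 (nullable)
    print(pd.Series([10, np.nan, 20]).convert_dtypes().dtype)          # Int64, the floats are integral
    print(pd.Series([10, np.nan, 20]).convert_dtypes(convert_integer=False).dtype)  # Float64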
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_copy.py ADDED
@@ -0,0 +1,91 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ Timestamp,
7
+ )
8
+ import pandas._testing as tm
9
+
10
+
11
+ class TestCopy:
12
+ @pytest.mark.parametrize("deep", ["default", None, False, True])
13
+ def test_copy(self, deep, using_copy_on_write, warn_copy_on_write):
14
+ ser = Series(np.arange(10), dtype="float64")
15
+
16
+ # default deep is True
17
+ if deep == "default":
18
+ ser2 = ser.copy()
19
+ else:
20
+ ser2 = ser.copy(deep=deep)
21
+
22
+ if using_copy_on_write:
23
+ # INFO(CoW) a shallow copy doesn't yet copy the data
24
+ # but parent will not be modified (CoW)
25
+ if deep is None or deep is False:
26
+ assert np.may_share_memory(ser.values, ser2.values)
27
+ else:
28
+ assert not np.may_share_memory(ser.values, ser2.values)
29
+
30
+ with tm.assert_cow_warning(warn_copy_on_write and deep is False):
31
+ ser2[::2] = np.nan
32
+
33
+ if deep is not False or using_copy_on_write:
34
+ # Did not modify original Series
35
+ assert np.isnan(ser2[0])
36
+ assert not np.isnan(ser[0])
37
+ else:
38
+ # we DID modify the original Series
39
+ assert np.isnan(ser2[0])
40
+ assert np.isnan(ser[0])
41
+
42
+ @pytest.mark.filterwarnings("ignore:Setting a value on a view:FutureWarning")
43
+ @pytest.mark.parametrize("deep", ["default", None, False, True])
44
+ def test_copy_tzaware(self, deep, using_copy_on_write):
45
+ # GH#11794
46
+ # copy of tz-aware
47
+ expected = Series([Timestamp("2012/01/01", tz="UTC")])
48
+ expected2 = Series([Timestamp("1999/01/01", tz="UTC")])
49
+
50
+ ser = Series([Timestamp("2012/01/01", tz="UTC")])
51
+
52
+ if deep == "default":
53
+ ser2 = ser.copy()
54
+ else:
55
+ ser2 = ser.copy(deep=deep)
56
+
57
+ if using_copy_on_write:
58
+ # INFO(CoW) a shallow copy doesn't yet copy the data
59
+ # but parent will not be modified (CoW)
60
+ if deep is None or deep is False:
61
+ assert np.may_share_memory(ser.values, ser2.values)
62
+ else:
63
+ assert not np.may_share_memory(ser.values, ser2.values)
64
+
65
+ ser2[0] = Timestamp("1999/01/01", tz="UTC")
66
+
67
+ # default deep is True
68
+ if deep is not False or using_copy_on_write:
69
+ # Did not modify original Series
70
+ tm.assert_series_equal(ser2, expected2)
71
+ tm.assert_series_equal(ser, expected)
72
+ else:
73
+ # we DID modify the original Series
74
+ tm.assert_series_equal(ser2, expected2)
75
+ tm.assert_series_equal(ser, expected2)
76
+
77
+ def test_copy_name(self, datetime_series):
78
+ result = datetime_series.copy()
79
+ assert result.name == datetime_series.name
80
+
81
+ def test_copy_index_name_checking(self, datetime_series):
82
+ # don't want to be able to modify the index stored elsewhere after
83
+ # making a copy
84
+
85
+ datetime_series.index.name = None
86
+ assert datetime_series.index.name is None
87
+ assert datetime_series is datetime_series
88
+
89
+ cp = datetime_series.copy()
90
+ cp.index.name = "foo"
91
+ assert datetime_series.index.name is None
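
(Small aside, illustrative only: the default Series.copy() is deep, so writing into the copy never touches the original; the shallow and copy-on-write branches above only matter for copy(deep=False).)

    import pandas as pd

    ser = pd.Series([1.0, 2.0, 3.0])
    ser2 = ser.copy()    # deep=True is the default
    ser2.iloc[0] = 99.0
    print(ser.iloc[0])   # 1.0 -- the original is unchanged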
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_count.py ADDED
@@ -0,0 +1,34 @@
+ import numpy as np
+
+ import pandas as pd
+ from pandas import (
+     Categorical,
+     Series,
+ )
+ import pandas._testing as tm
+
+
+ class TestSeriesCount:
+     def test_count(self, datetime_series):
+         assert datetime_series.count() == len(datetime_series)
+
+         datetime_series[::2] = np.nan
+
+         assert datetime_series.count() == np.isfinite(datetime_series).sum()
+
+     def test_count_inf_as_na(self):
+         # GH#29478
+         ser = Series([pd.Timestamp("1990/1/1")])
+         msg = "use_inf_as_na option is deprecated"
+         with tm.assert_produces_warning(FutureWarning, match=msg):
+             with pd.option_context("use_inf_as_na", True):
+                 assert ser.count() == 1
+
+     def test_count_categorical(self):
+         ser = Series(
+             Categorical(
+                 [np.nan, 1, 2, np.nan], categories=[5, 4, 3, 2, 1], ordered=True
+             )
+         )
+         result = ser.count()
+         assert result == 2
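
(For orientation, not from the diff: Series.count() is simply the number of non-missing entries, which is why the categorical case above reports 2.)

    import numpy as np
    import pandas as pd

    print(pd.Series([1.0, np.nan, 3.0]).count())     # 2 -- NaN is excluded
    print(pd.Series(["a", None, "b", "c"]).count())  # 3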
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_describe.py ADDED
@@ -0,0 +1,203 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.compat.numpy import np_version_gte1p25
5
+
6
+ from pandas.core.dtypes.common import (
7
+ is_complex_dtype,
8
+ is_extension_array_dtype,
9
+ )
10
+
11
+ from pandas import (
12
+ NA,
13
+ Period,
14
+ Series,
15
+ Timedelta,
16
+ Timestamp,
17
+ date_range,
18
+ )
19
+ import pandas._testing as tm
20
+
21
+
22
+ class TestSeriesDescribe:
23
+ def test_describe_ints(self):
24
+ ser = Series([0, 1, 2, 3, 4], name="int_data")
25
+ result = ser.describe()
26
+ expected = Series(
27
+ [5, 2, ser.std(), 0, 1, 2, 3, 4],
28
+ name="int_data",
29
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
30
+ )
31
+ tm.assert_series_equal(result, expected)
32
+
33
+ def test_describe_bools(self):
34
+ ser = Series([True, True, False, False, False], name="bool_data")
35
+ result = ser.describe()
36
+ expected = Series(
37
+ [5, 2, False, 3], name="bool_data", index=["count", "unique", "top", "freq"]
38
+ )
39
+ tm.assert_series_equal(result, expected)
40
+
41
+ def test_describe_strs(self):
42
+ ser = Series(["a", "a", "b", "c", "d"], name="str_data")
43
+ result = ser.describe()
44
+ expected = Series(
45
+ [5, 4, "a", 2], name="str_data", index=["count", "unique", "top", "freq"]
46
+ )
47
+ tm.assert_series_equal(result, expected)
48
+
49
+ def test_describe_timedelta64(self):
50
+ ser = Series(
51
+ [
52
+ Timedelta("1 days"),
53
+ Timedelta("2 days"),
54
+ Timedelta("3 days"),
55
+ Timedelta("4 days"),
56
+ Timedelta("5 days"),
57
+ ],
58
+ name="timedelta_data",
59
+ )
60
+ result = ser.describe()
61
+ expected = Series(
62
+ [5, ser[2], ser.std(), ser[0], ser[1], ser[2], ser[3], ser[4]],
63
+ name="timedelta_data",
64
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
65
+ )
66
+ tm.assert_series_equal(result, expected)
67
+
68
+ def test_describe_period(self):
69
+ ser = Series(
70
+ [Period("2020-01", "M"), Period("2020-01", "M"), Period("2019-12", "M")],
71
+ name="period_data",
72
+ )
73
+ result = ser.describe()
74
+ expected = Series(
75
+ [3, 2, ser[0], 2],
76
+ name="period_data",
77
+ index=["count", "unique", "top", "freq"],
78
+ )
79
+ tm.assert_series_equal(result, expected)
80
+
81
+ def test_describe_empty_object(self):
82
+ # https://github.com/pandas-dev/pandas/issues/27183
83
+ s = Series([None, None], dtype=object)
84
+ result = s.describe()
85
+ expected = Series(
86
+ [0, 0, np.nan, np.nan],
87
+ dtype=object,
88
+ index=["count", "unique", "top", "freq"],
89
+ )
90
+ tm.assert_series_equal(result, expected)
91
+
92
+ result = s[:0].describe()
93
+ tm.assert_series_equal(result, expected)
94
+ # ensure NaN, not None
95
+ assert np.isnan(result.iloc[2])
96
+ assert np.isnan(result.iloc[3])
97
+
98
+ def test_describe_with_tz(self, tz_naive_fixture):
99
+ # GH 21332
100
+ tz = tz_naive_fixture
101
+ name = str(tz_naive_fixture)
102
+ start = Timestamp(2018, 1, 1)
103
+ end = Timestamp(2018, 1, 5)
104
+ s = Series(date_range(start, end, tz=tz), name=name)
105
+ result = s.describe()
106
+ expected = Series(
107
+ [
108
+ 5,
109
+ Timestamp(2018, 1, 3).tz_localize(tz),
110
+ start.tz_localize(tz),
111
+ s[1],
112
+ s[2],
113
+ s[3],
114
+ end.tz_localize(tz),
115
+ ],
116
+ name=name,
117
+ index=["count", "mean", "min", "25%", "50%", "75%", "max"],
118
+ )
119
+ tm.assert_series_equal(result, expected)
120
+
121
+ def test_describe_with_tz_numeric(self):
122
+ name = tz = "CET"
123
+ start = Timestamp(2018, 1, 1)
124
+ end = Timestamp(2018, 1, 5)
125
+ s = Series(date_range(start, end, tz=tz), name=name)
126
+
127
+ result = s.describe()
128
+
129
+ expected = Series(
130
+ [
131
+ 5,
132
+ Timestamp("2018-01-03 00:00:00", tz=tz),
133
+ Timestamp("2018-01-01 00:00:00", tz=tz),
134
+ Timestamp("2018-01-02 00:00:00", tz=tz),
135
+ Timestamp("2018-01-03 00:00:00", tz=tz),
136
+ Timestamp("2018-01-04 00:00:00", tz=tz),
137
+ Timestamp("2018-01-05 00:00:00", tz=tz),
138
+ ],
139
+ name=name,
140
+ index=["count", "mean", "min", "25%", "50%", "75%", "max"],
141
+ )
142
+ tm.assert_series_equal(result, expected)
143
+
144
+ def test_datetime_is_numeric_includes_datetime(self):
145
+ s = Series(date_range("2012", periods=3))
146
+ result = s.describe()
147
+ expected = Series(
148
+ [
149
+ 3,
150
+ Timestamp("2012-01-02"),
151
+ Timestamp("2012-01-01"),
152
+ Timestamp("2012-01-01T12:00:00"),
153
+ Timestamp("2012-01-02"),
154
+ Timestamp("2012-01-02T12:00:00"),
155
+ Timestamp("2012-01-03"),
156
+ ],
157
+ index=["count", "mean", "min", "25%", "50%", "75%", "max"],
158
+ )
159
+ tm.assert_series_equal(result, expected)
160
+
161
+ @pytest.mark.filterwarnings("ignore:Casting complex values to real discards")
162
+ def test_numeric_result_dtype(self, any_numeric_dtype):
163
+ # GH#48340 - describe should always return float on non-complex numeric input
164
+ if is_extension_array_dtype(any_numeric_dtype):
165
+ dtype = "Float64"
166
+ else:
167
+ dtype = "complex128" if is_complex_dtype(any_numeric_dtype) else None
168
+
169
+ ser = Series([0, 1], dtype=any_numeric_dtype)
170
+ if dtype == "complex128" and np_version_gte1p25:
171
+ with pytest.raises(
172
+ TypeError, match=r"^a must be an array of real numbers$"
173
+ ):
174
+ ser.describe()
175
+ return
176
+ result = ser.describe()
177
+ expected = Series(
178
+ [
179
+ 2.0,
180
+ 0.5,
181
+ ser.std(),
182
+ 0,
183
+ 0.25,
184
+ 0.5,
185
+ 0.75,
186
+ 1.0,
187
+ ],
188
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
189
+ dtype=dtype,
190
+ )
191
+ tm.assert_series_equal(result, expected)
192
+
193
+ def test_describe_one_element_ea(self):
194
+ # GH#52515
195
+ ser = Series([0.0], dtype="Float64")
196
+ with tm.assert_produces_warning(None):
197
+ result = ser.describe()
198
+ expected = Series(
199
+ [1, 0, NA, 0, 0, 0, 0, 0],
200
+ dtype="Float64",
201
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
202
+ )
203
+ tm.assert_series_equal(result, expected)
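
(Minimal sketch, illustrative only, of the two output layouts describe() produces, matching the numeric and object tests above.)

    import pandas as pd

    print(pd.Series([0, 1, 2, 3, 4]).describe())  # count/mean/std/min/quartiles/max
    print(pd.Series(["a", "a", "b"]).describe())  # count/unique/top/freq for non-numeric data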
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_diff.py ADDED
@@ -0,0 +1,88 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ TimedeltaIndex,
7
+ date_range,
8
+ )
9
+ import pandas._testing as tm
10
+
11
+
12
+ class TestSeriesDiff:
13
+ def test_diff_np(self):
14
+ # TODO(__array_function__): could make np.diff return a Series
15
+ # matching ser.diff()
16
+
17
+ ser = Series(np.arange(5))
18
+
19
+ res = np.diff(ser)
20
+ expected = np.array([1, 1, 1, 1])
21
+ tm.assert_numpy_array_equal(res, expected)
22
+
23
+ def test_diff_int(self):
24
+ # int dtype
25
+ a = 10000000000000000
26
+ b = a + 1
27
+ ser = Series([a, b])
28
+
29
+ result = ser.diff()
30
+ assert result[1] == 1
31
+
32
+ def test_diff_tz(self):
33
+ # Combined datetime diff, normal diff and boolean diff test
34
+ ts = Series(
35
+ np.arange(10, dtype=np.float64),
36
+ index=date_range("2020-01-01", periods=10),
37
+ name="ts",
38
+ )
39
+ ts.diff()
40
+
41
+ # neg n
42
+ result = ts.diff(-1)
43
+ expected = ts - ts.shift(-1)
44
+ tm.assert_series_equal(result, expected)
45
+
46
+ # 0
47
+ result = ts.diff(0)
48
+ expected = ts - ts
49
+ tm.assert_series_equal(result, expected)
50
+
51
+ def test_diff_dt64(self):
52
+ # datetime diff (GH#3100)
53
+ ser = Series(date_range("20130102", periods=5))
54
+ result = ser.diff()
55
+ expected = ser - ser.shift(1)
56
+ tm.assert_series_equal(result, expected)
57
+
58
+ # timedelta diff
59
+ result = result - result.shift(1) # previous result
60
+ expected = expected.diff() # previously expected
61
+ tm.assert_series_equal(result, expected)
62
+
63
+ def test_diff_dt64tz(self):
64
+ # with tz
65
+ ser = Series(
66
+ date_range("2000-01-01 09:00:00", periods=5, tz="US/Eastern"), name="foo"
67
+ )
68
+ result = ser.diff()
69
+ expected = Series(TimedeltaIndex(["NaT"] + ["1 days"] * 4), name="foo")
70
+ tm.assert_series_equal(result, expected)
71
+
72
+ @pytest.mark.parametrize(
73
+ "input,output,diff",
74
+ [([False, True, True, False, False], [np.nan, True, False, True, False], 1)],
75
+ )
76
+ def test_diff_bool(self, input, output, diff):
77
+ # boolean series (test for fixing #17294)
78
+ ser = Series(input)
79
+ result = ser.diff()
80
+ expected = Series(output)
81
+ tm.assert_series_equal(result, expected)
82
+
83
+ def test_diff_object_dtype(self):
84
+ # object series
85
+ ser = Series([False, True, 5.0, np.nan, True, False])
86
+ result = ser.diff()
87
+ expected = ser - ser.shift(1)
88
+ tm.assert_series_equal(result, expected)
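
(For context, not part of the diff: Series.diff(n) subtracts the value n positions earlier, i.e. it is equivalent to ser - ser.shift(n); a negative n compares against later values.)

    import pandas as pd

    ser = pd.Series([1, 3, 6, 10])
    print(ser.diff())    # [NaN, 2, 3, 4]
    print(ser.diff(-1))  # same as ser - ser.shift(-1)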
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop_duplicates.py ADDED
@@ -0,0 +1,267 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ Categorical,
7
+ Series,
8
+ )
9
+ import pandas._testing as tm
10
+
11
+
12
+ @pytest.mark.parametrize(
13
+ "keep, expected",
14
+ [
15
+ ("first", Series([False, False, False, False, True, True, False])),
16
+ ("last", Series([False, True, True, False, False, False, False])),
17
+ (False, Series([False, True, True, False, True, True, False])),
18
+ ],
19
+ )
20
+ def test_drop_duplicates(any_numpy_dtype, keep, expected):
21
+ tc = Series([1, 0, 3, 5, 3, 0, 4], dtype=np.dtype(any_numpy_dtype))
22
+
23
+ if tc.dtype == "bool":
24
+ pytest.skip("tested separately in test_drop_duplicates_bool")
25
+
26
+ tm.assert_series_equal(tc.duplicated(keep=keep), expected)
27
+ tm.assert_series_equal(tc.drop_duplicates(keep=keep), tc[~expected])
28
+ sc = tc.copy()
29
+ return_value = sc.drop_duplicates(keep=keep, inplace=True)
30
+ assert return_value is None
31
+ tm.assert_series_equal(sc, tc[~expected])
32
+
33
+
34
+ @pytest.mark.parametrize(
35
+ "keep, expected",
36
+ [
37
+ ("first", Series([False, False, True, True])),
38
+ ("last", Series([True, True, False, False])),
39
+ (False, Series([True, True, True, True])),
40
+ ],
41
+ )
42
+ def test_drop_duplicates_bool(keep, expected):
43
+ tc = Series([True, False, True, False])
44
+
45
+ tm.assert_series_equal(tc.duplicated(keep=keep), expected)
46
+ tm.assert_series_equal(tc.drop_duplicates(keep=keep), tc[~expected])
47
+ sc = tc.copy()
48
+ return_value = sc.drop_duplicates(keep=keep, inplace=True)
49
+ tm.assert_series_equal(sc, tc[~expected])
50
+ assert return_value is None
51
+
52
+
53
+ @pytest.mark.parametrize("values", [[], list(range(5))])
54
+ def test_drop_duplicates_no_duplicates(any_numpy_dtype, keep, values):
55
+ tc = Series(values, dtype=np.dtype(any_numpy_dtype))
56
+ expected = Series([False] * len(tc), dtype="bool")
57
+
58
+ if tc.dtype == "bool":
59
+ # 0 -> False and 1-> True
60
+ # any other value would be duplicated
61
+ tc = tc[:2]
62
+ expected = expected[:2]
63
+
64
+ tm.assert_series_equal(tc.duplicated(keep=keep), expected)
65
+
66
+ result_dropped = tc.drop_duplicates(keep=keep)
67
+ tm.assert_series_equal(result_dropped, tc)
68
+
69
+ # validate shallow copy
70
+ assert result_dropped is not tc
71
+
72
+
73
+ class TestSeriesDropDuplicates:
74
+ @pytest.fixture(
75
+ params=["int_", "uint", "float64", "str_", "timedelta64[h]", "datetime64[D]"]
76
+ )
77
+ def dtype(self, request):
78
+ return request.param
79
+
80
+ @pytest.fixture
81
+ def cat_series_unused_category(self, dtype, ordered):
82
+ # Test case 1
83
+ cat_array = np.array([1, 2, 3, 4, 5], dtype=np.dtype(dtype))
84
+
85
+ input1 = np.array([1, 2, 3, 3], dtype=np.dtype(dtype))
86
+ cat = Categorical(input1, categories=cat_array, ordered=ordered)
87
+ tc1 = Series(cat)
88
+ return tc1
89
+
90
+ def test_drop_duplicates_categorical_non_bool(self, cat_series_unused_category):
91
+ tc1 = cat_series_unused_category
92
+
93
+ expected = Series([False, False, False, True])
94
+
95
+ result = tc1.duplicated()
96
+ tm.assert_series_equal(result, expected)
97
+
98
+ result = tc1.drop_duplicates()
99
+ tm.assert_series_equal(result, tc1[~expected])
100
+
101
+ sc = tc1.copy()
102
+ return_value = sc.drop_duplicates(inplace=True)
103
+ assert return_value is None
104
+ tm.assert_series_equal(sc, tc1[~expected])
105
+
106
+ def test_drop_duplicates_categorical_non_bool_keeplast(
107
+ self, cat_series_unused_category
108
+ ):
109
+ tc1 = cat_series_unused_category
110
+
111
+ expected = Series([False, False, True, False])
112
+
113
+ result = tc1.duplicated(keep="last")
114
+ tm.assert_series_equal(result, expected)
115
+
116
+ result = tc1.drop_duplicates(keep="last")
117
+ tm.assert_series_equal(result, tc1[~expected])
118
+
119
+ sc = tc1.copy()
120
+ return_value = sc.drop_duplicates(keep="last", inplace=True)
121
+ assert return_value is None
122
+ tm.assert_series_equal(sc, tc1[~expected])
123
+
124
+ def test_drop_duplicates_categorical_non_bool_keepfalse(
125
+ self, cat_series_unused_category
126
+ ):
127
+ tc1 = cat_series_unused_category
128
+
129
+ expected = Series([False, False, True, True])
130
+
131
+ result = tc1.duplicated(keep=False)
132
+ tm.assert_series_equal(result, expected)
133
+
134
+ result = tc1.drop_duplicates(keep=False)
135
+ tm.assert_series_equal(result, tc1[~expected])
136
+
137
+ sc = tc1.copy()
138
+ return_value = sc.drop_duplicates(keep=False, inplace=True)
139
+ assert return_value is None
140
+ tm.assert_series_equal(sc, tc1[~expected])
141
+
142
+ @pytest.fixture
143
+ def cat_series(self, dtype, ordered):
144
+ # no unused categories, unlike cat_series_unused_category
145
+ cat_array = np.array([1, 2, 3, 4, 5], dtype=np.dtype(dtype))
146
+
147
+ input2 = np.array([1, 2, 3, 5, 3, 2, 4], dtype=np.dtype(dtype))
148
+ cat = Categorical(input2, categories=cat_array, ordered=ordered)
149
+ tc2 = Series(cat)
150
+ return tc2
151
+
152
+ def test_drop_duplicates_categorical_non_bool2(self, cat_series):
153
+ tc2 = cat_series
154
+
155
+ expected = Series([False, False, False, False, True, True, False])
156
+
157
+ result = tc2.duplicated()
158
+ tm.assert_series_equal(result, expected)
159
+
160
+ result = tc2.drop_duplicates()
161
+ tm.assert_series_equal(result, tc2[~expected])
162
+
163
+ sc = tc2.copy()
164
+ return_value = sc.drop_duplicates(inplace=True)
165
+ assert return_value is None
166
+ tm.assert_series_equal(sc, tc2[~expected])
167
+
168
+ def test_drop_duplicates_categorical_non_bool2_keeplast(self, cat_series):
169
+ tc2 = cat_series
170
+
171
+ expected = Series([False, True, True, False, False, False, False])
172
+
173
+ result = tc2.duplicated(keep="last")
174
+ tm.assert_series_equal(result, expected)
175
+
176
+ result = tc2.drop_duplicates(keep="last")
177
+ tm.assert_series_equal(result, tc2[~expected])
178
+
179
+ sc = tc2.copy()
180
+ return_value = sc.drop_duplicates(keep="last", inplace=True)
181
+ assert return_value is None
182
+ tm.assert_series_equal(sc, tc2[~expected])
183
+
184
+ def test_drop_duplicates_categorical_non_bool2_keepfalse(self, cat_series):
185
+ tc2 = cat_series
186
+
187
+ expected = Series([False, True, True, False, True, True, False])
188
+
189
+ result = tc2.duplicated(keep=False)
190
+ tm.assert_series_equal(result, expected)
191
+
192
+ result = tc2.drop_duplicates(keep=False)
193
+ tm.assert_series_equal(result, tc2[~expected])
194
+
195
+ sc = tc2.copy()
196
+ return_value = sc.drop_duplicates(keep=False, inplace=True)
197
+ assert return_value is None
198
+ tm.assert_series_equal(sc, tc2[~expected])
199
+
200
+ def test_drop_duplicates_categorical_bool(self, ordered):
201
+ tc = Series(
202
+ Categorical(
203
+ [True, False, True, False], categories=[True, False], ordered=ordered
204
+ )
205
+ )
206
+
207
+ expected = Series([False, False, True, True])
208
+ tm.assert_series_equal(tc.duplicated(), expected)
209
+ tm.assert_series_equal(tc.drop_duplicates(), tc[~expected])
210
+ sc = tc.copy()
211
+ return_value = sc.drop_duplicates(inplace=True)
212
+ assert return_value is None
213
+ tm.assert_series_equal(sc, tc[~expected])
214
+
215
+ expected = Series([True, True, False, False])
216
+ tm.assert_series_equal(tc.duplicated(keep="last"), expected)
217
+ tm.assert_series_equal(tc.drop_duplicates(keep="last"), tc[~expected])
218
+ sc = tc.copy()
219
+ return_value = sc.drop_duplicates(keep="last", inplace=True)
220
+ assert return_value is None
221
+ tm.assert_series_equal(sc, tc[~expected])
222
+
223
+ expected = Series([True, True, True, True])
224
+ tm.assert_series_equal(tc.duplicated(keep=False), expected)
225
+ tm.assert_series_equal(tc.drop_duplicates(keep=False), tc[~expected])
226
+ sc = tc.copy()
227
+ return_value = sc.drop_duplicates(keep=False, inplace=True)
228
+ assert return_value is None
229
+ tm.assert_series_equal(sc, tc[~expected])
230
+
231
+ def test_drop_duplicates_categorical_bool_na(self, nulls_fixture):
232
+ # GH#44351
233
+ ser = Series(
234
+ Categorical(
235
+ [True, False, True, False, nulls_fixture],
236
+ categories=[True, False],
237
+ ordered=True,
238
+ )
239
+ )
240
+ result = ser.drop_duplicates()
241
+ expected = Series(
242
+ Categorical([True, False, np.nan], categories=[True, False], ordered=True),
243
+ index=[0, 1, 4],
244
+ )
245
+ tm.assert_series_equal(result, expected)
246
+
247
+ def test_drop_duplicates_ignore_index(self):
248
+ # GH#48304
249
+ ser = Series([1, 2, 2, 3])
250
+ result = ser.drop_duplicates(ignore_index=True)
251
+ expected = Series([1, 2, 3])
252
+ tm.assert_series_equal(result, expected)
253
+
254
+ def test_duplicated_arrow_dtype(self):
255
+ pytest.importorskip("pyarrow")
256
+ ser = Series([True, False, None, False], dtype="bool[pyarrow]")
257
+ result = ser.drop_duplicates()
258
+ expected = Series([True, False, None], dtype="bool[pyarrow]")
259
+ tm.assert_series_equal(result, expected)
260
+
261
+ def test_drop_duplicates_arrow_strings(self):
262
+ # GH#54904
263
+ pa = pytest.importorskip("pyarrow")
264
+ ser = Series(["a", "a"], dtype=pd.ArrowDtype(pa.string()))
265
+ result = ser.drop_duplicates()
266
+ expected = Series(["a"], dtype=pd.ArrowDtype(pa.string()))
267
+ tm.assert_series_equal(result, expected)
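
(Illustrative sketch, not from the commit, of the three keep policies exercised throughout this file.)

    import pandas as pd

    ser = pd.Series([1, 0, 3, 5, 3, 0, 4])
    print(ser.duplicated(keep="first"))      # later repeats are flagged True
    print(ser.drop_duplicates(keep="last"))  # keeps the last occurrence of each value
    print(ser.drop_duplicates(keep=False))   # drops every value that repeats at all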
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_dropna.py ADDED
@@ -0,0 +1,117 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DatetimeIndex,
6
+ IntervalIndex,
7
+ NaT,
8
+ Period,
9
+ Series,
10
+ Timestamp,
11
+ )
12
+ import pandas._testing as tm
13
+
14
+
15
+ class TestDropna:
16
+ def test_dropna_empty(self):
17
+ ser = Series([], dtype=object)
18
+
19
+ assert len(ser.dropna()) == 0
20
+ return_value = ser.dropna(inplace=True)
21
+ assert return_value is None
22
+ assert len(ser) == 0
23
+
24
+ # invalid axis
25
+ msg = "No axis named 1 for object type Series"
26
+ with pytest.raises(ValueError, match=msg):
27
+ ser.dropna(axis=1)
28
+
29
+ def test_dropna_preserve_name(self, datetime_series):
30
+ datetime_series[:5] = np.nan
31
+ result = datetime_series.dropna()
32
+ assert result.name == datetime_series.name
33
+ name = datetime_series.name
34
+ ts = datetime_series.copy()
35
+ return_value = ts.dropna(inplace=True)
36
+ assert return_value is None
37
+ assert ts.name == name
38
+
39
+ def test_dropna_no_nan(self):
40
+ for ser in [
41
+ Series([1, 2, 3], name="x"),
42
+ Series([False, True, False], name="x"),
43
+ ]:
44
+ result = ser.dropna()
45
+ tm.assert_series_equal(result, ser)
46
+ assert result is not ser
47
+
48
+ s2 = ser.copy()
49
+ return_value = s2.dropna(inplace=True)
50
+ assert return_value is None
51
+ tm.assert_series_equal(s2, ser)
52
+
53
+ def test_dropna_intervals(self):
54
+ ser = Series(
55
+ [np.nan, 1, 2, 3],
56
+ IntervalIndex.from_arrays([np.nan, 0, 1, 2], [np.nan, 1, 2, 3]),
57
+ )
58
+
59
+ result = ser.dropna()
60
+ expected = ser.iloc[1:]
61
+ tm.assert_series_equal(result, expected)
62
+
63
+ def test_dropna_period_dtype(self):
64
+ # GH#13737
65
+ ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])
66
+ result = ser.dropna()
67
+ expected = Series([Period("2011-01", freq="M")])
68
+
69
+ tm.assert_series_equal(result, expected)
70
+
71
+ def test_datetime64_tz_dropna(self, unit):
72
+ # DatetimeLikeBlock
73
+ ser = Series(
74
+ [
75
+ Timestamp("2011-01-01 10:00"),
76
+ NaT,
77
+ Timestamp("2011-01-03 10:00"),
78
+ NaT,
79
+ ],
80
+ dtype=f"M8[{unit}]",
81
+ )
82
+ result = ser.dropna()
83
+ expected = Series(
84
+ [Timestamp("2011-01-01 10:00"), Timestamp("2011-01-03 10:00")],
85
+ index=[0, 2],
86
+ dtype=f"M8[{unit}]",
87
+ )
88
+ tm.assert_series_equal(result, expected)
89
+
90
+ # DatetimeTZBlock
91
+ idx = DatetimeIndex(
92
+ ["2011-01-01 10:00", NaT, "2011-01-03 10:00", NaT], tz="Asia/Tokyo"
93
+ ).as_unit(unit)
94
+ ser = Series(idx)
95
+ assert ser.dtype == f"datetime64[{unit}, Asia/Tokyo]"
96
+ result = ser.dropna()
97
+ expected = Series(
98
+ [
99
+ Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"),
100
+ Timestamp("2011-01-03 10:00", tz="Asia/Tokyo"),
101
+ ],
102
+ index=[0, 2],
103
+ dtype=f"datetime64[{unit}, Asia/Tokyo]",
104
+ )
105
+ assert result.dtype == f"datetime64[{unit}, Asia/Tokyo]"
106
+ tm.assert_series_equal(result, expected)
107
+
108
+ @pytest.mark.parametrize("val", [1, 1.5])
109
+ def test_dropna_ignore_index(self, val):
110
+ # GH#31725
111
+ ser = Series([1, 2, val], index=[3, 2, 1])
112
+ result = ser.dropna(ignore_index=True)
113
+ expected = Series([1, 2, val])
114
+ tm.assert_series_equal(result, expected)
115
+
116
+ ser.dropna(ignore_index=True, inplace=True)
117
+ tm.assert_series_equal(ser, expected)
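
(For reference, illustrative only; ignore_index for Series.dropna requires pandas >= 2.0. dropna keeps the original labels unless asked to reset them.)

    import numpy as np
    import pandas as pd

    ser = pd.Series([1.0, np.nan, 3.0], index=[10, 20, 30])
    print(ser.dropna())                   # index stays [10, 30]
    print(ser.dropna(ignore_index=True))  # index reset to a fresh RangeIndex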
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_dtypes.py ADDED
@@ -0,0 +1,7 @@
+ import numpy as np
+
+
+ class TestSeriesDtypes:
+     def test_dtype(self, datetime_series):
+         assert datetime_series.dtype == np.dtype("float64")
+         assert datetime_series.dtypes == np.dtype("float64")
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_explode.py ADDED
@@ -0,0 +1,175 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+
7
+
8
+ def test_basic():
9
+ s = pd.Series([[0, 1, 2], np.nan, [], (3, 4)], index=list("abcd"), name="foo")
10
+ result = s.explode()
11
+ expected = pd.Series(
12
+ [0, 1, 2, np.nan, np.nan, 3, 4], index=list("aaabcdd"), dtype=object, name="foo"
13
+ )
14
+ tm.assert_series_equal(result, expected)
15
+
16
+
17
+ def test_mixed_type():
18
+ s = pd.Series(
19
+ [[0, 1, 2], np.nan, None, np.array([]), pd.Series(["a", "b"])], name="foo"
20
+ )
21
+ result = s.explode()
22
+ expected = pd.Series(
23
+ [0, 1, 2, np.nan, None, np.nan, "a", "b"],
24
+ index=[0, 0, 0, 1, 2, 3, 4, 4],
25
+ dtype=object,
26
+ name="foo",
27
+ )
28
+ tm.assert_series_equal(result, expected)
29
+
30
+
31
+ def test_empty():
32
+ s = pd.Series(dtype=object)
33
+ result = s.explode()
34
+ expected = s.copy()
35
+ tm.assert_series_equal(result, expected)
36
+
37
+
38
+ def test_nested_lists():
39
+ s = pd.Series([[[1, 2, 3]], [1, 2], 1])
40
+ result = s.explode()
41
+ expected = pd.Series([[1, 2, 3], 1, 2, 1], index=[0, 1, 1, 2])
42
+ tm.assert_series_equal(result, expected)
43
+
44
+
45
+ def test_multi_index():
46
+ s = pd.Series(
47
+ [[0, 1, 2], np.nan, [], (3, 4)],
48
+ name="foo",
49
+ index=pd.MultiIndex.from_product([list("ab"), range(2)], names=["foo", "bar"]),
50
+ )
51
+ result = s.explode()
52
+ index = pd.MultiIndex.from_tuples(
53
+ [("a", 0), ("a", 0), ("a", 0), ("a", 1), ("b", 0), ("b", 1), ("b", 1)],
54
+ names=["foo", "bar"],
55
+ )
56
+ expected = pd.Series(
57
+ [0, 1, 2, np.nan, np.nan, 3, 4], index=index, dtype=object, name="foo"
58
+ )
59
+ tm.assert_series_equal(result, expected)
60
+
61
+
62
+ def test_large():
63
+ s = pd.Series([range(256)]).explode()
64
+ result = s.explode()
65
+ tm.assert_series_equal(result, s)
66
+
67
+
68
+ def test_invert_array():
69
+ df = pd.DataFrame({"a": pd.date_range("20190101", periods=3, tz="UTC")})
70
+
71
+ listify = df.apply(lambda x: x.array, axis=1)
72
+ result = listify.explode()
73
+ tm.assert_series_equal(result, df["a"].rename())
74
+
75
+
76
+ @pytest.mark.parametrize(
77
+ "s", [pd.Series([1, 2, 3]), pd.Series(pd.date_range("2019", periods=3, tz="UTC"))]
78
+ )
79
+ def test_non_object_dtype(s):
80
+ result = s.explode()
81
+ tm.assert_series_equal(result, s)
82
+
83
+
84
+ def test_typical_usecase():
85
+ df = pd.DataFrame(
86
+ [{"var1": "a,b,c", "var2": 1}, {"var1": "d,e,f", "var2": 2}],
87
+ columns=["var1", "var2"],
88
+ )
89
+ exploded = df.var1.str.split(",").explode()
90
+ result = df[["var2"]].join(exploded)
91
+ expected = pd.DataFrame(
92
+ {"var2": [1, 1, 1, 2, 2, 2], "var1": list("abcdef")},
93
+ columns=["var2", "var1"],
94
+ index=[0, 0, 0, 1, 1, 1],
95
+ )
96
+ tm.assert_frame_equal(result, expected)
97
+
98
+
99
+ def test_nested_EA():
100
+ # a nested EA array
101
+ s = pd.Series(
102
+ [
103
+ pd.date_range("20170101", periods=3, tz="UTC"),
104
+ pd.date_range("20170104", periods=3, tz="UTC"),
105
+ ]
106
+ )
107
+ result = s.explode()
108
+ expected = pd.Series(
109
+ pd.date_range("20170101", periods=6, tz="UTC"), index=[0, 0, 0, 1, 1, 1]
110
+ )
111
+ tm.assert_series_equal(result, expected)
112
+
113
+
114
+ def test_duplicate_index():
115
+ # GH 28005
116
+ s = pd.Series([[1, 2], [3, 4]], index=[0, 0])
117
+ result = s.explode()
118
+ expected = pd.Series([1, 2, 3, 4], index=[0, 0, 0, 0], dtype=object)
119
+ tm.assert_series_equal(result, expected)
120
+
121
+
122
+ def test_ignore_index():
123
+ # GH 34932
124
+ s = pd.Series([[1, 2], [3, 4]])
125
+ result = s.explode(ignore_index=True)
126
+ expected = pd.Series([1, 2, 3, 4], index=[0, 1, 2, 3], dtype=object)
127
+ tm.assert_series_equal(result, expected)
128
+
129
+
130
+ def test_explode_sets():
131
+ # https://github.com/pandas-dev/pandas/issues/35614
132
+ s = pd.Series([{"a", "b", "c"}], index=[1])
133
+ result = s.explode().sort_values()
134
+ expected = pd.Series(["a", "b", "c"], index=[1, 1, 1])
135
+ tm.assert_series_equal(result, expected)
136
+
137
+
138
+ def test_explode_scalars_can_ignore_index():
139
+ # https://github.com/pandas-dev/pandas/issues/40487
140
+ s = pd.Series([1, 2, 3], index=["a", "b", "c"])
141
+ result = s.explode(ignore_index=True)
142
+ expected = pd.Series([1, 2, 3])
143
+ tm.assert_series_equal(result, expected)
144
+
145
+
146
+ @pytest.mark.parametrize("ignore_index", [True, False])
147
+ def test_explode_pyarrow_list_type(ignore_index):
148
+ # GH 53602
149
+ pa = pytest.importorskip("pyarrow")
150
+
151
+ data = [
152
+ [None, None],
153
+ [1],
154
+ [],
155
+ [2, 3],
156
+ None,
157
+ ]
158
+ ser = pd.Series(data, dtype=pd.ArrowDtype(pa.list_(pa.int64())))
159
+ result = ser.explode(ignore_index=ignore_index)
160
+ expected = pd.Series(
161
+ data=[None, None, 1, None, 2, 3, None],
162
+ index=None if ignore_index else [0, 0, 1, 2, 3, 3, 4],
163
+ dtype=pd.ArrowDtype(pa.int64()),
164
+ )
165
+ tm.assert_series_equal(result, expected)
166
+
167
+
168
+ @pytest.mark.parametrize("ignore_index", [True, False])
169
+ def test_explode_pyarrow_non_list_type(ignore_index):
170
+ pa = pytest.importorskip("pyarrow")
171
+ data = [1, 2, 3]
172
+ ser = pd.Series(data, dtype=pd.ArrowDtype(pa.int64()))
173
+ result = ser.explode(ignore_index=ignore_index)
174
+ expected = pd.Series([1, 2, 3], dtype="int64[pyarrow]", index=[0, 1, 2])
175
+ tm.assert_series_equal(result, expected)
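
(A small usage sketch, not part of the diff, of Series.explode covering the list, empty-list and NaN cases tested above.)

    import numpy as np
    import pandas as pd

    ser = pd.Series([[1, 2], [], np.nan, 3])
    print(ser.explode())                   # empty lists and NaN become single NaN rows; index labels repeat
    print(ser.explode(ignore_index=True))  # same values with a fresh RangeIndex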
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_fillna.py ADDED
@@ -0,0 +1,1155 @@
1
+ from datetime import (
2
+ datetime,
3
+ timedelta,
4
+ timezone,
5
+ )
6
+
7
+ import numpy as np
8
+ import pytest
9
+ import pytz
10
+
11
+ from pandas import (
12
+ Categorical,
13
+ DataFrame,
14
+ DatetimeIndex,
15
+ NaT,
16
+ Period,
17
+ Series,
18
+ Timedelta,
19
+ Timestamp,
20
+ date_range,
21
+ isna,
22
+ timedelta_range,
23
+ )
24
+ import pandas._testing as tm
25
+ from pandas.core.arrays import period_array
26
+
27
+
28
+ @pytest.mark.filterwarnings(
29
+ "ignore:(Series|DataFrame).fillna with 'method' is deprecated:FutureWarning"
30
+ )
31
+ class TestSeriesFillNA:
32
+ def test_fillna_nat(self):
33
+ series = Series([0, 1, 2, NaT._value], dtype="M8[ns]")
34
+
35
+ filled = series.fillna(method="pad")
36
+ filled2 = series.fillna(value=series.values[2])
37
+
38
+ expected = series.copy()
39
+ expected.iloc[3] = expected.iloc[2]
40
+
41
+ tm.assert_series_equal(filled, expected)
42
+ tm.assert_series_equal(filled2, expected)
43
+
44
+ df = DataFrame({"A": series})
45
+ filled = df.fillna(method="pad")
46
+ filled2 = df.fillna(value=series.values[2])
47
+ expected = DataFrame({"A": expected})
48
+ tm.assert_frame_equal(filled, expected)
49
+ tm.assert_frame_equal(filled2, expected)
50
+
51
+ series = Series([NaT._value, 0, 1, 2], dtype="M8[ns]")
52
+
53
+ filled = series.fillna(method="bfill")
54
+ filled2 = series.fillna(value=series[1])
55
+
56
+ expected = series.copy()
57
+ expected[0] = expected[1]
58
+
59
+ tm.assert_series_equal(filled, expected)
60
+ tm.assert_series_equal(filled2, expected)
61
+
62
+ df = DataFrame({"A": series})
63
+ filled = df.fillna(method="bfill")
64
+ filled2 = df.fillna(value=series[1])
65
+ expected = DataFrame({"A": expected})
66
+ tm.assert_frame_equal(filled, expected)
67
+ tm.assert_frame_equal(filled2, expected)
68
+
69
+ def test_fillna_value_or_method(self, datetime_series):
70
+ msg = "Cannot specify both 'value' and 'method'"
71
+ with pytest.raises(ValueError, match=msg):
72
+ datetime_series.fillna(value=0, method="ffill")
73
+
74
+ def test_fillna(self):
75
+ ts = Series(
76
+ [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
77
+ )
78
+
79
+ tm.assert_series_equal(ts, ts.fillna(method="ffill"))
80
+
81
+ ts.iloc[2] = np.nan
82
+
83
+ exp = Series([0.0, 1.0, 1.0, 3.0, 4.0], index=ts.index)
84
+ tm.assert_series_equal(ts.fillna(method="ffill"), exp)
85
+
86
+ exp = Series([0.0, 1.0, 3.0, 3.0, 4.0], index=ts.index)
87
+ tm.assert_series_equal(ts.fillna(method="backfill"), exp)
88
+
89
+ exp = Series([0.0, 1.0, 5.0, 3.0, 4.0], index=ts.index)
90
+ tm.assert_series_equal(ts.fillna(value=5), exp)
91
+
92
+ msg = "Must specify a fill 'value' or 'method'"
93
+ with pytest.raises(ValueError, match=msg):
94
+ ts.fillna()
95
+
96
+ def test_fillna_nonscalar(self):
97
+ # GH#5703
98
+ s1 = Series([np.nan])
99
+ s2 = Series([1])
100
+ result = s1.fillna(s2)
101
+ expected = Series([1.0])
102
+ tm.assert_series_equal(result, expected)
103
+ result = s1.fillna({})
104
+ tm.assert_series_equal(result, s1)
105
+ result = s1.fillna(Series((), dtype=object))
106
+ tm.assert_series_equal(result, s1)
107
+ result = s2.fillna(s1)
108
+ tm.assert_series_equal(result, s2)
109
+ result = s1.fillna({0: 1})
110
+ tm.assert_series_equal(result, expected)
111
+ result = s1.fillna({1: 1})
112
+ tm.assert_series_equal(result, Series([np.nan]))
113
+ result = s1.fillna({0: 1, 1: 1})
114
+ tm.assert_series_equal(result, expected)
115
+ result = s1.fillna(Series({0: 1, 1: 1}))
116
+ tm.assert_series_equal(result, expected)
117
+ result = s1.fillna(Series({0: 1, 1: 1}, index=[4, 5]))
118
+ tm.assert_series_equal(result, s1)
119
+
120
+ def test_fillna_aligns(self):
121
+ s1 = Series([0, 1, 2], list("abc"))
122
+ s2 = Series([0, np.nan, 2], list("bac"))
123
+ result = s2.fillna(s1)
124
+ expected = Series([0, 0, 2.0], list("bac"))
125
+ tm.assert_series_equal(result, expected)
126
+
127
+ def test_fillna_limit(self):
128
+ ser = Series(np.nan, index=[0, 1, 2])
129
+ result = ser.fillna(999, limit=1)
130
+ expected = Series([999, np.nan, np.nan], index=[0, 1, 2])
131
+ tm.assert_series_equal(result, expected)
132
+
133
+ result = ser.fillna(999, limit=2)
134
+ expected = Series([999, 999, np.nan], index=[0, 1, 2])
135
+ tm.assert_series_equal(result, expected)
136
+
137
+ def test_fillna_dont_cast_strings(self):
138
+ # GH#9043
139
+ # make sure a string representation of int/float values can be filled
140
+ # correctly without raising errors or being converted
141
+ vals = ["0", "1.5", "-0.3"]
142
+ for val in vals:
143
+ ser = Series([0, 1, np.nan, np.nan, 4], dtype="float64")
144
+ result = ser.fillna(val)
145
+ expected = Series([0, 1, val, val, 4], dtype="object")
146
+ tm.assert_series_equal(result, expected)
147
+
148
+ def test_fillna_consistency(self):
149
+ # GH#16402
150
+ # fillna with a tz aware to a tz-naive, should result in object
151
+
152
+ ser = Series([Timestamp("20130101"), NaT])
153
+
154
+ result = ser.fillna(Timestamp("20130101", tz="US/Eastern"))
155
+ expected = Series(
156
+ [Timestamp("20130101"), Timestamp("2013-01-01", tz="US/Eastern")],
157
+ dtype="object",
158
+ )
159
+ tm.assert_series_equal(result, expected)
160
+
161
+ result = ser.where([True, False], Timestamp("20130101", tz="US/Eastern"))
162
+ tm.assert_series_equal(result, expected)
163
+
164
+ result = ser.where([True, False], Timestamp("20130101", tz="US/Eastern"))
165
+ tm.assert_series_equal(result, expected)
166
+
167
+ # with a non-datetime
168
+ result = ser.fillna("foo")
169
+ expected = Series([Timestamp("20130101"), "foo"])
170
+ tm.assert_series_equal(result, expected)
171
+
172
+ # assignment
173
+ ser2 = ser.copy()
174
+ with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
175
+ ser2[1] = "foo"
176
+ tm.assert_series_equal(ser2, expected)
177
+
178
+ def test_fillna_downcast(self):
179
+ # GH#15277
180
+ # infer int64 from float64
181
+ ser = Series([1.0, np.nan])
182
+ msg = "The 'downcast' keyword in fillna is deprecated"
183
+ with tm.assert_produces_warning(FutureWarning, match=msg):
184
+ result = ser.fillna(0, downcast="infer")
185
+ expected = Series([1, 0])
186
+ tm.assert_series_equal(result, expected)
187
+
188
+ # infer int64 from float64 when fillna value is a dict
189
+ ser = Series([1.0, np.nan])
190
+ with tm.assert_produces_warning(FutureWarning, match=msg):
191
+ result = ser.fillna({1: 0}, downcast="infer")
192
+ expected = Series([1, 0])
193
+ tm.assert_series_equal(result, expected)
194
+
195
+ def test_fillna_downcast_infer_objects_to_numeric(self):
196
+ # GH#44241 if we have object-dtype, 'downcast="infer"' should
197
+ # _actually_ infer
198
+
199
+ arr = np.arange(5).astype(object)
200
+ arr[3] = np.nan
201
+
202
+ ser = Series(arr)
203
+
204
+ msg = "The 'downcast' keyword in fillna is deprecated"
205
+ with tm.assert_produces_warning(FutureWarning, match=msg):
206
+ res = ser.fillna(3, downcast="infer")
207
+ expected = Series(np.arange(5), dtype=np.int64)
208
+ tm.assert_series_equal(res, expected)
209
+
210
+ msg = "The 'downcast' keyword in ffill is deprecated"
211
+ with tm.assert_produces_warning(FutureWarning, match=msg):
212
+ res = ser.ffill(downcast="infer")
213
+ expected = Series([0, 1, 2, 2, 4], dtype=np.int64)
214
+ tm.assert_series_equal(res, expected)
215
+
216
+ msg = "The 'downcast' keyword in bfill is deprecated"
217
+ with tm.assert_produces_warning(FutureWarning, match=msg):
218
+ res = ser.bfill(downcast="infer")
219
+ expected = Series([0, 1, 2, 4, 4], dtype=np.int64)
220
+ tm.assert_series_equal(res, expected)
221
+
222
+ # with a non-round float present, we will downcast to float64
223
+ ser[2] = 2.5
224
+
225
+ expected = Series([0, 1, 2.5, 3, 4], dtype=np.float64)
226
+ msg = "The 'downcast' keyword in fillna is deprecated"
227
+ with tm.assert_produces_warning(FutureWarning, match=msg):
228
+ res = ser.fillna(3, downcast="infer")
229
+ tm.assert_series_equal(res, expected)
230
+
231
+ msg = "The 'downcast' keyword in ffill is deprecated"
232
+ with tm.assert_produces_warning(FutureWarning, match=msg):
233
+ res = ser.ffill(downcast="infer")
234
+ expected = Series([0, 1, 2.5, 2.5, 4], dtype=np.float64)
235
+ tm.assert_series_equal(res, expected)
236
+
237
+ msg = "The 'downcast' keyword in bfill is deprecated"
238
+ with tm.assert_produces_warning(FutureWarning, match=msg):
239
+ res = ser.bfill(downcast="infer")
240
+ expected = Series([0, 1, 2.5, 4, 4], dtype=np.float64)
241
+ tm.assert_series_equal(res, expected)
242
+
243
+ def test_timedelta_fillna(self, frame_or_series, unit):
244
+ # GH#3371
245
+ ser = Series(
246
+ [
247
+ Timestamp("20130101"),
248
+ Timestamp("20130101"),
249
+ Timestamp("20130102"),
250
+ Timestamp("20130103 9:01:01"),
251
+ ],
252
+ dtype=f"M8[{unit}]",
253
+ )
254
+ td = ser.diff()
255
+ obj = frame_or_series(td).copy()
256
+
257
+ # reg fillna
258
+ result = obj.fillna(Timedelta(seconds=0))
259
+ expected = Series(
260
+ [
261
+ timedelta(0),
262
+ timedelta(0),
263
+ timedelta(1),
264
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
265
+ ],
266
+ dtype=f"m8[{unit}]",
267
+ )
268
+ expected = frame_or_series(expected)
269
+ tm.assert_equal(result, expected)
270
+
271
+ # GH#45746 pre-1.? ints were interpreted as seconds. then that was
272
+ # deprecated and changed to raise. In 2.0 it casts to common dtype,
273
+ # consistent with every other dtype's behavior
274
+ res = obj.fillna(1)
275
+ expected = obj.astype(object).fillna(1)
276
+ tm.assert_equal(res, expected)
277
+
278
+ result = obj.fillna(Timedelta(seconds=1))
279
+ expected = Series(
280
+ [
281
+ timedelta(seconds=1),
282
+ timedelta(0),
283
+ timedelta(1),
284
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
285
+ ],
286
+ dtype=f"m8[{unit}]",
287
+ )
288
+ expected = frame_or_series(expected)
289
+ tm.assert_equal(result, expected)
290
+
291
+ result = obj.fillna(timedelta(days=1, seconds=1))
292
+ expected = Series(
293
+ [
294
+ timedelta(days=1, seconds=1),
295
+ timedelta(0),
296
+ timedelta(1),
297
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
298
+ ],
299
+ dtype=f"m8[{unit}]",
300
+ )
301
+ expected = frame_or_series(expected)
302
+ tm.assert_equal(result, expected)
303
+
304
+ result = obj.fillna(np.timedelta64(10**9))
305
+ expected = Series(
306
+ [
307
+ timedelta(seconds=1),
308
+ timedelta(0),
309
+ timedelta(1),
310
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
311
+ ],
312
+ dtype=f"m8[{unit}]",
313
+ )
314
+ expected = frame_or_series(expected)
315
+ tm.assert_equal(result, expected)
316
+
317
+ result = obj.fillna(NaT)
318
+ expected = Series(
319
+ [
320
+ NaT,
321
+ timedelta(0),
322
+ timedelta(1),
323
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
324
+ ],
325
+ dtype=f"m8[{unit}]",
326
+ )
327
+ expected = frame_or_series(expected)
328
+ tm.assert_equal(result, expected)
329
+
330
+ # ffill
331
+ td[2] = np.nan
332
+ obj = frame_or_series(td).copy()
333
+ result = obj.ffill()
334
+ expected = td.fillna(Timedelta(seconds=0))
335
+ expected[0] = np.nan
336
+ expected = frame_or_series(expected)
337
+
338
+ tm.assert_equal(result, expected)
339
+
340
+ # bfill
341
+ td[2] = np.nan
342
+ obj = frame_or_series(td)
343
+ result = obj.bfill()
344
+ expected = td.fillna(Timedelta(seconds=0))
345
+ expected[2] = timedelta(days=1, seconds=9 * 3600 + 60 + 1)
346
+ expected = frame_or_series(expected)
347
+ tm.assert_equal(result, expected)
348
+
349
+ def test_datetime64_fillna(self):
350
+ ser = Series(
351
+ [
352
+ Timestamp("20130101"),
353
+ Timestamp("20130101"),
354
+ Timestamp("20130102"),
355
+ Timestamp("20130103 9:01:01"),
356
+ ]
357
+ )
358
+ ser[2] = np.nan
359
+
360
+ # ffill
361
+ result = ser.ffill()
362
+ expected = Series(
363
+ [
364
+ Timestamp("20130101"),
365
+ Timestamp("20130101"),
366
+ Timestamp("20130101"),
367
+ Timestamp("20130103 9:01:01"),
368
+ ]
369
+ )
370
+ tm.assert_series_equal(result, expected)
371
+
372
+ # bfill
373
+ result = ser.bfill()
374
+ expected = Series(
375
+ [
376
+ Timestamp("20130101"),
377
+ Timestamp("20130101"),
378
+ Timestamp("20130103 9:01:01"),
379
+ Timestamp("20130103 9:01:01"),
380
+ ]
381
+ )
382
+ tm.assert_series_equal(result, expected)
383
+
384
+ @pytest.mark.parametrize(
385
+ "scalar",
386
+ [
387
+ False,
388
+ pytest.param(
389
+ True,
390
+ marks=pytest.mark.xfail(
391
+ reason="GH#56410 scalar case not yet addressed"
392
+ ),
393
+ ),
394
+ ],
395
+ )
396
+ @pytest.mark.parametrize("tz", [None, "UTC"])
397
+ def test_datetime64_fillna_mismatched_reso_no_rounding(self, tz, scalar):
398
+ # GH#56410
399
+ dti = date_range("2016-01-01", periods=3, unit="s", tz=tz)
400
+ item = Timestamp("2016-02-03 04:05:06.789", tz=tz)
401
+ vec = date_range(item, periods=3, unit="ms")
402
+
403
+ exp_dtype = "M8[ms]" if tz is None else "M8[ms, UTC]"
404
+ expected = Series([item, dti[1], dti[2]], dtype=exp_dtype)
405
+
406
+ ser = Series(dti)
407
+ ser[0] = NaT
408
+ ser2 = ser.copy()
409
+
410
+ res = ser.fillna(item)
411
+ res2 = ser2.fillna(Series(vec))
412
+
413
+ if scalar:
414
+ tm.assert_series_equal(res, expected)
415
+ else:
416
+ tm.assert_series_equal(res2, expected)
417
+
418
+ @pytest.mark.parametrize(
419
+ "scalar",
420
+ [
421
+ False,
422
+ pytest.param(
423
+ True,
424
+ marks=pytest.mark.xfail(
425
+ reason="GH#56410 scalar case not yet addressed"
426
+ ),
427
+ ),
428
+ ],
429
+ )
430
+ def test_timedelta64_fillna_mismatched_reso_no_rounding(self, scalar):
431
+ # GH#56410
432
+ tdi = date_range("2016-01-01", periods=3, unit="s") - Timestamp("1970-01-01")
433
+ item = Timestamp("2016-02-03 04:05:06.789") - Timestamp("1970-01-01")
434
+ vec = timedelta_range(item, periods=3, unit="ms")
435
+
436
+ expected = Series([item, tdi[1], tdi[2]], dtype="m8[ms]")
437
+
438
+ ser = Series(tdi)
439
+ ser[0] = NaT
440
+ ser2 = ser.copy()
441
+
442
+ res = ser.fillna(item)
443
+ res2 = ser2.fillna(Series(vec))
444
+
445
+ if scalar:
446
+ tm.assert_series_equal(res, expected)
447
+ else:
448
+ tm.assert_series_equal(res2, expected)
449
+
450
+ def test_datetime64_fillna_backfill(self):
451
+ # GH#6587
452
+ # make sure that we are treating as integer when filling
453
+ ser = Series([NaT, NaT, "2013-08-05 15:30:00.000001"], dtype="M8[ns]")
454
+
455
+ expected = Series(
456
+ [
457
+ "2013-08-05 15:30:00.000001",
458
+ "2013-08-05 15:30:00.000001",
459
+ "2013-08-05 15:30:00.000001",
460
+ ],
461
+ dtype="M8[ns]",
462
+ )
463
+ result = ser.fillna(method="backfill")
464
+ tm.assert_series_equal(result, expected)
465
+
466
+ @pytest.mark.parametrize("tz", ["US/Eastern", "Asia/Tokyo"])
467
+ def test_datetime64_tz_fillna(self, tz, unit):
468
+ # DatetimeLikeBlock
469
+ ser = Series(
470
+ [
471
+ Timestamp("2011-01-01 10:00"),
472
+ NaT,
473
+ Timestamp("2011-01-03 10:00"),
474
+ NaT,
475
+ ],
476
+ dtype=f"M8[{unit}]",
477
+ )
478
+ null_loc = Series([False, True, False, True])
479
+
480
+ result = ser.fillna(Timestamp("2011-01-02 10:00"))
481
+ expected = Series(
482
+ [
483
+ Timestamp("2011-01-01 10:00"),
484
+ Timestamp("2011-01-02 10:00"),
485
+ Timestamp("2011-01-03 10:00"),
486
+ Timestamp("2011-01-02 10:00"),
487
+ ],
488
+ dtype=f"M8[{unit}]",
489
+ )
490
+ tm.assert_series_equal(expected, result)
491
+ # check s is not changed
492
+ tm.assert_series_equal(isna(ser), null_loc)
493
+
494
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz))
495
+ expected = Series(
496
+ [
497
+ Timestamp("2011-01-01 10:00"),
498
+ Timestamp("2011-01-02 10:00", tz=tz),
499
+ Timestamp("2011-01-03 10:00"),
500
+ Timestamp("2011-01-02 10:00", tz=tz),
501
+ ]
502
+ )
503
+ tm.assert_series_equal(expected, result)
504
+ tm.assert_series_equal(isna(ser), null_loc)
505
+
506
+ result = ser.fillna("AAA")
507
+ expected = Series(
508
+ [
509
+ Timestamp("2011-01-01 10:00"),
510
+ "AAA",
511
+ Timestamp("2011-01-03 10:00"),
512
+ "AAA",
513
+ ],
514
+ dtype=object,
515
+ )
516
+ tm.assert_series_equal(expected, result)
517
+ tm.assert_series_equal(isna(ser), null_loc)
518
+
519
+ result = ser.fillna(
520
+ {
521
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
522
+ 3: Timestamp("2011-01-04 10:00"),
523
+ }
524
+ )
525
+ expected = Series(
526
+ [
527
+ Timestamp("2011-01-01 10:00"),
528
+ Timestamp("2011-01-02 10:00", tz=tz),
529
+ Timestamp("2011-01-03 10:00"),
530
+ Timestamp("2011-01-04 10:00"),
531
+ ]
532
+ )
533
+ tm.assert_series_equal(expected, result)
534
+ tm.assert_series_equal(isna(ser), null_loc)
535
+
536
+ result = ser.fillna(
537
+ {1: Timestamp("2011-01-02 10:00"), 3: Timestamp("2011-01-04 10:00")}
538
+ )
539
+ expected = Series(
540
+ [
541
+ Timestamp("2011-01-01 10:00"),
542
+ Timestamp("2011-01-02 10:00"),
543
+ Timestamp("2011-01-03 10:00"),
544
+ Timestamp("2011-01-04 10:00"),
545
+ ],
546
+ dtype=f"M8[{unit}]",
547
+ )
548
+ tm.assert_series_equal(expected, result)
549
+ tm.assert_series_equal(isna(ser), null_loc)
550
+
551
+ # DatetimeTZBlock
552
+ idx = DatetimeIndex(
553
+ ["2011-01-01 10:00", NaT, "2011-01-03 10:00", NaT], tz=tz
554
+ ).as_unit(unit)
555
+ ser = Series(idx)
556
+ assert ser.dtype == f"datetime64[{unit}, {tz}]"
557
+ tm.assert_series_equal(isna(ser), null_loc)
558
+
559
+ result = ser.fillna(Timestamp("2011-01-02 10:00"))
560
+ expected = Series(
561
+ [
562
+ Timestamp("2011-01-01 10:00", tz=tz),
563
+ Timestamp("2011-01-02 10:00"),
564
+ Timestamp("2011-01-03 10:00", tz=tz),
565
+ Timestamp("2011-01-02 10:00"),
566
+ ]
567
+ )
568
+ tm.assert_series_equal(expected, result)
569
+ tm.assert_series_equal(isna(ser), null_loc)
570
+
571
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz))
572
+ idx = DatetimeIndex(
573
+ [
574
+ "2011-01-01 10:00",
575
+ "2011-01-02 10:00",
576
+ "2011-01-03 10:00",
577
+ "2011-01-02 10:00",
578
+ ],
579
+ tz=tz,
580
+ ).as_unit(unit)
581
+ expected = Series(idx)
582
+ tm.assert_series_equal(expected, result)
583
+ tm.assert_series_equal(isna(ser), null_loc)
584
+
585
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz).to_pydatetime())
586
+ idx = DatetimeIndex(
587
+ [
588
+ "2011-01-01 10:00",
589
+ "2011-01-02 10:00",
590
+ "2011-01-03 10:00",
591
+ "2011-01-02 10:00",
592
+ ],
593
+ tz=tz,
594
+ ).as_unit(unit)
595
+ expected = Series(idx)
596
+ tm.assert_series_equal(expected, result)
597
+ tm.assert_series_equal(isna(ser), null_loc)
598
+
599
+ result = ser.fillna("AAA")
600
+ expected = Series(
601
+ [
602
+ Timestamp("2011-01-01 10:00", tz=tz),
603
+ "AAA",
604
+ Timestamp("2011-01-03 10:00", tz=tz),
605
+ "AAA",
606
+ ],
607
+ dtype=object,
608
+ )
609
+ tm.assert_series_equal(expected, result)
610
+ tm.assert_series_equal(isna(ser), null_loc)
611
+
612
+ result = ser.fillna(
613
+ {
614
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
615
+ 3: Timestamp("2011-01-04 10:00"),
616
+ }
617
+ )
618
+ expected = Series(
619
+ [
620
+ Timestamp("2011-01-01 10:00", tz=tz),
621
+ Timestamp("2011-01-02 10:00", tz=tz),
622
+ Timestamp("2011-01-03 10:00", tz=tz),
623
+ Timestamp("2011-01-04 10:00"),
624
+ ]
625
+ )
626
+ tm.assert_series_equal(expected, result)
627
+ tm.assert_series_equal(isna(ser), null_loc)
628
+
629
+ result = ser.fillna(
630
+ {
631
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
632
+ 3: Timestamp("2011-01-04 10:00", tz=tz),
633
+ }
634
+ )
635
+ expected = Series(
636
+ [
637
+ Timestamp("2011-01-01 10:00", tz=tz),
638
+ Timestamp("2011-01-02 10:00", tz=tz),
639
+ Timestamp("2011-01-03 10:00", tz=tz),
640
+ Timestamp("2011-01-04 10:00", tz=tz),
641
+ ]
642
+ ).dt.as_unit(unit)
643
+ tm.assert_series_equal(expected, result)
644
+ tm.assert_series_equal(isna(ser), null_loc)
645
+
646
+ # filling with a naive/other zone, coerce to object
647
+ result = ser.fillna(Timestamp("20130101"))
648
+ expected = Series(
649
+ [
650
+ Timestamp("2011-01-01 10:00", tz=tz),
651
+ Timestamp("2013-01-01"),
652
+ Timestamp("2011-01-03 10:00", tz=tz),
653
+ Timestamp("2013-01-01"),
654
+ ]
655
+ )
656
+ tm.assert_series_equal(expected, result)
657
+ tm.assert_series_equal(isna(ser), null_loc)
658
+
659
+ # pre-2.0 fillna with mixed tzs would cast to object, in 2.0
660
+ # it retains dtype.
661
+ result = ser.fillna(Timestamp("20130101", tz="US/Pacific"))
662
+ expected = Series(
663
+ [
664
+ Timestamp("2011-01-01 10:00", tz=tz),
665
+ Timestamp("2013-01-01", tz="US/Pacific").tz_convert(tz),
666
+ Timestamp("2011-01-03 10:00", tz=tz),
667
+ Timestamp("2013-01-01", tz="US/Pacific").tz_convert(tz),
668
+ ]
669
+ ).dt.as_unit(unit)
670
+ tm.assert_series_equal(expected, result)
671
+ tm.assert_series_equal(isna(ser), null_loc)
672
+
673
+ def test_fillna_dt64tz_with_method(self):
674
+ # with timezone
675
+ # GH#15855
676
+ ser = Series([Timestamp("2012-11-11 00:00:00+01:00"), NaT])
677
+ exp = Series(
678
+ [
679
+ Timestamp("2012-11-11 00:00:00+01:00"),
680
+ Timestamp("2012-11-11 00:00:00+01:00"),
681
+ ]
682
+ )
683
+ tm.assert_series_equal(ser.fillna(method="pad"), exp)
684
+
685
+ ser = Series([NaT, Timestamp("2012-11-11 00:00:00+01:00")])
686
+ exp = Series(
687
+ [
688
+ Timestamp("2012-11-11 00:00:00+01:00"),
689
+ Timestamp("2012-11-11 00:00:00+01:00"),
690
+ ]
691
+ )
692
+ tm.assert_series_equal(ser.fillna(method="bfill"), exp)
693
+
694
+ def test_fillna_pytimedelta(self):
695
+ # GH#8209
696
+ ser = Series([np.nan, Timedelta("1 days")], index=["A", "B"])
697
+
698
+ result = ser.fillna(timedelta(1))
699
+ expected = Series(Timedelta("1 days"), index=["A", "B"])
700
+ tm.assert_series_equal(result, expected)
701
+
702
+ def test_fillna_period(self):
703
+ # GH#13737
704
+ ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])
705
+
706
+ res = ser.fillna(Period("2012-01", freq="M"))
707
+ exp = Series([Period("2011-01", freq="M"), Period("2012-01", freq="M")])
708
+ tm.assert_series_equal(res, exp)
709
+ assert res.dtype == "Period[M]"
710
+
711
+ def test_fillna_dt64_timestamp(self, frame_or_series):
712
+ ser = Series(
713
+ [
714
+ Timestamp("20130101"),
715
+ Timestamp("20130101"),
716
+ Timestamp("20130102"),
717
+ Timestamp("20130103 9:01:01"),
718
+ ]
719
+ )
720
+ ser[2] = np.nan
721
+ obj = frame_or_series(ser)
722
+
723
+ # reg fillna
724
+ result = obj.fillna(Timestamp("20130104"))
725
+ expected = Series(
726
+ [
727
+ Timestamp("20130101"),
728
+ Timestamp("20130101"),
729
+ Timestamp("20130104"),
730
+ Timestamp("20130103 9:01:01"),
731
+ ]
732
+ )
733
+ expected = frame_or_series(expected)
734
+ tm.assert_equal(result, expected)
735
+
736
+ result = obj.fillna(NaT)
737
+ expected = obj
738
+ tm.assert_equal(result, expected)
739
+
740
+ def test_fillna_dt64_non_nao(self):
741
+ # GH#27419
742
+ ser = Series([Timestamp("2010-01-01"), NaT, Timestamp("2000-01-01")])
743
+ val = np.datetime64("1975-04-05", "ms")
744
+
745
+ result = ser.fillna(val)
746
+ expected = Series(
747
+ [Timestamp("2010-01-01"), Timestamp("1975-04-05"), Timestamp("2000-01-01")]
748
+ )
749
+ tm.assert_series_equal(result, expected)
750
+
751
+ def test_fillna_numeric_inplace(self):
752
+ x = Series([np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"])
753
+ y = x.copy()
754
+
755
+ return_value = y.fillna(value=0, inplace=True)
756
+ assert return_value is None
757
+
758
+ expected = x.fillna(value=0)
759
+ tm.assert_series_equal(y, expected)
760
+
761
+ # ---------------------------------------------------------------
762
+ # CategoricalDtype
763
+
764
+ @pytest.mark.parametrize(
765
+ "fill_value, expected_output",
766
+ [
767
+ ("a", ["a", "a", "b", "a", "a"]),
768
+ ({1: "a", 3: "b", 4: "b"}, ["a", "a", "b", "b", "b"]),
769
+ ({1: "a"}, ["a", "a", "b", np.nan, np.nan]),
770
+ ({1: "a", 3: "b"}, ["a", "a", "b", "b", np.nan]),
771
+ (Series("a"), ["a", np.nan, "b", np.nan, np.nan]),
772
+ (Series("a", index=[1]), ["a", "a", "b", np.nan, np.nan]),
773
+ (Series({1: "a", 3: "b"}), ["a", "a", "b", "b", np.nan]),
774
+ (Series(["a", "b"], index=[3, 4]), ["a", np.nan, "b", "a", "b"]),
775
+ ],
776
+ )
777
+ def test_fillna_categorical(self, fill_value, expected_output):
778
+ # GH#17033
779
+ # Test fillna for a Categorical series
780
+ data = ["a", np.nan, "b", np.nan, np.nan]
781
+ ser = Series(Categorical(data, categories=["a", "b"]))
782
+ exp = Series(Categorical(expected_output, categories=["a", "b"]))
783
+ result = ser.fillna(fill_value)
784
+ tm.assert_series_equal(result, exp)
785
+
786
+ @pytest.mark.parametrize(
787
+ "fill_value, expected_output",
788
+ [
789
+ (Series(["a", "b", "c", "d", "e"]), ["a", "b", "b", "d", "e"]),
790
+ (Series(["b", "d", "a", "d", "a"]), ["a", "d", "b", "d", "a"]),
791
+ (
792
+ Series(
793
+ Categorical(
794
+ ["b", "d", "a", "d", "a"], categories=["b", "c", "d", "e", "a"]
795
+ )
796
+ ),
797
+ ["a", "d", "b", "d", "a"],
798
+ ),
799
+ ],
800
+ )
801
+ def test_fillna_categorical_with_new_categories(self, fill_value, expected_output):
802
+ # GH#26215
803
+ data = ["a", np.nan, "b", np.nan, np.nan]
804
+ ser = Series(Categorical(data, categories=["a", "b", "c", "d", "e"]))
805
+ exp = Series(Categorical(expected_output, categories=["a", "b", "c", "d", "e"]))
806
+ result = ser.fillna(fill_value)
807
+ tm.assert_series_equal(result, exp)
808
+
809
+ def test_fillna_categorical_raises(self):
810
+ data = ["a", np.nan, "b", np.nan, np.nan]
811
+ ser = Series(Categorical(data, categories=["a", "b"]))
812
+ cat = ser._values
813
+
814
+ msg = "Cannot setitem on a Categorical with a new category"
815
+ with pytest.raises(TypeError, match=msg):
816
+ ser.fillna("d")
817
+
818
+ msg2 = "Length of 'value' does not match."
819
+ with pytest.raises(ValueError, match=msg2):
820
+ cat.fillna(Series("d"))
821
+
822
+ with pytest.raises(TypeError, match=msg):
823
+ ser.fillna({1: "d", 3: "a"})
824
+
825
+ msg = '"value" parameter must be a scalar or dict, but you passed a "list"'
826
+ with pytest.raises(TypeError, match=msg):
827
+ ser.fillna(["a", "b"])
828
+
829
+ msg = '"value" parameter must be a scalar or dict, but you passed a "tuple"'
830
+ with pytest.raises(TypeError, match=msg):
831
+ ser.fillna(("a", "b"))
832
+
833
+ msg = (
834
+ '"value" parameter must be a scalar, dict '
835
+ 'or Series, but you passed a "DataFrame"'
836
+ )
837
+ with pytest.raises(TypeError, match=msg):
838
+ ser.fillna(DataFrame({1: ["a"], 3: ["b"]}))
839
+
840
+ @pytest.mark.parametrize("dtype", [float, "float32", "float64"])
841
+ @pytest.mark.parametrize("fill_type", tm.ALL_REAL_NUMPY_DTYPES)
842
+ @pytest.mark.parametrize("scalar", [True, False])
843
+ def test_fillna_float_casting(self, dtype, fill_type, scalar):
844
+ # GH-43424
845
+ ser = Series([np.nan, 1.2], dtype=dtype)
846
+ fill_values = Series([2, 2], dtype=fill_type)
847
+ if scalar:
848
+ fill_values = fill_values.dtype.type(2)
849
+
850
+ result = ser.fillna(fill_values)
851
+ expected = Series([2.0, 1.2], dtype=dtype)
852
+ tm.assert_series_equal(result, expected)
853
+
854
+ ser = Series([np.nan, 1.2], dtype=dtype)
855
+ mask = ser.isna().to_numpy()
856
+ ser[mask] = fill_values
857
+ tm.assert_series_equal(ser, expected)
858
+
859
+ ser = Series([np.nan, 1.2], dtype=dtype)
860
+ ser.mask(mask, fill_values, inplace=True)
861
+ tm.assert_series_equal(ser, expected)
862
+
863
+ ser = Series([np.nan, 1.2], dtype=dtype)
864
+ res = ser.where(~mask, fill_values)
865
+ tm.assert_series_equal(res, expected)
866
+
867
+ def test_fillna_f32_upcast_with_dict(self):
868
+ # GH-43424
869
+ ser = Series([np.nan, 1.2], dtype=np.float32)
870
+ result = ser.fillna({0: 1})
871
+ expected = Series([1.0, 1.2], dtype=np.float32)
872
+ tm.assert_series_equal(result, expected)
873
+
874
+ # ---------------------------------------------------------------
875
+ # Invalid Usages
876
+
877
+ def test_fillna_invalid_method(self, datetime_series):
878
+ try:
879
+ datetime_series.fillna(method="ffil")
880
+ except ValueError as inst:
881
+ assert "ffil" in str(inst)
882
+
883
+ def test_fillna_listlike_invalid(self):
884
+ ser = Series(np.random.default_rng(2).integers(-100, 100, 50))
885
+ msg = '"value" parameter must be a scalar or dict, but you passed a "list"'
886
+ with pytest.raises(TypeError, match=msg):
887
+ ser.fillna([1, 2])
888
+
889
+ msg = '"value" parameter must be a scalar or dict, but you passed a "tuple"'
890
+ with pytest.raises(TypeError, match=msg):
891
+ ser.fillna((1, 2))
892
+
893
+ def test_fillna_method_and_limit_invalid(self):
894
+ # related GH#9217, make sure limit is an int and greater than 0
895
+ ser = Series([1, 2, 3, None])
896
+ msg = "|".join(
897
+ [
898
+ r"Cannot specify both 'value' and 'method'\.",
899
+ "Limit must be greater than 0",
900
+ "Limit must be an integer",
901
+ ]
902
+ )
903
+ for limit in [-1, 0, 1.0, 2.0]:
904
+ for method in ["backfill", "bfill", "pad", "ffill", None]:
905
+ with pytest.raises(ValueError, match=msg):
906
+ ser.fillna(1, limit=limit, method=method)
907
+
908
+ def test_fillna_datetime64_with_timezone_tzinfo(self):
909
+ # https://github.com/pandas-dev/pandas/issues/38851
910
+ # different tzinfos representing UTC treated as equal
911
+ ser = Series(date_range("2020", periods=3, tz="UTC"))
912
+ expected = ser.copy()
913
+ ser[1] = NaT
914
+ result = ser.fillna(datetime(2020, 1, 2, tzinfo=timezone.utc))
915
+ tm.assert_series_equal(result, expected)
916
+
917
+ # pre-2.0 we cast to object with mixed tzs, in 2.0 we retain dtype
918
+ ts = Timestamp("2000-01-01", tz="US/Pacific")
919
+ ser2 = Series(ser._values.tz_convert("dateutil/US/Pacific"))
920
+ assert ser2.dtype.kind == "M"
921
+ result = ser2.fillna(ts)
922
+ expected = Series(
923
+ [ser2[0], ts.tz_convert(ser2.dtype.tz), ser2[2]],
924
+ dtype=ser2.dtype,
925
+ )
926
+ tm.assert_series_equal(result, expected)
927
+
928
+ @pytest.mark.parametrize(
929
+ "input, input_fillna, expected_data, expected_categories",
930
+ [
931
+ (["A", "B", None, "A"], "B", ["A", "B", "B", "A"], ["A", "B"]),
932
+ (["A", "B", np.nan, "A"], "B", ["A", "B", "B", "A"], ["A", "B"]),
933
+ ],
934
+ )
935
+ def test_fillna_categorical_accept_same_type(
936
+ self, input, input_fillna, expected_data, expected_categories
937
+ ):
938
+ # GH32414
939
+ cat = Categorical(input)
940
+ ser = Series(cat).fillna(input_fillna)
941
+ filled = cat.fillna(ser)
942
+ result = cat.fillna(filled)
943
+ expected = Categorical(expected_data, categories=expected_categories)
944
+ tm.assert_categorical_equal(result, expected)
945
+
946
+
947
+ @pytest.mark.filterwarnings(
948
+ "ignore:Series.fillna with 'method' is deprecated:FutureWarning"
949
+ )
950
+ class TestFillnaPad:
951
+ def test_fillna_bug(self):
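+ # ffill leaves the leading NaN in place; bfill leaves the trailing NaN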
952
+ ser = Series([np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"])
953
+ filled = ser.fillna(method="ffill")
954
+ expected = Series([np.nan, 1.0, 1.0, 3.0, 3.0], ser.index)
955
+ tm.assert_series_equal(filled, expected)
956
+
957
+ filled = ser.fillna(method="bfill")
958
+ expected = Series([1.0, 1.0, 3.0, 3.0, np.nan], ser.index)
959
+ tm.assert_series_equal(filled, expected)
960
+
961
+ def test_ffill(self):
962
+ ts = Series(
963
+ [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
964
+ )
965
+ ts.iloc[2] = np.nan
966
+ tm.assert_series_equal(ts.ffill(), ts.fillna(method="ffill"))
967
+
968
+ def test_ffill_mixed_dtypes_without_missing_data(self):
969
+ # GH#14956
970
+ series = Series([datetime(2015, 1, 1, tzinfo=pytz.utc), 1])
971
+ result = series.ffill()
972
+ tm.assert_series_equal(series, result)
973
+
974
+ def test_bfill(self):
975
+ ts = Series(
976
+ [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
977
+ )
978
+ ts.iloc[2] = np.nan
979
+ tm.assert_series_equal(ts.bfill(), ts.fillna(method="bfill"))
980
+
981
+ def test_pad_nan(self):
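+ # inplace pad-fill propagates values forward but cannot fill the leading NaN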
982
+ x = Series(
983
+ [np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"], dtype=float
984
+ )
985
+
986
+ return_value = x.fillna(method="pad", inplace=True)
987
+ assert return_value is None
988
+
989
+ expected = Series(
990
+ [np.nan, 1.0, 1.0, 3.0, 3.0], ["z", "a", "b", "c", "d"], dtype=float
991
+ )
992
+ tm.assert_series_equal(x[1:], expected[1:])
993
+ assert np.isnan(x.iloc[0]) and np.isnan(expected.iloc[0])
994
+
995
+ def test_series_fillna_limit(self):
996
+ index = np.arange(10)
997
+ s = Series(np.random.default_rng(2).standard_normal(10), index=index)
998
+
999
+ result = s[:2].reindex(index)
1000
+ result = result.fillna(method="pad", limit=5)
1001
+
1002
+ expected = s[:2].reindex(index).fillna(method="pad")
1003
+ expected[-3:] = np.nan
1004
+ tm.assert_series_equal(result, expected)
1005
+
1006
+ result = s[-2:].reindex(index)
1007
+ result = result.fillna(method="bfill", limit=5)
1008
+
1009
+ expected = s[-2:].reindex(index).fillna(method="backfill")
1010
+ expected[:3] = np.nan
1011
+ tm.assert_series_equal(result, expected)
1012
+
1013
+ def test_series_pad_backfill_limit(self):
1014
+ index = np.arange(10)
1015
+ s = Series(np.random.default_rng(2).standard_normal(10), index=index)
1016
+
1017
+ result = s[:2].reindex(index, method="pad", limit=5)
1018
+
1019
+ expected = s[:2].reindex(index).fillna(method="pad")
1020
+ expected[-3:] = np.nan
1021
+ tm.assert_series_equal(result, expected)
1022
+
1023
+ result = s[-2:].reindex(index, method="backfill", limit=5)
1024
+
1025
+ expected = s[-2:].reindex(index).fillna(method="backfill")
1026
+ expected[:3] = np.nan
1027
+ tm.assert_series_equal(result, expected)
1028
+
1029
+ def test_fillna_int(self):
1030
+ ser = Series(np.random.default_rng(2).integers(-100, 100, 50))
1031
+ return_value = ser.fillna(method="ffill", inplace=True)
1032
+ assert return_value is None
1033
+ tm.assert_series_equal(ser.fillna(method="ffill", inplace=False), ser)
1034
+
1035
+ def test_datetime64tz_fillna_round_issue(self):
1036
+ # GH#14872
1037
+
1038
+ data = Series(
1039
+ [NaT, NaT, datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc)]
1040
+ )
1041
+
1042
+ filled = data.bfill()
1043
+
1044
+ expected = Series(
1045
+ [
1046
+ datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
1047
+ datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
1048
+ datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
1049
+ ]
1050
+ )
1051
+
1052
+ tm.assert_series_equal(filled, expected)
1053
+
1054
+ def test_fillna_parr(self):
1055
+ # GH-24537
1056
+ dti = date_range(
1057
+ Timestamp.max - Timedelta(nanoseconds=10), periods=5, freq="ns"
1058
+ )
1059
+ ser = Series(dti.to_period("ns"))
1060
+ ser[2] = NaT
1061
+ arr = period_array(
1062
+ [
1063
+ Timestamp("2262-04-11 23:47:16.854775797"),
1064
+ Timestamp("2262-04-11 23:47:16.854775798"),
1065
+ Timestamp("2262-04-11 23:47:16.854775798"),
1066
+ Timestamp("2262-04-11 23:47:16.854775800"),
1067
+ Timestamp("2262-04-11 23:47:16.854775801"),
1068
+ ],
1069
+ freq="ns",
1070
+ )
1071
+ expected = Series(arr)
1072
+
1073
+ filled = ser.ffill()
1074
+
1075
+ tm.assert_series_equal(filled, expected)
1076
+
1077
+ @pytest.mark.parametrize("func", ["pad", "backfill"])
1078
+ def test_pad_backfill_deprecated(self, func):
1079
+ # GH#33396
1080
+ ser = Series([1, 2, 3])
1081
+ with tm.assert_produces_warning(FutureWarning):
1082
+ getattr(ser, func)()
1083
+
1084
+
1085
+ @pytest.mark.parametrize(
1086
+ "data, expected_data, method, kwargs",
1087
+ (
1088
+ (
1089
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1090
+ [np.nan, np.nan, 3.0, 3.0, 3.0, 3.0, 7.0, np.nan, np.nan],
1091
+ "ffill",
1092
+ {"limit_area": "inside"},
1093
+ ),
1094
+ (
1095
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1096
+ [np.nan, np.nan, 3.0, 3.0, np.nan, np.nan, 7.0, np.nan, np.nan],
1097
+ "ffill",
1098
+ {"limit_area": "inside", "limit": 1},
1099
+ ),
1100
+ (
1101
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1102
+ [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, 7.0],
1103
+ "ffill",
1104
+ {"limit_area": "outside"},
1105
+ ),
1106
+ (
1107
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1108
+ [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, np.nan],
1109
+ "ffill",
1110
+ {"limit_area": "outside", "limit": 1},
1111
+ ),
1112
+ (
1113
+ [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
1114
+ [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
1115
+ "ffill",
1116
+ {"limit_area": "outside", "limit": 1},
1117
+ ),
1118
+ (
1119
+ range(5),
1120
+ range(5),
1121
+ "ffill",
1122
+ {"limit_area": "outside", "limit": 1},
1123
+ ),
1124
+ (
1125
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1126
+ [np.nan, np.nan, 3.0, 7.0, 7.0, 7.0, 7.0, np.nan, np.nan],
1127
+ "bfill",
1128
+ {"limit_area": "inside"},
1129
+ ),
1130
+ (
1131
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1132
+ [np.nan, np.nan, 3.0, np.nan, np.nan, 7.0, 7.0, np.nan, np.nan],
1133
+ "bfill",
1134
+ {"limit_area": "inside", "limit": 1},
1135
+ ),
1136
+ (
1137
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1138
+ [3.0, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
1139
+ "bfill",
1140
+ {"limit_area": "outside"},
1141
+ ),
1142
+ (
1143
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
1144
+ [np.nan, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
1145
+ "bfill",
1146
+ {"limit_area": "outside", "limit": 1},
1147
+ ),
1148
+ ),
1149
+ )
1150
+ def test_ffill_bfill_limit_area(data, expected_data, method, kwargs):
1151
+ # GH#56492
1152
+ s = Series(data)
1153
+ expected = Series(expected_data)
1154
+ result = getattr(s, method)(**kwargs)
1155
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_infer_objects.py ADDED
@@ -0,0 +1,56 @@
1
+ import numpy as np
2
+
3
+ from pandas import (
4
+ Series,
5
+ interval_range,
6
+ )
7
+ import pandas._testing as tm
8
+
9
+
10
+ class TestInferObjects:
11
+ def test_copy(self, index_or_series):
12
+ # GH#50096
13
+ # case where we don't need to do inference because it is already non-object
14
+ obj = index_or_series(np.array([1, 2, 3], dtype="int64"))
15
+
16
+ result = obj.infer_objects(copy=False)
17
+ assert tm.shares_memory(result, obj)
18
+
19
+ # case where we try to do inference but can't do better than object
20
+ obj2 = index_or_series(np.array(["foo", 2], dtype=object))
21
+ result2 = obj2.infer_objects(copy=False)
22
+ assert tm.shares_memory(result2, obj2)
23
+
24
+ def test_infer_objects_series(self, index_or_series):
25
+ # GH#11221
26
+ actual = index_or_series(np.array([1, 2, 3], dtype="O")).infer_objects()
27
+ expected = index_or_series([1, 2, 3])
28
+ tm.assert_equal(actual, expected)
29
+
30
+ actual = index_or_series(np.array([1, 2, 3, None], dtype="O")).infer_objects()
31
+ expected = index_or_series([1.0, 2.0, 3.0, np.nan])
32
+ tm.assert_equal(actual, expected)
33
+
34
+ # only soft conversions, unconvertible pass thru unchanged
35
+
36
+ obj = index_or_series(np.array([1, 2, 3, None, "a"], dtype="O"))
37
+ actual = obj.infer_objects()
38
+ expected = index_or_series([1, 2, 3, None, "a"], dtype=object)
39
+
40
+ assert actual.dtype == "object"
41
+ tm.assert_equal(actual, expected)
42
+
43
+ def test_infer_objects_interval(self, index_or_series):
44
+ # GH#50090
45
+ ii = interval_range(1, 10)
46
+ obj = index_or_series(ii)
47
+
48
+ result = obj.astype(object).infer_objects()
49
+ tm.assert_equal(result, obj)
50
+
51
+ def test_infer_objects_bytes(self):
52
+ # GH#49650
53
+ ser = Series([b"a"], dtype="bytes")
54
+ expected = ser.copy()
55
+ result = ser.infer_objects()
56
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_info.py ADDED
@@ -0,0 +1,181 @@
1
+ from io import StringIO
2
+ from string import ascii_uppercase
3
+ import textwrap
4
+
5
+ import numpy as np
6
+ import pytest
7
+
8
+ from pandas.compat import PYPY
9
+
10
+ from pandas import (
11
+ CategoricalIndex,
12
+ MultiIndex,
13
+ Series,
14
+ date_range,
15
+ )
16
+
17
+
18
+ def test_info_categorical_column_just_works():
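+ # smoke test: Series.info() should not raise for categorical data,
+ # including on a filtered subset of the Series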
19
+ n = 2500
20
+ data = np.array(list("abcdefghij")).take(
21
+ np.random.default_rng(2).integers(0, 10, size=n, dtype=int)
22
+ )
23
+ s = Series(data).astype("category")
24
+ s.isna()
25
+ buf = StringIO()
26
+ s.info(buf=buf)
27
+
28
+ s2 = s[s == "d"]
29
+ buf = StringIO()
30
+ s2.info(buf=buf)
31
+
32
+
33
+ def test_info_categorical():
34
+ # GH14298
35
+ idx = CategoricalIndex(["a", "b"])
36
+ s = Series(np.zeros(2), index=idx)
37
+ buf = StringIO()
38
+ s.info(buf=buf)
39
+
40
+
41
+ @pytest.mark.parametrize("verbose", [True, False])
42
+ def test_info_series(lexsorted_two_level_string_multiindex, verbose):
43
+ index = lexsorted_two_level_string_multiindex
44
+ ser = Series(range(len(index)), index=index, name="sth")
45
+ buf = StringIO()
46
+ ser.info(verbose=verbose, buf=buf)
47
+ result = buf.getvalue()
48
+
49
+ expected = textwrap.dedent(
50
+ """\
51
+ <class 'pandas.core.series.Series'>
52
+ MultiIndex: 10 entries, ('foo', 'one') to ('qux', 'three')
53
+ """
54
+ )
55
+ if verbose:
56
+ expected += textwrap.dedent(
57
+ """\
58
+ Series name: sth
59
+ Non-Null Count Dtype
60
+ -------------- -----
61
+ 10 non-null int64
62
+ """
63
+ )
64
+ expected += textwrap.dedent(
65
+ f"""\
66
+ dtypes: int64(1)
67
+ memory usage: {ser.memory_usage()}.0+ bytes
68
+ """
69
+ )
70
+ assert result == expected
71
+
72
+
73
+ def test_info_memory():
74
+ s = Series([1, 2], dtype="i8")
75
+ buf = StringIO()
76
+ s.info(buf=buf)
77
+ result = buf.getvalue()
78
+ memory_bytes = float(s.memory_usage())
79
+ expected = textwrap.dedent(
80
+ f"""\
81
+ <class 'pandas.core.series.Series'>
82
+ RangeIndex: 2 entries, 0 to 1
83
+ Series name: None
84
+ Non-Null Count Dtype
85
+ -------------- -----
86
+ 2 non-null int64
87
+ dtypes: int64(1)
88
+ memory usage: {memory_bytes} bytes
89
+ """
90
+ )
91
+ assert result == expected
92
+
93
+
94
+ def test_info_wide():
95
+ s = Series(np.random.default_rng(2).standard_normal(101))
96
+ msg = "Argument `max_cols` can only be passed in DataFrame.info, not Series.info"
97
+ with pytest.raises(ValueError, match=msg):
98
+ s.info(max_cols=1)
99
+
100
+
101
+ def test_info_shows_dtypes():
102
+ dtypes = [
103
+ "int64",
104
+ "float64",
105
+ "datetime64[ns]",
106
+ "timedelta64[ns]",
107
+ "complex128",
108
+ "object",
109
+ "bool",
110
+ ]
111
+ n = 10
112
+ for dtype in dtypes:
113
+ s = Series(np.random.default_rng(2).integers(2, size=n).astype(dtype))
114
+ buf = StringIO()
115
+ s.info(buf=buf)
116
+ res = buf.getvalue()
117
+ name = f"{n:d} non-null {dtype}"
118
+ assert name in res
119
+
120
+
121
+ @pytest.mark.xfail(PYPY, reason="on PyPy deep=True doesn't change result")
122
+ def test_info_memory_usage_deep_not_pypy():
123
+ s_with_object_index = Series({"a": [1]}, index=["foo"])
124
+ assert s_with_object_index.memory_usage(
125
+ index=True, deep=True
126
+ ) > s_with_object_index.memory_usage(index=True)
127
+
128
+ s_object = Series({"a": ["a"]})
129
+ assert s_object.memory_usage(deep=True) > s_object.memory_usage()
130
+
131
+
132
+ @pytest.mark.xfail(not PYPY, reason="on PyPy deep=True does not change result")
133
+ def test_info_memory_usage_deep_pypy():
134
+ s_with_object_index = Series({"a": [1]}, index=["foo"])
135
+ assert s_with_object_index.memory_usage(
136
+ index=True, deep=True
137
+ ) == s_with_object_index.memory_usage(index=True)
138
+
139
+ s_object = Series({"a": ["a"]})
140
+ assert s_object.memory_usage(deep=True) == s_object.memory_usage()
141
+
142
+
143
+ @pytest.mark.parametrize(
144
+ "series, plus",
145
+ [
146
+ (Series(1, index=[1, 2, 3]), False),
147
+ (Series(1, index=list("ABC")), True),
148
+ (Series(1, index=MultiIndex.from_product([range(3), range(3)])), False),
149
+ (
150
+ Series(1, index=MultiIndex.from_product([range(3), ["foo", "bar"]])),
151
+ True,
152
+ ),
153
+ ],
154
+ )
155
+ def test_info_memory_usage_qualified(series, plus):
156
+ buf = StringIO()
157
+ series.info(buf=buf)
158
+ if plus:
159
+ assert "+" in buf.getvalue()
160
+ else:
161
+ assert "+" not in buf.getvalue()
162
+
163
+
164
+ def test_info_memory_usage_bug_on_multiindex():
165
+ # GH 14308
166
+ # memory usage introspection should not materialize .values
167
+ N = 100
168
+ M = len(ascii_uppercase)
169
+ index = MultiIndex.from_product(
170
+ [list(ascii_uppercase), date_range("20160101", periods=N)],
171
+ names=["id", "date"],
172
+ )
173
+ s = Series(np.random.default_rng(2).standard_normal(N * M), index=index)
174
+
175
+ unstacked = s.unstack("id")
176
+ assert s.values.nbytes == unstacked.values.nbytes
177
+ assert s.memory_usage(deep=True) > unstacked.memory_usage(deep=True).sum()
178
+
179
+ # high upper bound
180
+ diff = unstacked.memory_usage(deep=True).sum() - s.memory_usage(deep=True)
181
+ assert diff < 2000
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_is_monotonic.py ADDED
@@ -0,0 +1,26 @@
1
+ import numpy as np
2
+
3
+ from pandas import (
4
+ Series,
5
+ date_range,
6
+ )
7
+
8
+
9
+ class TestIsMonotonic:
10
+ def test_is_monotonic_numeric(self):
11
+ ser = Series(np.random.default_rng(2).integers(0, 10, size=1000))
12
+ assert not ser.is_monotonic_increasing
13
+ ser = Series(np.arange(1000))
14
+ assert ser.is_monotonic_increasing is True
15
+ assert ser.is_monotonic_increasing is True
16
+ ser = Series(np.arange(1000, 0, -1))
17
+ assert ser.is_monotonic_decreasing is True
18
+
19
+ def test_is_monotonic_dt64(self):
20
+ ser = Series(date_range("20130101", periods=10))
21
+ assert ser.is_monotonic_increasing is True
22
+ assert ser.is_monotonic_increasing is True
23
+
24
+ ser = Series(list(reversed(ser)))
25
+ assert ser.is_monotonic_increasing is False
26
+ assert ser.is_monotonic_decreasing is True
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_isna.py ADDED
@@ -0,0 +1,35 @@
1
+ """
2
+ We also test Series.notna in this file.
3
+ """
4
+ import numpy as np
5
+
6
+ from pandas import (
7
+ Period,
8
+ Series,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ class TestIsna:
14
+ def test_isna_period_dtype(self):
15
+ # GH#13737
16
+ ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])
17
+
18
+ expected = Series([False, True])
19
+
20
+ result = ser.isna()
21
+ tm.assert_series_equal(result, expected)
22
+
23
+ result = ser.notna()
24
+ tm.assert_series_equal(result, ~expected)
25
+
26
+ def test_isna(self):
27
+ ser = Series([0, 5.4, 3, np.nan, -0.001])
28
+ expected = Series([False, False, False, True, False])
29
+ tm.assert_series_equal(ser.isna(), expected)
30
+ tm.assert_series_equal(ser.notna(), ~expected)
31
+
32
+ ser = Series(["hi", "", np.nan])
33
+ expected = Series([False, False, True])
34
+ tm.assert_series_equal(ser.isna(), expected)
35
+ tm.assert_series_equal(ser.notna(), ~expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_item.py ADDED
@@ -0,0 +1,59 @@
1
+ """
2
+ Series.item method, mainly testing that we get python scalars as opposed to
3
+ numpy scalars.
4
+ """
5
+ import pytest
6
+
7
+ from pandas import (
8
+ Series,
9
+ Timedelta,
10
+ Timestamp,
11
+ date_range,
12
+ )
13
+
14
+
15
+ class TestItem:
16
+ def test_item(self):
17
+ # We are testing that we get python scalars as opposed to numpy scalars
18
+ ser = Series([1])
19
+ result = ser.item()
20
+ assert result == 1
21
+ assert result == ser.iloc[0]
22
+ assert isinstance(result, int) # i.e. not np.int64
23
+
24
+ ser = Series([0.5], index=[3])
25
+ result = ser.item()
26
+ assert isinstance(result, float)
27
+ assert result == 0.5
28
+
29
+ ser = Series([1, 2])
30
+ msg = "can only convert an array of size 1"
31
+ with pytest.raises(ValueError, match=msg):
32
+ ser.item()
33
+
34
+ dti = date_range("2016-01-01", periods=2)
35
+ with pytest.raises(ValueError, match=msg):
36
+ dti.item()
37
+ with pytest.raises(ValueError, match=msg):
38
+ Series(dti).item()
39
+
40
+ val = dti[:1].item()
41
+ assert isinstance(val, Timestamp)
42
+ val = Series(dti)[:1].item()
43
+ assert isinstance(val, Timestamp)
44
+
45
+ tdi = dti - dti
46
+ with pytest.raises(ValueError, match=msg):
47
+ tdi.item()
48
+ with pytest.raises(ValueError, match=msg):
49
+ Series(tdi).item()
50
+
51
+ val = tdi[:1].item()
52
+ assert isinstance(val, Timedelta)
53
+ val = Series(tdi)[:1].item()
54
+ assert isinstance(val, Timedelta)
55
+
56
+ # Case where ser[0] would not work
57
+ ser = Series(dti, index=[5, 6])
58
+ val = ser.iloc[:1].item()
59
+ assert val == dti[0]
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_nlargest.py ADDED
@@ -0,0 +1,248 @@
1
+ """
2
+ Note: for naming purposes, most tests are titled as e.g. "test_nlargest_foo"
3
+ but are implicitly also testing nsmallest_foo.
4
+ """
5
+ from itertools import product
6
+
7
+ import numpy as np
8
+ import pytest
9
+
10
+ import pandas as pd
11
+ from pandas import Series
12
+ import pandas._testing as tm
13
+
14
+ main_dtypes = [
15
+ "datetime",
16
+ "datetimetz",
17
+ "timedelta",
18
+ "int8",
19
+ "int16",
20
+ "int32",
21
+ "int64",
22
+ "float32",
23
+ "float64",
24
+ "uint8",
25
+ "uint16",
26
+ "uint32",
27
+ "uint64",
28
+ ]
29
+
30
+
31
+ @pytest.fixture
32
+ def s_main_dtypes():
33
+ """
34
+ A DataFrame with many dtypes
35
+
36
+ * datetime
37
+ * datetimetz
38
+ * timedelta
39
+ * [u]int{8,16,32,64}
40
+ * float{32,64}
41
+
42
+ The columns are the name of the dtype.
43
+ """
44
+ df = pd.DataFrame(
45
+ {
46
+ "datetime": pd.to_datetime(["2003", "2002", "2001", "2002", "2005"]),
47
+ "datetimetz": pd.to_datetime(
48
+ ["2003", "2002", "2001", "2002", "2005"]
49
+ ).tz_localize("US/Eastern"),
50
+ "timedelta": pd.to_timedelta(["3d", "2d", "1d", "2d", "5d"]),
51
+ }
52
+ )
53
+
54
+ for dtype in [
55
+ "int8",
56
+ "int16",
57
+ "int32",
58
+ "int64",
59
+ "float32",
60
+ "float64",
61
+ "uint8",
62
+ "uint16",
63
+ "uint32",
64
+ "uint64",
65
+ ]:
66
+ df[dtype] = Series([3, 2, 1, 2, 5], dtype=dtype)
67
+
68
+ return df
69
+
70
+
71
+ @pytest.fixture(params=main_dtypes)
72
+ def s_main_dtypes_split(request, s_main_dtypes):
73
+ """Each series in s_main_dtypes."""
74
+ return s_main_dtypes[request.param]
75
+
76
+
77
+ def assert_check_nselect_boundary(vals, dtype, method):
78
+ # helper function for 'test_boundary_{dtype}' tests
79
+ ser = Series(vals, dtype=dtype)
80
+ result = getattr(ser, method)(3)
81
+ expected_idxr = [0, 1, 2] if method == "nsmallest" else [3, 2, 1]
82
+ expected = ser.loc[expected_idxr]
83
+ tm.assert_series_equal(result, expected)
84
+
85
+
86
+ class TestSeriesNLargestNSmallest:
87
+ @pytest.mark.parametrize(
88
+ "r",
89
+ [
90
+ Series([3.0, 2, 1, 2, "5"], dtype="object"),
91
+ Series([3.0, 2, 1, 2, 5], dtype="object"),
92
+ # not supported on some archs
93
+ # Series([3., 2, 1, 2, 5], dtype='complex256'),
94
+ Series([3.0, 2, 1, 2, 5], dtype="complex128"),
95
+ Series(list("abcde")),
96
+ Series(list("abcde"), dtype="category"),
97
+ ],
98
+ )
99
+ def test_nlargest_error(self, r):
100
+ dt = r.dtype
101
+ msg = f"Cannot use method 'n(largest|smallest)' with dtype {dt}"
102
+ args = 2, len(r), 0, -1
103
+ methods = r.nlargest, r.nsmallest
104
+ for method, arg in product(methods, args):
105
+ with pytest.raises(TypeError, match=msg):
106
+ method(arg)
107
+
108
+ def test_nsmallest_nlargest(self, s_main_dtypes_split):
109
+ # float, int, datetime64 (use i8), timedelta64 (same),
110
+ # object that are numbers, object that are strings
111
+ ser = s_main_dtypes_split
112
+
113
+ tm.assert_series_equal(ser.nsmallest(2), ser.iloc[[2, 1]])
114
+ tm.assert_series_equal(ser.nsmallest(2, keep="last"), ser.iloc[[2, 3]])
115
+
116
+ empty = ser.iloc[0:0]
117
+ tm.assert_series_equal(ser.nsmallest(0), empty)
118
+ tm.assert_series_equal(ser.nsmallest(-1), empty)
119
+ tm.assert_series_equal(ser.nlargest(0), empty)
120
+ tm.assert_series_equal(ser.nlargest(-1), empty)
121
+
122
+ tm.assert_series_equal(ser.nsmallest(len(ser)), ser.sort_values())
123
+ tm.assert_series_equal(ser.nsmallest(len(ser) + 1), ser.sort_values())
124
+ tm.assert_series_equal(ser.nlargest(len(ser)), ser.iloc[[4, 0, 1, 3, 2]])
125
+ tm.assert_series_equal(ser.nlargest(len(ser) + 1), ser.iloc[[4, 0, 1, 3, 2]])
126
+
127
+ def test_nlargest_misc(self):
128
+ ser = Series([3.0, np.nan, 1, 2, 5])
129
+ result = ser.nlargest()
130
+ expected = ser.iloc[[4, 0, 3, 2, 1]]
131
+ tm.assert_series_equal(result, expected)
132
+ result = ser.nsmallest()
133
+ expected = ser.iloc[[2, 3, 0, 4, 1]]
134
+ tm.assert_series_equal(result, expected)
135
+
136
+ msg = 'keep must be either "first", "last"'
+ with pytest.raises(ValueError, match=msg):
+ ser.nsmallest(keep="invalid")
+ with pytest.raises(ValueError, match=msg):
+ ser.nlargest(keep="invalid")
+
+ # GH#15297
+ ser = Series([1] * 5, index=[1, 2, 3, 4, 5])
+ expected_first = Series([1] * 3, index=[1, 2, 3])
+ expected_last = Series([1] * 3, index=[5, 4, 3])
+
+ result = ser.nsmallest(3)
+ tm.assert_series_equal(result, expected_first)
+
+ result = ser.nsmallest(3, keep="last")
+ tm.assert_series_equal(result, expected_last)
+
+ result = ser.nlargest(3)
+ tm.assert_series_equal(result, expected_first)
+
+ result = ser.nlargest(3, keep="last")
+ tm.assert_series_equal(result, expected_last)
+
+ @pytest.mark.parametrize("n", range(1, 5))
+ def test_nlargest_n(self, n):
+ # GH 13412
+ ser = Series([1, 4, 3, 2], index=[0, 0, 1, 1])
+ result = ser.nlargest(n)
+ expected = ser.sort_values(ascending=False).head(n)
+ tm.assert_series_equal(result, expected)
+
+ result = ser.nsmallest(n)
+ expected = ser.sort_values().head(n)
+ tm.assert_series_equal(result, expected)
+
+ def test_nlargest_boundary_integer(self, nselect_method, any_int_numpy_dtype):
+ # GH#21426
+ dtype_info = np.iinfo(any_int_numpy_dtype)
+ min_val, max_val = dtype_info.min, dtype_info.max
+ vals = [min_val, min_val + 1, max_val - 1, max_val]
+ assert_check_nselect_boundary(vals, any_int_numpy_dtype, nselect_method)
+
+ def test_nlargest_boundary_float(self, nselect_method, float_numpy_dtype):
+ # GH#21426
+ dtype_info = np.finfo(float_numpy_dtype)
+ min_val, max_val = dtype_info.min, dtype_info.max
+ min_2nd, max_2nd = np.nextafter([min_val, max_val], 0, dtype=float_numpy_dtype)
+ vals = [min_val, min_2nd, max_2nd, max_val]
+ assert_check_nselect_boundary(vals, float_numpy_dtype, nselect_method)
+
+ @pytest.mark.parametrize("dtype", ["datetime64[ns]", "timedelta64[ns]"])
+ def test_nlargest_boundary_datetimelike(self, nselect_method, dtype):
+ # GH#21426
+ # use int64 bounds and +1 to min_val since true minimum is NaT
+ # (include min_val/NaT at end to maintain same expected_idxr)
+ dtype_info = np.iinfo("int64")
+ min_val, max_val = dtype_info.min, dtype_info.max
+ vals = [min_val + 1, min_val + 2, max_val - 1, max_val, min_val]
+ assert_check_nselect_boundary(vals, dtype, nselect_method)
+
+ def test_nlargest_duplicate_keep_all_ties(self):
+ # see GH#16818
+ ser = Series([10, 9, 8, 7, 7, 7, 7, 6])
+ result = ser.nlargest(4, keep="all")
+ expected = Series([10, 9, 8, 7, 7, 7, 7])
+ tm.assert_series_equal(result, expected)
+
+ result = ser.nsmallest(2, keep="all")
+ expected = Series([6, 7, 7, 7, 7], index=[7, 3, 4, 5, 6])
+ tm.assert_series_equal(result, expected)
+
+ @pytest.mark.parametrize(
+ "data,expected", [([True, False], [True]), ([True, False, True, True], [True])]
+ )
+ def test_nlargest_boolean(self, data, expected):
+ # GH#26154 : ensure True > False
+ ser = Series(data)
+ result = ser.nlargest(1)
+ expected = Series(expected)
+ tm.assert_series_equal(result, expected)
+
+ def test_nlargest_nullable(self, any_numeric_ea_dtype):
+ # GH#42816
+ dtype = any_numeric_ea_dtype
+ if dtype.startswith("UInt"):
+ # Can't cast from negative float to uint on some platforms
+ arr = np.random.default_rng(2).integers(1, 10, 10)
+ else:
+ arr = np.random.default_rng(2).standard_normal(10)
+ arr = arr.astype(dtype.lower(), copy=False)
+
+ ser = Series(arr.copy(), dtype=dtype)
+ ser[1] = pd.NA
+ result = ser.nlargest(5)
+
+ expected = (
+ Series(np.delete(arr, 1), index=ser.index.delete(1))
+ .nlargest(5)
+ .astype(dtype)
+ )
+ tm.assert_series_equal(result, expected)
+
+ def test_nsmallest_nan_when_keep_is_all(self):
+ # GH#46589
+ s = Series([1, 2, 3, 3, 3, None])
+ result = s.nsmallest(3, keep="all")
+ expected = Series([1.0, 2.0, 3.0, 3.0, 3.0])
+ tm.assert_series_equal(result, expected)
+
+ s = Series([1, 2, None, None, None])
+ result = s.nsmallest(3, keep="all")
+ expected = Series([1, 2, None, None, None])
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_nunique.py ADDED
@@ -0,0 +1,24 @@
+ import numpy as np
+
+ from pandas import (
+ Categorical,
+ Series,
+ )
+
+
+ def test_nunique():
+ # basics.rst doc example
+ series = Series(np.random.default_rng(2).standard_normal(500))
+ series[20:500] = np.nan
+ series[10:20] = 5000
+ result = series.nunique()
+ assert result == 11
+
+
+ def test_nunique_categorical():
+ # GH#18051
+ ser = Series(Categorical([]))
+ assert ser.nunique() == 0
+
+ ser = Series(Categorical([np.nan]))
+ assert ser.nunique() == 0
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_pct_change.py ADDED
@@ -0,0 +1,128 @@
+ import numpy as np
+ import pytest
+
+ from pandas import (
+ Series,
+ date_range,
+ )
+ import pandas._testing as tm
+
+
+ class TestSeriesPctChange:
+ def test_pct_change(self, datetime_series):
+ msg = (
+ "The 'fill_method' keyword being not None and the 'limit' keyword in "
+ "Series.pct_change are deprecated"
+ )
+
+ rs = datetime_series.pct_change(fill_method=None)
+ tm.assert_series_equal(rs, datetime_series / datetime_series.shift(1) - 1)
+
+ rs = datetime_series.pct_change(2)
+ filled = datetime_series.ffill()
+ tm.assert_series_equal(rs, filled / filled.shift(2) - 1)
+
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ rs = datetime_series.pct_change(fill_method="bfill", limit=1)
+ filled = datetime_series.bfill(limit=1)
+ tm.assert_series_equal(rs, filled / filled.shift(1) - 1)
+
+ rs = datetime_series.pct_change(freq="5D")
+ filled = datetime_series.ffill()
+ tm.assert_series_equal(
+ rs, (filled / filled.shift(freq="5D") - 1).reindex_like(filled)
+ )
+
+ def test_pct_change_with_duplicate_axis(self):
+ # GH#28664
+ common_idx = date_range("2019-11-14", periods=5, freq="D")
+ result = Series(range(5), common_idx).pct_change(freq="B")
+
+ # the reason that the expected should be like this is documented at PR 28681
+ expected = Series([np.nan, np.inf, np.nan, np.nan, 3.0], common_idx)
+
+ tm.assert_series_equal(result, expected)
+
+ def test_pct_change_shift_over_nas(self):
+ s = Series([1.0, 1.5, np.nan, 2.5, 3.0])
+
+ msg = "The default fill_method='pad' in Series.pct_change is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ chg = s.pct_change()
+
+ expected = Series([np.nan, 0.5, 0.0, 2.5 / 1.5 - 1, 0.2])
+ tm.assert_series_equal(chg, expected)
+
+ @pytest.mark.parametrize(
+ "freq, periods, fill_method, limit",
+ [
+ ("5B", 5, None, None),
+ ("3B", 3, None, None),
+ ("3B", 3, "bfill", None),
+ ("7B", 7, "pad", 1),
+ ("7B", 7, "bfill", 3),
+ ("14B", 14, None, None),
+ ],
+ )
+ def test_pct_change_periods_freq(
+ self, freq, periods, fill_method, limit, datetime_series
+ ):
+ msg = (
+ "The 'fill_method' keyword being not None and the 'limit' keyword in "
+ "Series.pct_change are deprecated"
+ )
+
+ # GH#7292
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ rs_freq = datetime_series.pct_change(
+ freq=freq, fill_method=fill_method, limit=limit
+ )
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ rs_periods = datetime_series.pct_change(
+ periods, fill_method=fill_method, limit=limit
+ )
+ tm.assert_series_equal(rs_freq, rs_periods)
+
+ empty_ts = Series(index=datetime_series.index, dtype=object)
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ rs_freq = empty_ts.pct_change(
+ freq=freq, fill_method=fill_method, limit=limit
+ )
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ rs_periods = empty_ts.pct_change(
+ periods, fill_method=fill_method, limit=limit
+ )
+ tm.assert_series_equal(rs_freq, rs_periods)
+
+
+ @pytest.mark.parametrize("fill_method", ["pad", "ffill", None])
+ def test_pct_change_with_duplicated_indices(fill_method):
+ # GH30463
+ s = Series([np.nan, 1, 2, 3, 9, 18], index=["a", "b"] * 3)
+
+ warn = None if fill_method is None else FutureWarning
+ msg = (
+ "The 'fill_method' keyword being not None and the 'limit' keyword in "
+ "Series.pct_change are deprecated"
+ )
+ with tm.assert_produces_warning(warn, match=msg):
+ result = s.pct_change(fill_method=fill_method)
+
+ expected = Series([np.nan, np.nan, 1.0, 0.5, 2.0, 1.0], index=["a", "b"] * 3)
+ tm.assert_series_equal(result, expected)
+
+
+ def test_pct_change_no_warning_na_beginning():
+ # GH#54981
+ ser = Series([None, None, 1, 2, 3])
+ result = ser.pct_change()
+ expected = Series([np.nan, np.nan, np.nan, 1, 0.5])
+ tm.assert_series_equal(result, expected)
+
+
+ def test_pct_change_empty():
+ # GH 57056
+ ser = Series([], dtype="float64")
+ expected = ser.copy()
+ result = ser.pct_change(periods=0)
+ tm.assert_series_equal(expected, result)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_pop.py ADDED
@@ -0,0 +1,13 @@
+ from pandas import Series
+ import pandas._testing as tm
+
+
+ def test_pop():
+ # GH#6600
+ ser = Series([0, 4, 0], index=["A", "B", "C"], name=4)
+
+ result = ser.pop("B")
+ assert result == 4
+
+ expected = Series([0, 0], index=["A", "C"], name=4)
+ tm.assert_series_equal(ser, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_quantile.py ADDED
@@ -0,0 +1,247 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.core.dtypes.common import is_integer
5
+
6
+ import pandas as pd
7
+ from pandas import (
8
+ Index,
9
+ Series,
10
+ )
11
+ import pandas._testing as tm
12
+ from pandas.core.indexes.datetimes import Timestamp
13
+
14
+
15
+ class TestSeriesQuantile:
16
+ def test_quantile(self, datetime_series):
17
+ q = datetime_series.quantile(0.1)
18
+ assert q == np.percentile(datetime_series.dropna(), 10)
19
+
20
+ q = datetime_series.quantile(0.9)
21
+ assert q == np.percentile(datetime_series.dropna(), 90)
22
+
23
+ # object dtype
24
+ q = Series(datetime_series, dtype=object).quantile(0.9)
25
+ assert q == np.percentile(datetime_series.dropna(), 90)
26
+
27
+ # datetime64[ns] dtype
28
+ dts = datetime_series.index.to_series()
29
+ q = dts.quantile(0.2)
30
+ assert q == Timestamp("2000-01-10 19:12:00")
31
+
32
+ # timedelta64[ns] dtype
33
+ tds = dts.diff()
34
+ q = tds.quantile(0.25)
35
+ assert q == pd.to_timedelta("24:00:00")
36
+
37
+ # GH7661
38
+ result = Series([np.timedelta64("NaT")]).sum()
39
+ assert result == pd.Timedelta(0)
40
+
41
+ msg = "percentiles should all be in the interval \\[0, 1\\]"
42
+ for invalid in [-1, 2, [0.5, -1], [0.5, 2]]:
43
+ with pytest.raises(ValueError, match=msg):
44
+ datetime_series.quantile(invalid)
45
+
46
+ s = Series(np.random.default_rng(2).standard_normal(100))
47
+ percentile_array = [-0.5, 0.25, 1.5]
48
+ with pytest.raises(ValueError, match=msg):
49
+ s.quantile(percentile_array)
50
+
51
+ def test_quantile_multi(self, datetime_series, unit):
52
+ datetime_series.index = datetime_series.index.as_unit(unit)
53
+ qs = [0.1, 0.9]
54
+ result = datetime_series.quantile(qs)
55
+ expected = Series(
56
+ [
57
+ np.percentile(datetime_series.dropna(), 10),
58
+ np.percentile(datetime_series.dropna(), 90),
59
+ ],
60
+ index=qs,
61
+ name=datetime_series.name,
62
+ )
63
+ tm.assert_series_equal(result, expected)
64
+
65
+ dts = datetime_series.index.to_series()
66
+ dts.name = "xxx"
67
+ result = dts.quantile((0.2, 0.2))
68
+ expected = Series(
69
+ [Timestamp("2000-01-10 19:12:00"), Timestamp("2000-01-10 19:12:00")],
70
+ index=[0.2, 0.2],
71
+ name="xxx",
72
+ dtype=f"M8[{unit}]",
73
+ )
74
+ tm.assert_series_equal(result, expected)
75
+
76
+ result = datetime_series.quantile([])
77
+ expected = Series(
78
+ [], name=datetime_series.name, index=Index([], dtype=float), dtype="float64"
79
+ )
80
+ tm.assert_series_equal(result, expected)
81
+
82
+ def test_quantile_interpolation(self, datetime_series):
83
+ # see gh-10174
84
+
85
+ # interpolation = linear (default case)
86
+ q = datetime_series.quantile(0.1, interpolation="linear")
87
+ assert q == np.percentile(datetime_series.dropna(), 10)
88
+ q1 = datetime_series.quantile(0.1)
89
+ assert q1 == np.percentile(datetime_series.dropna(), 10)
90
+
91
+ # test with and without interpolation keyword
92
+ assert q == q1
93
+
94
+ def test_quantile_interpolation_dtype(self):
95
+ # GH #10174
96
+
97
+ # interpolation = linear (default case)
98
+ q = Series([1, 3, 4]).quantile(0.5, interpolation="lower")
99
+ assert q == np.percentile(np.array([1, 3, 4]), 50)
100
+ assert is_integer(q)
101
+
102
+ q = Series([1, 3, 4]).quantile(0.5, interpolation="higher")
103
+ assert q == np.percentile(np.array([1, 3, 4]), 50)
104
+ assert is_integer(q)
105
+
106
+ def test_quantile_nan(self):
107
+ # GH 13098
108
+ ser = Series([1, 2, 3, 4, np.nan])
109
+ result = ser.quantile(0.5)
110
+ expected = 2.5
111
+ assert result == expected
112
+
113
+ # all nan/empty
114
+ s1 = Series([], dtype=object)
115
+ cases = [s1, Series([np.nan, np.nan])]
116
+
117
+ for ser in cases:
118
+ res = ser.quantile(0.5)
119
+ assert np.isnan(res)
120
+
121
+ res = ser.quantile([0.5])
122
+ tm.assert_series_equal(res, Series([np.nan], index=[0.5]))
123
+
124
+ res = ser.quantile([0.2, 0.3])
125
+ tm.assert_series_equal(res, Series([np.nan, np.nan], index=[0.2, 0.3]))
126
+
127
+ @pytest.mark.parametrize(
128
+ "case",
129
+ [
130
+ [
131
+ Timestamp("2011-01-01"),
132
+ Timestamp("2011-01-02"),
133
+ Timestamp("2011-01-03"),
134
+ ],
135
+ [
136
+ Timestamp("2011-01-01", tz="US/Eastern"),
137
+ Timestamp("2011-01-02", tz="US/Eastern"),
138
+ Timestamp("2011-01-03", tz="US/Eastern"),
139
+ ],
140
+ [pd.Timedelta("1 days"), pd.Timedelta("2 days"), pd.Timedelta("3 days")],
141
+ # NaT
142
+ [
143
+ Timestamp("2011-01-01"),
144
+ Timestamp("2011-01-02"),
145
+ Timestamp("2011-01-03"),
146
+ pd.NaT,
147
+ ],
148
+ [
149
+ Timestamp("2011-01-01", tz="US/Eastern"),
150
+ Timestamp("2011-01-02", tz="US/Eastern"),
151
+ Timestamp("2011-01-03", tz="US/Eastern"),
152
+ pd.NaT,
153
+ ],
154
+ [
155
+ pd.Timedelta("1 days"),
156
+ pd.Timedelta("2 days"),
157
+ pd.Timedelta("3 days"),
158
+ pd.NaT,
159
+ ],
160
+ ],
161
+ )
162
+ def test_quantile_box(self, case):
163
+ ser = Series(case, name="XXX")
164
+ res = ser.quantile(0.5)
165
+ assert res == case[1]
166
+
167
+ res = ser.quantile([0.5])
168
+ exp = Series([case[1]], index=[0.5], name="XXX")
169
+ tm.assert_series_equal(res, exp)
170
+
171
+ def test_datetime_timedelta_quantiles(self):
172
+ # covers #9694
173
+ assert pd.isna(Series([], dtype="M8[ns]").quantile(0.5))
174
+ assert pd.isna(Series([], dtype="m8[ns]").quantile(0.5))
175
+
176
+ def test_quantile_nat(self):
177
+ res = Series([pd.NaT, pd.NaT]).quantile(0.5)
178
+ assert res is pd.NaT
179
+
180
+ res = Series([pd.NaT, pd.NaT]).quantile([0.5])
181
+ tm.assert_series_equal(res, Series([pd.NaT], index=[0.5]))
182
+
183
+ @pytest.mark.parametrize(
184
+ "values, dtype",
185
+ [([0, 0, 0, 1, 2, 3], "Sparse[int]"), ([0.0, None, 1.0, 2.0], "Sparse[float]")],
186
+ )
187
+ def test_quantile_sparse(self, values, dtype):
188
+ ser = Series(values, dtype=dtype)
189
+ result = ser.quantile([0.5])
190
+ expected = Series(np.asarray(ser)).quantile([0.5]).astype("Sparse[float]")
191
+ tm.assert_series_equal(result, expected)
192
+
193
+ def test_quantile_empty_float64(self):
194
+ # floats
195
+ ser = Series([], dtype="float64")
196
+
197
+ res = ser.quantile(0.5)
198
+ assert np.isnan(res)
199
+
200
+ res = ser.quantile([0.5])
201
+ exp = Series([np.nan], index=[0.5])
202
+ tm.assert_series_equal(res, exp)
203
+
204
+ def test_quantile_empty_int64(self):
205
+ # int
206
+ ser = Series([], dtype="int64")
207
+
208
+ res = ser.quantile(0.5)
209
+ assert np.isnan(res)
210
+
211
+ res = ser.quantile([0.5])
212
+ exp = Series([np.nan], index=[0.5])
213
+ tm.assert_series_equal(res, exp)
214
+
215
+ def test_quantile_empty_dt64(self):
216
+ # datetime
217
+ ser = Series([], dtype="datetime64[ns]")
218
+
219
+ res = ser.quantile(0.5)
220
+ assert res is pd.NaT
221
+
222
+ res = ser.quantile([0.5])
223
+ exp = Series([pd.NaT], index=[0.5], dtype=ser.dtype)
224
+ tm.assert_series_equal(res, exp)
225
+
226
+ @pytest.mark.parametrize("dtype", [int, float, "Int64"])
227
+ def test_quantile_dtypes(self, dtype):
228
+ result = Series([1, 2, 3], dtype=dtype).quantile(np.arange(0, 1, 0.25))
229
+ expected = Series(np.arange(1, 3, 0.5), index=np.arange(0, 1, 0.25))
230
+ if dtype == "Int64":
231
+ expected = expected.astype("Float64")
232
+ tm.assert_series_equal(result, expected)
233
+
234
+ def test_quantile_all_na(self, any_int_ea_dtype):
235
+ # GH#50681
236
+ ser = Series([pd.NA, pd.NA], dtype=any_int_ea_dtype)
237
+ with tm.assert_produces_warning(None):
238
+ result = ser.quantile([0.1, 0.5])
239
+ expected = Series([pd.NA, pd.NA], dtype=any_int_ea_dtype, index=[0.1, 0.5])
240
+ tm.assert_series_equal(result, expected)
241
+
242
+ def test_quantile_dtype_size(self, any_int_ea_dtype):
243
+ # GH#50681
244
+ ser = Series([pd.NA, pd.NA, 1], dtype=any_int_ea_dtype)
245
+ result = ser.quantile([0.1, 0.5])
246
+ expected = Series([1, 1], dtype=any_int_ea_dtype, index=[0.1, 0.5])
247
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_rank.py ADDED
@@ -0,0 +1,519 @@
1
+ from itertools import chain
2
+ import operator
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas._libs.algos import (
8
+ Infinity,
9
+ NegInfinity,
10
+ )
11
+ import pandas.util._test_decorators as td
12
+
13
+ from pandas import (
14
+ NA,
15
+ NaT,
16
+ Series,
17
+ Timestamp,
18
+ date_range,
19
+ )
20
+ import pandas._testing as tm
21
+ from pandas.api.types import CategoricalDtype
22
+
23
+
24
+ @pytest.fixture
25
+ def ser():
26
+ return Series([1, 3, 4, 2, np.nan, 2, 1, 5, np.nan, 3])
27
+
28
+
29
+ @pytest.fixture(
30
+ params=[
31
+ ["average", np.array([1.5, 5.5, 7.0, 3.5, np.nan, 3.5, 1.5, 8.0, np.nan, 5.5])],
32
+ ["min", np.array([1, 5, 7, 3, np.nan, 3, 1, 8, np.nan, 5])],
33
+ ["max", np.array([2, 6, 7, 4, np.nan, 4, 2, 8, np.nan, 6])],
34
+ ["first", np.array([1, 5, 7, 3, np.nan, 4, 2, 8, np.nan, 6])],
35
+ ["dense", np.array([1, 3, 4, 2, np.nan, 2, 1, 5, np.nan, 3])],
36
+ ]
37
+ )
38
+ def results(request):
39
+ return request.param
40
+
41
+
42
+ @pytest.fixture(
43
+ params=[
44
+ "object",
45
+ "float64",
46
+ "int64",
47
+ "Float64",
48
+ "Int64",
49
+ pytest.param("float64[pyarrow]", marks=td.skip_if_no("pyarrow")),
50
+ pytest.param("int64[pyarrow]", marks=td.skip_if_no("pyarrow")),
51
+ ]
52
+ )
53
+ def dtype(request):
54
+ return request.param
55
+
56
+
57
+ class TestSeriesRank:
58
+ def test_rank(self, datetime_series):
59
+ sp_stats = pytest.importorskip("scipy.stats")
60
+
61
+ datetime_series[::2] = np.nan
62
+ datetime_series[:10:3] = 4.0
63
+
64
+ ranks = datetime_series.rank()
65
+ oranks = datetime_series.astype("O").rank()
66
+
67
+ tm.assert_series_equal(ranks, oranks)
68
+
69
+ mask = np.isnan(datetime_series)
70
+ filled = datetime_series.fillna(np.inf)
71
+
72
+ # rankdata returns a ndarray
73
+ exp = Series(sp_stats.rankdata(filled), index=filled.index, name="ts")
74
+ exp[mask] = np.nan
75
+
76
+ tm.assert_series_equal(ranks, exp)
77
+
78
+ iseries = Series(np.arange(5).repeat(2))
79
+
80
+ iranks = iseries.rank()
81
+ exp = iseries.astype(float).rank()
82
+ tm.assert_series_equal(iranks, exp)
83
+ iseries = Series(np.arange(5)) + 1.0
84
+ exp = iseries / 5.0
85
+ iranks = iseries.rank(pct=True)
86
+
87
+ tm.assert_series_equal(iranks, exp)
88
+
89
+ iseries = Series(np.repeat(1, 100))
90
+ exp = Series(np.repeat(0.505, 100))
91
+ iranks = iseries.rank(pct=True)
92
+ tm.assert_series_equal(iranks, exp)
93
+
94
+ # Explicit cast to float to avoid implicit cast when setting nan
95
+ iseries = iseries.astype("float")
96
+ iseries[1] = np.nan
97
+ exp = Series(np.repeat(50.0 / 99.0, 100))
98
+ exp[1] = np.nan
99
+ iranks = iseries.rank(pct=True)
100
+ tm.assert_series_equal(iranks, exp)
101
+
102
+ iseries = Series(np.arange(5)) + 1.0
103
+ iseries[4] = np.nan
104
+ exp = iseries / 4.0
105
+ iranks = iseries.rank(pct=True)
106
+ tm.assert_series_equal(iranks, exp)
107
+
108
+ iseries = Series(np.repeat(np.nan, 100))
109
+ exp = iseries.copy()
110
+ iranks = iseries.rank(pct=True)
111
+ tm.assert_series_equal(iranks, exp)
112
+
113
+ # Explicit cast to float to avoid implicit cast when setting nan
114
+ iseries = Series(np.arange(5), dtype="float") + 1
115
+ iseries[4] = np.nan
116
+ exp = iseries / 4.0
117
+ iranks = iseries.rank(pct=True)
118
+ tm.assert_series_equal(iranks, exp)
119
+
120
+ rng = date_range("1/1/1990", periods=5)
121
+ # Explicit cast to float to avoid implicit cast when setting nan
122
+ iseries = Series(np.arange(5), rng, dtype="float") + 1
123
+ iseries.iloc[4] = np.nan
124
+ exp = iseries / 4.0
125
+ iranks = iseries.rank(pct=True)
126
+ tm.assert_series_equal(iranks, exp)
127
+
128
+ iseries = Series([1e-50, 1e-100, 1e-20, 1e-2, 1e-20 + 1e-30, 1e-1])
129
+ exp = Series([2, 1, 3, 5, 4, 6.0])
130
+ iranks = iseries.rank()
131
+ tm.assert_series_equal(iranks, exp)
132
+
133
+ # GH 5968
134
+ iseries = Series(["3 day", "1 day 10m", "-2 day", NaT], dtype="m8[ns]")
135
+ exp = Series([3, 2, 1, np.nan])
136
+ iranks = iseries.rank()
137
+ tm.assert_series_equal(iranks, exp)
138
+
139
+ values = np.array(
140
+ [-50, -1, -1e-20, -1e-25, -1e-50, 0, 1e-40, 1e-20, 1e-10, 2, 40],
141
+ dtype="float64",
142
+ )
143
+ random_order = np.random.default_rng(2).permutation(len(values))
144
+ iseries = Series(values[random_order])
145
+ exp = Series(random_order + 1.0, dtype="float64")
146
+ iranks = iseries.rank()
147
+ tm.assert_series_equal(iranks, exp)
148
+
149
+ def test_rank_categorical(self):
150
+ # GH issue #15420 rank incorrectly orders ordered categories
151
+
152
+ # Test ascending/descending ranking for ordered categoricals
153
+ exp = Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0])
154
+ exp_desc = Series([6.0, 5.0, 4.0, 3.0, 2.0, 1.0])
155
+ ordered = Series(
156
+ ["first", "second", "third", "fourth", "fifth", "sixth"]
157
+ ).astype(
158
+ CategoricalDtype(
159
+ categories=["first", "second", "third", "fourth", "fifth", "sixth"],
160
+ ordered=True,
161
+ )
162
+ )
163
+ tm.assert_series_equal(ordered.rank(), exp)
164
+ tm.assert_series_equal(ordered.rank(ascending=False), exp_desc)
165
+
166
+ # Unordered categoricals should be ranked as objects
167
+ unordered = Series(
168
+ ["first", "second", "third", "fourth", "fifth", "sixth"]
169
+ ).astype(
170
+ CategoricalDtype(
171
+ categories=["first", "second", "third", "fourth", "fifth", "sixth"],
172
+ ordered=False,
173
+ )
174
+ )
175
+ exp_unordered = Series([2.0, 4.0, 6.0, 3.0, 1.0, 5.0])
176
+ res = unordered.rank()
177
+ tm.assert_series_equal(res, exp_unordered)
178
+
179
+ unordered1 = Series([1, 2, 3, 4, 5, 6]).astype(
180
+ CategoricalDtype([1, 2, 3, 4, 5, 6], False)
181
+ )
182
+ exp_unordered1 = Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0])
183
+ res1 = unordered1.rank()
184
+ tm.assert_series_equal(res1, exp_unordered1)
185
+
186
+ # Test na_option for rank data
187
+ na_ser = Series(
188
+ ["first", "second", "third", "fourth", "fifth", "sixth", np.nan]
189
+ ).astype(
190
+ CategoricalDtype(
191
+ ["first", "second", "third", "fourth", "fifth", "sixth", "seventh"],
192
+ True,
193
+ )
194
+ )
195
+
196
+ exp_top = Series([2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 1.0])
197
+ exp_bot = Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
198
+ exp_keep = Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, np.nan])
199
+
200
+ tm.assert_series_equal(na_ser.rank(na_option="top"), exp_top)
201
+ tm.assert_series_equal(na_ser.rank(na_option="bottom"), exp_bot)
202
+ tm.assert_series_equal(na_ser.rank(na_option="keep"), exp_keep)
203
+
204
+ # Test na_option for rank data with ascending False
205
+ exp_top = Series([7.0, 6.0, 5.0, 4.0, 3.0, 2.0, 1.0])
206
+ exp_bot = Series([6.0, 5.0, 4.0, 3.0, 2.0, 1.0, 7.0])
207
+ exp_keep = Series([6.0, 5.0, 4.0, 3.0, 2.0, 1.0, np.nan])
208
+
209
+ tm.assert_series_equal(na_ser.rank(na_option="top", ascending=False), exp_top)
210
+ tm.assert_series_equal(
211
+ na_ser.rank(na_option="bottom", ascending=False), exp_bot
212
+ )
213
+ tm.assert_series_equal(na_ser.rank(na_option="keep", ascending=False), exp_keep)
214
+
215
+ # Test invalid values for na_option
216
+ msg = "na_option must be one of 'keep', 'top', or 'bottom'"
217
+
218
+ with pytest.raises(ValueError, match=msg):
219
+ na_ser.rank(na_option="bad", ascending=False)
220
+
221
+ # invalid type
222
+ with pytest.raises(ValueError, match=msg):
223
+ na_ser.rank(na_option=True, ascending=False)
224
+
225
+ # Test with pct=True
226
+ na_ser = Series(["first", "second", "third", "fourth", np.nan]).astype(
227
+ CategoricalDtype(["first", "second", "third", "fourth"], True)
228
+ )
229
+ exp_top = Series([0.4, 0.6, 0.8, 1.0, 0.2])
230
+ exp_bot = Series([0.2, 0.4, 0.6, 0.8, 1.0])
231
+ exp_keep = Series([0.25, 0.5, 0.75, 1.0, np.nan])
232
+
233
+ tm.assert_series_equal(na_ser.rank(na_option="top", pct=True), exp_top)
234
+ tm.assert_series_equal(na_ser.rank(na_option="bottom", pct=True), exp_bot)
235
+ tm.assert_series_equal(na_ser.rank(na_option="keep", pct=True), exp_keep)
236
+
237
+ def test_rank_signature(self):
238
+ s = Series([0, 1])
239
+ s.rank(method="average")
240
+ msg = "No axis named average for object type Series"
241
+ with pytest.raises(ValueError, match=msg):
242
+ s.rank("average")
243
+
244
+ @pytest.mark.parametrize("dtype", [None, object])
245
+ def test_rank_tie_methods(self, ser, results, dtype):
246
+ method, exp = results
247
+ ser = ser if dtype is None else ser.astype(dtype)
248
+ result = ser.rank(method=method)
249
+ tm.assert_series_equal(result, Series(exp))
250
+
251
+ @pytest.mark.parametrize("ascending", [True, False])
252
+ @pytest.mark.parametrize("method", ["average", "min", "max", "first", "dense"])
253
+ @pytest.mark.parametrize("na_option", ["top", "bottom", "keep"])
254
+ @pytest.mark.parametrize(
255
+ "dtype, na_value, pos_inf, neg_inf",
256
+ [
257
+ ("object", None, Infinity(), NegInfinity()),
258
+ ("float64", np.nan, np.inf, -np.inf),
259
+ ("Float64", NA, np.inf, -np.inf),
260
+ pytest.param(
261
+ "float64[pyarrow]",
262
+ NA,
263
+ np.inf,
264
+ -np.inf,
265
+ marks=td.skip_if_no("pyarrow"),
266
+ ),
267
+ ],
268
+ )
269
+ def test_rank_tie_methods_on_infs_nans(
270
+ self, method, na_option, ascending, dtype, na_value, pos_inf, neg_inf
271
+ ):
272
+ pytest.importorskip("scipy")
273
+ if dtype == "float64[pyarrow]":
274
+ if method == "average":
275
+ exp_dtype = "float64[pyarrow]"
276
+ else:
277
+ exp_dtype = "uint64[pyarrow]"
278
+ else:
279
+ exp_dtype = "float64"
280
+
281
+ chunk = 3
282
+ in_arr = [neg_inf] * chunk + [na_value] * chunk + [pos_inf] * chunk
283
+ iseries = Series(in_arr, dtype=dtype)
284
+ exp_ranks = {
285
+ "average": ([2, 2, 2], [5, 5, 5], [8, 8, 8]),
286
+ "min": ([1, 1, 1], [4, 4, 4], [7, 7, 7]),
287
+ "max": ([3, 3, 3], [6, 6, 6], [9, 9, 9]),
288
+ "first": ([1, 2, 3], [4, 5, 6], [7, 8, 9]),
289
+ "dense": ([1, 1, 1], [2, 2, 2], [3, 3, 3]),
290
+ }
291
+ ranks = exp_ranks[method]
292
+ if na_option == "top":
293
+ order = [ranks[1], ranks[0], ranks[2]]
294
+ elif na_option == "bottom":
295
+ order = [ranks[0], ranks[2], ranks[1]]
296
+ else:
297
+ order = [ranks[0], [np.nan] * chunk, ranks[1]]
298
+ expected = order if ascending else order[::-1]
299
+ expected = list(chain.from_iterable(expected))
300
+ result = iseries.rank(method=method, na_option=na_option, ascending=ascending)
301
+ tm.assert_series_equal(result, Series(expected, dtype=exp_dtype))
302
+
303
+ def test_rank_desc_mix_nans_infs(self):
304
+ # GH 19538
305
+ # check descending ranking when mix nans and infs
306
+ iseries = Series([1, np.nan, np.inf, -np.inf, 25])
307
+ result = iseries.rank(ascending=False)
308
+ exp = Series([3, np.nan, 1, 4, 2], dtype="float64")
309
+ tm.assert_series_equal(result, exp)
310
+
311
+ @pytest.mark.parametrize("method", ["average", "min", "max", "first", "dense"])
312
+ @pytest.mark.parametrize(
313
+ "op, value",
314
+ [
315
+ [operator.add, 0],
316
+ [operator.add, 1e6],
317
+ [operator.mul, 1e-6],
318
+ ],
319
+ )
320
+ def test_rank_methods_series(self, method, op, value):
321
+ sp_stats = pytest.importorskip("scipy.stats")
322
+
323
+ xs = np.random.default_rng(2).standard_normal(9)
324
+ xs = np.concatenate([xs[i:] for i in range(0, 9, 2)]) # add duplicates
325
+ np.random.default_rng(2).shuffle(xs)
326
+
327
+ index = [chr(ord("a") + i) for i in range(len(xs))]
328
+ vals = op(xs, value)
329
+ ts = Series(vals, index=index)
330
+ result = ts.rank(method=method)
331
+ sprank = sp_stats.rankdata(vals, method if method != "first" else "ordinal")
332
+ expected = Series(sprank, index=index).astype("float64")
333
+ tm.assert_series_equal(result, expected)
334
+
335
+ @pytest.mark.parametrize(
336
+ "ser, exp",
337
+ [
338
+ ([1], [1]),
339
+ ([2], [1]),
340
+ ([0], [1]),
341
+ ([2, 2], [1, 1]),
342
+ ([1, 2, 3], [1, 2, 3]),
343
+ ([4, 2, 1], [3, 2, 1]),
344
+ ([1, 1, 5, 5, 3], [1, 1, 3, 3, 2]),
345
+ ([-5, -4, -3, -2, -1], [1, 2, 3, 4, 5]),
346
+ ],
347
+ )
348
+ def test_rank_dense_method(self, dtype, ser, exp):
349
+ s = Series(ser).astype(dtype)
350
+ result = s.rank(method="dense")
351
+ expected = Series(exp).astype(result.dtype)
352
+ tm.assert_series_equal(result, expected)
353
+
354
+ def test_rank_descending(self, ser, results, dtype):
355
+ method, _ = results
356
+ if "i" in dtype:
357
+ s = ser.dropna()
358
+ else:
359
+ s = ser.astype(dtype)
360
+
361
+ res = s.rank(ascending=False)
362
+ expected = (s.max() - s).rank()
363
+ tm.assert_series_equal(res, expected)
364
+
365
+ expected = (s.max() - s).rank(method=method)
366
+ res2 = s.rank(method=method, ascending=False)
367
+ tm.assert_series_equal(res2, expected)
368
+
369
+ def test_rank_int(self, ser, results):
370
+ method, exp = results
371
+ s = ser.dropna().astype("i8")
372
+
373
+ result = s.rank(method=method)
374
+ expected = Series(exp).dropna()
375
+ expected.index = result.index
376
+ tm.assert_series_equal(result, expected)
377
+
378
+ def test_rank_object_bug(self):
379
+ # GH 13445
380
+
381
+ # smoke tests
382
+ Series([np.nan] * 32).astype(object).rank(ascending=True)
383
+ Series([np.nan] * 32).astype(object).rank(ascending=False)
384
+
385
+ def test_rank_modify_inplace(self):
386
+ # GH 18521
387
+ # Check rank does not mutate series
388
+ s = Series([Timestamp("2017-01-05 10:20:27.569000"), NaT])
389
+ expected = s.copy()
390
+
391
+ s.rank()
392
+ result = s
393
+ tm.assert_series_equal(result, expected)
394
+
395
+ def test_rank_ea_small_values(self):
396
+ # GH#52471
397
+ ser = Series(
398
+ [5.4954145e29, -9.791984e-21, 9.3715776e-26, NA, 1.8790257e-28],
399
+ dtype="Float64",
400
+ )
401
+ result = ser.rank(method="min")
402
+ expected = Series([4, 1, 3, np.nan, 2])
403
+ tm.assert_series_equal(result, expected)
404
+
405
+
406
+ # GH15630, pct should be on 100% basis when method='dense'
407
+
408
+
409
+ @pytest.mark.parametrize(
410
+ "ser, exp",
411
+ [
412
+ ([1], [1.0]),
413
+ ([1, 2], [1.0 / 2, 2.0 / 2]),
414
+ ([2, 2], [1.0, 1.0]),
415
+ ([1, 2, 3], [1.0 / 3, 2.0 / 3, 3.0 / 3]),
416
+ ([1, 2, 2], [1.0 / 2, 2.0 / 2, 2.0 / 2]),
417
+ ([4, 2, 1], [3.0 / 3, 2.0 / 3, 1.0 / 3]),
418
+ ([1, 1, 5, 5, 3], [1.0 / 3, 1.0 / 3, 3.0 / 3, 3.0 / 3, 2.0 / 3]),
419
+ ([1, 1, 3, 3, 5, 5], [1.0 / 3, 1.0 / 3, 2.0 / 3, 2.0 / 3, 3.0 / 3, 3.0 / 3]),
420
+ ([-5, -4, -3, -2, -1], [1.0 / 5, 2.0 / 5, 3.0 / 5, 4.0 / 5, 5.0 / 5]),
421
+ ],
422
+ )
423
+ def test_rank_dense_pct(dtype, ser, exp):
424
+ s = Series(ser).astype(dtype)
425
+ result = s.rank(method="dense", pct=True)
426
+ expected = Series(exp).astype(result.dtype)
427
+ tm.assert_series_equal(result, expected)
428
+
429
+
430
+ @pytest.mark.parametrize(
431
+ "ser, exp",
432
+ [
433
+ ([1], [1.0]),
434
+ ([1, 2], [1.0 / 2, 2.0 / 2]),
435
+ ([2, 2], [1.0 / 2, 1.0 / 2]),
436
+ ([1, 2, 3], [1.0 / 3, 2.0 / 3, 3.0 / 3]),
437
+ ([1, 2, 2], [1.0 / 3, 2.0 / 3, 2.0 / 3]),
438
+ ([4, 2, 1], [3.0 / 3, 2.0 / 3, 1.0 / 3]),
439
+ ([1, 1, 5, 5, 3], [1.0 / 5, 1.0 / 5, 4.0 / 5, 4.0 / 5, 3.0 / 5]),
440
+ ([1, 1, 3, 3, 5, 5], [1.0 / 6, 1.0 / 6, 3.0 / 6, 3.0 / 6, 5.0 / 6, 5.0 / 6]),
441
+ ([-5, -4, -3, -2, -1], [1.0 / 5, 2.0 / 5, 3.0 / 5, 4.0 / 5, 5.0 / 5]),
442
+ ],
443
+ )
444
+ def test_rank_min_pct(dtype, ser, exp):
445
+ s = Series(ser).astype(dtype)
446
+ result = s.rank(method="min", pct=True)
447
+ expected = Series(exp).astype(result.dtype)
448
+ tm.assert_series_equal(result, expected)
449
+
450
+
451
+ @pytest.mark.parametrize(
452
+ "ser, exp",
453
+ [
454
+ ([1], [1.0]),
455
+ ([1, 2], [1.0 / 2, 2.0 / 2]),
456
+ ([2, 2], [1.0, 1.0]),
457
+ ([1, 2, 3], [1.0 / 3, 2.0 / 3, 3.0 / 3]),
458
+ ([1, 2, 2], [1.0 / 3, 3.0 / 3, 3.0 / 3]),
459
+ ([4, 2, 1], [3.0 / 3, 2.0 / 3, 1.0 / 3]),
460
+ ([1, 1, 5, 5, 3], [2.0 / 5, 2.0 / 5, 5.0 / 5, 5.0 / 5, 3.0 / 5]),
461
+ ([1, 1, 3, 3, 5, 5], [2.0 / 6, 2.0 / 6, 4.0 / 6, 4.0 / 6, 6.0 / 6, 6.0 / 6]),
462
+ ([-5, -4, -3, -2, -1], [1.0 / 5, 2.0 / 5, 3.0 / 5, 4.0 / 5, 5.0 / 5]),
463
+ ],
464
+ )
465
+ def test_rank_max_pct(dtype, ser, exp):
466
+ s = Series(ser).astype(dtype)
467
+ result = s.rank(method="max", pct=True)
468
+ expected = Series(exp).astype(result.dtype)
469
+ tm.assert_series_equal(result, expected)
470
+
471
+
472
+ @pytest.mark.parametrize(
473
+ "ser, exp",
474
+ [
475
+ ([1], [1.0]),
476
+ ([1, 2], [1.0 / 2, 2.0 / 2]),
477
+ ([2, 2], [1.5 / 2, 1.5 / 2]),
478
+ ([1, 2, 3], [1.0 / 3, 2.0 / 3, 3.0 / 3]),
479
+ ([1, 2, 2], [1.0 / 3, 2.5 / 3, 2.5 / 3]),
480
+ ([4, 2, 1], [3.0 / 3, 2.0 / 3, 1.0 / 3]),
481
+ ([1, 1, 5, 5, 3], [1.5 / 5, 1.5 / 5, 4.5 / 5, 4.5 / 5, 3.0 / 5]),
482
+ ([1, 1, 3, 3, 5, 5], [1.5 / 6, 1.5 / 6, 3.5 / 6, 3.5 / 6, 5.5 / 6, 5.5 / 6]),
483
+ ([-5, -4, -3, -2, -1], [1.0 / 5, 2.0 / 5, 3.0 / 5, 4.0 / 5, 5.0 / 5]),
484
+ ],
485
+ )
486
+ def test_rank_average_pct(dtype, ser, exp):
487
+ s = Series(ser).astype(dtype)
488
+ result = s.rank(method="average", pct=True)
489
+ expected = Series(exp).astype(result.dtype)
490
+ tm.assert_series_equal(result, expected)
491
+
492
+
493
+ @pytest.mark.parametrize(
494
+ "ser, exp",
495
+ [
496
+ ([1], [1.0]),
497
+ ([1, 2], [1.0 / 2, 2.0 / 2]),
498
+ ([2, 2], [1.0 / 2, 2.0 / 2.0]),
499
+ ([1, 2, 3], [1.0 / 3, 2.0 / 3, 3.0 / 3]),
500
+ ([1, 2, 2], [1.0 / 3, 2.0 / 3, 3.0 / 3]),
501
+ ([4, 2, 1], [3.0 / 3, 2.0 / 3, 1.0 / 3]),
502
+ ([1, 1, 5, 5, 3], [1.0 / 5, 2.0 / 5, 4.0 / 5, 5.0 / 5, 3.0 / 5]),
503
+ ([1, 1, 3, 3, 5, 5], [1.0 / 6, 2.0 / 6, 3.0 / 6, 4.0 / 6, 5.0 / 6, 6.0 / 6]),
504
+ ([-5, -4, -3, -2, -1], [1.0 / 5, 2.0 / 5, 3.0 / 5, 4.0 / 5, 5.0 / 5]),
505
+ ],
506
+ )
507
+ def test_rank_first_pct(dtype, ser, exp):
508
+ s = Series(ser).astype(dtype)
509
+ result = s.rank(method="first", pct=True)
510
+ expected = Series(exp).astype(result.dtype)
511
+ tm.assert_series_equal(result, expected)
512
+
513
+
514
+ @pytest.mark.single_cpu
515
+ def test_pct_max_many_rows():
516
+ # GH 18271
517
+ s = Series(np.arange(2**24 + 1))
518
+ result = s.rank(pct=True).max()
519
+ assert result == 1
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_reindex_like.py ADDED
@@ -0,0 +1,41 @@
+ from datetime import datetime
+
+ import numpy as np
+
+ from pandas import Series
+ import pandas._testing as tm
+
+
+ def test_reindex_like(datetime_series):
+ other = datetime_series[::2]
+ tm.assert_series_equal(
+ datetime_series.reindex(other.index), datetime_series.reindex_like(other)
+ )
+
+ # GH#7179
+ day1 = datetime(2013, 3, 5)
+ day2 = datetime(2013, 5, 5)
+ day3 = datetime(2014, 3, 5)
+
+ series1 = Series([5, None, None], [day1, day2, day3])
+ series2 = Series([None, None], [day1, day3])
+
+ result = series1.reindex_like(series2, method="pad")
+ expected = Series([5, np.nan], index=[day1, day3])
+ tm.assert_series_equal(result, expected)
+
+
+ def test_reindex_like_nearest():
+ ser = Series(np.arange(10, dtype="int64"))
+
+ target = [0.1, 0.9, 1.5, 2.0]
+ other = ser.reindex(target, method="nearest")
+ expected = Series(np.around(target).astype("int64"), target)
+
+ result = ser.reindex_like(other, method="nearest")
+ tm.assert_series_equal(expected, result)
+
+ result = ser.reindex_like(other, method="nearest", tolerance=1)
+ tm.assert_series_equal(expected, result)
+ result = ser.reindex_like(other, method="nearest", tolerance=[1, 2, 3, 4])
+ tm.assert_series_equal(expected, result)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_rename_axis.py ADDED
@@ -0,0 +1,47 @@
+ import pytest
+
+ from pandas import (
+ Index,
+ MultiIndex,
+ Series,
+ )
+ import pandas._testing as tm
+
+
+ class TestSeriesRenameAxis:
+ def test_rename_axis_mapper(self):
+ # GH 19978
+ mi = MultiIndex.from_product([["a", "b", "c"], [1, 2]], names=["ll", "nn"])
+ ser = Series(list(range(len(mi))), index=mi)
+
+ result = ser.rename_axis(index={"ll": "foo"})
+ assert result.index.names == ["foo", "nn"]
+
+ result = ser.rename_axis(index=str.upper, axis=0)
+ assert result.index.names == ["LL", "NN"]
+
+ result = ser.rename_axis(index=["foo", "goo"])
+ assert result.index.names == ["foo", "goo"]
+
+ with pytest.raises(TypeError, match="unexpected"):
+ ser.rename_axis(columns="wrong")
+
+ def test_rename_axis_inplace(self, datetime_series):
+ # GH 15704
+ expected = datetime_series.rename_axis("foo")
+ result = datetime_series
+ no_return = result.rename_axis("foo", inplace=True)
+
+ assert no_return is None
+ tm.assert_series_equal(result, expected)
+
+ @pytest.mark.parametrize("kwargs", [{"mapper": None}, {"index": None}, {}])
+ def test_rename_axis_none(self, kwargs):
+ # GH 25034
+ index = Index(list("abc"), name="foo")
+ ser = Series([1, 2, 3], index=index)
+
+ result = ser.rename_axis(**kwargs)
+ expected_index = index.rename(None) if kwargs else index
+ expected = Series([1, 2, 3], index=expected_index)
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_replace.py ADDED
@@ -0,0 +1,813 @@
1
+ import re
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas._config import using_pyarrow_string_dtype
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+ from pandas.core.arrays import IntervalArray
11
+
12
+
13
+ class TestSeriesReplace:
14
+ def test_replace_explicit_none(self):
15
+ # GH#36984 if the user explicitly passes value=None, give it to them
16
+ ser = pd.Series([0, 0, ""], dtype=object)
17
+ result = ser.replace("", None)
18
+ expected = pd.Series([0, 0, None], dtype=object)
19
+ tm.assert_series_equal(result, expected)
20
+
21
+ # Cast column 2 to object to avoid implicit cast when setting entry to ""
22
+ df = pd.DataFrame(np.zeros((3, 3))).astype({2: object})
23
+ df.iloc[2, 2] = ""
24
+ result = df.replace("", None)
25
+ expected = pd.DataFrame(
26
+ {
27
+ 0: np.zeros(3),
28
+ 1: np.zeros(3),
29
+ 2: np.array([0.0, 0.0, None], dtype=object),
30
+ }
31
+ )
32
+ assert expected.iloc[2, 2] is None
33
+ tm.assert_frame_equal(result, expected)
34
+
35
+ # GH#19998 same thing with object dtype
36
+ ser = pd.Series([10, 20, 30, "a", "a", "b", "a"])
37
+ result = ser.replace("a", None)
38
+ expected = pd.Series([10, 20, 30, None, None, "b", None])
39
+ assert expected.iloc[-1] is None
40
+ tm.assert_series_equal(result, expected)
41
+
42
+ def test_replace_noop_doesnt_downcast(self):
43
+ # GH#44498
44
+ ser = pd.Series([None, None, pd.Timestamp("2021-12-16 17:31")], dtype=object)
45
+ res = ser.replace({np.nan: None}) # should be a no-op
46
+ tm.assert_series_equal(res, ser)
47
+ assert res.dtype == object
48
+
49
+ # same thing but different calling convention
50
+ res = ser.replace(np.nan, None)
51
+ tm.assert_series_equal(res, ser)
52
+ assert res.dtype == object
53
+
54
+ def test_replace(self):
55
+ N = 50
56
+ ser = pd.Series(np.random.default_rng(2).standard_normal(N))
57
+ ser[0:4] = np.nan
58
+ ser[6:10] = 0
59
+
60
+ # replace list with a single value
61
+ return_value = ser.replace([np.nan], -1, inplace=True)
62
+ assert return_value is None
63
+
64
+ exp = ser.fillna(-1)
65
+ tm.assert_series_equal(ser, exp)
66
+
67
+ rs = ser.replace(0.0, np.nan)
68
+ ser[ser == 0.0] = np.nan
69
+ tm.assert_series_equal(rs, ser)
70
+
71
+ ser = pd.Series(
72
+ np.fabs(np.random.default_rng(2).standard_normal(N)),
73
+ pd.date_range("2020-01-01", periods=N),
74
+ dtype=object,
75
+ )
76
+ ser[:5] = np.nan
77
+ ser[6:10] = "foo"
78
+ ser[20:30] = "bar"
79
+
80
+ # replace list with a single value
81
+ msg = "Downcasting behavior in `replace`"
82
+ with tm.assert_produces_warning(FutureWarning, match=msg):
83
+ rs = ser.replace([np.nan, "foo", "bar"], -1)
84
+
85
+ assert (rs[:5] == -1).all()
86
+ assert (rs[6:10] == -1).all()
87
+ assert (rs[20:30] == -1).all()
88
+ assert (pd.isna(ser[:5])).all()
89
+
90
+ # replace with different values
91
+ with tm.assert_produces_warning(FutureWarning, match=msg):
92
+ rs = ser.replace({np.nan: -1, "foo": -2, "bar": -3})
93
+
94
+ assert (rs[:5] == -1).all()
95
+ assert (rs[6:10] == -2).all()
96
+ assert (rs[20:30] == -3).all()
97
+ assert (pd.isna(ser[:5])).all()
98
+
99
+ # replace with different values with 2 lists
100
+ with tm.assert_produces_warning(FutureWarning, match=msg):
101
+ rs2 = ser.replace([np.nan, "foo", "bar"], [-1, -2, -3])
102
+ tm.assert_series_equal(rs, rs2)
103
+
104
+ # replace inplace
105
+ with tm.assert_produces_warning(FutureWarning, match=msg):
106
+ return_value = ser.replace([np.nan, "foo", "bar"], -1, inplace=True)
107
+ assert return_value is None
108
+
109
+ assert (ser[:5] == -1).all()
110
+ assert (ser[6:10] == -1).all()
111
+ assert (ser[20:30] == -1).all()
112
+
113
+ def test_replace_nan_with_inf(self):
114
+ ser = pd.Series([np.nan, 0, np.inf])
115
+ tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
116
+
117
+ ser = pd.Series([np.nan, 0, "foo", "bar", np.inf, None, pd.NaT])
118
+ tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
119
+ filled = ser.copy()
120
+ filled[4] = 0
121
+ tm.assert_series_equal(ser.replace(np.inf, 0), filled)
122
+
123
+ def test_replace_listlike_value_listlike_target(self, datetime_series):
124
+ ser = pd.Series(datetime_series.index)
125
+ tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
126
+
127
+ # malformed
128
+ msg = r"Replacement lists must match in length\. Expecting 3 got 2"
129
+ with pytest.raises(ValueError, match=msg):
130
+ ser.replace([1, 2, 3], [np.nan, 0])
131
+
132
+ # ser is dt64 so can't hold 1 or 2, so this replace is a no-op
133
+ result = ser.replace([1, 2], [np.nan, 0])
134
+ tm.assert_series_equal(result, ser)
135
+
136
+ ser = pd.Series([0, 1, 2, 3, 4])
137
+ result = ser.replace([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])
138
+ tm.assert_series_equal(result, pd.Series([4, 3, 2, 1, 0]))
139
+
140
+ def test_replace_gh5319(self):
141
+ # API change from 0.12?
142
+ # GH 5319
143
+ ser = pd.Series([0, np.nan, 2, 3, 4])
144
+ expected = ser.ffill()
145
+ msg = (
146
+ "Series.replace without 'value' and with non-dict-like "
147
+ "'to_replace' is deprecated"
148
+ )
149
+ with tm.assert_produces_warning(FutureWarning, match=msg):
150
+ result = ser.replace([np.nan])
151
+ tm.assert_series_equal(result, expected)
152
+
153
+ ser = pd.Series([0, np.nan, 2, 3, 4])
154
+ expected = ser.ffill()
155
+ with tm.assert_produces_warning(FutureWarning, match=msg):
156
+ result = ser.replace(np.nan)
157
+ tm.assert_series_equal(result, expected)
158
+
159
+ def test_replace_datetime64(self):
160
+ # GH 5797
161
+ ser = pd.Series(pd.date_range("20130101", periods=5))
162
+ expected = ser.copy()
163
+ expected.loc[2] = pd.Timestamp("20120101")
164
+ result = ser.replace({pd.Timestamp("20130103"): pd.Timestamp("20120101")})
165
+ tm.assert_series_equal(result, expected)
166
+ result = ser.replace(pd.Timestamp("20130103"), pd.Timestamp("20120101"))
167
+ tm.assert_series_equal(result, expected)
168
+
169
+ def test_replace_nat_with_tz(self):
170
+ # GH 11792: Test with replacing NaT in a list with tz data
171
+ ts = pd.Timestamp("2015/01/01", tz="UTC")
172
+ s = pd.Series([pd.NaT, pd.Timestamp("2015/01/01", tz="UTC")])
173
+ result = s.replace([np.nan, pd.NaT], pd.Timestamp.min)
174
+ expected = pd.Series([pd.Timestamp.min, ts], dtype=object)
175
+ tm.assert_series_equal(expected, result)
176
+
177
+ def test_replace_timedelta_td64(self):
178
+ tdi = pd.timedelta_range(0, periods=5)
179
+ ser = pd.Series(tdi)
180
+
181
+ # Using a single dict argument means we go through replace_list
182
+ result = ser.replace({ser[1]: ser[3]})
183
+
184
+ expected = pd.Series([ser[0], ser[3], ser[2], ser[3], ser[4]])
185
+ tm.assert_series_equal(result, expected)
186
+
187
+ def test_replace_with_single_list(self):
188
+ ser = pd.Series([0, 1, 2, 3, 4])
189
+ msg2 = (
190
+ "Series.replace without 'value' and with non-dict-like "
191
+ "'to_replace' is deprecated"
192
+ )
193
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
194
+ result = ser.replace([1, 2, 3])
195
+ tm.assert_series_equal(result, pd.Series([0, 0, 0, 0, 4]))
196
+
197
+ s = ser.copy()
198
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
199
+ return_value = s.replace([1, 2, 3], inplace=True)
200
+ assert return_value is None
201
+ tm.assert_series_equal(s, pd.Series([0, 0, 0, 0, 4]))
202
+
203
+ # make sure things don't get corrupted when fillna call fails
204
+ s = ser.copy()
205
+ msg = (
206
+ r"Invalid fill method\. Expecting pad \(ffill\) or backfill "
207
+ r"\(bfill\)\. Got crash_cymbal"
208
+ )
209
+ msg3 = "The 'method' keyword in Series.replace is deprecated"
210
+ with pytest.raises(ValueError, match=msg):
211
+ with tm.assert_produces_warning(FutureWarning, match=msg3):
212
+ return_value = s.replace([1, 2, 3], inplace=True, method="crash_cymbal")
213
+ assert return_value is None
214
+ tm.assert_series_equal(s, ser)
215
+
216
+ def test_replace_mixed_types(self):
217
+ ser = pd.Series(np.arange(5), dtype="int64")
218
+
219
+ def check_replace(to_rep, val, expected):
220
+ sc = ser.copy()
221
+ result = ser.replace(to_rep, val)
222
+ return_value = sc.replace(to_rep, val, inplace=True)
223
+ assert return_value is None
224
+ tm.assert_series_equal(expected, result)
225
+ tm.assert_series_equal(expected, sc)
226
+
227
+ # 3.0 can still be held in our int64 series, so we do not upcast GH#44940
228
+ tr, v = [3], [3.0]
229
+ check_replace(tr, v, ser)
230
+ # Note this matches what we get with the scalars 3 and 3.0
231
+ check_replace(tr[0], v[0], ser)
232
+
233
+ # MUST upcast to float
234
+ e = pd.Series([0, 1, 2, 3.5, 4])
235
+ tr, v = [3], [3.5]
236
+ check_replace(tr, v, e)
237
+
238
+ # casts to object
239
+ e = pd.Series([0, 1, 2, 3.5, "a"])
240
+ tr, v = [3, 4], [3.5, "a"]
241
+ check_replace(tr, v, e)
242
+
243
+ # again casts to object
244
+ e = pd.Series([0, 1, 2, 3.5, pd.Timestamp("20130101")])
245
+ tr, v = [3, 4], [3.5, pd.Timestamp("20130101")]
246
+ check_replace(tr, v, e)
247
+
248
+ # casts to object
249
+ e = pd.Series([0, 1, 2, 3.5, True], dtype="object")
250
+ tr, v = [3, 4], [3.5, True]
251
+ check_replace(tr, v, e)
252
+
253
+ # test an object with dates + floats + integers + strings
254
+ dr = pd.Series(pd.date_range("1/1/2001", "1/10/2001", freq="D"))
255
+ result = dr.astype(object).replace([dr[0], dr[1], dr[2]], [1.0, 2, "a"])
256
+ expected = pd.Series([1.0, 2, "a"] + dr[3:].tolist(), dtype=object)
257
+ tm.assert_series_equal(result, expected)
258
+
259
+ def test_replace_bool_with_string_no_op(self):
260
+ s = pd.Series([True, False, True])
261
+ result = s.replace("fun", "in-the-sun")
262
+ tm.assert_series_equal(s, result)
263
+
264
+ def test_replace_bool_with_string(self):
265
+ # nonexistent elements
266
+ s = pd.Series([True, False, True])
267
+ result = s.replace(True, "2u")
268
+ expected = pd.Series(["2u", False, "2u"])
269
+ tm.assert_series_equal(expected, result)
270
+
271
+ def test_replace_bool_with_bool(self):
272
+ s = pd.Series([True, False, True])
273
+ result = s.replace(True, False)
274
+ expected = pd.Series([False] * len(s))
275
+ tm.assert_series_equal(expected, result)
276
+
277
+ def test_replace_with_dict_with_bool_keys(self):
278
+ s = pd.Series([True, False, True])
279
+ result = s.replace({"asdf": "asdb", True: "yes"})
280
+ expected = pd.Series(["yes", False, "yes"])
281
+ tm.assert_series_equal(result, expected)
282
+
283
+ def test_replace_Int_with_na(self, any_int_ea_dtype):
284
+ # GH 38267
285
+ result = pd.Series([0, None], dtype=any_int_ea_dtype).replace(0, pd.NA)
286
+ expected = pd.Series([pd.NA, pd.NA], dtype=any_int_ea_dtype)
287
+ tm.assert_series_equal(result, expected)
288
+ result = pd.Series([0, 1], dtype=any_int_ea_dtype).replace(0, pd.NA)
289
+ result.replace(1, pd.NA, inplace=True)
290
+ tm.assert_series_equal(result, expected)
291
+
292
+ def test_replace2(self):
293
+ N = 50
294
+ ser = pd.Series(
295
+ np.fabs(np.random.default_rng(2).standard_normal(N)),
296
+ pd.date_range("2020-01-01", periods=N),
297
+ dtype=object,
298
+ )
299
+ ser[:5] = np.nan
300
+ ser[6:10] = "foo"
301
+ ser[20:30] = "bar"
302
+
303
+ # replace list with a single value
304
+ msg = "Downcasting behavior in `replace`"
305
+ with tm.assert_produces_warning(FutureWarning, match=msg):
306
+ rs = ser.replace([np.nan, "foo", "bar"], -1)
307
+
308
+ assert (rs[:5] == -1).all()
309
+ assert (rs[6:10] == -1).all()
310
+ assert (rs[20:30] == -1).all()
311
+ assert (pd.isna(ser[:5])).all()
312
+
313
+ # replace with different values
314
+ with tm.assert_produces_warning(FutureWarning, match=msg):
315
+ rs = ser.replace({np.nan: -1, "foo": -2, "bar": -3})
316
+
317
+ assert (rs[:5] == -1).all()
318
+ assert (rs[6:10] == -2).all()
319
+ assert (rs[20:30] == -3).all()
320
+ assert (pd.isna(ser[:5])).all()
321
+
322
+ # replace with different values with 2 lists
323
+ with tm.assert_produces_warning(FutureWarning, match=msg):
324
+ rs2 = ser.replace([np.nan, "foo", "bar"], [-1, -2, -3])
325
+ tm.assert_series_equal(rs, rs2)
326
+
327
+ # replace inplace
328
+ with tm.assert_produces_warning(FutureWarning, match=msg):
329
+ return_value = ser.replace([np.nan, "foo", "bar"], -1, inplace=True)
330
+ assert return_value is None
331
+ assert (ser[:5] == -1).all()
332
+ assert (ser[6:10] == -1).all()
333
+ assert (ser[20:30] == -1).all()
334
+
335
+ @pytest.mark.parametrize("inplace", [True, False])
336
+ def test_replace_cascade(self, inplace):
337
+ # Test that replaced values are not replaced again
338
+ # GH #50778
339
+ ser = pd.Series([1, 2, 3])
340
+ expected = pd.Series([2, 3, 4])
341
+
342
+ res = ser.replace([1, 2, 3], [2, 3, 4], inplace=inplace)
343
+ if inplace:
344
+ tm.assert_series_equal(ser, expected)
345
+ else:
346
+ tm.assert_series_equal(res, expected)
347
+
348
+ def test_replace_with_dictlike_and_string_dtype(self, nullable_string_dtype):
349
+ # GH 32621, GH#44940
350
+ ser = pd.Series(["one", "two", np.nan], dtype=nullable_string_dtype)
351
+ expected = pd.Series(["1", "2", np.nan], dtype=nullable_string_dtype)
352
+ result = ser.replace({"one": "1", "two": "2"})
353
+ tm.assert_series_equal(expected, result)
354
+
355
+ def test_replace_with_empty_dictlike(self):
356
+ # GH 15289
357
+ s = pd.Series(list("abcd"))
358
+ tm.assert_series_equal(s, s.replace({}))
359
+
360
+ empty_series = pd.Series([])
361
+ tm.assert_series_equal(s, s.replace(empty_series))
362
+
363
+ def test_replace_string_with_number(self):
364
+ # GH 15743
365
+ s = pd.Series([1, 2, 3])
366
+ result = s.replace("2", np.nan)
367
+ expected = pd.Series([1, 2, 3])
368
+ tm.assert_series_equal(expected, result)
369
+
370
+ def test_replace_replacer_equals_replacement(self):
371
+ # GH 20656
372
+ # make sure all replacers are matching against original values
373
+ s = pd.Series(["a", "b"])
374
+ expected = pd.Series(["b", "a"])
375
+ result = s.replace({"a": "b", "b": "a"})
376
+ tm.assert_series_equal(expected, result)
377
+
378
+ def test_replace_unicode_with_number(self):
379
+ # GH 15743
380
+ s = pd.Series([1, 2, 3])
381
+ result = s.replace("2", np.nan)
382
+ expected = pd.Series([1, 2, 3])
383
+ tm.assert_series_equal(expected, result)
384
+
385
+ def test_replace_mixed_types_with_string(self):
386
+ # Testing mixed
+ s = pd.Series([1, 2, 3, "4", 4, 5])
+ msg = "Downcasting behavior in `replace`"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ result = s.replace([2, "4"], np.nan)
+ expected = pd.Series([1, np.nan, 3, np.nan, 4, 5])
+ tm.assert_series_equal(expected, result)
+
+ @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="can't fill 0 in string")
+ @pytest.mark.parametrize(
+ "categorical, numeric",
+ [
+ (pd.Categorical(["A"], categories=["A", "B"]), [1]),
+ (pd.Categorical(["A", "B"], categories=["A", "B"]), [1, 2]),
+ ],
+ )
+ def test_replace_categorical(self, categorical, numeric):
+ # GH 24971, GH#23305
+ ser = pd.Series(categorical)
+ msg = "Downcasting behavior in `replace`"
+ msg = "with CategoricalDtype is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ result = ser.replace({"A": 1, "B": 2})
+ expected = pd.Series(numeric).astype("category")
+ if 2 not in expected.cat.categories:
+ # i.e. categories should be [1, 2] even if there are no "B"s present
+ # GH#44940
+ expected = expected.cat.add_categories(2)
+ tm.assert_series_equal(expected, result)
+
+ @pytest.mark.parametrize(
+ "data, data_exp", [(["a", "b", "c"], ["b", "b", "c"]), (["a"], ["b"])]
+ )
+ def test_replace_categorical_inplace(self, data, data_exp):
+ # GH 53358
+ result = pd.Series(data, dtype="category")
+ msg = "with CategoricalDtype is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ result.replace(to_replace="a", value="b", inplace=True)
+ expected = pd.Series(data_exp, dtype="category")
+ tm.assert_series_equal(result, expected)
+
+ def test_replace_categorical_single(self):
+ # GH 26988
+ dti = pd.date_range("2016-01-01", periods=3, tz="US/Pacific")
+ s = pd.Series(dti)
+ c = s.astype("category")
+
+ expected = c.copy()
+ expected = expected.cat.add_categories("foo")
+ expected[2] = "foo"
+ expected = expected.cat.remove_unused_categories()
+ assert c[2] != "foo"
+
+ msg = "with CategoricalDtype is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ result = c.replace(c[2], "foo")
+ tm.assert_series_equal(expected, result)
+ assert c[2] != "foo" # ensure non-inplace call does not alter original
+
+ msg = "with CategoricalDtype is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ return_value = c.replace(c[2], "foo", inplace=True)
+ assert return_value is None
+ tm.assert_series_equal(expected, c)
+
+ first_value = c[0]
+ msg = "with CategoricalDtype is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ return_value = c.replace(c[1], c[0], inplace=True)
+ assert return_value is None
+ assert c[0] == c[1] == first_value # test replacing with existing value
+
+ def test_replace_with_no_overflowerror(self):
+ # GH 25616
+ # casts to object without Exception from OverflowError
+ s = pd.Series([0, 1, 2, 3, 4])
+ result = s.replace([3], ["100000000000000000000"])
+ expected = pd.Series([0, 1, 2, "100000000000000000000", 4])
+ tm.assert_series_equal(result, expected)
+
+ s = pd.Series([0, "100000000000000000000", "100000000000000000001"])
+ result = s.replace(["100000000000000000000"], [1])
+ expected = pd.Series([0, 1, "100000000000000000001"])
+ tm.assert_series_equal(result, expected)
+
+ @pytest.mark.parametrize(
+ "ser, to_replace, exp",
+ [
+ ([1, 2, 3], {1: 2, 2: 3, 3: 4}, [2, 3, 4]),
+ (["1", "2", "3"], {"1": "2", "2": "3", "3": "4"}, ["2", "3", "4"]),
+ ],
+ )
+ def test_replace_commutative(self, ser, to_replace, exp):
+ # GH 16051
+ # DataFrame.replace() overwrites when values are non-numeric
+
+ series = pd.Series(ser)
+
+ expected = pd.Series(exp)
+ result = series.replace(to_replace)
+
+ tm.assert_series_equal(result, expected)
+
+ @pytest.mark.parametrize(
+ "ser, exp", [([1, 2, 3], [1, True, 3]), (["x", 2, 3], ["x", True, 3])]
+ )
+ def test_replace_no_cast(self, ser, exp):
+ # GH 9113
+ # BUG: replace int64 dtype with bool coerces to int64
+
+ series = pd.Series(ser)
+ result = series.replace(2, True)
+ expected = pd.Series(exp)
+
+ tm.assert_series_equal(result, expected)
+
+ def test_replace_invalid_to_replace(self):
+ # GH 18634
+ # API: replace() should raise an exception if invalid argument is given
+ series = pd.Series(["a", "b", "c "])
+ msg = (
+ r"Expecting 'to_replace' to be either a scalar, array-like, "
+ r"dict or None, got invalid type.*"
+ )
+ msg2 = (
+ "Series.replace without 'value' and with non-dict-like "
+ "'to_replace' is deprecated"
+ )
+ with pytest.raises(TypeError, match=msg):
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
+ series.replace(lambda x: x.strip())
+
+ @pytest.mark.parametrize("frame", [False, True])
+ def test_replace_nonbool_regex(self, frame):
+ obj = pd.Series(["a", "b", "c "])
+ if frame:
+ obj = obj.to_frame()
+
+ msg = "'to_replace' must be 'None' if 'regex' is not a bool"
+ with pytest.raises(ValueError, match=msg):
+ obj.replace(to_replace=["a"], regex="foo")
+
+ @pytest.mark.parametrize("frame", [False, True])
+ def test_replace_empty_copy(self, frame):
+ obj = pd.Series([], dtype=np.float64)
+ if frame:
+ obj = obj.to_frame()
+
+ res = obj.replace(4, 5, inplace=True)
+ assert res is None
+
+ res = obj.replace(4, 5, inplace=False)
+ tm.assert_equal(res, obj)
+ assert res is not obj
+
+ def test_replace_only_one_dictlike_arg(self, fixed_now_ts):
+ # GH#33340
+
+ ser = pd.Series([1, 2, "A", fixed_now_ts, True])
+ to_replace = {0: 1, 2: "A"}
+ value = "foo"
+ msg = "Series.replace cannot use dict-like to_replace and non-None value"
+ with pytest.raises(ValueError, match=msg):
+ ser.replace(to_replace, value)
+
+ to_replace = 1
+ value = {0: "foo", 2: "bar"}
+ msg = "Series.replace cannot use dict-value and non-None to_replace"
+ with pytest.raises(ValueError, match=msg):
+ ser.replace(to_replace, value)
+
+ def test_replace_extension_other(self, frame_or_series):
+ # https://github.com/pandas-dev/pandas/issues/34530
+ obj = frame_or_series(pd.array([1, 2, 3], dtype="Int64"))
+ result = obj.replace("", "") # no exception
+ # should not have changed dtype
+ tm.assert_equal(obj, result)
+
+ def _check_replace_with_method(self, ser: pd.Series):
+ df = ser.to_frame()
+
+ msg1 = "The 'method' keyword in Series.replace is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg1):
+ res = ser.replace(ser[1], method="pad")
+ expected = pd.Series([ser[0], ser[0]] + list(ser[2:]), dtype=ser.dtype)
+ tm.assert_series_equal(res, expected)
+
+ msg2 = "The 'method' keyword in DataFrame.replace is deprecated"
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
+ res_df = df.replace(ser[1], method="pad")
+ tm.assert_frame_equal(res_df, expected.to_frame())
+
+ ser2 = ser.copy()
+ with tm.assert_produces_warning(FutureWarning, match=msg1):
+ res2 = ser2.replace(ser[1], method="pad", inplace=True)
+ assert res2 is None
+ tm.assert_series_equal(ser2, expected)
+
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
+ res_df2 = df.replace(ser[1], method="pad", inplace=True)
+ assert res_df2 is None
+ tm.assert_frame_equal(df, expected.to_frame())
+
+ def test_replace_ea_dtype_with_method(self, any_numeric_ea_dtype):
+ arr = pd.array([1, 2, pd.NA, 4], dtype=any_numeric_ea_dtype)
+ ser = pd.Series(arr)
+
+ self._check_replace_with_method(ser)
+
+ @pytest.mark.parametrize("as_categorical", [True, False])
+ def test_replace_interval_with_method(self, as_categorical):
+ # in particular interval that can't hold NA
+
+ idx = pd.IntervalIndex.from_breaks(range(4))
+ ser = pd.Series(idx)
+ if as_categorical:
+ ser = ser.astype("category")
+
+ self._check_replace_with_method(ser)
+
+ @pytest.mark.parametrize("as_period", [True, False])
+ @pytest.mark.parametrize("as_categorical", [True, False])
+ def test_replace_datetimelike_with_method(self, as_period, as_categorical):
+ idx = pd.date_range("2016-01-01", periods=5, tz="US/Pacific")
+ if as_period:
+ idx = idx.tz_localize(None).to_period("D")
+
+ ser = pd.Series(idx)
+ ser.iloc[-2] = pd.NaT
+ if as_categorical:
+ ser = ser.astype("category")
+
+ self._check_replace_with_method(ser)
+
+ def test_replace_with_compiled_regex(self):
+ # https://github.com/pandas-dev/pandas/issues/35680
+ s = pd.Series(["a", "b", "c"])
+ regex = re.compile("^a$")
+ result = s.replace({regex: "z"}, regex=True)
+ expected = pd.Series(["z", "b", "c"])
+ tm.assert_series_equal(result, expected)
+
+ def test_pandas_replace_na(self):
+ # GH#43344
+ ser = pd.Series(["AA", "BB", "CC", "DD", "EE", "", pd.NA], dtype="string")
+ regex_mapping = {
+ "AA": "CC",
+ "BB": "CC",
+ "EE": "CC",
+ "CC": "CC-REPL",
+ }
+ result = ser.replace(regex_mapping, regex=True)
+ exp = pd.Series(["CC", "CC", "CC-REPL", "DD", "CC", "", pd.NA], dtype="string")
+ tm.assert_series_equal(result, exp)
+
+ @pytest.mark.parametrize(
+ "dtype, input_data, to_replace, expected_data",
+ [
+ ("bool", [True, False], {True: False}, [False, False]),
+ ("int64", [1, 2], {1: 10, 2: 20}, [10, 20]),
+ ("Int64", [1, 2], {1: 10, 2: 20}, [10, 20]),
+ ("float64", [1.1, 2.2], {1.1: 10.1, 2.2: 20.5}, [10.1, 20.5]),
+ ("Float64", [1.1, 2.2], {1.1: 10.1, 2.2: 20.5}, [10.1, 20.5]),
+ ("string", ["one", "two"], {"one": "1", "two": "2"}, ["1", "2"]),
+ (
+ pd.IntervalDtype("int64"),
+ IntervalArray([pd.Interval(1, 2), pd.Interval(2, 3)]),
+ {pd.Interval(1, 2): pd.Interval(10, 20)},
+ IntervalArray([pd.Interval(10, 20), pd.Interval(2, 3)]),
+ ),
+ (
+ pd.IntervalDtype("float64"),
+ IntervalArray([pd.Interval(1.0, 2.7), pd.Interval(2.8, 3.1)]),
+ {pd.Interval(1.0, 2.7): pd.Interval(10.6, 20.8)},
+ IntervalArray([pd.Interval(10.6, 20.8), pd.Interval(2.8, 3.1)]),
+ ),
+ (
+ pd.PeriodDtype("M"),
+ [pd.Period("2020-05", freq="M")],
+ {pd.Period("2020-05", freq="M"): pd.Period("2020-06", freq="M")},
+ [pd.Period("2020-06", freq="M")],
+ ),
+ ],
+ )
+ def test_replace_dtype(self, dtype, input_data, to_replace, expected_data):
+ # GH#33484
+ ser = pd.Series(input_data, dtype=dtype)
+ result = ser.replace(to_replace)
+ expected = pd.Series(expected_data, dtype=dtype)
+ tm.assert_series_equal(result, expected)
+
+ def test_replace_string_dtype(self):
+ # GH#40732, GH#44940
+ ser = pd.Series(["one", "two", np.nan], dtype="string")
+ res = ser.replace({"one": "1", "two": "2"})
+ expected = pd.Series(["1", "2", np.nan], dtype="string")
+ tm.assert_series_equal(res, expected)
+
+ # GH#31644
+ ser2 = pd.Series(["A", np.nan], dtype="string")
+ res2 = ser2.replace("A", "B")
+ expected2 = pd.Series(["B", np.nan], dtype="string")
+ tm.assert_series_equal(res2, expected2)
+
+ ser3 = pd.Series(["A", "B"], dtype="string")
+ res3 = ser3.replace("A", pd.NA)
+ expected3 = pd.Series([pd.NA, "B"], dtype="string")
+ tm.assert_series_equal(res3, expected3)
+
+ def test_replace_string_dtype_list_to_replace(self):
+ # GH#41215, GH#44940
+ ser = pd.Series(["abc", "def"], dtype="string")
+ res = ser.replace(["abc", "any other string"], "xyz")
+ expected = pd.Series(["xyz", "def"], dtype="string")
+ tm.assert_series_equal(res, expected)
+
+ def test_replace_string_dtype_regex(self):
+ # GH#31644
+ ser = pd.Series(["A", "B"], dtype="string")
+ res = ser.replace(r".", "C", regex=True)
+ expected = pd.Series(["C", "C"], dtype="string")
+ tm.assert_series_equal(res, expected)
+
+ def test_replace_nullable_numeric(self):
+ # GH#40732, GH#44940
+
+ floats = pd.Series([1.0, 2.0, 3.999, 4.4], dtype=pd.Float64Dtype())
+ assert floats.replace({1.0: 9}).dtype == floats.dtype
+ assert floats.replace(1.0, 9).dtype == floats.dtype
+ assert floats.replace({1.0: 9.0}).dtype == floats.dtype
+ assert floats.replace(1.0, 9.0).dtype == floats.dtype
+
+ res = floats.replace(to_replace=[1.0, 2.0], value=[9.0, 10.0])
+ assert res.dtype == floats.dtype
+
+ ints = pd.Series([1, 2, 3, 4], dtype=pd.Int64Dtype())
+ assert ints.replace({1: 9}).dtype == ints.dtype
+ assert ints.replace(1, 9).dtype == ints.dtype
+ assert ints.replace({1: 9.0}).dtype == ints.dtype
+ assert ints.replace(1, 9.0).dtype == ints.dtype
+
+ # nullable (for now) raises instead of casting
+ with pytest.raises(TypeError, match="Invalid value"):
+ ints.replace({1: 9.5})
+ with pytest.raises(TypeError, match="Invalid value"):
+ ints.replace(1, 9.5)
+
+ @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="can't fill 1 in string")
+ @pytest.mark.parametrize("regex", [False, True])
+ def test_replace_regex_dtype_series(self, regex):
+ # GH-48644
+ series = pd.Series(["0"])
+ expected = pd.Series([1])
+ msg = "Downcasting behavior in `replace`"
+ with tm.assert_produces_warning(FutureWarning, match=msg):
+ result = series.replace(to_replace="0", value=1, regex=regex)
+ tm.assert_series_equal(result, expected)
+
+ def test_replace_different_int_types(self, any_int_numpy_dtype):
+ # GH#45311
+ labs = pd.Series([1, 1, 1, 0, 0, 2, 2, 2], dtype=any_int_numpy_dtype)
+
+ maps = pd.Series([0, 2, 1], dtype=any_int_numpy_dtype)
+ map_dict = dict(zip(maps.values, maps.index))
+
+ result = labs.replace(map_dict)
+ expected = labs.replace({0: 0, 2: 1, 1: 2})
+ tm.assert_series_equal(result, expected)
+
+ @pytest.mark.parametrize("val", [2, np.nan, 2.0])
+ def test_replace_value_none_dtype_numeric(self, val):
+ # GH#48231
+ ser = pd.Series([1, val])
+ result = ser.replace(val, None)
+ expected = pd.Series([1, None], dtype=object)
+ tm.assert_series_equal(result, expected)
+
+ def test_replace_change_dtype_series(self, using_infer_string):
+ # GH#25797
+ df = pd.DataFrame.from_dict({"Test": ["0.5", True, "0.6"]})
+ warn = FutureWarning if using_infer_string else None
+ with tm.assert_produces_warning(warn, match="Downcasting"):
+ df["Test"] = df["Test"].replace([True], [np.nan])
+ expected = pd.DataFrame.from_dict({"Test": ["0.5", np.nan, "0.6"]})
+ tm.assert_frame_equal(df, expected)
+
+ df = pd.DataFrame.from_dict({"Test": ["0.5", None, "0.6"]})
+ df["Test"] = df["Test"].replace([None], [np.nan])
+ tm.assert_frame_equal(df, expected)
+
+ df = pd.DataFrame.from_dict({"Test": ["0.5", None, "0.6"]})
+ df["Test"] = df["Test"].fillna(np.nan)
+ tm.assert_frame_equal(df, expected)
+
+ @pytest.mark.parametrize("dtype", ["object", "Int64"])
+ def test_replace_na_in_obj_column(self, dtype):
+ # GH#47480
+ ser = pd.Series([0, 1, pd.NA], dtype=dtype)
+ expected = pd.Series([0, 2, pd.NA], dtype=dtype)
+ result = ser.replace(to_replace=1, value=2)
+ tm.assert_series_equal(result, expected)
+
+ ser.replace(to_replace=1, value=2, inplace=True)
+ tm.assert_series_equal(ser, expected)
+
+ @pytest.mark.parametrize("val", [0, 0.5])
+ def test_replace_numeric_column_with_na(self, val):
+ # GH#50758
+ ser = pd.Series([val, 1])
+ expected = pd.Series([val, pd.NA])
+ result = ser.replace(to_replace=1, value=pd.NA)
+ tm.assert_series_equal(result, expected)
+
+ ser.replace(to_replace=1, value=pd.NA, inplace=True)
+ tm.assert_series_equal(ser, expected)
+
+ def test_replace_ea_float_with_bool(self):
+ # GH#55398
+ ser = pd.Series([0.0], dtype="Float64")
+ expected = ser.copy()
+ result = ser.replace(False, 1.0)
+ tm.assert_series_equal(result, expected)
+
+ ser = pd.Series([False], dtype="boolean")
+ expected = ser.copy()
+ result = ser.replace(0.0, True)
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_reset_index.py ADDED
@@ -0,0 +1,225 @@
+ from datetime import datetime
+
+ import numpy as np
+ import pytest
+
+ import pandas as pd
+ from pandas import (
+ DataFrame,
+ Index,
+ MultiIndex,
+ RangeIndex,
+ Series,
+ date_range,
+ option_context,
+ )
+ import pandas._testing as tm
+
+
+ class TestResetIndex:
+ def test_reset_index_dti_round_trip(self):
+ dti = date_range(start="1/1/2001", end="6/1/2001", freq="D")._with_freq(None)
+ d1 = DataFrame({"v": np.random.default_rng(2).random(len(dti))}, index=dti)
+ d2 = d1.reset_index()
+ assert d2.dtypes.iloc[0] == np.dtype("M8[ns]")
+ d3 = d2.set_index("index")
+ tm.assert_frame_equal(d1, d3, check_names=False)
+
+ # GH#2329
+ stamp = datetime(2012, 11, 22)
+ df = DataFrame([[stamp, 12.1]], columns=["Date", "Value"])
+ df = df.set_index("Date")
+
+ assert df.index[0] == stamp
+ assert df.reset_index()["Date"].iloc[0] == stamp
+
+ def test_reset_index(self):
+ df = DataFrame(
+ 1.1 * np.arange(120).reshape((30, 4)),
+ columns=Index(list("ABCD"), dtype=object),
+ index=Index([f"i-{i}" for i in range(30)], dtype=object),
+ )[:5]
+ ser = df.stack(future_stack=True)
+ ser.index.names = ["hash", "category"]
+
+ ser.name = "value"
+ df = ser.reset_index()
+ assert "value" in df
+
+ df = ser.reset_index(name="value2")
+ assert "value2" in df
+
+ # check inplace
+ s = ser.reset_index(drop=True)
+ s2 = ser
+ return_value = s2.reset_index(drop=True, inplace=True)
+ assert return_value is None
+ tm.assert_series_equal(s, s2)
+
+ # level
+ index = MultiIndex(
+ levels=[["bar"], ["one", "two", "three"], [0, 1]],
+ codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]],
+ )
+ s = Series(np.random.default_rng(2).standard_normal(6), index=index)
+ rs = s.reset_index(level=1)
+ assert len(rs.columns) == 2
+
+ rs = s.reset_index(level=[0, 2], drop=True)
+ tm.assert_index_equal(rs.index, Index(index.get_level_values(1)))
+ assert isinstance(rs, Series)
+
+ def test_reset_index_name(self):
+ s = Series([1, 2, 3], index=Index(range(3), name="x"))
+ assert s.reset_index().index.name is None
+ assert s.reset_index(drop=True).index.name is None
+
+ def test_reset_index_level(self):
+ df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=["A", "B", "C"])
+
+ for levels in ["A", "B"], [0, 1]:
+ # With MultiIndex
+ s = df.set_index(["A", "B"])["C"]
+
+ result = s.reset_index(level=levels[0])
+ tm.assert_frame_equal(result, df.set_index("B"))
+
+ result = s.reset_index(level=levels[:1])
+ tm.assert_frame_equal(result, df.set_index("B"))
+
+ result = s.reset_index(level=levels)
+ tm.assert_frame_equal(result, df)
+
+ result = df.set_index(["A", "B"]).reset_index(level=levels, drop=True)
+ tm.assert_frame_equal(result, df[["C"]])
+
+ with pytest.raises(KeyError, match="Level E "):
+ s.reset_index(level=["A", "E"])
+
+ # With single-level Index
+ s = df.set_index("A")["B"]
+
+ result = s.reset_index(level=levels[0])
+ tm.assert_frame_equal(result, df[["A", "B"]])
+
+ result = s.reset_index(level=levels[:1])
+ tm.assert_frame_equal(result, df[["A", "B"]])
+
+ result = s.reset_index(level=levels[0], drop=True)
+ tm.assert_series_equal(result, df["B"])
+
+ with pytest.raises(IndexError, match="Too many levels"):
+ s.reset_index(level=[0, 1, 2])
+
+ # Check that .reset_index([],drop=True) doesn't fail
+ result = Series(range(4)).reset_index([], drop=True)
+ expected = Series(range(4))
+ tm.assert_series_equal(result, expected)
+
+ def test_reset_index_range(self):
+ # GH 12071
+ s = Series(range(2), name="A", dtype="int64")
+ series_result = s.reset_index()
+ assert isinstance(series_result.index, RangeIndex)
+ series_expected = DataFrame(
+ [[0, 0], [1, 1]], columns=["index", "A"], index=RangeIndex(stop=2)
+ )
+ tm.assert_frame_equal(series_result, series_expected)
+
+ def test_reset_index_drop_errors(self):
+ # GH 20925
+
+ # KeyError raised for series index when passed level name is missing
+ s = Series(range(4))
+ with pytest.raises(KeyError, match="does not match index name"):
+ s.reset_index("wrong", drop=True)
+ with pytest.raises(KeyError, match="does not match index name"):
+ s.reset_index("wrong")
+
+ # KeyError raised for series when level to be dropped is missing
+ s = Series(range(4), index=MultiIndex.from_product([[1, 2]] * 2))
+ with pytest.raises(KeyError, match="not found"):
+ s.reset_index("wrong", drop=True)
+
+ def test_reset_index_with_drop(self):
+ arrays = [
+ ["bar", "bar", "baz", "baz", "qux", "qux", "foo", "foo"],
+ ["one", "two", "one", "two", "one", "two", "one", "two"],
+ ]
+ tuples = zip(*arrays)
+ index = MultiIndex.from_tuples(tuples)
+ data = np.random.default_rng(2).standard_normal(8)
+ ser = Series(data, index=index)
+ ser.iloc[3] = np.nan
+
+ deleveled = ser.reset_index()
+ assert isinstance(deleveled, DataFrame)
+ assert len(deleveled.columns) == len(ser.index.levels) + 1
+ assert deleveled.index.name == ser.index.name
+
+ deleveled = ser.reset_index(drop=True)
+ assert isinstance(deleveled, Series)
+ assert deleveled.index.name == ser.index.name
+
+ def test_reset_index_inplace_and_drop_ignore_name(self):
+ # GH#44575
+ ser = Series(range(2), name="old")
+ ser.reset_index(name="new", drop=True, inplace=True)
+ expected = Series(range(2), name="old")
+ tm.assert_series_equal(ser, expected)
+
+ def test_reset_index_drop_infer_string(self):
+ # GH#56160
+ pytest.importorskip("pyarrow")
+ ser = Series(["a", "b", "c"], dtype=object)
+ with option_context("future.infer_string", True):
+ result = ser.reset_index(drop=True)
+ tm.assert_series_equal(result, ser)
+
+
+ @pytest.mark.parametrize(
+ "array, dtype",
+ [
+ (["a", "b"], object),
+ (
+ pd.period_range("12-1-2000", periods=2, freq="Q-DEC"),
+ pd.PeriodDtype(freq="Q-DEC"),
+ ),
+ ],
+ )
+ def test_reset_index_dtypes_on_empty_series_with_multiindex(
+ array, dtype, using_infer_string
+ ):
+ # GH 19602 - Preserve dtype on empty Series with MultiIndex
+ idx = MultiIndex.from_product([[0, 1], [0.5, 1.0], array])
+ result = Series(dtype=object, index=idx)[:0].reset_index().dtypes
+ exp = "string" if using_infer_string else object
+ expected = Series(
+ {
+ "level_0": np.int64,
+ "level_1": np.float64,
+ "level_2": exp if dtype == object else dtype,
+ 0: object,
+ }
+ )
+ tm.assert_series_equal(result, expected)
+
+
+ @pytest.mark.parametrize(
+ "names, expected_names",
+ [
+ (["A", "A"], ["A", "A"]),
+ (["level_1", None], ["level_1", "level_1"]),
+ ],
+ )
+ @pytest.mark.parametrize("allow_duplicates", [False, True])
+ def test_column_name_duplicates(names, expected_names, allow_duplicates):
+ # GH#44755 reset_index with duplicate column labels
+ s = Series([1], index=MultiIndex.from_arrays([[1], [1]], names=names))
+ if allow_duplicates:
+ result = s.reset_index(allow_duplicates=True)
+ expected = DataFrame([[1, 1, 1]], columns=expected_names + [0])
+ tm.assert_frame_equal(result, expected)
+ else:
+ with pytest.raises(ValueError, match="cannot insert"):
+ s.reset_index()
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_round.py ADDED
@@ -0,0 +1,74 @@
+ import numpy as np
+ import pytest
+
+ import pandas as pd
+ from pandas import Series
+ import pandas._testing as tm
+
+
+ class TestSeriesRound:
+ def test_round(self, datetime_series):
+ datetime_series.index.name = "index_name"
+ result = datetime_series.round(2)
+ expected = Series(
+ np.round(datetime_series.values, 2), index=datetime_series.index, name="ts"
+ )
+ tm.assert_series_equal(result, expected)
+ assert result.name == datetime_series.name
+
+ def test_round_numpy(self, any_float_dtype):
+ # See GH#12600
+ ser = Series([1.53, 1.36, 0.06], dtype=any_float_dtype)
+ out = np.round(ser, decimals=0)
+ expected = Series([2.0, 1.0, 0.0], dtype=any_float_dtype)
+ tm.assert_series_equal(out, expected)
+
+ msg = "the 'out' parameter is not supported"
+ with pytest.raises(ValueError, match=msg):
+ np.round(ser, decimals=0, out=ser)
+
+ def test_round_numpy_with_nan(self, any_float_dtype):
+ # See GH#14197
+ ser = Series([1.53, np.nan, 0.06], dtype=any_float_dtype)
+ with tm.assert_produces_warning(None):
+ result = ser.round()
+ expected = Series([2.0, np.nan, 0.0], dtype=any_float_dtype)
+ tm.assert_series_equal(result, expected)
+
+ def test_round_builtin(self, any_float_dtype):
+ ser = Series(
+ [1.123, 2.123, 3.123],
+ index=range(3),
+ dtype=any_float_dtype,
+ )
+ result = round(ser)
+ expected_rounded0 = Series(
+ [1.0, 2.0, 3.0], index=range(3), dtype=any_float_dtype
+ )
+ tm.assert_series_equal(result, expected_rounded0)
+
+ decimals = 2
+ expected_rounded = Series(
+ [1.12, 2.12, 3.12], index=range(3), dtype=any_float_dtype
+ )
+ result = round(ser, decimals)
+ tm.assert_series_equal(result, expected_rounded)
+
+ @pytest.mark.parametrize("method", ["round", "floor", "ceil"])
+ @pytest.mark.parametrize("freq", ["s", "5s", "min", "5min", "h", "5h"])
+ def test_round_nat(self, method, freq, unit):
+ # GH14940, GH#56158
+ ser = Series([pd.NaT], dtype=f"M8[{unit}]")
+ expected = Series(pd.NaT, dtype=f"M8[{unit}]")
+ round_method = getattr(ser.dt, method)
+ result = round_method(freq)
+ tm.assert_series_equal(result, expected)
+
+ def test_round_ea_boolean(self):
+ # GH#55936
+ ser = Series([True, False], dtype="boolean")
+ expected = ser.copy()
+ result = ser.round(2)
+ tm.assert_series_equal(result, expected)
+ result.iloc[0] = False
+ tm.assert_series_equal(ser, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_searchsorted.py ADDED
@@ -0,0 +1,77 @@
+ import numpy as np
+ import pytest
+
+ import pandas as pd
+ from pandas import (
+ Series,
+ Timestamp,
+ date_range,
+ )
+ import pandas._testing as tm
+ from pandas.api.types import is_scalar
+
+
+ class TestSeriesSearchSorted:
+ def test_searchsorted(self):
+ ser = Series([1, 2, 3])
+
+ result = ser.searchsorted(1, side="left")
+ assert is_scalar(result)
+ assert result == 0
+
+ result = ser.searchsorted(1, side="right")
+ assert is_scalar(result)
+ assert result == 1
+
+ def test_searchsorted_numeric_dtypes_scalar(self):
+ ser = Series([1, 2, 90, 1000, 3e9])
+ res = ser.searchsorted(30)
+ assert is_scalar(res)
+ assert res == 2
+
+ res = ser.searchsorted([30])
+ exp = np.array([2], dtype=np.intp)
+ tm.assert_numpy_array_equal(res, exp)
+
+ def test_searchsorted_numeric_dtypes_vector(self):
+ ser = Series([1, 2, 90, 1000, 3e9])
+ res = ser.searchsorted([91, 2e6])
+ exp = np.array([3, 4], dtype=np.intp)
+ tm.assert_numpy_array_equal(res, exp)
+
+ def test_searchsorted_datetime64_scalar(self):
+ ser = Series(date_range("20120101", periods=10, freq="2D"))
+ val = Timestamp("20120102")
+ res = ser.searchsorted(val)
+ assert is_scalar(res)
+ assert res == 1
+
+ def test_searchsorted_datetime64_scalar_mixed_timezones(self):
+ # GH 30086
+ ser = Series(date_range("20120101", periods=10, freq="2D", tz="UTC"))
+ val = Timestamp("20120102", tz="America/New_York")
+ res = ser.searchsorted(val)
+ assert is_scalar(res)
+ assert res == 1
+
+ def test_searchsorted_datetime64_list(self):
+ ser = Series(date_range("20120101", periods=10, freq="2D"))
+ vals = [Timestamp("20120102"), Timestamp("20120104")]
+ res = ser.searchsorted(vals)
+ exp = np.array([1, 2], dtype=np.intp)
+ tm.assert_numpy_array_equal(res, exp)
+
+ def test_searchsorted_sorter(self):
+ # GH8490
+ ser = Series([3, 1, 2])
+ res = ser.searchsorted([0, 3], sorter=np.argsort(ser))
+ exp = np.array([0, 2], dtype=np.intp)
+ tm.assert_numpy_array_equal(res, exp)
+
+ def test_searchsorted_dataframe_fail(self):
+ # GH#49620
+ ser = Series([1, 2, 3, 4, 5])
+ vals = pd.DataFrame([[1, 2], [3, 4]])
+ msg = "Value must be 1-D array-like or scalar, DataFrame is not supported"
+ with pytest.raises(ValueError, match=msg):
+ ser.searchsorted(vals)
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_set_name.py ADDED
@@ -0,0 +1,21 @@
+ from datetime import datetime
+
+ from pandas import Series
+
+
+ class TestSetName:
+ def test_set_name(self):
+ ser = Series([1, 2, 3])
+ ser2 = ser._set_name("foo")
+ assert ser2.name == "foo"
+ assert ser.name is None
+ assert ser is not ser2
+
+ def test_set_name_attribute(self):
+ ser = Series([1, 2, 3])
+ ser2 = Series([1, 2, 3], name="bar")
+ for name in [7, 7.0, "name", datetime(2001, 1, 1), (1,), "\u05D0"]:
+ ser.name = name
+ assert ser.name == name
+ ser2.name = name
+ assert ser2.name == name
env-llmeval/lib/python3.10/site-packages/pandas/tests/series/methods/test_size.py ADDED
@@ -0,0 +1,22 @@
+ import pytest
+
+ from pandas import Series
+
+
+ @pytest.mark.parametrize(
+ "data, index, expected",
+ [
+ ([1, 2, 3], None, 3),
+ ({"a": 1, "b": 2, "c": 3}, None, 3),
+ ([1, 2, 3], ["x", "y", "z"], 3),
+ ([1, 2, 3, 4, 5], ["x", "y", "z", "w", "n"], 5),
+ ([1, 2, 3], None, 3),
+ ([1, 2, 3], ["x", "y", "z"], 3),
+ ([1, 2, 3, 4], ["x", "y", "z", "w"], 4),
+ ],
+ )
+ def test_series(data, index, expected):
+ # GH#52897
+ ser = Series(data, index=index)
+ assert ser.size == expected
+ assert isinstance(ser.size, int)