applied-ai-018 committed
Commit 11e0ef6 · verified · 1 Parent(s): 55a1ea5

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.

Files changed (50):
  1. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_190_mp_rank_02_optim_states.pt +3 -0
  2. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_53_mp_rank_03_optim_states.pt +3 -0
  3. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_7_mp_rank_01_optim_states.pt +3 -0
  4. ckpts/llama-3b/global_step100/layer_24-model_00-model_states.pt +3 -0
  5. venv/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_dt_accessor.cpython-310.pyc +0 -0
  6. venv/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_str_accessor.cpython-310.pyc +0 -0
  7. venv/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_struct_accessor.cpython-310.pyc +0 -0
  8. venv/lib/python3.10/site-packages/pandas/tests/series/methods/__init__.py +7 -0
  9. venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_combine_first.cpython-310.pyc +0 -0
  10. venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_head_tail.cpython-310.pyc +0 -0
  11. venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_round.cpython-310.pyc +0 -0
  12. venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_size.cpython-310.pyc +0 -0
  13. venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_values.cpython-310.pyc +0 -0
  14. venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_view.cpython-310.pyc +0 -0
  15. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_add_prefix_suffix.py +41 -0
  16. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_align.py +249 -0
  17. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_argsort.py +84 -0
  18. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_asof.py +205 -0
  19. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_astype.py +683 -0
  20. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_case_when.py +148 -0
  21. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_clip.py +146 -0
  22. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_combine.py +17 -0
  23. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_combine_first.py +149 -0
  24. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_compare.py +141 -0
  25. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_convert_dtypes.py +306 -0
  26. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_copy.py +91 -0
  27. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_count.py +34 -0
  28. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_cov_corr.py +185 -0
  29. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_describe.py +203 -0
  30. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_diff.py +88 -0
  31. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop.py +99 -0
  32. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop_duplicates.py +267 -0
  33. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_dropna.py +117 -0
  34. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_dtypes.py +7 -0
  35. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_equals.py +145 -0
  36. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_explode.py +175 -0
  37. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_fillna.py +1155 -0
  38. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_get_numeric_data.py +38 -0
  39. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_infer_objects.py +56 -0
  40. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_info.py +181 -0
  41. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_interpolate.py +868 -0
  42. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_is_monotonic.py +26 -0
  43. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_is_unique.py +40 -0
  44. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_isin.py +252 -0
  45. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_isna.py +35 -0
  46. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_item.py +59 -0
  47. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_map.py +609 -0
  48. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_matmul.py +82 -0
  49. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_nunique.py +24 -0
  50. venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_pct_change.py +128 -0
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_190_mp_rank_02_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3ee245d9b8158e5a18d39ffd9c5038a3e6510f18776cd3ae455eb0e06997a343
+size 41830340
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_53_mp_rank_03_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:90c86106f7427f754687ceebac29d04393f519099b9e9c22b47168ae69b6576f
+size 41830394
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_7_mp_rank_01_optim_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:adc788ab0ddcc8662da5d228bf2721d6c62ab9b5bf4e511e8bd6f805bd88ab2f
+size 41830128
ckpts/llama-3b/global_step100/layer_24-model_00-model_states.pt ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:40e9988a549b5d5ab65068cf501481315a128dc9fc06d1296e99b95527e56a06
+size 116407086
venv/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_dt_accessor.cpython-310.pyc ADDED
Binary file (24.1 kB).
 
venv/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_str_accessor.cpython-310.pyc ADDED
Binary file (1.51 kB).
 
venv/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_struct_accessor.cpython-310.pyc ADDED
Binary file (4.05 kB).
 
venv/lib/python3.10/site-packages/pandas/tests/series/methods/__init__.py ADDED
@@ -0,0 +1,7 @@
+"""
+Test files dedicated to individual (stand-alone) Series methods
+
+Ideally these files/tests should correspond 1-to-1 with tests.frame.methods
+
+These may also present opportunities for sharing/de-duplicating test code.
+"""
venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_combine_first.cpython-310.pyc ADDED
Binary file (5.22 kB).
 
venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_head_tail.cpython-310.pyc ADDED
Binary file (523 Bytes).
 
venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_round.cpython-310.pyc ADDED
Binary file (2.93 kB).
 
venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_size.cpython-310.pyc ADDED
Binary file (800 Bytes).
 
venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_values.cpython-310.pyc ADDED
Binary file (1.2 kB).
 
venv/lib/python3.10/site-packages/pandas/tests/series/methods/__pycache__/test_view.cpython-310.pyc ADDED
Binary file (2.08 kB).
 
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_add_prefix_suffix.py ADDED
@@ -0,0 +1,41 @@
+import pytest
+
+from pandas import Index
+import pandas._testing as tm
+
+
+def test_add_prefix_suffix(string_series):
+    with_prefix = string_series.add_prefix("foo#")
+    expected = Index([f"foo#{c}" for c in string_series.index])
+    tm.assert_index_equal(with_prefix.index, expected)
+
+    with_suffix = string_series.add_suffix("#foo")
+    expected = Index([f"{c}#foo" for c in string_series.index])
+    tm.assert_index_equal(with_suffix.index, expected)
+
+    with_pct_prefix = string_series.add_prefix("%")
+    expected = Index([f"%{c}" for c in string_series.index])
+    tm.assert_index_equal(with_pct_prefix.index, expected)
+
+    with_pct_suffix = string_series.add_suffix("%")
+    expected = Index([f"{c}%" for c in string_series.index])
+    tm.assert_index_equal(with_pct_suffix.index, expected)
+
+
+def test_add_prefix_suffix_axis(string_series):
+    # GH 47819
+    with_prefix = string_series.add_prefix("foo#", axis=0)
+    expected = Index([f"foo#{c}" for c in string_series.index])
+    tm.assert_index_equal(with_prefix.index, expected)
+
+    with_pct_suffix = string_series.add_suffix("#foo", axis=0)
+    expected = Index([f"{c}#foo" for c in string_series.index])
+    tm.assert_index_equal(with_pct_suffix.index, expected)
+
+
+def test_add_prefix_suffix_invalid_axis(string_series):
+    with pytest.raises(ValueError, match="No axis named 1 for object type Series"):
+        string_series.add_prefix("foo#", axis=1)
+
+    with pytest.raises(ValueError, match="No axis named 1 for object type Series"):
+        string_series.add_suffix("foo#", axis=1)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_align.py ADDED
@@ -0,0 +1,249 @@
+from datetime import timezone
+
+import numpy as np
+import pytest
+
+import pandas as pd
+from pandas import (
+    Series,
+    date_range,
+    period_range,
+)
+import pandas._testing as tm
+
+
+@pytest.mark.parametrize(
+    "first_slice,second_slice",
+    [
+        [[2, None], [None, -5]],
+        [[None, 0], [None, -5]],
+        [[None, -5], [None, 0]],
+        [[None, 0], [None, 0]],
+    ],
+)
+@pytest.mark.parametrize("fill", [None, -1])
+def test_align(datetime_series, first_slice, second_slice, join_type, fill):
+    a = datetime_series[slice(*first_slice)]
+    b = datetime_series[slice(*second_slice)]
+
+    aa, ab = a.align(b, join=join_type, fill_value=fill)
+
+    join_index = a.index.join(b.index, how=join_type)
+    if fill is not None:
+        diff_a = aa.index.difference(join_index)
+        diff_b = ab.index.difference(join_index)
+        if len(diff_a) > 0:
+            assert (aa.reindex(diff_a) == fill).all()
+        if len(diff_b) > 0:
+            assert (ab.reindex(diff_b) == fill).all()
+
+    ea = a.reindex(join_index)
+    eb = b.reindex(join_index)
+
+    if fill is not None:
+        ea = ea.fillna(fill)
+        eb = eb.fillna(fill)
+
+    tm.assert_series_equal(aa, ea)
+    tm.assert_series_equal(ab, eb)
+    assert aa.name == "ts"
+    assert ea.name == "ts"
+    assert ab.name == "ts"
+    assert eb.name == "ts"
+
+
+@pytest.mark.parametrize(
+    "first_slice,second_slice",
+    [
+        [[2, None], [None, -5]],
+        [[None, 0], [None, -5]],
+        [[None, -5], [None, 0]],
+        [[None, 0], [None, 0]],
+    ],
+)
+@pytest.mark.parametrize("method", ["pad", "bfill"])
+@pytest.mark.parametrize("limit", [None, 1])
+def test_align_fill_method(
+    datetime_series, first_slice, second_slice, join_type, method, limit
+):
+    a = datetime_series[slice(*first_slice)]
+    b = datetime_series[slice(*second_slice)]
+
+    msg = (
+        "The 'method', 'limit', and 'fill_axis' keywords in Series.align "
+        "are deprecated"
+    )
+    with tm.assert_produces_warning(FutureWarning, match=msg):
+        aa, ab = a.align(b, join=join_type, method=method, limit=limit)
+
+    join_index = a.index.join(b.index, how=join_type)
+    ea = a.reindex(join_index)
+    eb = b.reindex(join_index)
+
+    msg2 = "Series.fillna with 'method' is deprecated"
+    with tm.assert_produces_warning(FutureWarning, match=msg2):
+        ea = ea.fillna(method=method, limit=limit)
+        eb = eb.fillna(method=method, limit=limit)
+
+    tm.assert_series_equal(aa, ea)
+    tm.assert_series_equal(ab, eb)
+
+
+def test_align_nocopy(datetime_series, using_copy_on_write):
+    b = datetime_series[:5].copy()
+
+    # do copy
+    a = datetime_series.copy()
+    ra, _ = a.align(b, join="left")
+    ra[:5] = 5
+    assert not (a[:5] == 5).any()
+
+    # do not copy
+    a = datetime_series.copy()
+    ra, _ = a.align(b, join="left", copy=False)
+    ra[:5] = 5
+    if using_copy_on_write:
+        assert not (a[:5] == 5).any()
+    else:
+        assert (a[:5] == 5).all()
+
+    # do copy
+    a = datetime_series.copy()
+    b = datetime_series[:5].copy()
+    _, rb = a.align(b, join="right")
+    rb[:3] = 5
+    assert not (b[:3] == 5).any()
+
+    # do not copy
+    a = datetime_series.copy()
+    b = datetime_series[:5].copy()
+    _, rb = a.align(b, join="right", copy=False)
+    rb[:2] = 5
+    if using_copy_on_write:
+        assert not (b[:2] == 5).any()
+    else:
+        assert (b[:2] == 5).all()
+
+
+def test_align_same_index(datetime_series, using_copy_on_write):
+    a, b = datetime_series.align(datetime_series, copy=False)
+    if not using_copy_on_write:
+        assert a.index is datetime_series.index
+        assert b.index is datetime_series.index
+    else:
+        assert a.index.is_(datetime_series.index)
+        assert b.index.is_(datetime_series.index)
+
+    a, b = datetime_series.align(datetime_series, copy=True)
+    assert a.index is not datetime_series.index
+    assert b.index is not datetime_series.index
+    assert a.index.is_(datetime_series.index)
+    assert b.index.is_(datetime_series.index)
+
+
+def test_align_multiindex():
+    # GH 10665
+
+    midx = pd.MultiIndex.from_product(
+        [range(2), range(3), range(2)], names=("a", "b", "c")
+    )
+    idx = pd.Index(range(2), name="b")
+    s1 = Series(np.arange(12, dtype="int64"), index=midx)
+    s2 = Series(np.arange(2, dtype="int64"), index=idx)
+
+    # these must be the same results (but flipped)
+    res1l, res1r = s1.align(s2, join="left")
+    res2l, res2r = s2.align(s1, join="right")
+
+    expl = s1
+    tm.assert_series_equal(expl, res1l)
+    tm.assert_series_equal(expl, res2r)
+    expr = Series([0, 0, 1, 1, np.nan, np.nan] * 2, index=midx)
+    tm.assert_series_equal(expr, res1r)
+    tm.assert_series_equal(expr, res2l)
+
+    res1l, res1r = s1.align(s2, join="right")
+    res2l, res2r = s2.align(s1, join="left")
+
+    exp_idx = pd.MultiIndex.from_product(
+        [range(2), range(2), range(2)], names=("a", "b", "c")
+    )
+    expl = Series([0, 1, 2, 3, 6, 7, 8, 9], index=exp_idx)
+    tm.assert_series_equal(expl, res1l)
+    tm.assert_series_equal(expl, res2r)
+    expr = Series([0, 0, 1, 1] * 2, index=exp_idx)
+    tm.assert_series_equal(expr, res1r)
+    tm.assert_series_equal(expr, res2l)
+
+
+@pytest.mark.parametrize("method", ["backfill", "bfill", "pad", "ffill", None])
+def test_align_with_dataframe_method(method):
+    # GH31788
+    ser = Series(range(3), index=range(3))
+    df = pd.DataFrame(0.0, index=range(3), columns=range(3))
+
+    msg = (
+        "The 'method', 'limit', and 'fill_axis' keywords in Series.align "
+        "are deprecated"
+    )
+    with tm.assert_produces_warning(FutureWarning, match=msg):
+        result_ser, result_df = ser.align(df, method=method)
+    tm.assert_series_equal(result_ser, ser)
+    tm.assert_frame_equal(result_df, df)
+
+
+def test_align_dt64tzindex_mismatched_tzs():
+    idx1 = date_range("2001", periods=5, freq="h", tz="US/Eastern")
+    ser = Series(np.random.default_rng(2).standard_normal(len(idx1)), index=idx1)
+    ser_central = ser.tz_convert("US/Central")
+    # different timezones convert to UTC
+
+    new1, new2 = ser.align(ser_central)
+    assert new1.index.tz is timezone.utc
+    assert new2.index.tz is timezone.utc
+
+
+def test_align_periodindex(join_type):
+    rng = period_range("1/1/2000", "1/1/2010", freq="Y")
+    ts = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng)
+
+    # TODO: assert something?
+    ts.align(ts[::2], join=join_type)
+
+
+def test_align_left_fewer_levels():
+    # GH#45224
+    left = Series([2], index=pd.MultiIndex.from_tuples([(1, 3)], names=["a", "c"]))
+    right = Series(
+        [1], index=pd.MultiIndex.from_tuples([(1, 2, 3)], names=["a", "b", "c"])
+    )
+    result_left, result_right = left.align(right)
+
+    expected_right = Series(
+        [1], index=pd.MultiIndex.from_tuples([(1, 3, 2)], names=["a", "c", "b"])
+    )
+    expected_left = Series(
+        [2], index=pd.MultiIndex.from_tuples([(1, 3, 2)], names=["a", "c", "b"])
+    )
+    tm.assert_series_equal(result_left, expected_left)
+    tm.assert_series_equal(result_right, expected_right)
+
+
+def test_align_left_different_named_levels():
+    # GH#45224
+    left = Series(
+        [2], index=pd.MultiIndex.from_tuples([(1, 4, 3)], names=["a", "d", "c"])
+    )
+    right = Series(
+        [1], index=pd.MultiIndex.from_tuples([(1, 2, 3)], names=["a", "b", "c"])
+    )
+    result_left, result_right = left.align(right)
+
+    expected_left = Series(
+        [2], index=pd.MultiIndex.from_tuples([(1, 4, 3, 2)], names=["a", "d", "c", "b"])
+    )
+    expected_right = Series(
+        [1], index=pd.MultiIndex.from_tuples([(1, 4, 3, 2)], names=["a", "d", "c", "b"])
+    )
+    tm.assert_series_equal(result_left, expected_left)
+    tm.assert_series_equal(result_right, expected_right)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_argsort.py ADDED
@@ -0,0 +1,84 @@
+import numpy as np
+import pytest
+
+from pandas import (
+    Series,
+    Timestamp,
+    isna,
+)
+import pandas._testing as tm
+
+
+class TestSeriesArgsort:
+    def test_argsort_axis(self):
+        # GH#54257
+        ser = Series(range(3))
+
+        msg = "No axis named 2 for object type Series"
+        with pytest.raises(ValueError, match=msg):
+            ser.argsort(axis=2)
+
+    def test_argsort_numpy(self, datetime_series):
+        ser = datetime_series
+
+        res = np.argsort(ser).values
+        expected = np.argsort(np.array(ser))
+        tm.assert_numpy_array_equal(res, expected)
+
+        # with missing values
+        ts = ser.copy()
+        ts[::2] = np.nan
+
+        msg = "The behavior of Series.argsort in the presence of NA values"
+        with tm.assert_produces_warning(
+            FutureWarning, match=msg, check_stacklevel=False
+        ):
+            result = np.argsort(ts)[1::2]
+        expected = np.argsort(np.array(ts.dropna()))
+
+        tm.assert_numpy_array_equal(result.values, expected)
+
+    def test_argsort(self, datetime_series):
+        argsorted = datetime_series.argsort()
+        assert issubclass(argsorted.dtype.type, np.integer)
+
+    def test_argsort_dt64(self, unit):
+        # GH#2967 (introduced bug in 0.11-dev I think)
+        ser = Series(
+            [Timestamp(f"201301{i:02d}") for i in range(1, 6)], dtype=f"M8[{unit}]"
+        )
+        assert ser.dtype == f"datetime64[{unit}]"
+        shifted = ser.shift(-1)
+        assert shifted.dtype == f"datetime64[{unit}]"
+        assert isna(shifted[4])
+
+        result = ser.argsort()
+        expected = Series(range(5), dtype=np.intp)
+        tm.assert_series_equal(result, expected)
+
+        msg = "The behavior of Series.argsort in the presence of NA values"
+        with tm.assert_produces_warning(FutureWarning, match=msg):
+            result = shifted.argsort()
+        expected = Series(list(range(4)) + [-1], dtype=np.intp)
+        tm.assert_series_equal(result, expected)
+
+    def test_argsort_stable(self):
+        ser = Series(np.random.default_rng(2).integers(0, 100, size=10000))
+        mindexer = ser.argsort(kind="mergesort")
+        qindexer = ser.argsort()
+
+        mexpected = np.argsort(ser.values, kind="mergesort")
+        qexpected = np.argsort(ser.values, kind="quicksort")
+
+        tm.assert_series_equal(mindexer.astype(np.intp), Series(mexpected))
+        tm.assert_series_equal(qindexer.astype(np.intp), Series(qexpected))
+        msg = (
+            r"ndarray Expected type <class 'numpy\.ndarray'>, "
+            r"found <class 'pandas\.core\.series\.Series'> instead"
+        )
+        with pytest.raises(AssertionError, match=msg):
+            tm.assert_numpy_array_equal(qindexer, mindexer)
+
+    def test_argsort_preserve_name(self, datetime_series):
+        result = datetime_series.argsort()
+        assert result.name == datetime_series.name
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_asof.py ADDED
@@ -0,0 +1,205 @@
+import numpy as np
+import pytest
+
+from pandas._libs.tslibs import IncompatibleFrequency
+
+from pandas import (
+    DatetimeIndex,
+    PeriodIndex,
+    Series,
+    Timestamp,
+    date_range,
+    isna,
+    notna,
+    offsets,
+    period_range,
+)
+import pandas._testing as tm
+
+
+class TestSeriesAsof:
+    def test_asof_nanosecond_index_access(self):
+        ts = Timestamp("20130101").as_unit("ns")._value
+        dti = DatetimeIndex([ts + 50 + i for i in range(100)])
+        ser = Series(np.random.default_rng(2).standard_normal(100), index=dti)
+
+        first_value = ser.asof(ser.index[0])
+
+        # GH#46903 previously incorrectly was "day"
+        assert dti.resolution == "nanosecond"
+
+        # this used to not work bc parsing was done by dateutil that didn't
+        # handle nanoseconds
+        assert first_value == ser["2013-01-01 00:00:00.000000050"]
+
+        expected_ts = np.datetime64("2013-01-01 00:00:00.000000050", "ns")
+        assert first_value == ser[Timestamp(expected_ts)]
+
+    def test_basic(self):
+        # array or list or dates
+        N = 50
+        rng = date_range("1/1/1990", periods=N, freq="53s")
+        ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)
+        ts.iloc[15:30] = np.nan
+        dates = date_range("1/1/1990", periods=N * 3, freq="25s")
+
+        result = ts.asof(dates)
+        assert notna(result).all()
+        lb = ts.index[14]
+        ub = ts.index[30]
+
+        result = ts.asof(list(dates))
+        assert notna(result).all()
+        lb = ts.index[14]
+        ub = ts.index[30]
+
+        mask = (result.index >= lb) & (result.index < ub)
+        rs = result[mask]
+        assert (rs == ts[lb]).all()
+
+        val = result[result.index[result.index >= ub][0]]
+        assert ts[ub] == val
+
+    def test_scalar(self):
+        N = 30
+        rng = date_range("1/1/1990", periods=N, freq="53s")
+        # Explicit cast to float avoid implicit cast when setting nan
+        ts = Series(np.arange(N), index=rng, dtype="float")
+        ts.iloc[5:10] = np.nan
+        ts.iloc[15:20] = np.nan
+
+        val1 = ts.asof(ts.index[7])
+        val2 = ts.asof(ts.index[19])
+
+        assert val1 == ts.iloc[4]
+        assert val2 == ts.iloc[14]
+
+        # accepts strings
+        val1 = ts.asof(str(ts.index[7]))
+        assert val1 == ts.iloc[4]
+
+        # in there
+        result = ts.asof(ts.index[3])
+        assert result == ts.iloc[3]
+
+        # no as of value
+        d = ts.index[0] - offsets.BDay()
+        assert np.isnan(ts.asof(d))
+
+    def test_with_nan(self):
+        # basic asof test
+        rng = date_range("1/1/2000", "1/2/2000", freq="4h")
+        s = Series(np.arange(len(rng)), index=rng)
+        r = s.resample("2h").mean()
+
+        result = r.asof(r.index)
+        expected = Series(
+            [0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6.0],
+            index=date_range("1/1/2000", "1/2/2000", freq="2h"),
+        )
+        tm.assert_series_equal(result, expected)
+
+        r.iloc[3:5] = np.nan
+        result = r.asof(r.index)
+        expected = Series(
+            [0, 0, 1, 1, 1, 1, 3, 3, 4, 4, 5, 5, 6.0],
+            index=date_range("1/1/2000", "1/2/2000", freq="2h"),
+        )
+        tm.assert_series_equal(result, expected)
+
+        r.iloc[-3:] = np.nan
+        result = r.asof(r.index)
+        expected = Series(
+            [0, 0, 1, 1, 1, 1, 3, 3, 4, 4, 4, 4, 4.0],
+            index=date_range("1/1/2000", "1/2/2000", freq="2h"),
+        )
+        tm.assert_series_equal(result, expected)
+
+    def test_periodindex(self):
+        # array or list or dates
+        N = 50
+        rng = period_range("1/1/1990", periods=N, freq="h")
+        ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)
+        ts.iloc[15:30] = np.nan
+        dates = date_range("1/1/1990", periods=N * 3, freq="37min")
+
+        result = ts.asof(dates)
+        assert notna(result).all()
+        lb = ts.index[14]
+        ub = ts.index[30]
+
+        result = ts.asof(list(dates))
+        assert notna(result).all()
+        lb = ts.index[14]
+        ub = ts.index[30]
+
+        pix = PeriodIndex(result.index.values, freq="h")
+        mask = (pix >= lb) & (pix < ub)
+        rs = result[mask]
+        assert (rs == ts[lb]).all()
+
+        ts.iloc[5:10] = np.nan
+        ts.iloc[15:20] = np.nan
+
+        val1 = ts.asof(ts.index[7])
+        val2 = ts.asof(ts.index[19])
+
+        assert val1 == ts.iloc[4]
+        assert val2 == ts.iloc[14]
+
+        # accepts strings
+        val1 = ts.asof(str(ts.index[7]))
+        assert val1 == ts.iloc[4]
+
+        # in there
+        assert ts.asof(ts.index[3]) == ts.iloc[3]
+
+        # no as of value
+        d = ts.index[0].to_timestamp() - offsets.BDay()
+        assert isna(ts.asof(d))
+
+        # Mismatched freq
+        msg = "Input has different freq"
+        with pytest.raises(IncompatibleFrequency, match=msg):
+            ts.asof(rng.asfreq("D"))
+
+    def test_errors(self):
+        s = Series(
+            [1, 2, 3],
+            index=[Timestamp("20130101"), Timestamp("20130103"), Timestamp("20130102")],
+        )
+
+        # non-monotonic
+        assert not s.index.is_monotonic_increasing
+        with pytest.raises(ValueError, match="requires a sorted index"):
+            s.asof(s.index[0])
+
+        # subset with Series
+        N = 10
+        rng = date_range("1/1/1990", periods=N, freq="53s")
+        s = Series(np.random.default_rng(2).standard_normal(N), index=rng)
+        with pytest.raises(ValueError, match="not valid for Series"):
+            s.asof(s.index[0], subset="foo")
+
+    def test_all_nans(self):
+        # GH 15713
+        # series is all nans
+
+        # testing non-default indexes
+        N = 50
+        rng = date_range("1/1/1990", periods=N, freq="53s")
+
+        dates = date_range("1/1/1990", periods=N * 3, freq="25s")
+        result = Series(np.nan, index=rng).asof(dates)
+        expected = Series(np.nan, index=dates)
+        tm.assert_series_equal(result, expected)
+
+        # testing scalar input
+        date = date_range("1/1/1990", periods=N * 3, freq="25s")[0]
+        result = Series(np.nan, index=rng).asof(date)
+        assert isna(result)
+
+        # test name is propagated
+        result = Series(np.nan, index=[1, 2, 3, 4], name="test").asof([4, 5])
+        expected = Series(np.nan, index=[4, 5], name="test")
+        tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_astype.py ADDED
@@ -0,0 +1,683 @@
+from datetime import (
+    datetime,
+    timedelta,
+)
+from importlib import reload
+import string
+import sys
+
+import numpy as np
+import pytest
+
+from pandas._libs.tslibs import iNaT
+import pandas.util._test_decorators as td
+
+from pandas import (
+    NA,
+    Categorical,
+    CategoricalDtype,
+    DatetimeTZDtype,
+    Index,
+    Interval,
+    NaT,
+    Series,
+    Timedelta,
+    Timestamp,
+    cut,
+    date_range,
+    to_datetime,
+)
+import pandas._testing as tm
+
+
+def rand_str(nchars: int) -> str:
+    """
+    Generate one random byte string.
+    """
+    RANDS_CHARS = np.array(
+        list(string.ascii_letters + string.digits), dtype=(np.str_, 1)
+    )
+    return "".join(np.random.default_rng(2).choice(RANDS_CHARS, nchars))
+
+
+class TestAstypeAPI:
+    def test_astype_unitless_dt64_raises(self):
+        # GH#47844
+        ser = Series(["1970-01-01", "1970-01-01", "1970-01-01"], dtype="datetime64[ns]")
+        df = ser.to_frame()
+
+        msg = "Casting to unit-less dtype 'datetime64' is not supported"
+        with pytest.raises(TypeError, match=msg):
+            ser.astype(np.datetime64)
+        with pytest.raises(TypeError, match=msg):
+            df.astype(np.datetime64)
+        with pytest.raises(TypeError, match=msg):
+            ser.astype("datetime64")
+        with pytest.raises(TypeError, match=msg):
+            df.astype("datetime64")
+
+    def test_arg_for_errors_in_astype(self):
+        # see GH#14878
+        ser = Series([1, 2, 3])
+
+        msg = (
+            r"Expected value of kwarg 'errors' to be one of \['raise', "
+            r"'ignore'\]\. Supplied value is 'False'"
+        )
+        with pytest.raises(ValueError, match=msg):
+            ser.astype(np.float64, errors=False)
+
+        ser.astype(np.int8, errors="raise")
+
+    @pytest.mark.parametrize("dtype_class", [dict, Series])
+    def test_astype_dict_like(self, dtype_class):
+        # see GH#7271
+        ser = Series(range(0, 10, 2), name="abc")
+
+        dt1 = dtype_class({"abc": str})
+        result = ser.astype(dt1)
+        expected = Series(["0", "2", "4", "6", "8"], name="abc", dtype=object)
+        tm.assert_series_equal(result, expected)
+
+        dt2 = dtype_class({"abc": "float64"})
+        result = ser.astype(dt2)
+        expected = Series([0.0, 2.0, 4.0, 6.0, 8.0], dtype="float64", name="abc")
+        tm.assert_series_equal(result, expected)
+
+        dt3 = dtype_class({"abc": str, "def": str})
+        msg = (
+            "Only the Series name can be used for the key in Series dtype "
+            r"mappings\."
+        )
+        with pytest.raises(KeyError, match=msg):
+            ser.astype(dt3)
+
+        dt4 = dtype_class({0: str})
+        with pytest.raises(KeyError, match=msg):
+            ser.astype(dt4)
+
+        # GH#16717
+        # if dtypes provided is empty, it should error
+        if dtype_class is Series:
+            dt5 = dtype_class({}, dtype=object)
+        else:
+            dt5 = dtype_class({})
+
+        with pytest.raises(KeyError, match=msg):
+            ser.astype(dt5)
+
+
+class TestAstype:
+    @pytest.mark.parametrize("tz", [None, "UTC", "US/Pacific"])
+    def test_astype_object_to_dt64_non_nano(self, tz):
+        # GH#55756, GH#54620
+        ts = Timestamp("2999-01-01")
+        dtype = "M8[us]"
+        if tz is not None:
+            dtype = f"M8[us, {tz}]"
+        vals = [ts, "2999-01-02 03:04:05.678910", 2500]
+        ser = Series(vals, dtype=object)
+        result = ser.astype(dtype)
+
+        # The 2500 is interpreted as microseconds, consistent with what
+        # we would get if we created DatetimeIndexes from vals[:2] and vals[2:]
+        # and concated the results.
+        pointwise = [
+            vals[0].tz_localize(tz),
+            Timestamp(vals[1], tz=tz),
+            to_datetime(vals[2], unit="us", utc=True).tz_convert(tz),
+        ]
+        exp_vals = [x.as_unit("us").asm8 for x in pointwise]
+        exp_arr = np.array(exp_vals, dtype="M8[us]")
+        expected = Series(exp_arr, dtype="M8[us]")
+        if tz is not None:
+            expected = expected.dt.tz_localize("UTC").dt.tz_convert(tz)
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_mixed_object_to_dt64tz(self):
+        # pre-2.0 this raised ValueError bc of tz mismatch
+        # xref GH#32581
+        ts = Timestamp("2016-01-04 05:06:07", tz="US/Pacific")
+        ts2 = ts.tz_convert("Asia/Tokyo")
+
+        ser = Series([ts, ts2], dtype=object)
+        res = ser.astype("datetime64[ns, Europe/Brussels]")
+        expected = Series(
+            [ts.tz_convert("Europe/Brussels"), ts2.tz_convert("Europe/Brussels")],
+            dtype="datetime64[ns, Europe/Brussels]",
+        )
+        tm.assert_series_equal(res, expected)
+
+    @pytest.mark.parametrize("dtype", np.typecodes["All"])
+    def test_astype_empty_constructor_equality(self, dtype):
+        # see GH#15524
+
+        if dtype not in (
+            "S",
+            "V",  # poor support (if any) currently
+            "M",
+            "m",  # Generic timestamps raise a ValueError. Already tested.
+        ):
+            init_empty = Series([], dtype=dtype)
+            as_type_empty = Series([]).astype(dtype)
+            tm.assert_series_equal(init_empty, as_type_empty)
+
+    @pytest.mark.parametrize("dtype", [str, np.str_])
+    @pytest.mark.parametrize(
+        "series",
+        [
+            Series([string.digits * 10, rand_str(63), rand_str(64), rand_str(1000)]),
+            Series([string.digits * 10, rand_str(63), rand_str(64), np.nan, 1.0]),
+        ],
+    )
+    def test_astype_str_map(self, dtype, series, using_infer_string):
+        # see GH#4405
+        result = series.astype(dtype)
+        expected = series.map(str)
+        if using_infer_string:
+            expected = expected.astype(object)
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_float_to_period(self):
+        result = Series([np.nan]).astype("period[D]")
+        expected = Series([NaT], dtype="period[D]")
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_no_pandas_dtype(self):
+        # https://github.com/pandas-dev/pandas/pull/24866
+        ser = Series([1, 2], dtype="int64")
+        # Don't have NumpyEADtype in the public API, so we use `.array.dtype`,
+        # which is a NumpyEADtype.
+        result = ser.astype(ser.array.dtype)
+        tm.assert_series_equal(result, ser)
+
+    @pytest.mark.parametrize("dtype", [np.datetime64, np.timedelta64])
+    def test_astype_generic_timestamp_no_frequency(self, dtype, request):
+        # see GH#15524, GH#15987
+        data = [1]
+        ser = Series(data)
+
+        if np.dtype(dtype).name not in ["timedelta64", "datetime64"]:
+            mark = pytest.mark.xfail(reason="GH#33890 Is assigned ns unit")
+            request.applymarker(mark)
+
+        msg = (
+            rf"The '{dtype.__name__}' dtype has no unit\. "
+            rf"Please pass in '{dtype.__name__}\[ns\]' instead."
+        )
+        with pytest.raises(ValueError, match=msg):
+            ser.astype(dtype)
+
+    def test_astype_dt64_to_str(self):
+        # GH#10442 : testing astype(str) is correct for Series/DatetimeIndex
+        dti = date_range("2012-01-01", periods=3)
+        result = Series(dti).astype(str)
+        expected = Series(["2012-01-01", "2012-01-02", "2012-01-03"], dtype=object)
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_dt64tz_to_str(self):
+        # GH#10442 : testing astype(str) is correct for Series/DatetimeIndex
+        dti_tz = date_range("2012-01-01", periods=3, tz="US/Eastern")
+        result = Series(dti_tz).astype(str)
+        expected = Series(
+            [
+                "2012-01-01 00:00:00-05:00",
+                "2012-01-02 00:00:00-05:00",
+                "2012-01-03 00:00:00-05:00",
+            ],
+            dtype=object,
+        )
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_datetime(self, unit):
+        ser = Series(iNaT, dtype=f"M8[{unit}]", index=range(5))
+
+        ser = ser.astype("O")
+        assert ser.dtype == np.object_
+
+        ser = Series([datetime(2001, 1, 2, 0, 0)])
+
+        ser = ser.astype("O")
+        assert ser.dtype == np.object_
+
+        ser = Series(
+            [datetime(2001, 1, 2, 0, 0) for i in range(3)], dtype=f"M8[{unit}]"
+        )
+
+        ser[1] = np.nan
+        assert ser.dtype == f"M8[{unit}]"
+
+        ser = ser.astype("O")
+        assert ser.dtype == np.object_
+
+    def test_astype_datetime64tz(self):
+        ser = Series(date_range("20130101", periods=3, tz="US/Eastern"))
+
+        # astype
+        result = ser.astype(object)
+        expected = Series(ser.astype(object), dtype=object)
+        tm.assert_series_equal(result, expected)
+
+        result = Series(ser.values).dt.tz_localize("UTC").dt.tz_convert(ser.dt.tz)
+        tm.assert_series_equal(result, ser)
+
+        # astype - object, preserves on construction
+        result = Series(ser.astype(object))
+        expected = ser.astype(object)
+        tm.assert_series_equal(result, expected)
+
+        # astype - datetime64[ns, tz]
+        msg = "Cannot use .astype to convert from timezone-naive"
+        with pytest.raises(TypeError, match=msg):
+            # dt64->dt64tz astype deprecated
+            Series(ser.values).astype("datetime64[ns, US/Eastern]")
+
+        with pytest.raises(TypeError, match=msg):
+            # dt64->dt64tz astype deprecated
+            Series(ser.values).astype(ser.dtype)
+
+        result = ser.astype("datetime64[ns, CET]")
+        expected = Series(date_range("20130101 06:00:00", periods=3, tz="CET"))
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_str_cast_dt64(self):
+        # see GH#9757
+        ts = Series([Timestamp("2010-01-04 00:00:00")])
+        res = ts.astype(str)
+
+        expected = Series(["2010-01-04"], dtype=object)
+        tm.assert_series_equal(res, expected)
+
+        ts = Series([Timestamp("2010-01-04 00:00:00", tz="US/Eastern")])
+        res = ts.astype(str)
+
+        expected = Series(["2010-01-04 00:00:00-05:00"], dtype=object)
+        tm.assert_series_equal(res, expected)
+
+    def test_astype_str_cast_td64(self):
+        # see GH#9757
+
+        td = Series([Timedelta(1, unit="d")])
+        ser = td.astype(str)
+
+        expected = Series(["1 days"], dtype=object)
+        tm.assert_series_equal(ser, expected)
+
+    def test_dt64_series_astype_object(self):
+        dt64ser = Series(date_range("20130101", periods=3))
+        result = dt64ser.astype(object)
+        assert isinstance(result.iloc[0], datetime)
+        assert result.dtype == np.object_
+
+    def test_td64_series_astype_object(self):
+        tdser = Series(["59 Days", "59 Days", "NaT"], dtype="timedelta64[ns]")
+        result = tdser.astype(object)
+        assert isinstance(result.iloc[0], timedelta)
+        assert result.dtype == np.object_
+
+    @pytest.mark.parametrize(
+        "data, dtype",
+        [
+            (["x", "y", "z"], "string[python]"),
+            pytest.param(
+                ["x", "y", "z"],
+                "string[pyarrow]",
+                marks=td.skip_if_no("pyarrow"),
+            ),
+            (["x", "y", "z"], "category"),
+            (3 * [Timestamp("2020-01-01", tz="UTC")], None),
+            (3 * [Interval(0, 1)], None),
+        ],
+    )
+    @pytest.mark.parametrize("errors", ["raise", "ignore"])
+    def test_astype_ignores_errors_for_extension_dtypes(self, data, dtype, errors):
+        # https://github.com/pandas-dev/pandas/issues/35471
+        ser = Series(data, dtype=dtype)
+        if errors == "ignore":
+            expected = ser
+            result = ser.astype(float, errors="ignore")
+            tm.assert_series_equal(result, expected)
+        else:
+            msg = "(Cannot cast)|(could not convert)"
+            with pytest.raises((ValueError, TypeError), match=msg):
+                ser.astype(float, errors=errors)
+
+    @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64])
+    def test_astype_from_float_to_str(self, dtype):
+        # https://github.com/pandas-dev/pandas/issues/36451
+        ser = Series([0.1], dtype=dtype)
+        result = ser.astype(str)
+        expected = Series(["0.1"], dtype=object)
+        tm.assert_series_equal(result, expected)
+
+    @pytest.mark.parametrize(
+        "value, string_value",
+        [
+            (None, "None"),
+            (np.nan, "nan"),
+            (NA, "<NA>"),
+        ],
+    )
+    def test_astype_to_str_preserves_na(self, value, string_value):
+        # https://github.com/pandas-dev/pandas/issues/36904
+        ser = Series(["a", "b", value], dtype=object)
+        result = ser.astype(str)
+        expected = Series(["a", "b", string_value], dtype=object)
+        tm.assert_series_equal(result, expected)
+
+    @pytest.mark.parametrize("dtype", ["float32", "float64", "int64", "int32"])
+    def test_astype(self, dtype):
+        ser = Series(np.random.default_rng(2).standard_normal(5), name="foo")
+        as_typed = ser.astype(dtype)
+
+        assert as_typed.dtype == dtype
+        assert as_typed.name == ser.name
+
+    @pytest.mark.parametrize("value", [np.nan, np.inf])
+    @pytest.mark.parametrize("dtype", [np.int32, np.int64])
+    def test_astype_cast_nan_inf_int(self, dtype, value):
+        # gh-14265: check NaN and inf raise error when converting to int
+        msg = "Cannot convert non-finite values \\(NA or inf\\) to integer"
+        ser = Series([value])
+
+        with pytest.raises(ValueError, match=msg):
+            ser.astype(dtype)
+
+    @pytest.mark.parametrize("dtype", [int, np.int8, np.int64])
+    def test_astype_cast_object_int_fail(self, dtype):
+        arr = Series(["car", "house", "tree", "1"])
+        msg = r"invalid literal for int\(\) with base 10: 'car'"
+        with pytest.raises(ValueError, match=msg):
+            arr.astype(dtype)
+
+    def test_astype_float_to_uint_negatives_raise(
+        self, float_numpy_dtype, any_unsigned_int_numpy_dtype
+    ):
+        # GH#45151 We don't cast negative numbers to nonsense values
+        # TODO: same for EA float/uint dtypes, signed integers?
+        arr = np.arange(5).astype(float_numpy_dtype) - 3  # includes negatives
+        ser = Series(arr)
+
+        msg = "Cannot losslessly cast from .* to .*"
+        with pytest.raises(ValueError, match=msg):
+            ser.astype(any_unsigned_int_numpy_dtype)
+
+        with pytest.raises(ValueError, match=msg):
+            ser.to_frame().astype(any_unsigned_int_numpy_dtype)
+
+        with pytest.raises(ValueError, match=msg):
+            # We currently catch and re-raise in Index.astype
+            Index(ser).astype(any_unsigned_int_numpy_dtype)
+
+        with pytest.raises(ValueError, match=msg):
+            ser.array.astype(any_unsigned_int_numpy_dtype)
+
+    def test_astype_cast_object_int(self):
+        arr = Series(["1", "2", "3", "4"], dtype=object)
+        result = arr.astype(int)
+
+        tm.assert_series_equal(result, Series(np.arange(1, 5)))
+
+    def test_astype_unicode(self, using_infer_string):
+        # see GH#7758: A bit of magic is required to set
+        # default encoding to utf-8
+        digits = string.digits
+        test_series = [
+            Series([digits * 10, rand_str(63), rand_str(64), rand_str(1000)]),
+            Series(["データーサイエンス、お前はもう死んでいる"]),
+        ]
+
+        former_encoding = None
+
+        if sys.getdefaultencoding() == "utf-8":
+            # GH#45326 as of 2.0 Series.astype matches Index.astype by handling
+            # bytes with obj.decode() instead of str(obj)
+            item = "野菜食べないとやばい"
+            ser = Series([item.encode()])
+            result = ser.astype(np.str_)
+            expected = Series([item], dtype=object)
+            tm.assert_series_equal(result, expected)
+
+        for ser in test_series:
+            res = ser.astype(np.str_)
+            expec = ser.map(str)
+            if using_infer_string:
+                expec = expec.astype(object)
+            tm.assert_series_equal(res, expec)
+
+        # Restore the former encoding
+        if former_encoding is not None and former_encoding != "utf-8":
+            reload(sys)
+            sys.setdefaultencoding(former_encoding)
+
+    def test_astype_bytes(self):
+        # GH#39474
+        result = Series(["foo", "bar", "baz"]).astype(bytes)
+        assert result.dtypes == np.dtype("S3")
+
+    def test_astype_nan_to_bool(self):
+        # GH#43018
+        ser = Series(np.nan, dtype="object")
+        result = ser.astype("bool")
+        expected = Series(True, dtype="bool")
+        tm.assert_series_equal(result, expected)
+
+    @pytest.mark.parametrize(
+        "dtype",
+        tm.ALL_INT_EA_DTYPES + tm.FLOAT_EA_DTYPES,
+    )
+    def test_astype_ea_to_datetimetzdtype(self, dtype):
+        # GH37553
+        ser = Series([4, 0, 9], dtype=dtype)
+        result = ser.astype(DatetimeTZDtype(tz="US/Pacific"))
+
+        expected = Series(
+            {
+                0: Timestamp("1969-12-31 16:00:00.000000004-08:00", tz="US/Pacific"),
+                1: Timestamp("1969-12-31 16:00:00.000000000-08:00", tz="US/Pacific"),
+                2: Timestamp("1969-12-31 16:00:00.000000009-08:00", tz="US/Pacific"),
+            }
+        )
+
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_retain_attrs(self, any_numpy_dtype):
+        # GH#44414
+        ser = Series([0, 1, 2, 3])
+        ser.attrs["Location"] = "Michigan"
+
+        result = ser.astype(any_numpy_dtype).attrs
+        expected = ser.attrs
+
+        tm.assert_dict_equal(expected, result)
+
+
+class TestAstypeString:
+    @pytest.mark.parametrize(
+        "data, dtype",
+        [
+            ([True, NA], "boolean"),
+            (["A", NA], "category"),
+            (["2020-10-10", "2020-10-10"], "datetime64[ns]"),
+            (["2020-10-10", "2020-10-10", NaT], "datetime64[ns]"),
+            (
+                ["2012-01-01 00:00:00-05:00", NaT],
+                "datetime64[ns, US/Eastern]",
+            ),
+            ([1, None], "UInt16"),
+            (["1/1/2021", "2/1/2021"], "period[M]"),
+            (["1/1/2021", "2/1/2021", NaT], "period[M]"),
+            (["1 Day", "59 Days", NaT], "timedelta64[ns]"),
+            # currently no way to parse IntervalArray from a list of strings
+        ],
+    )
+    def test_astype_string_to_extension_dtype_roundtrip(
+        self, data, dtype, request, nullable_string_dtype
+    ):
+        if dtype == "boolean":
+            mark = pytest.mark.xfail(
+                reason="TODO StringArray.astype() with missing values #GH40566"
+            )
+            request.applymarker(mark)
+        # GH-40351
+        ser = Series(data, dtype=dtype)
+
+        # Note: just passing .astype(dtype) fails for dtype="category"
+        # with bc ser.dtype.categories will be object dtype whereas
+        # result.dtype.categories will have string dtype
+        result = ser.astype(nullable_string_dtype).astype(ser.dtype)
+        tm.assert_series_equal(result, ser)
+
+
+class TestAstypeCategorical:
+    def test_astype_categorical_to_other(self):
+        cat = Categorical([f"{i} - {i + 499}" for i in range(0, 10000, 500)])
+        ser = Series(np.random.default_rng(2).integers(0, 10000, 100)).sort_values()
+        ser = cut(ser, range(0, 10500, 500), right=False, labels=cat)
+
+        expected = ser
+        tm.assert_series_equal(ser.astype("category"), expected)
+        tm.assert_series_equal(ser.astype(CategoricalDtype()), expected)
+        msg = r"Cannot cast object|string dtype to float64"
+        with pytest.raises(ValueError, match=msg):
+            ser.astype("float64")
+
+        cat = Series(Categorical(["a", "b", "b", "a", "a", "c", "c", "c"]))
+        exp = Series(["a", "b", "b", "a", "a", "c", "c", "c"], dtype=object)
+        tm.assert_series_equal(cat.astype("str"), exp)
+        s2 = Series(Categorical(["1", "2", "3", "4"]))
+        exp2 = Series([1, 2, 3, 4]).astype("int")
+        tm.assert_series_equal(s2.astype("int"), exp2)
+
+        # object don't sort correctly, so just compare that we have the same
+        # values
+        def cmp(a, b):
+            tm.assert_almost_equal(np.sort(np.unique(a)), np.sort(np.unique(b)))
+
+        expected = Series(np.array(ser.values), name="value_group")
+        cmp(ser.astype("object"), expected)
+        cmp(ser.astype(np.object_), expected)
+
+        # array conversion
+        tm.assert_almost_equal(np.array(ser), np.array(ser.values))
+
+        tm.assert_series_equal(ser.astype("category"), ser)
+        tm.assert_series_equal(ser.astype(CategoricalDtype()), ser)
+
+        roundtrip_expected = ser.cat.set_categories(
+            ser.cat.categories.sort_values()
+        ).cat.remove_unused_categories()
+        result = ser.astype("object").astype("category")
+        tm.assert_series_equal(result, roundtrip_expected)
+        result = ser.astype("object").astype(CategoricalDtype())
+        tm.assert_series_equal(result, roundtrip_expected)
+
+    def test_astype_categorical_invalid_conversions(self):
+        # invalid conversion (these are NOT a dtype)
+        cat = Categorical([f"{i} - {i + 499}" for i in range(0, 10000, 500)])
+        ser = Series(np.random.default_rng(2).integers(0, 10000, 100)).sort_values()
+        ser = cut(ser, range(0, 10500, 500), right=False, labels=cat)
+
+        msg = (
+            "dtype '<class 'pandas.core.arrays.categorical.Categorical'>' "
+            "not understood"
+        )
+        with pytest.raises(TypeError, match=msg):
+            ser.astype(Categorical)
+        with pytest.raises(TypeError, match=msg):
+            ser.astype("object").astype(Categorical)
+
+    def test_astype_categoricaldtype(self):
+        ser = Series(["a", "b", "a"])
+        result = ser.astype(CategoricalDtype(["a", "b"], ordered=True))
+        expected = Series(Categorical(["a", "b", "a"], ordered=True))
+        tm.assert_series_equal(result, expected)
+
+        result = ser.astype(CategoricalDtype(["a", "b"], ordered=False))
+        expected = Series(Categorical(["a", "b", "a"], ordered=False))
+        tm.assert_series_equal(result, expected)
+
+        result = ser.astype(CategoricalDtype(["a", "b", "c"], ordered=False))
+        expected = Series(
+            Categorical(["a", "b", "a"], categories=["a", "b", "c"], ordered=False)
+        )
+        tm.assert_series_equal(result, expected)
+        tm.assert_index_equal(result.cat.categories, Index(["a", "b", "c"]))
+
+    @pytest.mark.parametrize("name", [None, "foo"])
+    @pytest.mark.parametrize("dtype_ordered", [True, False])
+    @pytest.mark.parametrize("series_ordered", [True, False])
+    def test_astype_categorical_to_categorical(
+        self, name, dtype_ordered, series_ordered
+    ):
+        # GH#10696, GH#18593
+        s_data = list("abcaacbab")
+        s_dtype = CategoricalDtype(list("bac"), ordered=series_ordered)
+        ser = Series(s_data, dtype=s_dtype, name=name)
+
+        # unspecified categories
+        dtype = CategoricalDtype(ordered=dtype_ordered)
+        result = ser.astype(dtype)
+        exp_dtype = CategoricalDtype(s_dtype.categories, dtype_ordered)
+        expected = Series(s_data, name=name, dtype=exp_dtype)
+        tm.assert_series_equal(result, expected)
+
+        # different categories
+        dtype = CategoricalDtype(list("adc"), dtype_ordered)
+        result = ser.astype(dtype)
+        expected = Series(s_data, name=name, dtype=dtype)
+        tm.assert_series_equal(result, expected)
+
+        if dtype_ordered is False:
+            # not specifying ordered, so only test once
+            expected = ser
+            result = ser.astype("category")
+            tm.assert_series_equal(result, expected)
+
+    def test_astype_bool_missing_to_categorical(self):
+        # GH-19182
+        ser = Series([True, False, np.nan])
+        assert ser.dtypes == np.object_
+
+        result = ser.astype(CategoricalDtype(categories=[True, False]))
+        expected = Series(Categorical([True, False, np.nan], categories=[True, False]))
+        tm.assert_series_equal(result, expected)
+
+    def test_astype_categories_raises(self):
+        # deprecated GH#17636, removed in GH#27141
+        ser = Series(["a", "b", "a"])
+        with pytest.raises(TypeError, match="got an unexpected"):
+            ser.astype("category", categories=["a", "b"], ordered=True)
+
+    @pytest.mark.parametrize("items", [["a", "b", "c", "a"], [1, 2, 3, 1]])
+    def test_astype_from_categorical(self, items):
+        ser = Series(items)
+        exp = Series(Categorical(items))
+        res = ser.astype("category")
+        tm.assert_series_equal(res, exp)
+
+    def test_astype_from_categorical_with_keywords(self):
+        # with keywords
+        lst = ["a", "b", "c", "a"]
+        ser = Series(lst)
+        exp = Series(Categorical(lst, ordered=True))
+        res = ser.astype(CategoricalDtype(None, ordered=True))
+        tm.assert_series_equal(res, exp)
+
+        exp = Series(Categorical(lst, categories=list("abcdef"), ordered=True))
+        res = ser.astype(CategoricalDtype(list("abcdef"), ordered=True))
+        tm.assert_series_equal(res, exp)
+
+    def test_astype_timedelta64_with_np_nan(self):
+        # GH45798
+        result = Series([Timedelta(1), np.nan], dtype="timedelta64[ns]")
+        expected = Series([Timedelta(1), NaT], dtype="timedelta64[ns]")
+        tm.assert_series_equal(result, expected)
+
+    @td.skip_if_no("pyarrow")
+    def test_astype_int_na_string(self):
+        # GH#57418
+        ser = Series([12, NA], dtype="Int64[pyarrow]")
+        result = ser.astype("string[pyarrow]")
+        expected = Series(["12", NA], dtype="string[pyarrow]")
+        tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_case_when.py ADDED
@@ -0,0 +1,148 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DataFrame,
6
+ Series,
7
+ array as pd_array,
8
+ date_range,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ @pytest.fixture
14
+ def df():
15
+ """
16
+ base dataframe for testing
17
+ """
18
+ return DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
19
+
20
+
21
+ def test_case_when_caselist_is_not_a_list(df):
22
+ """
23
+ Raise ValueError if caselist is not a list.
24
+ """
25
+ msg = "The caselist argument should be a list; "
26
+ msg += "instead got.+"
27
+ with pytest.raises(TypeError, match=msg): # GH39154
28
+ df["a"].case_when(caselist=())
29
+
30
+
31
+ def test_case_when_no_caselist(df):
32
+ """
33
+ Raise ValueError if no caselist is provided.
34
+ """
35
+ msg = "provide at least one boolean condition, "
36
+ msg += "with a corresponding replacement."
37
+ with pytest.raises(ValueError, match=msg): # GH39154
38
+ df["a"].case_when([])
39
+
40
+
41
+ def test_case_when_odd_caselist(df):
42
+ """
43
+ Raise ValueError if no of caselist is odd.
44
+ """
45
+ msg = "Argument 0 must have length 2; "
46
+ msg += "a condition and replacement; instead got length 3."
47
+
48
+ with pytest.raises(ValueError, match=msg):
49
+ df["a"].case_when([(df["a"].eq(1), 1, df.a.gt(1))])
50
+
51
+
52
+ def test_case_when_raise_error_from_mask(df):
53
+ """
54
+ Raise Error from within Series.mask
55
+ """
56
+ msg = "Failed to apply condition0 and replacement0."
57
+ with pytest.raises(ValueError, match=msg):
58
+ df["a"].case_when([(df["a"].eq(1), [1, 2])])
59
+
60
+
61
+ def test_case_when_single_condition(df):
62
+ """
63
+ Test output on a single condition.
64
+ """
65
+ result = Series([np.nan, np.nan, np.nan]).case_when([(df.a.eq(1), 1)])
66
+ expected = Series([1, np.nan, np.nan])
67
+ tm.assert_series_equal(result, expected)
68
+
69
+
70
+ def test_case_when_multiple_conditions(df):
71
+ """
72
+ Test output when booleans are derived from a computation
73
+ """
74
+ result = Series([np.nan, np.nan, np.nan]).case_when(
75
+ [(df.a.eq(1), 1), (Series([False, True, False]), 2)]
76
+ )
77
+ expected = Series([1, 2, np.nan])
78
+ tm.assert_series_equal(result, expected)
79
+
80
+
81
+ def test_case_when_multiple_conditions_replacement_list(df):
82
+ """
83
+ Test output when replacement is a list
84
+ """
85
+ result = Series([np.nan, np.nan, np.nan]).case_when(
86
+ [([True, False, False], 1), (df["a"].gt(1) & df["b"].eq(5), [1, 2, 3])]
87
+ )
88
+ expected = Series([1, 2, np.nan])
89
+ tm.assert_series_equal(result, expected)
90
+
91
+
92
+ def test_case_when_multiple_conditions_replacement_extension_dtype(df):
93
+ """
94
+ Test output when replacement has an extension dtype
95
+ """
96
+ result = Series([np.nan, np.nan, np.nan]).case_when(
97
+ [
98
+ ([True, False, False], 1),
99
+ (df["a"].gt(1) & df["b"].eq(5), pd_array([1, 2, 3], dtype="Int64")),
100
+ ],
101
+ )
102
+ expected = Series([1, 2, np.nan], dtype="Float64")
103
+ tm.assert_series_equal(result, expected)
104
+
105
+
106
+ def test_case_when_multiple_conditions_replacement_series(df):
107
+ """
108
+ Test output when replacement is a Series
109
+ """
110
+ result = Series([np.nan, np.nan, np.nan]).case_when(
111
+ [
112
+ (np.array([True, False, False]), 1),
113
+ (df["a"].gt(1) & df["b"].eq(5), Series([1, 2, 3])),
114
+ ],
115
+ )
116
+ expected = Series([1, 2, np.nan])
117
+ tm.assert_series_equal(result, expected)
118
+
119
+
120
+ def test_case_when_non_range_index():
121
+ """
122
+ Test output if index is not RangeIndex
123
+ """
124
+ rng = np.random.default_rng(seed=123)
125
+ dates = date_range("1/1/2000", periods=8)
126
+ df = DataFrame(
127
+ rng.standard_normal(size=(8, 4)), index=dates, columns=["A", "B", "C", "D"]
128
+ )
129
+ result = Series(5, index=df.index, name="A").case_when([(df.A.gt(0), df.B)])
130
+ expected = df.A.mask(df.A.gt(0), df.B).where(df.A.gt(0), 5)
131
+ tm.assert_series_equal(result, expected)
132
+
133
+
134
+ def test_case_when_callable():
135
+ """
136
+ Test output on a callable
137
+ """
138
+ # https://numpy.org/doc/stable/reference/generated/numpy.piecewise.html
139
+ x = np.linspace(-2.5, 2.5, 6)
140
+ ser = Series(x)
141
+ result = ser.case_when(
142
+ caselist=[
143
+ (lambda df: df < 0, lambda df: -df),
144
+ (lambda df: df >= 0, lambda df: df),
145
+ ]
146
+ )
147
+ expected = np.piecewise(x, [x < 0, x >= 0], [lambda x: -x, lambda x: x])
148
+ tm.assert_series_equal(result, Series(expected))
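For context, a minimal usage sketch of Series.case_when as exercised by the tests above (the method requires pandas >= 2.2; the data below is illustrative):

import pandas as pd

ser = pd.Series([6, 7, 8, 9])
result = ser.case_when(
    caselist=[
        (ser.gt(8), "high"),          # (condition, replacement) pairs
        (ser.between(7, 8), "mid"),
    ]
)
# positions matched by no condition keep their original value
print(result)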
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_clip.py ADDED
@@ -0,0 +1,146 @@
1
+ from datetime import datetime
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas as pd
7
+ from pandas import (
8
+ Series,
9
+ Timestamp,
10
+ isna,
11
+ notna,
12
+ )
13
+ import pandas._testing as tm
14
+
15
+
16
+ class TestSeriesClip:
17
+ def test_clip(self, datetime_series):
18
+ val = datetime_series.median()
19
+
20
+ assert datetime_series.clip(lower=val).min() == val
21
+ assert datetime_series.clip(upper=val).max() == val
22
+
23
+ result = datetime_series.clip(-0.5, 0.5)
24
+ expected = np.clip(datetime_series, -0.5, 0.5)
25
+ tm.assert_series_equal(result, expected)
26
+ assert isinstance(expected, Series)
27
+
28
+ def test_clip_types_and_nulls(self):
29
+ sers = [
30
+ Series([np.nan, 1.0, 2.0, 3.0]),
31
+ Series([None, "a", "b", "c"]),
32
+ Series(pd.to_datetime([np.nan, 1, 2, 3], unit="D")),
33
+ ]
34
+
35
+ for s in sers:
36
+ thresh = s[2]
37
+ lower = s.clip(lower=thresh)
38
+ upper = s.clip(upper=thresh)
39
+ assert lower[notna(lower)].min() == thresh
40
+ assert upper[notna(upper)].max() == thresh
41
+ assert list(isna(s)) == list(isna(lower))
42
+ assert list(isna(s)) == list(isna(upper))
43
+
44
+ def test_series_clipping_with_na_values(self, any_numeric_ea_dtype, nulls_fixture):
45
+ # Ensure that the clipping method can handle NA values without failing
46
+ # GH#40581
47
+
48
+ if nulls_fixture is pd.NaT:
49
+ # constructor will raise, see
50
+ # test_constructor_mismatched_null_nullable_dtype
51
+ pytest.skip("See test_constructor_mismatched_null_nullable_dtype")
52
+
53
+ ser = Series([nulls_fixture, 1.0, 3.0], dtype=any_numeric_ea_dtype)
54
+ s_clipped_upper = ser.clip(upper=2.0)
55
+ s_clipped_lower = ser.clip(lower=2.0)
56
+
57
+ expected_upper = Series([nulls_fixture, 1.0, 2.0], dtype=any_numeric_ea_dtype)
58
+ expected_lower = Series([nulls_fixture, 2.0, 3.0], dtype=any_numeric_ea_dtype)
59
+
60
+ tm.assert_series_equal(s_clipped_upper, expected_upper)
61
+ tm.assert_series_equal(s_clipped_lower, expected_lower)
62
+
63
+ def test_clip_with_na_args(self):
64
+ """Should process np.nan argument as None"""
65
+ # GH#17276
66
+ s = Series([1, 2, 3])
67
+
68
+ tm.assert_series_equal(s.clip(np.nan), Series([1, 2, 3]))
69
+ tm.assert_series_equal(s.clip(upper=np.nan, lower=np.nan), Series([1, 2, 3]))
70
+
71
+ # GH#19992
72
+ msg = "Downcasting behavior in Series and DataFrame methods 'where'"
73
+ # TODO: avoid this warning here? seems like we should never be upcasting
74
+ # in the first place?
75
+ with tm.assert_produces_warning(FutureWarning, match=msg):
76
+ res = s.clip(lower=[0, 4, np.nan])
77
+ tm.assert_series_equal(res, Series([1, 4, 3]))
78
+ with tm.assert_produces_warning(FutureWarning, match=msg):
79
+ res = s.clip(upper=[1, np.nan, 1])
80
+ tm.assert_series_equal(res, Series([1, 2, 1]))
81
+
82
+ # GH#40420
83
+ s = Series([1, 2, 3])
84
+ result = s.clip(0, [np.nan, np.nan, np.nan])
85
+ tm.assert_series_equal(s, result)
86
+
87
+ def test_clip_against_series(self):
88
+ # GH#6966
89
+
90
+ s = Series([1.0, 1.0, 4.0])
91
+
92
+ lower = Series([1.0, 2.0, 3.0])
93
+ upper = Series([1.5, 2.5, 3.5])
94
+
95
+ tm.assert_series_equal(s.clip(lower, upper), Series([1.0, 2.0, 3.5]))
96
+ tm.assert_series_equal(s.clip(1.5, upper), Series([1.5, 1.5, 3.5]))
97
+
98
+ @pytest.mark.parametrize("inplace", [True, False])
99
+ @pytest.mark.parametrize("upper", [[1, 2, 3], np.asarray([1, 2, 3])])
100
+ def test_clip_against_list_like(self, inplace, upper):
101
+ # GH#15390
102
+ original = Series([5, 6, 7])
103
+ result = original.clip(upper=upper, inplace=inplace)
104
+ expected = Series([1, 2, 3])
105
+
106
+ if inplace:
107
+ result = original
108
+ tm.assert_series_equal(result, expected, check_exact=True)
109
+
110
+ def test_clip_with_datetimes(self):
111
+ # GH#11838
112
+ # naive and tz-aware datetimes
113
+
114
+ t = Timestamp("2015-12-01 09:30:30")
115
+ s = Series([Timestamp("2015-12-01 09:30:00"), Timestamp("2015-12-01 09:31:00")])
116
+ result = s.clip(upper=t)
117
+ expected = Series(
118
+ [Timestamp("2015-12-01 09:30:00"), Timestamp("2015-12-01 09:30:30")]
119
+ )
120
+ tm.assert_series_equal(result, expected)
121
+
122
+ t = Timestamp("2015-12-01 09:30:30", tz="US/Eastern")
123
+ s = Series(
124
+ [
125
+ Timestamp("2015-12-01 09:30:00", tz="US/Eastern"),
126
+ Timestamp("2015-12-01 09:31:00", tz="US/Eastern"),
127
+ ]
128
+ )
129
+ result = s.clip(upper=t)
130
+ expected = Series(
131
+ [
132
+ Timestamp("2015-12-01 09:30:00", tz="US/Eastern"),
133
+ Timestamp("2015-12-01 09:30:30", tz="US/Eastern"),
134
+ ]
135
+ )
136
+ tm.assert_series_equal(result, expected)
137
+
138
+ @pytest.mark.parametrize("dtype", [object, "M8[us]"])
139
+ def test_clip_with_timestamps_and_oob_datetimes(self, dtype):
140
+ # GH-42794
141
+ ser = Series([datetime(1, 1, 1), datetime(9999, 9, 9)], dtype=dtype)
142
+
143
+ result = ser.clip(lower=Timestamp.min, upper=Timestamp.max)
144
+ expected = Series([Timestamp.min, Timestamp.max], dtype=dtype)
145
+
146
+ tm.assert_series_equal(result, expected)
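A minimal sketch of the clipping behaviour covered above; the values are illustrative:

import pandas as pd

ser = pd.Series([1, 5, 10])
print(ser.clip(lower=2, upper=8))            # scalar bounds -> 2, 5, 8
print(ser.clip(lower=[0, 6, 0]))             # list-like bounds apply element-wise
print(ser.clip(upper=pd.Series([3, 3, 3])))  # Series bounds align on the index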
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_combine.py ADDED
@@ -0,0 +1,17 @@
1
+ from pandas import Series
2
+ import pandas._testing as tm
3
+
4
+
5
+ class TestCombine:
6
+ def test_combine_scalar(self):
7
+ # GH#21248
8
+ # Note - combine() with another Series is tested elsewhere because
9
+ # it is used when testing operators
10
+ ser = Series([i * 10 for i in range(5)])
11
+ result = ser.combine(3, lambda x, y: x + y)
12
+ expected = Series([i * 10 + 3 for i in range(5)])
13
+ tm.assert_series_equal(result, expected)
14
+
15
+ result = ser.combine(22, lambda x, y: min(x, y))
16
+ expected = Series([min(i * 10, 22) for i in range(5)])
17
+ tm.assert_series_equal(result, expected)
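A short sketch of Series.combine with a scalar, mirroring the test above (data is illustrative):

import pandas as pd

ser = pd.Series([0, 10, 20])
# the function is applied element-wise between each value and the scalar
print(ser.combine(3, lambda x, y: x + y))    # 3, 13, 23
print(ser.combine(15, min))                  # 0, 10, 15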
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_combine_first.py ADDED
@@ -0,0 +1,149 @@
1
+ from datetime import datetime
2
+
3
+ import numpy as np
4
+
5
+ import pandas as pd
6
+ from pandas import (
7
+ Period,
8
+ Series,
9
+ date_range,
10
+ period_range,
11
+ to_datetime,
12
+ )
13
+ import pandas._testing as tm
14
+
15
+
16
+ class TestCombineFirst:
17
+ def test_combine_first_period_datetime(self):
18
+ # GH#3367
19
+ didx = date_range(start="1950-01-31", end="1950-07-31", freq="ME")
20
+ pidx = period_range(start=Period("1950-1"), end=Period("1950-7"), freq="M")
21
+ # check to be consistent with DatetimeIndex
22
+ for idx in [didx, pidx]:
23
+ a = Series([1, np.nan, np.nan, 4, 5, np.nan, 7], index=idx)
24
+ b = Series([9, 9, 9, 9, 9, 9, 9], index=idx)
25
+
26
+ result = a.combine_first(b)
27
+ expected = Series([1, 9, 9, 4, 5, 9, 7], index=idx, dtype=np.float64)
28
+ tm.assert_series_equal(result, expected)
29
+
30
+ def test_combine_first_name(self, datetime_series):
31
+ result = datetime_series.combine_first(datetime_series[:5])
32
+ assert result.name == datetime_series.name
33
+
34
+ def test_combine_first(self):
35
+ values = np.arange(20, dtype=np.float64)
36
+ series = Series(values, index=np.arange(20, dtype=np.int64))
37
+
38
+ series_copy = series * 2
39
+ series_copy[::2] = np.nan
40
+
41
+ # nothing used from the input
42
+ combined = series.combine_first(series_copy)
43
+
44
+ tm.assert_series_equal(combined, series)
45
+
46
+ # Holes filled from input
47
+ combined = series_copy.combine_first(series)
48
+ assert np.isfinite(combined).all()
49
+
50
+ tm.assert_series_equal(combined[::2], series[::2])
51
+ tm.assert_series_equal(combined[1::2], series_copy[1::2])
52
+
53
+ # mixed types
54
+ index = pd.Index([str(i) for i in range(20)])
55
+ floats = Series(np.random.default_rng(2).standard_normal(20), index=index)
56
+ strings = Series([str(i) for i in range(10)], index=index[::2], dtype=object)
57
+
58
+ combined = strings.combine_first(floats)
59
+
60
+ tm.assert_series_equal(strings, combined.loc[index[::2]])
61
+ tm.assert_series_equal(floats[1::2].astype(object), combined.loc[index[1::2]])
62
+
63
+ # corner case
64
+ ser = Series([1.0, 2, 3], index=[0, 1, 2])
65
+ empty = Series([], index=[], dtype=object)
66
+ msg = "The behavior of array concatenation with empty entries is deprecated"
67
+ with tm.assert_produces_warning(FutureWarning, match=msg):
68
+ result = ser.combine_first(empty)
69
+ ser.index = ser.index.astype("O")
70
+ tm.assert_series_equal(ser, result)
71
+
72
+ def test_combine_first_dt64(self, unit):
73
+ s0 = to_datetime(Series(["2010", np.nan])).dt.as_unit(unit)
74
+ s1 = to_datetime(Series([np.nan, "2011"])).dt.as_unit(unit)
75
+ rs = s0.combine_first(s1)
76
+ xp = to_datetime(Series(["2010", "2011"])).dt.as_unit(unit)
77
+ tm.assert_series_equal(rs, xp)
78
+
79
+ s0 = to_datetime(Series(["2010", np.nan])).dt.as_unit(unit)
80
+ s1 = Series([np.nan, "2011"])
81
+ rs = s0.combine_first(s1)
82
+
83
+ xp = Series([datetime(2010, 1, 1), "2011"], dtype="datetime64[ns]")
84
+
85
+ tm.assert_series_equal(rs, xp)
86
+
87
+ def test_combine_first_dt_tz_values(self, tz_naive_fixture):
88
+ ser1 = Series(
89
+ pd.DatetimeIndex(["20150101", "20150102", "20150103"], tz=tz_naive_fixture),
90
+ name="ser1",
91
+ )
92
+ ser2 = Series(
93
+ pd.DatetimeIndex(["20160514", "20160515", "20160516"], tz=tz_naive_fixture),
94
+ index=[2, 3, 4],
95
+ name="ser2",
96
+ )
97
+ result = ser1.combine_first(ser2)
98
+ exp_vals = pd.DatetimeIndex(
99
+ ["20150101", "20150102", "20150103", "20160515", "20160516"],
100
+ tz=tz_naive_fixture,
101
+ )
102
+ exp = Series(exp_vals, name="ser1")
103
+ tm.assert_series_equal(exp, result)
104
+
105
+ def test_combine_first_timezone_series_with_empty_series(self):
106
+ # GH 41800
107
+ time_index = date_range(
108
+ datetime(2021, 1, 1, 1),
109
+ datetime(2021, 1, 1, 10),
110
+ freq="h",
111
+ tz="Europe/Rome",
112
+ )
113
+ s1 = Series(range(10), index=time_index)
114
+ s2 = Series(index=time_index)
115
+ msg = "The behavior of array concatenation with empty entries is deprecated"
116
+ with tm.assert_produces_warning(FutureWarning, match=msg):
117
+ result = s1.combine_first(s2)
118
+ tm.assert_series_equal(result, s1)
119
+
120
+ def test_combine_first_preserves_dtype(self):
121
+ # GH51764
122
+ s1 = Series([1666880195890293744, 1666880195890293837])
123
+ s2 = Series([1, 2, 3])
124
+ result = s1.combine_first(s2)
125
+ expected = Series([1666880195890293744, 1666880195890293837, 3])
126
+ tm.assert_series_equal(result, expected)
127
+
128
+ def test_combine_mixed_timezone(self):
129
+ # GH 26283
130
+ uniform_tz = Series({pd.Timestamp("2019-05-01", tz="UTC"): 1.0})
131
+ multi_tz = Series(
132
+ {
133
+ pd.Timestamp("2019-05-01 01:00:00+0100", tz="Europe/London"): 2.0,
134
+ pd.Timestamp("2019-05-02", tz="UTC"): 3.0,
135
+ }
136
+ )
137
+
138
+ result = uniform_tz.combine_first(multi_tz)
139
+ expected = Series(
140
+ [1.0, 3.0],
141
+ index=pd.Index(
142
+ [
143
+ pd.Timestamp("2019-05-01 00:00:00+00:00", tz="UTC"),
144
+ pd.Timestamp("2019-05-02 00:00:00+00:00", tz="UTC"),
145
+ ],
146
+ dtype="object",
147
+ ),
148
+ )
149
+ tm.assert_series_equal(result, expected)
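A minimal sketch of combine_first filling missing values from another Series (illustrative data):

import numpy as np
import pandas as pd

s1 = pd.Series([1.0, np.nan, 3.0])
s2 = pd.Series([9.0, 9.0, 9.0, 9.0])
# NaNs in s1 are filled from s2; the result index is the union of both indexes
print(s1.combine_first(s2))    # 1.0, 9.0, 3.0, 9.0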
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_compare.py ADDED
@@ -0,0 +1,141 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+
7
+
8
+ @pytest.mark.parametrize("align_axis", [0, 1, "index", "columns"])
9
+ def test_compare_axis(align_axis):
10
+ # GH#30429
11
+ s1 = pd.Series(["a", "b", "c"])
12
+ s2 = pd.Series(["x", "b", "z"])
13
+
14
+ result = s1.compare(s2, align_axis=align_axis)
15
+
16
+ if align_axis in (1, "columns"):
17
+ indices = pd.Index([0, 2])
18
+ columns = pd.Index(["self", "other"])
19
+ expected = pd.DataFrame(
20
+ [["a", "x"], ["c", "z"]], index=indices, columns=columns
21
+ )
22
+ tm.assert_frame_equal(result, expected)
23
+ else:
24
+ indices = pd.MultiIndex.from_product([[0, 2], ["self", "other"]])
25
+ expected = pd.Series(["a", "x", "c", "z"], index=indices)
26
+ tm.assert_series_equal(result, expected)
27
+
28
+
29
+ @pytest.mark.parametrize(
30
+ "keep_shape, keep_equal",
31
+ [
32
+ (True, False),
33
+ (False, True),
34
+ (True, True),
35
+ # False, False case is already covered in test_compare_axis
36
+ ],
37
+ )
38
+ def test_compare_various_formats(keep_shape, keep_equal):
39
+ s1 = pd.Series(["a", "b", "c"])
40
+ s2 = pd.Series(["x", "b", "z"])
41
+
42
+ result = s1.compare(s2, keep_shape=keep_shape, keep_equal=keep_equal)
43
+
44
+ if keep_shape:
45
+ indices = pd.Index([0, 1, 2])
46
+ columns = pd.Index(["self", "other"])
47
+ if keep_equal:
48
+ expected = pd.DataFrame(
49
+ [["a", "x"], ["b", "b"], ["c", "z"]], index=indices, columns=columns
50
+ )
51
+ else:
52
+ expected = pd.DataFrame(
53
+ [["a", "x"], [np.nan, np.nan], ["c", "z"]],
54
+ index=indices,
55
+ columns=columns,
56
+ )
57
+ else:
58
+ indices = pd.Index([0, 2])
59
+ columns = pd.Index(["self", "other"])
60
+ expected = pd.DataFrame(
61
+ [["a", "x"], ["c", "z"]], index=indices, columns=columns
62
+ )
63
+ tm.assert_frame_equal(result, expected)
64
+
65
+
66
+ def test_compare_with_equal_nulls():
67
+ # We want to make sure two NaNs are considered the same
68
+ # and dropped where applicable
69
+ s1 = pd.Series(["a", "b", np.nan])
70
+ s2 = pd.Series(["x", "b", np.nan])
71
+
72
+ result = s1.compare(s2)
73
+ expected = pd.DataFrame([["a", "x"]], columns=["self", "other"])
74
+ tm.assert_frame_equal(result, expected)
75
+
76
+
77
+ def test_compare_with_non_equal_nulls():
78
+ # We want to make sure the relevant NaNs do not get dropped
79
+ s1 = pd.Series(["a", "b", "c"])
80
+ s2 = pd.Series(["x", "b", np.nan])
81
+
82
+ result = s1.compare(s2, align_axis=0)
83
+
84
+ indices = pd.MultiIndex.from_product([[0, 2], ["self", "other"]])
85
+ expected = pd.Series(["a", "x", "c", np.nan], index=indices)
86
+ tm.assert_series_equal(result, expected)
87
+
88
+
89
+ def test_compare_multi_index():
90
+ index = pd.MultiIndex.from_arrays([[0, 0, 1], [0, 1, 2]])
91
+ s1 = pd.Series(["a", "b", "c"], index=index)
92
+ s2 = pd.Series(["x", "b", "z"], index=index)
93
+
94
+ result = s1.compare(s2, align_axis=0)
95
+
96
+ indices = pd.MultiIndex.from_arrays(
97
+ [[0, 0, 1, 1], [0, 0, 2, 2], ["self", "other", "self", "other"]]
98
+ )
99
+ expected = pd.Series(["a", "x", "c", "z"], index=indices)
100
+ tm.assert_series_equal(result, expected)
101
+
102
+
103
+ def test_compare_unaligned_objects():
104
+ # test Series with different indices
105
+ msg = "Can only compare identically-labeled Series objects"
106
+ with pytest.raises(ValueError, match=msg):
107
+ ser1 = pd.Series([1, 2, 3], index=["a", "b", "c"])
108
+ ser2 = pd.Series([1, 2, 3], index=["a", "b", "d"])
109
+ ser1.compare(ser2)
110
+
111
+ # test Series with different lengths
112
+ msg = "Can only compare identically-labeled Series objects"
113
+ with pytest.raises(ValueError, match=msg):
114
+ ser1 = pd.Series([1, 2, 3])
115
+ ser2 = pd.Series([1, 2, 3, 4])
116
+ ser1.compare(ser2)
117
+
118
+
119
+ def test_compare_datetime64_and_string():
120
+ # Issue https://github.com/pandas-dev/pandas/issues/45506
121
+ # Catch OverflowError when comparing datetime64 and string
122
+ data = [
123
+ {"a": "2015-07-01", "b": "08335394550"},
124
+ {"a": "2015-07-02", "b": "+49 (0) 0345 300033"},
125
+ {"a": "2015-07-03", "b": "+49(0)2598 04457"},
126
+ {"a": "2015-07-04", "b": "0741470003"},
127
+ {"a": "2015-07-05", "b": "04181 83668"},
128
+ ]
129
+ dtypes = {"a": "datetime64[ns]", "b": "string"}
130
+ df = pd.DataFrame(data=data).astype(dtypes)
131
+
132
+ result_eq1 = df["a"].eq(df["b"])
133
+ result_eq2 = df["a"] == df["b"]
134
+ result_neq = df["a"] != df["b"]
135
+
136
+ expected_eq = pd.Series([False] * 5) # For .eq and ==
137
+ expected_neq = pd.Series([True] * 5) # For !=
138
+
139
+ tm.assert_series_equal(result_eq1, expected_eq)
140
+ tm.assert_series_equal(result_eq2, expected_eq)
141
+ tm.assert_series_equal(result_neq, expected_neq)
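A minimal sketch of Series.compare, which the tests above exercise (illustrative data):

import pandas as pd

s1 = pd.Series(["a", "b", "c"])
s2 = pd.Series(["x", "b", "z"])
print(s1.compare(s2))                   # only differing rows, 'self'/'other' columns
print(s1.compare(s2, align_axis=0))     # stacked into a MultiIndex instead
print(s1.compare(s2, keep_shape=True))  # keep all rows; equal entries become NaN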
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_convert_dtypes.py ADDED
@@ -0,0 +1,306 @@
1
+ from itertools import product
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas._libs import lib
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+
11
+ # Each test case consists of a tuple with the data and dtype to create the
12
+ # test Series, the default dtype for the expected result (which is valid
13
+ # for most cases), and the specific cases where the result deviates from
14
+ # this default. Those overrides are defined as a dict with (keyword, val) as
15
+ # dictionary key. In case of multiple items, the last override takes precedence.
16
+
17
+
18
+ @pytest.fixture(
19
+ params=[
20
+ (
21
+ # data
22
+ [1, 2, 3],
23
+ # original dtype
24
+ np.dtype("int32"),
25
+ # default expected dtype
26
+ "Int32",
27
+ # exceptions on expected dtype
28
+ {("convert_integer", False): np.dtype("int32")},
29
+ ),
30
+ (
31
+ [1, 2, 3],
32
+ np.dtype("int64"),
33
+ "Int64",
34
+ {("convert_integer", False): np.dtype("int64")},
35
+ ),
36
+ (
37
+ ["x", "y", "z"],
38
+ np.dtype("O"),
39
+ pd.StringDtype(),
40
+ {("convert_string", False): np.dtype("O")},
41
+ ),
42
+ (
43
+ [True, False, np.nan],
44
+ np.dtype("O"),
45
+ pd.BooleanDtype(),
46
+ {("convert_boolean", False): np.dtype("O")},
47
+ ),
48
+ (
49
+ ["h", "i", np.nan],
50
+ np.dtype("O"),
51
+ pd.StringDtype(),
52
+ {("convert_string", False): np.dtype("O")},
53
+ ),
54
+ ( # GH32117
55
+ ["h", "i", 1],
56
+ np.dtype("O"),
57
+ np.dtype("O"),
58
+ {},
59
+ ),
60
+ (
61
+ [10, np.nan, 20],
62
+ np.dtype("float"),
63
+ "Int64",
64
+ {
65
+ ("convert_integer", False, "convert_floating", True): "Float64",
66
+ ("convert_integer", False, "convert_floating", False): np.dtype(
67
+ "float"
68
+ ),
69
+ },
70
+ ),
71
+ (
72
+ [np.nan, 100.5, 200],
73
+ np.dtype("float"),
74
+ "Float64",
75
+ {("convert_floating", False): np.dtype("float")},
76
+ ),
77
+ (
78
+ [3, 4, 5],
79
+ "Int8",
80
+ "Int8",
81
+ {},
82
+ ),
83
+ (
84
+ [[1, 2], [3, 4], [5]],
85
+ None,
86
+ np.dtype("O"),
87
+ {},
88
+ ),
89
+ (
90
+ [4, 5, 6],
91
+ np.dtype("uint32"),
92
+ "UInt32",
93
+ {("convert_integer", False): np.dtype("uint32")},
94
+ ),
95
+ (
96
+ [-10, 12, 13],
97
+ np.dtype("i1"),
98
+ "Int8",
99
+ {("convert_integer", False): np.dtype("i1")},
100
+ ),
101
+ (
102
+ [1.2, 1.3],
103
+ np.dtype("float32"),
104
+ "Float32",
105
+ {("convert_floating", False): np.dtype("float32")},
106
+ ),
107
+ (
108
+ [1, 2.0],
109
+ object,
110
+ "Int64",
111
+ {
112
+ ("convert_integer", False): "Float64",
113
+ ("convert_integer", False, "convert_floating", False): np.dtype(
114
+ "float"
115
+ ),
116
+ ("infer_objects", False): np.dtype("object"),
117
+ },
118
+ ),
119
+ (
120
+ [1, 2.5],
121
+ object,
122
+ "Float64",
123
+ {
124
+ ("convert_floating", False): np.dtype("float"),
125
+ ("infer_objects", False): np.dtype("object"),
126
+ },
127
+ ),
128
+ (["a", "b"], pd.CategoricalDtype(), pd.CategoricalDtype(), {}),
129
+ (
130
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("s"),
131
+ pd.DatetimeTZDtype(tz="UTC"),
132
+ pd.DatetimeTZDtype(tz="UTC"),
133
+ {},
134
+ ),
135
+ (
136
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ms"),
137
+ pd.DatetimeTZDtype(tz="UTC"),
138
+ pd.DatetimeTZDtype(tz="UTC"),
139
+ {},
140
+ ),
141
+ (
142
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("us"),
143
+ pd.DatetimeTZDtype(tz="UTC"),
144
+ pd.DatetimeTZDtype(tz="UTC"),
145
+ {},
146
+ ),
147
+ (
148
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ns"),
149
+ pd.DatetimeTZDtype(tz="UTC"),
150
+ pd.DatetimeTZDtype(tz="UTC"),
151
+ {},
152
+ ),
153
+ (
154
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ns"),
155
+ "datetime64[ns]",
156
+ np.dtype("datetime64[ns]"),
157
+ {},
158
+ ),
159
+ (
160
+ pd.to_datetime(["2020-01-14 10:00", "2020-01-15 11:11"]).as_unit("ns"),
161
+ object,
162
+ np.dtype("datetime64[ns]"),
163
+ {("infer_objects", False): np.dtype("object")},
164
+ ),
165
+ (
166
+ pd.period_range("1/1/2011", freq="M", periods=3),
167
+ None,
168
+ pd.PeriodDtype("M"),
169
+ {},
170
+ ),
171
+ (
172
+ pd.arrays.IntervalArray([pd.Interval(0, 1), pd.Interval(1, 5)]),
173
+ None,
174
+ pd.IntervalDtype("int64", "right"),
175
+ {},
176
+ ),
177
+ ]
178
+ )
179
+ def test_cases(request):
180
+ return request.param
181
+
182
+
183
+ class TestSeriesConvertDtypes:
184
+ @pytest.mark.parametrize("params", product(*[(True, False)] * 5))
185
+ def test_convert_dtypes(
186
+ self,
187
+ test_cases,
188
+ params,
189
+ using_infer_string,
190
+ ):
191
+ data, maindtype, expected_default, expected_other = test_cases
192
+ if (
193
+ hasattr(data, "dtype")
194
+ and lib.is_np_dtype(data.dtype, "M")
195
+ and isinstance(maindtype, pd.DatetimeTZDtype)
196
+ ):
197
+ # this astype is deprecated in favor of tz_localize
198
+ msg = "Cannot use .astype to convert from timezone-naive dtype"
199
+ with pytest.raises(TypeError, match=msg):
200
+ pd.Series(data, dtype=maindtype)
201
+ return
202
+
203
+ if maindtype is not None:
204
+ series = pd.Series(data, dtype=maindtype)
205
+ else:
206
+ series = pd.Series(data)
207
+
208
+ result = series.convert_dtypes(*params)
209
+
210
+ param_names = [
211
+ "infer_objects",
212
+ "convert_string",
213
+ "convert_integer",
214
+ "convert_boolean",
215
+ "convert_floating",
216
+ ]
217
+ params_dict = dict(zip(param_names, params))
218
+
219
+ expected_dtype = expected_default
220
+ for spec, dtype in expected_other.items():
221
+ if all(params_dict[key] is val for key, val in zip(spec[::2], spec[1::2])):
222
+ expected_dtype = dtype
223
+ if (
224
+ using_infer_string
225
+ and expected_default == "string"
226
+ and expected_dtype == object
227
+ and params[0]
228
+ and not params[1]
229
+ ):
230
+ # If we would convert with convert strings then infer_objects converts
231
+ # with the option
232
+ expected_dtype = "string[pyarrow_numpy]"
233
+
234
+ expected = pd.Series(data, dtype=expected_dtype)
235
+ tm.assert_series_equal(result, expected)
236
+
237
+ # Test that it is a copy
238
+ copy = series.copy(deep=True)
239
+
240
+ if result.notna().sum() > 0 and result.dtype in ["interval[int64, right]"]:
241
+ with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
242
+ result[result.notna()] = np.nan
243
+ else:
244
+ result[result.notna()] = np.nan
245
+
246
+ # Make sure original not changed
247
+ tm.assert_series_equal(series, copy)
248
+
249
+ def test_convert_string_dtype(self, nullable_string_dtype):
250
+ # https://github.com/pandas-dev/pandas/issues/31731 -> converting columns
251
+ # that are already string dtype
252
+ df = pd.DataFrame(
253
+ {"A": ["a", "b", pd.NA], "B": ["ä", "ö", "ü"]}, dtype=nullable_string_dtype
254
+ )
255
+ result = df.convert_dtypes()
256
+ tm.assert_frame_equal(df, result)
257
+
258
+ def test_convert_bool_dtype(self):
259
+ # GH32287
260
+ df = pd.DataFrame({"A": pd.array([True])})
261
+ tm.assert_frame_equal(df, df.convert_dtypes())
262
+
263
+ def test_convert_byte_string_dtype(self):
264
+ # GH-43183
265
+ byte_str = b"binary-string"
266
+
267
+ df = pd.DataFrame(data={"A": byte_str}, index=[0])
268
+ result = df.convert_dtypes()
269
+ expected = df
270
+ tm.assert_frame_equal(result, expected)
271
+
272
+ @pytest.mark.parametrize(
273
+ "infer_objects, dtype", [(True, "Int64"), (False, "object")]
274
+ )
275
+ def test_convert_dtype_object_with_na(self, infer_objects, dtype):
276
+ # GH#48791
277
+ ser = pd.Series([1, pd.NA])
278
+ result = ser.convert_dtypes(infer_objects=infer_objects)
279
+ expected = pd.Series([1, pd.NA], dtype=dtype)
280
+ tm.assert_series_equal(result, expected)
281
+
282
+ @pytest.mark.parametrize(
283
+ "infer_objects, dtype", [(True, "Float64"), (False, "object")]
284
+ )
285
+ def test_convert_dtype_object_with_na_float(self, infer_objects, dtype):
286
+ # GH#48791
287
+ ser = pd.Series([1.5, pd.NA])
288
+ result = ser.convert_dtypes(infer_objects=infer_objects)
289
+ expected = pd.Series([1.5, pd.NA], dtype=dtype)
290
+ tm.assert_series_equal(result, expected)
291
+
292
+ def test_convert_dtypes_pyarrow_to_np_nullable(self):
293
+ # GH 53648
294
+ pytest.importorskip("pyarrow")
295
+ ser = pd.Series(range(2), dtype="int32[pyarrow]")
296
+ result = ser.convert_dtypes(dtype_backend="numpy_nullable")
297
+ expected = pd.Series(range(2), dtype="Int32")
298
+ tm.assert_series_equal(result, expected)
299
+
300
+ def test_convert_dtypes_pyarrow_null(self):
301
+ # GH#55346
302
+ pa = pytest.importorskip("pyarrow")
303
+ ser = pd.Series([None, None])
304
+ result = ser.convert_dtypes(dtype_backend="pyarrow")
305
+ expected = pd.Series([None, None], dtype=pd.ArrowDtype(pa.null()))
306
+ tm.assert_series_equal(result, expected)
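A minimal sketch of convert_dtypes picking nullable extension dtypes, matching one of the parametrized cases above (illustrative data):

import numpy as np
import pandas as pd

ser = pd.Series([10, np.nan, 20], dtype="float64")
print(ser.convert_dtypes())                       # -> Int64, NaN becomes <NA>
print(ser.convert_dtypes(convert_integer=False))  # -> Float64 instead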
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_copy.py ADDED
@@ -0,0 +1,91 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ Timestamp,
7
+ )
8
+ import pandas._testing as tm
9
+
10
+
11
+ class TestCopy:
12
+ @pytest.mark.parametrize("deep", ["default", None, False, True])
13
+ def test_copy(self, deep, using_copy_on_write, warn_copy_on_write):
14
+ ser = Series(np.arange(10), dtype="float64")
15
+
16
+ # default deep is True
17
+ if deep == "default":
18
+ ser2 = ser.copy()
19
+ else:
20
+ ser2 = ser.copy(deep=deep)
21
+
22
+ if using_copy_on_write:
23
+ # INFO(CoW) a shallow copy doesn't yet copy the data
24
+ # but parent will not be modified (CoW)
25
+ if deep is None or deep is False:
26
+ assert np.may_share_memory(ser.values, ser2.values)
27
+ else:
28
+ assert not np.may_share_memory(ser.values, ser2.values)
29
+
30
+ with tm.assert_cow_warning(warn_copy_on_write and deep is False):
31
+ ser2[::2] = np.nan
32
+
33
+ if deep is not False or using_copy_on_write:
34
+ # Did not modify original Series
35
+ assert np.isnan(ser2[0])
36
+ assert not np.isnan(ser[0])
37
+ else:
38
+ # we DID modify the original Series
39
+ assert np.isnan(ser2[0])
40
+ assert np.isnan(ser[0])
41
+
42
+ @pytest.mark.filterwarnings("ignore:Setting a value on a view:FutureWarning")
43
+ @pytest.mark.parametrize("deep", ["default", None, False, True])
44
+ def test_copy_tzaware(self, deep, using_copy_on_write):
45
+ # GH#11794
46
+ # copy of tz-aware
47
+ expected = Series([Timestamp("2012/01/01", tz="UTC")])
48
+ expected2 = Series([Timestamp("1999/01/01", tz="UTC")])
49
+
50
+ ser = Series([Timestamp("2012/01/01", tz="UTC")])
51
+
52
+ if deep == "default":
53
+ ser2 = ser.copy()
54
+ else:
55
+ ser2 = ser.copy(deep=deep)
56
+
57
+ if using_copy_on_write:
58
+ # INFO(CoW) a shallow copy doesn't yet copy the data
59
+ # but parent will not be modified (CoW)
60
+ if deep is None or deep is False:
61
+ assert np.may_share_memory(ser.values, ser2.values)
62
+ else:
63
+ assert not np.may_share_memory(ser.values, ser2.values)
64
+
65
+ ser2[0] = Timestamp("1999/01/01", tz="UTC")
66
+
67
+ # default deep is True
68
+ if deep is not False or using_copy_on_write:
69
+ # Did not modify original Series
70
+ tm.assert_series_equal(ser2, expected2)
71
+ tm.assert_series_equal(ser, expected)
72
+ else:
73
+ # we DID modify the original Series
74
+ tm.assert_series_equal(ser2, expected2)
75
+ tm.assert_series_equal(ser, expected2)
76
+
77
+ def test_copy_name(self, datetime_series):
78
+ result = datetime_series.copy()
79
+ assert result.name == datetime_series.name
80
+
81
+ def test_copy_index_name_checking(self, datetime_series):
82
+ # don't want to be able to modify the index stored elsewhere after
83
+ # making a copy
84
+
85
+ datetime_series.index.name = None
86
+ assert datetime_series.index.name is None
87
+ assert datetime_series is datetime_series
88
+
89
+ cp = datetime_series.copy()
90
+ cp.index.name = "foo"
91
+ assert datetime_series.index.name is None
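A minimal sketch of deep versus shallow copies as tested above; the exact sharing behaviour of a shallow copy additionally depends on the copy-on-write setting:

import numpy as np
import pandas as pd

ser = pd.Series(np.arange(3), dtype="float64")
deep = ser.copy()                # deep=True is the default
deep.iloc[0] = 99.0              # never visible in the original
shallow = ser.copy(deep=False)   # may share the underlying data with ser
print(ser)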
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_count.py ADDED
@@ -0,0 +1,34 @@
1
+ import numpy as np
2
+
3
+ import pandas as pd
4
+ from pandas import (
5
+ Categorical,
6
+ Series,
7
+ )
8
+ import pandas._testing as tm
9
+
10
+
11
+ class TestSeriesCount:
12
+ def test_count(self, datetime_series):
13
+ assert datetime_series.count() == len(datetime_series)
14
+
15
+ datetime_series[::2] = np.nan
16
+
17
+ assert datetime_series.count() == np.isfinite(datetime_series).sum()
18
+
19
+ def test_count_inf_as_na(self):
20
+ # GH#29478
21
+ ser = Series([pd.Timestamp("1990/1/1")])
22
+ msg = "use_inf_as_na option is deprecated"
23
+ with tm.assert_produces_warning(FutureWarning, match=msg):
24
+ with pd.option_context("use_inf_as_na", True):
25
+ assert ser.count() == 1
26
+
27
+ def test_count_categorical(self):
28
+ ser = Series(
29
+ Categorical(
30
+ [np.nan, 1, 2, np.nan], categories=[5, 4, 3, 2, 1], ordered=True
31
+ )
32
+ )
33
+ result = ser.count()
34
+ assert result == 2
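A minimal sketch of Series.count, which excludes missing values (illustrative data):

import numpy as np
import pandas as pd

ser = pd.Series([1.0, np.nan, 3.0])
print(ser.count())    # 2, since NaN is not counted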
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_cov_corr.py ADDED
@@ -0,0 +1,185 @@
1
+ import math
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas as pd
7
+ from pandas import (
8
+ Series,
9
+ date_range,
10
+ isna,
11
+ )
12
+ import pandas._testing as tm
13
+
14
+
15
+ class TestSeriesCov:
16
+ def test_cov(self, datetime_series):
17
+ # full overlap
18
+ tm.assert_almost_equal(
19
+ datetime_series.cov(datetime_series), datetime_series.std() ** 2
20
+ )
21
+
22
+ # partial overlap
23
+ tm.assert_almost_equal(
24
+ datetime_series[:15].cov(datetime_series[5:]),
25
+ datetime_series[5:15].std() ** 2,
26
+ )
27
+
28
+ # No overlap
29
+ assert np.isnan(datetime_series[::2].cov(datetime_series[1::2]))
30
+
31
+ # all NA
32
+ cp = datetime_series[:10].copy()
33
+ cp[:] = np.nan
34
+ assert isna(cp.cov(cp))
35
+
36
+ # min_periods
37
+ assert isna(datetime_series[:15].cov(datetime_series[5:], min_periods=12))
38
+
39
+ ts1 = datetime_series[:15].reindex(datetime_series.index)
40
+ ts2 = datetime_series[5:].reindex(datetime_series.index)
41
+ assert isna(ts1.cov(ts2, min_periods=12))
42
+
43
+ @pytest.mark.parametrize("test_ddof", [None, 0, 1, 2, 3])
44
+ @pytest.mark.parametrize("dtype", ["float64", "Float64"])
45
+ def test_cov_ddof(self, test_ddof, dtype):
46
+ # GH#34611
47
+ np_array1 = np.random.default_rng(2).random(10)
48
+ np_array2 = np.random.default_rng(2).random(10)
49
+
50
+ s1 = Series(np_array1, dtype=dtype)
51
+ s2 = Series(np_array2, dtype=dtype)
52
+
53
+ result = s1.cov(s2, ddof=test_ddof)
54
+ expected = np.cov(np_array1, np_array2, ddof=test_ddof)[0][1]
55
+ assert math.isclose(expected, result)
56
+
57
+
58
+ class TestSeriesCorr:
59
+ @pytest.mark.parametrize("dtype", ["float64", "Float64"])
60
+ def test_corr(self, datetime_series, dtype):
61
+ stats = pytest.importorskip("scipy.stats")
62
+
63
+ datetime_series = datetime_series.astype(dtype)
64
+
65
+ # full overlap
66
+ tm.assert_almost_equal(datetime_series.corr(datetime_series), 1)
67
+
68
+ # partial overlap
69
+ tm.assert_almost_equal(datetime_series[:15].corr(datetime_series[5:]), 1)
70
+
71
+ assert isna(datetime_series[:15].corr(datetime_series[5:], min_periods=12))
72
+
73
+ ts1 = datetime_series[:15].reindex(datetime_series.index)
74
+ ts2 = datetime_series[5:].reindex(datetime_series.index)
75
+ assert isna(ts1.corr(ts2, min_periods=12))
76
+
77
+ # No overlap
78
+ assert np.isnan(datetime_series[::2].corr(datetime_series[1::2]))
79
+
80
+ # all NA
81
+ cp = datetime_series[:10].copy()
82
+ cp[:] = np.nan
83
+ assert isna(cp.corr(cp))
84
+
85
+ A = Series(
86
+ np.arange(10, dtype=np.float64),
87
+ index=date_range("2020-01-01", periods=10),
88
+ name="ts",
89
+ )
90
+ B = A.copy()
91
+ result = A.corr(B)
92
+ expected, _ = stats.pearsonr(A, B)
93
+ tm.assert_almost_equal(result, expected)
94
+
95
+ def test_corr_rank(self):
96
+ stats = pytest.importorskip("scipy.stats")
97
+
98
+ # kendall and spearman
99
+ A = Series(
100
+ np.arange(10, dtype=np.float64),
101
+ index=date_range("2020-01-01", periods=10),
102
+ name="ts",
103
+ )
104
+ B = A.copy()
105
+ A[-5:] = A[:5].copy()
106
+ result = A.corr(B, method="kendall")
107
+ expected = stats.kendalltau(A, B)[0]
108
+ tm.assert_almost_equal(result, expected)
109
+
110
+ result = A.corr(B, method="spearman")
111
+ expected = stats.spearmanr(A, B)[0]
112
+ tm.assert_almost_equal(result, expected)
113
+
114
+ # results from R
115
+ A = Series(
116
+ [
117
+ -0.89926396,
118
+ 0.94209606,
119
+ -1.03289164,
120
+ -0.95445587,
121
+ 0.76910310,
122
+ -0.06430576,
123
+ -2.09704447,
124
+ 0.40660407,
125
+ -0.89926396,
126
+ 0.94209606,
127
+ ]
128
+ )
129
+ B = Series(
130
+ [
131
+ -1.01270225,
132
+ -0.62210117,
133
+ -1.56895827,
134
+ 0.59592943,
135
+ -0.01680292,
136
+ 1.17258718,
137
+ -1.06009347,
138
+ -0.10222060,
139
+ -0.89076239,
140
+ 0.89372375,
141
+ ]
142
+ )
143
+ kexp = 0.4319297
144
+ sexp = 0.5853767
145
+ tm.assert_almost_equal(A.corr(B, method="kendall"), kexp)
146
+ tm.assert_almost_equal(A.corr(B, method="spearman"), sexp)
147
+
148
+ def test_corr_invalid_method(self):
149
+ # GH PR #22298
150
+ s1 = Series(np.random.default_rng(2).standard_normal(10))
151
+ s2 = Series(np.random.default_rng(2).standard_normal(10))
152
+ msg = "method must be either 'pearson', 'spearman', 'kendall', or a callable, "
153
+ with pytest.raises(ValueError, match=msg):
154
+ s1.corr(s2, method="____")
155
+
156
+ def test_corr_callable_method(self, datetime_series):
157
+ # simple correlation example
158
+ # returns 1 if exact equality, 0 otherwise
159
+ my_corr = lambda a, b: 1.0 if (a == b).all() else 0.0
160
+
161
+ # simple example
162
+ s1 = Series([1, 2, 3, 4, 5])
163
+ s2 = Series([5, 4, 3, 2, 1])
164
+ expected = 0
165
+ tm.assert_almost_equal(s1.corr(s2, method=my_corr), expected)
166
+
167
+ # full overlap
168
+ tm.assert_almost_equal(
169
+ datetime_series.corr(datetime_series, method=my_corr), 1.0
170
+ )
171
+
172
+ # partial overlap
173
+ tm.assert_almost_equal(
174
+ datetime_series[:15].corr(datetime_series[5:], method=my_corr), 1.0
175
+ )
176
+
177
+ # No overlap
178
+ assert np.isnan(
179
+ datetime_series[::2].corr(datetime_series[1::2], method=my_corr)
180
+ )
181
+
182
+ # dataframe example
183
+ df = pd.DataFrame([s1, s2])
184
+ expected = pd.DataFrame([{0: 1.0, 1: 0}, {0: 0, 1: 1.0}])
185
+ tm.assert_almost_equal(df.transpose().corr(method=my_corr), expected)
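A minimal sketch of cov/corr on two Series (illustrative data):

import pandas as pd

s1 = pd.Series([1.0, 2.0, 3.0, 4.0])
s2 = pd.Series([4.0, 3.0, 2.0, 1.0])
print(s1.cov(s2))                      # sample covariance, ddof=1 by default
print(s1.corr(s2))                     # Pearson correlation (default method)
print(s1.corr(s2, method="spearman"))  # rank-based alternative, as in the tests above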
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_describe.py ADDED
@@ -0,0 +1,203 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.compat.numpy import np_version_gte1p25
5
+
6
+ from pandas.core.dtypes.common import (
7
+ is_complex_dtype,
8
+ is_extension_array_dtype,
9
+ )
10
+
11
+ from pandas import (
12
+ NA,
13
+ Period,
14
+ Series,
15
+ Timedelta,
16
+ Timestamp,
17
+ date_range,
18
+ )
19
+ import pandas._testing as tm
20
+
21
+
22
+ class TestSeriesDescribe:
23
+ def test_describe_ints(self):
24
+ ser = Series([0, 1, 2, 3, 4], name="int_data")
25
+ result = ser.describe()
26
+ expected = Series(
27
+ [5, 2, ser.std(), 0, 1, 2, 3, 4],
28
+ name="int_data",
29
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
30
+ )
31
+ tm.assert_series_equal(result, expected)
32
+
33
+ def test_describe_bools(self):
34
+ ser = Series([True, True, False, False, False], name="bool_data")
35
+ result = ser.describe()
36
+ expected = Series(
37
+ [5, 2, False, 3], name="bool_data", index=["count", "unique", "top", "freq"]
38
+ )
39
+ tm.assert_series_equal(result, expected)
40
+
41
+ def test_describe_strs(self):
42
+ ser = Series(["a", "a", "b", "c", "d"], name="str_data")
43
+ result = ser.describe()
44
+ expected = Series(
45
+ [5, 4, "a", 2], name="str_data", index=["count", "unique", "top", "freq"]
46
+ )
47
+ tm.assert_series_equal(result, expected)
48
+
49
+ def test_describe_timedelta64(self):
50
+ ser = Series(
51
+ [
52
+ Timedelta("1 days"),
53
+ Timedelta("2 days"),
54
+ Timedelta("3 days"),
55
+ Timedelta("4 days"),
56
+ Timedelta("5 days"),
57
+ ],
58
+ name="timedelta_data",
59
+ )
60
+ result = ser.describe()
61
+ expected = Series(
62
+ [5, ser[2], ser.std(), ser[0], ser[1], ser[2], ser[3], ser[4]],
63
+ name="timedelta_data",
64
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
65
+ )
66
+ tm.assert_series_equal(result, expected)
67
+
68
+ def test_describe_period(self):
69
+ ser = Series(
70
+ [Period("2020-01", "M"), Period("2020-01", "M"), Period("2019-12", "M")],
71
+ name="period_data",
72
+ )
73
+ result = ser.describe()
74
+ expected = Series(
75
+ [3, 2, ser[0], 2],
76
+ name="period_data",
77
+ index=["count", "unique", "top", "freq"],
78
+ )
79
+ tm.assert_series_equal(result, expected)
80
+
81
+ def test_describe_empty_object(self):
82
+ # https://github.com/pandas-dev/pandas/issues/27183
83
+ s = Series([None, None], dtype=object)
84
+ result = s.describe()
85
+ expected = Series(
86
+ [0, 0, np.nan, np.nan],
87
+ dtype=object,
88
+ index=["count", "unique", "top", "freq"],
89
+ )
90
+ tm.assert_series_equal(result, expected)
91
+
92
+ result = s[:0].describe()
93
+ tm.assert_series_equal(result, expected)
94
+ # ensure NaN, not None
95
+ assert np.isnan(result.iloc[2])
96
+ assert np.isnan(result.iloc[3])
97
+
98
+ def test_describe_with_tz(self, tz_naive_fixture):
99
+ # GH 21332
100
+ tz = tz_naive_fixture
101
+ name = str(tz_naive_fixture)
102
+ start = Timestamp(2018, 1, 1)
103
+ end = Timestamp(2018, 1, 5)
104
+ s = Series(date_range(start, end, tz=tz), name=name)
105
+ result = s.describe()
106
+ expected = Series(
107
+ [
108
+ 5,
109
+ Timestamp(2018, 1, 3).tz_localize(tz),
110
+ start.tz_localize(tz),
111
+ s[1],
112
+ s[2],
113
+ s[3],
114
+ end.tz_localize(tz),
115
+ ],
116
+ name=name,
117
+ index=["count", "mean", "min", "25%", "50%", "75%", "max"],
118
+ )
119
+ tm.assert_series_equal(result, expected)
120
+
121
+ def test_describe_with_tz_numeric(self):
122
+ name = tz = "CET"
123
+ start = Timestamp(2018, 1, 1)
124
+ end = Timestamp(2018, 1, 5)
125
+ s = Series(date_range(start, end, tz=tz), name=name)
126
+
127
+ result = s.describe()
128
+
129
+ expected = Series(
130
+ [
131
+ 5,
132
+ Timestamp("2018-01-03 00:00:00", tz=tz),
133
+ Timestamp("2018-01-01 00:00:00", tz=tz),
134
+ Timestamp("2018-01-02 00:00:00", tz=tz),
135
+ Timestamp("2018-01-03 00:00:00", tz=tz),
136
+ Timestamp("2018-01-04 00:00:00", tz=tz),
137
+ Timestamp("2018-01-05 00:00:00", tz=tz),
138
+ ],
139
+ name=name,
140
+ index=["count", "mean", "min", "25%", "50%", "75%", "max"],
141
+ )
142
+ tm.assert_series_equal(result, expected)
143
+
144
+ def test_datetime_is_numeric_includes_datetime(self):
145
+ s = Series(date_range("2012", periods=3))
146
+ result = s.describe()
147
+ expected = Series(
148
+ [
149
+ 3,
150
+ Timestamp("2012-01-02"),
151
+ Timestamp("2012-01-01"),
152
+ Timestamp("2012-01-01T12:00:00"),
153
+ Timestamp("2012-01-02"),
154
+ Timestamp("2012-01-02T12:00:00"),
155
+ Timestamp("2012-01-03"),
156
+ ],
157
+ index=["count", "mean", "min", "25%", "50%", "75%", "max"],
158
+ )
159
+ tm.assert_series_equal(result, expected)
160
+
161
+ @pytest.mark.filterwarnings("ignore:Casting complex values to real discards")
162
+ def test_numeric_result_dtype(self, any_numeric_dtype):
163
+ # GH#48340 - describe should always return float on non-complex numeric input
164
+ if is_extension_array_dtype(any_numeric_dtype):
165
+ dtype = "Float64"
166
+ else:
167
+ dtype = "complex128" if is_complex_dtype(any_numeric_dtype) else None
168
+
169
+ ser = Series([0, 1], dtype=any_numeric_dtype)
170
+ if dtype == "complex128" and np_version_gte1p25:
171
+ with pytest.raises(
172
+ TypeError, match=r"^a must be an array of real numbers$"
173
+ ):
174
+ ser.describe()
175
+ return
176
+ result = ser.describe()
177
+ expected = Series(
178
+ [
179
+ 2.0,
180
+ 0.5,
181
+ ser.std(),
182
+ 0,
183
+ 0.25,
184
+ 0.5,
185
+ 0.75,
186
+ 1.0,
187
+ ],
188
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
189
+ dtype=dtype,
190
+ )
191
+ tm.assert_series_equal(result, expected)
192
+
193
+ def test_describe_one_element_ea(self):
194
+ # GH#52515
195
+ ser = Series([0.0], dtype="Float64")
196
+ with tm.assert_produces_warning(None):
197
+ result = ser.describe()
198
+ expected = Series(
199
+ [1, 0, NA, 0, 0, 0, 0, 0],
200
+ dtype="Float64",
201
+ index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
202
+ )
203
+ tm.assert_series_equal(result, expected)
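A minimal sketch of how describe switches its output depending on dtype (illustrative data):

import pandas as pd

print(pd.Series([0, 1, 2, 3, 4]).describe())  # count/mean/std/min/quartiles/max
print(pd.Series(["a", "a", "b"]).describe())  # count/unique/top/freq for object data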
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_diff.py ADDED
@@ -0,0 +1,88 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ TimedeltaIndex,
7
+ date_range,
8
+ )
9
+ import pandas._testing as tm
10
+
11
+
12
+ class TestSeriesDiff:
13
+ def test_diff_np(self):
14
+ # TODO(__array_function__): could make np.diff return a Series
15
+ # matching ser.diff()
16
+
17
+ ser = Series(np.arange(5))
18
+
19
+ res = np.diff(ser)
20
+ expected = np.array([1, 1, 1, 1])
21
+ tm.assert_numpy_array_equal(res, expected)
22
+
23
+ def test_diff_int(self):
24
+ # int dtype
25
+ a = 10000000000000000
26
+ b = a + 1
27
+ ser = Series([a, b])
28
+
29
+ result = ser.diff()
30
+ assert result[1] == 1
31
+
32
+ def test_diff_tz(self):
33
+ # Combined datetime diff, normal diff and boolean diff test
34
+ ts = Series(
35
+ np.arange(10, dtype=np.float64),
36
+ index=date_range("2020-01-01", periods=10),
37
+ name="ts",
38
+ )
39
+ ts.diff()
40
+
41
+ # neg n
42
+ result = ts.diff(-1)
43
+ expected = ts - ts.shift(-1)
44
+ tm.assert_series_equal(result, expected)
45
+
46
+ # 0
47
+ result = ts.diff(0)
48
+ expected = ts - ts
49
+ tm.assert_series_equal(result, expected)
50
+
51
+ def test_diff_dt64(self):
52
+ # datetime diff (GH#3100)
53
+ ser = Series(date_range("20130102", periods=5))
54
+ result = ser.diff()
55
+ expected = ser - ser.shift(1)
56
+ tm.assert_series_equal(result, expected)
57
+
58
+ # timedelta diff
59
+ result = result - result.shift(1) # previous result
60
+ expected = expected.diff() # previously expected
61
+ tm.assert_series_equal(result, expected)
62
+
63
+ def test_diff_dt64tz(self):
64
+ # with tz
65
+ ser = Series(
66
+ date_range("2000-01-01 09:00:00", periods=5, tz="US/Eastern"), name="foo"
67
+ )
68
+ result = ser.diff()
69
+ expected = Series(TimedeltaIndex(["NaT"] + ["1 days"] * 4), name="foo")
70
+ tm.assert_series_equal(result, expected)
71
+
72
+ @pytest.mark.parametrize(
73
+ "input,output,diff",
74
+ [([False, True, True, False, False], [np.nan, True, False, True, False], 1)],
75
+ )
76
+ def test_diff_bool(self, input, output, diff):
77
+ # boolean series (test for fixing #17294)
78
+ ser = Series(input)
79
+ result = ser.diff()
80
+ expected = Series(output)
81
+ tm.assert_series_equal(result, expected)
82
+
83
+ def test_diff_object_dtype(self):
84
+ # object series
85
+ ser = Series([False, True, 5.0, np.nan, True, False])
86
+ result = ser.diff()
87
+ expected = ser - ser.shift(1)
88
+ tm.assert_series_equal(result, expected)
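A minimal sketch of Series.diff (illustrative data):

import pandas as pd

ser = pd.Series([1, 3, 6, 10])
print(ser.diff())      # first differences: NaN, 2, 3, 4
print(ser.diff(-1))    # difference with the following element instead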
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop.py ADDED
@@ -0,0 +1,99 @@
1
+ import pytest
2
+
3
+ from pandas import (
4
+ Index,
5
+ Series,
6
+ )
7
+ import pandas._testing as tm
8
+ from pandas.api.types import is_bool_dtype
9
+
10
+
11
+ @pytest.mark.parametrize(
12
+ "data, index, drop_labels, axis, expected_data, expected_index",
13
+ [
14
+ # Unique Index
15
+ ([1, 2], ["one", "two"], ["two"], 0, [1], ["one"]),
16
+ ([1, 2], ["one", "two"], ["two"], "rows", [1], ["one"]),
17
+ ([1, 1, 2], ["one", "two", "one"], ["two"], 0, [1, 2], ["one", "one"]),
18
+ # GH 5248 Non-Unique Index
19
+ ([1, 1, 2], ["one", "two", "one"], "two", 0, [1, 2], ["one", "one"]),
20
+ ([1, 1, 2], ["one", "two", "one"], ["one"], 0, [1], ["two"]),
21
+ ([1, 1, 2], ["one", "two", "one"], "one", 0, [1], ["two"]),
22
+ ],
23
+ )
24
+ def test_drop_unique_and_non_unique_index(
25
+ data, index, axis, drop_labels, expected_data, expected_index
26
+ ):
27
+ ser = Series(data=data, index=index)
28
+ result = ser.drop(drop_labels, axis=axis)
29
+ expected = Series(data=expected_data, index=expected_index)
30
+ tm.assert_series_equal(result, expected)
31
+
32
+
33
+ @pytest.mark.parametrize(
34
+ "data, index, drop_labels, axis, error_type, error_desc",
35
+ [
36
+ # single string/tuple-like
37
+ (range(3), list("abc"), "bc", 0, KeyError, "not found in axis"),
38
+ # bad axis
39
+ (range(3), list("abc"), ("a",), 0, KeyError, "not found in axis"),
40
+ (range(3), list("abc"), "one", "columns", ValueError, "No axis named columns"),
41
+ ],
42
+ )
43
+ def test_drop_exception_raised(data, index, drop_labels, axis, error_type, error_desc):
44
+ ser = Series(data, index=index)
45
+ with pytest.raises(error_type, match=error_desc):
46
+ ser.drop(drop_labels, axis=axis)
47
+
48
+
49
+ def test_drop_with_ignore_errors():
50
+ # errors='ignore'
51
+ ser = Series(range(3), index=list("abc"))
52
+ result = ser.drop("bc", errors="ignore")
53
+ tm.assert_series_equal(result, ser)
54
+ result = ser.drop(["a", "d"], errors="ignore")
55
+ expected = ser.iloc[1:]
56
+ tm.assert_series_equal(result, expected)
57
+
58
+ # GH 8522
59
+ ser = Series([2, 3], index=[True, False])
60
+ assert is_bool_dtype(ser.index)
61
+ assert ser.index.dtype == bool
62
+ result = ser.drop(True)
63
+ expected = Series([3], index=[False])
64
+ tm.assert_series_equal(result, expected)
65
+
66
+
67
+ @pytest.mark.parametrize("index", [[1, 2, 3], [1, 1, 3]])
68
+ @pytest.mark.parametrize("drop_labels", [[], [1], [3]])
69
+ def test_drop_empty_list(index, drop_labels):
70
+ # GH 21494
71
+ expected_index = [i for i in index if i not in drop_labels]
72
+ series = Series(index=index, dtype=object).drop(drop_labels)
73
+ expected = Series(index=expected_index, dtype=object)
74
+ tm.assert_series_equal(series, expected)
75
+
76
+
77
+ @pytest.mark.parametrize(
78
+ "data, index, drop_labels",
79
+ [
80
+ (None, [1, 2, 3], [1, 4]),
81
+ (None, [1, 2, 2], [1, 4]),
82
+ ([2, 3], [0, 1], [False, True]),
83
+ ],
84
+ )
85
+ def test_drop_non_empty_list(data, index, drop_labels):
86
+ # GH 21494 and GH 16877
87
+ dtype = object if data is None else None
88
+ ser = Series(data=data, index=index, dtype=dtype)
89
+ with pytest.raises(KeyError, match="not found in axis"):
90
+ ser.drop(drop_labels)
91
+
92
+
93
+ def test_drop_index_ea_dtype(any_numeric_ea_dtype):
94
+ # GH#45860
95
+ df = Series(100, index=Index([1, 2, 2], dtype=any_numeric_ea_dtype))
96
+ idx = Index([df.index[1]])
97
+ result = df.drop(idx)
98
+ expected = Series(100, index=Index([1], dtype=any_numeric_ea_dtype))
99
+ tm.assert_series_equal(result, expected)
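A minimal sketch of Series.drop by label, matching the behaviour tested above (illustrative data):

import pandas as pd

ser = pd.Series(range(3), index=list("abc"))
print(ser.drop("b"))                          # drop a single label
print(ser.drop(["a", "z"], errors="ignore"))  # missing labels are ignored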
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop_duplicates.py ADDED
@@ -0,0 +1,267 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ Categorical,
7
+ Series,
8
+ )
9
+ import pandas._testing as tm
10
+
11
+
12
+ @pytest.mark.parametrize(
13
+ "keep, expected",
14
+ [
15
+ ("first", Series([False, False, False, False, True, True, False])),
16
+ ("last", Series([False, True, True, False, False, False, False])),
17
+ (False, Series([False, True, True, False, True, True, False])),
18
+ ],
19
+ )
20
+ def test_drop_duplicates(any_numpy_dtype, keep, expected):
21
+ tc = Series([1, 0, 3, 5, 3, 0, 4], dtype=np.dtype(any_numpy_dtype))
22
+
23
+ if tc.dtype == "bool":
24
+ pytest.skip("tested separately in test_drop_duplicates_bool")
25
+
26
+ tm.assert_series_equal(tc.duplicated(keep=keep), expected)
27
+ tm.assert_series_equal(tc.drop_duplicates(keep=keep), tc[~expected])
28
+ sc = tc.copy()
29
+ return_value = sc.drop_duplicates(keep=keep, inplace=True)
30
+ assert return_value is None
31
+ tm.assert_series_equal(sc, tc[~expected])
32
+
33
+
34
+ @pytest.mark.parametrize(
35
+ "keep, expected",
36
+ [
37
+ ("first", Series([False, False, True, True])),
38
+ ("last", Series([True, True, False, False])),
39
+ (False, Series([True, True, True, True])),
40
+ ],
41
+ )
42
+ def test_drop_duplicates_bool(keep, expected):
43
+ tc = Series([True, False, True, False])
44
+
45
+ tm.assert_series_equal(tc.duplicated(keep=keep), expected)
46
+ tm.assert_series_equal(tc.drop_duplicates(keep=keep), tc[~expected])
47
+ sc = tc.copy()
48
+ return_value = sc.drop_duplicates(keep=keep, inplace=True)
49
+ tm.assert_series_equal(sc, tc[~expected])
50
+ assert return_value is None
51
+
52
+
53
+ @pytest.mark.parametrize("values", [[], list(range(5))])
54
+ def test_drop_duplicates_no_duplicates(any_numpy_dtype, keep, values):
55
+ tc = Series(values, dtype=np.dtype(any_numpy_dtype))
56
+ expected = Series([False] * len(tc), dtype="bool")
57
+
58
+ if tc.dtype == "bool":
59
+ # 0 -> False and 1-> True
60
+ # any other value would be duplicated
61
+ tc = tc[:2]
62
+ expected = expected[:2]
63
+
64
+ tm.assert_series_equal(tc.duplicated(keep=keep), expected)
65
+
66
+ result_dropped = tc.drop_duplicates(keep=keep)
67
+ tm.assert_series_equal(result_dropped, tc)
68
+
69
+ # validate shallow copy
70
+ assert result_dropped is not tc
71
+
72
+
73
+ class TestSeriesDropDuplicates:
74
+ @pytest.fixture(
75
+ params=["int_", "uint", "float64", "str_", "timedelta64[h]", "datetime64[D]"]
76
+ )
77
+ def dtype(self, request):
78
+ return request.param
79
+
80
+ @pytest.fixture
81
+ def cat_series_unused_category(self, dtype, ordered):
82
+ # Test case 1
83
+ cat_array = np.array([1, 2, 3, 4, 5], dtype=np.dtype(dtype))
84
+
85
+ input1 = np.array([1, 2, 3, 3], dtype=np.dtype(dtype))
86
+ cat = Categorical(input1, categories=cat_array, ordered=ordered)
87
+ tc1 = Series(cat)
88
+ return tc1
89
+
90
+ def test_drop_duplicates_categorical_non_bool(self, cat_series_unused_category):
91
+ tc1 = cat_series_unused_category
92
+
93
+ expected = Series([False, False, False, True])
94
+
95
+ result = tc1.duplicated()
96
+ tm.assert_series_equal(result, expected)
97
+
98
+ result = tc1.drop_duplicates()
99
+ tm.assert_series_equal(result, tc1[~expected])
100
+
101
+ sc = tc1.copy()
102
+ return_value = sc.drop_duplicates(inplace=True)
103
+ assert return_value is None
104
+ tm.assert_series_equal(sc, tc1[~expected])
105
+
106
+ def test_drop_duplicates_categorical_non_bool_keeplast(
107
+ self, cat_series_unused_category
108
+ ):
109
+ tc1 = cat_series_unused_category
110
+
111
+ expected = Series([False, False, True, False])
112
+
113
+ result = tc1.duplicated(keep="last")
114
+ tm.assert_series_equal(result, expected)
115
+
116
+ result = tc1.drop_duplicates(keep="last")
117
+ tm.assert_series_equal(result, tc1[~expected])
118
+
119
+ sc = tc1.copy()
120
+ return_value = sc.drop_duplicates(keep="last", inplace=True)
121
+ assert return_value is None
122
+ tm.assert_series_equal(sc, tc1[~expected])
123
+
124
+ def test_drop_duplicates_categorical_non_bool_keepfalse(
125
+ self, cat_series_unused_category
126
+ ):
127
+ tc1 = cat_series_unused_category
128
+
129
+ expected = Series([False, False, True, True])
130
+
131
+ result = tc1.duplicated(keep=False)
132
+ tm.assert_series_equal(result, expected)
133
+
134
+ result = tc1.drop_duplicates(keep=False)
135
+ tm.assert_series_equal(result, tc1[~expected])
136
+
137
+ sc = tc1.copy()
138
+ return_value = sc.drop_duplicates(keep=False, inplace=True)
139
+ assert return_value is None
140
+ tm.assert_series_equal(sc, tc1[~expected])
141
+
142
+ @pytest.fixture
143
+ def cat_series(self, dtype, ordered):
144
+ # no unused categories, unlike cat_series_unused_category
145
+ cat_array = np.array([1, 2, 3, 4, 5], dtype=np.dtype(dtype))
146
+
147
+ input2 = np.array([1, 2, 3, 5, 3, 2, 4], dtype=np.dtype(dtype))
148
+ cat = Categorical(input2, categories=cat_array, ordered=ordered)
149
+ tc2 = Series(cat)
150
+ return tc2
151
+
152
+ def test_drop_duplicates_categorical_non_bool2(self, cat_series):
153
+ tc2 = cat_series
154
+
155
+ expected = Series([False, False, False, False, True, True, False])
156
+
157
+ result = tc2.duplicated()
158
+ tm.assert_series_equal(result, expected)
159
+
160
+ result = tc2.drop_duplicates()
161
+ tm.assert_series_equal(result, tc2[~expected])
162
+
163
+ sc = tc2.copy()
164
+ return_value = sc.drop_duplicates(inplace=True)
165
+ assert return_value is None
166
+ tm.assert_series_equal(sc, tc2[~expected])
167
+
168
+ def test_drop_duplicates_categorical_non_bool2_keeplast(self, cat_series):
169
+ tc2 = cat_series
170
+
171
+ expected = Series([False, True, True, False, False, False, False])
172
+
173
+ result = tc2.duplicated(keep="last")
174
+ tm.assert_series_equal(result, expected)
175
+
176
+ result = tc2.drop_duplicates(keep="last")
177
+ tm.assert_series_equal(result, tc2[~expected])
178
+
179
+ sc = tc2.copy()
180
+ return_value = sc.drop_duplicates(keep="last", inplace=True)
181
+ assert return_value is None
182
+ tm.assert_series_equal(sc, tc2[~expected])
183
+
184
+ def test_drop_duplicates_categorical_non_bool2_keepfalse(self, cat_series):
185
+ tc2 = cat_series
186
+
187
+ expected = Series([False, True, True, False, True, True, False])
188
+
189
+ result = tc2.duplicated(keep=False)
190
+ tm.assert_series_equal(result, expected)
191
+
192
+ result = tc2.drop_duplicates(keep=False)
193
+ tm.assert_series_equal(result, tc2[~expected])
194
+
195
+ sc = tc2.copy()
196
+ return_value = sc.drop_duplicates(keep=False, inplace=True)
197
+ assert return_value is None
198
+ tm.assert_series_equal(sc, tc2[~expected])
199
+
200
+ def test_drop_duplicates_categorical_bool(self, ordered):
201
+ tc = Series(
202
+ Categorical(
203
+ [True, False, True, False], categories=[True, False], ordered=ordered
204
+ )
205
+ )
206
+
207
+ expected = Series([False, False, True, True])
208
+ tm.assert_series_equal(tc.duplicated(), expected)
209
+ tm.assert_series_equal(tc.drop_duplicates(), tc[~expected])
210
+ sc = tc.copy()
211
+ return_value = sc.drop_duplicates(inplace=True)
212
+ assert return_value is None
213
+ tm.assert_series_equal(sc, tc[~expected])
214
+
215
+ expected = Series([True, True, False, False])
216
+ tm.assert_series_equal(tc.duplicated(keep="last"), expected)
217
+ tm.assert_series_equal(tc.drop_duplicates(keep="last"), tc[~expected])
218
+ sc = tc.copy()
219
+ return_value = sc.drop_duplicates(keep="last", inplace=True)
220
+ assert return_value is None
221
+ tm.assert_series_equal(sc, tc[~expected])
222
+
223
+ expected = Series([True, True, True, True])
224
+ tm.assert_series_equal(tc.duplicated(keep=False), expected)
225
+ tm.assert_series_equal(tc.drop_duplicates(keep=False), tc[~expected])
226
+ sc = tc.copy()
227
+ return_value = sc.drop_duplicates(keep=False, inplace=True)
228
+ assert return_value is None
229
+ tm.assert_series_equal(sc, tc[~expected])
230
+
231
+ def test_drop_duplicates_categorical_bool_na(self, nulls_fixture):
232
+ # GH#44351
233
+ ser = Series(
234
+ Categorical(
235
+ [True, False, True, False, nulls_fixture],
236
+ categories=[True, False],
237
+ ordered=True,
238
+ )
239
+ )
240
+ result = ser.drop_duplicates()
241
+ expected = Series(
242
+ Categorical([True, False, np.nan], categories=[True, False], ordered=True),
243
+ index=[0, 1, 4],
244
+ )
245
+ tm.assert_series_equal(result, expected)
246
+
247
+ def test_drop_duplicates_ignore_index(self):
248
+ # GH#48304
249
+ ser = Series([1, 2, 2, 3])
250
+ result = ser.drop_duplicates(ignore_index=True)
251
+ expected = Series([1, 2, 3])
252
+ tm.assert_series_equal(result, expected)
253
+
254
+ def test_duplicated_arrow_dtype(self):
255
+ pytest.importorskip("pyarrow")
256
+ ser = Series([True, False, None, False], dtype="bool[pyarrow]")
257
+ result = ser.drop_duplicates()
258
+ expected = Series([True, False, None], dtype="bool[pyarrow]")
259
+ tm.assert_series_equal(result, expected)
260
+
261
+ def test_drop_duplicates_arrow_strings(self):
262
+ # GH#54904
263
+ pa = pytest.importorskip("pyarrow")
264
+ ser = Series(["a", "a"], dtype=pd.ArrowDtype(pa.string()))
265
+ result = ser.drop_duplicates()
266
+ expected = Series(["a"], dtype=pd.ArrowDtype(pa.string()))
267
+ tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_dropna.py ADDED
@@ -0,0 +1,117 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DatetimeIndex,
6
+ IntervalIndex,
7
+ NaT,
8
+ Period,
9
+ Series,
10
+ Timestamp,
11
+ )
12
+ import pandas._testing as tm
13
+
14
+
15
+ class TestDropna:
16
+ def test_dropna_empty(self):
17
+ ser = Series([], dtype=object)
18
+
19
+ assert len(ser.dropna()) == 0
20
+ return_value = ser.dropna(inplace=True)
21
+ assert return_value is None
22
+ assert len(ser) == 0
23
+
24
+ # invalid axis
25
+ msg = "No axis named 1 for object type Series"
26
+ with pytest.raises(ValueError, match=msg):
27
+ ser.dropna(axis=1)
28
+
29
+ def test_dropna_preserve_name(self, datetime_series):
30
+ datetime_series[:5] = np.nan
31
+ result = datetime_series.dropna()
32
+ assert result.name == datetime_series.name
33
+ name = datetime_series.name
34
+ ts = datetime_series.copy()
35
+ return_value = ts.dropna(inplace=True)
36
+ assert return_value is None
37
+ assert ts.name == name
38
+
39
+ def test_dropna_no_nan(self):
40
+ for ser in [
41
+ Series([1, 2, 3], name="x"),
42
+ Series([False, True, False], name="x"),
43
+ ]:
44
+ result = ser.dropna()
45
+ tm.assert_series_equal(result, ser)
46
+ assert result is not ser
47
+
48
+ s2 = ser.copy()
49
+ return_value = s2.dropna(inplace=True)
50
+ assert return_value is None
51
+ tm.assert_series_equal(s2, ser)
52
+
53
+ def test_dropna_intervals(self):
54
+ ser = Series(
55
+ [np.nan, 1, 2, 3],
56
+ IntervalIndex.from_arrays([np.nan, 0, 1, 2], [np.nan, 1, 2, 3]),
57
+ )
58
+
59
+ result = ser.dropna()
60
+ expected = ser.iloc[1:]
61
+ tm.assert_series_equal(result, expected)
62
+
63
+ def test_dropna_period_dtype(self):
64
+ # GH#13737
65
+ ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])
66
+ result = ser.dropna()
67
+ expected = Series([Period("2011-01", freq="M")])
68
+
69
+ tm.assert_series_equal(result, expected)
70
+
71
+ def test_datetime64_tz_dropna(self, unit):
72
+ # DatetimeLikeBlock
73
+ ser = Series(
74
+ [
75
+ Timestamp("2011-01-01 10:00"),
76
+ NaT,
77
+ Timestamp("2011-01-03 10:00"),
78
+ NaT,
79
+ ],
80
+ dtype=f"M8[{unit}]",
81
+ )
82
+ result = ser.dropna()
83
+ expected = Series(
84
+ [Timestamp("2011-01-01 10:00"), Timestamp("2011-01-03 10:00")],
85
+ index=[0, 2],
86
+ dtype=f"M8[{unit}]",
87
+ )
88
+ tm.assert_series_equal(result, expected)
89
+
90
+ # DatetimeTZBlock
91
+ idx = DatetimeIndex(
92
+ ["2011-01-01 10:00", NaT, "2011-01-03 10:00", NaT], tz="Asia/Tokyo"
93
+ ).as_unit(unit)
94
+ ser = Series(idx)
95
+ assert ser.dtype == f"datetime64[{unit}, Asia/Tokyo]"
96
+ result = ser.dropna()
97
+ expected = Series(
98
+ [
99
+ Timestamp("2011-01-01 10:00", tz="Asia/Tokyo"),
100
+ Timestamp("2011-01-03 10:00", tz="Asia/Tokyo"),
101
+ ],
102
+ index=[0, 2],
103
+ dtype=f"datetime64[{unit}, Asia/Tokyo]",
104
+ )
105
+ assert result.dtype == f"datetime64[{unit}, Asia/Tokyo]"
106
+ tm.assert_series_equal(result, expected)
107
+
108
+ @pytest.mark.parametrize("val", [1, 1.5])
109
+ def test_dropna_ignore_index(self, val):
110
+ # GH#31725
111
+ ser = Series([1, 2, val], index=[3, 2, 1])
112
+ result = ser.dropna(ignore_index=True)
113
+ expected = Series([1, 2, val])
114
+ tm.assert_series_equal(result, expected)
115
+
116
+ ser.dropna(ignore_index=True, inplace=True)
117
+ tm.assert_series_equal(ser, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_dtypes.py ADDED
@@ -0,0 +1,7 @@
1
+ import numpy as np
2
+
3
+
4
+ class TestSeriesDtypes:
5
+ def test_dtype(self, datetime_series):
6
+ assert datetime_series.dtype == np.dtype("float64")
7
+ assert datetime_series.dtypes == np.dtype("float64")
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_equals.py ADDED
@@ -0,0 +1,145 @@
1
+ from contextlib import nullcontext
2
+ import copy
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas._libs.missing import is_matching_na
8
+ from pandas.compat.numpy import np_version_gte1p25
9
+
10
+ from pandas.core.dtypes.common import is_float
11
+
12
+ from pandas import (
13
+ Index,
14
+ MultiIndex,
15
+ Series,
16
+ )
17
+ import pandas._testing as tm
18
+
19
+
20
+ @pytest.mark.parametrize(
21
+ "arr, idx",
22
+ [
23
+ ([1, 2, 3, 4], [0, 2, 1, 3]),
24
+ ([1, np.nan, 3, np.nan], [0, 2, 1, 3]),
25
+ (
26
+ [1, np.nan, 3, np.nan],
27
+ MultiIndex.from_tuples([(0, "a"), (1, "b"), (2, "c"), (3, "c")]),
28
+ ),
29
+ ],
30
+ )
31
+ def test_equals(arr, idx):
32
+ s1 = Series(arr, index=idx)
33
+ s2 = s1.copy()
34
+ assert s1.equals(s2)
35
+
36
+ s1[1] = 9
37
+ assert not s1.equals(s2)
38
+
39
+
40
+ @pytest.mark.parametrize(
41
+ "val", [1, 1.1, 1 + 1j, True, "abc", [1, 2], (1, 2), {1, 2}, {"a": 1}, None]
42
+ )
43
+ def test_equals_list_array(val):
44
+ # GH20676 Verify equals operator for list of Numpy arrays
45
+ arr = np.array([1, 2])
46
+ s1 = Series([arr, arr])
47
+ s2 = s1.copy()
48
+ assert s1.equals(s2)
49
+
50
+ s1[1] = val
51
+
52
+ cm = (
53
+ tm.assert_produces_warning(FutureWarning, check_stacklevel=False)
54
+ if isinstance(val, str) and not np_version_gte1p25
55
+ else nullcontext()
56
+ )
57
+ with cm:
58
+ assert not s1.equals(s2)
59
+
60
+
61
+ def test_equals_false_negative():
62
+ # GH8437 Verify false negative behavior of equals function for dtype object
63
+ arr = [False, np.nan]
64
+ s1 = Series(arr)
65
+ s2 = s1.copy()
66
+ s3 = Series(index=range(2), dtype=object)
67
+ s4 = s3.copy()
68
+ s5 = s3.copy()
69
+ s6 = s3.copy()
70
+
71
+ s3[:-1] = s4[:-1] = s5[0] = s6[0] = False
72
+ assert s1.equals(s1)
73
+ assert s1.equals(s2)
74
+ assert s1.equals(s3)
75
+ assert s1.equals(s4)
76
+ assert s1.equals(s5)
77
+ assert s5.equals(s6)
78
+
79
+
80
+ def test_equals_matching_nas():
81
+ # matching but not identical NAs
82
+ left = Series([np.datetime64("NaT")], dtype=object)
83
+ right = Series([np.datetime64("NaT")], dtype=object)
84
+ assert left.equals(right)
85
+ with tm.assert_produces_warning(FutureWarning, match="Dtype inference"):
86
+ assert Index(left).equals(Index(right))
87
+ assert left.array.equals(right.array)
88
+
89
+ left = Series([np.timedelta64("NaT")], dtype=object)
90
+ right = Series([np.timedelta64("NaT")], dtype=object)
91
+ assert left.equals(right)
92
+ with tm.assert_produces_warning(FutureWarning, match="Dtype inference"):
93
+ assert Index(left).equals(Index(right))
94
+ assert left.array.equals(right.array)
95
+
96
+ left = Series([np.float64("NaN")], dtype=object)
97
+ right = Series([np.float64("NaN")], dtype=object)
98
+ assert left.equals(right)
99
+ assert Index(left, dtype=left.dtype).equals(Index(right, dtype=right.dtype))
100
+ assert left.array.equals(right.array)
101
+
102
+
103
+ def test_equals_mismatched_nas(nulls_fixture, nulls_fixture2):
104
+ # GH#39650
105
+ left = nulls_fixture
106
+ right = nulls_fixture2
107
+ if hasattr(right, "copy"):
108
+ right = right.copy()
109
+ else:
110
+ right = copy.copy(right)
111
+
112
+ ser = Series([left], dtype=object)
113
+ ser2 = Series([right], dtype=object)
114
+
115
+ if is_matching_na(left, right):
116
+ assert ser.equals(ser2)
117
+ elif (left is None and is_float(right)) or (right is None and is_float(left)):
118
+ assert ser.equals(ser2)
119
+ else:
120
+ assert not ser.equals(ser2)
121
+
122
+
123
+ def test_equals_none_vs_nan():
124
+ # GH#39650
125
+ ser = Series([1, None], dtype=object)
126
+ ser2 = Series([1, np.nan], dtype=object)
127
+
128
+ assert ser.equals(ser2)
129
+ assert Index(ser, dtype=ser.dtype).equals(Index(ser2, dtype=ser2.dtype))
130
+ assert ser.array.equals(ser2.array)
131
+
132
+
133
+ def test_equals_None_vs_float():
134
+ # GH#44190
135
+ left = Series([-np.inf, np.nan, -1.0, 0.0, 1.0, 10 / 3, np.inf], dtype=object)
136
+ right = Series([None] * len(left))
137
+
138
+ # these series were found to be equal due to a bug, check that they are correctly
139
+ # found to not equal
140
+ assert not left.equals(right)
141
+ assert not right.equals(left)
142
+ assert not left.to_frame().equals(right.to_frame())
143
+ assert not right.to_frame().equals(left.to_frame())
144
+ assert not Index(left, dtype="object").equals(Index(right, dtype="object"))
145
+ assert not Index(right, dtype="object").equals(Index(left, dtype="object"))
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_explode.py ADDED
@@ -0,0 +1,175 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+
7
+
8
+ def test_basic():
9
+ s = pd.Series([[0, 1, 2], np.nan, [], (3, 4)], index=list("abcd"), name="foo")
10
+ result = s.explode()
11
+ expected = pd.Series(
12
+ [0, 1, 2, np.nan, np.nan, 3, 4], index=list("aaabcdd"), dtype=object, name="foo"
13
+ )
14
+ tm.assert_series_equal(result, expected)
15
+
16
+
17
+ def test_mixed_type():
18
+ s = pd.Series(
19
+ [[0, 1, 2], np.nan, None, np.array([]), pd.Series(["a", "b"])], name="foo"
20
+ )
21
+ result = s.explode()
22
+ expected = pd.Series(
23
+ [0, 1, 2, np.nan, None, np.nan, "a", "b"],
24
+ index=[0, 0, 0, 1, 2, 3, 4, 4],
25
+ dtype=object,
26
+ name="foo",
27
+ )
28
+ tm.assert_series_equal(result, expected)
29
+
30
+
31
+ def test_empty():
32
+ s = pd.Series(dtype=object)
33
+ result = s.explode()
34
+ expected = s.copy()
35
+ tm.assert_series_equal(result, expected)
36
+
37
+
38
+ def test_nested_lists():
39
+ s = pd.Series([[[1, 2, 3]], [1, 2], 1])
40
+ result = s.explode()
41
+ expected = pd.Series([[1, 2, 3], 1, 2, 1], index=[0, 1, 1, 2])
42
+ tm.assert_series_equal(result, expected)
43
+
44
+
45
+ def test_multi_index():
46
+ s = pd.Series(
47
+ [[0, 1, 2], np.nan, [], (3, 4)],
48
+ name="foo",
49
+ index=pd.MultiIndex.from_product([list("ab"), range(2)], names=["foo", "bar"]),
50
+ )
51
+ result = s.explode()
52
+ index = pd.MultiIndex.from_tuples(
53
+ [("a", 0), ("a", 0), ("a", 0), ("a", 1), ("b", 0), ("b", 1), ("b", 1)],
54
+ names=["foo", "bar"],
55
+ )
56
+ expected = pd.Series(
57
+ [0, 1, 2, np.nan, np.nan, 3, 4], index=index, dtype=object, name="foo"
58
+ )
59
+ tm.assert_series_equal(result, expected)
60
+
61
+
62
+ def test_large():
63
+ s = pd.Series([range(256)]).explode()
64
+ result = s.explode()
65
+ tm.assert_series_equal(result, s)
66
+
67
+
68
+ def test_invert_array():
69
+ df = pd.DataFrame({"a": pd.date_range("20190101", periods=3, tz="UTC")})
70
+
71
+ listify = df.apply(lambda x: x.array, axis=1)
72
+ result = listify.explode()
73
+ tm.assert_series_equal(result, df["a"].rename())
74
+
75
+
76
+ @pytest.mark.parametrize(
77
+ "s", [pd.Series([1, 2, 3]), pd.Series(pd.date_range("2019", periods=3, tz="UTC"))]
78
+ )
79
+ def test_non_object_dtype(s):
80
+ result = s.explode()
81
+ tm.assert_series_equal(result, s)
82
+
83
+
84
+ def test_typical_usecase():
85
+ df = pd.DataFrame(
86
+ [{"var1": "a,b,c", "var2": 1}, {"var1": "d,e,f", "var2": 2}],
87
+ columns=["var1", "var2"],
88
+ )
89
+ exploded = df.var1.str.split(",").explode()
90
+ result = df[["var2"]].join(exploded)
91
+ expected = pd.DataFrame(
92
+ {"var2": [1, 1, 1, 2, 2, 2], "var1": list("abcdef")},
93
+ columns=["var2", "var1"],
94
+ index=[0, 0, 0, 1, 1, 1],
95
+ )
96
+ tm.assert_frame_equal(result, expected)
97
+
98
+
99
+ def test_nested_EA():
100
+ # a nested EA array
101
+ s = pd.Series(
102
+ [
103
+ pd.date_range("20170101", periods=3, tz="UTC"),
104
+ pd.date_range("20170104", periods=3, tz="UTC"),
105
+ ]
106
+ )
107
+ result = s.explode()
108
+ expected = pd.Series(
109
+ pd.date_range("20170101", periods=6, tz="UTC"), index=[0, 0, 0, 1, 1, 1]
110
+ )
111
+ tm.assert_series_equal(result, expected)
112
+
113
+
114
+ def test_duplicate_index():
115
+ # GH 28005
116
+ s = pd.Series([[1, 2], [3, 4]], index=[0, 0])
117
+ result = s.explode()
118
+ expected = pd.Series([1, 2, 3, 4], index=[0, 0, 0, 0], dtype=object)
119
+ tm.assert_series_equal(result, expected)
120
+
121
+
122
+ def test_ignore_index():
123
+ # GH 34932
124
+ s = pd.Series([[1, 2], [3, 4]])
125
+ result = s.explode(ignore_index=True)
126
+ expected = pd.Series([1, 2, 3, 4], index=[0, 1, 2, 3], dtype=object)
127
+ tm.assert_series_equal(result, expected)
128
+
129
+
130
+ def test_explode_sets():
131
+ # https://github.com/pandas-dev/pandas/issues/35614
132
+ s = pd.Series([{"a", "b", "c"}], index=[1])
133
+ result = s.explode().sort_values()
134
+ expected = pd.Series(["a", "b", "c"], index=[1, 1, 1])
135
+ tm.assert_series_equal(result, expected)
136
+
137
+
138
+ def test_explode_scalars_can_ignore_index():
139
+ # https://github.com/pandas-dev/pandas/issues/40487
140
+ s = pd.Series([1, 2, 3], index=["a", "b", "c"])
141
+ result = s.explode(ignore_index=True)
142
+ expected = pd.Series([1, 2, 3])
143
+ tm.assert_series_equal(result, expected)
144
+
145
+
146
+ @pytest.mark.parametrize("ignore_index", [True, False])
147
+ def test_explode_pyarrow_list_type(ignore_index):
148
+ # GH 53602
149
+ pa = pytest.importorskip("pyarrow")
150
+
151
+ data = [
152
+ [None, None],
153
+ [1],
154
+ [],
155
+ [2, 3],
156
+ None,
157
+ ]
158
+ ser = pd.Series(data, dtype=pd.ArrowDtype(pa.list_(pa.int64())))
159
+ result = ser.explode(ignore_index=ignore_index)
160
+ expected = pd.Series(
161
+ data=[None, None, 1, None, 2, 3, None],
162
+ index=None if ignore_index else [0, 0, 1, 2, 3, 3, 4],
163
+ dtype=pd.ArrowDtype(pa.int64()),
164
+ )
165
+ tm.assert_series_equal(result, expected)
166
+
167
+
168
+ @pytest.mark.parametrize("ignore_index", [True, False])
169
+ def test_explode_pyarrow_non_list_type(ignore_index):
170
+ pa = pytest.importorskip("pyarrow")
171
+ data = [1, 2, 3]
172
+ ser = pd.Series(data, dtype=pd.ArrowDtype(pa.int64()))
173
+ result = ser.explode(ignore_index=ignore_index)
174
+ expected = pd.Series([1, 2, 3], dtype="int64[pyarrow]", index=[0, 1, 2])
175
+ tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_fillna.py ADDED
@@ -0,0 +1,1155 @@
1
+ from datetime import (
2
+ datetime,
3
+ timedelta,
4
+ timezone,
5
+ )
6
+
7
+ import numpy as np
8
+ import pytest
9
+ import pytz
10
+
11
+ from pandas import (
12
+ Categorical,
13
+ DataFrame,
14
+ DatetimeIndex,
15
+ NaT,
16
+ Period,
17
+ Series,
18
+ Timedelta,
19
+ Timestamp,
20
+ date_range,
21
+ isna,
22
+ timedelta_range,
23
+ )
24
+ import pandas._testing as tm
25
+ from pandas.core.arrays import period_array
26
+
27
+
28
+ @pytest.mark.filterwarnings(
29
+ "ignore:(Series|DataFrame).fillna with 'method' is deprecated:FutureWarning"
30
+ )
31
+ class TestSeriesFillNA:
32
+ def test_fillna_nat(self):
33
+ series = Series([0, 1, 2, NaT._value], dtype="M8[ns]")
34
+
35
+ filled = series.fillna(method="pad")
36
+ filled2 = series.fillna(value=series.values[2])
37
+
38
+ expected = series.copy()
39
+ expected.iloc[3] = expected.iloc[2]
40
+
41
+ tm.assert_series_equal(filled, expected)
42
+ tm.assert_series_equal(filled2, expected)
43
+
44
+ df = DataFrame({"A": series})
45
+ filled = df.fillna(method="pad")
46
+ filled2 = df.fillna(value=series.values[2])
47
+ expected = DataFrame({"A": expected})
48
+ tm.assert_frame_equal(filled, expected)
49
+ tm.assert_frame_equal(filled2, expected)
50
+
51
+ series = Series([NaT._value, 0, 1, 2], dtype="M8[ns]")
52
+
53
+ filled = series.fillna(method="bfill")
54
+ filled2 = series.fillna(value=series[1])
55
+
56
+ expected = series.copy()
57
+ expected[0] = expected[1]
58
+
59
+ tm.assert_series_equal(filled, expected)
60
+ tm.assert_series_equal(filled2, expected)
61
+
62
+ df = DataFrame({"A": series})
63
+ filled = df.fillna(method="bfill")
64
+ filled2 = df.fillna(value=series[1])
65
+ expected = DataFrame({"A": expected})
66
+ tm.assert_frame_equal(filled, expected)
67
+ tm.assert_frame_equal(filled2, expected)
68
+
69
+ def test_fillna_value_or_method(self, datetime_series):
70
+ msg = "Cannot specify both 'value' and 'method'"
71
+ with pytest.raises(ValueError, match=msg):
72
+ datetime_series.fillna(value=0, method="ffill")
73
+
74
+ def test_fillna(self):
75
+ ts = Series(
76
+ [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
77
+ )
78
+
79
+ tm.assert_series_equal(ts, ts.fillna(method="ffill"))
80
+
81
+ ts.iloc[2] = np.nan
82
+
83
+ exp = Series([0.0, 1.0, 1.0, 3.0, 4.0], index=ts.index)
84
+ tm.assert_series_equal(ts.fillna(method="ffill"), exp)
85
+
86
+ exp = Series([0.0, 1.0, 3.0, 3.0, 4.0], index=ts.index)
87
+ tm.assert_series_equal(ts.fillna(method="backfill"), exp)
88
+
89
+ exp = Series([0.0, 1.0, 5.0, 3.0, 4.0], index=ts.index)
90
+ tm.assert_series_equal(ts.fillna(value=5), exp)
91
+
92
+ msg = "Must specify a fill 'value' or 'method'"
93
+ with pytest.raises(ValueError, match=msg):
94
+ ts.fillna()
95
+
96
+ def test_fillna_nonscalar(self):
97
+ # GH#5703
98
+ s1 = Series([np.nan])
99
+ s2 = Series([1])
100
+ result = s1.fillna(s2)
101
+ expected = Series([1.0])
102
+ tm.assert_series_equal(result, expected)
103
+ result = s1.fillna({})
104
+ tm.assert_series_equal(result, s1)
105
+ result = s1.fillna(Series((), dtype=object))
106
+ tm.assert_series_equal(result, s1)
107
+ result = s2.fillna(s1)
108
+ tm.assert_series_equal(result, s2)
109
+ result = s1.fillna({0: 1})
110
+ tm.assert_series_equal(result, expected)
111
+ result = s1.fillna({1: 1})
112
+ tm.assert_series_equal(result, Series([np.nan]))
113
+ result = s1.fillna({0: 1, 1: 1})
114
+ tm.assert_series_equal(result, expected)
115
+ result = s1.fillna(Series({0: 1, 1: 1}))
116
+ tm.assert_series_equal(result, expected)
117
+ result = s1.fillna(Series({0: 1, 1: 1}, index=[4, 5]))
118
+ tm.assert_series_equal(result, s1)
119
+
120
+ def test_fillna_aligns(self):
121
+ s1 = Series([0, 1, 2], list("abc"))
122
+ s2 = Series([0, np.nan, 2], list("bac"))
123
+ result = s2.fillna(s1)
124
+ expected = Series([0, 0, 2.0], list("bac"))
125
+ tm.assert_series_equal(result, expected)
126
+
127
+ def test_fillna_limit(self):
128
+ ser = Series(np.nan, index=[0, 1, 2])
129
+ result = ser.fillna(999, limit=1)
130
+ expected = Series([999, np.nan, np.nan], index=[0, 1, 2])
131
+ tm.assert_series_equal(result, expected)
132
+
133
+ result = ser.fillna(999, limit=2)
134
+ expected = Series([999, 999, np.nan], index=[0, 1, 2])
135
+ tm.assert_series_equal(result, expected)
136
+
137
+ def test_fillna_dont_cast_strings(self):
138
+ # GH#9043
139
+ # make sure a string representation of int/float values can be filled
140
+ # correctly without raising errors or being converted
141
+ vals = ["0", "1.5", "-0.3"]
142
+ for val in vals:
143
+ ser = Series([0, 1, np.nan, np.nan, 4], dtype="float64")
144
+ result = ser.fillna(val)
145
+ expected = Series([0, 1, val, val, 4], dtype="object")
146
+ tm.assert_series_equal(result, expected)
147
+
148
+ def test_fillna_consistency(self):
149
+ # GH#16402
150
+ # fillna with a tz aware to a tz-naive, should result in object
151
+
152
+ ser = Series([Timestamp("20130101"), NaT])
153
+
154
+ result = ser.fillna(Timestamp("20130101", tz="US/Eastern"))
155
+ expected = Series(
156
+ [Timestamp("20130101"), Timestamp("2013-01-01", tz="US/Eastern")],
157
+ dtype="object",
158
+ )
159
+ tm.assert_series_equal(result, expected)
160
+
161
+ result = ser.where([True, False], Timestamp("20130101", tz="US/Eastern"))
162
+ tm.assert_series_equal(result, expected)
163
+
164
+ result = ser.where([True, False], Timestamp("20130101", tz="US/Eastern"))
165
+ tm.assert_series_equal(result, expected)
166
+
167
+ # with a non-datetime
168
+ result = ser.fillna("foo")
169
+ expected = Series([Timestamp("20130101"), "foo"])
170
+ tm.assert_series_equal(result, expected)
171
+
172
+ # assignment
173
+ ser2 = ser.copy()
174
+ with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
175
+ ser2[1] = "foo"
176
+ tm.assert_series_equal(ser2, expected)
177
+
178
+ def test_fillna_downcast(self):
179
+ # GH#15277
180
+ # infer int64 from float64
181
+ ser = Series([1.0, np.nan])
182
+ msg = "The 'downcast' keyword in fillna is deprecated"
183
+ with tm.assert_produces_warning(FutureWarning, match=msg):
184
+ result = ser.fillna(0, downcast="infer")
185
+ expected = Series([1, 0])
186
+ tm.assert_series_equal(result, expected)
187
+
188
+ # infer int64 from float64 when fillna value is a dict
189
+ ser = Series([1.0, np.nan])
190
+ with tm.assert_produces_warning(FutureWarning, match=msg):
191
+ result = ser.fillna({1: 0}, downcast="infer")
192
+ expected = Series([1, 0])
193
+ tm.assert_series_equal(result, expected)
194
+
195
+ def test_fillna_downcast_infer_objects_to_numeric(self):
196
+ # GH#44241 if we have object-dtype, 'downcast="infer"' should
197
+ # _actually_ infer
198
+
199
+ arr = np.arange(5).astype(object)
200
+ arr[3] = np.nan
201
+
202
+ ser = Series(arr)
203
+
204
+ msg = "The 'downcast' keyword in fillna is deprecated"
205
+ with tm.assert_produces_warning(FutureWarning, match=msg):
206
+ res = ser.fillna(3, downcast="infer")
207
+ expected = Series(np.arange(5), dtype=np.int64)
208
+ tm.assert_series_equal(res, expected)
209
+
210
+ msg = "The 'downcast' keyword in ffill is deprecated"
211
+ with tm.assert_produces_warning(FutureWarning, match=msg):
212
+ res = ser.ffill(downcast="infer")
213
+ expected = Series([0, 1, 2, 2, 4], dtype=np.int64)
214
+ tm.assert_series_equal(res, expected)
215
+
216
+ msg = "The 'downcast' keyword in bfill is deprecated"
217
+ with tm.assert_produces_warning(FutureWarning, match=msg):
218
+ res = ser.bfill(downcast="infer")
219
+ expected = Series([0, 1, 2, 4, 4], dtype=np.int64)
220
+ tm.assert_series_equal(res, expected)
221
+
222
+ # with a non-round float present, we will downcast to float64
223
+ ser[2] = 2.5
224
+
225
+ expected = Series([0, 1, 2.5, 3, 4], dtype=np.float64)
226
+ msg = "The 'downcast' keyword in fillna is deprecated"
227
+ with tm.assert_produces_warning(FutureWarning, match=msg):
228
+ res = ser.fillna(3, downcast="infer")
229
+ tm.assert_series_equal(res, expected)
230
+
231
+ msg = "The 'downcast' keyword in ffill is deprecated"
232
+ with tm.assert_produces_warning(FutureWarning, match=msg):
233
+ res = ser.ffill(downcast="infer")
234
+ expected = Series([0, 1, 2.5, 2.5, 4], dtype=np.float64)
235
+ tm.assert_series_equal(res, expected)
236
+
237
+ msg = "The 'downcast' keyword in bfill is deprecated"
238
+ with tm.assert_produces_warning(FutureWarning, match=msg):
239
+ res = ser.bfill(downcast="infer")
240
+ expected = Series([0, 1, 2.5, 4, 4], dtype=np.float64)
241
+ tm.assert_series_equal(res, expected)
242
+
243
+ def test_timedelta_fillna(self, frame_or_series, unit):
244
+ # GH#3371
245
+ ser = Series(
246
+ [
247
+ Timestamp("20130101"),
248
+ Timestamp("20130101"),
249
+ Timestamp("20130102"),
250
+ Timestamp("20130103 9:01:01"),
251
+ ],
252
+ dtype=f"M8[{unit}]",
253
+ )
254
+ td = ser.diff()
255
+ obj = frame_or_series(td).copy()
256
+
257
+ # reg fillna
258
+ result = obj.fillna(Timedelta(seconds=0))
259
+ expected = Series(
260
+ [
261
+ timedelta(0),
262
+ timedelta(0),
263
+ timedelta(1),
264
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
265
+ ],
266
+ dtype=f"m8[{unit}]",
267
+ )
268
+ expected = frame_or_series(expected)
269
+ tm.assert_equal(result, expected)
270
+
271
+ # GH#45746 pre-1.? ints were interpreted as seconds. then that was
272
+ # deprecated and changed to raise. In 2.0 it casts to common dtype,
273
+ # consistent with every other dtype's behavior
274
+ res = obj.fillna(1)
275
+ expected = obj.astype(object).fillna(1)
276
+ tm.assert_equal(res, expected)
277
+
278
+ result = obj.fillna(Timedelta(seconds=1))
279
+ expected = Series(
280
+ [
281
+ timedelta(seconds=1),
282
+ timedelta(0),
283
+ timedelta(1),
284
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
285
+ ],
286
+ dtype=f"m8[{unit}]",
287
+ )
288
+ expected = frame_or_series(expected)
289
+ tm.assert_equal(result, expected)
290
+
291
+ result = obj.fillna(timedelta(days=1, seconds=1))
292
+ expected = Series(
293
+ [
294
+ timedelta(days=1, seconds=1),
295
+ timedelta(0),
296
+ timedelta(1),
297
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
298
+ ],
299
+ dtype=f"m8[{unit}]",
300
+ )
301
+ expected = frame_or_series(expected)
302
+ tm.assert_equal(result, expected)
303
+
304
+ result = obj.fillna(np.timedelta64(10**9))
305
+ expected = Series(
306
+ [
307
+ timedelta(seconds=1),
308
+ timedelta(0),
309
+ timedelta(1),
310
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
311
+ ],
312
+ dtype=f"m8[{unit}]",
313
+ )
314
+ expected = frame_or_series(expected)
315
+ tm.assert_equal(result, expected)
316
+
317
+ result = obj.fillna(NaT)
318
+ expected = Series(
319
+ [
320
+ NaT,
321
+ timedelta(0),
322
+ timedelta(1),
323
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
324
+ ],
325
+ dtype=f"m8[{unit}]",
326
+ )
327
+ expected = frame_or_series(expected)
328
+ tm.assert_equal(result, expected)
329
+
330
+ # ffill
331
+ td[2] = np.nan
332
+ obj = frame_or_series(td).copy()
333
+ result = obj.ffill()
334
+ expected = td.fillna(Timedelta(seconds=0))
335
+ expected[0] = np.nan
336
+ expected = frame_or_series(expected)
337
+
338
+ tm.assert_equal(result, expected)
339
+
340
+ # bfill
341
+ td[2] = np.nan
342
+ obj = frame_or_series(td)
343
+ result = obj.bfill()
344
+ expected = td.fillna(Timedelta(seconds=0))
345
+ expected[2] = timedelta(days=1, seconds=9 * 3600 + 60 + 1)
346
+ expected = frame_or_series(expected)
347
+ tm.assert_equal(result, expected)
348
+
349
+ def test_datetime64_fillna(self):
350
+ ser = Series(
351
+ [
352
+ Timestamp("20130101"),
353
+ Timestamp("20130101"),
354
+ Timestamp("20130102"),
355
+ Timestamp("20130103 9:01:01"),
356
+ ]
357
+ )
358
+ ser[2] = np.nan
359
+
360
+ # ffill
361
+ result = ser.ffill()
362
+ expected = Series(
363
+ [
364
+ Timestamp("20130101"),
365
+ Timestamp("20130101"),
366
+ Timestamp("20130101"),
367
+ Timestamp("20130103 9:01:01"),
368
+ ]
369
+ )
370
+ tm.assert_series_equal(result, expected)
371
+
372
+ # bfill
373
+ result = ser.bfill()
374
+ expected = Series(
375
+ [
376
+ Timestamp("20130101"),
377
+ Timestamp("20130101"),
378
+ Timestamp("20130103 9:01:01"),
379
+ Timestamp("20130103 9:01:01"),
380
+ ]
381
+ )
382
+ tm.assert_series_equal(result, expected)
383
+
384
+ @pytest.mark.parametrize(
385
+ "scalar",
386
+ [
387
+ False,
388
+ pytest.param(
389
+ True,
390
+ marks=pytest.mark.xfail(
391
+ reason="GH#56410 scalar case not yet addressed"
392
+ ),
393
+ ),
394
+ ],
395
+ )
396
+ @pytest.mark.parametrize("tz", [None, "UTC"])
397
+ def test_datetime64_fillna_mismatched_reso_no_rounding(self, tz, scalar):
398
+ # GH#56410
399
+ dti = date_range("2016-01-01", periods=3, unit="s", tz=tz)
400
+ item = Timestamp("2016-02-03 04:05:06.789", tz=tz)
401
+ vec = date_range(item, periods=3, unit="ms")
402
+
403
+ exp_dtype = "M8[ms]" if tz is None else "M8[ms, UTC]"
404
+ expected = Series([item, dti[1], dti[2]], dtype=exp_dtype)
405
+
406
+ ser = Series(dti)
407
+ ser[0] = NaT
408
+ ser2 = ser.copy()
409
+
410
+ res = ser.fillna(item)
411
+ res2 = ser2.fillna(Series(vec))
412
+
413
+ if scalar:
414
+ tm.assert_series_equal(res, expected)
415
+ else:
416
+ tm.assert_series_equal(res2, expected)
417
+
418
+ @pytest.mark.parametrize(
419
+ "scalar",
420
+ [
421
+ False,
422
+ pytest.param(
423
+ True,
424
+ marks=pytest.mark.xfail(
425
+ reason="GH#56410 scalar case not yet addressed"
426
+ ),
427
+ ),
428
+ ],
429
+ )
430
+ def test_timedelta64_fillna_mismatched_reso_no_rounding(self, scalar):
431
+ # GH#56410
432
+ tdi = date_range("2016-01-01", periods=3, unit="s") - Timestamp("1970-01-01")
433
+ item = Timestamp("2016-02-03 04:05:06.789") - Timestamp("1970-01-01")
434
+ vec = timedelta_range(item, periods=3, unit="ms")
435
+
436
+ expected = Series([item, tdi[1], tdi[2]], dtype="m8[ms]")
437
+
438
+ ser = Series(tdi)
439
+ ser[0] = NaT
440
+ ser2 = ser.copy()
441
+
442
+ res = ser.fillna(item)
443
+ res2 = ser2.fillna(Series(vec))
444
+
445
+ if scalar:
446
+ tm.assert_series_equal(res, expected)
447
+ else:
448
+ tm.assert_series_equal(res2, expected)
449
+
450
+ def test_datetime64_fillna_backfill(self):
451
+ # GH#6587
452
+ # make sure that we are treating as integer when filling
453
+ ser = Series([NaT, NaT, "2013-08-05 15:30:00.000001"], dtype="M8[ns]")
454
+
455
+ expected = Series(
456
+ [
457
+ "2013-08-05 15:30:00.000001",
458
+ "2013-08-05 15:30:00.000001",
459
+ "2013-08-05 15:30:00.000001",
460
+ ],
461
+ dtype="M8[ns]",
462
+ )
463
+ result = ser.fillna(method="backfill")
464
+ tm.assert_series_equal(result, expected)
465
+
466
+ @pytest.mark.parametrize("tz", ["US/Eastern", "Asia/Tokyo"])
467
+ def test_datetime64_tz_fillna(self, tz, unit):
468
+ # DatetimeLikeBlock
469
+ ser = Series(
470
+ [
471
+ Timestamp("2011-01-01 10:00"),
472
+ NaT,
473
+ Timestamp("2011-01-03 10:00"),
474
+ NaT,
475
+ ],
476
+ dtype=f"M8[{unit}]",
477
+ )
478
+ null_loc = Series([False, True, False, True])
479
+
480
+ result = ser.fillna(Timestamp("2011-01-02 10:00"))
481
+ expected = Series(
482
+ [
483
+ Timestamp("2011-01-01 10:00"),
484
+ Timestamp("2011-01-02 10:00"),
485
+ Timestamp("2011-01-03 10:00"),
486
+ Timestamp("2011-01-02 10:00"),
487
+ ],
488
+ dtype=f"M8[{unit}]",
489
+ )
490
+ tm.assert_series_equal(expected, result)
491
+ # check s is not changed
492
+ tm.assert_series_equal(isna(ser), null_loc)
493
+
494
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz))
495
+ expected = Series(
496
+ [
497
+ Timestamp("2011-01-01 10:00"),
498
+ Timestamp("2011-01-02 10:00", tz=tz),
499
+ Timestamp("2011-01-03 10:00"),
500
+ Timestamp("2011-01-02 10:00", tz=tz),
501
+ ]
502
+ )
503
+ tm.assert_series_equal(expected, result)
504
+ tm.assert_series_equal(isna(ser), null_loc)
505
+
506
+ result = ser.fillna("AAA")
507
+ expected = Series(
508
+ [
509
+ Timestamp("2011-01-01 10:00"),
510
+ "AAA",
511
+ Timestamp("2011-01-03 10:00"),
512
+ "AAA",
513
+ ],
514
+ dtype=object,
515
+ )
516
+ tm.assert_series_equal(expected, result)
517
+ tm.assert_series_equal(isna(ser), null_loc)
518
+
519
+ result = ser.fillna(
520
+ {
521
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
522
+ 3: Timestamp("2011-01-04 10:00"),
523
+ }
524
+ )
525
+ expected = Series(
526
+ [
527
+ Timestamp("2011-01-01 10:00"),
528
+ Timestamp("2011-01-02 10:00", tz=tz),
529
+ Timestamp("2011-01-03 10:00"),
530
+ Timestamp("2011-01-04 10:00"),
531
+ ]
532
+ )
533
+ tm.assert_series_equal(expected, result)
534
+ tm.assert_series_equal(isna(ser), null_loc)
535
+
536
+ result = ser.fillna(
537
+ {1: Timestamp("2011-01-02 10:00"), 3: Timestamp("2011-01-04 10:00")}
538
+ )
539
+ expected = Series(
540
+ [
541
+ Timestamp("2011-01-01 10:00"),
542
+ Timestamp("2011-01-02 10:00"),
543
+ Timestamp("2011-01-03 10:00"),
544
+ Timestamp("2011-01-04 10:00"),
545
+ ],
546
+ dtype=f"M8[{unit}]",
547
+ )
548
+ tm.assert_series_equal(expected, result)
549
+ tm.assert_series_equal(isna(ser), null_loc)
550
+
551
+ # DatetimeTZBlock
552
+ idx = DatetimeIndex(
553
+ ["2011-01-01 10:00", NaT, "2011-01-03 10:00", NaT], tz=tz
554
+ ).as_unit(unit)
555
+ ser = Series(idx)
556
+ assert ser.dtype == f"datetime64[{unit}, {tz}]"
557
+ tm.assert_series_equal(isna(ser), null_loc)
558
+
559
+ result = ser.fillna(Timestamp("2011-01-02 10:00"))
560
+ expected = Series(
561
+ [
562
+ Timestamp("2011-01-01 10:00", tz=tz),
563
+ Timestamp("2011-01-02 10:00"),
564
+ Timestamp("2011-01-03 10:00", tz=tz),
565
+ Timestamp("2011-01-02 10:00"),
566
+ ]
567
+ )
568
+ tm.assert_series_equal(expected, result)
569
+ tm.assert_series_equal(isna(ser), null_loc)
570
+
571
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz))
572
+ idx = DatetimeIndex(
573
+ [
574
+ "2011-01-01 10:00",
575
+ "2011-01-02 10:00",
576
+ "2011-01-03 10:00",
577
+ "2011-01-02 10:00",
578
+ ],
579
+ tz=tz,
580
+ ).as_unit(unit)
581
+ expected = Series(idx)
582
+ tm.assert_series_equal(expected, result)
583
+ tm.assert_series_equal(isna(ser), null_loc)
584
+
585
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz).to_pydatetime())
586
+ idx = DatetimeIndex(
587
+ [
588
+ "2011-01-01 10:00",
589
+ "2011-01-02 10:00",
590
+ "2011-01-03 10:00",
591
+ "2011-01-02 10:00",
592
+ ],
593
+ tz=tz,
594
+ ).as_unit(unit)
595
+ expected = Series(idx)
596
+ tm.assert_series_equal(expected, result)
597
+ tm.assert_series_equal(isna(ser), null_loc)
598
+
599
+ result = ser.fillna("AAA")
600
+ expected = Series(
601
+ [
602
+ Timestamp("2011-01-01 10:00", tz=tz),
603
+ "AAA",
604
+ Timestamp("2011-01-03 10:00", tz=tz),
605
+ "AAA",
606
+ ],
607
+ dtype=object,
608
+ )
609
+ tm.assert_series_equal(expected, result)
610
+ tm.assert_series_equal(isna(ser), null_loc)
611
+
612
+ result = ser.fillna(
613
+ {
614
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
615
+ 3: Timestamp("2011-01-04 10:00"),
616
+ }
617
+ )
618
+ expected = Series(
619
+ [
620
+ Timestamp("2011-01-01 10:00", tz=tz),
621
+ Timestamp("2011-01-02 10:00", tz=tz),
622
+ Timestamp("2011-01-03 10:00", tz=tz),
623
+ Timestamp("2011-01-04 10:00"),
624
+ ]
625
+ )
626
+ tm.assert_series_equal(expected, result)
627
+ tm.assert_series_equal(isna(ser), null_loc)
628
+
629
+ result = ser.fillna(
630
+ {
631
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
632
+ 3: Timestamp("2011-01-04 10:00", tz=tz),
633
+ }
634
+ )
635
+ expected = Series(
636
+ [
637
+ Timestamp("2011-01-01 10:00", tz=tz),
638
+ Timestamp("2011-01-02 10:00", tz=tz),
639
+ Timestamp("2011-01-03 10:00", tz=tz),
640
+ Timestamp("2011-01-04 10:00", tz=tz),
641
+ ]
642
+ ).dt.as_unit(unit)
643
+ tm.assert_series_equal(expected, result)
644
+ tm.assert_series_equal(isna(ser), null_loc)
645
+
646
+ # filling with a naive/other zone, coerce to object
647
+ result = ser.fillna(Timestamp("20130101"))
648
+ expected = Series(
649
+ [
650
+ Timestamp("2011-01-01 10:00", tz=tz),
651
+ Timestamp("2013-01-01"),
652
+ Timestamp("2011-01-03 10:00", tz=tz),
653
+ Timestamp("2013-01-01"),
654
+ ]
655
+ )
656
+ tm.assert_series_equal(expected, result)
657
+ tm.assert_series_equal(isna(ser), null_loc)
658
+
659
+ # pre-2.0 fillna with mixed tzs would cast to object, in 2.0
660
+ # it retains dtype.
661
+ result = ser.fillna(Timestamp("20130101", tz="US/Pacific"))
662
+ expected = Series(
663
+ [
664
+ Timestamp("2011-01-01 10:00", tz=tz),
665
+ Timestamp("2013-01-01", tz="US/Pacific").tz_convert(tz),
666
+ Timestamp("2011-01-03 10:00", tz=tz),
667
+ Timestamp("2013-01-01", tz="US/Pacific").tz_convert(tz),
668
+ ]
669
+ ).dt.as_unit(unit)
670
+ tm.assert_series_equal(expected, result)
671
+ tm.assert_series_equal(isna(ser), null_loc)
672
+
673
+ def test_fillna_dt64tz_with_method(self):
674
+ # with timezone
675
+ # GH#15855
676
+ ser = Series([Timestamp("2012-11-11 00:00:00+01:00"), NaT])
677
+ exp = Series(
678
+ [
679
+ Timestamp("2012-11-11 00:00:00+01:00"),
680
+ Timestamp("2012-11-11 00:00:00+01:00"),
681
+ ]
682
+ )
683
+ tm.assert_series_equal(ser.fillna(method="pad"), exp)
684
+
685
+ ser = Series([NaT, Timestamp("2012-11-11 00:00:00+01:00")])
686
+ exp = Series(
687
+ [
688
+ Timestamp("2012-11-11 00:00:00+01:00"),
689
+ Timestamp("2012-11-11 00:00:00+01:00"),
690
+ ]
691
+ )
692
+ tm.assert_series_equal(ser.fillna(method="bfill"), exp)
693
+
694
+ def test_fillna_pytimedelta(self):
695
+ # GH#8209
696
+ ser = Series([np.nan, Timedelta("1 days")], index=["A", "B"])
697
+
698
+ result = ser.fillna(timedelta(1))
699
+ expected = Series(Timedelta("1 days"), index=["A", "B"])
700
+ tm.assert_series_equal(result, expected)
701
+
702
+ def test_fillna_period(self):
703
+ # GH#13737
704
+ ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])
705
+
706
+ res = ser.fillna(Period("2012-01", freq="M"))
707
+ exp = Series([Period("2011-01", freq="M"), Period("2012-01", freq="M")])
708
+ tm.assert_series_equal(res, exp)
709
+ assert res.dtype == "Period[M]"
710
+
711
+ def test_fillna_dt64_timestamp(self, frame_or_series):
712
+ ser = Series(
713
+ [
714
+ Timestamp("20130101"),
715
+ Timestamp("20130101"),
716
+ Timestamp("20130102"),
717
+ Timestamp("20130103 9:01:01"),
718
+ ]
719
+ )
720
+ ser[2] = np.nan
721
+ obj = frame_or_series(ser)
722
+
723
+ # reg fillna
724
+ result = obj.fillna(Timestamp("20130104"))
725
+ expected = Series(
726
+ [
727
+ Timestamp("20130101"),
728
+ Timestamp("20130101"),
729
+ Timestamp("20130104"),
730
+ Timestamp("20130103 9:01:01"),
731
+ ]
732
+ )
733
+ expected = frame_or_series(expected)
734
+ tm.assert_equal(result, expected)
735
+
736
+ result = obj.fillna(NaT)
737
+ expected = obj
738
+ tm.assert_equal(result, expected)
739
+
740
+ def test_fillna_dt64_non_nao(self):
741
+ # GH#27419
742
+ ser = Series([Timestamp("2010-01-01"), NaT, Timestamp("2000-01-01")])
743
+ val = np.datetime64("1975-04-05", "ms")
744
+
745
+ result = ser.fillna(val)
746
+ expected = Series(
747
+ [Timestamp("2010-01-01"), Timestamp("1975-04-05"), Timestamp("2000-01-01")]
748
+ )
749
+ tm.assert_series_equal(result, expected)
750
+
751
+ def test_fillna_numeric_inplace(self):
752
+ x = Series([np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"])
753
+ y = x.copy()
754
+
755
+ return_value = y.fillna(value=0, inplace=True)
756
+ assert return_value is None
757
+
758
+ expected = x.fillna(value=0)
759
+ tm.assert_series_equal(y, expected)
760
+
761
+ # ---------------------------------------------------------------
762
+ # CategoricalDtype
763
+
764
+ @pytest.mark.parametrize(
765
+ "fill_value, expected_output",
766
+ [
767
+ ("a", ["a", "a", "b", "a", "a"]),
768
+ ({1: "a", 3: "b", 4: "b"}, ["a", "a", "b", "b", "b"]),
769
+ ({1: "a"}, ["a", "a", "b", np.nan, np.nan]),
770
+ ({1: "a", 3: "b"}, ["a", "a", "b", "b", np.nan]),
771
+ (Series("a"), ["a", np.nan, "b", np.nan, np.nan]),
772
+ (Series("a", index=[1]), ["a", "a", "b", np.nan, np.nan]),
773
+ (Series({1: "a", 3: "b"}), ["a", "a", "b", "b", np.nan]),
774
+ (Series(["a", "b"], index=[3, 4]), ["a", np.nan, "b", "a", "b"]),
775
+ ],
776
+ )
777
+ def test_fillna_categorical(self, fill_value, expected_output):
778
+ # GH#17033
779
+ # Test fillna for a Categorical series
780
+ data = ["a", np.nan, "b", np.nan, np.nan]
781
+ ser = Series(Categorical(data, categories=["a", "b"]))
782
+ exp = Series(Categorical(expected_output, categories=["a", "b"]))
783
+ result = ser.fillna(fill_value)
784
+ tm.assert_series_equal(result, exp)
785
+
786
+ @pytest.mark.parametrize(
787
+ "fill_value, expected_output",
788
+ [
789
+ (Series(["a", "b", "c", "d", "e"]), ["a", "b", "b", "d", "e"]),
790
+ (Series(["b", "d", "a", "d", "a"]), ["a", "d", "b", "d", "a"]),
791
+ (
792
+ Series(
793
+ Categorical(
794
+ ["b", "d", "a", "d", "a"], categories=["b", "c", "d", "e", "a"]
795
+ )
796
+ ),
797
+ ["a", "d", "b", "d", "a"],
798
+ ),
799
+ ],
800
+ )
801
+ def test_fillna_categorical_with_new_categories(self, fill_value, expected_output):
802
+ # GH#26215
803
+ data = ["a", np.nan, "b", np.nan, np.nan]
804
+ ser = Series(Categorical(data, categories=["a", "b", "c", "d", "e"]))
805
+ exp = Series(Categorical(expected_output, categories=["a", "b", "c", "d", "e"]))
806
+ result = ser.fillna(fill_value)
807
+ tm.assert_series_equal(result, exp)
808
+
809
+ def test_fillna_categorical_raises(self):
810
+ data = ["a", np.nan, "b", np.nan, np.nan]
811
+ ser = Series(Categorical(data, categories=["a", "b"]))
812
+ cat = ser._values
813
+
814
+ msg = "Cannot setitem on a Categorical with a new category"
815
+ with pytest.raises(TypeError, match=msg):
816
+ ser.fillna("d")
817
+
818
+ msg2 = "Length of 'value' does not match."
819
+ with pytest.raises(ValueError, match=msg2):
820
+ cat.fillna(Series("d"))
821
+
822
+ with pytest.raises(TypeError, match=msg):
823
+ ser.fillna({1: "d", 3: "a"})
824
+
825
+ msg = '"value" parameter must be a scalar or dict, but you passed a "list"'
826
+ with pytest.raises(TypeError, match=msg):
827
+ ser.fillna(["a", "b"])
828
+
829
+ msg = '"value" parameter must be a scalar or dict, but you passed a "tuple"'
830
+ with pytest.raises(TypeError, match=msg):
831
+ ser.fillna(("a", "b"))
832
+
833
+ msg = (
834
+ '"value" parameter must be a scalar, dict '
835
+ 'or Series, but you passed a "DataFrame"'
836
+ )
837
+ with pytest.raises(TypeError, match=msg):
838
+ ser.fillna(DataFrame({1: ["a"], 3: ["b"]}))
839
+
840
+ @pytest.mark.parametrize("dtype", [float, "float32", "float64"])
841
+ @pytest.mark.parametrize("fill_type", tm.ALL_REAL_NUMPY_DTYPES)
842
+ @pytest.mark.parametrize("scalar", [True, False])
843
+ def test_fillna_float_casting(self, dtype, fill_type, scalar):
844
+ # GH-43424
845
+ ser = Series([np.nan, 1.2], dtype=dtype)
846
+ fill_values = Series([2, 2], dtype=fill_type)
847
+ if scalar:
848
+ fill_values = fill_values.dtype.type(2)
849
+
850
+ result = ser.fillna(fill_values)
851
+ expected = Series([2.0, 1.2], dtype=dtype)
852
+ tm.assert_series_equal(result, expected)
853
+
854
+ ser = Series([np.nan, 1.2], dtype=dtype)
855
+ mask = ser.isna().to_numpy()
856
+ ser[mask] = fill_values
857
+ tm.assert_series_equal(ser, expected)
858
+
859
+ ser = Series([np.nan, 1.2], dtype=dtype)
860
+ ser.mask(mask, fill_values, inplace=True)
861
+ tm.assert_series_equal(ser, expected)
862
+
863
+ ser = Series([np.nan, 1.2], dtype=dtype)
864
+ res = ser.where(~mask, fill_values)
865
+ tm.assert_series_equal(res, expected)
866
+
867
+ def test_fillna_f32_upcast_with_dict(self):
868
+ # GH-43424
869
+ ser = Series([np.nan, 1.2], dtype=np.float32)
870
+ result = ser.fillna({0: 1})
871
+ expected = Series([1.0, 1.2], dtype=np.float32)
872
+ tm.assert_series_equal(result, expected)
873
+
874
+ # ---------------------------------------------------------------
875
+ # Invalid Usages
876
+
877
+ def test_fillna_invalid_method(self, datetime_series):
878
+ try:
879
+ datetime_series.fillna(method="ffil")
880
+ except ValueError as inst:
881
+ assert "ffil" in str(inst)
882
+
883
+ def test_fillna_listlike_invalid(self):
884
+ ser = Series(np.random.default_rng(2).integers(-100, 100, 50))
885
+ msg = '"value" parameter must be a scalar or dict, but you passed a "list"'
886
+ with pytest.raises(TypeError, match=msg):
887
+ ser.fillna([1, 2])
888
+
889
+ msg = '"value" parameter must be a scalar or dict, but you passed a "tuple"'
890
+ with pytest.raises(TypeError, match=msg):
891
+ ser.fillna((1, 2))
892
+
893
+ def test_fillna_method_and_limit_invalid(self):
894
+ # related GH#9217, make sure limit is an int and greater than 0
895
+ ser = Series([1, 2, 3, None])
896
+ msg = "|".join(
897
+ [
898
+ r"Cannot specify both 'value' and 'method'\.",
899
+ "Limit must be greater than 0",
900
+ "Limit must be an integer",
901
+ ]
902
+ )
903
+ for limit in [-1, 0, 1.0, 2.0]:
904
+ for method in ["backfill", "bfill", "pad", "ffill", None]:
905
+ with pytest.raises(ValueError, match=msg):
906
+ ser.fillna(1, limit=limit, method=method)
907
+
908
+ def test_fillna_datetime64_with_timezone_tzinfo(self):
909
+ # https://github.com/pandas-dev/pandas/issues/38851
910
+ # different tzinfos representing UTC treated as equal
911
+ ser = Series(date_range("2020", periods=3, tz="UTC"))
912
+ expected = ser.copy()
913
+ ser[1] = NaT
914
+ result = ser.fillna(datetime(2020, 1, 2, tzinfo=timezone.utc))
915
+ tm.assert_series_equal(result, expected)
916
+
917
+ # pre-2.0 we cast to object with mixed tzs, in 2.0 we retain dtype
918
+ ts = Timestamp("2000-01-01", tz="US/Pacific")
919
+ ser2 = Series(ser._values.tz_convert("dateutil/US/Pacific"))
920
+ assert ser2.dtype.kind == "M"
921
+ result = ser2.fillna(ts)
922
+ expected = Series(
923
+ [ser2[0], ts.tz_convert(ser2.dtype.tz), ser2[2]],
924
+ dtype=ser2.dtype,
925
+ )
926
+ tm.assert_series_equal(result, expected)
927
+
928
+ @pytest.mark.parametrize(
929
+ "input, input_fillna, expected_data, expected_categories",
930
+ [
931
+ (["A", "B", None, "A"], "B", ["A", "B", "B", "A"], ["A", "B"]),
932
+ (["A", "B", np.nan, "A"], "B", ["A", "B", "B", "A"], ["A", "B"]),
933
+ ],
934
+ )
935
+ def test_fillna_categorical_accept_same_type(
936
+ self, input, input_fillna, expected_data, expected_categories
937
+ ):
938
+ # GH32414
939
+ cat = Categorical(input)
940
+ ser = Series(cat).fillna(input_fillna)
941
+ filled = cat.fillna(ser)
942
+ result = cat.fillna(filled)
943
+ expected = Categorical(expected_data, categories=expected_categories)
944
+ tm.assert_categorical_equal(result, expected)
945
+
946
+
947
+ @pytest.mark.filterwarnings(
948
+ "ignore:Series.fillna with 'method' is deprecated:FutureWarning"
949
+ )
950
+ class TestFillnaPad:
951
+ def test_fillna_bug(self):
952
+ ser = Series([np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"])
953
+ filled = ser.fillna(method="ffill")
954
+ expected = Series([np.nan, 1.0, 1.0, 3.0, 3.0], ser.index)
955
+ tm.assert_series_equal(filled, expected)
956
+
957
+ filled = ser.fillna(method="bfill")
958
+ expected = Series([1.0, 1.0, 3.0, 3.0, np.nan], ser.index)
959
+ tm.assert_series_equal(filled, expected)
960
+
961
+ def test_ffill(self):
962
+ ts = Series(
963
+ [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
964
+ )
965
+ ts.iloc[2] = np.nan
966
+ tm.assert_series_equal(ts.ffill(), ts.fillna(method="ffill"))
967
+
968
+ def test_ffill_mixed_dtypes_without_missing_data(self):
969
+ # GH#14956
970
+ series = Series([datetime(2015, 1, 1, tzinfo=pytz.utc), 1])
971
+ result = series.ffill()
972
+ tm.assert_series_equal(series, result)
973
+
974
+ def test_bfill(self):
975
+ ts = Series(
976
+ [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
977
+ )
978
+ ts.iloc[2] = np.nan
979
+ tm.assert_series_equal(ts.bfill(), ts.fillna(method="bfill"))
980
+
981
+ def test_pad_nan(self):
982
+ x = Series(
983
+             [np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"], dtype=float
+         )
+
+         return_value = x.fillna(method="pad", inplace=True)
+         assert return_value is None
+
+         expected = Series(
+             [np.nan, 1.0, 1.0, 3.0, 3.0], ["z", "a", "b", "c", "d"], dtype=float
+         )
+         tm.assert_series_equal(x[1:], expected[1:])
+         assert np.isnan(x.iloc[0]), np.isnan(expected.iloc[0])
+
+     def test_series_fillna_limit(self):
+         index = np.arange(10)
+         s = Series(np.random.default_rng(2).standard_normal(10), index=index)
+
+         result = s[:2].reindex(index)
+         result = result.fillna(method="pad", limit=5)
+
+         expected = s[:2].reindex(index).fillna(method="pad")
+         expected[-3:] = np.nan
+         tm.assert_series_equal(result, expected)
+
+         result = s[-2:].reindex(index)
+         result = result.fillna(method="bfill", limit=5)
+
+         expected = s[-2:].reindex(index).fillna(method="backfill")
+         expected[:3] = np.nan
+         tm.assert_series_equal(result, expected)
+
+     def test_series_pad_backfill_limit(self):
+         index = np.arange(10)
+         s = Series(np.random.default_rng(2).standard_normal(10), index=index)
+
+         result = s[:2].reindex(index, method="pad", limit=5)
+
+         expected = s[:2].reindex(index).fillna(method="pad")
+         expected[-3:] = np.nan
+         tm.assert_series_equal(result, expected)
+
+         result = s[-2:].reindex(index, method="backfill", limit=5)
+
+         expected = s[-2:].reindex(index).fillna(method="backfill")
+         expected[:3] = np.nan
+         tm.assert_series_equal(result, expected)
+
+     def test_fillna_int(self):
+         ser = Series(np.random.default_rng(2).integers(-100, 100, 50))
+         return_value = ser.fillna(method="ffill", inplace=True)
+         assert return_value is None
+         tm.assert_series_equal(ser.fillna(method="ffill", inplace=False), ser)
+
+     def test_datetime64tz_fillna_round_issue(self):
+         # GH#14872
+
+         data = Series(
+             [NaT, NaT, datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc)]
+         )
+
+         filled = data.bfill()
+
+         expected = Series(
+             [
+                 datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
+                 datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
+                 datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
+             ]
+         )
+
+         tm.assert_series_equal(filled, expected)
+
+     def test_fillna_parr(self):
+         # GH-24537
+         dti = date_range(
+             Timestamp.max - Timedelta(nanoseconds=10), periods=5, freq="ns"
+         )
+         ser = Series(dti.to_period("ns"))
+         ser[2] = NaT
+         arr = period_array(
+             [
+                 Timestamp("2262-04-11 23:47:16.854775797"),
+                 Timestamp("2262-04-11 23:47:16.854775798"),
+                 Timestamp("2262-04-11 23:47:16.854775798"),
+                 Timestamp("2262-04-11 23:47:16.854775800"),
+                 Timestamp("2262-04-11 23:47:16.854775801"),
+             ],
+             freq="ns",
+         )
+         expected = Series(arr)
+
+         filled = ser.ffill()
+
+         tm.assert_series_equal(filled, expected)
+
+     @pytest.mark.parametrize("func", ["pad", "backfill"])
+     def test_pad_backfill_deprecated(self, func):
+         # GH#33396
+         ser = Series([1, 2, 3])
+         with tm.assert_produces_warning(FutureWarning):
+             getattr(ser, func)()
+
+
+ @pytest.mark.parametrize(
+     "data, expected_data, method, kwargs",
+     (
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [np.nan, np.nan, 3.0, 3.0, 3.0, 3.0, 7.0, np.nan, np.nan],
+             "ffill",
+             {"limit_area": "inside"},
+         ),
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [np.nan, np.nan, 3.0, 3.0, np.nan, np.nan, 7.0, np.nan, np.nan],
+             "ffill",
+             {"limit_area": "inside", "limit": 1},
+         ),
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, 7.0],
+             "ffill",
+             {"limit_area": "outside"},
+         ),
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, np.nan],
+             "ffill",
+             {"limit_area": "outside", "limit": 1},
+         ),
+         (
+             [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
+             [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
+             "ffill",
+             {"limit_area": "outside", "limit": 1},
+         ),
+         (
+             range(5),
+             range(5),
+             "ffill",
+             {"limit_area": "outside", "limit": 1},
+         ),
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [np.nan, np.nan, 3.0, 7.0, 7.0, 7.0, 7.0, np.nan, np.nan],
+             "bfill",
+             {"limit_area": "inside"},
+         ),
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [np.nan, np.nan, 3.0, np.nan, np.nan, 7.0, 7.0, np.nan, np.nan],
+             "bfill",
+             {"limit_area": "inside", "limit": 1},
+         ),
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [3.0, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
+             "bfill",
+             {"limit_area": "outside"},
+         ),
+         (
+             [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
+             [np.nan, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
+             "bfill",
+             {"limit_area": "outside", "limit": 1},
+         ),
+     ),
+ )
+ def test_ffill_bfill_limit_area(data, expected_data, method, kwargs):
+     # GH#56492
+     s = Series(data)
+     expected = Series(expected_data)
+     result = getattr(s, method)(**kwargs)
+     tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_get_numeric_data.py ADDED
@@ -0,0 +1,38 @@
+ from pandas import (
+     Index,
+     Series,
+     date_range,
+ )
+ import pandas._testing as tm
+
+
+ class TestGetNumericData:
+     def test_get_numeric_data_preserve_dtype(
+         self, using_copy_on_write, warn_copy_on_write
+     ):
+         # get the numeric data
+         obj = Series([1, 2, 3])
+         result = obj._get_numeric_data()
+         tm.assert_series_equal(result, obj)
+
+         # returned object is a shallow copy
+         with tm.assert_cow_warning(warn_copy_on_write):
+             result.iloc[0] = 0
+         if using_copy_on_write:
+             assert obj.iloc[0] == 1
+         else:
+             assert obj.iloc[0] == 0
+
+         obj = Series([1, "2", 3.0])
+         result = obj._get_numeric_data()
+         expected = Series([], dtype=object, index=Index([], dtype=object))
+         tm.assert_series_equal(result, expected)
+
+         obj = Series([True, False, True])
+         result = obj._get_numeric_data()
+         tm.assert_series_equal(result, obj)
+
+         obj = Series(date_range("20130101", periods=3))
+         result = obj._get_numeric_data()
+         expected = Series([], dtype="M8[ns]", index=Index([], dtype=object))
+         tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_infer_objects.py ADDED
@@ -0,0 +1,56 @@
+ import numpy as np
+
+ from pandas import (
+     Series,
+     interval_range,
+ )
+ import pandas._testing as tm
+
+
+ class TestInferObjects:
+     def test_copy(self, index_or_series):
+         # GH#50096
+         # case where we don't need to do inference because it is already non-object
+         obj = index_or_series(np.array([1, 2, 3], dtype="int64"))
+
+         result = obj.infer_objects(copy=False)
+         assert tm.shares_memory(result, obj)
+
+         # case where we try to do inference but can't do better than object
+         obj2 = index_or_series(np.array(["foo", 2], dtype=object))
+         result2 = obj2.infer_objects(copy=False)
+         assert tm.shares_memory(result2, obj2)
+
+     def test_infer_objects_series(self, index_or_series):
+         # GH#11221
+         actual = index_or_series(np.array([1, 2, 3], dtype="O")).infer_objects()
+         expected = index_or_series([1, 2, 3])
+         tm.assert_equal(actual, expected)
+
+         actual = index_or_series(np.array([1, 2, 3, None], dtype="O")).infer_objects()
+         expected = index_or_series([1.0, 2.0, 3.0, np.nan])
+         tm.assert_equal(actual, expected)
+
+         # only soft conversions, unconvertible pass thru unchanged
+
+         obj = index_or_series(np.array([1, 2, 3, None, "a"], dtype="O"))
+         actual = obj.infer_objects()
+         expected = index_or_series([1, 2, 3, None, "a"], dtype=object)
+
+         assert actual.dtype == "object"
+         tm.assert_equal(actual, expected)
+
+     def test_infer_objects_interval(self, index_or_series):
+         # GH#50090
+         ii = interval_range(1, 10)
+         obj = index_or_series(ii)
+
+         result = obj.astype(object).infer_objects()
+         tm.assert_equal(result, obj)
+
+     def test_infer_objects_bytes(self):
+         # GH#49650
+         ser = Series([b"a"], dtype="bytes")
+         expected = ser.copy()
+         result = ser.infer_objects()
+         tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_info.py ADDED
@@ -0,0 +1,181 @@
1
+ from io import StringIO
2
+ from string import ascii_uppercase
3
+ import textwrap
4
+
5
+ import numpy as np
6
+ import pytest
7
+
8
+ from pandas.compat import PYPY
9
+
10
+ from pandas import (
11
+ CategoricalIndex,
12
+ MultiIndex,
13
+ Series,
14
+ date_range,
15
+ )
16
+
17
+
18
+ def test_info_categorical_column_just_works():
19
+ n = 2500
20
+ data = np.array(list("abcdefghij")).take(
21
+ np.random.default_rng(2).integers(0, 10, size=n, dtype=int)
22
+ )
23
+ s = Series(data).astype("category")
24
+ s.isna()
25
+ buf = StringIO()
26
+ s.info(buf=buf)
27
+
28
+ s2 = s[s == "d"]
29
+ buf = StringIO()
30
+ s2.info(buf=buf)
31
+
32
+
33
+ def test_info_categorical():
34
+ # GH14298
35
+ idx = CategoricalIndex(["a", "b"])
36
+ s = Series(np.zeros(2), index=idx)
37
+ buf = StringIO()
38
+ s.info(buf=buf)
39
+
40
+
41
+ @pytest.mark.parametrize("verbose", [True, False])
42
+ def test_info_series(lexsorted_two_level_string_multiindex, verbose):
43
+ index = lexsorted_two_level_string_multiindex
44
+ ser = Series(range(len(index)), index=index, name="sth")
45
+ buf = StringIO()
46
+ ser.info(verbose=verbose, buf=buf)
47
+ result = buf.getvalue()
48
+
49
+ expected = textwrap.dedent(
50
+ """\
51
+ <class 'pandas.core.series.Series'>
52
+ MultiIndex: 10 entries, ('foo', 'one') to ('qux', 'three')
53
+ """
54
+ )
55
+ if verbose:
56
+ expected += textwrap.dedent(
57
+ """\
58
+ Series name: sth
59
+ Non-Null Count Dtype
60
+ -------------- -----
61
+ 10 non-null int64
62
+ """
63
+ )
64
+ expected += textwrap.dedent(
65
+ f"""\
66
+ dtypes: int64(1)
67
+ memory usage: {ser.memory_usage()}.0+ bytes
68
+ """
69
+ )
70
+ assert result == expected
71
+
72
+
73
+ def test_info_memory():
74
+ s = Series([1, 2], dtype="i8")
75
+ buf = StringIO()
76
+ s.info(buf=buf)
77
+ result = buf.getvalue()
78
+ memory_bytes = float(s.memory_usage())
79
+ expected = textwrap.dedent(
80
+ f"""\
81
+ <class 'pandas.core.series.Series'>
82
+ RangeIndex: 2 entries, 0 to 1
83
+ Series name: None
84
+ Non-Null Count Dtype
85
+ -------------- -----
86
+ 2 non-null int64
87
+ dtypes: int64(1)
88
+ memory usage: {memory_bytes} bytes
89
+ """
90
+ )
91
+ assert result == expected
92
+
93
+
94
+ def test_info_wide():
95
+ s = Series(np.random.default_rng(2).standard_normal(101))
96
+ msg = "Argument `max_cols` can only be passed in DataFrame.info, not Series.info"
97
+ with pytest.raises(ValueError, match=msg):
98
+ s.info(max_cols=1)
99
+
100
+
101
+ def test_info_shows_dtypes():
102
+ dtypes = [
103
+ "int64",
104
+ "float64",
105
+ "datetime64[ns]",
106
+ "timedelta64[ns]",
107
+ "complex128",
108
+ "object",
109
+ "bool",
110
+ ]
111
+ n = 10
112
+ for dtype in dtypes:
113
+ s = Series(np.random.default_rng(2).integers(2, size=n).astype(dtype))
114
+ buf = StringIO()
115
+ s.info(buf=buf)
116
+ res = buf.getvalue()
117
+ name = f"{n:d} non-null {dtype}"
118
+ assert name in res
119
+
120
+
121
+ @pytest.mark.xfail(PYPY, reason="on PyPy deep=True doesn't change result")
122
+ def test_info_memory_usage_deep_not_pypy():
123
+ s_with_object_index = Series({"a": [1]}, index=["foo"])
124
+ assert s_with_object_index.memory_usage(
125
+ index=True, deep=True
126
+ ) > s_with_object_index.memory_usage(index=True)
127
+
128
+ s_object = Series({"a": ["a"]})
129
+ assert s_object.memory_usage(deep=True) > s_object.memory_usage()
130
+
131
+
132
+ @pytest.mark.xfail(not PYPY, reason="on PyPy deep=True does not change result")
133
+ def test_info_memory_usage_deep_pypy():
134
+ s_with_object_index = Series({"a": [1]}, index=["foo"])
135
+ assert s_with_object_index.memory_usage(
136
+ index=True, deep=True
137
+ ) == s_with_object_index.memory_usage(index=True)
138
+
139
+ s_object = Series({"a": ["a"]})
140
+ assert s_object.memory_usage(deep=True) == s_object.memory_usage()
141
+
142
+
143
+ @pytest.mark.parametrize(
144
+ "series, plus",
145
+ [
146
+ (Series(1, index=[1, 2, 3]), False),
147
+ (Series(1, index=list("ABC")), True),
148
+ (Series(1, index=MultiIndex.from_product([range(3), range(3)])), False),
149
+ (
150
+ Series(1, index=MultiIndex.from_product([range(3), ["foo", "bar"]])),
151
+ True,
152
+ ),
153
+ ],
154
+ )
155
+ def test_info_memory_usage_qualified(series, plus):
156
+ buf = StringIO()
157
+ series.info(buf=buf)
158
+ if plus:
159
+ assert "+" in buf.getvalue()
160
+ else:
161
+ assert "+" not in buf.getvalue()
162
+
163
+
164
+ def test_info_memory_usage_bug_on_multiindex():
165
+ # GH 14308
166
+ # memory usage introspection should not materialize .values
167
+ N = 100
168
+ M = len(ascii_uppercase)
169
+ index = MultiIndex.from_product(
170
+ [list(ascii_uppercase), date_range("20160101", periods=N)],
171
+ names=["id", "date"],
172
+ )
173
+ s = Series(np.random.default_rng(2).standard_normal(N * M), index=index)
174
+
175
+ unstacked = s.unstack("id")
176
+ assert s.values.nbytes == unstacked.values.nbytes
177
+ assert s.memory_usage(deep=True) > unstacked.memory_usage(deep=True).sum()
178
+
179
+ # high upper bound
180
+ diff = unstacked.memory_usage(deep=True).sum() - s.memory_usage(deep=True)
181
+ assert diff < 2000
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_interpolate.py ADDED
@@ -0,0 +1,868 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas.util._test_decorators as td
5
+
6
+ import pandas as pd
7
+ from pandas import (
8
+ Index,
9
+ MultiIndex,
10
+ Series,
11
+ date_range,
12
+ isna,
13
+ )
14
+ import pandas._testing as tm
15
+
16
+
17
+ @pytest.fixture(
18
+ params=[
19
+ "linear",
20
+ "index",
21
+ "values",
22
+ "nearest",
23
+ "slinear",
24
+ "zero",
25
+ "quadratic",
26
+ "cubic",
27
+ "barycentric",
28
+ "krogh",
29
+ "polynomial",
30
+ "spline",
31
+ "piecewise_polynomial",
32
+ "from_derivatives",
33
+ "pchip",
34
+ "akima",
35
+ "cubicspline",
36
+ ]
37
+ )
38
+ def nontemporal_method(request):
39
+ """Fixture that returns an (method name, required kwargs) pair.
40
+
41
+ This fixture does not include method 'time' as a parameterization; that
42
+ method requires a Series with a DatetimeIndex, and is generally tested
43
+ separately from these non-temporal methods.
44
+ """
45
+ method = request.param
46
+ kwargs = {"order": 1} if method in ("spline", "polynomial") else {}
47
+ return method, kwargs
48
+
49
+
50
+ @pytest.fixture(
51
+ params=[
52
+ "linear",
53
+ "slinear",
54
+ "zero",
55
+ "quadratic",
56
+ "cubic",
57
+ "barycentric",
58
+ "krogh",
59
+ "polynomial",
60
+ "spline",
61
+ "piecewise_polynomial",
62
+ "from_derivatives",
63
+ "pchip",
64
+ "akima",
65
+ "cubicspline",
66
+ ]
67
+ )
68
+ def interp_methods_ind(request):
69
+ """Fixture that returns a (method name, required kwargs) pair to
70
+ be tested for various Index types.
71
+
72
+ This fixture does not include methods - 'time', 'index', 'nearest',
73
+ 'values' as a parameterization
74
+ """
75
+ method = request.param
76
+ kwargs = {"order": 1} if method in ("spline", "polynomial") else {}
77
+ return method, kwargs
78
+
79
+
80
+ class TestSeriesInterpolateData:
81
+ @pytest.mark.xfail(reason="EA.fillna does not handle 'linear' method")
82
+ def test_interpolate_period_values(self):
83
+ orig = Series(date_range("2012-01-01", periods=5))
84
+ ser = orig.copy()
85
+ ser[2] = pd.NaT
86
+
87
+ # period cast
88
+ ser_per = ser.dt.to_period("D")
89
+ res_per = ser_per.interpolate()
90
+ expected_per = orig.dt.to_period("D")
91
+ tm.assert_series_equal(res_per, expected_per)
92
+
93
+ def test_interpolate(self, datetime_series):
94
+ ts = Series(np.arange(len(datetime_series), dtype=float), datetime_series.index)
95
+
96
+ ts_copy = ts.copy()
97
+ ts_copy[5:10] = np.nan
98
+
99
+ linear_interp = ts_copy.interpolate(method="linear")
100
+ tm.assert_series_equal(linear_interp, ts)
101
+
102
+ ord_ts = Series(
103
+ [d.toordinal() for d in datetime_series.index], index=datetime_series.index
104
+ ).astype(float)
105
+
106
+ ord_ts_copy = ord_ts.copy()
107
+ ord_ts_copy[5:10] = np.nan
108
+
109
+ time_interp = ord_ts_copy.interpolate(method="time")
110
+ tm.assert_series_equal(time_interp, ord_ts)
111
+
112
+ def test_interpolate_time_raises_for_non_timeseries(self):
113
+ # When method='time' is used on a non-TimeSeries that contains a null
114
+ # value, a ValueError should be raised.
115
+ non_ts = Series([0, 1, 2, np.nan])
116
+ msg = "time-weighted interpolation only works on Series.* with a DatetimeIndex"
117
+ with pytest.raises(ValueError, match=msg):
118
+ non_ts.interpolate(method="time")
119
+
120
+ def test_interpolate_cubicspline(self):
121
+ pytest.importorskip("scipy")
122
+ ser = Series([10, 11, 12, 13])
123
+
124
+ expected = Series(
125
+ [11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
126
+ index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
127
+ )
128
+ # interpolate at new_index
129
+ new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
130
+ float
131
+ )
132
+ result = ser.reindex(new_index).interpolate(method="cubicspline").loc[1:3]
133
+ tm.assert_series_equal(result, expected)
134
+
135
+ def test_interpolate_pchip(self):
136
+ pytest.importorskip("scipy")
137
+ ser = Series(np.sort(np.random.default_rng(2).uniform(size=100)))
138
+
139
+ # interpolate at new_index
140
+ new_index = ser.index.union(
141
+ Index([49.25, 49.5, 49.75, 50.25, 50.5, 50.75])
142
+ ).astype(float)
143
+ interp_s = ser.reindex(new_index).interpolate(method="pchip")
144
+ # does not blow up, GH5977
145
+ interp_s.loc[49:51]
146
+
147
+ def test_interpolate_akima(self):
148
+ pytest.importorskip("scipy")
149
+ ser = Series([10, 11, 12, 13])
150
+
151
+ # interpolate at new_index where `der` is zero
152
+ expected = Series(
153
+ [11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
154
+ index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
155
+ )
156
+ new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
157
+ float
158
+ )
159
+ interp_s = ser.reindex(new_index).interpolate(method="akima")
160
+ tm.assert_series_equal(interp_s.loc[1:3], expected)
161
+
162
+ # interpolate at new_index where `der` is a non-zero int
163
+ expected = Series(
164
+ [11.0, 1.0, 1.0, 1.0, 12.0, 1.0, 1.0, 1.0, 13.0],
165
+ index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
166
+ )
167
+ new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
168
+ float
169
+ )
170
+ interp_s = ser.reindex(new_index).interpolate(method="akima", der=1)
171
+ tm.assert_series_equal(interp_s.loc[1:3], expected)
172
+
173
+ def test_interpolate_piecewise_polynomial(self):
174
+ pytest.importorskip("scipy")
175
+ ser = Series([10, 11, 12, 13])
176
+
177
+ expected = Series(
178
+ [11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
179
+ index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
180
+ )
181
+ # interpolate at new_index
182
+ new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
183
+ float
184
+ )
185
+ interp_s = ser.reindex(new_index).interpolate(method="piecewise_polynomial")
186
+ tm.assert_series_equal(interp_s.loc[1:3], expected)
187
+
188
+ def test_interpolate_from_derivatives(self):
189
+ pytest.importorskip("scipy")
190
+ ser = Series([10, 11, 12, 13])
191
+
192
+ expected = Series(
193
+ [11.00, 11.25, 11.50, 11.75, 12.00, 12.25, 12.50, 12.75, 13.00],
194
+ index=Index([1.0, 1.25, 1.5, 1.75, 2.0, 2.25, 2.5, 2.75, 3.0]),
195
+ )
196
+ # interpolate at new_index
197
+ new_index = ser.index.union(Index([1.25, 1.5, 1.75, 2.25, 2.5, 2.75])).astype(
198
+ float
199
+ )
200
+ interp_s = ser.reindex(new_index).interpolate(method="from_derivatives")
201
+ tm.assert_series_equal(interp_s.loc[1:3], expected)
202
+
203
+ @pytest.mark.parametrize(
204
+ "kwargs",
205
+ [
206
+ {},
207
+ pytest.param(
208
+ {"method": "polynomial", "order": 1}, marks=td.skip_if_no("scipy")
209
+ ),
210
+ ],
211
+ )
212
+ def test_interpolate_corners(self, kwargs):
213
+ s = Series([np.nan, np.nan])
214
+ tm.assert_series_equal(s.interpolate(**kwargs), s)
215
+
216
+ s = Series([], dtype=object).interpolate()
217
+ tm.assert_series_equal(s.interpolate(**kwargs), s)
218
+
219
+ def test_interpolate_index_values(self):
220
+ s = Series(np.nan, index=np.sort(np.random.default_rng(2).random(30)))
221
+ s.loc[::3] = np.random.default_rng(2).standard_normal(10)
222
+
223
+ vals = s.index.values.astype(float)
224
+
225
+ result = s.interpolate(method="index")
226
+
227
+ expected = s.copy()
228
+ bad = isna(expected.values)
229
+ good = ~bad
230
+ expected = Series(
231
+ np.interp(vals[bad], vals[good], s.values[good]), index=s.index[bad]
232
+ )
233
+
234
+ tm.assert_series_equal(result[bad], expected)
235
+
236
+ # 'values' is synonymous with 'index' for the method kwarg
237
+ other_result = s.interpolate(method="values")
238
+
239
+ tm.assert_series_equal(other_result, result)
240
+ tm.assert_series_equal(other_result[bad], expected)
241
+
242
+ def test_interpolate_non_ts(self):
243
+ s = Series([1, 3, np.nan, np.nan, np.nan, 11])
244
+ msg = (
245
+ "time-weighted interpolation only works on Series or DataFrames "
246
+ "with a DatetimeIndex"
247
+ )
248
+ with pytest.raises(ValueError, match=msg):
249
+ s.interpolate(method="time")
250
+
251
+ @pytest.mark.parametrize(
252
+ "kwargs",
253
+ [
254
+ {},
255
+ pytest.param(
256
+ {"method": "polynomial", "order": 1}, marks=td.skip_if_no("scipy")
257
+ ),
258
+ ],
259
+ )
260
+ def test_nan_interpolate(self, kwargs):
261
+ s = Series([0, 1, np.nan, 3])
262
+ result = s.interpolate(**kwargs)
263
+ expected = Series([0.0, 1.0, 2.0, 3.0])
264
+ tm.assert_series_equal(result, expected)
265
+
266
+ def test_nan_irregular_index(self):
267
+ s = Series([1, 2, np.nan, 4], index=[1, 3, 5, 9])
268
+ result = s.interpolate()
269
+ expected = Series([1.0, 2.0, 3.0, 4.0], index=[1, 3, 5, 9])
270
+ tm.assert_series_equal(result, expected)
271
+
272
+ def test_nan_str_index(self):
273
+ s = Series([0, 1, 2, np.nan], index=list("abcd"))
274
+ result = s.interpolate()
275
+ expected = Series([0.0, 1.0, 2.0, 2.0], index=list("abcd"))
276
+ tm.assert_series_equal(result, expected)
277
+
278
+ def test_interp_quad(self):
279
+ pytest.importorskip("scipy")
280
+ sq = Series([1, 4, np.nan, 16], index=[1, 2, 3, 4])
281
+ result = sq.interpolate(method="quadratic")
282
+ expected = Series([1.0, 4.0, 9.0, 16.0], index=[1, 2, 3, 4])
283
+ tm.assert_series_equal(result, expected)
284
+
285
+ def test_interp_scipy_basic(self):
286
+ pytest.importorskip("scipy")
287
+ s = Series([1, 3, np.nan, 12, np.nan, 25])
288
+ # slinear
289
+ expected = Series([1.0, 3.0, 7.5, 12.0, 18.5, 25.0])
290
+ result = s.interpolate(method="slinear")
291
+ tm.assert_series_equal(result, expected)
292
+
293
+ msg = "The 'downcast' keyword in Series.interpolate is deprecated"
294
+ with tm.assert_produces_warning(FutureWarning, match=msg):
295
+ result = s.interpolate(method="slinear", downcast="infer")
296
+ tm.assert_series_equal(result, expected)
297
+ # nearest
298
+ expected = Series([1, 3, 3, 12, 12, 25])
299
+ result = s.interpolate(method="nearest")
300
+ tm.assert_series_equal(result, expected.astype("float"))
301
+
302
+ with tm.assert_produces_warning(FutureWarning, match=msg):
303
+ result = s.interpolate(method="nearest", downcast="infer")
304
+ tm.assert_series_equal(result, expected)
305
+ # zero
306
+ expected = Series([1, 3, 3, 12, 12, 25])
307
+ result = s.interpolate(method="zero")
308
+ tm.assert_series_equal(result, expected.astype("float"))
309
+
310
+ with tm.assert_produces_warning(FutureWarning, match=msg):
311
+ result = s.interpolate(method="zero", downcast="infer")
312
+ tm.assert_series_equal(result, expected)
313
+ # quadratic
314
+ # GH #15662.
315
+ expected = Series([1, 3.0, 6.823529, 12.0, 18.058824, 25.0])
316
+ result = s.interpolate(method="quadratic")
317
+ tm.assert_series_equal(result, expected)
318
+
319
+ with tm.assert_produces_warning(FutureWarning, match=msg):
320
+ result = s.interpolate(method="quadratic", downcast="infer")
321
+ tm.assert_series_equal(result, expected)
322
+ # cubic
323
+ expected = Series([1.0, 3.0, 6.8, 12.0, 18.2, 25.0])
324
+ result = s.interpolate(method="cubic")
325
+ tm.assert_series_equal(result, expected)
326
+
327
+ def test_interp_limit(self):
328
+ s = Series([1, 3, np.nan, np.nan, np.nan, 11])
329
+
330
+ expected = Series([1.0, 3.0, 5.0, 7.0, np.nan, 11.0])
331
+ result = s.interpolate(method="linear", limit=2)
332
+ tm.assert_series_equal(result, expected)
333
+
334
+ @pytest.mark.parametrize("limit", [-1, 0])
335
+ def test_interpolate_invalid_nonpositive_limit(self, nontemporal_method, limit):
336
+ # GH 9217: make sure limit is greater than zero.
337
+ s = Series([1, 2, np.nan, 4])
338
+ method, kwargs = nontemporal_method
339
+ with pytest.raises(ValueError, match="Limit must be greater than 0"):
340
+ s.interpolate(limit=limit, method=method, **kwargs)
341
+
342
+ def test_interpolate_invalid_float_limit(self, nontemporal_method):
343
+ # GH 9217: make sure limit is an integer.
344
+ s = Series([1, 2, np.nan, 4])
345
+ method, kwargs = nontemporal_method
346
+ limit = 2.0
347
+ with pytest.raises(ValueError, match="Limit must be an integer"):
348
+ s.interpolate(limit=limit, method=method, **kwargs)
349
+
350
+ @pytest.mark.parametrize("invalid_method", [None, "nonexistent_method"])
351
+ def test_interp_invalid_method(self, invalid_method):
352
+ s = Series([1, 3, np.nan, 12, np.nan, 25])
353
+
354
+ msg = f"method must be one of.* Got '{invalid_method}' instead"
355
+ if invalid_method is None:
356
+ msg = "'method' should be a string, not None"
357
+ with pytest.raises(ValueError, match=msg):
358
+ s.interpolate(method=invalid_method)
359
+
360
+ # When an invalid method and invalid limit (such as -1) are
361
+ # provided, the error message reflects the invalid method.
362
+ with pytest.raises(ValueError, match=msg):
363
+ s.interpolate(method=invalid_method, limit=-1)
364
+
365
+ def test_interp_invalid_method_and_value(self):
366
+ # GH#36624
367
+ ser = Series([1, 3, np.nan, 12, np.nan, 25])
368
+
369
+ msg = "'fill_value' is not a valid keyword for Series.interpolate"
370
+ msg2 = "Series.interpolate with method=pad"
371
+ with pytest.raises(ValueError, match=msg):
372
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
373
+ ser.interpolate(fill_value=3, method="pad")
374
+
375
+ def test_interp_limit_forward(self):
376
+ s = Series([1, 3, np.nan, np.nan, np.nan, 11])
377
+
378
+ # Provide 'forward' (the default) explicitly here.
379
+ expected = Series([1.0, 3.0, 5.0, 7.0, np.nan, 11.0])
380
+
381
+ result = s.interpolate(method="linear", limit=2, limit_direction="forward")
382
+ tm.assert_series_equal(result, expected)
383
+
384
+ result = s.interpolate(method="linear", limit=2, limit_direction="FORWARD")
385
+ tm.assert_series_equal(result, expected)
386
+
387
+ def test_interp_unlimited(self):
388
+ # these test are for issue #16282 default Limit=None is unlimited
389
+ s = Series([np.nan, 1.0, 3.0, np.nan, np.nan, np.nan, 11.0, np.nan])
390
+ expected = Series([1.0, 1.0, 3.0, 5.0, 7.0, 9.0, 11.0, 11.0])
391
+ result = s.interpolate(method="linear", limit_direction="both")
392
+ tm.assert_series_equal(result, expected)
393
+
394
+ expected = Series([np.nan, 1.0, 3.0, 5.0, 7.0, 9.0, 11.0, 11.0])
395
+ result = s.interpolate(method="linear", limit_direction="forward")
396
+ tm.assert_series_equal(result, expected)
397
+
398
+ expected = Series([1.0, 1.0, 3.0, 5.0, 7.0, 9.0, 11.0, np.nan])
399
+ result = s.interpolate(method="linear", limit_direction="backward")
400
+ tm.assert_series_equal(result, expected)
401
+
402
+ def test_interp_limit_bad_direction(self):
403
+ s = Series([1, 3, np.nan, np.nan, np.nan, 11])
404
+
405
+ msg = (
406
+ r"Invalid limit_direction: expecting one of \['forward', "
407
+ r"'backward', 'both'\], got 'abc'"
408
+ )
409
+ with pytest.raises(ValueError, match=msg):
410
+ s.interpolate(method="linear", limit=2, limit_direction="abc")
411
+
412
+ # raises an error even if no limit is specified.
413
+ with pytest.raises(ValueError, match=msg):
414
+ s.interpolate(method="linear", limit_direction="abc")
415
+
416
+ # limit_area introduced GH #16284
417
+ def test_interp_limit_area(self):
418
+ # These tests are for issue #9218 -- fill NaNs in both directions.
419
+ s = Series([np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan])
420
+
421
+ expected = Series([np.nan, np.nan, 3.0, 4.0, 5.0, 6.0, 7.0, np.nan, np.nan])
422
+ result = s.interpolate(method="linear", limit_area="inside")
423
+ tm.assert_series_equal(result, expected)
424
+
425
+ expected = Series(
426
+ [np.nan, np.nan, 3.0, 4.0, np.nan, np.nan, 7.0, np.nan, np.nan]
427
+ )
428
+ result = s.interpolate(method="linear", limit_area="inside", limit=1)
429
+ tm.assert_series_equal(result, expected)
430
+
431
+ expected = Series([np.nan, np.nan, 3.0, 4.0, np.nan, 6.0, 7.0, np.nan, np.nan])
432
+ result = s.interpolate(
433
+ method="linear", limit_area="inside", limit_direction="both", limit=1
434
+ )
435
+ tm.assert_series_equal(result, expected)
436
+
437
+ expected = Series([np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, 7.0])
438
+ result = s.interpolate(method="linear", limit_area="outside")
439
+ tm.assert_series_equal(result, expected)
440
+
441
+ expected = Series(
442
+ [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, np.nan]
443
+ )
444
+ result = s.interpolate(method="linear", limit_area="outside", limit=1)
445
+ tm.assert_series_equal(result, expected)
446
+
447
+ expected = Series([np.nan, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, np.nan])
448
+ result = s.interpolate(
449
+ method="linear", limit_area="outside", limit_direction="both", limit=1
450
+ )
451
+ tm.assert_series_equal(result, expected)
452
+
453
+ expected = Series([3.0, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan])
454
+ result = s.interpolate(
455
+ method="linear", limit_area="outside", limit_direction="backward"
456
+ )
457
+ tm.assert_series_equal(result, expected)
458
+
459
+ # raises an error even if limit type is wrong.
460
+ msg = r"Invalid limit_area: expecting one of \['inside', 'outside'\], got abc"
461
+ with pytest.raises(ValueError, match=msg):
462
+ s.interpolate(method="linear", limit_area="abc")
463
+
464
+ @pytest.mark.parametrize(
465
+ "method, limit_direction, expected",
466
+ [
467
+ ("pad", "backward", "forward"),
468
+ ("ffill", "backward", "forward"),
469
+ ("backfill", "forward", "backward"),
470
+ ("bfill", "forward", "backward"),
471
+ ("pad", "both", "forward"),
472
+ ("ffill", "both", "forward"),
473
+ ("backfill", "both", "backward"),
474
+ ("bfill", "both", "backward"),
475
+ ],
476
+ )
477
+ def test_interp_limit_direction_raises(self, method, limit_direction, expected):
478
+ # https://github.com/pandas-dev/pandas/pull/34746
479
+ s = Series([1, 2, 3])
480
+
481
+ msg = f"`limit_direction` must be '{expected}' for method `{method}`"
482
+ msg2 = "Series.interpolate with method="
483
+ with pytest.raises(ValueError, match=msg):
484
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
485
+ s.interpolate(method=method, limit_direction=limit_direction)
486
+
487
+ @pytest.mark.parametrize(
488
+ "data, expected_data, kwargs",
489
+ (
490
+ (
491
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
492
+ [np.nan, np.nan, 3.0, 3.0, 3.0, 3.0, 7.0, np.nan, np.nan],
493
+ {"method": "pad", "limit_area": "inside"},
494
+ ),
495
+ (
496
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
497
+ [np.nan, np.nan, 3.0, 3.0, np.nan, np.nan, 7.0, np.nan, np.nan],
498
+ {"method": "pad", "limit_area": "inside", "limit": 1},
499
+ ),
500
+ (
501
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
502
+ [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, 7.0],
503
+ {"method": "pad", "limit_area": "outside"},
504
+ ),
505
+ (
506
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
507
+ [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, np.nan],
508
+ {"method": "pad", "limit_area": "outside", "limit": 1},
509
+ ),
510
+ (
511
+ [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
512
+ [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
513
+ {"method": "pad", "limit_area": "outside", "limit": 1},
514
+ ),
515
+ (
516
+ range(5),
517
+ range(5),
518
+ {"method": "pad", "limit_area": "outside", "limit": 1},
519
+ ),
520
+ ),
521
+ )
522
+ def test_interp_limit_area_with_pad(self, data, expected_data, kwargs):
523
+ # GH26796
524
+
525
+ s = Series(data)
526
+ expected = Series(expected_data)
527
+ msg = "Series.interpolate with method=pad"
528
+ with tm.assert_produces_warning(FutureWarning, match=msg):
529
+ result = s.interpolate(**kwargs)
530
+ tm.assert_series_equal(result, expected)
531
+
532
+ @pytest.mark.parametrize(
533
+ "data, expected_data, kwargs",
534
+ (
535
+ (
536
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
537
+ [np.nan, np.nan, 3.0, 7.0, 7.0, 7.0, 7.0, np.nan, np.nan],
538
+ {"method": "bfill", "limit_area": "inside"},
539
+ ),
540
+ (
541
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
542
+ [np.nan, np.nan, 3.0, np.nan, np.nan, 7.0, 7.0, np.nan, np.nan],
543
+ {"method": "bfill", "limit_area": "inside", "limit": 1},
544
+ ),
545
+ (
546
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
547
+ [3.0, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
548
+ {"method": "bfill", "limit_area": "outside"},
549
+ ),
550
+ (
551
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
552
+ [np.nan, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
553
+ {"method": "bfill", "limit_area": "outside", "limit": 1},
554
+ ),
555
+ ),
556
+ )
557
+ def test_interp_limit_area_with_backfill(self, data, expected_data, kwargs):
558
+ # GH26796
559
+
560
+ s = Series(data)
561
+ expected = Series(expected_data)
562
+ msg = "Series.interpolate with method=bfill"
563
+ with tm.assert_produces_warning(FutureWarning, match=msg):
564
+ result = s.interpolate(**kwargs)
565
+ tm.assert_series_equal(result, expected)
566
+
567
+ def test_interp_limit_direction(self):
568
+ # These tests are for issue #9218 -- fill NaNs in both directions.
569
+ s = Series([1, 3, np.nan, np.nan, np.nan, 11])
570
+
571
+ expected = Series([1.0, 3.0, np.nan, 7.0, 9.0, 11.0])
572
+ result = s.interpolate(method="linear", limit=2, limit_direction="backward")
573
+ tm.assert_series_equal(result, expected)
574
+
575
+ expected = Series([1.0, 3.0, 5.0, np.nan, 9.0, 11.0])
576
+ result = s.interpolate(method="linear", limit=1, limit_direction="both")
577
+ tm.assert_series_equal(result, expected)
578
+
579
+ # Check that this works on a longer series of nans.
580
+ s = Series([1, 3, np.nan, np.nan, np.nan, 7, 9, np.nan, np.nan, 12, np.nan])
581
+
582
+ expected = Series([1.0, 3.0, 4.0, 5.0, 6.0, 7.0, 9.0, 10.0, 11.0, 12.0, 12.0])
583
+ result = s.interpolate(method="linear", limit=2, limit_direction="both")
584
+ tm.assert_series_equal(result, expected)
585
+
586
+ expected = Series(
587
+ [1.0, 3.0, 4.0, np.nan, 6.0, 7.0, 9.0, 10.0, 11.0, 12.0, 12.0]
588
+ )
589
+ result = s.interpolate(method="linear", limit=1, limit_direction="both")
590
+ tm.assert_series_equal(result, expected)
591
+
592
+ def test_interp_limit_to_ends(self):
593
+ # These test are for issue #10420 -- flow back to beginning.
594
+ s = Series([np.nan, np.nan, 5, 7, 9, np.nan])
595
+
596
+ expected = Series([5.0, 5.0, 5.0, 7.0, 9.0, np.nan])
597
+ result = s.interpolate(method="linear", limit=2, limit_direction="backward")
598
+ tm.assert_series_equal(result, expected)
599
+
600
+ expected = Series([5.0, 5.0, 5.0, 7.0, 9.0, 9.0])
601
+ result = s.interpolate(method="linear", limit=2, limit_direction="both")
602
+ tm.assert_series_equal(result, expected)
603
+
604
+ def test_interp_limit_before_ends(self):
605
+ # These test are for issue #11115 -- limit ends properly.
606
+ s = Series([np.nan, np.nan, 5, 7, np.nan, np.nan])
607
+
608
+ expected = Series([np.nan, np.nan, 5.0, 7.0, 7.0, np.nan])
609
+ result = s.interpolate(method="linear", limit=1, limit_direction="forward")
610
+ tm.assert_series_equal(result, expected)
611
+
612
+ expected = Series([np.nan, 5.0, 5.0, 7.0, np.nan, np.nan])
613
+ result = s.interpolate(method="linear", limit=1, limit_direction="backward")
614
+ tm.assert_series_equal(result, expected)
615
+
616
+ expected = Series([np.nan, 5.0, 5.0, 7.0, 7.0, np.nan])
617
+ result = s.interpolate(method="linear", limit=1, limit_direction="both")
618
+ tm.assert_series_equal(result, expected)
619
+
620
+ def test_interp_all_good(self):
621
+ pytest.importorskip("scipy")
622
+ s = Series([1, 2, 3])
623
+ result = s.interpolate(method="polynomial", order=1)
624
+ tm.assert_series_equal(result, s)
625
+
626
+ # non-scipy
627
+ result = s.interpolate()
628
+ tm.assert_series_equal(result, s)
629
+
630
+ @pytest.mark.parametrize(
631
+ "check_scipy", [False, pytest.param(True, marks=td.skip_if_no("scipy"))]
632
+ )
633
+ def test_interp_multiIndex(self, check_scipy):
634
+ idx = MultiIndex.from_tuples([(0, "a"), (1, "b"), (2, "c")])
635
+ s = Series([1, 2, np.nan], index=idx)
636
+
637
+ expected = s.copy()
638
+ expected.loc[2] = 2
639
+ result = s.interpolate()
640
+ tm.assert_series_equal(result, expected)
641
+
642
+ msg = "Only `method=linear` interpolation is supported on MultiIndexes"
643
+ if check_scipy:
644
+ with pytest.raises(ValueError, match=msg):
645
+ s.interpolate(method="polynomial", order=1)
646
+
647
+ def test_interp_nonmono_raise(self):
648
+ pytest.importorskip("scipy")
649
+ s = Series([1, np.nan, 3], index=[0, 2, 1])
650
+ msg = "krogh interpolation requires that the index be monotonic"
651
+ with pytest.raises(ValueError, match=msg):
652
+ s.interpolate(method="krogh")
653
+
654
+ @pytest.mark.parametrize("method", ["nearest", "pad"])
655
+ def test_interp_datetime64(self, method, tz_naive_fixture):
656
+ pytest.importorskip("scipy")
657
+ df = Series(
658
+ [1, np.nan, 3], index=date_range("1/1/2000", periods=3, tz=tz_naive_fixture)
659
+ )
660
+ warn = None if method == "nearest" else FutureWarning
661
+ msg = "Series.interpolate with method=pad is deprecated"
662
+ with tm.assert_produces_warning(warn, match=msg):
663
+ result = df.interpolate(method=method)
664
+ if warn is not None:
665
+ # check the "use ffill instead" is equivalent
666
+ alt = df.ffill()
667
+ tm.assert_series_equal(result, alt)
668
+
669
+ expected = Series(
670
+ [1.0, 1.0, 3.0],
671
+ index=date_range("1/1/2000", periods=3, tz=tz_naive_fixture),
672
+ )
673
+ tm.assert_series_equal(result, expected)
674
+
675
+ def test_interp_pad_datetime64tz_values(self):
676
+ # GH#27628 missing.interpolate_2d should handle datetimetz values
677
+ dti = date_range("2015-04-05", periods=3, tz="US/Central")
678
+ ser = Series(dti)
679
+ ser[1] = pd.NaT
680
+
681
+ msg = "Series.interpolate with method=pad is deprecated"
682
+ with tm.assert_produces_warning(FutureWarning, match=msg):
683
+ result = ser.interpolate(method="pad")
684
+ # check the "use ffill instead" is equivalent
685
+ alt = ser.ffill()
686
+ tm.assert_series_equal(result, alt)
687
+
688
+ expected = Series(dti)
689
+ expected[1] = expected[0]
690
+ tm.assert_series_equal(result, expected)
691
+
692
+ def test_interp_limit_no_nans(self):
693
+ # GH 7173
694
+ s = Series([1.0, 2.0, 3.0])
695
+ result = s.interpolate(limit=1)
696
+ expected = s
697
+ tm.assert_series_equal(result, expected)
698
+
699
+ @pytest.mark.parametrize("method", ["polynomial", "spline"])
700
+ def test_no_order(self, method):
701
+ # see GH-10633, GH-24014
702
+ pytest.importorskip("scipy")
703
+ s = Series([0, 1, np.nan, 3])
704
+ msg = "You must specify the order of the spline or polynomial"
705
+ with pytest.raises(ValueError, match=msg):
706
+ s.interpolate(method=method)
707
+
708
+ @pytest.mark.parametrize("order", [-1, -1.0, 0, 0.0, np.nan])
709
+ def test_interpolate_spline_invalid_order(self, order):
710
+ pytest.importorskip("scipy")
711
+ s = Series([0, 1, np.nan, 3])
712
+ msg = "order needs to be specified and greater than 0"
713
+ with pytest.raises(ValueError, match=msg):
714
+ s.interpolate(method="spline", order=order)
715
+
716
+ def test_spline(self):
717
+ pytest.importorskip("scipy")
718
+ s = Series([1, 2, np.nan, 4, 5, np.nan, 7])
719
+ result = s.interpolate(method="spline", order=1)
720
+ expected = Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
721
+ tm.assert_series_equal(result, expected)
722
+
723
+ def test_spline_extrapolate(self):
724
+ pytest.importorskip("scipy")
725
+ s = Series([1, 2, 3, 4, np.nan, 6, np.nan])
726
+ result3 = s.interpolate(method="spline", order=1, ext=3)
727
+ expected3 = Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 6.0])
728
+ tm.assert_series_equal(result3, expected3)
729
+
730
+ result1 = s.interpolate(method="spline", order=1, ext=0)
731
+ expected1 = Series([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0])
732
+ tm.assert_series_equal(result1, expected1)
733
+
734
+ def test_spline_smooth(self):
735
+ pytest.importorskip("scipy")
736
+ s = Series([1, 2, np.nan, 4, 5.1, np.nan, 7])
737
+ assert (
738
+ s.interpolate(method="spline", order=3, s=0)[5]
739
+ != s.interpolate(method="spline", order=3)[5]
740
+ )
741
+
742
+ def test_spline_interpolation(self):
743
+ # Explicit cast to float to avoid implicit cast when setting np.nan
744
+ pytest.importorskip("scipy")
745
+ s = Series(np.arange(10) ** 2, dtype="float")
746
+ s[np.random.default_rng(2).integers(0, 9, 3)] = np.nan
747
+ result1 = s.interpolate(method="spline", order=1)
748
+ expected1 = s.interpolate(method="spline", order=1)
749
+ tm.assert_series_equal(result1, expected1)
750
+
751
+ def test_interp_timedelta64(self):
752
+ # GH 6424
753
+ df = Series([1, np.nan, 3], index=pd.to_timedelta([1, 2, 3]))
754
+ result = df.interpolate(method="time")
755
+ expected = Series([1.0, 2.0, 3.0], index=pd.to_timedelta([1, 2, 3]))
756
+ tm.assert_series_equal(result, expected)
757
+
758
+ # test for non uniform spacing
759
+ df = Series([1, np.nan, 3], index=pd.to_timedelta([1, 2, 4]))
760
+ result = df.interpolate(method="time")
761
+ expected = Series([1.0, 1.666667, 3.0], index=pd.to_timedelta([1, 2, 4]))
762
+ tm.assert_series_equal(result, expected)
763
+
764
+ def test_series_interpolate_method_values(self):
765
+ # GH#1646
766
+ rng = date_range("1/1/2000", "1/20/2000", freq="D")
767
+ ts = Series(np.random.default_rng(2).standard_normal(len(rng)), index=rng)
768
+
769
+ ts[::2] = np.nan
770
+
771
+ result = ts.interpolate(method="values")
772
+ exp = ts.interpolate()
773
+ tm.assert_series_equal(result, exp)
774
+
775
+ def test_series_interpolate_intraday(self):
776
+ # #1698
777
+ index = date_range("1/1/2012", periods=4, freq="12D")
778
+ ts = Series([0, 12, 24, 36], index)
779
+ new_index = index.append(index + pd.DateOffset(days=1)).sort_values()
780
+
781
+ exp = ts.reindex(new_index).interpolate(method="time")
782
+
783
+ index = date_range("1/1/2012", periods=4, freq="12h")
784
+ ts = Series([0, 12, 24, 36], index)
785
+ new_index = index.append(index + pd.DateOffset(hours=1)).sort_values()
786
+ result = ts.reindex(new_index).interpolate(method="time")
787
+
788
+ tm.assert_numpy_array_equal(result.values, exp.values)
789
+
790
+ @pytest.mark.parametrize(
791
+ "ind",
792
+ [
793
+ ["a", "b", "c", "d"],
794
+ pd.period_range(start="2019-01-01", periods=4),
795
+ pd.interval_range(start=0, end=4),
796
+ ],
797
+ )
798
+ def test_interp_non_timedelta_index(self, interp_methods_ind, ind):
799
+ # gh 21662
800
+ df = pd.DataFrame([0, 1, np.nan, 3], index=ind)
801
+
802
+ method, kwargs = interp_methods_ind
803
+ if method == "pchip":
804
+ pytest.importorskip("scipy")
805
+
806
+ if method == "linear":
807
+ result = df[0].interpolate(**kwargs)
808
+ expected = Series([0.0, 1.0, 2.0, 3.0], name=0, index=ind)
809
+ tm.assert_series_equal(result, expected)
810
+ else:
811
+ expected_error = (
812
+ "Index column must be numeric or datetime type when "
813
+ f"using {method} method other than linear. "
814
+ "Try setting a numeric or datetime index column before "
815
+ "interpolating."
816
+ )
817
+ with pytest.raises(ValueError, match=expected_error):
818
+ df[0].interpolate(method=method, **kwargs)
819
+
820
+ def test_interpolate_timedelta_index(self, request, interp_methods_ind):
821
+ """
822
+ Tests for non numerical index types - object, period, timedelta
823
+ Note that all methods except time, index, nearest and values
824
+ are tested here.
825
+ """
826
+ # gh 21662
827
+ pytest.importorskip("scipy")
828
+ ind = pd.timedelta_range(start=1, periods=4)
829
+ df = pd.DataFrame([0, 1, np.nan, 3], index=ind)
830
+
831
+ method, kwargs = interp_methods_ind
832
+
833
+ if method in {"cubic", "zero"}:
834
+ request.applymarker(
835
+ pytest.mark.xfail(
836
+ reason=f"{method} interpolation is not supported for TimedeltaIndex"
837
+ )
838
+ )
839
+ result = df[0].interpolate(method=method, **kwargs)
840
+ expected = Series([0.0, 1.0, 2.0, 3.0], name=0, index=ind)
841
+ tm.assert_series_equal(result, expected)
842
+
843
+ @pytest.mark.parametrize(
844
+ "ascending, expected_values",
845
+ [(True, [1, 2, 3, 9, 10]), (False, [10, 9, 3, 2, 1])],
846
+ )
847
+ def test_interpolate_unsorted_index(self, ascending, expected_values):
848
+ # GH 21037
849
+ ts = Series(data=[10, 9, np.nan, 2, 1], index=[10, 9, 3, 2, 1])
850
+ result = ts.sort_index(ascending=ascending).interpolate(method="index")
851
+ expected = Series(data=expected_values, index=expected_values, dtype=float)
852
+ tm.assert_series_equal(result, expected)
853
+
854
+ def test_interpolate_asfreq_raises(self):
855
+ ser = Series(["a", None, "b"], dtype=object)
856
+ msg2 = "Series.interpolate with object dtype"
857
+ msg = "Invalid fill method"
858
+ with pytest.raises(ValueError, match=msg):
859
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
860
+ ser.interpolate(method="asfreq")
861
+
862
+ def test_interpolate_fill_value(self):
863
+ # GH#54920
864
+ pytest.importorskip("scipy")
865
+ ser = Series([np.nan, 0, 1, np.nan, 3, np.nan])
866
+ result = ser.interpolate(method="nearest", fill_value=0)
867
+ expected = Series([np.nan, 0, 1, 1, 3, 0])
868
+ tm.assert_series_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_is_monotonic.py ADDED
@@ -0,0 +1,26 @@
+ import numpy as np
+
+ from pandas import (
+     Series,
+     date_range,
+ )
+
+
+ class TestIsMonotonic:
+     def test_is_monotonic_numeric(self):
+         ser = Series(np.random.default_rng(2).integers(0, 10, size=1000))
+         assert not ser.is_monotonic_increasing
+         ser = Series(np.arange(1000))
+         assert ser.is_monotonic_increasing is True
+         assert ser.is_monotonic_increasing is True
+         ser = Series(np.arange(1000, 0, -1))
+         assert ser.is_monotonic_decreasing is True
+
+     def test_is_monotonic_dt64(self):
+         ser = Series(date_range("20130101", periods=10))
+         assert ser.is_monotonic_increasing is True
+         assert ser.is_monotonic_increasing is True
+
+         ser = Series(list(reversed(ser)))
+         assert ser.is_monotonic_increasing is False
+         assert ser.is_monotonic_decreasing is True
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_is_unique.py ADDED
@@ -0,0 +1,40 @@
+ import numpy as np
+ import pytest
+
+ from pandas import Series
+
+
+ @pytest.mark.parametrize(
+     "data, expected",
+     [
+         (np.random.default_rng(2).integers(0, 10, size=1000), False),
+         (np.arange(1000), True),
+         ([], True),
+         ([np.nan], True),
+         (["foo", "bar", np.nan], True),
+         (["foo", "foo", np.nan], False),
+         (["foo", "bar", np.nan, np.nan], False),
+     ],
+ )
+ def test_is_unique(data, expected):
+     # GH#11946 / GH#25180
+     ser = Series(data)
+     assert ser.is_unique is expected
+
+
+ def test_is_unique_class_ne(capsys):
+     # GH#20661
+     class Foo:
+         def __init__(self, val) -> None:
+             self._value = val
+
+         def __ne__(self, other):
+             raise Exception("NEQ not supported")
+
+     with capsys.disabled():
+         li = [Foo(i) for i in range(5)]
+         ser = Series(li, index=list(range(5)))
+
+         ser.is_unique
+     captured = capsys.readouterr()
+     assert len(captured.err) == 0
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_isin.py ADDED
@@ -0,0 +1,252 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ Series,
7
+ date_range,
8
+ )
9
+ import pandas._testing as tm
10
+ from pandas.core import algorithms
11
+ from pandas.core.arrays import PeriodArray
12
+
13
+
14
+ class TestSeriesIsIn:
15
+ def test_isin(self):
16
+ s = Series(["A", "B", "C", "a", "B", "B", "A", "C"])
17
+
18
+ result = s.isin(["A", "C"])
19
+ expected = Series([True, False, True, False, False, False, True, True])
20
+ tm.assert_series_equal(result, expected)
21
+
22
+ # GH#16012
23
+ # This specific issue has to have a series over 1e6 in len, but the
24
+ # comparison array (in_list) must be large enough so that numpy doesn't
25
+ # do a manual masking trick that will avoid this issue altogether
26
+ s = Series(list("abcdefghijk" * 10**5))
27
+ # If numpy doesn't do the manual comparison/mask, these
28
+ # unorderable mixed types are what cause the exception in numpy
29
+ in_list = [-1, "a", "b", "G", "Y", "Z", "E", "K", "E", "S", "I", "R", "R"] * 6
30
+
31
+ assert s.isin(in_list).sum() == 200000
32
+
33
+ def test_isin_with_string_scalar(self):
34
+ # GH#4763
35
+ s = Series(["A", "B", "C", "a", "B", "B", "A", "C"])
36
+ msg = (
37
+ r"only list-like objects are allowed to be passed to isin\(\), "
38
+ r"you passed a `str`"
39
+ )
40
+ with pytest.raises(TypeError, match=msg):
41
+ s.isin("a")
42
+
43
+ s = Series(["aaa", "b", "c"])
44
+ with pytest.raises(TypeError, match=msg):
45
+ s.isin("aaa")
46
+
47
+ def test_isin_datetimelike_mismatched_reso(self):
48
+ expected = Series([True, True, False, False, False])
49
+
50
+ ser = Series(date_range("jan-01-2013", "jan-05-2013"))
51
+
52
+ # fails on dtype conversion in the first place
53
+ day_values = np.asarray(ser[0:2].values).astype("datetime64[D]")
54
+ result = ser.isin(day_values)
55
+ tm.assert_series_equal(result, expected)
56
+
57
+ dta = ser[:2]._values.astype("M8[s]")
58
+ result = ser.isin(dta)
59
+ tm.assert_series_equal(result, expected)
60
+
61
+ def test_isin_datetimelike_mismatched_reso_list(self):
62
+ expected = Series([True, True, False, False, False])
63
+
64
+ ser = Series(date_range("jan-01-2013", "jan-05-2013"))
65
+
66
+ dta = ser[:2]._values.astype("M8[s]")
67
+ result = ser.isin(list(dta))
68
+ tm.assert_series_equal(result, expected)
69
+
70
+ def test_isin_with_i8(self):
71
+ # GH#5021
72
+
73
+ expected = Series([True, True, False, False, False])
74
+ expected2 = Series([False, True, False, False, False])
75
+
76
+ # datetime64[ns]
77
+ s = Series(date_range("jan-01-2013", "jan-05-2013"))
78
+
79
+ result = s.isin(s[0:2])
80
+ tm.assert_series_equal(result, expected)
81
+
82
+ result = s.isin(s[0:2].values)
83
+ tm.assert_series_equal(result, expected)
84
+
85
+ result = s.isin([s[1]])
86
+ tm.assert_series_equal(result, expected2)
87
+
88
+ result = s.isin([np.datetime64(s[1])])
89
+ tm.assert_series_equal(result, expected2)
90
+
91
+ result = s.isin(set(s[0:2]))
92
+ tm.assert_series_equal(result, expected)
93
+
94
+ # timedelta64[ns]
95
+ s = Series(pd.to_timedelta(range(5), unit="d"))
96
+ result = s.isin(s[0:2])
97
+ tm.assert_series_equal(result, expected)
98
+
99
+ @pytest.mark.parametrize("empty", [[], Series(dtype=object), np.array([])])
100
+ def test_isin_empty(self, empty):
101
+ # see GH#16991
102
+ s = Series(["a", "b"])
103
+ expected = Series([False, False])
104
+
105
+ result = s.isin(empty)
106
+ tm.assert_series_equal(expected, result)
107
+
108
+ def test_isin_read_only(self):
109
+ # https://github.com/pandas-dev/pandas/issues/37174
110
+ arr = np.array([1, 2, 3])
111
+ arr.setflags(write=False)
112
+ s = Series([1, 2, 3])
113
+ result = s.isin(arr)
114
+ expected = Series([True, True, True])
115
+ tm.assert_series_equal(result, expected)
116
+
117
+ @pytest.mark.parametrize("dtype", [object, None])
118
+ def test_isin_dt64_values_vs_ints(self, dtype):
119
+ # GH#36621 dont cast integers to datetimes for isin
120
+ dti = date_range("2013-01-01", "2013-01-05")
121
+ ser = Series(dti)
122
+
123
+ comps = np.asarray([1356998400000000000], dtype=dtype)
124
+
125
+ res = dti.isin(comps)
126
+ expected = np.array([False] * len(dti), dtype=bool)
127
+ tm.assert_numpy_array_equal(res, expected)
128
+
129
+ res = ser.isin(comps)
130
+ tm.assert_series_equal(res, Series(expected))
131
+
132
+ res = pd.core.algorithms.isin(ser, comps)
133
+ tm.assert_numpy_array_equal(res, expected)
134
+
135
+ def test_isin_tzawareness_mismatch(self):
136
+ dti = date_range("2013-01-01", "2013-01-05")
137
+ ser = Series(dti)
138
+
139
+ other = dti.tz_localize("UTC")
140
+
141
+ res = dti.isin(other)
142
+ expected = np.array([False] * len(dti), dtype=bool)
143
+ tm.assert_numpy_array_equal(res, expected)
144
+
145
+ res = ser.isin(other)
146
+ tm.assert_series_equal(res, Series(expected))
147
+
148
+ res = pd.core.algorithms.isin(ser, other)
149
+ tm.assert_numpy_array_equal(res, expected)
150
+
151
+ def test_isin_period_freq_mismatch(self):
152
+ dti = date_range("2013-01-01", "2013-01-05")
153
+ pi = dti.to_period("M")
154
+ ser = Series(pi)
155
+
156
+ # We construct another PeriodIndex with the same i8 values
157
+ # but different dtype
158
+ dtype = dti.to_period("Y").dtype
159
+ other = PeriodArray._simple_new(pi.asi8, dtype=dtype)
160
+
161
+ res = pi.isin(other)
162
+ expected = np.array([False] * len(pi), dtype=bool)
163
+ tm.assert_numpy_array_equal(res, expected)
164
+
165
+ res = ser.isin(other)
166
+ tm.assert_series_equal(res, Series(expected))
167
+
168
+ res = pd.core.algorithms.isin(ser, other)
169
+ tm.assert_numpy_array_equal(res, expected)
170
+
171
+ @pytest.mark.parametrize("values", [[-9.0, 0.0], [-9, 0]])
172
+ def test_isin_float_in_int_series(self, values):
173
+ # GH#19356 GH#21804
174
+ ser = Series(values)
175
+ result = ser.isin([-9, -0.5])
176
+ expected = Series([True, False])
177
+ tm.assert_series_equal(result, expected)
178
+
179
+ @pytest.mark.parametrize("dtype", ["boolean", "Int64", "Float64"])
180
+ @pytest.mark.parametrize(
181
+ "data,values,expected",
182
+ [
183
+ ([0, 1, 0], [1], [False, True, False]),
184
+ ([0, 1, 0], [1, pd.NA], [False, True, False]),
185
+ ([0, pd.NA, 0], [1, 0], [True, False, True]),
186
+ ([0, 1, pd.NA], [1, pd.NA], [False, True, True]),
187
+ ([0, 1, pd.NA], [1, np.nan], [False, True, False]),
188
+ ([0, pd.NA, pd.NA], [np.nan, pd.NaT, None], [False, False, False]),
189
+ ],
190
+ )
191
+ def test_isin_masked_types(self, dtype, data, values, expected):
192
+ # GH#42405
193
+ ser = Series(data, dtype=dtype)
194
+
195
+ result = ser.isin(values)
196
+ expected = Series(expected, dtype="boolean")
197
+
198
+ tm.assert_series_equal(result, expected)
199
+
200
+
201
+ def test_isin_large_series_mixed_dtypes_and_nan(monkeypatch):
202
+ # https://github.com/pandas-dev/pandas/issues/37094
203
+ # combination of object dtype for the values
204
+ # and > _MINIMUM_COMP_ARR_LEN elements
205
+ min_isin_comp = 5
206
+ ser = Series([1, 2, np.nan] * min_isin_comp)
207
+ with monkeypatch.context() as m:
208
+ m.setattr(algorithms, "_MINIMUM_COMP_ARR_LEN", min_isin_comp)
209
+ result = ser.isin({"foo", "bar"})
210
+ expected = Series([False] * 3 * min_isin_comp)
211
+ tm.assert_series_equal(result, expected)
212
+
213
+
214
+ @pytest.mark.parametrize(
215
+ "array,expected",
216
+ [
217
+ (
218
+ [0, 1j, 1j, 1, 1 + 1j, 1 + 2j, 1 + 1j],
219
+ Series([False, True, True, False, True, True, True], dtype=bool),
220
+ )
221
+ ],
222
+ )
223
+ def test_isin_complex_numbers(array, expected):
224
+ # GH 17927
225
+ result = Series(array).isin([1j, 1 + 1j, 1 + 2j])
226
+ tm.assert_series_equal(result, expected)
227
+
228
+
229
+ @pytest.mark.parametrize(
230
+ "data,is_in",
231
+ [([1, [2]], [1]), (["simple str", [{"values": 3}]], ["simple str"])],
232
+ )
233
+ def test_isin_filtering_with_mixed_object_types(data, is_in):
234
+ # GH 20883
235
+
236
+ ser = Series(data)
237
+ result = ser.isin(is_in)
238
+ expected = Series([True, False])
239
+
240
+ tm.assert_series_equal(result, expected)
241
+
242
+
243
+ @pytest.mark.parametrize("data", [[1, 2, 3], [1.0, 2.0, 3.0]])
244
+ @pytest.mark.parametrize("isin", [[1, 2], [1.0, 2.0]])
245
+ def test_isin_filtering_on_iterable(data, isin):
246
+ # GH 50234
247
+
248
+ ser = Series(data)
249
+ result = ser.isin(i for i in isin)
250
+ expected_result = Series([True, True, False])
251
+
252
+ tm.assert_series_equal(result, expected_result)
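Illustrative note (not part of the commit): a minimal sketch, assuming a recent pandas release with the semantics the isin tests above pin down, of how those edge cases look from user code.

import pandas as pd

# Comparing tz-naive datetimes against tz-aware candidates matches nothing.
ser = pd.Series(pd.date_range("2013-01-01", "2013-01-05"))
print(ser.isin(pd.date_range("2013-01-01", "2013-01-05", tz="UTC")).tolist())
# [False, False, False, False, False]

# Masked (nullable) dtypes return a "boolean"-dtype result, and pd.NA in the
# candidate list can match pd.NA in the data.
masked = pd.Series([0, 1, pd.NA], dtype="Int64")
print(masked.isin([1, pd.NA]))  # False, True, True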
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_isna.py ADDED
@@ -0,0 +1,35 @@
1
+ """
2
+ We also test Series.notna in this file.
3
+ """
4
+ import numpy as np
5
+
6
+ from pandas import (
7
+ Period,
8
+ Series,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ class TestIsna:
14
+ def test_isna_period_dtype(self):
15
+ # GH#13737
16
+ ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])
17
+
18
+ expected = Series([False, True])
19
+
20
+ result = ser.isna()
21
+ tm.assert_series_equal(result, expected)
22
+
23
+ result = ser.notna()
24
+ tm.assert_series_equal(result, ~expected)
25
+
26
+ def test_isna(self):
27
+ ser = Series([0, 5.4, 3, np.nan, -0.001])
28
+ expected = Series([False, False, False, True, False])
29
+ tm.assert_series_equal(ser.isna(), expected)
30
+ tm.assert_series_equal(ser.notna(), ~expected)
31
+
32
+ ser = Series(["hi", "", np.nan])
33
+ expected = Series([False, False, True])
34
+ tm.assert_series_equal(ser.isna(), expected)
35
+ tm.assert_series_equal(ser.notna(), ~expected)
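Illustrative note (not part of the commit): a short usage sketch of the isna/notna behavior tested above; notna is simply the elementwise negation of isna.

import numpy as np
import pandas as pd

ser = pd.Series([0, 5.4, 3, np.nan, -0.001])
print(ser.isna().tolist())   # [False, False, False, True, False]
print(ser.notna().tolist())  # [True, True, True, False, True]

# Period("NaT") is also recognized as missing.
per = pd.Series([pd.Period("2011-01", freq="M"), pd.Period("NaT", freq="M")])
print(per.isna().tolist())   # [False, True]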
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_item.py ADDED
@@ -0,0 +1,59 @@
1
+ """
2
+ Series.item method, mainly testing that we get python scalars as opposed to
3
+ numpy scalars.
4
+ """
5
+ import pytest
6
+
7
+ from pandas import (
8
+ Series,
9
+ Timedelta,
10
+ Timestamp,
11
+ date_range,
12
+ )
13
+
14
+
15
+ class TestItem:
16
+ def test_item(self):
17
+ # We are testing that we get python scalars as opposed to numpy scalars
18
+ ser = Series([1])
19
+ result = ser.item()
20
+ assert result == 1
21
+ assert result == ser.iloc[0]
22
+ assert isinstance(result, int) # i.e. not np.int64
23
+
24
+ ser = Series([0.5], index=[3])
25
+ result = ser.item()
26
+ assert isinstance(result, float)
27
+ assert result == 0.5
28
+
29
+ ser = Series([1, 2])
30
+ msg = "can only convert an array of size 1"
31
+ with pytest.raises(ValueError, match=msg):
32
+ ser.item()
33
+
34
+ dti = date_range("2016-01-01", periods=2)
35
+ with pytest.raises(ValueError, match=msg):
36
+ dti.item()
37
+ with pytest.raises(ValueError, match=msg):
38
+ Series(dti).item()
39
+
40
+ val = dti[:1].item()
41
+ assert isinstance(val, Timestamp)
42
+ val = Series(dti)[:1].item()
43
+ assert isinstance(val, Timestamp)
44
+
45
+ tdi = dti - dti
46
+ with pytest.raises(ValueError, match=msg):
47
+ tdi.item()
48
+ with pytest.raises(ValueError, match=msg):
49
+ Series(tdi).item()
50
+
51
+ val = tdi[:1].item()
52
+ assert isinstance(val, Timedelta)
53
+ val = Series(tdi)[:1].item()
54
+ assert isinstance(val, Timedelta)
55
+
56
+ # Case where ser[0] would not work
57
+ ser = Series(dti, index=[5, 6])
58
+ val = ser.iloc[:1].item()
59
+ assert val == dti[0]
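Illustrative note (not part of the commit): a hedged sketch of Series.item() as exercised above; only a length-1 Series (or Index) reduces to a plain Python scalar, anything longer raises ValueError.

import pandas as pd

value = pd.Series([1]).item()
assert value == 1 and isinstance(value, int)  # plain int, not np.int64
try:
    pd.Series([1, 2]).item()
except ValueError as err:
    print(err)  # message mentions "can only convert an array of size 1"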
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_map.py ADDED
@@ -0,0 +1,609 @@
1
+ from collections import (
2
+ Counter,
3
+ defaultdict,
4
+ )
5
+ from decimal import Decimal
6
+ import math
7
+
8
+ import numpy as np
9
+ import pytest
10
+
11
+ import pandas as pd
12
+ from pandas import (
13
+ DataFrame,
14
+ Index,
15
+ MultiIndex,
16
+ Series,
17
+ bdate_range,
18
+ date_range,
19
+ isna,
20
+ timedelta_range,
21
+ )
22
+ import pandas._testing as tm
23
+
24
+
25
+ def test_series_map_box_timedelta():
26
+ # GH#11349
27
+ ser = Series(timedelta_range("1 day 1 s", periods=5, freq="h"))
28
+
29
+ def f(x):
30
+ return x.total_seconds()
31
+
32
+ ser.map(f)
33
+
34
+
35
+ def test_map_callable(datetime_series):
36
+ with np.errstate(all="ignore"):
37
+ tm.assert_series_equal(datetime_series.map(np.sqrt), np.sqrt(datetime_series))
38
+
39
+ # map function element-wise
40
+ tm.assert_series_equal(datetime_series.map(math.exp), np.exp(datetime_series))
41
+
42
+ # empty series
43
+ s = Series(dtype=object, name="foo", index=Index([], name="bar"))
44
+ rs = s.map(lambda x: x)
45
+ tm.assert_series_equal(s, rs)
46
+
47
+ # check all metadata (GH 9322)
48
+ assert s is not rs
49
+ assert s.index is rs.index
50
+ assert s.dtype == rs.dtype
51
+ assert s.name == rs.name
52
+
53
+ # index but no data
54
+ s = Series(index=[1, 2, 3], dtype=np.float64)
55
+ rs = s.map(lambda x: x)
56
+ tm.assert_series_equal(s, rs)
57
+
58
+
59
+ def test_map_same_length_inference_bug():
60
+ s = Series([1, 2])
61
+
62
+ def f(x):
63
+ return (x, x + 1)
64
+
65
+ s = Series([1, 2, 3])
66
+ result = s.map(f)
67
+ expected = Series([(1, 2), (2, 3), (3, 4)])
68
+ tm.assert_series_equal(result, expected)
69
+
70
+ s = Series(["foo,bar"])
71
+ result = s.map(lambda x: x.split(","))
72
+ expected = Series([("foo", "bar")])
73
+ tm.assert_series_equal(result, expected)
74
+
75
+
76
+ def test_series_map_box_timestamps():
77
+ # GH#2689, GH#2627
78
+ ser = Series(date_range("1/1/2000", periods=3))
79
+
80
+ def func(x):
81
+ return (x.hour, x.day, x.month)
82
+
83
+ result = ser.map(func)
84
+ expected = Series([(0, 1, 1), (0, 2, 1), (0, 3, 1)])
85
+ tm.assert_series_equal(result, expected)
86
+
87
+
88
+ def test_map_series_stringdtype(any_string_dtype, using_infer_string):
89
+ # map test on StringDType, GH#40823
90
+ ser1 = Series(
91
+ data=["cat", "dog", "rabbit"],
92
+ index=["id1", "id2", "id3"],
93
+ dtype=any_string_dtype,
94
+ )
95
+ ser2 = Series(["id3", "id2", "id1", "id7000"], dtype=any_string_dtype)
96
+ result = ser2.map(ser1)
97
+
98
+ item = pd.NA
99
+ if ser2.dtype == object:
100
+ item = np.nan
101
+
102
+ expected = Series(data=["rabbit", "dog", "cat", item], dtype=any_string_dtype)
103
+ if using_infer_string and any_string_dtype == "object":
104
+ expected = expected.astype("string[pyarrow_numpy]")
105
+
106
+ tm.assert_series_equal(result, expected)
107
+
108
+
109
+ @pytest.mark.parametrize(
110
+ "data, expected_dtype",
111
+ [(["1-1", "1-1", np.nan], "category"), (["1-1", "1-2", np.nan], object)],
112
+ )
113
+ def test_map_categorical_with_nan_values(data, expected_dtype, using_infer_string):
114
+ # GH 20714 bug fixed in: GH 24275
115
+ def func(val):
116
+ return val.split("-")[0]
117
+
118
+ s = Series(data, dtype="category")
119
+
120
+ result = s.map(func, na_action="ignore")
121
+ if using_infer_string and expected_dtype == object:
122
+ expected_dtype = "string[pyarrow_numpy]"
123
+ expected = Series(["1", "1", np.nan], dtype=expected_dtype)
124
+ tm.assert_series_equal(result, expected)
125
+
126
+
127
+ def test_map_empty_integer_series():
128
+ # GH52384
129
+ s = Series([], dtype=int)
130
+ result = s.map(lambda x: x)
131
+ tm.assert_series_equal(result, s)
132
+
133
+
134
+ def test_map_empty_integer_series_with_datetime_index():
135
+ # GH 21245
136
+ s = Series([], index=date_range(start="2018-01-01", periods=0), dtype=int)
137
+ result = s.map(lambda x: x)
138
+ tm.assert_series_equal(result, s)
139
+
140
+
141
+ @pytest.mark.parametrize("func", [str, lambda x: str(x)])
142
+ def test_map_simple_str_callables_same_as_astype(
143
+ string_series, func, using_infer_string
144
+ ):
145
+ # test that we are evaluating row-by-row first
146
+ # before vectorized evaluation
147
+ result = string_series.map(func)
148
+ expected = string_series.astype(
149
+ str if not using_infer_string else "string[pyarrow_numpy]"
150
+ )
151
+ tm.assert_series_equal(result, expected)
152
+
153
+
154
+ def test_list_raises(string_series):
155
+ with pytest.raises(TypeError, match="'list' object is not callable"):
156
+ string_series.map([lambda x: x])
157
+
158
+
159
+ def test_map():
160
+ data = {
161
+ "A": [0.0, 1.0, 2.0, 3.0, 4.0],
162
+ "B": [0.0, 1.0, 0.0, 1.0, 0.0],
163
+ "C": ["foo1", "foo2", "foo3", "foo4", "foo5"],
164
+ "D": bdate_range("1/1/2009", periods=5),
165
+ }
166
+
167
+ source = Series(data["B"], index=data["C"])
168
+ target = Series(data["C"][:4], index=data["D"][:4])
169
+
170
+ merged = target.map(source)
171
+
172
+ for k, v in merged.items():
173
+ assert v == source[target[k]]
174
+
175
+ # input could be a dict
176
+ merged = target.map(source.to_dict())
177
+
178
+ for k, v in merged.items():
179
+ assert v == source[target[k]]
180
+
181
+
182
+ def test_map_datetime(datetime_series):
183
+ # function
184
+ result = datetime_series.map(lambda x: x * 2)
185
+ tm.assert_series_equal(result, datetime_series * 2)
186
+
187
+
188
+ def test_map_category():
189
+ # GH 10324
190
+ a = Series([1, 2, 3, 4])
191
+ b = Series(["even", "odd", "even", "odd"], dtype="category")
192
+ c = Series(["even", "odd", "even", "odd"])
193
+
194
+ exp = Series(["odd", "even", "odd", np.nan], dtype="category")
195
+ tm.assert_series_equal(a.map(b), exp)
196
+ exp = Series(["odd", "even", "odd", np.nan])
197
+ tm.assert_series_equal(a.map(c), exp)
198
+
199
+
200
+ def test_map_category_numeric():
201
+ a = Series(["a", "b", "c", "d"])
202
+ b = Series([1, 2, 3, 4], index=pd.CategoricalIndex(["b", "c", "d", "e"]))
203
+ c = Series([1, 2, 3, 4], index=Index(["b", "c", "d", "e"]))
204
+
205
+ exp = Series([np.nan, 1, 2, 3])
206
+ tm.assert_series_equal(a.map(b), exp)
207
+ exp = Series([np.nan, 1, 2, 3])
208
+ tm.assert_series_equal(a.map(c), exp)
209
+
210
+
211
+ def test_map_category_string():
212
+ a = Series(["a", "b", "c", "d"])
213
+ b = Series(
214
+ ["B", "C", "D", "E"],
215
+ dtype="category",
216
+ index=pd.CategoricalIndex(["b", "c", "d", "e"]),
217
+ )
218
+ c = Series(["B", "C", "D", "E"], index=Index(["b", "c", "d", "e"]))
219
+
220
+ exp = Series(
221
+ pd.Categorical([np.nan, "B", "C", "D"], categories=["B", "C", "D", "E"])
222
+ )
223
+ tm.assert_series_equal(a.map(b), exp)
224
+ exp = Series([np.nan, "B", "C", "D"])
225
+ tm.assert_series_equal(a.map(c), exp)
226
+
227
+
228
+ def test_map_empty(request, index):
229
+ if isinstance(index, MultiIndex):
230
+ request.applymarker(
231
+ pytest.mark.xfail(
232
+ reason="Initializing a Series from a MultiIndex is not supported"
233
+ )
234
+ )
235
+
236
+ s = Series(index)
237
+ result = s.map({})
238
+
239
+ expected = Series(np.nan, index=s.index)
240
+ tm.assert_series_equal(result, expected)
241
+
242
+
243
+ def test_map_compat():
244
+ # related GH 8024
245
+ s = Series([True, True, False], index=[1, 2, 3])
246
+ result = s.map({True: "foo", False: "bar"})
247
+ expected = Series(["foo", "foo", "bar"], index=[1, 2, 3])
248
+ tm.assert_series_equal(result, expected)
249
+
250
+
251
+ def test_map_int():
252
+ left = Series({"a": 1.0, "b": 2.0, "c": 3.0, "d": 4})
253
+ right = Series({1: 11, 2: 22, 3: 33})
254
+
255
+ assert left.dtype == np.float64
256
+ assert issubclass(right.dtype.type, np.integer)
257
+
258
+ merged = left.map(right)
259
+ assert merged.dtype == np.float64
260
+ assert isna(merged["d"])
261
+ assert not isna(merged["c"])
262
+
263
+
264
+ def test_map_type_inference():
265
+ s = Series(range(3))
266
+ s2 = s.map(lambda x: np.where(x == 0, 0, 1))
267
+ assert issubclass(s2.dtype.type, np.integer)
268
+
269
+
270
+ def test_map_decimal(string_series):
271
+ result = string_series.map(lambda x: Decimal(str(x)))
272
+ assert result.dtype == np.object_
273
+ assert isinstance(result.iloc[0], Decimal)
274
+
275
+
276
+ def test_map_na_exclusion():
277
+ s = Series([1.5, np.nan, 3, np.nan, 5])
278
+
279
+ result = s.map(lambda x: x * 2, na_action="ignore")
280
+ exp = s * 2
281
+ tm.assert_series_equal(result, exp)
282
+
283
+
284
+ def test_map_dict_with_tuple_keys():
285
+ """
286
+ Due to new MultiIndex-ing behaviour in v0.14.0,
287
+ dicts with tuple keys passed to map were being
288
+ converted to a multi-index, preventing tuple values
289
+ from being mapped properly.
290
+ """
291
+ # GH 18496
292
+ df = DataFrame({"a": [(1,), (2,), (3, 4), (5, 6)]})
293
+ label_mappings = {(1,): "A", (2,): "B", (3, 4): "A", (5, 6): "B"}
294
+
295
+ df["labels"] = df["a"].map(label_mappings)
296
+ df["expected_labels"] = Series(["A", "B", "A", "B"], index=df.index)
297
+ # All labels should be filled now
298
+ tm.assert_series_equal(df["labels"], df["expected_labels"], check_names=False)
299
+
300
+
301
+ def test_map_counter():
302
+ s = Series(["a", "b", "c"], index=[1, 2, 3])
303
+ counter = Counter()
304
+ counter["b"] = 5
305
+ counter["c"] += 1
306
+ result = s.map(counter)
307
+ expected = Series([0, 5, 1], index=[1, 2, 3])
308
+ tm.assert_series_equal(result, expected)
309
+
310
+
311
+ def test_map_defaultdict():
312
+ s = Series([1, 2, 3], index=["a", "b", "c"])
313
+ default_dict = defaultdict(lambda: "blank")
314
+ default_dict[1] = "stuff"
315
+ result = s.map(default_dict)
316
+ expected = Series(["stuff", "blank", "blank"], index=["a", "b", "c"])
317
+ tm.assert_series_equal(result, expected)
318
+
319
+
320
+ def test_map_dict_na_key():
321
+ # https://github.com/pandas-dev/pandas/issues/17648
322
+ # Checks that np.nan key is appropriately mapped
323
+ s = Series([1, 2, np.nan])
324
+ expected = Series(["a", "b", "c"])
325
+ result = s.map({1: "a", 2: "b", np.nan: "c"})
326
+ tm.assert_series_equal(result, expected)
327
+
328
+
329
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
330
+ def test_map_defaultdict_na_key(na_action):
331
+ # GH 48813
332
+ s = Series([1, 2, np.nan])
333
+ default_map = defaultdict(lambda: "missing", {1: "a", 2: "b", np.nan: "c"})
334
+ result = s.map(default_map, na_action=na_action)
335
+ expected = Series({0: "a", 1: "b", 2: "c" if na_action is None else np.nan})
336
+ tm.assert_series_equal(result, expected)
337
+
338
+
339
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
340
+ def test_map_defaultdict_missing_key(na_action):
341
+ # GH 48813
342
+ s = Series([1, 2, np.nan])
343
+ default_map = defaultdict(lambda: "missing", {1: "a", 2: "b", 3: "c"})
344
+ result = s.map(default_map, na_action=na_action)
345
+ expected = Series({0: "a", 1: "b", 2: "missing" if na_action is None else np.nan})
346
+ tm.assert_series_equal(result, expected)
347
+
348
+
349
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
350
+ def test_map_defaultdict_unmutated(na_action):
351
+ # GH 48813
352
+ s = Series([1, 2, np.nan])
353
+ default_map = defaultdict(lambda: "missing", {1: "a", 2: "b", np.nan: "c"})
354
+ expected_default_map = default_map.copy()
355
+ s.map(default_map, na_action=na_action)
356
+ assert default_map == expected_default_map
357
+
358
+
359
+ @pytest.mark.parametrize("arg_func", [dict, Series])
360
+ def test_map_dict_ignore_na(arg_func):
361
+ # GH#47527
362
+ mapping = arg_func({1: 10, np.nan: 42})
363
+ ser = Series([1, np.nan, 2])
364
+ result = ser.map(mapping, na_action="ignore")
365
+ expected = Series([10, np.nan, np.nan])
366
+ tm.assert_series_equal(result, expected)
367
+
368
+
369
+ def test_map_defaultdict_ignore_na():
370
+ # GH#47527
371
+ mapping = defaultdict(int, {1: 10, np.nan: 42})
372
+ ser = Series([1, np.nan, 2])
373
+ result = ser.map(mapping)
374
+ expected = Series([10, 42, 0])
375
+ tm.assert_series_equal(result, expected)
376
+
377
+
378
+ @pytest.mark.parametrize(
379
+ "na_action, expected",
380
+ [(None, Series([10.0, 42.0, np.nan])), ("ignore", Series([10, np.nan, np.nan]))],
381
+ )
382
+ def test_map_categorical_na_ignore(na_action, expected):
383
+ # GH#47527
384
+ values = pd.Categorical([1, np.nan, 2], categories=[10, 1, 2])
385
+ ser = Series(values)
386
+ result = ser.map({1: 10, np.nan: 42}, na_action=na_action)
387
+ tm.assert_series_equal(result, expected)
388
+
389
+
390
+ def test_map_dict_subclass_with_missing():
391
+ """
392
+ Test Series.map with a dictionary subclass that defines __missing__,
393
+ i.e. sets a default value (GH #15999).
394
+ """
395
+
396
+ class DictWithMissing(dict):
397
+ def __missing__(self, key):
398
+ return "missing"
399
+
400
+ s = Series([1, 2, 3])
401
+ dictionary = DictWithMissing({3: "three"})
402
+ result = s.map(dictionary)
403
+ expected = Series(["missing", "missing", "three"])
404
+ tm.assert_series_equal(result, expected)
405
+
406
+
407
+ def test_map_dict_subclass_without_missing():
408
+ class DictWithoutMissing(dict):
409
+ pass
410
+
411
+ s = Series([1, 2, 3])
412
+ dictionary = DictWithoutMissing({3: "three"})
413
+ result = s.map(dictionary)
414
+ expected = Series([np.nan, np.nan, "three"])
415
+ tm.assert_series_equal(result, expected)
416
+
417
+
418
+ def test_map_abc_mapping(non_dict_mapping_subclass):
419
+ # https://github.com/pandas-dev/pandas/issues/29733
420
+ # Check collections.abc.Mapping support as mapper for Series.map
421
+ s = Series([1, 2, 3])
422
+ not_a_dictionary = non_dict_mapping_subclass({3: "three"})
423
+ result = s.map(not_a_dictionary)
424
+ expected = Series([np.nan, np.nan, "three"])
425
+ tm.assert_series_equal(result, expected)
426
+
427
+
428
+ def test_map_abc_mapping_with_missing(non_dict_mapping_subclass):
429
+ # https://github.com/pandas-dev/pandas/issues/29733
430
+ # Check collections.abc.Mapping support as mapper for Series.map
431
+ class NonDictMappingWithMissing(non_dict_mapping_subclass):
432
+ def __missing__(self, key):
433
+ return "missing"
434
+
435
+ s = Series([1, 2, 3])
436
+ not_a_dictionary = NonDictMappingWithMissing({3: "three"})
437
+ result = s.map(not_a_dictionary)
438
+ # __missing__ is a dict concept, not a Mapping concept,
439
+ # so it should not change the result!
440
+ expected = Series([np.nan, np.nan, "three"])
441
+ tm.assert_series_equal(result, expected)
442
+
443
+
444
+ def test_map_box_dt64(unit):
445
+ vals = [pd.Timestamp("2011-01-01"), pd.Timestamp("2011-01-02")]
446
+ ser = Series(vals).dt.as_unit(unit)
447
+ assert ser.dtype == f"datetime64[{unit}]"
448
+ # boxed value must be Timestamp instance
449
+ res = ser.map(lambda x: f"{type(x).__name__}_{x.day}_{x.tz}")
450
+ exp = Series(["Timestamp_1_None", "Timestamp_2_None"])
451
+ tm.assert_series_equal(res, exp)
452
+
453
+
454
+ def test_map_box_dt64tz(unit):
455
+ vals = [
456
+ pd.Timestamp("2011-01-01", tz="US/Eastern"),
457
+ pd.Timestamp("2011-01-02", tz="US/Eastern"),
458
+ ]
459
+ ser = Series(vals).dt.as_unit(unit)
460
+ assert ser.dtype == f"datetime64[{unit}, US/Eastern]"
461
+ res = ser.map(lambda x: f"{type(x).__name__}_{x.day}_{x.tz}")
462
+ exp = Series(["Timestamp_1_US/Eastern", "Timestamp_2_US/Eastern"])
463
+ tm.assert_series_equal(res, exp)
464
+
465
+
466
+ def test_map_box_td64(unit):
467
+ # timedelta
468
+ vals = [pd.Timedelta("1 days"), pd.Timedelta("2 days")]
469
+ ser = Series(vals).dt.as_unit(unit)
470
+ assert ser.dtype == f"timedelta64[{unit}]"
471
+ res = ser.map(lambda x: f"{type(x).__name__}_{x.days}")
472
+ exp = Series(["Timedelta_1", "Timedelta_2"])
473
+ tm.assert_series_equal(res, exp)
474
+
475
+
476
+ def test_map_box_period():
477
+ # period
478
+ vals = [pd.Period("2011-01-01", freq="M"), pd.Period("2011-01-02", freq="M")]
479
+ ser = Series(vals)
480
+ assert ser.dtype == "Period[M]"
481
+ res = ser.map(lambda x: f"{type(x).__name__}_{x.freqstr}")
482
+ exp = Series(["Period_M", "Period_M"])
483
+ tm.assert_series_equal(res, exp)
484
+
485
+
486
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
487
+ def test_map_categorical(na_action, using_infer_string):
488
+ values = pd.Categorical(list("ABBABCD"), categories=list("DCBA"), ordered=True)
489
+ s = Series(values, name="XX", index=list("abcdefg"))
490
+
491
+ result = s.map(lambda x: x.lower(), na_action=na_action)
492
+ exp_values = pd.Categorical(list("abbabcd"), categories=list("dcba"), ordered=True)
493
+ exp = Series(exp_values, name="XX", index=list("abcdefg"))
494
+ tm.assert_series_equal(result, exp)
495
+ tm.assert_categorical_equal(result.values, exp_values)
496
+
497
+ result = s.map(lambda x: "A", na_action=na_action)
498
+ exp = Series(["A"] * 7, name="XX", index=list("abcdefg"))
499
+ tm.assert_series_equal(result, exp)
500
+ assert result.dtype == object if not using_infer_string else "string"
501
+
502
+
503
+ @pytest.mark.parametrize(
504
+ "na_action, expected",
505
+ (
506
+ [None, Series(["A", "B", "nan"], name="XX")],
507
+ [
508
+ "ignore",
509
+ Series(
510
+ ["A", "B", np.nan],
511
+ name="XX",
512
+ dtype=pd.CategoricalDtype(list("DCBA"), True),
513
+ ),
514
+ ],
515
+ ),
516
+ )
517
+ def test_map_categorical_na_action(na_action, expected):
518
+ dtype = pd.CategoricalDtype(list("DCBA"), ordered=True)
519
+ values = pd.Categorical(list("AB") + [np.nan], dtype=dtype)
520
+ s = Series(values, name="XX")
521
+ result = s.map(str, na_action=na_action)
522
+ tm.assert_series_equal(result, expected)
523
+
524
+
525
+ def test_map_datetimetz():
526
+ values = date_range("2011-01-01", "2011-01-02", freq="h").tz_localize("Asia/Tokyo")
527
+ s = Series(values, name="XX")
528
+
529
+ # keep tz
530
+ result = s.map(lambda x: x + pd.offsets.Day())
531
+ exp_values = date_range("2011-01-02", "2011-01-03", freq="h").tz_localize(
532
+ "Asia/Tokyo"
533
+ )
534
+ exp = Series(exp_values, name="XX")
535
+ tm.assert_series_equal(result, exp)
536
+
537
+ result = s.map(lambda x: x.hour)
538
+ exp = Series(list(range(24)) + [0], name="XX", dtype=np.int64)
539
+ tm.assert_series_equal(result, exp)
540
+
541
+ # not vectorized
542
+ def f(x):
543
+ if not isinstance(x, pd.Timestamp):
544
+ raise ValueError
545
+ return str(x.tz)
546
+
547
+ result = s.map(f)
548
+ exp = Series(["Asia/Tokyo"] * 25, name="XX")
549
+ tm.assert_series_equal(result, exp)
550
+
551
+
552
+ @pytest.mark.parametrize(
553
+ "vals,mapping,exp",
554
+ [
555
+ (list("abc"), {np.nan: "not NaN"}, [np.nan] * 3 + ["not NaN"]),
556
+ (list("abc"), {"a": "a letter"}, ["a letter"] + [np.nan] * 3),
557
+ (list(range(3)), {0: 42}, [42] + [np.nan] * 3),
558
+ ],
559
+ )
560
+ def test_map_missing_mixed(vals, mapping, exp, using_infer_string):
561
+ # GH20495
562
+ s = Series(vals + [np.nan])
563
+ result = s.map(mapping)
564
+ exp = Series(exp)
565
+ if using_infer_string and mapping == {np.nan: "not NaN"}:
566
+ exp.iloc[-1] = np.nan
567
+ tm.assert_series_equal(result, exp)
568
+
569
+
570
+ def test_map_scalar_on_date_time_index_aware_series():
571
+ # GH 25959
572
+ # Calling map on a localized time series should not cause an error
573
+ series = Series(
574
+ np.arange(10, dtype=np.float64),
575
+ index=date_range("2020-01-01", periods=10, tz="UTC"),
576
+ name="ts",
577
+ )
578
+ result = Series(series.index).map(lambda x: 1)
579
+ tm.assert_series_equal(result, Series(np.ones(len(series)), dtype="int64"))
580
+
581
+
582
+ def test_map_float_to_string_precision():
583
+ # GH 13228
584
+ ser = Series(1 / 3)
585
+ result = ser.map(lambda val: str(val)).to_dict()
586
+ expected = {0: "0.3333333333333333"}
587
+ assert result == expected
588
+
589
+
590
+ def test_map_to_timedelta():
591
+ list_of_valid_strings = ["00:00:01", "00:00:02"]
592
+ a = pd.to_timedelta(list_of_valid_strings)
593
+ b = Series(list_of_valid_strings).map(pd.to_timedelta)
594
+ tm.assert_series_equal(Series(a), b)
595
+
596
+ list_of_strings = ["00:00:01", np.nan, pd.NaT, pd.NaT]
597
+
598
+ a = pd.to_timedelta(list_of_strings)
599
+ ser = Series(list_of_strings)
600
+ b = ser.map(pd.to_timedelta)
601
+ tm.assert_series_equal(Series(a), b)
602
+
603
+
604
+ def test_map_type():
605
+ # GH 46719
606
+ s = Series([3, "string", float], index=["a", "b", "c"])
607
+ result = s.map(type)
608
+ expected = Series([int, str, type], index=["a", "b", "c"])
609
+ tm.assert_series_equal(result, expected)
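Illustrative note (not part of the commit): a minimal sketch of the Series.map behaviors covered above, assuming a recent pandas. Callables are applied element-wise, dict-likes do a lookup (missing keys become NaN unless the mapping defines __missing__), and na_action="ignore" passes NaN through untouched.

import numpy as np
import pandas as pd

ser = pd.Series([1, 2, np.nan])
print(ser.map(lambda x: x * 2, na_action="ignore"))  # NaN is skipped, not doubled
print(ser.map({1: "a", 2: "b"}))                     # unmapped values (incl. NaN) -> NaN
print(pd.Series([True, False]).map({True: "foo", False: "bar"}))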
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_matmul.py ADDED
@@ -0,0 +1,82 @@
1
+ import operator
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas import (
7
+ DataFrame,
8
+ Series,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ class TestMatmul:
14
+ def test_matmul(self):
15
+ # matmul test is for GH#10259
16
+ a = Series(
17
+ np.random.default_rng(2).standard_normal(4), index=["p", "q", "r", "s"]
18
+ )
19
+ b = DataFrame(
20
+ np.random.default_rng(2).standard_normal((3, 4)),
21
+ index=["1", "2", "3"],
22
+ columns=["p", "q", "r", "s"],
23
+ ).T
24
+
25
+ # Series @ DataFrame -> Series
26
+ result = operator.matmul(a, b)
27
+ expected = Series(np.dot(a.values, b.values), index=["1", "2", "3"])
28
+ tm.assert_series_equal(result, expected)
29
+
30
+ # DataFrame @ Series -> Series
31
+ result = operator.matmul(b.T, a)
32
+ expected = Series(np.dot(b.T.values, a.T.values), index=["1", "2", "3"])
33
+ tm.assert_series_equal(result, expected)
34
+
35
+ # Series @ Series -> scalar
36
+ result = operator.matmul(a, a)
37
+ expected = np.dot(a.values, a.values)
38
+ tm.assert_almost_equal(result, expected)
39
+
40
+ # GH#21530
41
+ # vector (1D np.array) @ Series (__rmatmul__)
42
+ result = operator.matmul(a.values, a)
43
+ expected = np.dot(a.values, a.values)
44
+ tm.assert_almost_equal(result, expected)
45
+
46
+ # GH#21530
47
+ # vector (1D list) @ Series (__rmatmul__)
48
+ result = operator.matmul(a.values.tolist(), a)
49
+ expected = np.dot(a.values, a.values)
50
+ tm.assert_almost_equal(result, expected)
51
+
52
+ # GH#21530
53
+ # matrix (2D np.array) @ Series (__rmatmul__)
54
+ result = operator.matmul(b.T.values, a)
55
+ expected = np.dot(b.T.values, a.values)
56
+ tm.assert_almost_equal(result, expected)
57
+
58
+ # GH#21530
59
+ # matrix (2D nested lists) @ Series (__rmatmul__)
60
+ result = operator.matmul(b.T.values.tolist(), a)
61
+ expected = np.dot(b.T.values, a.values)
62
+ tm.assert_almost_equal(result, expected)
63
+
64
+ # mixed dtype DataFrame @ Series
65
+ a["p"] = int(a.p)
66
+ result = operator.matmul(b.T, a)
67
+ expected = Series(np.dot(b.T.values, a.T.values), index=["1", "2", "3"])
68
+ tm.assert_series_equal(result, expected)
69
+
70
+ # different dtypes DataFrame @ Series
71
+ a = a.astype(int)
72
+ result = operator.matmul(b.T, a)
73
+ expected = Series(np.dot(b.T.values, a.T.values), index=["1", "2", "3"])
74
+ tm.assert_series_equal(result, expected)
75
+
76
+ msg = r"Dot product shape mismatch, \(4,\) vs \(3,\)"
77
+ # exception raised is of type Exception
78
+ with pytest.raises(Exception, match=msg):
79
+ a.dot(a.values[:3])
80
+ msg = "matrices are not aligned"
81
+ with pytest.raises(ValueError, match=msg):
82
+ a.dot(b.T)
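Illustrative note (not part of the commit): a hedged sketch of the matmul (@) semantics exercised above. Series @ DataFrame and DataFrame @ Series align on labels and return a Series; Series @ Series returns a scalar dot product.

import numpy as np
import pandas as pd

a = pd.Series([1.0, 2.0, 3.0], index=["p", "q", "r"])
b = pd.DataFrame(np.eye(3), index=["p", "q", "r"], columns=["x", "y", "z"])
print(a @ b)    # Series indexed by ["x", "y", "z"]
print(b.T @ a)  # Series indexed by ["x", "y", "z"]
print(a @ a)    # 14.0, a plain scalar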
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_nunique.py ADDED
@@ -0,0 +1,24 @@
1
+ import numpy as np
2
+
3
+ from pandas import (
4
+ Categorical,
5
+ Series,
6
+ )
7
+
8
+
9
+ def test_nunique():
10
+ # basics.rst doc example
11
+ series = Series(np.random.default_rng(2).standard_normal(500))
12
+ series[20:500] = np.nan
13
+ series[10:20] = 5000
14
+ result = series.nunique()
15
+ assert result == 11
16
+
17
+
18
+ def test_nunique_categorical():
19
+ # GH#18051
20
+ ser = Series(Categorical([]))
21
+ assert ser.nunique() == 0
22
+
23
+ ser = Series(Categorical([np.nan]))
24
+ assert ser.nunique() == 0
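Illustrative note (not part of the commit): Series.nunique counts distinct non-NA values, so NaN (and an all-NaN categorical) contributes nothing to the count, as the tests above assert.

import numpy as np
import pandas as pd

print(pd.Series([1, 1, 2, np.nan]).nunique())         # 2
print(pd.Series(pd.Categorical([np.nan])).nunique())  # 0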
venv/lib/python3.10/site-packages/pandas/tests/series/methods/test_pct_change.py ADDED
@@ -0,0 +1,128 @@
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ date_range,
7
+ )
8
+ import pandas._testing as tm
9
+
10
+
11
+ class TestSeriesPctChange:
12
+ def test_pct_change(self, datetime_series):
13
+ msg = (
14
+ "The 'fill_method' keyword being not None and the 'limit' keyword in "
15
+ "Series.pct_change are deprecated"
16
+ )
17
+
18
+ rs = datetime_series.pct_change(fill_method=None)
19
+ tm.assert_series_equal(rs, datetime_series / datetime_series.shift(1) - 1)
20
+
21
+ rs = datetime_series.pct_change(2)
22
+ filled = datetime_series.ffill()
23
+ tm.assert_series_equal(rs, filled / filled.shift(2) - 1)
24
+
25
+ with tm.assert_produces_warning(FutureWarning, match=msg):
26
+ rs = datetime_series.pct_change(fill_method="bfill", limit=1)
27
+ filled = datetime_series.bfill(limit=1)
28
+ tm.assert_series_equal(rs, filled / filled.shift(1) - 1)
29
+
30
+ rs = datetime_series.pct_change(freq="5D")
31
+ filled = datetime_series.ffill()
32
+ tm.assert_series_equal(
33
+ rs, (filled / filled.shift(freq="5D") - 1).reindex_like(filled)
34
+ )
35
+
36
+ def test_pct_change_with_duplicate_axis(self):
37
+ # GH#28664
38
+ common_idx = date_range("2019-11-14", periods=5, freq="D")
39
+ result = Series(range(5), common_idx).pct_change(freq="B")
40
+
41
+ # the reason that the expected should be like this is documented at PR 28681
42
+ expected = Series([np.nan, np.inf, np.nan, np.nan, 3.0], common_idx)
43
+
44
+ tm.assert_series_equal(result, expected)
45
+
46
+ def test_pct_change_shift_over_nas(self):
47
+ s = Series([1.0, 1.5, np.nan, 2.5, 3.0])
48
+
49
+ msg = "The default fill_method='pad' in Series.pct_change is deprecated"
50
+ with tm.assert_produces_warning(FutureWarning, match=msg):
51
+ chg = s.pct_change()
52
+
53
+ expected = Series([np.nan, 0.5, 0.0, 2.5 / 1.5 - 1, 0.2])
54
+ tm.assert_series_equal(chg, expected)
55
+
56
+ @pytest.mark.parametrize(
57
+ "freq, periods, fill_method, limit",
58
+ [
59
+ ("5B", 5, None, None),
60
+ ("3B", 3, None, None),
61
+ ("3B", 3, "bfill", None),
62
+ ("7B", 7, "pad", 1),
63
+ ("7B", 7, "bfill", 3),
64
+ ("14B", 14, None, None),
65
+ ],
66
+ )
67
+ def test_pct_change_periods_freq(
68
+ self, freq, periods, fill_method, limit, datetime_series
69
+ ):
70
+ msg = (
71
+ "The 'fill_method' keyword being not None and the 'limit' keyword in "
72
+ "Series.pct_change are deprecated"
73
+ )
74
+
75
+ # GH#7292
76
+ with tm.assert_produces_warning(FutureWarning, match=msg):
77
+ rs_freq = datetime_series.pct_change(
78
+ freq=freq, fill_method=fill_method, limit=limit
79
+ )
80
+ with tm.assert_produces_warning(FutureWarning, match=msg):
81
+ rs_periods = datetime_series.pct_change(
82
+ periods, fill_method=fill_method, limit=limit
83
+ )
84
+ tm.assert_series_equal(rs_freq, rs_periods)
85
+
86
+ empty_ts = Series(index=datetime_series.index, dtype=object)
87
+ with tm.assert_produces_warning(FutureWarning, match=msg):
88
+ rs_freq = empty_ts.pct_change(
89
+ freq=freq, fill_method=fill_method, limit=limit
90
+ )
91
+ with tm.assert_produces_warning(FutureWarning, match=msg):
92
+ rs_periods = empty_ts.pct_change(
93
+ periods, fill_method=fill_method, limit=limit
94
+ )
95
+ tm.assert_series_equal(rs_freq, rs_periods)
96
+
97
+
98
+ @pytest.mark.parametrize("fill_method", ["pad", "ffill", None])
99
+ def test_pct_change_with_duplicated_indices(fill_method):
100
+ # GH30463
101
+ s = Series([np.nan, 1, 2, 3, 9, 18], index=["a", "b"] * 3)
102
+
103
+ warn = None if fill_method is None else FutureWarning
104
+ msg = (
105
+ "The 'fill_method' keyword being not None and the 'limit' keyword in "
106
+ "Series.pct_change are deprecated"
107
+ )
108
+ with tm.assert_produces_warning(warn, match=msg):
109
+ result = s.pct_change(fill_method=fill_method)
110
+
111
+ expected = Series([np.nan, np.nan, 1.0, 0.5, 2.0, 1.0], index=["a", "b"] * 3)
112
+ tm.assert_series_equal(result, expected)
113
+
114
+
115
+ def test_pct_change_no_warning_na_beginning():
116
+ # GH#54981
117
+ ser = Series([None, None, 1, 2, 3])
118
+ result = ser.pct_change()
119
+ expected = Series([np.nan, np.nan, np.nan, 1, 0.5])
120
+ tm.assert_series_equal(result, expected)
121
+
122
+
123
+ def test_pct_change_empty():
124
+ # GH 57056
125
+ ser = Series([], dtype="float64")
126
+ expected = ser.copy()
127
+ result = ser.pct_change(periods=0)
128
+ tm.assert_series_equal(expected, result)
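Illustrative note (not part of the commit): a minimal sketch of Series.pct_change as tested above, assuming the pandas version these tests target. The result is the fractional change versus the element `periods` steps back, and passing fill_method=None avoids both forward-filling and the deprecation warning the tests check for.

import numpy as np
import pandas as pd

ser = pd.Series([1.0, 1.5, np.nan, 2.5, 3.0])
print(ser.pct_change(fill_method=None))                   # NaN propagates instead of being forward-filled
print(pd.Series([1, 2, 4]).pct_change(periods=2, fill_method=None))  # [NaN, NaN, 3.0]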