applied-ai-018 committed on
Commit
7cb89f8
·
verified ·
1 Parent(s): de5794a

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/__init__.py +0 -0
  2. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/common.py +63 -0
  3. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/conftest.py +100 -0
  4. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_assign.py +84 -0
  5. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_copy.py +64 -0
  6. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_fillna.py +932 -0
  7. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_first_valid_index.py +78 -0
  8. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_map.py +216 -0
  9. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_quantile.py +972 -0
  10. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_to_records.py +523 -0
  11. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_tz_localize.py +68 -0
  12. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_alter_axes.py +30 -0
  13. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_arithmetic.py +2136 -0
  14. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_block_internals.py +457 -0
  15. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_constructors.py +0 -0
  16. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_cumulative.py +81 -0
  17. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_iteration.py +160 -0
  18. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_logical_ops.py +218 -0
  19. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_nonunique_indexes.py +337 -0
  20. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_npfuncs.py +89 -0
  21. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_query_eval.py +1425 -0
  22. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_reductions.py +2157 -0
  23. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_stack_unstack.py +0 -0
  24. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_ufunc.py +311 -0
  25. env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_unary.py +204 -0
  26. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/__init__.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/conftest.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_all_methods.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_api.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_apply.cpython-310.pyc +0 -0
  31. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_apply_mutate.cpython-310.pyc +0 -0
  32. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_bin_groupby.cpython-310.pyc +0 -0
  33. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_categorical.cpython-310.pyc +0 -0
  34. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_counting.cpython-310.pyc +0 -0
  35. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_cumulative.cpython-310.pyc +0 -0
  36. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_filters.cpython-310.pyc +0 -0
  37. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_groupby.cpython-310.pyc +0 -0
  38. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_groupby_dropna.cpython-310.pyc +0 -0
  39. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_groupby_subclass.cpython-310.pyc +0 -0
  40. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_grouping.cpython-310.pyc +0 -0
  41. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_index_as_string.cpython-310.pyc +0 -0
  42. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_indexing.cpython-310.pyc +0 -0
  43. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_libgroupby.cpython-310.pyc +0 -0
  44. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_missing.cpython-310.pyc +0 -0
  45. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_numba.cpython-310.pyc +0 -0
  46. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_numeric_only.cpython-310.pyc +0 -0
  47. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_pipe.cpython-310.pyc +0 -0
  48. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_raises.cpython-310.pyc +0 -0
  49. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_reductions.cpython-310.pyc +0 -0
  50. env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_timegrouper.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/common.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from typing import TYPE_CHECKING
4
+
5
+ from pandas import (
6
+ DataFrame,
7
+ concat,
8
+ )
9
+
10
+ if TYPE_CHECKING:
11
+ from pandas._typing import AxisInt
12
+
13
+
14
def _check_mixed_float(df, dtype=None):
    """
    Assert that the columns of ``df`` carry the expected "mixed float" dtypes.

    Parameters
    ----------
    df : DataFrame
        Frame whose columns "A"-"D" are checked.
    dtype : str or dict, optional
        A single dtype expected for every column, or a per-column override
        merged into the defaults.  A column mapped to a falsy value
        (e.g. ``None``) is skipped.
    """
    # float16 are most likely to be upcasted to float32
    expected = {"A": "float32", "B": "float32", "C": "float16", "D": "float64"}
    if isinstance(dtype, str):
        # one dtype expected for every column
        expected = dict.fromkeys(expected, dtype)
    elif isinstance(dtype, dict):
        expected.update(dtype)
    # iterate explicitly over "ABCD" (not expected.keys()) so that extra keys
    # in an override dict are ignored, matching the original per-column guards
    for col in "ABCD":
        if expected.get(col):
            assert df.dtypes[col] == expected[col]
29
+
30
+
31
def _check_mixed_int(df, dtype=None):
    """
    Assert that the columns of ``df`` carry the expected "mixed int" dtypes.

    Parameters
    ----------
    df : DataFrame
        Frame whose columns "A"-"D" are checked.
    dtype : str or dict, optional
        A single dtype expected for every column, or a per-column override
        merged into the defaults.  A column mapped to a falsy value
        (e.g. ``None``) is skipped.
    """
    expected = {"A": "int32", "B": "uint64", "C": "uint8", "D": "int64"}
    if isinstance(dtype, str):
        # one dtype expected for every column
        expected = dict.fromkeys(expected, dtype)
    elif isinstance(dtype, dict):
        expected.update(dtype)
    # iterate explicitly over "ABCD" (not expected.keys()) so that extra keys
    # in an override dict are ignored, matching the original per-column guards
    for col in "ABCD":
        if expected.get(col):
            assert df.dtypes[col] == expected[col]
45
+
46
+
47
def zip_frames(frames: list[DataFrame], axis: AxisInt = 1) -> DataFrame:
    """
    Interleave a list of frames into a single DataFrame.

    All frames are assumed to share the first frame's index/columns.

    Returns
    -------
    new_frame : DataFrame
    """
    template = frames[0]
    if axis == 1:
        # column-wise: for each column label, pull that column from every frame
        pieces = [
            frame.loc[:, label] for label in template.columns for frame in frames
        ]
        return concat(pieces, axis=1)
    # row-wise: for each index label, pull that row from every frame
    rows = [frame.loc[label, :] for label in template.index for frame in frames]
    return DataFrame(rows)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/conftest.py ADDED
@@ -0,0 +1,100 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DataFrame,
6
+ Index,
7
+ NaT,
8
+ date_range,
9
+ )
10
+
11
+
12
@pytest.fixture
def datetime_frame() -> DataFrame:
    """
    Fixture for DataFrame of floats with DatetimeIndex

    Columns are ['A', 'B', 'C', 'D']
    """
    bdays = date_range("2000-01-01", periods=100, freq="B")
    cols = Index(list("ABCD"), dtype=object)
    values = np.random.default_rng(2).standard_normal((100, 4))
    return DataFrame(values, columns=cols, index=bdays)
24
+
25
+
26
@pytest.fixture
def float_string_frame():
    """
    Fixture for DataFrame of floats and strings with index of unique strings

    Columns are ['A', 'B', 'C', 'D', 'foo'].
    """
    labels = Index([f"foo_{i}" for i in range(30)], dtype=object)
    frame = DataFrame(
        np.random.default_rng(2).standard_normal((30, 4)),
        index=labels,
        columns=Index(list("ABCD"), dtype=object),
    )
    # one constant string column alongside the float block
    frame["foo"] = "bar"
    return frame
40
+
41
+
42
@pytest.fixture
def mixed_float_frame():
    """
    Fixture for DataFrame of different float types with index of unique strings

    Columns are ['A', 'B', 'C', 'D'].
    """
    col_dtypes = zip(list("ABCD"), ["float32", "float32", "float32", "float64"])
    data = {
        name: np.random.default_rng(2).random(30, dtype=dt)
        for name, dt in col_dtypes
    }
    frame = DataFrame(
        data, index=Index([f"foo_{i}" for i in range(30)], dtype=object)
    )
    # float16 is not supported by numpy random, so cast column "C" afterwards
    frame["C"] = frame["C"].astype("float16")
    return frame
61
+
62
+
63
@pytest.fixture
def mixed_int_frame():
    """
    Fixture for DataFrame of different int types with index of unique strings

    Columns are ['A', 'B', 'C', 'D'].
    """
    int_dtypes = ["int32", "uint64", "uint8", "int64"]
    data = {
        name: np.ones(30, dtype=dt) for name, dt in zip(list("ABCD"), int_dtypes)
    }
    return DataFrame(
        data, index=Index([f"foo_{i}" for i in range(30)], dtype=object)
    )
77
+
78
+
79
@pytest.fixture
def timezone_frame():
    """
    Fixture for DataFrame of date_range Series with different time zones

    Columns are ['A', 'B', 'C']; some entries are missing

           A                         B                         C
    0 2013-01-01 2013-01-01 00:00:00-05:00 2013-01-01 00:00:00+01:00
    1 2013-01-02                       NaT                       NaT
    2 2013-01-03 2013-01-03 00:00:00-05:00 2013-01-03 00:00:00+01:00
    """
    frame = DataFrame(
        {
            "A": date_range("20130101", periods=3),
            "B": date_range("20130101", periods=3, tz="US/Eastern"),
            "C": date_range("20130101", periods=3, tz="CET"),
        }
    )
    # knock out the middle entry of each tz-aware column
    frame.iloc[1, 1] = NaT
    frame.iloc[1, 2] = NaT
    return frame
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_assign.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas import DataFrame
4
+ import pandas._testing as tm
5
+
6
+
7
+ class TestAssign:
8
+ def test_assign(self):
9
+ df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
10
+ original = df.copy()
11
+ result = df.assign(C=df.B / df.A)
12
+ expected = df.copy()
13
+ expected["C"] = [4, 2.5, 2]
14
+ tm.assert_frame_equal(result, expected)
15
+
16
+ # lambda syntax
17
+ result = df.assign(C=lambda x: x.B / x.A)
18
+ tm.assert_frame_equal(result, expected)
19
+
20
+ # original is unmodified
21
+ tm.assert_frame_equal(df, original)
22
+
23
+ # Non-Series array-like
24
+ result = df.assign(C=[4, 2.5, 2])
25
+ tm.assert_frame_equal(result, expected)
26
+ # original is unmodified
27
+ tm.assert_frame_equal(df, original)
28
+
29
+ result = df.assign(B=df.B / df.A)
30
+ expected = expected.drop("B", axis=1).rename(columns={"C": "B"})
31
+ tm.assert_frame_equal(result, expected)
32
+
33
+ # overwrite
34
+ result = df.assign(A=df.A + df.B)
35
+ expected = df.copy()
36
+ expected["A"] = [5, 7, 9]
37
+ tm.assert_frame_equal(result, expected)
38
+
39
+ # lambda
40
+ result = df.assign(A=lambda x: x.A + x.B)
41
+ tm.assert_frame_equal(result, expected)
42
+
43
+ def test_assign_multiple(self):
44
+ df = DataFrame([[1, 4], [2, 5], [3, 6]], columns=["A", "B"])
45
+ result = df.assign(C=[7, 8, 9], D=df.A, E=lambda x: x.B)
46
+ expected = DataFrame(
47
+ [[1, 4, 7, 1, 4], [2, 5, 8, 2, 5], [3, 6, 9, 3, 6]], columns=list("ABCDE")
48
+ )
49
+ tm.assert_frame_equal(result, expected)
50
+
51
+ def test_assign_order(self):
52
+ # GH 9818
53
+ df = DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
54
+ result = df.assign(D=df.A + df.B, C=df.A - df.B)
55
+
56
+ expected = DataFrame([[1, 2, 3, -1], [3, 4, 7, -1]], columns=list("ABDC"))
57
+ tm.assert_frame_equal(result, expected)
58
+ result = df.assign(C=df.A - df.B, D=df.A + df.B)
59
+
60
+ expected = DataFrame([[1, 2, -1, 3], [3, 4, -1, 7]], columns=list("ABCD"))
61
+
62
+ tm.assert_frame_equal(result, expected)
63
+
64
+ def test_assign_bad(self):
65
+ df = DataFrame({"A": [1, 2, 3], "B": [4, 5, 6]})
66
+
67
+ # non-keyword argument
68
+ msg = r"assign\(\) takes 1 positional argument but 2 were given"
69
+ with pytest.raises(TypeError, match=msg):
70
+ df.assign(lambda x: x.A)
71
+ msg = "'DataFrame' object has no attribute 'C'"
72
+ with pytest.raises(AttributeError, match=msg):
73
+ df.assign(C=df.A, D=df.A + df.C)
74
+
75
+ def test_assign_dependent(self):
76
+ df = DataFrame({"A": [1, 2], "B": [3, 4]})
77
+
78
+ result = df.assign(C=df.A, D=lambda x: x["A"] + x["C"])
79
+ expected = DataFrame([[1, 3, 1, 2], [2, 4, 2, 4]], columns=list("ABCD"))
80
+ tm.assert_frame_equal(result, expected)
81
+
82
+ result = df.assign(C=lambda df: df.A, D=lambda df: df["A"] + df["C"])
83
+ expected = DataFrame([[1, 3, 1, 2], [2, 4, 2, 4]], columns=list("ABCD"))
84
+ tm.assert_frame_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_copy.py ADDED
@@ -0,0 +1,64 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas.util._test_decorators as td
5
+
6
+ from pandas import DataFrame
7
+ import pandas._testing as tm
8
+
9
+
10
class TestCopy:
    @pytest.mark.parametrize("attr", ["index", "columns"])
    def test_copy_index_name_checking(self, float_frame, attr):
        # don't want to be able to modify the index stored elsewhere after
        # making a copy
        axis_obj = getattr(float_frame, attr)
        axis_obj.name = None
        duplicate = float_frame.copy()
        getattr(duplicate, attr).name = "foo"
        assert getattr(float_frame, attr).name is None

    @td.skip_copy_on_write_invalid_test
    def test_copy_cache(self):
        # GH#31784 _item_cache not cleared on copy causes incorrect reads after updates
        df = DataFrame({"a": [1]})

        df["x"] = [0]
        df["a"]  # populate the item cache

        df.copy()

        df["a"].values[0] = -1
        tm.assert_frame_equal(df, DataFrame({"a": [-1], "x": [0]}))

        df["y"] = [0]
        assert df["a"].values[0] == -1
        tm.assert_frame_equal(df, DataFrame({"a": [-1], "x": [0], "y": [0]}))

    def test_copy(self, float_frame, float_string_frame):
        clone = float_frame.copy()
        clone["E"] = clone["A"]
        # mutating the copy must not leak into the original
        assert "E" not in float_frame

        # copy objects
        duplicate = float_string_frame.copy()
        assert duplicate._mgr is not float_string_frame._mgr

    @td.skip_array_manager_invalid_test
    def test_copy_consolidates(self):
        # GH#42477
        df = DataFrame(
            {
                "a": np.random.default_rng(2).integers(0, 100, size=55),
                "b": np.random.default_rng(2).integers(0, 100, size=55),
            }
        )
        for i in range(10):
            df.loc[:, f"n_{i}"] = np.random.default_rng(2).integers(0, 100, size=55)

        # each loc-assignment above created its own block
        assert len(df._mgr.blocks) == 11
        result = df.copy()
        # copy() consolidates everything into a single block
        assert len(result._mgr.blocks) == 1
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_fillna.py ADDED
@@ -0,0 +1,932 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas._config import using_pyarrow_string_dtype
5
+
6
+ import pandas.util._test_decorators as td
7
+
8
+ from pandas import (
9
+ Categorical,
10
+ DataFrame,
11
+ DatetimeIndex,
12
+ NaT,
13
+ PeriodIndex,
14
+ Series,
15
+ TimedeltaIndex,
16
+ Timestamp,
17
+ date_range,
18
+ to_datetime,
19
+ )
20
+ import pandas._testing as tm
21
+ from pandas.tests.frame.common import _check_mixed_float
22
+
23
+
24
+ class TestFillNA:
25
+ def test_fillna_dict_inplace_nonunique_columns(
26
+ self, using_copy_on_write, warn_copy_on_write
27
+ ):
28
+ df = DataFrame(
29
+ {"A": [np.nan] * 3, "B": [NaT, Timestamp(1), NaT], "C": [np.nan, "foo", 2]}
30
+ )
31
+ df.columns = ["A", "A", "A"]
32
+ orig = df[:]
33
+
34
+ # TODO(CoW-warn) better warning message
35
+ with tm.assert_cow_warning(warn_copy_on_write):
36
+ df.fillna({"A": 2}, inplace=True)
37
+ # The first and third columns can be set inplace, while the second cannot.
38
+
39
+ expected = DataFrame(
40
+ {"A": [2.0] * 3, "B": [2, Timestamp(1), 2], "C": [2, "foo", 2]}
41
+ )
42
+ expected.columns = ["A", "A", "A"]
43
+ tm.assert_frame_equal(df, expected)
44
+
45
+ # TODO: what's the expected/desired behavior with CoW?
46
+ if not using_copy_on_write:
47
+ assert tm.shares_memory(df.iloc[:, 0], orig.iloc[:, 0])
48
+ assert not tm.shares_memory(df.iloc[:, 1], orig.iloc[:, 1])
49
+ if not using_copy_on_write:
50
+ assert tm.shares_memory(df.iloc[:, 2], orig.iloc[:, 2])
51
+
52
+ @td.skip_array_manager_not_yet_implemented
53
+ def test_fillna_on_column_view(self, using_copy_on_write):
54
+ # GH#46149 avoid unnecessary copies
55
+ arr = np.full((40, 50), np.nan)
56
+ df = DataFrame(arr, copy=False)
57
+
58
+ if using_copy_on_write:
59
+ with tm.raises_chained_assignment_error():
60
+ df[0].fillna(-1, inplace=True)
61
+ assert np.isnan(arr[:, 0]).all()
62
+ else:
63
+ with tm.assert_produces_warning(FutureWarning, match="inplace method"):
64
+ df[0].fillna(-1, inplace=True)
65
+ assert (arr[:, 0] == -1).all()
66
+
67
+ # i.e. we didn't create a new 49-column block
68
+ assert len(df._mgr.arrays) == 1
69
+ assert np.shares_memory(df.values, arr)
70
+
71
+ def test_fillna_datetime(self, datetime_frame):
72
+ tf = datetime_frame
73
+ tf.loc[tf.index[:5], "A"] = np.nan
74
+ tf.loc[tf.index[-5:], "A"] = np.nan
75
+
76
+ zero_filled = datetime_frame.fillna(0)
77
+ assert (zero_filled.loc[zero_filled.index[:5], "A"] == 0).all()
78
+
79
+ msg = "DataFrame.fillna with 'method' is deprecated"
80
+ with tm.assert_produces_warning(FutureWarning, match=msg):
81
+ padded = datetime_frame.fillna(method="pad")
82
+ assert np.isnan(padded.loc[padded.index[:5], "A"]).all()
83
+ assert (
84
+ padded.loc[padded.index[-5:], "A"] == padded.loc[padded.index[-5], "A"]
85
+ ).all()
86
+
87
+ msg = "Must specify a fill 'value' or 'method'"
88
+ with pytest.raises(ValueError, match=msg):
89
+ datetime_frame.fillna()
90
+ msg = "Cannot specify both 'value' and 'method'"
91
+ with pytest.raises(ValueError, match=msg):
92
+ datetime_frame.fillna(5, method="ffill")
93
+
94
+ @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="can't fill 0 in string")
95
+ def test_fillna_mixed_type(self, float_string_frame):
96
+ mf = float_string_frame
97
+ mf.loc[mf.index[5:20], "foo"] = np.nan
98
+ mf.loc[mf.index[-10:], "A"] = np.nan
99
+ # TODO: make stronger assertion here, GH 25640
100
+ mf.fillna(value=0)
101
+ msg = "DataFrame.fillna with 'method' is deprecated"
102
+ with tm.assert_produces_warning(FutureWarning, match=msg):
103
+ mf.fillna(method="pad")
104
+
105
+ def test_fillna_mixed_float(self, mixed_float_frame):
106
+ # mixed numeric (but no float16)
107
+ mf = mixed_float_frame.reindex(columns=["A", "B", "D"])
108
+ mf.loc[mf.index[-10:], "A"] = np.nan
109
+ result = mf.fillna(value=0)
110
+ _check_mixed_float(result, dtype={"C": None})
111
+
112
+ msg = "DataFrame.fillna with 'method' is deprecated"
113
+ with tm.assert_produces_warning(FutureWarning, match=msg):
114
+ result = mf.fillna(method="pad")
115
+ _check_mixed_float(result, dtype={"C": None})
116
+
117
+ def test_fillna_empty(self, using_copy_on_write):
118
+ if using_copy_on_write:
119
+ pytest.skip("condition is unnecessary complex and is deprecated anyway")
120
+ # empty frame (GH#2778)
121
+ df = DataFrame(columns=["x"])
122
+ for m in ["pad", "backfill"]:
123
+ msg = "Series.fillna with 'method' is deprecated"
124
+ with tm.assert_produces_warning(FutureWarning, match=msg):
125
+ df.x.fillna(method=m, inplace=True)
126
+ df.x.fillna(method=m)
127
+
128
+ def test_fillna_different_dtype(self, using_infer_string):
129
+ # with different dtype (GH#3386)
130
+ df = DataFrame(
131
+ [["a", "a", np.nan, "a"], ["b", "b", np.nan, "b"], ["c", "c", np.nan, "c"]]
132
+ )
133
+
134
+ if using_infer_string:
135
+ with tm.assert_produces_warning(FutureWarning, match="Downcasting"):
136
+ result = df.fillna({2: "foo"})
137
+ else:
138
+ result = df.fillna({2: "foo"})
139
+ expected = DataFrame(
140
+ [["a", "a", "foo", "a"], ["b", "b", "foo", "b"], ["c", "c", "foo", "c"]]
141
+ )
142
+ tm.assert_frame_equal(result, expected)
143
+
144
+ if using_infer_string:
145
+ with tm.assert_produces_warning(FutureWarning, match="Downcasting"):
146
+ return_value = df.fillna({2: "foo"}, inplace=True)
147
+ else:
148
+ return_value = df.fillna({2: "foo"}, inplace=True)
149
+ tm.assert_frame_equal(df, expected)
150
+ assert return_value is None
151
+
152
+ def test_fillna_limit_and_value(self):
153
+ # limit and value
154
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 3)))
155
+ df.iloc[2:7, 0] = np.nan
156
+ df.iloc[3:5, 2] = np.nan
157
+
158
+ expected = df.copy()
159
+ expected.iloc[2, 0] = 999
160
+ expected.iloc[3, 2] = 999
161
+ result = df.fillna(999, limit=1)
162
+ tm.assert_frame_equal(result, expected)
163
+
164
+ def test_fillna_datelike(self):
165
+ # with datelike
166
+ # GH#6344
167
+ df = DataFrame(
168
+ {
169
+ "Date": [NaT, Timestamp("2014-1-1")],
170
+ "Date2": [Timestamp("2013-1-1"), NaT],
171
+ }
172
+ )
173
+
174
+ expected = df.copy()
175
+ expected["Date"] = expected["Date"].fillna(df.loc[df.index[0], "Date2"])
176
+ result = df.fillna(value={"Date": df["Date2"]})
177
+ tm.assert_frame_equal(result, expected)
178
+
179
+ def test_fillna_tzaware(self):
180
+ # with timezone
181
+ # GH#15855
182
+ df = DataFrame({"A": [Timestamp("2012-11-11 00:00:00+01:00"), NaT]})
183
+ exp = DataFrame(
184
+ {
185
+ "A": [
186
+ Timestamp("2012-11-11 00:00:00+01:00"),
187
+ Timestamp("2012-11-11 00:00:00+01:00"),
188
+ ]
189
+ }
190
+ )
191
+ msg = "DataFrame.fillna with 'method' is deprecated"
192
+ with tm.assert_produces_warning(FutureWarning, match=msg):
193
+ res = df.fillna(method="pad")
194
+ tm.assert_frame_equal(res, exp)
195
+
196
+ df = DataFrame({"A": [NaT, Timestamp("2012-11-11 00:00:00+01:00")]})
197
+ exp = DataFrame(
198
+ {
199
+ "A": [
200
+ Timestamp("2012-11-11 00:00:00+01:00"),
201
+ Timestamp("2012-11-11 00:00:00+01:00"),
202
+ ]
203
+ }
204
+ )
205
+ msg = "DataFrame.fillna with 'method' is deprecated"
206
+ with tm.assert_produces_warning(FutureWarning, match=msg):
207
+ res = df.fillna(method="bfill")
208
+ tm.assert_frame_equal(res, exp)
209
+
210
+ def test_fillna_tzaware_different_column(self):
211
+ # with timezone in another column
212
+ # GH#15522
213
+ df = DataFrame(
214
+ {
215
+ "A": date_range("20130101", periods=4, tz="US/Eastern"),
216
+ "B": [1, 2, np.nan, np.nan],
217
+ }
218
+ )
219
+ msg = "DataFrame.fillna with 'method' is deprecated"
220
+ with tm.assert_produces_warning(FutureWarning, match=msg):
221
+ result = df.fillna(method="pad")
222
+ expected = DataFrame(
223
+ {
224
+ "A": date_range("20130101", periods=4, tz="US/Eastern"),
225
+ "B": [1.0, 2.0, 2.0, 2.0],
226
+ }
227
+ )
228
+ tm.assert_frame_equal(result, expected)
229
+
230
+ def test_na_actions_categorical(self):
231
+ cat = Categorical([1, 2, 3, np.nan], categories=[1, 2, 3])
232
+ vals = ["a", "b", np.nan, "d"]
233
+ df = DataFrame({"cats": cat, "vals": vals})
234
+ cat2 = Categorical([1, 2, 3, 3], categories=[1, 2, 3])
235
+ vals2 = ["a", "b", "b", "d"]
236
+ df_exp_fill = DataFrame({"cats": cat2, "vals": vals2})
237
+ cat3 = Categorical([1, 2, 3], categories=[1, 2, 3])
238
+ vals3 = ["a", "b", np.nan]
239
+ df_exp_drop_cats = DataFrame({"cats": cat3, "vals": vals3})
240
+ cat4 = Categorical([1, 2], categories=[1, 2, 3])
241
+ vals4 = ["a", "b"]
242
+ df_exp_drop_all = DataFrame({"cats": cat4, "vals": vals4})
243
+
244
+ # fillna
245
+ res = df.fillna(value={"cats": 3, "vals": "b"})
246
+ tm.assert_frame_equal(res, df_exp_fill)
247
+
248
+ msg = "Cannot setitem on a Categorical with a new category"
249
+ with pytest.raises(TypeError, match=msg):
250
+ df.fillna(value={"cats": 4, "vals": "c"})
251
+
252
+ msg = "DataFrame.fillna with 'method' is deprecated"
253
+ with tm.assert_produces_warning(FutureWarning, match=msg):
254
+ res = df.fillna(method="pad")
255
+ tm.assert_frame_equal(res, df_exp_fill)
256
+
257
+ # dropna
258
+ res = df.dropna(subset=["cats"])
259
+ tm.assert_frame_equal(res, df_exp_drop_cats)
260
+
261
+ res = df.dropna()
262
+ tm.assert_frame_equal(res, df_exp_drop_all)
263
+
264
+ # make sure that fillna takes missing values into account
265
+ c = Categorical([np.nan, "b", np.nan], categories=["a", "b"])
266
+ df = DataFrame({"cats": c, "vals": [1, 2, 3]})
267
+
268
+ cat_exp = Categorical(["a", "b", "a"], categories=["a", "b"])
269
+ df_exp = DataFrame({"cats": cat_exp, "vals": [1, 2, 3]})
270
+
271
+ res = df.fillna("a")
272
+ tm.assert_frame_equal(res, df_exp)
273
+
274
+ def test_fillna_categorical_nan(self):
275
+ # GH#14021
276
+ # np.nan should always be a valid filler
277
+ cat = Categorical([np.nan, 2, np.nan])
278
+ val = Categorical([np.nan, np.nan, np.nan])
279
+ df = DataFrame({"cats": cat, "vals": val})
280
+
281
+ # GH#32950 df.median() is poorly behaved because there is no
282
+ # Categorical.median
283
+ median = Series({"cats": 2.0, "vals": np.nan})
284
+
285
+ res = df.fillna(median)
286
+ v_exp = [np.nan, np.nan, np.nan]
287
+ df_exp = DataFrame({"cats": [2, 2, 2], "vals": v_exp}, dtype="category")
288
+ tm.assert_frame_equal(res, df_exp)
289
+
290
+ result = df.cats.fillna(np.nan)
291
+ tm.assert_series_equal(result, df.cats)
292
+
293
+ result = df.vals.fillna(np.nan)
294
+ tm.assert_series_equal(result, df.vals)
295
+
296
+ idx = DatetimeIndex(
297
+ ["2011-01-01 09:00", "2016-01-01 23:45", "2011-01-01 09:00", NaT, NaT]
298
+ )
299
+ df = DataFrame({"a": Categorical(idx)})
300
+ tm.assert_frame_equal(df.fillna(value=NaT), df)
301
+
302
+ idx = PeriodIndex(["2011-01", "2011-01", "2011-01", NaT, NaT], freq="M")
303
+ df = DataFrame({"a": Categorical(idx)})
304
+ tm.assert_frame_equal(df.fillna(value=NaT), df)
305
+
306
+ idx = TimedeltaIndex(["1 days", "2 days", "1 days", NaT, NaT])
307
+ df = DataFrame({"a": Categorical(idx)})
308
+ tm.assert_frame_equal(df.fillna(value=NaT), df)
309
+
310
+ def test_fillna_downcast(self):
311
+ # GH#15277
312
+ # infer int64 from float64
313
+ df = DataFrame({"a": [1.0, np.nan]})
314
+ msg = "The 'downcast' keyword in fillna is deprecated"
315
+ with tm.assert_produces_warning(FutureWarning, match=msg):
316
+ result = df.fillna(0, downcast="infer")
317
+ expected = DataFrame({"a": [1, 0]})
318
+ tm.assert_frame_equal(result, expected)
319
+
320
+ # infer int64 from float64 when fillna value is a dict
321
+ df = DataFrame({"a": [1.0, np.nan]})
322
+ with tm.assert_produces_warning(FutureWarning, match=msg):
323
+ result = df.fillna({"a": 0}, downcast="infer")
324
+ expected = DataFrame({"a": [1, 0]})
325
+ tm.assert_frame_equal(result, expected)
326
+
327
+ def test_fillna_downcast_false(self, frame_or_series):
328
+ # GH#45603 preserve object dtype with downcast=False
329
+ obj = frame_or_series([1, 2, 3], dtype="object")
330
+ msg = "The 'downcast' keyword in fillna"
331
+ with tm.assert_produces_warning(FutureWarning, match=msg):
332
+ result = obj.fillna("", downcast=False)
333
+ tm.assert_equal(result, obj)
334
+
335
+ def test_fillna_downcast_noop(self, frame_or_series):
336
+ # GH#45423
337
+ # Two relevant paths:
338
+ # 1) not _can_hold_na (e.g. integer)
339
+ # 2) _can_hold_na + noop + not can_hold_element
340
+
341
+ obj = frame_or_series([1, 2, 3], dtype=np.int64)
342
+
343
+ msg = "The 'downcast' keyword in fillna"
344
+ with tm.assert_produces_warning(FutureWarning, match=msg):
345
+ # GH#40988
346
+ res = obj.fillna("foo", downcast=np.dtype(np.int32))
347
+ expected = obj.astype(np.int32)
348
+ tm.assert_equal(res, expected)
349
+
350
+ obj2 = obj.astype(np.float64)
351
+ with tm.assert_produces_warning(FutureWarning, match=msg):
352
+ res2 = obj2.fillna("foo", downcast="infer")
353
+ expected2 = obj # get back int64
354
+ tm.assert_equal(res2, expected2)
355
+
356
+ with tm.assert_produces_warning(FutureWarning, match=msg):
357
+ # GH#40988
358
+ res3 = obj2.fillna("foo", downcast=np.dtype(np.int32))
359
+ tm.assert_equal(res3, expected)
360
+
361
+ @pytest.mark.parametrize("columns", [["A", "A", "B"], ["A", "A"]])
362
+ def test_fillna_dictlike_value_duplicate_colnames(self, columns):
363
+ # GH#43476
364
+ df = DataFrame(np.nan, index=[0, 1], columns=columns)
365
+ with tm.assert_produces_warning(None):
366
+ result = df.fillna({"A": 0})
367
+
368
+ expected = df.copy()
369
+ expected["A"] = 0.0
370
+ tm.assert_frame_equal(result, expected)
371
+
372
+ def test_fillna_dtype_conversion(self, using_infer_string):
373
+ # make sure that fillna on an empty frame works
374
+ df = DataFrame(index=["A", "B", "C"], columns=[1, 2, 3, 4, 5])
375
+ result = df.dtypes
376
+ expected = Series([np.dtype("object")] * 5, index=[1, 2, 3, 4, 5])
377
+ tm.assert_series_equal(result, expected)
378
+
379
+ msg = "Downcasting object dtype arrays"
380
+ with tm.assert_produces_warning(FutureWarning, match=msg):
381
+ result = df.fillna(1)
382
+ expected = DataFrame(1, index=["A", "B", "C"], columns=[1, 2, 3, 4, 5])
383
+ tm.assert_frame_equal(result, expected)
384
+
385
+ # empty block
386
+ df = DataFrame(index=range(3), columns=["A", "B"], dtype="float64")
387
+ if using_infer_string:
388
+ with tm.assert_produces_warning(FutureWarning, match="Downcasting"):
389
+ result = df.fillna("nan")
390
+ else:
391
+ result = df.fillna("nan")
392
+ expected = DataFrame("nan", index=range(3), columns=["A", "B"])
393
+ tm.assert_frame_equal(result, expected)
394
+
395
+ @pytest.mark.parametrize("val", ["", 1, np.nan, 1.0])
396
+ def test_fillna_dtype_conversion_equiv_replace(self, val):
397
+ df = DataFrame({"A": [1, np.nan], "B": [1.0, 2.0]})
398
+ expected = df.replace(np.nan, val)
399
+ result = df.fillna(val)
400
+ tm.assert_frame_equal(result, expected)
401
+
402
+ def test_fillna_datetime_columns(self):
403
+ # GH#7095
404
+ df = DataFrame(
405
+ {
406
+ "A": [-1, -2, np.nan],
407
+ "B": date_range("20130101", periods=3),
408
+ "C": ["foo", "bar", None],
409
+ "D": ["foo2", "bar2", None],
410
+ },
411
+ index=date_range("20130110", periods=3),
412
+ )
413
+ result = df.fillna("?")
414
+ expected = DataFrame(
415
+ {
416
+ "A": [-1, -2, "?"],
417
+ "B": date_range("20130101", periods=3),
418
+ "C": ["foo", "bar", "?"],
419
+ "D": ["foo2", "bar2", "?"],
420
+ },
421
+ index=date_range("20130110", periods=3),
422
+ )
423
+ tm.assert_frame_equal(result, expected)
424
+
425
+ df = DataFrame(
426
+ {
427
+ "A": [-1, -2, np.nan],
428
+ "B": [Timestamp("2013-01-01"), Timestamp("2013-01-02"), NaT],
429
+ "C": ["foo", "bar", None],
430
+ "D": ["foo2", "bar2", None],
431
+ },
432
+ index=date_range("20130110", periods=3),
433
+ )
434
+ result = df.fillna("?")
435
+ expected = DataFrame(
436
+ {
437
+ "A": [-1, -2, "?"],
438
+ "B": [Timestamp("2013-01-01"), Timestamp("2013-01-02"), "?"],
439
+ "C": ["foo", "bar", "?"],
440
+ "D": ["foo2", "bar2", "?"],
441
+ },
442
+ index=date_range("20130110", periods=3),
443
+ )
444
+ tm.assert_frame_equal(result, expected)
445
+
446
+ def test_ffill(self, datetime_frame):
447
+ datetime_frame.loc[datetime_frame.index[:5], "A"] = np.nan
448
+ datetime_frame.loc[datetime_frame.index[-5:], "A"] = np.nan
449
+
450
+ msg = "DataFrame.fillna with 'method' is deprecated"
451
+ with tm.assert_produces_warning(FutureWarning, match=msg):
452
+ alt = datetime_frame.fillna(method="ffill")
453
+ tm.assert_frame_equal(datetime_frame.ffill(), alt)
454
+
455
+ def test_bfill(self, datetime_frame):
456
+ datetime_frame.loc[datetime_frame.index[:5], "A"] = np.nan
457
+ datetime_frame.loc[datetime_frame.index[-5:], "A"] = np.nan
458
+
459
+ msg = "DataFrame.fillna with 'method' is deprecated"
460
+ with tm.assert_produces_warning(FutureWarning, match=msg):
461
+ alt = datetime_frame.fillna(method="bfill")
462
+
463
+ tm.assert_frame_equal(datetime_frame.bfill(), alt)
464
+
465
+ def test_frame_pad_backfill_limit(self):
466
+ index = np.arange(10)
467
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)), index=index)
468
+
469
+ result = df[:2].reindex(index, method="pad", limit=5)
470
+
471
+ msg = "DataFrame.fillna with 'method' is deprecated"
472
+ with tm.assert_produces_warning(FutureWarning, match=msg):
473
+ expected = df[:2].reindex(index).fillna(method="pad")
474
+ expected.iloc[-3:] = np.nan
475
+ tm.assert_frame_equal(result, expected)
476
+
477
+ result = df[-2:].reindex(index, method="backfill", limit=5)
478
+
479
+ with tm.assert_produces_warning(FutureWarning, match=msg):
480
+ expected = df[-2:].reindex(index).fillna(method="backfill")
481
+ expected.iloc[:3] = np.nan
482
+ tm.assert_frame_equal(result, expected)
483
+
484
+ def test_frame_fillna_limit(self):
485
+ index = np.arange(10)
486
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)), index=index)
487
+
488
+ result = df[:2].reindex(index)
489
+ msg = "DataFrame.fillna with 'method' is deprecated"
490
+ with tm.assert_produces_warning(FutureWarning, match=msg):
491
+ result = result.fillna(method="pad", limit=5)
492
+
493
+ with tm.assert_produces_warning(FutureWarning, match=msg):
494
+ expected = df[:2].reindex(index).fillna(method="pad")
495
+ expected.iloc[-3:] = np.nan
496
+ tm.assert_frame_equal(result, expected)
497
+
498
+ result = df[-2:].reindex(index)
499
+ with tm.assert_produces_warning(FutureWarning, match=msg):
500
+ result = result.fillna(method="backfill", limit=5)
501
+
502
+ with tm.assert_produces_warning(FutureWarning, match=msg):
503
+ expected = df[-2:].reindex(index).fillna(method="backfill")
504
+ expected.iloc[:3] = np.nan
505
+ tm.assert_frame_equal(result, expected)
506
+
507
+ def test_fillna_skip_certain_blocks(self):
508
+ # don't try to fill boolean, int blocks
509
+
510
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)).astype(int))
511
+
512
+ # it works!
513
+ df.fillna(np.nan)
514
+
515
+ @pytest.mark.parametrize("type", [int, float])
516
+ def test_fillna_positive_limit(self, type):
517
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 4))).astype(type)
518
+
519
+ msg = "Limit must be greater than 0"
520
+ with pytest.raises(ValueError, match=msg):
521
+ df.fillna(0, limit=-5)
522
+
523
+ @pytest.mark.parametrize("type", [int, float])
524
+ def test_fillna_integer_limit(self, type):
525
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 4))).astype(type)
526
+
527
+ msg = "Limit must be an integer"
528
+ with pytest.raises(ValueError, match=msg):
529
+ df.fillna(0, limit=0.5)
530
+
531
+ def test_fillna_inplace(self):
532
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)))
533
+ df.loc[:4, 1] = np.nan
534
+ df.loc[-4:, 3] = np.nan
535
+
536
+ expected = df.fillna(value=0)
537
+ assert expected is not df
538
+
539
+ df.fillna(value=0, inplace=True)
540
+ tm.assert_frame_equal(df, expected)
541
+
542
+ expected = df.fillna(value={0: 0}, inplace=True)
543
+ assert expected is None
544
+
545
+ df.loc[:4, 1] = np.nan
546
+ df.loc[-4:, 3] = np.nan
547
+ msg = "DataFrame.fillna with 'method' is deprecated"
548
+ with tm.assert_produces_warning(FutureWarning, match=msg):
549
+ expected = df.fillna(method="ffill")
550
+ assert expected is not df
551
+
552
+ with tm.assert_produces_warning(FutureWarning, match=msg):
553
+ df.fillna(method="ffill", inplace=True)
554
+ tm.assert_frame_equal(df, expected)
555
+
556
+ def test_fillna_dict_series(self):
557
+ df = DataFrame(
558
+ {
559
+ "a": [np.nan, 1, 2, np.nan, np.nan],
560
+ "b": [1, 2, 3, np.nan, np.nan],
561
+ "c": [np.nan, 1, 2, 3, 4],
562
+ }
563
+ )
564
+
565
+ result = df.fillna({"a": 0, "b": 5})
566
+
567
+ expected = df.copy()
568
+ expected["a"] = expected["a"].fillna(0)
569
+ expected["b"] = expected["b"].fillna(5)
570
+ tm.assert_frame_equal(result, expected)
571
+
572
+ # it works
573
+ result = df.fillna({"a": 0, "b": 5, "d": 7})
574
+
575
+ # Series treated same as dict
576
+ result = df.fillna(df.max())
577
+ expected = df.fillna(df.max().to_dict())
578
+ tm.assert_frame_equal(result, expected)
579
+
580
+ # disable this for now
581
+ with pytest.raises(NotImplementedError, match="column by column"):
582
+ df.fillna(df.max(1), axis=1)
583
+
584
+ def test_fillna_dataframe(self):
585
+ # GH#8377
586
+ df = DataFrame(
587
+ {
588
+ "a": [np.nan, 1, 2, np.nan, np.nan],
589
+ "b": [1, 2, 3, np.nan, np.nan],
590
+ "c": [np.nan, 1, 2, 3, 4],
591
+ },
592
+ index=list("VWXYZ"),
593
+ )
594
+
595
+ # df2 may have different index and columns
596
+ df2 = DataFrame(
597
+ {
598
+ "a": [np.nan, 10, 20, 30, 40],
599
+ "b": [50, 60, 70, 80, 90],
600
+ "foo": ["bar"] * 5,
601
+ },
602
+ index=list("VWXuZ"),
603
+ )
604
+
605
+ result = df.fillna(df2)
606
+
607
+ # only those columns and indices which are shared get filled
608
+ expected = DataFrame(
609
+ {
610
+ "a": [np.nan, 1, 2, np.nan, 40],
611
+ "b": [1, 2, 3, np.nan, 90],
612
+ "c": [np.nan, 1, 2, 3, 4],
613
+ },
614
+ index=list("VWXYZ"),
615
+ )
616
+
617
+ tm.assert_frame_equal(result, expected)
618
+
619
+ def test_fillna_columns(self):
620
+ arr = np.random.default_rng(2).standard_normal((10, 10))
621
+ arr[:, ::2] = np.nan
622
+ df = DataFrame(arr)
623
+
624
+ msg = "DataFrame.fillna with 'method' is deprecated"
625
+ with tm.assert_produces_warning(FutureWarning, match=msg):
626
+ result = df.fillna(method="ffill", axis=1)
627
+ with tm.assert_produces_warning(FutureWarning, match=msg):
628
+ expected = df.T.fillna(method="pad").T
629
+ tm.assert_frame_equal(result, expected)
630
+
631
+ df.insert(6, "foo", 5)
632
+ with tm.assert_produces_warning(FutureWarning, match=msg):
633
+ result = df.fillna(method="ffill", axis=1)
634
+ with tm.assert_produces_warning(FutureWarning, match=msg):
635
+ expected = df.astype(float).fillna(method="ffill", axis=1)
636
+ tm.assert_frame_equal(result, expected)
637
+
638
+ def test_fillna_invalid_method(self, float_frame):
639
+ with pytest.raises(ValueError, match="ffil"):
640
+ float_frame.fillna(method="ffil")
641
+
642
+ def test_fillna_invalid_value(self, float_frame):
643
+ # list
644
+ msg = '"value" parameter must be a scalar or dict, but you passed a "{}"'
645
+ with pytest.raises(TypeError, match=msg.format("list")):
646
+ float_frame.fillna([1, 2])
647
+ # tuple
648
+ with pytest.raises(TypeError, match=msg.format("tuple")):
649
+ float_frame.fillna((1, 2))
650
+ # frame with series
651
+ msg = (
652
+ '"value" parameter must be a scalar, dict or Series, but you '
653
+ 'passed a "DataFrame"'
654
+ )
655
+ with pytest.raises(TypeError, match=msg):
656
+ float_frame.iloc[:, 0].fillna(float_frame)
657
+
658
+ def test_fillna_col_reordering(self):
659
+ cols = ["COL." + str(i) for i in range(5, 0, -1)]
660
+ data = np.random.default_rng(2).random((20, 5))
661
+ df = DataFrame(index=range(20), columns=cols, data=data)
662
+ msg = "DataFrame.fillna with 'method' is deprecated"
663
+ with tm.assert_produces_warning(FutureWarning, match=msg):
664
+ filled = df.fillna(method="ffill")
665
+ assert df.columns.tolist() == filled.columns.tolist()
666
+
667
+ @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="can't fill 0 in string")
668
+ def test_fill_corner(self, float_frame, float_string_frame):
669
+ mf = float_string_frame
670
+ mf.loc[mf.index[5:20], "foo"] = np.nan
671
+ mf.loc[mf.index[-10:], "A"] = np.nan
672
+
673
+ filled = float_string_frame.fillna(value=0)
674
+ assert (filled.loc[filled.index[5:20], "foo"] == 0).all()
675
+ del float_string_frame["foo"]
676
+
677
+ float_frame.reindex(columns=[]).fillna(value=0)
678
+
679
+ def test_fillna_downcast_dict(self):
680
+ # GH#40809
681
+ df = DataFrame({"col1": [1, np.nan]})
682
+
683
+ msg = "The 'downcast' keyword in fillna"
684
+ with tm.assert_produces_warning(FutureWarning, match=msg):
685
+ result = df.fillna({"col1": 2}, downcast={"col1": "int64"})
686
+ expected = DataFrame({"col1": [1, 2]})
687
+ tm.assert_frame_equal(result, expected)
688
+
689
+ def test_fillna_with_columns_and_limit(self):
690
+ # GH40989
691
+ df = DataFrame(
692
+ [
693
+ [np.nan, 2, np.nan, 0],
694
+ [3, 4, np.nan, 1],
695
+ [np.nan, np.nan, np.nan, 5],
696
+ [np.nan, 3, np.nan, 4],
697
+ ],
698
+ columns=list("ABCD"),
699
+ )
700
+ result = df.fillna(axis=1, value=100, limit=1)
701
+ result2 = df.fillna(axis=1, value=100, limit=2)
702
+
703
+ expected = DataFrame(
704
+ {
705
+ "A": Series([100, 3, 100, 100], dtype="float64"),
706
+ "B": [2, 4, np.nan, 3],
707
+ "C": [np.nan, 100, np.nan, np.nan],
708
+ "D": Series([0, 1, 5, 4], dtype="float64"),
709
+ },
710
+ index=[0, 1, 2, 3],
711
+ )
712
+ expected2 = DataFrame(
713
+ {
714
+ "A": Series([100, 3, 100, 100], dtype="float64"),
715
+ "B": Series([2, 4, 100, 3], dtype="float64"),
716
+ "C": [100, 100, np.nan, 100],
717
+ "D": Series([0, 1, 5, 4], dtype="float64"),
718
+ },
719
+ index=[0, 1, 2, 3],
720
+ )
721
+
722
+ tm.assert_frame_equal(result, expected)
723
+ tm.assert_frame_equal(result2, expected2)
724
+
725
+ def test_fillna_datetime_inplace(self):
726
+ # GH#48863
727
+ df = DataFrame(
728
+ {
729
+ "date1": to_datetime(["2018-05-30", None]),
730
+ "date2": to_datetime(["2018-09-30", None]),
731
+ }
732
+ )
733
+ expected = df.copy()
734
+ df.fillna(np.nan, inplace=True)
735
+ tm.assert_frame_equal(df, expected)
736
+
737
+ def test_fillna_inplace_with_columns_limit_and_value(self):
738
+ # GH40989
739
+ df = DataFrame(
740
+ [
741
+ [np.nan, 2, np.nan, 0],
742
+ [3, 4, np.nan, 1],
743
+ [np.nan, np.nan, np.nan, 5],
744
+ [np.nan, 3, np.nan, 4],
745
+ ],
746
+ columns=list("ABCD"),
747
+ )
748
+
749
+ expected = df.fillna(axis=1, value=100, limit=1)
750
+ assert expected is not df
751
+
752
+ df.fillna(axis=1, value=100, limit=1, inplace=True)
753
+ tm.assert_frame_equal(df, expected)
754
+
755
+ @td.skip_array_manager_invalid_test
756
+ @pytest.mark.parametrize("val", [-1, {"x": -1, "y": -1}])
757
+ def test_inplace_dict_update_view(
758
+ self, val, using_copy_on_write, warn_copy_on_write
759
+ ):
760
+ # GH#47188
761
+ df = DataFrame({"x": [np.nan, 2], "y": [np.nan, 2]})
762
+ df_orig = df.copy()
763
+ result_view = df[:]
764
+ with tm.assert_cow_warning(warn_copy_on_write):
765
+ df.fillna(val, inplace=True)
766
+ expected = DataFrame({"x": [-1, 2.0], "y": [-1.0, 2]})
767
+ tm.assert_frame_equal(df, expected)
768
+ if using_copy_on_write:
769
+ tm.assert_frame_equal(result_view, df_orig)
770
+ else:
771
+ tm.assert_frame_equal(result_view, expected)
772
+
773
+ def test_single_block_df_with_horizontal_axis(self):
774
+ # GH 47713
775
+ df = DataFrame(
776
+ {
777
+ "col1": [5, 0, np.nan, 10, np.nan],
778
+ "col2": [7, np.nan, np.nan, 5, 3],
779
+ "col3": [12, np.nan, 1, 2, 0],
780
+ "col4": [np.nan, 1, 1, np.nan, 18],
781
+ }
782
+ )
783
+ result = df.fillna(50, limit=1, axis=1)
784
+ expected = DataFrame(
785
+ [
786
+ [5.0, 7.0, 12.0, 50.0],
787
+ [0.0, 50.0, np.nan, 1.0],
788
+ [50.0, np.nan, 1.0, 1.0],
789
+ [10.0, 5.0, 2.0, 50.0],
790
+ [50.0, 3.0, 0.0, 18.0],
791
+ ],
792
+ columns=["col1", "col2", "col3", "col4"],
793
+ )
794
+ tm.assert_frame_equal(result, expected)
795
+
796
+ def test_fillna_with_multi_index_frame(self):
797
+ # GH 47649
798
+ pdf = DataFrame(
799
+ {
800
+ ("x", "a"): [np.nan, 2.0, 3.0],
801
+ ("x", "b"): [1.0, 2.0, np.nan],
802
+ ("y", "c"): [1.0, 2.0, np.nan],
803
+ }
804
+ )
805
+ expected = DataFrame(
806
+ {
807
+ ("x", "a"): [-1.0, 2.0, 3.0],
808
+ ("x", "b"): [1.0, 2.0, -1.0],
809
+ ("y", "c"): [1.0, 2.0, np.nan],
810
+ }
811
+ )
812
+ tm.assert_frame_equal(pdf.fillna({"x": -1}), expected)
813
+ tm.assert_frame_equal(pdf.fillna({"x": -1, ("x", "b"): -2}), expected)
814
+
815
+ expected = DataFrame(
816
+ {
817
+ ("x", "a"): [-1.0, 2.0, 3.0],
818
+ ("x", "b"): [1.0, 2.0, -2.0],
819
+ ("y", "c"): [1.0, 2.0, np.nan],
820
+ }
821
+ )
822
+ tm.assert_frame_equal(pdf.fillna({("x", "b"): -2, "x": -1}), expected)
823
+
824
+
825
+ def test_fillna_nonconsolidated_frame():
826
+ # https://github.com/pandas-dev/pandas/issues/36495
827
+ df = DataFrame(
828
+ [
829
+ [1, 1, 1, 1.0],
830
+ [2, 2, 2, 2.0],
831
+ [3, 3, 3, 3.0],
832
+ ],
833
+ columns=["i1", "i2", "i3", "f1"],
834
+ )
835
+ df_nonconsol = df.pivot(index="i1", columns="i2")
836
+ result = df_nonconsol.fillna(0)
837
+ assert result.isna().sum().sum() == 0
838
+
839
+
840
+ def test_fillna_nones_inplace():
841
+ # GH 48480
842
+ df = DataFrame(
843
+ [[None, None], [None, None]],
844
+ columns=["A", "B"],
845
+ )
846
+ msg = "Downcasting object dtype arrays"
847
+ with tm.assert_produces_warning(FutureWarning, match=msg):
848
+ df.fillna(value={"A": 1, "B": 2}, inplace=True)
849
+
850
+ expected = DataFrame([[1, 2], [1, 2]], columns=["A", "B"])
851
+ tm.assert_frame_equal(df, expected)
852
+
853
+
854
+ @pytest.mark.parametrize("func", ["pad", "backfill"])
855
+ def test_pad_backfill_deprecated(func):
856
+ # GH#33396
857
+ df = DataFrame({"a": [1, 2, 3]})
858
+ with tm.assert_produces_warning(FutureWarning):
859
+ getattr(df, func)()
860
+
861
+
862
+ @pytest.mark.parametrize(
863
+ "data, expected_data, method, kwargs",
864
+ (
865
+ (
866
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
867
+ [np.nan, np.nan, 3.0, 3.0, 3.0, 3.0, 7.0, np.nan, np.nan],
868
+ "ffill",
869
+ {"limit_area": "inside"},
870
+ ),
871
+ (
872
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
873
+ [np.nan, np.nan, 3.0, 3.0, np.nan, np.nan, 7.0, np.nan, np.nan],
874
+ "ffill",
875
+ {"limit_area": "inside", "limit": 1},
876
+ ),
877
+ (
878
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
879
+ [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, 7.0],
880
+ "ffill",
881
+ {"limit_area": "outside"},
882
+ ),
883
+ (
884
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
885
+ [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, np.nan],
886
+ "ffill",
887
+ {"limit_area": "outside", "limit": 1},
888
+ ),
889
+ (
890
+ [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
891
+ [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
892
+ "ffill",
893
+ {"limit_area": "outside", "limit": 1},
894
+ ),
895
+ (
896
+ range(5),
897
+ range(5),
898
+ "ffill",
899
+ {"limit_area": "outside", "limit": 1},
900
+ ),
901
+ (
902
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
903
+ [np.nan, np.nan, 3.0, 7.0, 7.0, 7.0, 7.0, np.nan, np.nan],
904
+ "bfill",
905
+ {"limit_area": "inside"},
906
+ ),
907
+ (
908
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
909
+ [np.nan, np.nan, 3.0, np.nan, np.nan, 7.0, 7.0, np.nan, np.nan],
910
+ "bfill",
911
+ {"limit_area": "inside", "limit": 1},
912
+ ),
913
+ (
914
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
915
+ [3.0, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
916
+ "bfill",
917
+ {"limit_area": "outside"},
918
+ ),
919
+ (
920
+ [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
921
+ [np.nan, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
922
+ "bfill",
923
+ {"limit_area": "outside", "limit": 1},
924
+ ),
925
+ ),
926
+ )
927
+ def test_ffill_bfill_limit_area(data, expected_data, method, kwargs):
928
+ # GH#56492
929
+ df = DataFrame(data)
930
+ expected = DataFrame(expected_data)
931
+ result = getattr(df, method)(**kwargs)
932
+ tm.assert_frame_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_first_valid_index.py ADDED
@@ -0,0 +1,78 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Includes test for last_valid_index.
3
+ """
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas import (
8
+ DataFrame,
9
+ Index,
10
+ Series,
11
+ date_range,
12
+ )
13
+
14
+
15
+ class TestFirstValidIndex:
16
+ def test_first_valid_index_single_nan(self, frame_or_series):
17
+ # GH#9752 Series/DataFrame should both return None, not raise
18
+ obj = frame_or_series([np.nan])
19
+
20
+ assert obj.first_valid_index() is None
21
+ assert obj.iloc[:0].first_valid_index() is None
22
+
23
+ @pytest.mark.parametrize(
24
+ "empty", [DataFrame(), Series(dtype=object), Series([], index=[], dtype=object)]
25
+ )
26
+ def test_first_valid_index_empty(self, empty):
27
+ # GH#12800
28
+ assert empty.last_valid_index() is None
29
+ assert empty.first_valid_index() is None
30
+
31
+ @pytest.mark.parametrize(
32
+ "data,idx,expected_first,expected_last",
33
+ [
34
+ ({"A": [1, 2, 3]}, [1, 1, 2], 1, 2),
35
+ ({"A": [1, 2, 3]}, [1, 2, 2], 1, 2),
36
+ ({"A": [1, 2, 3, 4]}, ["d", "d", "d", "d"], "d", "d"),
37
+ ({"A": [1, np.nan, 3]}, [1, 1, 2], 1, 2),
38
+ ({"A": [np.nan, np.nan, 3]}, [1, 1, 2], 2, 2),
39
+ ({"A": [1, np.nan, 3]}, [1, 2, 2], 1, 2),
40
+ ],
41
+ )
42
+ def test_first_last_valid_frame(self, data, idx, expected_first, expected_last):
43
+ # GH#21441
44
+ df = DataFrame(data, index=idx)
45
+ assert expected_first == df.first_valid_index()
46
+ assert expected_last == df.last_valid_index()
47
+
48
+ @pytest.mark.parametrize(
49
+ "index",
50
+ [Index([str(i) for i in range(20)]), date_range("2020-01-01", periods=20)],
51
+ )
52
+ def test_first_last_valid(self, index):
53
+ mat = np.random.default_rng(2).standard_normal(len(index))
54
+ mat[:5] = np.nan
55
+ mat[-5:] = np.nan
56
+
57
+ frame = DataFrame({"foo": mat}, index=index)
58
+ assert frame.first_valid_index() == frame.index[5]
59
+ assert frame.last_valid_index() == frame.index[-6]
60
+
61
+ ser = frame["foo"]
62
+ assert ser.first_valid_index() == frame.index[5]
63
+ assert ser.last_valid_index() == frame.index[-6]
64
+
65
+ @pytest.mark.parametrize(
66
+ "index",
67
+ [Index([str(i) for i in range(10)]), date_range("2020-01-01", periods=10)],
68
+ )
69
+ def test_first_last_valid_all_nan(self, index):
70
+ # GH#17400: no valid entries
71
+ frame = DataFrame(np.nan, columns=["foo"], index=index)
72
+
73
+ assert frame.last_valid_index() is None
74
+ assert frame.first_valid_index() is None
75
+
76
+ ser = frame["foo"]
77
+ assert ser.first_valid_index() is None
78
+ assert ser.last_valid_index() is None
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_map.py ADDED
@@ -0,0 +1,216 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas as pd
7
+ from pandas import (
8
+ DataFrame,
9
+ Series,
10
+ Timestamp,
11
+ date_range,
12
+ )
13
+ import pandas._testing as tm
14
+
15
+ from pandas.tseries.offsets import BDay
16
+
17
+
18
+ def test_map(float_frame):
19
+ result = float_frame.map(lambda x: x * 2)
20
+ tm.assert_frame_equal(result, float_frame * 2)
21
+ float_frame.map(type)
22
+
23
+ # GH 465: function returning tuples
24
+ result = float_frame.map(lambda x: (x, x))["A"].iloc[0]
25
+ assert isinstance(result, tuple)
26
+
27
+
28
+ @pytest.mark.parametrize("val", [1, 1.0])
29
+ def test_map_float_object_conversion(val):
30
+ # GH 2909: object conversion to float in constructor?
31
+ df = DataFrame(data=[val, "a"])
32
+ result = df.map(lambda x: x).dtypes[0]
33
+ assert result == object
34
+
35
+
36
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
37
+ def test_map_keeps_dtype(na_action):
38
+ # GH52219
39
+ arr = Series(["a", np.nan, "b"])
40
+ sparse_arr = arr.astype(pd.SparseDtype(object))
41
+ df = DataFrame(data={"a": arr, "b": sparse_arr})
42
+
43
+ def func(x):
44
+ return str.upper(x) if not pd.isna(x) else x
45
+
46
+ result = df.map(func, na_action=na_action)
47
+
48
+ expected_sparse = pd.array(["A", np.nan, "B"], dtype=pd.SparseDtype(object))
49
+ expected_arr = expected_sparse.astype(object)
50
+ expected = DataFrame({"a": expected_arr, "b": expected_sparse})
51
+
52
+ tm.assert_frame_equal(result, expected)
53
+
54
+ result_empty = df.iloc[:0, :].map(func, na_action=na_action)
55
+ expected_empty = expected.iloc[:0, :]
56
+ tm.assert_frame_equal(result_empty, expected_empty)
57
+
58
+
59
+ def test_map_str():
60
+ # GH 2786
61
+ df = DataFrame(np.random.default_rng(2).random((3, 4)))
62
+ df2 = df.copy()
63
+ cols = ["a", "a", "a", "a"]
64
+ df.columns = cols
65
+
66
+ expected = df2.map(str)
67
+ expected.columns = cols
68
+ result = df.map(str)
69
+ tm.assert_frame_equal(result, expected)
70
+
71
+
72
+ @pytest.mark.parametrize(
73
+ "col, val",
74
+ [["datetime", Timestamp("20130101")], ["timedelta", pd.Timedelta("1 min")]],
75
+ )
76
+ def test_map_datetimelike(col, val):
77
+ # datetime/timedelta
78
+ df = DataFrame(np.random.default_rng(2).random((3, 4)))
79
+ df[col] = val
80
+ result = df.map(str)
81
+ assert result.loc[0, col] == str(df.loc[0, col])
82
+
83
+
84
+ @pytest.mark.parametrize(
85
+ "expected",
86
+ [
87
+ DataFrame(),
88
+ DataFrame(columns=list("ABC")),
89
+ DataFrame(index=list("ABC")),
90
+ DataFrame({"A": [], "B": [], "C": []}),
91
+ ],
92
+ )
93
+ @pytest.mark.parametrize("func", [round, lambda x: x])
94
+ def test_map_empty(expected, func):
95
+ # GH 8222
96
+ result = expected.map(func)
97
+ tm.assert_frame_equal(result, expected)
98
+
99
+
100
+ def test_map_kwargs():
101
+ # GH 40652
102
+ result = DataFrame([[1, 2], [3, 4]]).map(lambda x, y: x + y, y=2)
103
+ expected = DataFrame([[3, 4], [5, 6]])
104
+ tm.assert_frame_equal(result, expected)
105
+
106
+
107
+ def test_map_na_ignore(float_frame):
108
+ # GH 23803
109
+ strlen_frame = float_frame.map(lambda x: len(str(x)))
110
+ float_frame_with_na = float_frame.copy()
111
+ mask = np.random.default_rng(2).integers(0, 2, size=float_frame.shape, dtype=bool)
112
+ float_frame_with_na[mask] = pd.NA
113
+ strlen_frame_na_ignore = float_frame_with_na.map(
114
+ lambda x: len(str(x)), na_action="ignore"
115
+ )
116
+ # Set float64 type to avoid upcast when setting NA below
117
+ strlen_frame_with_na = strlen_frame.copy().astype("float64")
118
+ strlen_frame_with_na[mask] = pd.NA
119
+ tm.assert_frame_equal(strlen_frame_na_ignore, strlen_frame_with_na)
120
+
121
+
122
+ def test_map_box_timestamps():
123
+ # GH 2689, GH 2627
124
+ ser = Series(date_range("1/1/2000", periods=10))
125
+
126
+ def func(x):
127
+ return (x.hour, x.day, x.month)
128
+
129
+ # it works!
130
+ DataFrame(ser).map(func)
131
+
132
+
133
+ def test_map_box():
134
+ # ufunc will not be boxed. Same test cases as the test_map_box
135
+ df = DataFrame(
136
+ {
137
+ "a": [Timestamp("2011-01-01"), Timestamp("2011-01-02")],
138
+ "b": [
139
+ Timestamp("2011-01-01", tz="US/Eastern"),
140
+ Timestamp("2011-01-02", tz="US/Eastern"),
141
+ ],
142
+ "c": [pd.Timedelta("1 days"), pd.Timedelta("2 days")],
143
+ "d": [
144
+ pd.Period("2011-01-01", freq="M"),
145
+ pd.Period("2011-01-02", freq="M"),
146
+ ],
147
+ }
148
+ )
149
+
150
+ result = df.map(lambda x: type(x).__name__)
151
+ expected = DataFrame(
152
+ {
153
+ "a": ["Timestamp", "Timestamp"],
154
+ "b": ["Timestamp", "Timestamp"],
155
+ "c": ["Timedelta", "Timedelta"],
156
+ "d": ["Period", "Period"],
157
+ }
158
+ )
159
+ tm.assert_frame_equal(result, expected)
160
+
161
+
162
+ def test_frame_map_dont_convert_datetime64():
163
+ df = DataFrame({"x1": [datetime(1996, 1, 1)]})
164
+
165
+ df = df.map(lambda x: x + BDay())
166
+ df = df.map(lambda x: x + BDay())
167
+
168
+ result = df.x1.dtype
169
+ assert result == "M8[ns]"
170
+
171
+
172
+ def test_map_function_runs_once():
173
+ df = DataFrame({"a": [1, 2, 3]})
174
+ values = [] # Save values function is applied to
175
+
176
+ def reducing_function(val):
177
+ values.append(val)
178
+
179
+ def non_reducing_function(val):
180
+ values.append(val)
181
+ return val
182
+
183
+ for func in [reducing_function, non_reducing_function]:
184
+ del values[:]
185
+
186
+ df.map(func)
187
+ assert values == df.a.to_list()
188
+
189
+
190
+ def test_map_type():
191
+ # GH 46719
192
+ df = DataFrame(
193
+ {"col1": [3, "string", float], "col2": [0.25, datetime(2020, 1, 1), np.nan]},
194
+ index=["a", "b", "c"],
195
+ )
196
+
197
+ result = df.map(type)
198
+ expected = DataFrame(
199
+ {"col1": [int, str, type], "col2": [float, datetime, float]},
200
+ index=["a", "b", "c"],
201
+ )
202
+ tm.assert_frame_equal(result, expected)
203
+
204
+
205
+ def test_map_invalid_na_action(float_frame):
206
+ # GH 23803
207
+ with pytest.raises(ValueError, match="na_action must be .*Got 'abc'"):
208
+ float_frame.map(lambda x: len(str(x)), na_action="abc")
209
+
210
+
211
+ def test_applymap_deprecated():
212
+ # GH52353
213
+ df = DataFrame({"a": [1, 2, 3]})
214
+ msg = "DataFrame.applymap has been deprecated. Use DataFrame.map instead."
215
+ with tm.assert_produces_warning(FutureWarning, match=msg):
216
+ df.applymap(lambda x: x)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_quantile.py ADDED
@@ -0,0 +1,972 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ DataFrame,
7
+ Index,
8
+ Series,
9
+ Timestamp,
10
+ )
11
+ import pandas._testing as tm
12
+
13
+
14
+ @pytest.fixture(
15
+ params=[["linear", "single"], ["nearest", "table"]], ids=lambda x: "-".join(x)
16
+ )
17
+ def interp_method(request):
18
+ """(interpolation, method) arguments for quantile"""
19
+ return request.param
20
+
21
+
22
+ class TestDataFrameQuantile:
23
+ @pytest.mark.parametrize(
24
+ "df,expected",
25
+ [
26
+ [
27
+ DataFrame(
28
+ {
29
+ 0: Series(pd.arrays.SparseArray([1, 2])),
30
+ 1: Series(pd.arrays.SparseArray([3, 4])),
31
+ }
32
+ ),
33
+ Series([1.5, 3.5], name=0.5),
34
+ ],
35
+ [
36
+ DataFrame(Series([0.0, None, 1.0, 2.0], dtype="Sparse[float]")),
37
+ Series([1.0], name=0.5),
38
+ ],
39
+ ],
40
+ )
41
+ def test_quantile_sparse(self, df, expected):
42
+ # GH#17198
43
+ # GH#24600
44
+ result = df.quantile()
45
+ expected = expected.astype("Sparse[float]")
46
+ tm.assert_series_equal(result, expected)
47
+
48
+ def test_quantile(
49
+ self, datetime_frame, interp_method, using_array_manager, request
50
+ ):
51
+ interpolation, method = interp_method
52
+ df = datetime_frame
53
+ result = df.quantile(
54
+ 0.1, axis=0, numeric_only=True, interpolation=interpolation, method=method
55
+ )
56
+ expected = Series(
57
+ [np.percentile(df[col], 10) for col in df.columns],
58
+ index=df.columns,
59
+ name=0.1,
60
+ )
61
+ if interpolation == "linear":
62
+ # np.percentile values only comparable to linear interpolation
63
+ tm.assert_series_equal(result, expected)
64
+ else:
65
+ tm.assert_index_equal(result.index, expected.index)
66
+ request.applymarker(
67
+ pytest.mark.xfail(
68
+ using_array_manager, reason="Name set incorrectly for arraymanager"
69
+ )
70
+ )
71
+ assert result.name == expected.name
72
+
73
+ result = df.quantile(
74
+ 0.9, axis=1, numeric_only=True, interpolation=interpolation, method=method
75
+ )
76
+ expected = Series(
77
+ [np.percentile(df.loc[date], 90) for date in df.index],
78
+ index=df.index,
79
+ name=0.9,
80
+ )
81
+ if interpolation == "linear":
82
+ # np.percentile values only comparable to linear interpolation
83
+ tm.assert_series_equal(result, expected)
84
+ else:
85
+ tm.assert_index_equal(result.index, expected.index)
86
+ request.applymarker(
87
+ pytest.mark.xfail(
88
+ using_array_manager, reason="Name set incorrectly for arraymanager"
89
+ )
90
+ )
91
+ assert result.name == expected.name
92
+
93
+ def test_empty(self, interp_method):
94
+ interpolation, method = interp_method
95
+ q = DataFrame({"x": [], "y": []}).quantile(
96
+ 0.1, axis=0, numeric_only=True, interpolation=interpolation, method=method
97
+ )
98
+ assert np.isnan(q["x"]) and np.isnan(q["y"])
99
+
100
+ def test_non_numeric_exclusion(self, interp_method, request, using_array_manager):
101
+ interpolation, method = interp_method
102
+ df = DataFrame({"col1": ["A", "A", "B", "B"], "col2": [1, 2, 3, 4]})
103
+ rs = df.quantile(
104
+ 0.5, numeric_only=True, interpolation=interpolation, method=method
105
+ )
106
+ xp = df.median(numeric_only=True).rename(0.5)
107
+ if interpolation == "nearest":
108
+ xp = (xp + 0.5).astype(np.int64)
109
+ if method == "table" and using_array_manager:
110
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
111
+ tm.assert_series_equal(rs, xp)
112
+
113
+ def test_axis(self, interp_method, request, using_array_manager):
114
+ # axis
115
+ interpolation, method = interp_method
116
+ df = DataFrame({"A": [1, 2, 3], "B": [2, 3, 4]}, index=[1, 2, 3])
117
+ result = df.quantile(0.5, axis=1, interpolation=interpolation, method=method)
118
+ expected = Series([1.5, 2.5, 3.5], index=[1, 2, 3], name=0.5)
119
+ if interpolation == "nearest":
120
+ expected = expected.astype(np.int64)
121
+ if method == "table" and using_array_manager:
122
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
123
+ tm.assert_series_equal(result, expected)
124
+
125
+ result = df.quantile(
126
+ [0.5, 0.75], axis=1, interpolation=interpolation, method=method
127
+ )
128
+ expected = DataFrame(
129
+ {1: [1.5, 1.75], 2: [2.5, 2.75], 3: [3.5, 3.75]}, index=[0.5, 0.75]
130
+ )
131
+ if interpolation == "nearest":
132
+ expected.iloc[0, :] -= 0.5
133
+ expected.iloc[1, :] += 0.25
134
+ expected = expected.astype(np.int64)
135
+ tm.assert_frame_equal(result, expected, check_index_type=True)
136
+
137
+ def test_axis_numeric_only_true(self, interp_method, request, using_array_manager):
138
+ # We may want to break API in the future to change this
139
+ # so that we exclude non-numeric along the same axis
140
+ # See GH #7312
141
+ interpolation, method = interp_method
142
+ df = DataFrame([[1, 2, 3], ["a", "b", 4]])
143
+ result = df.quantile(
144
+ 0.5, axis=1, numeric_only=True, interpolation=interpolation, method=method
145
+ )
146
+ expected = Series([3.0, 4.0], index=[0, 1], name=0.5)
147
+ if interpolation == "nearest":
148
+ expected = expected.astype(np.int64)
149
+ if method == "table" and using_array_manager:
150
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
151
+ tm.assert_series_equal(result, expected)
152
+
153
+ def test_quantile_date_range(self, interp_method, request, using_array_manager):
154
+ # GH 2460
155
+ interpolation, method = interp_method
156
+ dti = pd.date_range("2016-01-01", periods=3, tz="US/Pacific")
157
+ ser = Series(dti)
158
+ df = DataFrame(ser)
159
+
160
+ result = df.quantile(
161
+ numeric_only=False, interpolation=interpolation, method=method
162
+ )
163
+ expected = Series(
164
+ ["2016-01-02 00:00:00"], name=0.5, dtype="datetime64[ns, US/Pacific]"
165
+ )
166
+ if method == "table" and using_array_manager:
167
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
168
+
169
+ tm.assert_series_equal(result, expected)
170
+
171
+ def test_quantile_axis_mixed(self, interp_method, request, using_array_manager):
172
+ # mixed on axis=1
173
+ interpolation, method = interp_method
174
+ df = DataFrame(
175
+ {
176
+ "A": [1, 2, 3],
177
+ "B": [2.0, 3.0, 4.0],
178
+ "C": pd.date_range("20130101", periods=3),
179
+ "D": ["foo", "bar", "baz"],
180
+ }
181
+ )
182
+ result = df.quantile(
183
+ 0.5, axis=1, numeric_only=True, interpolation=interpolation, method=method
184
+ )
185
+ expected = Series([1.5, 2.5, 3.5], name=0.5)
186
+ if interpolation == "nearest":
187
+ expected -= 0.5
188
+ if method == "table" and using_array_manager:
189
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
190
+ tm.assert_series_equal(result, expected)
191
+
192
+ # must raise
193
+ msg = "'<' not supported between instances of 'Timestamp' and 'float'"
194
+ with pytest.raises(TypeError, match=msg):
195
+ df.quantile(0.5, axis=1, numeric_only=False)
196
+
197
+ def test_quantile_axis_parameter(self, interp_method, request, using_array_manager):
198
+ # GH 9543/9544
199
+ interpolation, method = interp_method
200
+ if method == "table" and using_array_manager:
201
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
202
+ df = DataFrame({"A": [1, 2, 3], "B": [2, 3, 4]}, index=[1, 2, 3])
203
+
204
+ result = df.quantile(0.5, axis=0, interpolation=interpolation, method=method)
205
+
206
+ expected = Series([2.0, 3.0], index=["A", "B"], name=0.5)
207
+ if interpolation == "nearest":
208
+ expected = expected.astype(np.int64)
209
+ tm.assert_series_equal(result, expected)
210
+
211
+ expected = df.quantile(
212
+ 0.5, axis="index", interpolation=interpolation, method=method
213
+ )
214
+ if interpolation == "nearest":
215
+ expected = expected.astype(np.int64)
216
+ tm.assert_series_equal(result, expected)
217
+
218
+ result = df.quantile(0.5, axis=1, interpolation=interpolation, method=method)
219
+
220
+ expected = Series([1.5, 2.5, 3.5], index=[1, 2, 3], name=0.5)
221
+ if interpolation == "nearest":
222
+ expected = expected.astype(np.int64)
223
+ tm.assert_series_equal(result, expected)
224
+
225
+ result = df.quantile(
226
+ 0.5, axis="columns", interpolation=interpolation, method=method
227
+ )
228
+ tm.assert_series_equal(result, expected)
229
+
230
+ msg = "No axis named -1 for object type DataFrame"
231
+ with pytest.raises(ValueError, match=msg):
232
+ df.quantile(0.1, axis=-1, interpolation=interpolation, method=method)
233
+ msg = "No axis named column for object type DataFrame"
234
+ with pytest.raises(ValueError, match=msg):
235
+ df.quantile(0.1, axis="column")
236
+
237
+ def test_quantile_interpolation(self):
238
+ # see gh-10174
239
+
240
+ # interpolation method other than default linear
241
+ df = DataFrame({"A": [1, 2, 3], "B": [2, 3, 4]}, index=[1, 2, 3])
242
+ result = df.quantile(0.5, axis=1, interpolation="nearest")
243
+ expected = Series([1, 2, 3], index=[1, 2, 3], name=0.5)
244
+ tm.assert_series_equal(result, expected)
245
+
246
+ # cross-check interpolation=nearest results in original dtype
247
+ exp = np.percentile(
248
+ np.array([[1, 2, 3], [2, 3, 4]]),
249
+ 0.5,
250
+ axis=0,
251
+ method="nearest",
252
+ )
253
+ expected = Series(exp, index=[1, 2, 3], name=0.5, dtype="int64")
254
+ tm.assert_series_equal(result, expected)
255
+
256
+ # float
257
+ df = DataFrame({"A": [1.0, 2.0, 3.0], "B": [2.0, 3.0, 4.0]}, index=[1, 2, 3])
258
+ result = df.quantile(0.5, axis=1, interpolation="nearest")
259
+ expected = Series([1.0, 2.0, 3.0], index=[1, 2, 3], name=0.5)
260
+ tm.assert_series_equal(result, expected)
261
+ exp = np.percentile(
262
+ np.array([[1.0, 2.0, 3.0], [2.0, 3.0, 4.0]]),
263
+ 0.5,
264
+ axis=0,
265
+ method="nearest",
266
+ )
267
+ expected = Series(exp, index=[1, 2, 3], name=0.5, dtype="float64")
268
+ tm.assert_series_equal(result, expected)
269
+
270
+ # axis
271
+ result = df.quantile([0.5, 0.75], axis=1, interpolation="lower")
272
+ expected = DataFrame(
273
+ {1: [1.0, 1.0], 2: [2.0, 2.0], 3: [3.0, 3.0]}, index=[0.5, 0.75]
274
+ )
275
+ tm.assert_frame_equal(result, expected)
276
+
277
+ # test degenerate case
278
+ df = DataFrame({"x": [], "y": []})
279
+ q = df.quantile(0.1, axis=0, interpolation="higher")
280
+ assert np.isnan(q["x"]) and np.isnan(q["y"])
281
+
282
+ # multi
283
+ df = DataFrame([[1, 1, 1], [2, 2, 2], [3, 3, 3]], columns=["a", "b", "c"])
284
+ result = df.quantile([0.25, 0.5], interpolation="midpoint")
285
+
286
+ # https://github.com/numpy/numpy/issues/7163
287
+ expected = DataFrame(
288
+ [[1.5, 1.5, 1.5], [2.0, 2.0, 2.0]],
289
+ index=[0.25, 0.5],
290
+ columns=["a", "b", "c"],
291
+ )
292
+ tm.assert_frame_equal(result, expected)
293
+
294
+ def test_quantile_interpolation_datetime(self, datetime_frame):
295
+ # see gh-10174
296
+
297
+ # interpolation = linear (default case)
298
+ df = datetime_frame
299
+ q = df.quantile(0.1, axis=0, numeric_only=True, interpolation="linear")
300
+ assert q["A"] == np.percentile(df["A"], 10)
301
+
302
+ def test_quantile_interpolation_int(self, int_frame):
303
+ # see gh-10174
304
+
305
+ df = int_frame
306
+ # interpolation = linear (default case)
307
+ q = df.quantile(0.1)
308
+ assert q["A"] == np.percentile(df["A"], 10)
309
+
310
+ # test with and without interpolation keyword
311
+ q1 = df.quantile(0.1, axis=0, interpolation="linear")
312
+ assert q1["A"] == np.percentile(df["A"], 10)
313
+ tm.assert_series_equal(q, q1)
314
+
315
+ def test_quantile_multi(self, interp_method, request, using_array_manager):
316
+ interpolation, method = interp_method
317
+ df = DataFrame([[1, 1, 1], [2, 2, 2], [3, 3, 3]], columns=["a", "b", "c"])
318
+ result = df.quantile([0.25, 0.5], interpolation=interpolation, method=method)
319
+ expected = DataFrame(
320
+ [[1.5, 1.5, 1.5], [2.0, 2.0, 2.0]],
321
+ index=[0.25, 0.5],
322
+ columns=["a", "b", "c"],
323
+ )
324
+ if interpolation == "nearest":
325
+ expected = expected.astype(np.int64)
326
+ if method == "table" and using_array_manager:
327
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
328
+ tm.assert_frame_equal(result, expected)
329
+
330
+ def test_quantile_multi_axis_1(self, interp_method, request, using_array_manager):
331
+ interpolation, method = interp_method
332
+ df = DataFrame([[1, 1, 1], [2, 2, 2], [3, 3, 3]], columns=["a", "b", "c"])
333
+ result = df.quantile(
334
+ [0.25, 0.5], axis=1, interpolation=interpolation, method=method
335
+ )
336
+ expected = DataFrame(
337
+ [[1.0, 2.0, 3.0]] * 2, index=[0.25, 0.5], columns=[0, 1, 2]
338
+ )
339
+ if interpolation == "nearest":
340
+ expected = expected.astype(np.int64)
341
+ if method == "table" and using_array_manager:
342
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
343
+ tm.assert_frame_equal(result, expected)
344
+
345
+ def test_quantile_multi_empty(self, interp_method):
346
+ interpolation, method = interp_method
347
+ result = DataFrame({"x": [], "y": []}).quantile(
348
+ [0.1, 0.9], axis=0, interpolation=interpolation, method=method
349
+ )
350
+ expected = DataFrame(
351
+ {"x": [np.nan, np.nan], "y": [np.nan, np.nan]}, index=[0.1, 0.9]
352
+ )
353
+ tm.assert_frame_equal(result, expected)
354
+
355
+ def test_quantile_datetime(self, unit):
356
+ dti = pd.to_datetime(["2010", "2011"]).as_unit(unit)
357
+ df = DataFrame({"a": dti, "b": [0, 5]})
358
+
359
+ # exclude datetime
360
+ result = df.quantile(0.5, numeric_only=True)
361
+ expected = Series([2.5], index=["b"], name=0.5)
362
+ tm.assert_series_equal(result, expected)
363
+
364
+ # datetime
365
+ result = df.quantile(0.5, numeric_only=False)
366
+ expected = Series(
367
+ [Timestamp("2010-07-02 12:00:00"), 2.5], index=["a", "b"], name=0.5
368
+ )
369
+ tm.assert_series_equal(result, expected)
370
+
371
+ # datetime w/ multi
372
+ result = df.quantile([0.5], numeric_only=False)
373
+ expected = DataFrame(
374
+ {"a": Timestamp("2010-07-02 12:00:00").as_unit(unit), "b": 2.5},
375
+ index=[0.5],
376
+ )
377
+ tm.assert_frame_equal(result, expected)
378
+
379
+ # axis = 1
380
+ df["c"] = pd.to_datetime(["2011", "2012"]).as_unit(unit)
381
+ result = df[["a", "c"]].quantile(0.5, axis=1, numeric_only=False)
382
+ expected = Series(
383
+ [Timestamp("2010-07-02 12:00:00"), Timestamp("2011-07-02 12:00:00")],
384
+ index=[0, 1],
385
+ name=0.5,
386
+ dtype=f"M8[{unit}]",
387
+ )
388
+ tm.assert_series_equal(result, expected)
389
+
390
+ result = df[["a", "c"]].quantile([0.5], axis=1, numeric_only=False)
391
+ expected = DataFrame(
392
+ [[Timestamp("2010-07-02 12:00:00"), Timestamp("2011-07-02 12:00:00")]],
393
+ index=[0.5],
394
+ columns=[0, 1],
395
+ dtype=f"M8[{unit}]",
396
+ )
397
+ tm.assert_frame_equal(result, expected)
398
+
399
+ # empty when numeric_only=True
400
+ result = df[["a", "c"]].quantile(0.5, numeric_only=True)
401
+ expected = Series([], index=[], dtype=np.float64, name=0.5)
402
+ tm.assert_series_equal(result, expected)
403
+
404
+ result = df[["a", "c"]].quantile([0.5], numeric_only=True)
405
+ expected = DataFrame(index=[0.5], columns=[])
406
+ tm.assert_frame_equal(result, expected)
407
+
408
+ @pytest.mark.parametrize(
409
+ "dtype",
410
+ [
411
+ "datetime64[ns]",
412
+ "datetime64[ns, US/Pacific]",
413
+ "timedelta64[ns]",
414
+ "Period[D]",
415
+ ],
416
+ )
417
+ def test_quantile_dt64_empty(self, dtype, interp_method):
418
+ # GH#41544
419
+ interpolation, method = interp_method
420
+ df = DataFrame(columns=["a", "b"], dtype=dtype)
421
+
422
+ res = df.quantile(
423
+ 0.5, axis=1, numeric_only=False, interpolation=interpolation, method=method
424
+ )
425
+ expected = Series([], index=[], name=0.5, dtype=dtype)
426
+ tm.assert_series_equal(res, expected)
427
+
428
+ # no columns in result, so no dtype preservation
429
+ res = df.quantile(
430
+ [0.5],
431
+ axis=1,
432
+ numeric_only=False,
433
+ interpolation=interpolation,
434
+ method=method,
435
+ )
436
+ expected = DataFrame(index=[0.5], columns=[])
437
+ tm.assert_frame_equal(res, expected)
438
+
439
+ @pytest.mark.parametrize("invalid", [-1, 2, [0.5, -1], [0.5, 2]])
440
+ def test_quantile_invalid(self, invalid, datetime_frame, interp_method):
441
+ msg = "percentiles should all be in the interval \\[0, 1\\]"
442
+ interpolation, method = interp_method
443
+ with pytest.raises(ValueError, match=msg):
444
+ datetime_frame.quantile(invalid, interpolation=interpolation, method=method)
445
+
446
+ def test_quantile_box(self, interp_method, request, using_array_manager):
447
+ interpolation, method = interp_method
448
+ if method == "table" and using_array_manager:
449
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
450
+ df = DataFrame(
451
+ {
452
+ "A": [
453
+ Timestamp("2011-01-01"),
454
+ Timestamp("2011-01-02"),
455
+ Timestamp("2011-01-03"),
456
+ ],
457
+ "B": [
458
+ Timestamp("2011-01-01", tz="US/Eastern"),
459
+ Timestamp("2011-01-02", tz="US/Eastern"),
460
+ Timestamp("2011-01-03", tz="US/Eastern"),
461
+ ],
462
+ "C": [
463
+ pd.Timedelta("1 days"),
464
+ pd.Timedelta("2 days"),
465
+ pd.Timedelta("3 days"),
466
+ ],
467
+ }
468
+ )
469
+
470
+ res = df.quantile(
471
+ 0.5, numeric_only=False, interpolation=interpolation, method=method
472
+ )
473
+
474
+ exp = Series(
475
+ [
476
+ Timestamp("2011-01-02"),
477
+ Timestamp("2011-01-02", tz="US/Eastern"),
478
+ pd.Timedelta("2 days"),
479
+ ],
480
+ name=0.5,
481
+ index=["A", "B", "C"],
482
+ )
483
+ tm.assert_series_equal(res, exp)
484
+
485
+ res = df.quantile(
486
+ [0.5], numeric_only=False, interpolation=interpolation, method=method
487
+ )
488
+ exp = DataFrame(
489
+ [
490
+ [
491
+ Timestamp("2011-01-02"),
492
+ Timestamp("2011-01-02", tz="US/Eastern"),
493
+ pd.Timedelta("2 days"),
494
+ ]
495
+ ],
496
+ index=[0.5],
497
+ columns=["A", "B", "C"],
498
+ )
499
+ tm.assert_frame_equal(res, exp)
500
+
501
+ def test_quantile_box_nat(self):
502
+ # DatetimeLikeBlock may be consolidated and contain NaT in different loc
503
+ df = DataFrame(
504
+ {
505
+ "A": [
506
+ Timestamp("2011-01-01"),
507
+ pd.NaT,
508
+ Timestamp("2011-01-02"),
509
+ Timestamp("2011-01-03"),
510
+ ],
511
+ "a": [
512
+ Timestamp("2011-01-01"),
513
+ Timestamp("2011-01-02"),
514
+ pd.NaT,
515
+ Timestamp("2011-01-03"),
516
+ ],
517
+ "B": [
518
+ Timestamp("2011-01-01", tz="US/Eastern"),
519
+ pd.NaT,
520
+ Timestamp("2011-01-02", tz="US/Eastern"),
521
+ Timestamp("2011-01-03", tz="US/Eastern"),
522
+ ],
523
+ "b": [
524
+ Timestamp("2011-01-01", tz="US/Eastern"),
525
+ Timestamp("2011-01-02", tz="US/Eastern"),
526
+ pd.NaT,
527
+ Timestamp("2011-01-03", tz="US/Eastern"),
528
+ ],
529
+ "C": [
530
+ pd.Timedelta("1 days"),
531
+ pd.Timedelta("2 days"),
532
+ pd.Timedelta("3 days"),
533
+ pd.NaT,
534
+ ],
535
+ "c": [
536
+ pd.NaT,
537
+ pd.Timedelta("1 days"),
538
+ pd.Timedelta("2 days"),
539
+ pd.Timedelta("3 days"),
540
+ ],
541
+ },
542
+ columns=list("AaBbCc"),
543
+ )
544
+
545
+ res = df.quantile(0.5, numeric_only=False)
546
+ exp = Series(
547
+ [
548
+ Timestamp("2011-01-02"),
549
+ Timestamp("2011-01-02"),
550
+ Timestamp("2011-01-02", tz="US/Eastern"),
551
+ Timestamp("2011-01-02", tz="US/Eastern"),
552
+ pd.Timedelta("2 days"),
553
+ pd.Timedelta("2 days"),
554
+ ],
555
+ name=0.5,
556
+ index=list("AaBbCc"),
557
+ )
558
+ tm.assert_series_equal(res, exp)
559
+
560
+ res = df.quantile([0.5], numeric_only=False)
561
+ exp = DataFrame(
562
+ [
563
+ [
564
+ Timestamp("2011-01-02"),
565
+ Timestamp("2011-01-02"),
566
+ Timestamp("2011-01-02", tz="US/Eastern"),
567
+ Timestamp("2011-01-02", tz="US/Eastern"),
568
+ pd.Timedelta("2 days"),
569
+ pd.Timedelta("2 days"),
570
+ ]
571
+ ],
572
+ index=[0.5],
573
+ columns=list("AaBbCc"),
574
+ )
575
+ tm.assert_frame_equal(res, exp)
576
+
577
+ def test_quantile_nan(self, interp_method, request, using_array_manager):
578
+ interpolation, method = interp_method
579
+ if method == "table" and using_array_manager:
580
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
581
+ # GH 14357 - float block where some cols have missing values
582
+ df = DataFrame({"a": np.arange(1, 6.0), "b": np.arange(1, 6.0)})
583
+ df.iloc[-1, 1] = np.nan
584
+
585
+ res = df.quantile(0.5, interpolation=interpolation, method=method)
586
+ exp = Series(
587
+ [3.0, 2.5 if interpolation == "linear" else 3.0], index=["a", "b"], name=0.5
588
+ )
589
+ tm.assert_series_equal(res, exp)
590
+
591
+ res = df.quantile([0.5, 0.75], interpolation=interpolation, method=method)
592
+ exp = DataFrame(
593
+ {
594
+ "a": [3.0, 4.0],
595
+ "b": [2.5, 3.25] if interpolation == "linear" else [3.0, 4.0],
596
+ },
597
+ index=[0.5, 0.75],
598
+ )
599
+ tm.assert_frame_equal(res, exp)
600
+
601
+ res = df.quantile(0.5, axis=1, interpolation=interpolation, method=method)
602
+ exp = Series(np.arange(1.0, 6.0), name=0.5)
603
+ tm.assert_series_equal(res, exp)
604
+
605
+ res = df.quantile(
606
+ [0.5, 0.75], axis=1, interpolation=interpolation, method=method
607
+ )
608
+ exp = DataFrame([np.arange(1.0, 6.0)] * 2, index=[0.5, 0.75])
609
+ if interpolation == "nearest":
610
+ exp.iloc[1, -1] = np.nan
611
+ tm.assert_frame_equal(res, exp)
612
+
613
+ # full-nan column
614
+ df["b"] = np.nan
615
+
616
+ res = df.quantile(0.5, interpolation=interpolation, method=method)
617
+ exp = Series([3.0, np.nan], index=["a", "b"], name=0.5)
618
+ tm.assert_series_equal(res, exp)
619
+
620
+ res = df.quantile([0.5, 0.75], interpolation=interpolation, method=method)
621
+ exp = DataFrame({"a": [3.0, 4.0], "b": [np.nan, np.nan]}, index=[0.5, 0.75])
622
+ tm.assert_frame_equal(res, exp)
623
+
624
+ def test_quantile_nat(self, interp_method, request, using_array_manager, unit):
625
+ interpolation, method = interp_method
626
+ if method == "table" and using_array_manager:
627
+ request.applymarker(pytest.mark.xfail(reason="Axis name incorrectly set."))
628
+ # full NaT column
629
+ df = DataFrame({"a": [pd.NaT, pd.NaT, pd.NaT]}, dtype=f"M8[{unit}]")
630
+
631
+ res = df.quantile(
632
+ 0.5, numeric_only=False, interpolation=interpolation, method=method
633
+ )
634
+ exp = Series([pd.NaT], index=["a"], name=0.5, dtype=f"M8[{unit}]")
635
+ tm.assert_series_equal(res, exp)
636
+
637
+ res = df.quantile(
638
+ [0.5], numeric_only=False, interpolation=interpolation, method=method
639
+ )
640
+ exp = DataFrame({"a": [pd.NaT]}, index=[0.5], dtype=f"M8[{unit}]")
641
+ tm.assert_frame_equal(res, exp)
642
+
643
+ # mixed non-null / full null column
644
+ df = DataFrame(
645
+ {
646
+ "a": [
647
+ Timestamp("2012-01-01"),
648
+ Timestamp("2012-01-02"),
649
+ Timestamp("2012-01-03"),
650
+ ],
651
+ "b": [pd.NaT, pd.NaT, pd.NaT],
652
+ },
653
+ dtype=f"M8[{unit}]",
654
+ )
655
+
656
+ res = df.quantile(
657
+ 0.5, numeric_only=False, interpolation=interpolation, method=method
658
+ )
659
+ exp = Series(
660
+ [Timestamp("2012-01-02"), pd.NaT],
661
+ index=["a", "b"],
662
+ name=0.5,
663
+ dtype=f"M8[{unit}]",
664
+ )
665
+ tm.assert_series_equal(res, exp)
666
+
667
+ res = df.quantile(
668
+ [0.5], numeric_only=False, interpolation=interpolation, method=method
669
+ )
670
+ exp = DataFrame(
671
+ [[Timestamp("2012-01-02"), pd.NaT]],
672
+ index=[0.5],
673
+ columns=["a", "b"],
674
+ dtype=f"M8[{unit}]",
675
+ )
676
+ tm.assert_frame_equal(res, exp)
677
+
678
+ def test_quantile_empty_no_rows_floats(self, interp_method):
679
+ interpolation, method = interp_method
680
+
681
+ df = DataFrame(columns=["a", "b"], dtype="float64")
682
+
683
+ res = df.quantile(0.5, interpolation=interpolation, method=method)
684
+ exp = Series([np.nan, np.nan], index=["a", "b"], name=0.5)
685
+ tm.assert_series_equal(res, exp)
686
+
687
+ res = df.quantile([0.5], interpolation=interpolation, method=method)
688
+ exp = DataFrame([[np.nan, np.nan]], columns=["a", "b"], index=[0.5])
689
+ tm.assert_frame_equal(res, exp)
690
+
691
+ res = df.quantile(0.5, axis=1, interpolation=interpolation, method=method)
692
+ exp = Series([], index=[], dtype="float64", name=0.5)
693
+ tm.assert_series_equal(res, exp)
694
+
695
+ res = df.quantile([0.5], axis=1, interpolation=interpolation, method=method)
696
+ exp = DataFrame(columns=[], index=[0.5])
697
+ tm.assert_frame_equal(res, exp)
698
+
699
+ def test_quantile_empty_no_rows_ints(self, interp_method):
700
+ interpolation, method = interp_method
701
+ df = DataFrame(columns=["a", "b"], dtype="int64")
702
+
703
+ res = df.quantile(0.5, interpolation=interpolation, method=method)
704
+ exp = Series([np.nan, np.nan], index=["a", "b"], name=0.5)
705
+ tm.assert_series_equal(res, exp)
706
+
707
+ def test_quantile_empty_no_rows_dt64(self, interp_method):
708
+ interpolation, method = interp_method
709
+ # datetimes
710
+ df = DataFrame(columns=["a", "b"], dtype="datetime64[ns]")
711
+
712
+ res = df.quantile(
713
+ 0.5, numeric_only=False, interpolation=interpolation, method=method
714
+ )
715
+ exp = Series(
716
+ [pd.NaT, pd.NaT], index=["a", "b"], dtype="datetime64[ns]", name=0.5
717
+ )
718
+ tm.assert_series_equal(res, exp)
719
+
720
+ # Mixed dt64/dt64tz
721
+ df["a"] = df["a"].dt.tz_localize("US/Central")
722
+ res = df.quantile(
723
+ 0.5, numeric_only=False, interpolation=interpolation, method=method
724
+ )
725
+ exp = exp.astype(object)
726
+ if interpolation == "nearest":
727
+ # GH#18463 TODO: would we prefer NaTs here?
728
+ msg = "The 'downcast' keyword in fillna is deprecated"
729
+ with tm.assert_produces_warning(FutureWarning, match=msg):
730
+ exp = exp.fillna(np.nan, downcast=False)
731
+ tm.assert_series_equal(res, exp)
732
+
733
+ # both dt64tz
734
+ df["b"] = df["b"].dt.tz_localize("US/Central")
735
+ res = df.quantile(
736
+ 0.5, numeric_only=False, interpolation=interpolation, method=method
737
+ )
738
+ exp = exp.astype(df["b"].dtype)
739
+ tm.assert_series_equal(res, exp)
740
+
741
+ def test_quantile_empty_no_columns(self, interp_method):
742
+ # GH#23925 _get_numeric_data may drop all columns
743
+ interpolation, method = interp_method
744
+ df = DataFrame(pd.date_range("1/1/18", periods=5))
745
+ df.columns.name = "captain tightpants"
746
+ result = df.quantile(
747
+ 0.5, numeric_only=True, interpolation=interpolation, method=method
748
+ )
749
+ expected = Series([], index=[], name=0.5, dtype=np.float64)
750
+ expected.index.name = "captain tightpants"
751
+ tm.assert_series_equal(result, expected)
752
+
753
+ result = df.quantile(
754
+ [0.5], numeric_only=True, interpolation=interpolation, method=method
755
+ )
756
+ expected = DataFrame([], index=[0.5], columns=[])
757
+ expected.columns.name = "captain tightpants"
758
+ tm.assert_frame_equal(result, expected)
759
+
760
+ def test_quantile_item_cache(
761
+ self, using_array_manager, interp_method, using_copy_on_write
762
+ ):
763
+ # previous behavior incorrect retained an invalid _item_cache entry
764
+ interpolation, method = interp_method
765
+ df = DataFrame(
766
+ np.random.default_rng(2).standard_normal((4, 3)), columns=["A", "B", "C"]
767
+ )
768
+ df["D"] = df["A"] * 2
769
+ ser = df["A"]
770
+ if not using_array_manager:
771
+ assert len(df._mgr.blocks) == 2
772
+
773
+ df.quantile(numeric_only=False, interpolation=interpolation, method=method)
774
+
775
+ if using_copy_on_write:
776
+ ser.iloc[0] = 99
777
+ assert df.iloc[0, 0] == df["A"][0]
778
+ assert df.iloc[0, 0] != 99
779
+ else:
780
+ ser.values[0] = 99
781
+ assert df.iloc[0, 0] == df["A"][0]
782
+ assert df.iloc[0, 0] == 99
783
+
784
+ def test_invalid_method(self):
785
+ with pytest.raises(ValueError, match="Invalid method: foo"):
786
+ DataFrame(range(1)).quantile(0.5, method="foo")
787
+
788
+ def test_table_invalid_interpolation(self):
789
+ with pytest.raises(ValueError, match="Invalid interpolation: foo"):
790
+ DataFrame(range(1)).quantile(0.5, method="table", interpolation="foo")
791
+
792
+
793
+ class TestQuantileExtensionDtype:
794
+ # TODO: tests for axis=1?
795
+ # TODO: empty case?
796
+
797
+ @pytest.fixture(
798
+ params=[
799
+ pytest.param(
800
+ pd.IntervalIndex.from_breaks(range(10)),
801
+ marks=pytest.mark.xfail(reason="raises when trying to add Intervals"),
802
+ ),
803
+ pd.period_range("2016-01-01", periods=9, freq="D"),
804
+ pd.date_range("2016-01-01", periods=9, tz="US/Pacific"),
805
+ pd.timedelta_range("1 Day", periods=9),
806
+ pd.array(np.arange(9), dtype="Int64"),
807
+ pd.array(np.arange(9), dtype="Float64"),
808
+ ],
809
+ ids=lambda x: str(x.dtype),
810
+ )
811
+ def index(self, request):
812
+ # NB: not actually an Index object
813
+ idx = request.param
814
+ idx.name = "A"
815
+ return idx
816
+
817
+ @pytest.fixture
818
+ def obj(self, index, frame_or_series):
819
+ # bc index is not always an Index (yet), we need to re-patch .name
820
+ obj = frame_or_series(index).copy()
821
+
822
+ if frame_or_series is Series:
823
+ obj.name = "A"
824
+ else:
825
+ obj.columns = ["A"]
826
+ return obj
827
+
828
+ def compute_quantile(self, obj, qs):
829
+ if isinstance(obj, Series):
830
+ result = obj.quantile(qs)
831
+ else:
832
+ result = obj.quantile(qs, numeric_only=False)
833
+ return result
834
+
835
+ def test_quantile_ea(self, request, obj, index):
836
+ # result should be invariant to shuffling
837
+ indexer = np.arange(len(index), dtype=np.intp)
838
+ np.random.default_rng(2).shuffle(indexer)
839
+ obj = obj.iloc[indexer]
840
+
841
+ qs = [0.5, 0, 1]
842
+ result = self.compute_quantile(obj, qs)
843
+
844
+ exp_dtype = index.dtype
845
+ if index.dtype == "Int64":
846
+ # match non-nullable casting behavior
847
+ exp_dtype = "Float64"
848
+
849
+ # expected here assumes len(index) == 9
850
+ expected = Series(
851
+ [index[4], index[0], index[-1]], dtype=exp_dtype, index=qs, name="A"
852
+ )
853
+ expected = type(obj)(expected)
854
+
855
+ tm.assert_equal(result, expected)
856
+
857
+ def test_quantile_ea_with_na(self, obj, index):
858
+ obj.iloc[0] = index._na_value
859
+ obj.iloc[-1] = index._na_value
860
+
861
+ # result should be invariant to shuffling
862
+ indexer = np.arange(len(index), dtype=np.intp)
863
+ np.random.default_rng(2).shuffle(indexer)
864
+ obj = obj.iloc[indexer]
865
+
866
+ qs = [0.5, 0, 1]
867
+ result = self.compute_quantile(obj, qs)
868
+
869
+ # expected here assumes len(index) == 9
870
+ expected = Series(
871
+ [index[4], index[1], index[-2]], dtype=index.dtype, index=qs, name="A"
872
+ )
873
+ expected = type(obj)(expected)
874
+ tm.assert_equal(result, expected)
875
+
876
+ def test_quantile_ea_all_na(self, request, obj, index):
877
+ obj.iloc[:] = index._na_value
878
+ # Check dtypes were preserved; this was once a problem see GH#39763
879
+ assert np.all(obj.dtypes == index.dtype)
880
+
881
+ # result should be invariant to shuffling
882
+ indexer = np.arange(len(index), dtype=np.intp)
883
+ np.random.default_rng(2).shuffle(indexer)
884
+ obj = obj.iloc[indexer]
885
+
886
+ qs = [0.5, 0, 1]
887
+ result = self.compute_quantile(obj, qs)
888
+
889
+ expected = index.take([-1, -1, -1], allow_fill=True, fill_value=index._na_value)
890
+ expected = Series(expected, index=qs, name="A")
891
+ expected = type(obj)(expected)
892
+ tm.assert_equal(result, expected)
893
+
894
+ def test_quantile_ea_scalar(self, request, obj, index):
895
+ # scalar qs
896
+
897
+ # result should be invariant to shuffling
898
+ indexer = np.arange(len(index), dtype=np.intp)
899
+ np.random.default_rng(2).shuffle(indexer)
900
+ obj = obj.iloc[indexer]
901
+
902
+ qs = 0.5
903
+ result = self.compute_quantile(obj, qs)
904
+
905
+ exp_dtype = index.dtype
906
+ if index.dtype == "Int64":
907
+ exp_dtype = "Float64"
908
+
909
+ expected = Series({"A": index[4]}, dtype=exp_dtype, name=0.5)
910
+ if isinstance(obj, Series):
911
+ expected = expected["A"]
912
+ assert result == expected
913
+ else:
914
+ tm.assert_series_equal(result, expected)
915
+
916
+ @pytest.mark.parametrize(
917
+ "dtype, expected_data, expected_index, axis",
918
+ [
919
+ ["float64", [], [], 1],
920
+ ["int64", [], [], 1],
921
+ ["float64", [np.nan, np.nan], ["a", "b"], 0],
922
+ ["int64", [np.nan, np.nan], ["a", "b"], 0],
923
+ ],
924
+ )
925
+ def test_empty_numeric(self, dtype, expected_data, expected_index, axis):
926
+ # GH 14564
927
+ df = DataFrame(columns=["a", "b"], dtype=dtype)
928
+ result = df.quantile(0.5, axis=axis)
929
+ expected = Series(
930
+ expected_data, name=0.5, index=Index(expected_index), dtype="float64"
931
+ )
932
+ tm.assert_series_equal(result, expected)
933
+
934
+ @pytest.mark.parametrize(
935
+ "dtype, expected_data, expected_index, axis, expected_dtype",
936
+ [
937
+ ["datetime64[ns]", [], [], 1, "datetime64[ns]"],
938
+ ["datetime64[ns]", [pd.NaT, pd.NaT], ["a", "b"], 0, "datetime64[ns]"],
939
+ ],
940
+ )
941
+ def test_empty_datelike(
942
+ self, dtype, expected_data, expected_index, axis, expected_dtype
943
+ ):
944
+ # GH 14564
945
+ df = DataFrame(columns=["a", "b"], dtype=dtype)
946
+ result = df.quantile(0.5, axis=axis, numeric_only=False)
947
+ expected = Series(
948
+ expected_data, name=0.5, index=Index(expected_index), dtype=expected_dtype
949
+ )
950
+ tm.assert_series_equal(result, expected)
951
+
952
+ @pytest.mark.parametrize(
953
+ "expected_data, expected_index, axis",
954
+ [
955
+ [[np.nan, np.nan], range(2), 1],
956
+ [[], [], 0],
957
+ ],
958
+ )
959
+ def test_datelike_numeric_only(self, expected_data, expected_index, axis):
960
+ # GH 14564
961
+ df = DataFrame(
962
+ {
963
+ "a": pd.to_datetime(["2010", "2011"]),
964
+ "b": [0, 5],
965
+ "c": pd.to_datetime(["2011", "2012"]),
966
+ }
967
+ )
968
+ result = df[["a", "c"]].quantile(0.5, axis=axis, numeric_only=True)
969
+ expected = Series(
970
+ expected_data, name=0.5, index=Index(expected_index), dtype=np.float64
971
+ )
972
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_to_records.py ADDED
@@ -0,0 +1,523 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import abc
2
+ import email
3
+ from email.parser import Parser
4
+
5
+ import numpy as np
6
+ import pytest
7
+
8
+ from pandas import (
9
+ CategoricalDtype,
10
+ DataFrame,
11
+ MultiIndex,
12
+ Series,
13
+ Timestamp,
14
+ date_range,
15
+ )
16
+ import pandas._testing as tm
17
+
18
+
19
+ class TestDataFrameToRecords:
20
+ def test_to_records_timeseries(self):
21
+ index = date_range("1/1/2000", periods=10)
22
+ df = DataFrame(
23
+ np.random.default_rng(2).standard_normal((10, 3)),
24
+ index=index,
25
+ columns=["a", "b", "c"],
26
+ )
27
+
28
+ result = df.to_records()
29
+ assert result["index"].dtype == "M8[ns]"
30
+
31
+ result = df.to_records(index=False)
32
+
33
+ def test_to_records_dt64(self):
34
+ df = DataFrame(
35
+ [["one", "two", "three"], ["four", "five", "six"]],
36
+ index=date_range("2012-01-01", "2012-01-02"),
37
+ )
38
+
39
+ expected = df.index.values[0]
40
+ result = df.to_records()["index"][0]
41
+ assert expected == result
42
+
43
+ def test_to_records_dt64tz_column(self):
44
+ # GH#32535 dont less tz in to_records
45
+ df = DataFrame({"A": date_range("2012-01-01", "2012-01-02", tz="US/Eastern")})
46
+
47
+ result = df.to_records()
48
+
49
+ assert result.dtype["A"] == object
50
+ val = result[0][1]
51
+ assert isinstance(val, Timestamp)
52
+ assert val == df.loc[0, "A"]
53
+
54
+ def test_to_records_with_multindex(self):
55
+ # GH#3189
56
+ index = [
57
+ ["bar", "bar", "baz", "baz", "foo", "foo", "qux", "qux"],
58
+ ["one", "two", "one", "two", "one", "two", "one", "two"],
59
+ ]
60
+ data = np.zeros((8, 4))
61
+ df = DataFrame(data, index=index)
62
+ r = df.to_records(index=True)["level_0"]
63
+ assert "bar" in r
64
+ assert "one" not in r
65
+
66
+ def test_to_records_with_Mapping_type(self):
67
+ abc.Mapping.register(email.message.Message)
68
+
69
+ headers = Parser().parsestr(
70
+ "From: <[email protected]>\n"
71
+ "To: <[email protected]>\n"
72
+ "Subject: Test message\n"
73
+ "\n"
74
+ "Body would go here\n"
75
+ )
76
+
77
+ frame = DataFrame.from_records([headers])
78
+ all(x in frame for x in ["Type", "Subject", "From"])
79
+
80
+ def test_to_records_floats(self):
81
+ df = DataFrame(np.random.default_rng(2).random((10, 10)))
82
+ df.to_records()
83
+
84
+ def test_to_records_index_name(self):
85
+ df = DataFrame(np.random.default_rng(2).standard_normal((3, 3)))
86
+ df.index.name = "X"
87
+ rs = df.to_records()
88
+ assert "X" in rs.dtype.fields
89
+
90
+ df = DataFrame(np.random.default_rng(2).standard_normal((3, 3)))
91
+ rs = df.to_records()
92
+ assert "index" in rs.dtype.fields
93
+
94
+ df.index = MultiIndex.from_tuples([("a", "x"), ("a", "y"), ("b", "z")])
95
+ df.index.names = ["A", None]
96
+ result = df.to_records()
97
+ expected = np.rec.fromarrays(
98
+ [np.array(["a", "a", "b"]), np.array(["x", "y", "z"])]
99
+ + [np.asarray(df.iloc[:, i]) for i in range(3)],
100
+ dtype={
101
+ "names": ["A", "level_1", "0", "1", "2"],
102
+ "formats": [
103
+ "O",
104
+ "O",
105
+ f"{tm.ENDIAN}f8",
106
+ f"{tm.ENDIAN}f8",
107
+ f"{tm.ENDIAN}f8",
108
+ ],
109
+ },
110
+ )
111
+ tm.assert_numpy_array_equal(result, expected)
112
+
113
+ def test_to_records_with_unicode_index(self):
114
+ # GH#13172
115
+ # unicode_literals conflict with to_records
116
+ result = DataFrame([{"a": "x", "b": "y"}]).set_index("a").to_records()
117
+ expected = np.rec.array([("x", "y")], dtype=[("a", "O"), ("b", "O")])
118
+ tm.assert_almost_equal(result, expected)
119
+
120
+ def test_to_records_index_dtype(self):
121
+ # GH 47263: consistent data types for Index and MultiIndex
122
+ df = DataFrame(
123
+ {
124
+ 1: date_range("2022-01-01", periods=2),
125
+ 2: date_range("2022-01-01", periods=2),
126
+ 3: date_range("2022-01-01", periods=2),
127
+ }
128
+ )
129
+
130
+ expected = np.rec.array(
131
+ [
132
+ ("2022-01-01", "2022-01-01", "2022-01-01"),
133
+ ("2022-01-02", "2022-01-02", "2022-01-02"),
134
+ ],
135
+ dtype=[
136
+ ("1", f"{tm.ENDIAN}M8[ns]"),
137
+ ("2", f"{tm.ENDIAN}M8[ns]"),
138
+ ("3", f"{tm.ENDIAN}M8[ns]"),
139
+ ],
140
+ )
141
+
142
+ result = df.to_records(index=False)
143
+ tm.assert_almost_equal(result, expected)
144
+
145
+ result = df.set_index(1).to_records(index=True)
146
+ tm.assert_almost_equal(result, expected)
147
+
148
+ result = df.set_index([1, 2]).to_records(index=True)
149
+ tm.assert_almost_equal(result, expected)
150
+
151
+ def test_to_records_with_unicode_column_names(self):
152
+ # xref issue: https://github.com/numpy/numpy/issues/2407
153
+ # Issue GH#11879. to_records used to raise an exception when used
154
+ # with column names containing non-ascii characters in Python 2
155
+ result = DataFrame(data={"accented_name_é": [1.0]}).to_records()
156
+
157
+ # Note that numpy allows for unicode field names but dtypes need
158
+ # to be specified using dictionary instead of list of tuples.
159
+ expected = np.rec.array(
160
+ [(0, 1.0)],
161
+ dtype={"names": ["index", "accented_name_é"], "formats": ["=i8", "=f8"]},
162
+ )
163
+ tm.assert_almost_equal(result, expected)
164
+
165
+ def test_to_records_with_categorical(self):
166
+ # GH#8626
167
+
168
+ # dict creation
169
+ df = DataFrame({"A": list("abc")}, dtype="category")
170
+ expected = Series(list("abc"), dtype="category", name="A")
171
+ tm.assert_series_equal(df["A"], expected)
172
+
173
+ # list-like creation
174
+ df = DataFrame(list("abc"), dtype="category")
175
+ expected = Series(list("abc"), dtype="category", name=0)
176
+ tm.assert_series_equal(df[0], expected)
177
+
178
+ # to record array
179
+ # this coerces
180
+ result = df.to_records()
181
+ expected = np.rec.array(
182
+ [(0, "a"), (1, "b"), (2, "c")], dtype=[("index", "=i8"), ("0", "O")]
183
+ )
184
+ tm.assert_almost_equal(result, expected)
185
+
186
+ @pytest.mark.parametrize(
187
+ "kwargs,expected",
188
+ [
189
+ # No dtypes --> default to array dtypes.
190
+ (
191
+ {},
192
+ np.rec.array(
193
+ [(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
194
+ dtype=[
195
+ ("index", f"{tm.ENDIAN}i8"),
196
+ ("A", f"{tm.ENDIAN}i8"),
197
+ ("B", f"{tm.ENDIAN}f8"),
198
+ ("C", "O"),
199
+ ],
200
+ ),
201
+ ),
202
+ # Should have no effect in this case.
203
+ (
204
+ {"index": True},
205
+ np.rec.array(
206
+ [(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
207
+ dtype=[
208
+ ("index", f"{tm.ENDIAN}i8"),
209
+ ("A", f"{tm.ENDIAN}i8"),
210
+ ("B", f"{tm.ENDIAN}f8"),
211
+ ("C", "O"),
212
+ ],
213
+ ),
214
+ ),
215
+ # Column dtype applied across the board. Index unaffected.
216
+ (
217
+ {"column_dtypes": f"{tm.ENDIAN}U4"},
218
+ np.rec.array(
219
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
220
+ dtype=[
221
+ ("index", f"{tm.ENDIAN}i8"),
222
+ ("A", f"{tm.ENDIAN}U4"),
223
+ ("B", f"{tm.ENDIAN}U4"),
224
+ ("C", f"{tm.ENDIAN}U4"),
225
+ ],
226
+ ),
227
+ ),
228
+ # Index dtype applied across the board. Columns unaffected.
229
+ (
230
+ {"index_dtypes": f"{tm.ENDIAN}U1"},
231
+ np.rec.array(
232
+ [("0", 1, 0.2, "a"), ("1", 2, 1.5, "bc")],
233
+ dtype=[
234
+ ("index", f"{tm.ENDIAN}U1"),
235
+ ("A", f"{tm.ENDIAN}i8"),
236
+ ("B", f"{tm.ENDIAN}f8"),
237
+ ("C", "O"),
238
+ ],
239
+ ),
240
+ ),
241
+ # Pass in a type instance.
242
+ (
243
+ {"column_dtypes": str},
244
+ np.rec.array(
245
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
246
+ dtype=[
247
+ ("index", f"{tm.ENDIAN}i8"),
248
+ ("A", f"{tm.ENDIAN}U"),
249
+ ("B", f"{tm.ENDIAN}U"),
250
+ ("C", f"{tm.ENDIAN}U"),
251
+ ],
252
+ ),
253
+ ),
254
+ # Pass in a dtype instance.
255
+ (
256
+ {"column_dtypes": np.dtype(np.str_)},
257
+ np.rec.array(
258
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
259
+ dtype=[
260
+ ("index", f"{tm.ENDIAN}i8"),
261
+ ("A", f"{tm.ENDIAN}U"),
262
+ ("B", f"{tm.ENDIAN}U"),
263
+ ("C", f"{tm.ENDIAN}U"),
264
+ ],
265
+ ),
266
+ ),
267
+ # Pass in a dictionary (name-only).
268
+ (
269
+ {
270
+ "column_dtypes": {
271
+ "A": np.int8,
272
+ "B": np.float32,
273
+ "C": f"{tm.ENDIAN}U2",
274
+ }
275
+ },
276
+ np.rec.array(
277
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
278
+ dtype=[
279
+ ("index", f"{tm.ENDIAN}i8"),
280
+ ("A", "i1"),
281
+ ("B", f"{tm.ENDIAN}f4"),
282
+ ("C", f"{tm.ENDIAN}U2"),
283
+ ],
284
+ ),
285
+ ),
286
+ # Pass in a dictionary (indices-only).
287
+ (
288
+ {"index_dtypes": {0: "int16"}},
289
+ np.rec.array(
290
+ [(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
291
+ dtype=[
292
+ ("index", "i2"),
293
+ ("A", f"{tm.ENDIAN}i8"),
294
+ ("B", f"{tm.ENDIAN}f8"),
295
+ ("C", "O"),
296
+ ],
297
+ ),
298
+ ),
299
+ # Ignore index mappings if index is not True.
300
+ (
301
+ {"index": False, "index_dtypes": f"{tm.ENDIAN}U2"},
302
+ np.rec.array(
303
+ [(1, 0.2, "a"), (2, 1.5, "bc")],
304
+ dtype=[
305
+ ("A", f"{tm.ENDIAN}i8"),
306
+ ("B", f"{tm.ENDIAN}f8"),
307
+ ("C", "O"),
308
+ ],
309
+ ),
310
+ ),
311
+ # Non-existent names / indices in mapping should not error.
312
+ (
313
+ {"index_dtypes": {0: "int16", "not-there": "float32"}},
314
+ np.rec.array(
315
+ [(0, 1, 0.2, "a"), (1, 2, 1.5, "bc")],
316
+ dtype=[
317
+ ("index", "i2"),
318
+ ("A", f"{tm.ENDIAN}i8"),
319
+ ("B", f"{tm.ENDIAN}f8"),
320
+ ("C", "O"),
321
+ ],
322
+ ),
323
+ ),
324
+ # Names / indices not in mapping default to array dtype.
325
+ (
326
+ {"column_dtypes": {"A": np.int8, "B": np.float32}},
327
+ np.rec.array(
328
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
329
+ dtype=[
330
+ ("index", f"{tm.ENDIAN}i8"),
331
+ ("A", "i1"),
332
+ ("B", f"{tm.ENDIAN}f4"),
333
+ ("C", "O"),
334
+ ],
335
+ ),
336
+ ),
337
+ # Names / indices not in dtype mapping default to array dtype.
338
+ (
339
+ {"column_dtypes": {"A": np.dtype("int8"), "B": np.dtype("float32")}},
340
+ np.rec.array(
341
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
342
+ dtype=[
343
+ ("index", f"{tm.ENDIAN}i8"),
344
+ ("A", "i1"),
345
+ ("B", f"{tm.ENDIAN}f4"),
346
+ ("C", "O"),
347
+ ],
348
+ ),
349
+ ),
350
+ # Mixture of everything.
351
+ (
352
+ {
353
+ "column_dtypes": {"A": np.int8, "B": np.float32},
354
+ "index_dtypes": f"{tm.ENDIAN}U2",
355
+ },
356
+ np.rec.array(
357
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
358
+ dtype=[
359
+ ("index", f"{tm.ENDIAN}U2"),
360
+ ("A", "i1"),
361
+ ("B", f"{tm.ENDIAN}f4"),
362
+ ("C", "O"),
363
+ ],
364
+ ),
365
+ ),
366
+ # Invalid dype values.
367
+ (
368
+ {"index": False, "column_dtypes": []},
369
+ (ValueError, "Invalid dtype \\[\\] specified for column A"),
370
+ ),
371
+ (
372
+ {"index": False, "column_dtypes": {"A": "int32", "B": 5}},
373
+ (ValueError, "Invalid dtype 5 specified for column B"),
374
+ ),
375
+ # Numpy can't handle EA types, so check error is raised
376
+ (
377
+ {
378
+ "index": False,
379
+ "column_dtypes": {"A": "int32", "B": CategoricalDtype(["a", "b"])},
380
+ },
381
+ (ValueError, "Invalid dtype category specified for column B"),
382
+ ),
383
+ # Check that bad types raise
384
+ (
385
+ {"index": False, "column_dtypes": {"A": "int32", "B": "foo"}},
386
+ (TypeError, "data type [\"']foo[\"'] not understood"),
387
+ ),
388
+ ],
389
+ )
390
+ def test_to_records_dtype(self, kwargs, expected):
391
+ # see GH#18146
392
+ df = DataFrame({"A": [1, 2], "B": [0.2, 1.5], "C": ["a", "bc"]})
393
+
394
+ if not isinstance(expected, np.rec.recarray):
395
+ with pytest.raises(expected[0], match=expected[1]):
396
+ df.to_records(**kwargs)
397
+ else:
398
+ result = df.to_records(**kwargs)
399
+ tm.assert_almost_equal(result, expected)
400
+
401
+ @pytest.mark.parametrize(
402
+ "df,kwargs,expected",
403
+ [
404
+ # MultiIndex in the index.
405
+ (
406
+ DataFrame(
407
+ [[1, 2, 3], [4, 5, 6], [7, 8, 9]], columns=list("abc")
408
+ ).set_index(["a", "b"]),
409
+ {"column_dtypes": "float64", "index_dtypes": {0: "int32", 1: "int8"}},
410
+ np.rec.array(
411
+ [(1, 2, 3.0), (4, 5, 6.0), (7, 8, 9.0)],
412
+ dtype=[
413
+ ("a", f"{tm.ENDIAN}i4"),
414
+ ("b", "i1"),
415
+ ("c", f"{tm.ENDIAN}f8"),
416
+ ],
417
+ ),
418
+ ),
419
+ # MultiIndex in the columns.
420
+ (
421
+ DataFrame(
422
+ [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
423
+ columns=MultiIndex.from_tuples(
424
+ [("a", "d"), ("b", "e"), ("c", "f")]
425
+ ),
426
+ ),
427
+ {
428
+ "column_dtypes": {0: f"{tm.ENDIAN}U1", 2: "float32"},
429
+ "index_dtypes": "float32",
430
+ },
431
+ np.rec.array(
432
+ [(0.0, "1", 2, 3.0), (1.0, "4", 5, 6.0), (2.0, "7", 8, 9.0)],
433
+ dtype=[
434
+ ("index", f"{tm.ENDIAN}f4"),
435
+ ("('a', 'd')", f"{tm.ENDIAN}U1"),
436
+ ("('b', 'e')", f"{tm.ENDIAN}i8"),
437
+ ("('c', 'f')", f"{tm.ENDIAN}f4"),
438
+ ],
439
+ ),
440
+ ),
441
+ # MultiIndex in both the columns and index.
442
+ (
443
+ DataFrame(
444
+ [[1, 2, 3], [4, 5, 6], [7, 8, 9]],
445
+ columns=MultiIndex.from_tuples(
446
+ [("a", "d"), ("b", "e"), ("c", "f")], names=list("ab")
447
+ ),
448
+ index=MultiIndex.from_tuples(
449
+ [("d", -4), ("d", -5), ("f", -6)], names=list("cd")
450
+ ),
451
+ ),
452
+ {
453
+ "column_dtypes": "float64",
454
+ "index_dtypes": {0: f"{tm.ENDIAN}U2", 1: "int8"},
455
+ },
456
+ np.rec.array(
457
+ [
458
+ ("d", -4, 1.0, 2.0, 3.0),
459
+ ("d", -5, 4.0, 5.0, 6.0),
460
+ ("f", -6, 7, 8, 9.0),
461
+ ],
462
+ dtype=[
463
+ ("c", f"{tm.ENDIAN}U2"),
464
+ ("d", "i1"),
465
+ ("('a', 'd')", f"{tm.ENDIAN}f8"),
466
+ ("('b', 'e')", f"{tm.ENDIAN}f8"),
467
+ ("('c', 'f')", f"{tm.ENDIAN}f8"),
468
+ ],
469
+ ),
470
+ ),
471
+ ],
472
+ )
473
+ def test_to_records_dtype_mi(self, df, kwargs, expected):
474
+ # see GH#18146
475
+ result = df.to_records(**kwargs)
476
+ tm.assert_almost_equal(result, expected)
477
+
478
+ def test_to_records_dict_like(self):
479
+ # see GH#18146
480
+ class DictLike:
481
+ def __init__(self, **kwargs) -> None:
482
+ self.d = kwargs.copy()
483
+
484
+ def __getitem__(self, key):
485
+ return self.d.__getitem__(key)
486
+
487
+ def __contains__(self, key) -> bool:
488
+ return key in self.d
489
+
490
+ def keys(self):
491
+ return self.d.keys()
492
+
493
+ df = DataFrame({"A": [1, 2], "B": [0.2, 1.5], "C": ["a", "bc"]})
494
+
495
+ dtype_mappings = {
496
+ "column_dtypes": DictLike(A=np.int8, B=np.float32),
497
+ "index_dtypes": f"{tm.ENDIAN}U2",
498
+ }
499
+
500
+ result = df.to_records(**dtype_mappings)
501
+ expected = np.rec.array(
502
+ [("0", "1", "0.2", "a"), ("1", "2", "1.5", "bc")],
503
+ dtype=[
504
+ ("index", f"{tm.ENDIAN}U2"),
505
+ ("A", "i1"),
506
+ ("B", f"{tm.ENDIAN}f4"),
507
+ ("C", "O"),
508
+ ],
509
+ )
510
+ tm.assert_almost_equal(result, expected)
511
+
512
+ @pytest.mark.parametrize("tz", ["UTC", "GMT", "US/Eastern"])
513
+ def test_to_records_datetimeindex_with_tz(self, tz):
514
+ # GH#13937
515
+ dr = date_range("2016-01-01", periods=10, freq="s", tz=tz)
516
+
517
+ df = DataFrame({"datetime": dr}, index=dr)
518
+
519
+ expected = df.to_records()
520
+ result = df.tz_convert("UTC").to_records()
521
+
522
+ # both converted to UTC, so they are equal
523
+ tm.assert_numpy_array_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/methods/test_tz_localize.py ADDED
@@ -0,0 +1,68 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import timezone
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas import (
7
+ DataFrame,
8
+ Series,
9
+ date_range,
10
+ )
11
+ import pandas._testing as tm
12
+
13
+
14
+ class TestTZLocalize:
15
+ # See also:
16
+ # test_tz_convert_and_localize in test_tz_convert
17
+
18
+ def test_tz_localize(self, frame_or_series):
19
+ rng = date_range("1/1/2011", periods=100, freq="h")
20
+
21
+ obj = DataFrame({"a": 1}, index=rng)
22
+ obj = tm.get_obj(obj, frame_or_series)
23
+
24
+ result = obj.tz_localize("utc")
25
+ expected = DataFrame({"a": 1}, rng.tz_localize("UTC"))
26
+ expected = tm.get_obj(expected, frame_or_series)
27
+
28
+ assert result.index.tz is timezone.utc
29
+ tm.assert_equal(result, expected)
30
+
31
+ def test_tz_localize_axis1(self):
32
+ rng = date_range("1/1/2011", periods=100, freq="h")
33
+
34
+ df = DataFrame({"a": 1}, index=rng)
35
+
36
+ df = df.T
37
+ result = df.tz_localize("utc", axis=1)
38
+ assert result.columns.tz is timezone.utc
39
+
40
+ expected = DataFrame({"a": 1}, rng.tz_localize("UTC"))
41
+
42
+ tm.assert_frame_equal(result, expected.T)
43
+
44
+ def test_tz_localize_naive(self, frame_or_series):
45
+ # Can't localize if already tz-aware
46
+ rng = date_range("1/1/2011", periods=100, freq="h", tz="utc")
47
+ ts = Series(1, index=rng)
48
+ ts = frame_or_series(ts)
49
+
50
+ with pytest.raises(TypeError, match="Already tz-aware"):
51
+ ts.tz_localize("US/Eastern")
52
+
53
+ @pytest.mark.parametrize("copy", [True, False])
54
+ def test_tz_localize_copy_inplace_mutate(self, copy, frame_or_series):
55
+ # GH#6326
56
+ obj = frame_or_series(
57
+ np.arange(0, 5), index=date_range("20131027", periods=5, freq="1h", tz=None)
58
+ )
59
+ orig = obj.copy()
60
+ result = obj.tz_localize("UTC", copy=copy)
61
+ expected = frame_or_series(
62
+ np.arange(0, 5),
63
+ index=date_range("20131027", periods=5, freq="1h", tz="UTC"),
64
+ )
65
+ tm.assert_equal(result, expected)
66
+ tm.assert_equal(obj, orig)
67
+ assert result.index is not obj.index
68
+ assert result is not obj
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_alter_axes.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+ import pytz
4
+
5
+ from pandas import DataFrame
6
+ import pandas._testing as tm
7
+
8
+
9
+ class TestDataFrameAlterAxes:
10
+ # Tests for setting index/columns attributes directly (i.e. __setattr__)
11
+
12
+ def test_set_axis_setattr_index(self):
13
+ # GH 6785
14
+ # set the index manually
15
+
16
+ df = DataFrame([{"ts": datetime(2014, 4, 1, tzinfo=pytz.utc), "foo": 1}])
17
+ expected = df.set_index("ts")
18
+ df.index = df["ts"]
19
+ df.pop("ts")
20
+ tm.assert_frame_equal(df, expected)
21
+
22
+ # Renaming
23
+
24
+ def test_assign_columns(self, float_frame):
25
+ float_frame["hi"] = "there"
26
+
27
+ df = float_frame.copy()
28
+ df.columns = ["foo", "bar", "baz", "quux", "foo2"]
29
+ tm.assert_series_equal(float_frame["C"], df["baz"], check_names=False)
30
+ tm.assert_series_equal(float_frame["hi"], df["foo2"], check_names=False)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_arithmetic.py ADDED
@@ -0,0 +1,2136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from collections import deque
2
+ from datetime import (
3
+ datetime,
4
+ timezone,
5
+ )
6
+ from enum import Enum
7
+ import functools
8
+ import operator
9
+ import re
10
+
11
+ import numpy as np
12
+ import pytest
13
+
14
+ from pandas._config import using_pyarrow_string_dtype
15
+
16
+ import pandas.util._test_decorators as td
17
+
18
+ import pandas as pd
19
+ from pandas import (
20
+ DataFrame,
21
+ Index,
22
+ MultiIndex,
23
+ Series,
24
+ )
25
+ import pandas._testing as tm
26
+ from pandas.core.computation import expressions as expr
27
+ from pandas.tests.frame.common import (
28
+ _check_mixed_float,
29
+ _check_mixed_int,
30
+ )
31
+
32
+
33
+ @pytest.fixture
34
+ def simple_frame():
35
+ """
36
+ Fixture for simple 3x3 DataFrame
37
+
38
+ Columns are ['one', 'two', 'three'], index is ['a', 'b', 'c'].
39
+
40
+ one two three
41
+ a 1.0 2.0 3.0
42
+ b 4.0 5.0 6.0
43
+ c 7.0 8.0 9.0
44
+ """
45
+ arr = np.array([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0], [7.0, 8.0, 9.0]])
46
+
47
+ return DataFrame(arr, columns=["one", "two", "three"], index=["a", "b", "c"])
48
+
49
+
50
+ @pytest.fixture(autouse=True, params=[0, 100], ids=["numexpr", "python"])
51
+ def switch_numexpr_min_elements(request, monkeypatch):
52
+ with monkeypatch.context() as m:
53
+ m.setattr(expr, "_MIN_ELEMENTS", request.param)
54
+ yield request.param
55
+
56
+
57
+ class DummyElement:
58
+ def __init__(self, value, dtype) -> None:
59
+ self.value = value
60
+ self.dtype = np.dtype(dtype)
61
+
62
+ def __array__(self, dtype=None, copy=None):
63
+ return np.array(self.value, dtype=self.dtype)
64
+
65
+ def __str__(self) -> str:
66
+ return f"DummyElement({self.value}, {self.dtype})"
67
+
68
+ def __repr__(self) -> str:
69
+ return str(self)
70
+
71
+ def astype(self, dtype, copy=False):
72
+ self.dtype = dtype
73
+ return self
74
+
75
+ def view(self, dtype):
76
+ return type(self)(self.value.view(dtype), dtype)
77
+
78
+ def any(self, axis=None):
79
+ return bool(self.value)
80
+
81
+
82
+ # -------------------------------------------------------------------
83
+ # Comparisons
84
+
85
+
86
+ class TestFrameComparisons:
87
+ # Specifically _not_ flex-comparisons
88
+
89
+ def test_comparison_with_categorical_dtype(self):
90
+ # GH#12564
91
+
92
+ df = DataFrame({"A": ["foo", "bar", "baz"]})
93
+ exp = DataFrame({"A": [True, False, False]})
94
+
95
+ res = df == "foo"
96
+ tm.assert_frame_equal(res, exp)
97
+
98
+ # casting to categorical shouldn't affect the result
99
+ df["A"] = df["A"].astype("category")
100
+
101
+ res = df == "foo"
102
+ tm.assert_frame_equal(res, exp)
103
+
104
+ def test_frame_in_list(self):
105
+ # GH#12689 this should raise at the DataFrame level, not blocks
106
+ df = DataFrame(
107
+ np.random.default_rng(2).standard_normal((6, 4)), columns=list("ABCD")
108
+ )
109
+ msg = "The truth value of a DataFrame is ambiguous"
110
+ with pytest.raises(ValueError, match=msg):
111
+ df in [None]
112
+
113
+ @pytest.mark.parametrize(
114
+ "arg, arg2",
115
+ [
116
+ [
117
+ {
118
+ "a": np.random.default_rng(2).integers(10, size=10),
119
+ "b": pd.date_range("20010101", periods=10),
120
+ },
121
+ {
122
+ "a": np.random.default_rng(2).integers(10, size=10),
123
+ "b": np.random.default_rng(2).integers(10, size=10),
124
+ },
125
+ ],
126
+ [
127
+ {
128
+ "a": np.random.default_rng(2).integers(10, size=10),
129
+ "b": np.random.default_rng(2).integers(10, size=10),
130
+ },
131
+ {
132
+ "a": np.random.default_rng(2).integers(10, size=10),
133
+ "b": pd.date_range("20010101", periods=10),
134
+ },
135
+ ],
136
+ [
137
+ {
138
+ "a": pd.date_range("20010101", periods=10),
139
+ "b": pd.date_range("20010101", periods=10),
140
+ },
141
+ {
142
+ "a": np.random.default_rng(2).integers(10, size=10),
143
+ "b": np.random.default_rng(2).integers(10, size=10),
144
+ },
145
+ ],
146
+ [
147
+ {
148
+ "a": np.random.default_rng(2).integers(10, size=10),
149
+ "b": pd.date_range("20010101", periods=10),
150
+ },
151
+ {
152
+ "a": pd.date_range("20010101", periods=10),
153
+ "b": pd.date_range("20010101", periods=10),
154
+ },
155
+ ],
156
+ ],
157
+ )
158
+ def test_comparison_invalid(self, arg, arg2):
159
+ # GH4968
160
+ # invalid date/int comparisons
161
+ x = DataFrame(arg)
162
+ y = DataFrame(arg2)
163
+ # we expect the result to match Series comparisons for
164
+ # == and !=, inequalities should raise
165
+ result = x == y
166
+ expected = DataFrame(
167
+ {col: x[col] == y[col] for col in x.columns},
168
+ index=x.index,
169
+ columns=x.columns,
170
+ )
171
+ tm.assert_frame_equal(result, expected)
172
+
173
+ result = x != y
174
+ expected = DataFrame(
175
+ {col: x[col] != y[col] for col in x.columns},
176
+ index=x.index,
177
+ columns=x.columns,
178
+ )
179
+ tm.assert_frame_equal(result, expected)
180
+
181
+ msgs = [
182
+ r"Invalid comparison between dtype=datetime64\[ns\] and ndarray",
183
+ "invalid type promotion",
184
+ (
185
+ # npdev 1.20.0
186
+ r"The DTypes <class 'numpy.dtype\[.*\]'> and "
187
+ r"<class 'numpy.dtype\[.*\]'> do not have a common DType."
188
+ ),
189
+ ]
190
+ msg = "|".join(msgs)
191
+ with pytest.raises(TypeError, match=msg):
192
+ x >= y
193
+ with pytest.raises(TypeError, match=msg):
194
+ x > y
195
+ with pytest.raises(TypeError, match=msg):
196
+ x < y
197
+ with pytest.raises(TypeError, match=msg):
198
+ x <= y
199
+
200
+ @pytest.mark.parametrize(
201
+ "left, right",
202
+ [
203
+ ("gt", "lt"),
204
+ ("lt", "gt"),
205
+ ("ge", "le"),
206
+ ("le", "ge"),
207
+ ("eq", "eq"),
208
+ ("ne", "ne"),
209
+ ],
210
+ )
211
+ def test_timestamp_compare(self, left, right):
212
+ # make sure we can compare Timestamps on the right AND left hand side
213
+ # GH#4982
214
+ df = DataFrame(
215
+ {
216
+ "dates1": pd.date_range("20010101", periods=10),
217
+ "dates2": pd.date_range("20010102", periods=10),
218
+ "intcol": np.random.default_rng(2).integers(1000000000, size=10),
219
+ "floatcol": np.random.default_rng(2).standard_normal(10),
220
+ "stringcol": [chr(100 + i) for i in range(10)],
221
+ }
222
+ )
223
+ df.loc[np.random.default_rng(2).random(len(df)) > 0.5, "dates2"] = pd.NaT
224
+ left_f = getattr(operator, left)
225
+ right_f = getattr(operator, right)
226
+
227
+ # no nats
228
+ if left in ["eq", "ne"]:
229
+ expected = left_f(df, pd.Timestamp("20010109"))
230
+ result = right_f(pd.Timestamp("20010109"), df)
231
+ tm.assert_frame_equal(result, expected)
232
+ else:
233
+ msg = (
234
+ "'(<|>)=?' not supported between "
235
+ "instances of 'numpy.ndarray' and 'Timestamp'"
236
+ )
237
+ with pytest.raises(TypeError, match=msg):
238
+ left_f(df, pd.Timestamp("20010109"))
239
+ with pytest.raises(TypeError, match=msg):
240
+ right_f(pd.Timestamp("20010109"), df)
241
+ # nats
242
+ if left in ["eq", "ne"]:
243
+ expected = left_f(df, pd.Timestamp("nat"))
244
+ result = right_f(pd.Timestamp("nat"), df)
245
+ tm.assert_frame_equal(result, expected)
246
+ else:
247
+ msg = (
248
+ "'(<|>)=?' not supported between "
249
+ "instances of 'numpy.ndarray' and 'NaTType'"
250
+ )
251
+ with pytest.raises(TypeError, match=msg):
252
+ left_f(df, pd.Timestamp("nat"))
253
+ with pytest.raises(TypeError, match=msg):
254
+ right_f(pd.Timestamp("nat"), df)
255
+
256
+ @pytest.mark.xfail(
257
+ using_pyarrow_string_dtype(), reason="can't compare string and int"
258
+ )
259
+ def test_mixed_comparison(self):
260
+ # GH#13128, GH#22163 != datetime64 vs non-dt64 should be False,
261
+ # not raise TypeError
262
+ # (this appears to be fixed before GH#22163, not sure when)
263
+ df = DataFrame([["1989-08-01", 1], ["1989-08-01", 2]])
264
+ other = DataFrame([["a", "b"], ["c", "d"]])
265
+
266
+ result = df == other
267
+ assert not result.any().any()
268
+
269
+ result = df != other
270
+ assert result.all().all()
271
+
272
+ def test_df_boolean_comparison_error(self):
273
+ # GH#4576, GH#22880
274
+ # comparing DataFrame against list/tuple with len(obj) matching
275
+ # len(df.columns) is supported as of GH#22800
276
+ df = DataFrame(np.arange(6).reshape((3, 2)))
277
+
278
+ expected = DataFrame([[False, False], [True, False], [False, False]])
279
+
280
+ result = df == (2, 2)
281
+ tm.assert_frame_equal(result, expected)
282
+
283
+ result = df == [2, 2]
284
+ tm.assert_frame_equal(result, expected)
285
+
286
+ def test_df_float_none_comparison(self):
287
+ df = DataFrame(
288
+ np.random.default_rng(2).standard_normal((8, 3)),
289
+ index=range(8),
290
+ columns=["A", "B", "C"],
291
+ )
292
+
293
+ result = df.__eq__(None)
294
+ assert not result.any().any()
295
+
296
+ def test_df_string_comparison(self):
297
+ df = DataFrame([{"a": 1, "b": "foo"}, {"a": 2, "b": "bar"}])
298
+ mask_a = df.a > 1
299
+ tm.assert_frame_equal(df[mask_a], df.loc[1:1, :])
300
+ tm.assert_frame_equal(df[-mask_a], df.loc[0:0, :])
301
+
302
+ mask_b = df.b == "foo"
303
+ tm.assert_frame_equal(df[mask_b], df.loc[0:0, :])
304
+ tm.assert_frame_equal(df[-mask_b], df.loc[1:1, :])
305
+
306
+
307
+ class TestFrameFlexComparisons:
308
+ # TODO: test_bool_flex_frame needs a better name
309
+ @pytest.mark.parametrize("op", ["eq", "ne", "gt", "lt", "ge", "le"])
310
+ def test_bool_flex_frame(self, op):
311
+ data = np.random.default_rng(2).standard_normal((5, 3))
312
+ other_data = np.random.default_rng(2).standard_normal((5, 3))
313
+ df = DataFrame(data)
314
+ other = DataFrame(other_data)
315
+ ndim_5 = np.ones(df.shape + (1, 3))
316
+
317
+ # DataFrame
318
+ assert df.eq(df).values.all()
319
+ assert not df.ne(df).values.any()
320
+ f = getattr(df, op)
321
+ o = getattr(operator, op)
322
+ # No NAs
323
+ tm.assert_frame_equal(f(other), o(df, other))
324
+ # Unaligned
325
+ part_o = other.loc[3:, 1:].copy()
326
+ rs = f(part_o)
327
+ xp = o(df, part_o.reindex(index=df.index, columns=df.columns))
328
+ tm.assert_frame_equal(rs, xp)
329
+ # ndarray
330
+ tm.assert_frame_equal(f(other.values), o(df, other.values))
331
+ # scalar
332
+ tm.assert_frame_equal(f(0), o(df, 0))
333
+ # NAs
334
+ msg = "Unable to coerce to Series/DataFrame"
335
+ tm.assert_frame_equal(f(np.nan), o(df, np.nan))
336
+ with pytest.raises(ValueError, match=msg):
337
+ f(ndim_5)
338
+
339
+ @pytest.mark.parametrize("box", [np.array, Series])
340
+ def test_bool_flex_series(self, box):
341
+ # Series
342
+ # list/tuple
343
+ data = np.random.default_rng(2).standard_normal((5, 3))
344
+ df = DataFrame(data)
345
+ idx_ser = box(np.random.default_rng(2).standard_normal(5))
346
+ col_ser = box(np.random.default_rng(2).standard_normal(3))
347
+
348
+ idx_eq = df.eq(idx_ser, axis=0)
349
+ col_eq = df.eq(col_ser)
350
+ idx_ne = df.ne(idx_ser, axis=0)
351
+ col_ne = df.ne(col_ser)
352
+ tm.assert_frame_equal(col_eq, df == Series(col_ser))
353
+ tm.assert_frame_equal(col_eq, -col_ne)
354
+ tm.assert_frame_equal(idx_eq, -idx_ne)
355
+ tm.assert_frame_equal(idx_eq, df.T.eq(idx_ser).T)
356
+ tm.assert_frame_equal(col_eq, df.eq(list(col_ser)))
357
+ tm.assert_frame_equal(idx_eq, df.eq(Series(idx_ser), axis=0))
358
+ tm.assert_frame_equal(idx_eq, df.eq(list(idx_ser), axis=0))
359
+
360
+ idx_gt = df.gt(idx_ser, axis=0)
361
+ col_gt = df.gt(col_ser)
362
+ idx_le = df.le(idx_ser, axis=0)
363
+ col_le = df.le(col_ser)
364
+
365
+ tm.assert_frame_equal(col_gt, df > Series(col_ser))
366
+ tm.assert_frame_equal(col_gt, -col_le)
367
+ tm.assert_frame_equal(idx_gt, -idx_le)
368
+ tm.assert_frame_equal(idx_gt, df.T.gt(idx_ser).T)
369
+
370
+ idx_ge = df.ge(idx_ser, axis=0)
371
+ col_ge = df.ge(col_ser)
372
+ idx_lt = df.lt(idx_ser, axis=0)
373
+ col_lt = df.lt(col_ser)
374
+ tm.assert_frame_equal(col_ge, df >= Series(col_ser))
375
+ tm.assert_frame_equal(col_ge, -col_lt)
376
+ tm.assert_frame_equal(idx_ge, -idx_lt)
377
+ tm.assert_frame_equal(idx_ge, df.T.ge(idx_ser).T)
378
+
379
+ idx_ser = Series(np.random.default_rng(2).standard_normal(5))
380
+ col_ser = Series(np.random.default_rng(2).standard_normal(3))
381
+
382
+ def test_bool_flex_frame_na(self):
383
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
384
+ # NA
385
+ df.loc[0, 0] = np.nan
386
+ rs = df.eq(df)
387
+ assert not rs.loc[0, 0]
388
+ rs = df.ne(df)
389
+ assert rs.loc[0, 0]
390
+ rs = df.gt(df)
391
+ assert not rs.loc[0, 0]
392
+ rs = df.lt(df)
393
+ assert not rs.loc[0, 0]
394
+ rs = df.ge(df)
395
+ assert not rs.loc[0, 0]
396
+ rs = df.le(df)
397
+ assert not rs.loc[0, 0]
398
+
399
+ def test_bool_flex_frame_complex_dtype(self):
400
+ # complex
401
+ arr = np.array([np.nan, 1, 6, np.nan])
402
+ arr2 = np.array([2j, np.nan, 7, None])
403
+ df = DataFrame({"a": arr})
404
+ df2 = DataFrame({"a": arr2})
405
+
406
+ msg = "|".join(
407
+ [
408
+ "'>' not supported between instances of '.*' and 'complex'",
409
+ r"unorderable types: .*complex\(\)", # PY35
410
+ ]
411
+ )
412
+ with pytest.raises(TypeError, match=msg):
413
+ # inequalities are not well-defined for complex numbers
414
+ df.gt(df2)
415
+ with pytest.raises(TypeError, match=msg):
416
+ # regression test that we get the same behavior for Series
417
+ df["a"].gt(df2["a"])
418
+ with pytest.raises(TypeError, match=msg):
419
+ # Check that we match numpy behavior here
420
+ df.values > df2.values
421
+
422
+ rs = df.ne(df2)
423
+ assert rs.values.all()
424
+
425
+ arr3 = np.array([2j, np.nan, None])
426
+ df3 = DataFrame({"a": arr3})
427
+
428
+ with pytest.raises(TypeError, match=msg):
429
+ # inequalities are not well-defined for complex numbers
430
+ df3.gt(2j)
431
+ with pytest.raises(TypeError, match=msg):
432
+ # regression test that we get the same behavior for Series
433
+ df3["a"].gt(2j)
434
+ with pytest.raises(TypeError, match=msg):
435
+ # Check that we match numpy behavior here
436
+ df3.values > 2j
437
+
438
+ def test_bool_flex_frame_object_dtype(self):
439
+ # corner, dtype=object
440
+ df1 = DataFrame({"col": ["foo", np.nan, "bar"]}, dtype=object)
441
+ df2 = DataFrame({"col": ["foo", datetime.now(), "bar"]}, dtype=object)
442
+ result = df1.ne(df2)
443
+ exp = DataFrame({"col": [False, True, False]})
444
+ tm.assert_frame_equal(result, exp)
445
+
446
+ def test_flex_comparison_nat(self):
447
+ # GH 15697, GH 22163 df.eq(pd.NaT) should behave like df == pd.NaT,
448
+ # and _definitely_ not be NaN
449
+ df = DataFrame([pd.NaT])
450
+
451
+ result = df == pd.NaT
452
+ # result.iloc[0, 0] is a np.bool_ object
453
+ assert result.iloc[0, 0].item() is False
454
+
455
+ result = df.eq(pd.NaT)
456
+ assert result.iloc[0, 0].item() is False
457
+
458
+ result = df != pd.NaT
459
+ assert result.iloc[0, 0].item() is True
460
+
461
+ result = df.ne(pd.NaT)
462
+ assert result.iloc[0, 0].item() is True
463
+
464
+ @pytest.mark.parametrize("opname", ["eq", "ne", "gt", "lt", "ge", "le"])
465
+ def test_df_flex_cmp_constant_return_types(self, opname):
466
+ # GH 15077, non-empty DataFrame
467
+ df = DataFrame({"x": [1, 2, 3], "y": [1.0, 2.0, 3.0]})
468
+ const = 2
469
+
470
+ result = getattr(df, opname)(const).dtypes.value_counts()
471
+ tm.assert_series_equal(
472
+ result, Series([2], index=[np.dtype(bool)], name="count")
473
+ )
474
+
475
+ @pytest.mark.parametrize("opname", ["eq", "ne", "gt", "lt", "ge", "le"])
476
+ def test_df_flex_cmp_constant_return_types_empty(self, opname):
477
+ # GH 15077 empty DataFrame
478
+ df = DataFrame({"x": [1, 2, 3], "y": [1.0, 2.0, 3.0]})
479
+ const = 2
480
+
481
+ empty = df.iloc[:0]
482
+ result = getattr(empty, opname)(const).dtypes.value_counts()
483
+ tm.assert_series_equal(
484
+ result, Series([2], index=[np.dtype(bool)], name="count")
485
+ )
486
+
487
+ def test_df_flex_cmp_ea_dtype_with_ndarray_series(self):
488
+ ii = pd.IntervalIndex.from_breaks([1, 2, 3])
489
+ df = DataFrame({"A": ii, "B": ii})
490
+
491
+ ser = Series([0, 0])
492
+ res = df.eq(ser, axis=0)
493
+
494
+ expected = DataFrame({"A": [False, False], "B": [False, False]})
495
+ tm.assert_frame_equal(res, expected)
496
+
497
+ ser2 = Series([1, 2], index=["A", "B"])
498
+ res2 = df.eq(ser2, axis=1)
499
+ tm.assert_frame_equal(res2, expected)
500
+
501
+
502
+ # -------------------------------------------------------------------
503
+ # Arithmetic
504
+
505
+
506
+ class TestFrameFlexArithmetic:
507
+ def test_floordiv_axis0(self):
508
+ # make sure we df.floordiv(ser, axis=0) matches column-wise result
509
+ arr = np.arange(3)
510
+ ser = Series(arr)
511
+ df = DataFrame({"A": ser, "B": ser})
512
+
513
+ result = df.floordiv(ser, axis=0)
514
+
515
+ expected = DataFrame({col: df[col] // ser for col in df.columns})
516
+
517
+ tm.assert_frame_equal(result, expected)
518
+
519
+ result2 = df.floordiv(ser.values, axis=0)
520
+ tm.assert_frame_equal(result2, expected)
521
+
522
+ def test_df_add_td64_columnwise(self):
523
+ # GH 22534 Check that column-wise addition broadcasts correctly
524
+ dti = pd.date_range("2016-01-01", periods=10)
525
+ tdi = pd.timedelta_range("1", periods=10)
526
+ tser = Series(tdi)
527
+ df = DataFrame({0: dti, 1: tdi})
528
+
529
+ result = df.add(tser, axis=0)
530
+ expected = DataFrame({0: dti + tdi, 1: tdi + tdi})
531
+ tm.assert_frame_equal(result, expected)
532
+
533
+ def test_df_add_flex_filled_mixed_dtypes(self):
534
+ # GH 19611
535
+ dti = pd.date_range("2016-01-01", periods=3)
536
+ ser = Series(["1 Day", "NaT", "2 Days"], dtype="timedelta64[ns]")
537
+ df = DataFrame({"A": dti, "B": ser})
538
+ other = DataFrame({"A": ser, "B": ser})
539
+ fill = pd.Timedelta(days=1).to_timedelta64()
540
+ result = df.add(other, fill_value=fill)
541
+
542
+ expected = DataFrame(
543
+ {
544
+ "A": Series(
545
+ ["2016-01-02", "2016-01-03", "2016-01-05"], dtype="datetime64[ns]"
546
+ ),
547
+ "B": ser * 2,
548
+ }
549
+ )
550
+ tm.assert_frame_equal(result, expected)
551
+
552
+ def test_arith_flex_frame(
553
+ self, all_arithmetic_operators, float_frame, mixed_float_frame
554
+ ):
555
+ # one instance of parametrized fixture
556
+ op = all_arithmetic_operators
557
+
558
+ def f(x, y):
559
+ # r-versions not in operator-stdlib; get op without "r" and invert
560
+ if op.startswith("__r"):
561
+ return getattr(operator, op.replace("__r", "__"))(y, x)
562
+ return getattr(operator, op)(x, y)
563
+
564
+ result = getattr(float_frame, op)(2 * float_frame)
565
+ expected = f(float_frame, 2 * float_frame)
566
+ tm.assert_frame_equal(result, expected)
567
+
568
+ # vs mix float
569
+ result = getattr(mixed_float_frame, op)(2 * mixed_float_frame)
570
+ expected = f(mixed_float_frame, 2 * mixed_float_frame)
571
+ tm.assert_frame_equal(result, expected)
572
+ _check_mixed_float(result, dtype={"C": None})
573
+
574
+ @pytest.mark.parametrize("op", ["__add__", "__sub__", "__mul__"])
575
+ def test_arith_flex_frame_mixed(
576
+ self,
577
+ op,
578
+ int_frame,
579
+ mixed_int_frame,
580
+ mixed_float_frame,
581
+ switch_numexpr_min_elements,
582
+ ):
583
+ f = getattr(operator, op)
584
+
585
+ # vs mix int
586
+ result = getattr(mixed_int_frame, op)(2 + mixed_int_frame)
587
+ expected = f(mixed_int_frame, 2 + mixed_int_frame)
588
+
589
+ # no overflow in the uint
590
+ dtype = None
591
+ if op in ["__sub__"]:
592
+ dtype = {"B": "uint64", "C": None}
593
+ elif op in ["__add__", "__mul__"]:
594
+ dtype = {"C": None}
595
+ if expr.USE_NUMEXPR and switch_numexpr_min_elements == 0:
596
+ # when using numexpr, the casting rules are slightly different:
597
+ # in the `2 + mixed_int_frame` operation, int32 column becomes
598
+ # and int64 column (not preserving dtype in operation with Python
599
+ # scalar), and then the int32/int64 combo results in int64 result
600
+ dtype["A"] = (2 + mixed_int_frame)["A"].dtype
601
+ tm.assert_frame_equal(result, expected)
602
+ _check_mixed_int(result, dtype=dtype)
603
+
604
+ # vs mix float
605
+ result = getattr(mixed_float_frame, op)(2 * mixed_float_frame)
606
+ expected = f(mixed_float_frame, 2 * mixed_float_frame)
607
+ tm.assert_frame_equal(result, expected)
608
+ _check_mixed_float(result, dtype={"C": None})
609
+
610
+ # vs plain int
611
+ result = getattr(int_frame, op)(2 * int_frame)
612
+ expected = f(int_frame, 2 * int_frame)
613
+ tm.assert_frame_equal(result, expected)
614
+
615
+ @pytest.mark.parametrize("dim", range(3, 6))
616
+ def test_arith_flex_frame_raise(self, all_arithmetic_operators, float_frame, dim):
617
+ # one instance of parametrized fixture
618
+ op = all_arithmetic_operators
619
+
620
+ # Check that arrays with dim >= 3 raise
621
+ arr = np.ones((1,) * dim)
622
+ msg = "Unable to coerce to Series/DataFrame"
623
+ with pytest.raises(ValueError, match=msg):
624
+ getattr(float_frame, op)(arr)
625
+
626
+ def test_arith_flex_frame_corner(self, float_frame):
627
+ const_add = float_frame.add(1)
628
+ tm.assert_frame_equal(const_add, float_frame + 1)
629
+
630
+ # corner cases
631
+ result = float_frame.add(float_frame[:0])
632
+ expected = float_frame.sort_index() * np.nan
633
+ tm.assert_frame_equal(result, expected)
634
+
635
+ result = float_frame[:0].add(float_frame)
636
+ expected = float_frame.sort_index() * np.nan
637
+ tm.assert_frame_equal(result, expected)
638
+
639
+ with pytest.raises(NotImplementedError, match="fill_value"):
640
+ float_frame.add(float_frame.iloc[0], fill_value=3)
641
+
642
+ with pytest.raises(NotImplementedError, match="fill_value"):
643
+ float_frame.add(float_frame.iloc[0], axis="index", fill_value=3)
644
+
645
+ @pytest.mark.parametrize("op", ["add", "sub", "mul", "mod"])
646
+ def test_arith_flex_series_ops(self, simple_frame, op):
647
+ # after arithmetic refactor, add truediv here
648
+ df = simple_frame
649
+
650
+ row = df.xs("a")
651
+ col = df["two"]
652
+ f = getattr(df, op)
653
+ op = getattr(operator, op)
654
+ tm.assert_frame_equal(f(row), op(df, row))
655
+ tm.assert_frame_equal(f(col, axis=0), op(df.T, col).T)
656
+
657
+ def test_arith_flex_series(self, simple_frame):
658
+ df = simple_frame
659
+
660
+ row = df.xs("a")
661
+ col = df["two"]
662
+ # special case for some reason
663
+ tm.assert_frame_equal(df.add(row, axis=None), df + row)
664
+
665
+ # cases which will be refactored after big arithmetic refactor
666
+ tm.assert_frame_equal(df.div(row), df / row)
667
+ tm.assert_frame_equal(df.div(col, axis=0), (df.T / col).T)
668
+
669
+ @pytest.mark.parametrize("dtype", ["int64", "float64"])
670
+ def test_arith_flex_series_broadcasting(self, dtype):
671
+ # broadcasting issue in GH 7325
672
+ df = DataFrame(np.arange(3 * 2).reshape((3, 2)), dtype=dtype)
673
+ expected = DataFrame([[np.nan, np.inf], [1.0, 1.5], [1.0, 1.25]])
674
+ result = df.div(df[0], axis="index")
675
+ tm.assert_frame_equal(result, expected)
676
+
677
+ def test_arith_flex_zero_len_raises(self):
678
+ # GH 19522 passing fill_value to frame flex arith methods should
679
+ # raise even in the zero-length special cases
680
+ ser_len0 = Series([], dtype=object)
681
+ df_len0 = DataFrame(columns=["A", "B"])
682
+ df = DataFrame([[1, 2], [3, 4]], columns=["A", "B"])
683
+
684
+ with pytest.raises(NotImplementedError, match="fill_value"):
685
+ df.add(ser_len0, fill_value="E")
686
+
687
+ with pytest.raises(NotImplementedError, match="fill_value"):
688
+ df_len0.sub(df["A"], axis=None, fill_value=3)
689
+
690
+ def test_flex_add_scalar_fill_value(self):
691
+ # GH#12723
692
+ dat = np.array([0, 1, np.nan, 3, 4, 5], dtype="float")
693
+ df = DataFrame({"foo": dat}, index=range(6))
694
+
695
+ exp = df.fillna(0).add(2)
696
+ res = df.add(2, fill_value=0)
697
+ tm.assert_frame_equal(res, exp)
698
+
699
+ def test_sub_alignment_with_duplicate_index(self):
700
+ # GH#5185 dup aligning operations should work
701
+ df1 = DataFrame([1, 2, 3, 4, 5], index=[1, 2, 1, 2, 3])
702
+ df2 = DataFrame([1, 2, 3], index=[1, 2, 3])
703
+ expected = DataFrame([0, 2, 0, 2, 2], index=[1, 1, 2, 2, 3])
704
+ result = df1.sub(df2)
705
+ tm.assert_frame_equal(result, expected)
706
+
707
+ @pytest.mark.parametrize("op", ["__add__", "__mul__", "__sub__", "__truediv__"])
708
+ def test_arithmetic_with_duplicate_columns(self, op):
709
+ # operations
710
+ df = DataFrame({"A": np.arange(10), "B": np.random.default_rng(2).random(10)})
711
+ expected = getattr(df, op)(df)
712
+ expected.columns = ["A", "A"]
713
+ df.columns = ["A", "A"]
714
+ result = getattr(df, op)(df)
715
+ tm.assert_frame_equal(result, expected)
716
+
717
+ @pytest.mark.parametrize("level", [0, None])
718
+ def test_broadcast_multiindex(self, level):
719
+ # GH34388
720
+ df1 = DataFrame({"A": [0, 1, 2], "B": [1, 2, 3]})
721
+ df1.columns = df1.columns.set_names("L1")
722
+
723
+ df2 = DataFrame({("A", "C"): [0, 0, 0], ("A", "D"): [0, 0, 0]})
724
+ df2.columns = df2.columns.set_names(["L1", "L2"])
725
+
726
+ result = df1.add(df2, level=level)
727
+ expected = DataFrame({("A", "C"): [0, 1, 2], ("A", "D"): [0, 1, 2]})
728
+ expected.columns = expected.columns.set_names(["L1", "L2"])
729
+
730
+ tm.assert_frame_equal(result, expected)
731
+
732
+ def test_frame_multiindex_operations(self):
733
+ # GH 43321
734
+ df = DataFrame(
735
+ {2010: [1, 2, 3], 2020: [3, 4, 5]},
736
+ index=MultiIndex.from_product(
737
+ [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
738
+ ),
739
+ )
740
+
741
+ series = Series(
742
+ [0.4],
743
+ index=MultiIndex.from_product([["b"], ["a"]], names=["mod", "scen"]),
744
+ )
745
+
746
+ expected = DataFrame(
747
+ {2010: [1.4, 2.4, 3.4], 2020: [3.4, 4.4, 5.4]},
748
+ index=MultiIndex.from_product(
749
+ [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
750
+ ),
751
+ )
752
+ result = df.add(series, axis=0)
753
+
754
+ tm.assert_frame_equal(result, expected)
755
+
756
+ def test_frame_multiindex_operations_series_index_to_frame_index(self):
757
+ # GH 43321
758
+ df = DataFrame(
759
+ {2010: [1], 2020: [3]},
760
+ index=MultiIndex.from_product([["a"], ["b"]], names=["scen", "mod"]),
761
+ )
762
+
763
+ series = Series(
764
+ [10.0, 20.0, 30.0],
765
+ index=MultiIndex.from_product(
766
+ [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
767
+ ),
768
+ )
769
+
770
+ expected = DataFrame(
771
+ {2010: [11.0, 21, 31.0], 2020: [13.0, 23.0, 33.0]},
772
+ index=MultiIndex.from_product(
773
+ [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
774
+ ),
775
+ )
776
+ result = df.add(series, axis=0)
777
+
778
+ tm.assert_frame_equal(result, expected)
779
+
780
+ def test_frame_multiindex_operations_no_align(self):
781
+ df = DataFrame(
782
+ {2010: [1, 2, 3], 2020: [3, 4, 5]},
783
+ index=MultiIndex.from_product(
784
+ [["a"], ["b"], [0, 1, 2]], names=["scen", "mod", "id"]
785
+ ),
786
+ )
787
+
788
+ series = Series(
789
+ [0.4],
790
+ index=MultiIndex.from_product([["c"], ["a"]], names=["mod", "scen"]),
791
+ )
792
+
793
+ expected = DataFrame(
794
+ {2010: np.nan, 2020: np.nan},
795
+ index=MultiIndex.from_tuples(
796
+ [
797
+ ("a", "b", 0),
798
+ ("a", "b", 1),
799
+ ("a", "b", 2),
800
+ ("a", "c", np.nan),
801
+ ],
802
+ names=["scen", "mod", "id"],
803
+ ),
804
+ )
805
+ result = df.add(series, axis=0)
806
+
807
+ tm.assert_frame_equal(result, expected)
808
+
809
+ def test_frame_multiindex_operations_part_align(self):
810
+ df = DataFrame(
811
+ {2010: [1, 2, 3], 2020: [3, 4, 5]},
812
+ index=MultiIndex.from_tuples(
813
+ [
814
+ ("a", "b", 0),
815
+ ("a", "b", 1),
816
+ ("a", "c", 2),
817
+ ],
818
+ names=["scen", "mod", "id"],
819
+ ),
820
+ )
821
+
822
+ series = Series(
823
+ [0.4],
824
+ index=MultiIndex.from_product([["b"], ["a"]], names=["mod", "scen"]),
825
+ )
826
+
827
+ expected = DataFrame(
828
+ {2010: [1.4, 2.4, np.nan], 2020: [3.4, 4.4, np.nan]},
829
+ index=MultiIndex.from_tuples(
830
+ [
831
+ ("a", "b", 0),
832
+ ("a", "b", 1),
833
+ ("a", "c", 2),
834
+ ],
835
+ names=["scen", "mod", "id"],
836
+ ),
837
+ )
838
+ result = df.add(series, axis=0)
839
+
840
+ tm.assert_frame_equal(result, expected)
841
+
842
+
843
+ class TestFrameArithmetic:
844
+ def test_td64_op_nat_casting(self):
845
+ # Make sure we don't accidentally treat timedelta64(NaT) as datetime64
846
+ # when calling dispatch_to_series in DataFrame arithmetic
847
+ ser = Series(["NaT", "NaT"], dtype="timedelta64[ns]")
848
+ df = DataFrame([[1, 2], [3, 4]])
849
+
850
+ result = df * ser
851
+ expected = DataFrame({0: ser, 1: ser})
852
+ tm.assert_frame_equal(result, expected)
853
+
854
+ def test_df_add_2d_array_rowlike_broadcasts(self):
855
+ # GH#23000
856
+ arr = np.arange(6).reshape(3, 2)
857
+ df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])
858
+
859
+ rowlike = arr[[1], :] # shape --> (1, ncols)
860
+ assert rowlike.shape == (1, df.shape[1])
861
+
862
+ expected = DataFrame(
863
+ [[2, 4], [4, 6], [6, 8]],
864
+ columns=df.columns,
865
+ index=df.index,
866
+ # specify dtype explicitly to avoid failing
867
+ # on 32bit builds
868
+ dtype=arr.dtype,
869
+ )
870
+ result = df + rowlike
871
+ tm.assert_frame_equal(result, expected)
872
+ result = rowlike + df
873
+ tm.assert_frame_equal(result, expected)
874
+
875
+ def test_df_add_2d_array_collike_broadcasts(self):
876
+ # GH#23000
877
+ arr = np.arange(6).reshape(3, 2)
878
+ df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])
879
+
880
+ collike = arr[:, [1]] # shape --> (nrows, 1)
881
+ assert collike.shape == (df.shape[0], 1)
882
+
883
+ expected = DataFrame(
884
+ [[1, 2], [5, 6], [9, 10]],
885
+ columns=df.columns,
886
+ index=df.index,
887
+ # specify dtype explicitly to avoid failing
888
+ # on 32bit builds
889
+ dtype=arr.dtype,
890
+ )
891
+ result = df + collike
892
+ tm.assert_frame_equal(result, expected)
893
+ result = collike + df
894
+ tm.assert_frame_equal(result, expected)
895
+
896
+ def test_df_arith_2d_array_rowlike_broadcasts(
897
+ self, request, all_arithmetic_operators, using_array_manager
898
+ ):
899
+ # GH#23000
900
+ opname = all_arithmetic_operators
901
+
902
+ if using_array_manager and opname in ("__rmod__", "__rfloordiv__"):
903
+ # TODO(ArrayManager) decide on dtypes
904
+ td.mark_array_manager_not_yet_implemented(request)
905
+
906
+ arr = np.arange(6).reshape(3, 2)
907
+ df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])
908
+
909
+ rowlike = arr[[1], :] # shape --> (1, ncols)
910
+ assert rowlike.shape == (1, df.shape[1])
911
+
912
+ exvals = [
913
+ getattr(df.loc["A"], opname)(rowlike.squeeze()),
914
+ getattr(df.loc["B"], opname)(rowlike.squeeze()),
915
+ getattr(df.loc["C"], opname)(rowlike.squeeze()),
916
+ ]
917
+
918
+ expected = DataFrame(exvals, columns=df.columns, index=df.index)
919
+
920
+ result = getattr(df, opname)(rowlike)
921
+ tm.assert_frame_equal(result, expected)
922
+
923
+ def test_df_arith_2d_array_collike_broadcasts(
924
+ self, request, all_arithmetic_operators, using_array_manager
925
+ ):
926
+ # GH#23000
927
+ opname = all_arithmetic_operators
928
+
929
+ if using_array_manager and opname in ("__rmod__", "__rfloordiv__"):
930
+ # TODO(ArrayManager) decide on dtypes
931
+ td.mark_array_manager_not_yet_implemented(request)
932
+
933
+ arr = np.arange(6).reshape(3, 2)
934
+ df = DataFrame(arr, columns=[True, False], index=["A", "B", "C"])
935
+
936
+ collike = arr[:, [1]] # shape --> (nrows, 1)
937
+ assert collike.shape == (df.shape[0], 1)
938
+
939
+ exvals = {
940
+ True: getattr(df[True], opname)(collike.squeeze()),
941
+ False: getattr(df[False], opname)(collike.squeeze()),
942
+ }
943
+
944
+ dtype = None
945
+ if opname in ["__rmod__", "__rfloordiv__"]:
946
+ # Series ops may return mixed int/float dtypes in cases where
947
+ # DataFrame op will return all-float. So we upcast `expected`
948
+ dtype = np.common_type(*(x.values for x in exvals.values()))
949
+
950
+ expected = DataFrame(exvals, columns=df.columns, index=df.index, dtype=dtype)
951
+
952
+ result = getattr(df, opname)(collike)
953
+ tm.assert_frame_equal(result, expected)
954
+
955
+ def test_df_bool_mul_int(self):
956
+ # GH 22047, GH 22163 multiplication by 1 should result in int dtype,
957
+ # not object dtype
958
+ df = DataFrame([[False, True], [False, False]])
959
+ result = df * 1
960
+
961
+ # On appveyor this comes back as np.int32 instead of np.int64,
962
+ # so we check dtype.kind instead of just dtype
963
+ kinds = result.dtypes.apply(lambda x: x.kind)
964
+ assert (kinds == "i").all()
965
+
966
+ result = 1 * df
967
+ kinds = result.dtypes.apply(lambda x: x.kind)
968
+ assert (kinds == "i").all()
969
+
970
+ def test_arith_mixed(self):
971
+ left = DataFrame({"A": ["a", "b", "c"], "B": [1, 2, 3]})
972
+
973
+ result = left + left
974
+ expected = DataFrame({"A": ["aa", "bb", "cc"], "B": [2, 4, 6]})
975
+ tm.assert_frame_equal(result, expected)
976
+
977
+ @pytest.mark.parametrize("col", ["A", "B"])
978
+ def test_arith_getitem_commute(self, all_arithmetic_functions, col):
979
+ df = DataFrame({"A": [1.1, 3.3], "B": [2.5, -3.9]})
980
+ result = all_arithmetic_functions(df, 1)[col]
981
+ expected = all_arithmetic_functions(df[col], 1)
982
+ tm.assert_series_equal(result, expected)
983
+
984
+ @pytest.mark.parametrize(
985
+ "values", [[1, 2], (1, 2), np.array([1, 2]), range(1, 3), deque([1, 2])]
986
+ )
987
+ def test_arith_alignment_non_pandas_object(self, values):
988
+ # GH#17901
989
+ df = DataFrame({"A": [1, 1], "B": [1, 1]})
990
+ expected = DataFrame({"A": [2, 2], "B": [3, 3]})
991
+ result = df + values
992
+ tm.assert_frame_equal(result, expected)
993
+
994
+ def test_arith_non_pandas_object(self):
995
+ df = DataFrame(
996
+ np.arange(1, 10, dtype="f8").reshape(3, 3),
997
+ columns=["one", "two", "three"],
998
+ index=["a", "b", "c"],
999
+ )
1000
+
1001
+ val1 = df.xs("a").values
1002
+ added = DataFrame(df.values + val1, index=df.index, columns=df.columns)
1003
+ tm.assert_frame_equal(df + val1, added)
1004
+
1005
+ added = DataFrame((df.values.T + val1).T, index=df.index, columns=df.columns)
1006
+ tm.assert_frame_equal(df.add(val1, axis=0), added)
1007
+
1008
+ val2 = list(df["two"])
1009
+
1010
+ added = DataFrame(df.values + val2, index=df.index, columns=df.columns)
1011
+ tm.assert_frame_equal(df + val2, added)
1012
+
1013
+ added = DataFrame((df.values.T + val2).T, index=df.index, columns=df.columns)
1014
+ tm.assert_frame_equal(df.add(val2, axis="index"), added)
1015
+
1016
+ val3 = np.random.default_rng(2).random(df.shape)
1017
+ added = DataFrame(df.values + val3, index=df.index, columns=df.columns)
1018
+ tm.assert_frame_equal(df.add(val3), added)
1019
+
1020
+ def test_operations_with_interval_categories_index(self, all_arithmetic_operators):
1021
+ # GH#27415
1022
+ op = all_arithmetic_operators
1023
+ ind = pd.CategoricalIndex(pd.interval_range(start=0.0, end=2.0))
1024
+ data = [1, 2]
1025
+ df = DataFrame([data], columns=ind)
1026
+ num = 10
1027
+ result = getattr(df, op)(num)
1028
+ expected = DataFrame([[getattr(n, op)(num) for n in data]], columns=ind)
1029
+ tm.assert_frame_equal(result, expected)
1030
+
1031
+ def test_frame_with_frame_reindex(self):
1032
+ # GH#31623
1033
+ df = DataFrame(
1034
+ {
1035
+ "foo": [pd.Timestamp("2019"), pd.Timestamp("2020")],
1036
+ "bar": [pd.Timestamp("2018"), pd.Timestamp("2021")],
1037
+ },
1038
+ columns=["foo", "bar"],
1039
+ dtype="M8[ns]",
1040
+ )
1041
+ df2 = df[["foo"]]
1042
+
1043
+ result = df - df2
1044
+
1045
+ expected = DataFrame(
1046
+ {"foo": [pd.Timedelta(0), pd.Timedelta(0)], "bar": [np.nan, np.nan]},
1047
+ columns=["bar", "foo"],
1048
+ )
1049
+ tm.assert_frame_equal(result, expected)
1050
+
1051
+ @pytest.mark.parametrize(
1052
+ "value, dtype",
1053
+ [
1054
+ (1, "i8"),
1055
+ (1.0, "f8"),
1056
+ (2**63, "f8"),
1057
+ (1j, "complex128"),
1058
+ (2**63, "complex128"),
1059
+ (True, "bool"),
1060
+ (np.timedelta64(20, "ns"), "<m8[ns]"),
1061
+ (np.datetime64(20, "ns"), "<M8[ns]"),
1062
+ ],
1063
+ )
1064
+ @pytest.mark.parametrize(
1065
+ "op",
1066
+ [
1067
+ operator.add,
1068
+ operator.sub,
1069
+ operator.mul,
1070
+ operator.truediv,
1071
+ operator.mod,
1072
+ operator.pow,
1073
+ ],
1074
+ ids=lambda x: x.__name__,
1075
+ )
1076
+ def test_binop_other(self, op, value, dtype, switch_numexpr_min_elements):
1077
+ skip = {
1078
+ (operator.truediv, "bool"),
1079
+ (operator.pow, "bool"),
1080
+ (operator.add, "bool"),
1081
+ (operator.mul, "bool"),
1082
+ }
1083
+
1084
+ elem = DummyElement(value, dtype)
1085
+ df = DataFrame({"A": [elem.value, elem.value]}, dtype=elem.dtype)
1086
+
1087
+ invalid = {
1088
+ (operator.pow, "<M8[ns]"),
1089
+ (operator.mod, "<M8[ns]"),
1090
+ (operator.truediv, "<M8[ns]"),
1091
+ (operator.mul, "<M8[ns]"),
1092
+ (operator.add, "<M8[ns]"),
1093
+ (operator.pow, "<m8[ns]"),
1094
+ (operator.mul, "<m8[ns]"),
1095
+ (operator.sub, "bool"),
1096
+ (operator.mod, "complex128"),
1097
+ }
1098
+
1099
+ if (op, dtype) in invalid:
1100
+ warn = None
1101
+ if (dtype == "<M8[ns]" and op == operator.add) or (
1102
+ dtype == "<m8[ns]" and op == operator.mul
1103
+ ):
1104
+ msg = None
1105
+ elif dtype == "complex128":
1106
+ msg = "ufunc 'remainder' not supported for the input types"
1107
+ elif op is operator.sub:
1108
+ msg = "numpy boolean subtract, the `-` operator, is "
1109
+ if (
1110
+ dtype == "bool"
1111
+ and expr.USE_NUMEXPR
1112
+ and switch_numexpr_min_elements == 0
1113
+ ):
1114
+ warn = UserWarning # "evaluating in Python space because ..."
1115
+ else:
1116
+ msg = (
1117
+ f"cannot perform __{op.__name__}__ with this "
1118
+ "index type: (DatetimeArray|TimedeltaArray)"
1119
+ )
1120
+
1121
+ with pytest.raises(TypeError, match=msg):
1122
+ with tm.assert_produces_warning(warn):
1123
+ op(df, elem.value)
1124
+
1125
+ elif (op, dtype) in skip:
1126
+ if op in [operator.add, operator.mul]:
1127
+ if expr.USE_NUMEXPR and switch_numexpr_min_elements == 0:
1128
+ # "evaluating in Python space because ..."
1129
+ warn = UserWarning
1130
+ else:
1131
+ warn = None
1132
+ with tm.assert_produces_warning(warn):
1133
+ op(df, elem.value)
1134
+
1135
+ else:
1136
+ msg = "operator '.*' not implemented for .* dtypes"
1137
+ with pytest.raises(NotImplementedError, match=msg):
1138
+ op(df, elem.value)
1139
+
1140
+ else:
1141
+ with tm.assert_produces_warning(None):
1142
+ result = op(df, elem.value).dtypes
1143
+ expected = op(df, value).dtypes
1144
+ tm.assert_series_equal(result, expected)
1145
+
1146
+ def test_arithmetic_midx_cols_different_dtypes(self):
1147
+ # GH#49769
1148
+ midx = MultiIndex.from_arrays([Series([1, 2]), Series([3, 4])])
1149
+ midx2 = MultiIndex.from_arrays([Series([1, 2], dtype="Int8"), Series([3, 4])])
1150
+ left = DataFrame([[1, 2], [3, 4]], columns=midx)
1151
+ right = DataFrame([[1, 2], [3, 4]], columns=midx2)
1152
+ result = left - right
1153
+ expected = DataFrame([[0, 0], [0, 0]], columns=midx)
1154
+ tm.assert_frame_equal(result, expected)
1155
+
1156
+ def test_arithmetic_midx_cols_different_dtypes_different_order(self):
1157
+ # GH#49769
1158
+ midx = MultiIndex.from_arrays([Series([1, 2]), Series([3, 4])])
1159
+ midx2 = MultiIndex.from_arrays([Series([2, 1], dtype="Int8"), Series([4, 3])])
1160
+ left = DataFrame([[1, 2], [3, 4]], columns=midx)
1161
+ right = DataFrame([[1, 2], [3, 4]], columns=midx2)
1162
+ result = left - right
1163
+ expected = DataFrame([[-1, 1], [-1, 1]], columns=midx)
1164
+ tm.assert_frame_equal(result, expected)
1165
+
1166
+
1167
def test_frame_with_zero_len_series_corner_cases():
    # GH#28600
    # Arithmetic with an empty Series broadcasts to an all-NaN frame ...
    df = DataFrame(
        np.random.default_rng(2).standard_normal(6).reshape(3, 2), columns=["A", "B"]
    )
    empty_ser = Series(dtype=np.float64)

    result = df + empty_ser
    expected = DataFrame(df.values * np.nan, columns=df.columns)
    tm.assert_frame_equal(result, expected)

    # ... while comparisons no longer auto-align (deprecated GH#36795,
    # enforced in 2.0) and must raise instead.
    with pytest.raises(ValueError, match="not aligned"):
        df == empty_ser

    # the non-float (datetime64) case raises the same alignment error
    # rather than a TypeError
    df2 = DataFrame(df.values.view("M8[ns]"), columns=df.columns)
    with pytest.raises(ValueError, match="not aligned"):
        df2 == empty_ser
+
1189
+
1190
def test_zero_len_frame_with_series_corner_cases():
    # GH#28600: adding a Series to a zero-row frame leaves the frame empty
    # and unchanged.
    df = DataFrame(columns=["A", "B"], dtype=np.float64)
    ser = Series([1, 2], index=["A", "B"])

    result = df + ser
    tm.assert_frame_equal(result, df)
+
1199
+
1200
def test_frame_single_columns_object_sum_axis_1():
    # GH 13758: row-wise sum of a single object column passes the values
    # through, with the all-NaN row summing to 0.
    df = DataFrame({"One": Series(["A", 1.2, np.nan])})

    result = df.sum(axis=1)
    expected = Series(["A", 1.2, 0])
    tm.assert_series_equal(result, expected)
+
1210
+
1211
+ # -------------------------------------------------------------------
1212
+ # Unsorted
1213
+ # These arithmetic tests were previously in other files, eventually
1214
+ # should be parametrized and put into tests.arithmetic
1215
+
1216
+
1217
+ class TestFrameArithmeticUnsorted:
1218
+ def test_frame_add_tz_mismatch_converts_to_utc(self):
1219
+ rng = pd.date_range("1/1/2011", periods=10, freq="h", tz="US/Eastern")
1220
+ df = DataFrame(
1221
+ np.random.default_rng(2).standard_normal(len(rng)), index=rng, columns=["a"]
1222
+ )
1223
+
1224
+ df_moscow = df.tz_convert("Europe/Moscow")
1225
+ result = df + df_moscow
1226
+ assert result.index.tz is timezone.utc
1227
+
1228
+ result = df_moscow + df
1229
+ assert result.index.tz is timezone.utc
1230
+
1231
+ def test_align_frame(self):
1232
+ rng = pd.period_range("1/1/2000", "1/1/2010", freq="Y")
1233
+ ts = DataFrame(
1234
+ np.random.default_rng(2).standard_normal((len(rng), 3)), index=rng
1235
+ )
1236
+
1237
+ result = ts + ts[::2]
1238
+ expected = ts + ts
1239
+ expected.iloc[1::2] = np.nan
1240
+ tm.assert_frame_equal(result, expected)
1241
+
1242
+ half = ts[::2]
1243
+ result = ts + half.take(np.random.default_rng(2).permutation(len(half)))
1244
+ tm.assert_frame_equal(result, expected)
1245
+
1246
+ @pytest.mark.parametrize(
1247
+ "op", [operator.add, operator.sub, operator.mul, operator.truediv]
1248
+ )
1249
+ def test_operators_none_as_na(self, op):
1250
+ df = DataFrame(
1251
+ {"col1": [2, 5.0, 123, None], "col2": [1, 2, 3, 4]}, dtype=object
1252
+ )
1253
+
1254
+ # since filling converts dtypes from object, changed expected to be
1255
+ # object
1256
+ msg = "Downcasting object dtype arrays"
1257
+ with tm.assert_produces_warning(FutureWarning, match=msg):
1258
+ filled = df.fillna(np.nan)
1259
+ result = op(df, 3)
1260
+ expected = op(filled, 3).astype(object)
1261
+ expected[pd.isna(expected)] = np.nan
1262
+ tm.assert_frame_equal(result, expected)
1263
+
1264
+ result = op(df, df)
1265
+ expected = op(filled, filled).astype(object)
1266
+ expected[pd.isna(expected)] = np.nan
1267
+ tm.assert_frame_equal(result, expected)
1268
+
1269
+ msg = "Downcasting object dtype arrays"
1270
+ with tm.assert_produces_warning(FutureWarning, match=msg):
1271
+ result = op(df, df.fillna(7))
1272
+ tm.assert_frame_equal(result, expected)
1273
+
1274
+ msg = "Downcasting object dtype arrays"
1275
+ with tm.assert_produces_warning(FutureWarning, match=msg):
1276
+ result = op(df.fillna(7), df)
1277
+ tm.assert_frame_equal(result, expected)
1278
+
1279
+ @pytest.mark.parametrize("op,res", [("__eq__", False), ("__ne__", True)])
1280
+ # TODO: not sure what's correct here.
1281
+ @pytest.mark.filterwarnings("ignore:elementwise:FutureWarning")
1282
+ def test_logical_typeerror_with_non_valid(self, op, res, float_frame):
1283
+ # we are comparing floats vs a string
1284
+ result = getattr(float_frame, op)("foo")
1285
+ assert bool(result.all().all()) is res
1286
+
1287
+ @pytest.mark.parametrize("op", ["add", "sub", "mul", "div", "truediv"])
1288
+ def test_binary_ops_align(self, op):
1289
+ # test aligning binary ops
1290
+
1291
+ # GH 6681
1292
+ index = MultiIndex.from_product(
1293
+ [list("abc"), ["one", "two", "three"], [1, 2, 3]],
1294
+ names=["first", "second", "third"],
1295
+ )
1296
+
1297
+ df = DataFrame(
1298
+ np.arange(27 * 3).reshape(27, 3),
1299
+ index=index,
1300
+ columns=["value1", "value2", "value3"],
1301
+ ).sort_index()
1302
+
1303
+ idx = pd.IndexSlice
1304
+ opa = getattr(operator, op, None)
1305
+ if opa is None:
1306
+ return
1307
+
1308
+ x = Series([1.0, 10.0, 100.0], [1, 2, 3])
1309
+ result = getattr(df, op)(x, level="third", axis=0)
1310
+
1311
+ expected = pd.concat(
1312
+ [opa(df.loc[idx[:, :, i], :], v) for i, v in x.items()]
1313
+ ).sort_index()
1314
+ tm.assert_frame_equal(result, expected)
1315
+
1316
+ x = Series([1.0, 10.0], ["two", "three"])
1317
+ result = getattr(df, op)(x, level="second", axis=0)
1318
+
1319
+ expected = (
1320
+ pd.concat([opa(df.loc[idx[:, i], :], v) for i, v in x.items()])
1321
+ .reindex_like(df)
1322
+ .sort_index()
1323
+ )
1324
+ tm.assert_frame_equal(result, expected)
1325
+
1326
+ def test_binary_ops_align_series_dataframe(self):
1327
+ # GH9463 (alignment level of dataframe with series)
1328
+
1329
+ midx = MultiIndex.from_product([["A", "B"], ["a", "b"]])
1330
+ df = DataFrame(np.ones((2, 4), dtype="int64"), columns=midx)
1331
+ s = Series({"a": 1, "b": 2})
1332
+
1333
+ df2 = df.copy()
1334
+ df2.columns.names = ["lvl0", "lvl1"]
1335
+ s2 = s.copy()
1336
+ s2.index.name = "lvl1"
1337
+
1338
+ # different cases of integer/string level names:
1339
+ res1 = df.mul(s, axis=1, level=1)
1340
+ res2 = df.mul(s2, axis=1, level=1)
1341
+ res3 = df2.mul(s, axis=1, level=1)
1342
+ res4 = df2.mul(s2, axis=1, level=1)
1343
+ res5 = df2.mul(s, axis=1, level="lvl1")
1344
+ res6 = df2.mul(s2, axis=1, level="lvl1")
1345
+
1346
+ exp = DataFrame(
1347
+ np.array([[1, 2, 1, 2], [1, 2, 1, 2]], dtype="int64"), columns=midx
1348
+ )
1349
+
1350
+ for res in [res1, res2]:
1351
+ tm.assert_frame_equal(res, exp)
1352
+
1353
+ exp.columns.names = ["lvl0", "lvl1"]
1354
+ for res in [res3, res4, res5, res6]:
1355
+ tm.assert_frame_equal(res, exp)
1356
+
1357
+ def test_add_with_dti_mismatched_tzs(self):
1358
+ base = pd.DatetimeIndex(["2011-01-01", "2011-01-02", "2011-01-03"], tz="UTC")
1359
+ idx1 = base.tz_convert("Asia/Tokyo")[:2]
1360
+ idx2 = base.tz_convert("US/Eastern")[1:]
1361
+
1362
+ df1 = DataFrame({"A": [1, 2]}, index=idx1)
1363
+ df2 = DataFrame({"A": [1, 1]}, index=idx2)
1364
+ exp = DataFrame({"A": [np.nan, 3, np.nan]}, index=base)
1365
+ tm.assert_frame_equal(df1 + df2, exp)
1366
+
1367
+ def test_combineFrame(self, float_frame, mixed_float_frame, mixed_int_frame):
1368
+ frame_copy = float_frame.reindex(float_frame.index[::2])
1369
+
1370
+ del frame_copy["D"]
1371
+ # adding NAs to first 5 values of column "C"
1372
+ frame_copy.loc[: frame_copy.index[4], "C"] = np.nan
1373
+
1374
+ added = float_frame + frame_copy
1375
+
1376
+ indexer = added["A"].dropna().index
1377
+ exp = (float_frame["A"] * 2).copy()
1378
+
1379
+ tm.assert_series_equal(added["A"].dropna(), exp.loc[indexer])
1380
+
1381
+ exp.loc[~exp.index.isin(indexer)] = np.nan
1382
+ tm.assert_series_equal(added["A"], exp.loc[added["A"].index])
1383
+
1384
+ assert np.isnan(added["C"].reindex(frame_copy.index)[:5]).all()
1385
+
1386
+ # assert(False)
1387
+
1388
+ assert np.isnan(added["D"]).all()
1389
+
1390
+ self_added = float_frame + float_frame
1391
+ tm.assert_index_equal(self_added.index, float_frame.index)
1392
+
1393
+ added_rev = frame_copy + float_frame
1394
+ assert np.isnan(added["D"]).all()
1395
+ assert np.isnan(added_rev["D"]).all()
1396
+
1397
+ # corner cases
1398
+
1399
+ # empty
1400
+ plus_empty = float_frame + DataFrame()
1401
+ assert np.isnan(plus_empty.values).all()
1402
+
1403
+ empty_plus = DataFrame() + float_frame
1404
+ assert np.isnan(empty_plus.values).all()
1405
+
1406
+ empty_empty = DataFrame() + DataFrame()
1407
+ assert empty_empty.empty
1408
+
1409
+ # out of order
1410
+ reverse = float_frame.reindex(columns=float_frame.columns[::-1])
1411
+
1412
+ tm.assert_frame_equal(reverse + float_frame, float_frame * 2)
1413
+
1414
+ # mix vs float64, upcast
1415
+ added = float_frame + mixed_float_frame
1416
+ _check_mixed_float(added, dtype="float64")
1417
+ added = mixed_float_frame + float_frame
1418
+ _check_mixed_float(added, dtype="float64")
1419
+
1420
+ # mix vs mix
1421
+ added = mixed_float_frame + mixed_float_frame
1422
+ _check_mixed_float(added, dtype={"C": None})
1423
+
1424
+ # with int
1425
+ added = float_frame + mixed_int_frame
1426
+ _check_mixed_float(added, dtype="float64")
1427
+
1428
+ def test_combine_series(self, float_frame, mixed_float_frame, mixed_int_frame):
1429
+ # Series
1430
+ series = float_frame.xs(float_frame.index[0])
1431
+
1432
+ added = float_frame + series
1433
+
1434
+ for key, s in added.items():
1435
+ tm.assert_series_equal(s, float_frame[key] + series[key])
1436
+
1437
+ larger_series = series.to_dict()
1438
+ larger_series["E"] = 1
1439
+ larger_series = Series(larger_series)
1440
+ larger_added = float_frame + larger_series
1441
+
1442
+ for key, s in float_frame.items():
1443
+ tm.assert_series_equal(larger_added[key], s + series[key])
1444
+ assert "E" in larger_added
1445
+ assert np.isnan(larger_added["E"]).all()
1446
+
1447
+ # no upcast needed
1448
+ added = mixed_float_frame + series
1449
+ assert np.all(added.dtypes == series.dtype)
1450
+
1451
+ # vs mix (upcast) as needed
1452
+ added = mixed_float_frame + series.astype("float32")
1453
+ _check_mixed_float(added, dtype={"C": None})
1454
+ added = mixed_float_frame + series.astype("float16")
1455
+ _check_mixed_float(added, dtype={"C": None})
1456
+
1457
+ # these used to raise with numexpr as we are adding an int64 to an
1458
+ # uint64....weird vs int
1459
+ added = mixed_int_frame + (100 * series).astype("int64")
1460
+ _check_mixed_int(
1461
+ added, dtype={"A": "int64", "B": "float64", "C": "int64", "D": "int64"}
1462
+ )
1463
+ added = mixed_int_frame + (100 * series).astype("int32")
1464
+ _check_mixed_int(
1465
+ added, dtype={"A": "int32", "B": "float64", "C": "int32", "D": "int64"}
1466
+ )
1467
+
1468
+ def test_combine_timeseries(self, datetime_frame):
1469
+ # TimeSeries
1470
+ ts = datetime_frame["A"]
1471
+
1472
+ # 10890
1473
+ # we no longer allow auto timeseries broadcasting
1474
+ # and require explicit broadcasting
1475
+ added = datetime_frame.add(ts, axis="index")
1476
+
1477
+ for key, col in datetime_frame.items():
1478
+ result = col + ts
1479
+ tm.assert_series_equal(added[key], result, check_names=False)
1480
+ assert added[key].name == key
1481
+ if col.name == ts.name:
1482
+ assert result.name == "A"
1483
+ else:
1484
+ assert result.name is None
1485
+
1486
+ smaller_frame = datetime_frame[:-5]
1487
+ smaller_added = smaller_frame.add(ts, axis="index")
1488
+
1489
+ tm.assert_index_equal(smaller_added.index, datetime_frame.index)
1490
+
1491
+ smaller_ts = ts[:-5]
1492
+ smaller_added2 = datetime_frame.add(smaller_ts, axis="index")
1493
+ tm.assert_frame_equal(smaller_added, smaller_added2)
1494
+
1495
+ # length 0, result is all-nan
1496
+ result = datetime_frame.add(ts[:0], axis="index")
1497
+ expected = DataFrame(
1498
+ np.nan, index=datetime_frame.index, columns=datetime_frame.columns
1499
+ )
1500
+ tm.assert_frame_equal(result, expected)
1501
+
1502
+ # Frame is all-nan
1503
+ result = datetime_frame[:0].add(ts, axis="index")
1504
+ expected = DataFrame(
1505
+ np.nan, index=datetime_frame.index, columns=datetime_frame.columns
1506
+ )
1507
+ tm.assert_frame_equal(result, expected)
1508
+
1509
+ # empty but with non-empty index
1510
+ frame = datetime_frame[:1].reindex(columns=[])
1511
+ result = frame.mul(ts, axis="index")
1512
+ assert len(result) == len(ts)
1513
+
1514
+ def test_combineFunc(self, float_frame, mixed_float_frame):
1515
+ result = float_frame * 2
1516
+ tm.assert_numpy_array_equal(result.values, float_frame.values * 2)
1517
+
1518
+ # vs mix
1519
+ result = mixed_float_frame * 2
1520
+ for c, s in result.items():
1521
+ tm.assert_numpy_array_equal(s.values, mixed_float_frame[c].values * 2)
1522
+ _check_mixed_float(result, dtype={"C": None})
1523
+
1524
+ result = DataFrame() * 2
1525
+ assert result.index.equals(DataFrame().index)
1526
+ assert len(result.columns) == 0
1527
+
1528
+ @pytest.mark.parametrize(
1529
+ "func",
1530
+ [operator.eq, operator.ne, operator.lt, operator.gt, operator.ge, operator.le],
1531
+ )
1532
+ def test_comparisons(self, simple_frame, float_frame, func):
1533
+ df1 = DataFrame(
1534
+ np.random.default_rng(2).standard_normal((30, 4)),
1535
+ columns=Index(list("ABCD"), dtype=object),
1536
+ index=pd.date_range("2000-01-01", periods=30, freq="B"),
1537
+ )
1538
+ df2 = df1.copy()
1539
+
1540
+ row = simple_frame.xs("a")
1541
+ ndim_5 = np.ones(df1.shape + (1, 1, 1))
1542
+
1543
+ result = func(df1, df2)
1544
+ tm.assert_numpy_array_equal(result.values, func(df1.values, df2.values))
1545
+
1546
+ msg = (
1547
+ "Unable to coerce to Series/DataFrame, "
1548
+ "dimension must be <= 2: (30, 4, 1, 1, 1)"
1549
+ )
1550
+ with pytest.raises(ValueError, match=re.escape(msg)):
1551
+ func(df1, ndim_5)
1552
+
1553
+ result2 = func(simple_frame, row)
1554
+ tm.assert_numpy_array_equal(
1555
+ result2.values, func(simple_frame.values, row.values)
1556
+ )
1557
+
1558
+ result3 = func(float_frame, 0)
1559
+ tm.assert_numpy_array_equal(result3.values, func(float_frame.values, 0))
1560
+
1561
+ msg = (
1562
+ r"Can only compare identically-labeled \(both index and columns\) "
1563
+ "DataFrame objects"
1564
+ )
1565
+ with pytest.raises(ValueError, match=msg):
1566
+ func(simple_frame, simple_frame[:2])
1567
+
1568
+ def test_strings_to_numbers_comparisons_raises(self, compare_operators_no_eq_ne):
1569
+ # GH 11565
1570
+ df = DataFrame(
1571
+ {x: {"x": "foo", "y": "bar", "z": "baz"} for x in ["a", "b", "c"]}
1572
+ )
1573
+
1574
+ f = getattr(operator, compare_operators_no_eq_ne)
1575
+ msg = "'[<>]=?' not supported between instances of 'str' and 'int'"
1576
+ with pytest.raises(TypeError, match=msg):
1577
+ f(df, 0)
1578
+
1579
+ def test_comparison_protected_from_errstate(self):
1580
+ missing_df = DataFrame(
1581
+ np.ones((10, 4), dtype=np.float64),
1582
+ columns=Index(list("ABCD"), dtype=object),
1583
+ )
1584
+ missing_df.loc[missing_df.index[0], "A"] = np.nan
1585
+ with np.errstate(invalid="ignore"):
1586
+ expected = missing_df.values < 0
1587
+ with np.errstate(invalid="raise"):
1588
+ result = (missing_df < 0).values
1589
+ tm.assert_numpy_array_equal(result, expected)
1590
+
1591
+ def test_boolean_comparison(self):
1592
+ # GH 4576
1593
+ # boolean comparisons with a tuple/list give unexpected results
1594
+ df = DataFrame(np.arange(6).reshape((3, 2)))
1595
+ b = np.array([2, 2])
1596
+ b_r = np.atleast_2d([2, 2])
1597
+ b_c = b_r.T
1598
+ lst = [2, 2, 2]
1599
+ tup = tuple(lst)
1600
+
1601
+ # gt
1602
+ expected = DataFrame([[False, False], [False, True], [True, True]])
1603
+ result = df > b
1604
+ tm.assert_frame_equal(result, expected)
1605
+
1606
+ result = df.values > b
1607
+ tm.assert_numpy_array_equal(result, expected.values)
1608
+
1609
+ msg1d = "Unable to coerce to Series, length must be 2: given 3"
1610
+ msg2d = "Unable to coerce to DataFrame, shape must be"
1611
+ msg2db = "operands could not be broadcast together with shapes"
1612
+ with pytest.raises(ValueError, match=msg1d):
1613
+ # wrong shape
1614
+ df > lst
1615
+
1616
+ with pytest.raises(ValueError, match=msg1d):
1617
+ # wrong shape
1618
+ df > tup
1619
+
1620
+ # broadcasts like ndarray (GH#23000)
1621
+ result = df > b_r
1622
+ tm.assert_frame_equal(result, expected)
1623
+
1624
+ result = df.values > b_r
1625
+ tm.assert_numpy_array_equal(result, expected.values)
1626
+
1627
+ with pytest.raises(ValueError, match=msg2d):
1628
+ df > b_c
1629
+
1630
+ with pytest.raises(ValueError, match=msg2db):
1631
+ df.values > b_c
1632
+
1633
+ # ==
1634
+ expected = DataFrame([[False, False], [True, False], [False, False]])
1635
+ result = df == b
1636
+ tm.assert_frame_equal(result, expected)
1637
+
1638
+ with pytest.raises(ValueError, match=msg1d):
1639
+ df == lst
1640
+
1641
+ with pytest.raises(ValueError, match=msg1d):
1642
+ df == tup
1643
+
1644
+ # broadcasts like ndarray (GH#23000)
1645
+ result = df == b_r
1646
+ tm.assert_frame_equal(result, expected)
1647
+
1648
+ result = df.values == b_r
1649
+ tm.assert_numpy_array_equal(result, expected.values)
1650
+
1651
+ with pytest.raises(ValueError, match=msg2d):
1652
+ df == b_c
1653
+
1654
+ assert df.values.shape != b_c.shape
1655
+
1656
+ # with alignment
1657
+ df = DataFrame(
1658
+ np.arange(6).reshape((3, 2)), columns=list("AB"), index=list("abc")
1659
+ )
1660
+ expected.index = df.index
1661
+ expected.columns = df.columns
1662
+
1663
+ with pytest.raises(ValueError, match=msg1d):
1664
+ df == lst
1665
+
1666
+ with pytest.raises(ValueError, match=msg1d):
1667
+ df == tup
1668
+
1669
+ def test_inplace_ops_alignment(self):
1670
+ # inplace ops / ops alignment
1671
+ # GH 8511
1672
+
1673
+ columns = list("abcdefg")
1674
+ X_orig = DataFrame(
1675
+ np.arange(10 * len(columns)).reshape(-1, len(columns)),
1676
+ columns=columns,
1677
+ index=range(10),
1678
+ )
1679
+ Z = 100 * X_orig.iloc[:, 1:-1].copy()
1680
+ block1 = list("bedcf")
1681
+ subs = list("bcdef")
1682
+
1683
+ # add
1684
+ X = X_orig.copy()
1685
+ result1 = (X[block1] + Z).reindex(columns=subs)
1686
+
1687
+ X[block1] += Z
1688
+ result2 = X.reindex(columns=subs)
1689
+
1690
+ X = X_orig.copy()
1691
+ result3 = (X[block1] + Z[block1]).reindex(columns=subs)
1692
+
1693
+ X[block1] += Z[block1]
1694
+ result4 = X.reindex(columns=subs)
1695
+
1696
+ tm.assert_frame_equal(result1, result2)
1697
+ tm.assert_frame_equal(result1, result3)
1698
+ tm.assert_frame_equal(result1, result4)
1699
+
1700
+ # sub
1701
+ X = X_orig.copy()
1702
+ result1 = (X[block1] - Z).reindex(columns=subs)
1703
+
1704
+ X[block1] -= Z
1705
+ result2 = X.reindex(columns=subs)
1706
+
1707
+ X = X_orig.copy()
1708
+ result3 = (X[block1] - Z[block1]).reindex(columns=subs)
1709
+
1710
+ X[block1] -= Z[block1]
1711
+ result4 = X.reindex(columns=subs)
1712
+
1713
+ tm.assert_frame_equal(result1, result2)
1714
+ tm.assert_frame_equal(result1, result3)
1715
+ tm.assert_frame_equal(result1, result4)
1716
+
1717
+ def test_inplace_ops_identity(self):
1718
+ # GH 5104
1719
+ # make sure that we are actually changing the object
1720
+ s_orig = Series([1, 2, 3])
1721
+ df_orig = DataFrame(
1722
+ np.random.default_rng(2).integers(0, 5, size=10).reshape(-1, 5)
1723
+ )
1724
+
1725
+ # no dtype change
1726
+ s = s_orig.copy()
1727
+ s2 = s
1728
+ s += 1
1729
+ tm.assert_series_equal(s, s2)
1730
+ tm.assert_series_equal(s_orig + 1, s)
1731
+ assert s is s2
1732
+ assert s._mgr is s2._mgr
1733
+
1734
+ df = df_orig.copy()
1735
+ df2 = df
1736
+ df += 1
1737
+ tm.assert_frame_equal(df, df2)
1738
+ tm.assert_frame_equal(df_orig + 1, df)
1739
+ assert df is df2
1740
+ assert df._mgr is df2._mgr
1741
+
1742
+ # dtype change
1743
+ s = s_orig.copy()
1744
+ s2 = s
1745
+ s += 1.5
1746
+ tm.assert_series_equal(s, s2)
1747
+ tm.assert_series_equal(s_orig + 1.5, s)
1748
+
1749
+ df = df_orig.copy()
1750
+ df2 = df
1751
+ df += 1.5
1752
+ tm.assert_frame_equal(df, df2)
1753
+ tm.assert_frame_equal(df_orig + 1.5, df)
1754
+ assert df is df2
1755
+ assert df._mgr is df2._mgr
1756
+
1757
+ # mixed dtype
1758
+ arr = np.random.default_rng(2).integers(0, 10, size=5)
1759
+ df_orig = DataFrame({"A": arr.copy(), "B": "foo"})
1760
+ df = df_orig.copy()
1761
+ df2 = df
1762
+ df["A"] += 1
1763
+ expected = DataFrame({"A": arr.copy() + 1, "B": "foo"})
1764
+ tm.assert_frame_equal(df, expected)
1765
+ tm.assert_frame_equal(df2, expected)
1766
+ assert df._mgr is df2._mgr
1767
+
1768
+ df = df_orig.copy()
1769
+ df2 = df
1770
+ df["A"] += 1.5
1771
+ expected = DataFrame({"A": arr.copy() + 1.5, "B": "foo"})
1772
+ tm.assert_frame_equal(df, expected)
1773
+ tm.assert_frame_equal(df2, expected)
1774
+ assert df._mgr is df2._mgr
1775
+
1776
+ @pytest.mark.parametrize(
1777
+ "op",
1778
+ [
1779
+ "add",
1780
+ "and",
1781
+ pytest.param(
1782
+ "div",
1783
+ marks=pytest.mark.xfail(
1784
+ raises=AttributeError, reason="__idiv__ not implemented"
1785
+ ),
1786
+ ),
1787
+ "floordiv",
1788
+ "mod",
1789
+ "mul",
1790
+ "or",
1791
+ "pow",
1792
+ "sub",
1793
+ "truediv",
1794
+ "xor",
1795
+ ],
1796
+ )
1797
+ def test_inplace_ops_identity2(self, op):
1798
+ df = DataFrame({"a": [1.0, 2.0, 3.0], "b": [1, 2, 3]})
1799
+
1800
+ operand = 2
1801
+ if op in ("and", "or", "xor"):
1802
+ # cannot use floats for boolean ops
1803
+ df["a"] = [True, False, True]
1804
+
1805
+ df_copy = df.copy()
1806
+ iop = f"__i{op}__"
1807
+ op = f"__{op}__"
1808
+
1809
+ # no id change and value is correct
1810
+ getattr(df, iop)(operand)
1811
+ expected = getattr(df_copy, op)(operand)
1812
+ tm.assert_frame_equal(df, expected)
1813
+ expected = id(df)
1814
+ assert id(df) == expected
1815
+
1816
+ @pytest.mark.parametrize(
1817
+ "val",
1818
+ [
1819
+ [1, 2, 3],
1820
+ (1, 2, 3),
1821
+ np.array([1, 2, 3], dtype=np.int64),
1822
+ range(1, 4),
1823
+ ],
1824
+ )
1825
+ def test_alignment_non_pandas(self, val):
1826
+ index = ["A", "B", "C"]
1827
+ columns = ["X", "Y", "Z"]
1828
+ df = DataFrame(
1829
+ np.random.default_rng(2).standard_normal((3, 3)),
1830
+ index=index,
1831
+ columns=columns,
1832
+ )
1833
+
1834
+ align = DataFrame._align_for_op
1835
+
1836
+ expected = DataFrame({"X": val, "Y": val, "Z": val}, index=df.index)
1837
+ tm.assert_frame_equal(align(df, val, axis=0)[1], expected)
1838
+
1839
+ expected = DataFrame(
1840
+ {"X": [1, 1, 1], "Y": [2, 2, 2], "Z": [3, 3, 3]}, index=df.index
1841
+ )
1842
+ tm.assert_frame_equal(align(df, val, axis=1)[1], expected)
1843
+
1844
+ @pytest.mark.parametrize("val", [[1, 2], (1, 2), np.array([1, 2]), range(1, 3)])
1845
+ def test_alignment_non_pandas_length_mismatch(self, val):
1846
+ index = ["A", "B", "C"]
1847
+ columns = ["X", "Y", "Z"]
1848
+ df = DataFrame(
1849
+ np.random.default_rng(2).standard_normal((3, 3)),
1850
+ index=index,
1851
+ columns=columns,
1852
+ )
1853
+
1854
+ align = DataFrame._align_for_op
1855
+ # length mismatch
1856
+ msg = "Unable to coerce to Series, length must be 3: given 2"
1857
+ with pytest.raises(ValueError, match=msg):
1858
+ align(df, val, axis=0)
1859
+
1860
+ with pytest.raises(ValueError, match=msg):
1861
+ align(df, val, axis=1)
1862
+
1863
+ def test_alignment_non_pandas_index_columns(self):
1864
+ index = ["A", "B", "C"]
1865
+ columns = ["X", "Y", "Z"]
1866
+ df = DataFrame(
1867
+ np.random.default_rng(2).standard_normal((3, 3)),
1868
+ index=index,
1869
+ columns=columns,
1870
+ )
1871
+
1872
+ align = DataFrame._align_for_op
1873
+ val = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
1874
+ tm.assert_frame_equal(
1875
+ align(df, val, axis=0)[1],
1876
+ DataFrame(val, index=df.index, columns=df.columns),
1877
+ )
1878
+ tm.assert_frame_equal(
1879
+ align(df, val, axis=1)[1],
1880
+ DataFrame(val, index=df.index, columns=df.columns),
1881
+ )
1882
+
1883
+ # shape mismatch
1884
+ msg = "Unable to coerce to DataFrame, shape must be"
1885
+ val = np.array([[1, 2, 3], [4, 5, 6]])
1886
+ with pytest.raises(ValueError, match=msg):
1887
+ align(df, val, axis=0)
1888
+
1889
+ with pytest.raises(ValueError, match=msg):
1890
+ align(df, val, axis=1)
1891
+
1892
+ val = np.zeros((3, 3, 3))
1893
+ msg = re.escape(
1894
+ "Unable to coerce to Series/DataFrame, dimension must be <= 2: (3, 3, 3)"
1895
+ )
1896
+ with pytest.raises(ValueError, match=msg):
1897
+ align(df, val, axis=0)
1898
+ with pytest.raises(ValueError, match=msg):
1899
+ align(df, val, axis=1)
1900
+
1901
+ def test_no_warning(self, all_arithmetic_operators):
1902
+ df = DataFrame({"A": [0.0, 0.0], "B": [0.0, None]})
1903
+ b = df["B"]
1904
+ with tm.assert_produces_warning(None):
1905
+ getattr(df, all_arithmetic_operators)(b)
1906
+
1907
+ def test_dunder_methods_binary(self, all_arithmetic_operators):
1908
+ # GH#??? frame.__foo__ should only accept one argument
1909
+ df = DataFrame({"A": [0.0, 0.0], "B": [0.0, None]})
1910
+ b = df["B"]
1911
+ with pytest.raises(TypeError, match="takes 2 positional arguments"):
1912
+ getattr(df, all_arithmetic_operators)(b, 0)
1913
+
1914
+ def test_align_int_fill_bug(self):
1915
+ # GH#910
1916
+ X = np.arange(10 * 10, dtype="float64").reshape(10, 10)
1917
+ Y = np.ones((10, 1), dtype=int)
1918
+
1919
+ df1 = DataFrame(X)
1920
+ df1["0.X"] = Y.squeeze()
1921
+
1922
+ df2 = df1.astype(float)
1923
+
1924
+ result = df1 - df1.mean()
1925
+ expected = df2 - df2.mean()
1926
+ tm.assert_frame_equal(result, expected)
1927
+
1928
+
1929
+ def test_pow_with_realignment():
1930
+ # GH#32685 pow has special semantics for operating with null values
1931
+ left = DataFrame({"A": [0, 1, 2]})
1932
+ right = DataFrame(index=[0, 1, 2])
1933
+
1934
+ result = left**right
1935
+ expected = DataFrame({"A": [np.nan, 1.0, np.nan]})
1936
+ tm.assert_frame_equal(result, expected)
1937
+
1938
+
1939
+ def test_dataframe_series_extension_dtypes():
1940
+ # https://github.com/pandas-dev/pandas/issues/34311
1941
+ df = DataFrame(
1942
+ np.random.default_rng(2).integers(0, 100, (10, 3)), columns=["a", "b", "c"]
1943
+ )
1944
+ ser = Series([1, 2, 3], index=["a", "b", "c"])
1945
+
1946
+ expected = df.to_numpy("int64") + ser.to_numpy("int64").reshape(-1, 3)
1947
+ expected = DataFrame(expected, columns=df.columns, dtype="Int64")
1948
+
1949
+ df_ea = df.astype("Int64")
1950
+ result = df_ea + ser
1951
+ tm.assert_frame_equal(result, expected)
1952
+ result = df_ea + ser.astype("Int64")
1953
+ tm.assert_frame_equal(result, expected)
1954
+
1955
+
1956
+ def test_dataframe_blockwise_slicelike():
1957
+ # GH#34367
1958
+ arr = np.random.default_rng(2).integers(0, 1000, (100, 10))
1959
+ df1 = DataFrame(arr)
1960
+ # Explicit cast to float to avoid implicit cast when setting nan
1961
+ df2 = df1.copy().astype({1: "float", 3: "float", 7: "float"})
1962
+ df2.iloc[0, [1, 3, 7]] = np.nan
1963
+
1964
+ # Explicit cast to float to avoid implicit cast when setting nan
1965
+ df3 = df1.copy().astype({5: "float"})
1966
+ df3.iloc[0, [5]] = np.nan
1967
+
1968
+ # Explicit cast to float to avoid implicit cast when setting nan
1969
+ df4 = df1.copy().astype({2: "float", 3: "float", 4: "float"})
1970
+ df4.iloc[0, np.arange(2, 5)] = np.nan
1971
+ # Explicit cast to float to avoid implicit cast when setting nan
1972
+ df5 = df1.copy().astype({4: "float", 5: "float", 6: "float"})
1973
+ df5.iloc[0, np.arange(4, 7)] = np.nan
1974
+
1975
+ for left, right in [(df1, df2), (df2, df3), (df4, df5)]:
1976
+ res = left + right
1977
+
1978
+ expected = DataFrame({i: left[i] + right[i] for i in left.columns})
1979
+ tm.assert_frame_equal(res, expected)
1980
+
1981
+
1982
+ @pytest.mark.parametrize(
1983
+ "df, col_dtype",
1984
+ [
1985
+ (DataFrame([[1.0, 2.0], [4.0, 5.0]], columns=list("ab")), "float64"),
1986
+ (
1987
+ DataFrame([[1.0, "b"], [4.0, "b"]], columns=list("ab")).astype(
1988
+ {"b": object}
1989
+ ),
1990
+ "object",
1991
+ ),
1992
+ ],
1993
+ )
1994
+ def test_dataframe_operation_with_non_numeric_types(df, col_dtype):
1995
+ # GH #22663
1996
+ expected = DataFrame([[0.0, np.nan], [3.0, np.nan]], columns=list("ab"))
1997
+ expected = expected.astype({"b": col_dtype})
1998
+ result = df + Series([-1.0], index=list("a"))
1999
+ tm.assert_frame_equal(result, expected)
2000
+
2001
+
2002
+ def test_arith_reindex_with_duplicates():
2003
+ # https://github.com/pandas-dev/pandas/issues/35194
2004
+ df1 = DataFrame(data=[[0]], columns=["second"])
2005
+ df2 = DataFrame(data=[[0, 0, 0]], columns=["first", "second", "second"])
2006
+ result = df1 + df2
2007
+ expected = DataFrame([[np.nan, 0, 0]], columns=["first", "second", "second"])
2008
+ tm.assert_frame_equal(result, expected)
2009
+
2010
+
2011
+ @pytest.mark.parametrize("to_add", [[Series([1, 1])], [Series([1, 1]), Series([1, 1])]])
2012
+ def test_arith_list_of_arraylike_raise(to_add):
2013
+ # GH 36702. Raise when trying to add list of array-like to DataFrame
2014
+ df = DataFrame({"x": [1, 2], "y": [1, 2]})
2015
+
2016
+ msg = f"Unable to coerce list of {type(to_add[0])} to Series/DataFrame"
2017
+ with pytest.raises(ValueError, match=msg):
2018
+ df + to_add
2019
+ with pytest.raises(ValueError, match=msg):
2020
+ to_add + df
2021
+
2022
+
2023
+ def test_inplace_arithmetic_series_update(using_copy_on_write, warn_copy_on_write):
2024
+ # https://github.com/pandas-dev/pandas/issues/36373
2025
+ df = DataFrame({"A": [1, 2, 3]})
2026
+ df_orig = df.copy()
2027
+ series = df["A"]
2028
+ vals = series._values
2029
+
2030
+ with tm.assert_cow_warning(warn_copy_on_write):
2031
+ series += 1
2032
+ if using_copy_on_write:
2033
+ assert series._values is not vals
2034
+ tm.assert_frame_equal(df, df_orig)
2035
+ else:
2036
+ assert series._values is vals
2037
+
2038
+ expected = DataFrame({"A": [2, 3, 4]})
2039
+ tm.assert_frame_equal(df, expected)
2040
+
2041
+
2042
+ def test_arithmetic_multiindex_align():
2043
+ """
2044
+ Regression test for: https://github.com/pandas-dev/pandas/issues/33765
2045
+ """
2046
+ df1 = DataFrame(
2047
+ [[1]],
2048
+ index=["a"],
2049
+ columns=MultiIndex.from_product([[0], [1]], names=["a", "b"]),
2050
+ )
2051
+ df2 = DataFrame([[1]], index=["a"], columns=Index([0], name="a"))
2052
+ expected = DataFrame(
2053
+ [[0]],
2054
+ index=["a"],
2055
+ columns=MultiIndex.from_product([[0], [1]], names=["a", "b"]),
2056
+ )
2057
+ result = df1 - df2
2058
+ tm.assert_frame_equal(result, expected)
2059
+
2060
+
2061
+ def test_bool_frame_mult_float():
2062
+ # GH 18549
2063
+ df = DataFrame(True, list("ab"), list("cd"))
2064
+ result = df * 1.0
2065
+ expected = DataFrame(np.ones((2, 2)), list("ab"), list("cd"))
2066
+ tm.assert_frame_equal(result, expected)
2067
+
2068
+
2069
+ def test_frame_sub_nullable_int(any_int_ea_dtype):
2070
+ # GH 32822
2071
+ series1 = Series([1, 2, None], dtype=any_int_ea_dtype)
2072
+ series2 = Series([1, 2, 3], dtype=any_int_ea_dtype)
2073
+ expected = DataFrame([0, 0, None], dtype=any_int_ea_dtype)
2074
+ result = series1.to_frame() - series2.to_frame()
2075
+ tm.assert_frame_equal(result, expected)
2076
+
2077
+
2078
+ @pytest.mark.filterwarnings(
2079
+ "ignore:Passing a BlockManager|Passing a SingleBlockManager:DeprecationWarning"
2080
+ )
2081
+ def test_frame_op_subclass_nonclass_constructor():
2082
+ # GH#43201 subclass._constructor is a function, not the subclass itself
2083
+
2084
+ class SubclassedSeries(Series):
2085
+ @property
2086
+ def _constructor(self):
2087
+ return SubclassedSeries
2088
+
2089
+ @property
2090
+ def _constructor_expanddim(self):
2091
+ return SubclassedDataFrame
2092
+
2093
+ class SubclassedDataFrame(DataFrame):
2094
+ _metadata = ["my_extra_data"]
2095
+
2096
+ def __init__(self, my_extra_data, *args, **kwargs) -> None:
2097
+ self.my_extra_data = my_extra_data
2098
+ super().__init__(*args, **kwargs)
2099
+
2100
+ @property
2101
+ def _constructor(self):
2102
+ return functools.partial(type(self), self.my_extra_data)
2103
+
2104
+ @property
2105
+ def _constructor_sliced(self):
2106
+ return SubclassedSeries
2107
+
2108
+ sdf = SubclassedDataFrame("some_data", {"A": [1, 2, 3], "B": [4, 5, 6]})
2109
+ result = sdf * 2
2110
+ expected = SubclassedDataFrame("some_data", {"A": [2, 4, 6], "B": [8, 10, 12]})
2111
+ tm.assert_frame_equal(result, expected)
2112
+
2113
+ result = sdf + sdf
2114
+ tm.assert_frame_equal(result, expected)
2115
+
2116
+
2117
+ def test_enum_column_equality():
2118
+ Cols = Enum("Cols", "col1 col2")
2119
+
2120
+ q1 = DataFrame({Cols.col1: [1, 2, 3]})
2121
+ q2 = DataFrame({Cols.col1: [1, 2, 3]})
2122
+
2123
+ result = q1[Cols.col1] == q2[Cols.col1]
2124
+ expected = Series([True, True, True], name=Cols.col1)
2125
+
2126
+ tm.assert_series_equal(result, expected)
2127
+
2128
+
2129
+ def test_mixed_col_index_dtype():
2130
+ # GH 47382
2131
+ df1 = DataFrame(columns=list("abc"), data=1.0, index=[0])
2132
+ df2 = DataFrame(columns=list("abc"), data=0.0, index=[0])
2133
+ df1.columns = df2.columns.astype("string")
2134
+ result = df1 + df2
2135
+ expected = DataFrame(columns=list("abc"), data=1.0, index=[0])
2136
+ tm.assert_frame_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_block_internals.py ADDED
@@ -0,0 +1,457 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ datetime,
3
+ timedelta,
4
+ )
5
+ import itertools
6
+
7
+ import numpy as np
8
+ import pytest
9
+
10
+ from pandas.errors import PerformanceWarning
11
+ import pandas.util._test_decorators as td
12
+
13
+ import pandas as pd
14
+ from pandas import (
15
+ Categorical,
16
+ DataFrame,
17
+ Series,
18
+ Timestamp,
19
+ date_range,
20
+ option_context,
21
+ )
22
+ import pandas._testing as tm
23
+ from pandas.core.internals.blocks import NumpyBlock
24
+
25
+ # Segregated collection of methods that require the BlockManager internal data
26
+ # structure
27
+
28
+
29
+ # TODO(ArrayManager) check which of those tests need to be rewritten to test the
30
+ # equivalent for ArrayManager
31
+ pytestmark = td.skip_array_manager_invalid_test
32
+
33
+
34
class TestDataFrameBlockInternals:
    """Tests that poke at the BlockManager-backed internals of DataFrame."""

    def test_setitem_invalidates_datetime_index_freq(self):
        """Inplace mutation of a tz-aware column must not corrupt the source index."""
        # GH#24096 altering a datetime64tz column inplace invalidates the
        # `freq` attribute on the underlying DatetimeIndex

        dti = date_range("20130101", periods=3, tz="US/Eastern")
        ts = dti[1]

        df = DataFrame({"B": dti})
        # the column's values never carry the index's freq to begin with
        assert df["B"]._values.freq is None

        df.iloc[1, 0] = pd.NaT
        assert df["B"]._values.freq is None

        # check that the DatetimeIndex was not altered in place
        assert dti.freq == "D"
        assert dti[1] == ts

    def test_cast_internals(self, float_frame):
        """Constructing from a BlockManager with a dtype casts like from Series."""
        # passing a BlockManager directly is deprecated, hence the warning check
        msg = "Passing a BlockManager to DataFrame"
        with tm.assert_produces_warning(
            DeprecationWarning, match=msg, check_stacklevel=False
        ):
            casted = DataFrame(float_frame._mgr, dtype=int)
        expected = DataFrame(float_frame._series, dtype=int)
        tm.assert_frame_equal(casted, expected)

        with tm.assert_produces_warning(
            DeprecationWarning, match=msg, check_stacklevel=False
        ):
            casted = DataFrame(float_frame._mgr, dtype=np.int32)
        expected = DataFrame(float_frame._series, dtype=np.int32)
        tm.assert_frame_equal(casted, expected)

    def test_consolidate(self, float_frame):
        """_consolidate merges same-dtype blocks into one; inplace returns None."""
        float_frame["E"] = 7.0
        consolidated = float_frame._consolidate()
        assert len(consolidated._mgr.blocks) == 1

        # Ensure copy, do I want this?
        recons = consolidated._consolidate()
        assert recons is not consolidated
        tm.assert_frame_equal(recons, consolidated)

        # adding a column splits storage into an extra block again
        float_frame["F"] = 8.0
        assert len(float_frame._mgr.blocks) == 3

        return_value = float_frame._consolidate_inplace()
        assert return_value is None
        assert len(float_frame._mgr.blocks) == 1

    def test_consolidate_inplace(self, float_frame):
        """Smoke test: repeated column insertion triggers consolidation."""
        # triggers in-place consolidation
        for letter in range(ord("A"), ord("Z")):
            float_frame[chr(letter)] = chr(letter)

    def test_modify_values(self, float_frame, using_copy_on_write):
        """Writing into .values: read-only under CoW, a live view otherwise."""
        if using_copy_on_write:
            with pytest.raises(ValueError, match="read-only"):
                float_frame.values[5] = 5
            assert (float_frame.values[5] != 5).all()
            return

        float_frame.values[5] = 5
        assert (float_frame.values[5] == 5).all()

        # unconsolidated
        float_frame["E"] = 7.0
        col = float_frame["E"]
        float_frame.values[6] = 6
        # as of 2.0 .values does not consolidate, so subsequent calls to .values
        # does not share data
        assert not (float_frame.values[6] == 6).all()

        assert (col == 7).all()

    def test_boolean_set_uncons(self, float_frame):
        """Boolean-mask assignment works on an unconsolidated frame."""
        float_frame["E"] = 7.0

        expected = float_frame.values.copy()
        expected[expected > 1] = 2

        float_frame[float_frame > 1] = 2
        tm.assert_almost_equal(expected, float_frame.values)

    def test_constructor_with_convert(self):
        """Dtype inference for mixed scalar inputs (lib.maybe_convert_objects)."""
        # this is actually mostly a test of lib.maybe_convert_objects
        # #2845
        df = DataFrame({"A": [2**63 - 1]})
        result = df["A"]
        expected = Series(np.asarray([2**63 - 1], np.int64), name="A")
        tm.assert_series_equal(result, expected)

        # one past int64 max -> uint64
        df = DataFrame({"A": [2**63]})
        result = df["A"]
        expected = Series(np.asarray([2**63], np.uint64), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [datetime(2005, 1, 1), True]})
        result = df["A"]
        expected = Series(
            np.asarray([datetime(2005, 1, 1), True], np.object_), name="A"
        )
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [None, 1]})
        result = df["A"]
        expected = Series(np.asarray([np.nan, 1], np.float64), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [1.0, 2]})
        result = df["A"]
        expected = Series(np.asarray([1.0, 2], np.float64), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [1.0 + 2.0j, 3]})
        result = df["A"]
        expected = Series(np.asarray([1.0 + 2.0j, 3], np.complex128), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [1.0 + 2.0j, 3.0]})
        result = df["A"]
        expected = Series(np.asarray([1.0 + 2.0j, 3.0], np.complex128), name="A")
        tm.assert_series_equal(result, expected)

        # complex + bool cannot be unified -> object
        df = DataFrame({"A": [1.0 + 2.0j, True]})
        result = df["A"]
        expected = Series(np.asarray([1.0 + 2.0j, True], np.object_), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [1.0, None]})
        result = df["A"]
        expected = Series(np.asarray([1.0, np.nan], np.float64), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [1.0 + 2.0j, None]})
        result = df["A"]
        expected = Series(np.asarray([1.0 + 2.0j, np.nan], np.complex128), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [2.0, 1, True, None]})
        result = df["A"]
        expected = Series(np.asarray([2.0, 1, True, None], np.object_), name="A")
        tm.assert_series_equal(result, expected)

        df = DataFrame({"A": [2.0, 1, datetime(2006, 1, 1), None]})
        result = df["A"]
        expected = Series(
            np.asarray([2.0, 1, datetime(2006, 1, 1), None], np.object_), name="A"
        )
        tm.assert_series_equal(result, expected)

    def test_construction_with_mixed(self, float_string_frame, using_infer_string):
        """Construction and dtypes for frames mixing datetimes and timedeltas."""
        # test construction edge cases with mixed types

        # f7u12, this does not work without extensive workaround
        data = [
            [datetime(2001, 1, 5), np.nan, datetime(2001, 1, 2)],
            [datetime(2000, 1, 2), datetime(2000, 1, 3), datetime(2000, 1, 1)],
        ]
        df = DataFrame(data)

        # check dtypes
        # NOTE(review): `result` and `expected` here are overwritten below
        # without being asserted — this first check appears to be dead code.
        result = df.dtypes
        expected = Series({"datetime64[us]": 3})

        # mixed-type frames
        float_string_frame["datetime"] = datetime.now()
        float_string_frame["timedelta"] = timedelta(days=1, seconds=1)
        assert float_string_frame["datetime"].dtype == "M8[us]"
        assert float_string_frame["timedelta"].dtype == "m8[us]"
        result = float_string_frame.dtypes
        expected = Series(
            [np.dtype("float64")] * 4
            + [
                np.dtype("object") if not using_infer_string else "string",
                np.dtype("datetime64[us]"),
                np.dtype("timedelta64[us]"),
            ],
            index=list("ABCD") + ["foo", "datetime", "timedelta"],
        )
        tm.assert_series_equal(result, expected)

    def test_construction_with_conversions(self):
        """Assigning non-ns numpy datetime/timedelta arrays into a frame."""
        # convert from a numpy array of non-ns timedelta64; as of 2.0 this does
        # *not* convert
        arr = np.array([1, 2, 3], dtype="timedelta64[s]")
        df = DataFrame(index=range(3))
        df["A"] = arr
        expected = DataFrame(
            {"A": pd.timedelta_range("00:00:01", periods=3, freq="s")}, index=range(3)
        )
        tm.assert_numpy_array_equal(df["A"].to_numpy(), arr)

        expected = DataFrame(
            {
                "dt1": Timestamp("20130101"),
                "dt2": date_range("20130101", periods=3).astype("M8[s]"),
                # 'dt3' : date_range('20130101 00:00:01',periods=3,freq='s'),
                # FIXME: don't leave commented-out
            },
            index=range(3),
        )
        assert expected.dtypes["dt1"] == "M8[s]"
        assert expected.dtypes["dt2"] == "M8[s]"

        df = DataFrame(index=range(3))
        df["dt1"] = np.datetime64("2013-01-01")
        df["dt2"] = np.array(
            ["2013-01-01", "2013-01-02", "2013-01-03"], dtype="datetime64[D]"
        )

        # df['dt3'] = np.array(['2013-01-01 00:00:01','2013-01-01
        # 00:00:02','2013-01-01 00:00:03'],dtype='datetime64[s]')
        # FIXME: don't leave commented-out

        tm.assert_frame_equal(df, expected)

    def test_constructor_compound_dtypes(self):
        """Compound (structured) dtypes raise NotImplementedError."""
        # GH 5191
        # compound dtypes should raise not-implementederror

        def f(dtype):
            data = list(itertools.repeat((datetime(2001, 1, 1), "aa", 20), 9))
            return DataFrame(data=data, columns=["A", "B", "C"], dtype=dtype)

        msg = "compound dtypes are not implemented in the DataFrame constructor"
        with pytest.raises(NotImplementedError, match=msg):
            f([("A", "datetime64[h]"), ("B", "str"), ("C", "int32")])

        # pre-2.0 these used to work (though results may be unexpected)
        with pytest.raises(TypeError, match="argument must be"):
            f("int64")
        with pytest.raises(TypeError, match="argument must be"):
            f("float64")

        # 10822
        msg = "^Unknown datetime string format, unable to parse: aa, at position 0$"
        with pytest.raises(ValueError, match=msg):
            f("M8[ns]")

    def test_pickle(self, float_string_frame, timezone_frame):
        """Pickle round-trips preserve mixed-dtype, empty, and tz-aware frames."""
        empty_frame = DataFrame()

        unpickled = tm.round_trip_pickle(float_string_frame)
        tm.assert_frame_equal(float_string_frame, unpickled)

        # buglet
        float_string_frame._mgr.ndim

        # empty
        unpickled = tm.round_trip_pickle(empty_frame)
        repr(unpickled)

        # tz frame
        unpickled = tm.round_trip_pickle(timezone_frame)
        tm.assert_frame_equal(timezone_frame, unpickled)

    def test_consolidate_datetime64(self):
        """Consolidating datetime64 columns keeps their values intact."""
        # numpy vstack bug

        df = DataFrame(
            {
                "starting": pd.to_datetime(
                    [
                        "2012-06-21 00:00",
                        "2012-06-23 07:00",
                        "2012-06-23 16:30",
                        "2012-06-25 08:00",
                        "2012-06-26 12:00",
                    ]
                ),
                "ending": pd.to_datetime(
                    [
                        "2012-06-23 07:00",
                        "2012-06-23 16:30",
                        "2012-06-25 08:00",
                        "2012-06-26 12:00",
                        "2012-06-27 08:00",
                    ]
                ),
                "measure": [77, 65, 77, 0, 77],
            }
        )

        ser_starting = df.starting
        ser_starting.index = ser_starting.values
        ser_starting = ser_starting.tz_localize("US/Eastern")
        ser_starting = ser_starting.tz_convert("UTC")
        ser_starting.index.name = "starting"

        ser_ending = df.ending
        ser_ending.index = ser_ending.values
        ser_ending = ser_ending.tz_localize("US/Eastern")
        ser_ending = ser_ending.tz_convert("UTC")
        ser_ending.index.name = "ending"

        # write the tz-converted indexes back as columns
        df.starting = ser_starting.index
        df.ending = ser_ending.index

        tm.assert_index_equal(pd.DatetimeIndex(df.starting), ser_starting.index)
        tm.assert_index_equal(pd.DatetimeIndex(df.ending), ser_ending.index)

    def test_is_mixed_type(self, float_frame, float_string_frame):
        """_is_mixed_type reflects whether the frame holds multiple dtypes."""
        assert not float_frame._is_mixed_type
        assert float_string_frame._is_mixed_type

    def test_stale_cached_series_bug_473(self, using_copy_on_write, warn_copy_on_write):
        """Chained assignment must not leave a stale cached column (GH#473)."""
        # this is chained, but ok
        with option_context("chained_assignment", None):
            Y = DataFrame(
                np.random.default_rng(2).random((4, 4)),
                index=("a", "b", "c", "d"),
                columns=("e", "f", "g", "h"),
            )
            repr(Y)
            Y["e"] = Y["e"].astype("object")
            with tm.raises_chained_assignment_error():
                Y["g"]["c"] = np.nan
            repr(Y)
            Y.sum()
            Y["g"].sum()
            # under CoW the chained write never reaches the frame
            if using_copy_on_write:
                assert not pd.isna(Y["g"]["c"])
            else:
                assert pd.isna(Y["g"]["c"])

    @pytest.mark.filterwarnings("ignore:Setting a value on a view:FutureWarning")
    def test_strange_column_corruption_issue(self, using_copy_on_write):
        """Inserting many columns one at a time must not corrupt NaN masks."""
        # TODO(wesm): Unclear how exactly this is related to internal matters
        df = DataFrame(index=[0, 1])
        df[0] = np.nan
        wasCol = {}

        with tm.assert_produces_warning(
            PerformanceWarning, raise_on_extra_warnings=False
        ):
            for i, dt in enumerate(df.index):
                for col in range(100, 200):
                    if col not in wasCol:
                        wasCol[col] = 1
                        df[col] = np.nan
                    if using_copy_on_write:
                        df.loc[dt, col] = i
                    else:
                        df[col][dt] = i

        myid = 100

        first = len(df.loc[pd.isna(df[myid]), [myid]])
        second = len(df.loc[pd.isna(df[myid]), [myid]])
        assert first == second == 0

    def test_constructor_no_pandas_array(self):
        """A NumpyExtensionArray input is unwrapped into a plain numpy block."""
        # Ensure that NumpyExtensionArray isn't allowed inside Series
        # See https://github.com/pandas-dev/pandas/issues/23995 for more.
        arr = Series([1, 2, 3]).array
        result = DataFrame({"A": arr})
        expected = DataFrame({"A": [1, 2, 3]})
        tm.assert_frame_equal(result, expected)
        assert isinstance(result._mgr.blocks[0], NumpyBlock)
        assert result._mgr.blocks[0].is_numeric

    def test_add_column_with_pandas_array(self):
        """Assigning a NumpyExtensionArray column stores an object NumpyBlock."""
        # GH 26390
        df = DataFrame({"a": [1, 2, 3, 4], "b": ["a", "b", "c", "d"]})
        df["c"] = pd.arrays.NumpyExtensionArray(np.array([1, 2, None, 3], dtype=object))
        df2 = DataFrame(
            {
                "a": [1, 2, 3, 4],
                "b": ["a", "b", "c", "d"],
                "c": pd.arrays.NumpyExtensionArray(
                    np.array([1, 2, None, 3], dtype=object)
                ),
            }
        )
        assert type(df["c"]._mgr.blocks[0]) == NumpyBlock
        assert df["c"]._mgr.blocks[0].is_object
        assert type(df2["c"]._mgr.blocks[0]) == NumpyBlock
        assert df2["c"]._mgr.blocks[0].is_object
        tm.assert_frame_equal(df, df2)
415
+
416
+
417
def test_update_inplace_sets_valid_block_values(using_copy_on_write):
    """Inplace fillna on a column must keep a Categorical in block.values.

    The expected warning/error depends on the copy-on-write mode supplied
    by the fixture, so the two branches are asserted separately.
    """
    # https://github.com/pandas-dev/pandas/issues/33457
    df = DataFrame({"a": Series([1, 2, None], dtype="category")})

    # inplace update of a single column
    if using_copy_on_write:
        with tm.raises_chained_assignment_error():
            df["a"].fillna(1, inplace=True)
    else:
        with tm.assert_produces_warning(FutureWarning, match="inplace method"):
            df["a"].fillna(1, inplace=True)

    # check we haven't put a Series into any block.values
    assert isinstance(df._mgr.blocks[0].values, Categorical)

    if not using_copy_on_write:
        # smoketest for OP bug from GH#35731
        assert df.isnull().sum().sum() == 0
435
+
436
+
437
def test_nonconsolidated_item_cache_take():
    """A take on a non-consolidated frame must not leave a stale item cache."""
    # https://github.com/pandas-dev/pandas/issues/35521

    # build a non-consolidated frame: two object columns added one at a time
    frame = DataFrame()
    frame["col1"] = Series(["a"], dtype=object)
    frame["col2"] = Series([0], dtype=object)

    # populate the item cache by accessing a column
    frame["col1"] == "A"
    # take operation
    # (regression was that this consolidated but didn't reset item cache,
    # resulting in an invalid cache and the .at operation not working properly)
    frame[frame["col2"] == 0]

    # now setting value should update actual dataframe
    frame.at[0, "col1"] = "A"

    expected = DataFrame({"col1": ["A"], "col2": [0]}, dtype=object)
    tm.assert_frame_equal(frame, expected)
    assert frame.at[0, "col1"] == "A"
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_constructors.py ADDED
The diff for this file is too large to render. See raw diff
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_cumulative.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for DataFrame cumulative operations
3
+
4
+ See also
5
+ --------
6
+ tests.series.test_cumulative
7
+ """
8
+
9
+ import numpy as np
10
+ import pytest
11
+
12
+ from pandas import (
13
+ DataFrame,
14
+ Series,
15
+ )
16
+ import pandas._testing as tm
17
+
18
+
19
+ class TestDataFrameCumulativeOps:
20
+ # ---------------------------------------------------------------------
21
+ # Cumulative Operations - cumsum, cummax, ...
22
+
23
+ def test_cumulative_ops_smoke(self):
24
+ # it works
25
+ df = DataFrame({"A": np.arange(20)}, index=np.arange(20))
26
+ df.cummax()
27
+ df.cummin()
28
+ df.cumsum()
29
+
30
+ dm = DataFrame(np.arange(20).reshape(4, 5), index=range(4), columns=range(5))
31
+ # TODO(wesm): do something with this?
32
+ dm.cumsum()
33
+
34
+ def test_cumprod_smoke(self, datetime_frame):
35
+ datetime_frame.iloc[5:10, 0] = np.nan
36
+ datetime_frame.iloc[10:15, 1] = np.nan
37
+ datetime_frame.iloc[15:, 2] = np.nan
38
+
39
+ # ints
40
+ df = datetime_frame.fillna(0).astype(int)
41
+ df.cumprod(0)
42
+ df.cumprod(1)
43
+
44
+ # ints32
45
+ df = datetime_frame.fillna(0).astype(np.int32)
46
+ df.cumprod(0)
47
+ df.cumprod(1)
48
+
49
+ @pytest.mark.parametrize("method", ["cumsum", "cumprod", "cummin", "cummax"])
50
+ def test_cumulative_ops_match_series_apply(self, datetime_frame, method):
51
+ datetime_frame.iloc[5:10, 0] = np.nan
52
+ datetime_frame.iloc[10:15, 1] = np.nan
53
+ datetime_frame.iloc[15:, 2] = np.nan
54
+
55
+ # axis = 0
56
+ result = getattr(datetime_frame, method)()
57
+ expected = datetime_frame.apply(getattr(Series, method))
58
+ tm.assert_frame_equal(result, expected)
59
+
60
+ # axis = 1
61
+ result = getattr(datetime_frame, method)(axis=1)
62
+ expected = datetime_frame.apply(getattr(Series, method), axis=1)
63
+ tm.assert_frame_equal(result, expected)
64
+
65
+ # fix issue TODO: GH ref?
66
+ assert np.shape(result) == np.shape(datetime_frame)
67
+
68
+ def test_cumsum_preserve_dtypes(self):
69
+ # GH#19296 dont incorrectly upcast to object
70
+ df = DataFrame({"A": [1, 2, 3], "B": [1, 2, 3.0], "C": [True, False, False]})
71
+
72
+ result = df.cumsum()
73
+
74
+ expected = DataFrame(
75
+ {
76
+ "A": Series([1, 3, 6], dtype=np.int64),
77
+ "B": Series([1, 3, 6], dtype=np.float64),
78
+ "C": df["C"].cumsum(),
79
+ }
80
+ )
81
+ tm.assert_frame_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_iteration.py ADDED
@@ -0,0 +1,160 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import datetime
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.compat import (
7
+ IS64,
8
+ is_platform_windows,
9
+ )
10
+
11
+ from pandas import (
12
+ Categorical,
13
+ DataFrame,
14
+ Series,
15
+ date_range,
16
+ )
17
+ import pandas._testing as tm
18
+
19
+
20
+ class TestIteration:
21
+ def test_keys(self, float_frame):
22
+ assert float_frame.keys() is float_frame.columns
23
+
24
+ def test_iteritems(self):
25
+ df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=["a", "a", "b"])
26
+ for k, v in df.items():
27
+ assert isinstance(v, DataFrame._constructor_sliced)
28
+
29
+ def test_items(self):
30
+ # GH#17213, GH#13918
31
+ cols = ["a", "b", "c"]
32
+ df = DataFrame([[1, 2, 3], [4, 5, 6]], columns=cols)
33
+ for c, (k, v) in zip(cols, df.items()):
34
+ assert c == k
35
+ assert isinstance(v, Series)
36
+ assert (df[k] == v).all()
37
+
38
+ def test_items_names(self, float_string_frame):
39
+ for k, v in float_string_frame.items():
40
+ assert v.name == k
41
+
42
+ def test_iter(self, float_frame):
43
+ assert list(float_frame) == list(float_frame.columns)
44
+
45
+ def test_iterrows(self, float_frame, float_string_frame):
46
+ for k, v in float_frame.iterrows():
47
+ exp = float_frame.loc[k]
48
+ tm.assert_series_equal(v, exp)
49
+
50
+ for k, v in float_string_frame.iterrows():
51
+ exp = float_string_frame.loc[k]
52
+ tm.assert_series_equal(v, exp)
53
+
54
+ def test_iterrows_iso8601(self):
55
+ # GH#19671
56
+ s = DataFrame(
57
+ {
58
+ "non_iso8601": ["M1701", "M1802", "M1903", "M2004"],
59
+ "iso8601": date_range("2000-01-01", periods=4, freq="ME"),
60
+ }
61
+ )
62
+ for k, v in s.iterrows():
63
+ exp = s.loc[k]
64
+ tm.assert_series_equal(v, exp)
65
+
66
+ def test_iterrows_corner(self):
67
+ # GH#12222
68
+ df = DataFrame(
69
+ {
70
+ "a": [datetime.datetime(2015, 1, 1)],
71
+ "b": [None],
72
+ "c": [None],
73
+ "d": [""],
74
+ "e": [[]],
75
+ "f": [set()],
76
+ "g": [{}],
77
+ }
78
+ )
79
+ expected = Series(
80
+ [datetime.datetime(2015, 1, 1), None, None, "", [], set(), {}],
81
+ index=list("abcdefg"),
82
+ name=0,
83
+ dtype="object",
84
+ )
85
+ _, result = next(df.iterrows())
86
+ tm.assert_series_equal(result, expected)
87
+
88
+ def test_itertuples(self, float_frame):
89
+ for i, tup in enumerate(float_frame.itertuples()):
90
+ ser = DataFrame._constructor_sliced(tup[1:])
91
+ ser.name = tup[0]
92
+ expected = float_frame.iloc[i, :].reset_index(drop=True)
93
+ tm.assert_series_equal(ser, expected)
94
+
95
+ def test_itertuples_index_false(self):
96
+ df = DataFrame(
97
+ {"floats": np.random.default_rng(2).standard_normal(5), "ints": range(5)},
98
+ columns=["floats", "ints"],
99
+ )
100
+
101
+ for tup in df.itertuples(index=False):
102
+ assert isinstance(tup[1], int)
103
+
104
+ def test_itertuples_duplicate_cols(self):
105
+ df = DataFrame(data={"a": [1, 2, 3], "b": [4, 5, 6]})
106
+ dfaa = df[["a", "a"]]
107
+
108
+ assert list(dfaa.itertuples()) == [(0, 1, 1), (1, 2, 2), (2, 3, 3)]
109
+
110
+ # repr with int on 32-bit/windows
111
+ if not (is_platform_windows() or not IS64):
112
+ assert (
113
+ repr(list(df.itertuples(name=None)))
114
+ == "[(0, 1, 4), (1, 2, 5), (2, 3, 6)]"
115
+ )
116
+
117
+ def test_itertuples_tuple_name(self):
118
+ df = DataFrame(data={"a": [1, 2, 3], "b": [4, 5, 6]})
119
+ tup = next(df.itertuples(name="TestName"))
120
+ assert tup._fields == ("Index", "a", "b")
121
+ assert (tup.Index, tup.a, tup.b) == tup
122
+ assert type(tup).__name__ == "TestName"
123
+
124
+ def test_itertuples_disallowed_col_labels(self):
125
+ df = DataFrame(data={"def": [1, 2, 3], "return": [4, 5, 6]})
126
+ tup2 = next(df.itertuples(name="TestName"))
127
+ assert tup2 == (0, 1, 4)
128
+ assert tup2._fields == ("Index", "_1", "_2")
129
+
130
+ @pytest.mark.parametrize("limit", [254, 255, 1024])
131
+ @pytest.mark.parametrize("index", [True, False])
132
+ def test_itertuples_py2_3_field_limit_namedtuple(self, limit, index):
133
+ # GH#28282
134
+ df = DataFrame([{f"foo_{i}": f"bar_{i}" for i in range(limit)}])
135
+ result = next(df.itertuples(index=index))
136
+ assert isinstance(result, tuple)
137
+ assert hasattr(result, "_fields")
138
+
139
+ def test_sequence_like_with_categorical(self):
140
+ # GH#7839
141
+ # make sure can iterate
142
+ df = DataFrame(
143
+ {"id": [1, 2, 3, 4, 5, 6], "raw_grade": ["a", "b", "b", "a", "a", "e"]}
144
+ )
145
+ df["grade"] = Categorical(df["raw_grade"])
146
+
147
+ # basic sequencing testing
148
+ result = list(df.grade.values)
149
+ expected = np.array(df.grade.values).tolist()
150
+ tm.assert_almost_equal(result, expected)
151
+
152
+ # iteration
153
+ for t in df.itertuples(index=False):
154
+ str(t)
155
+
156
+ for row, s in df.iterrows():
157
+ str(s)
158
+
159
+ for c, col in df.items():
160
+ str(col)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_logical_ops.py ADDED
@@ -0,0 +1,218 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import operator
2
+ import re
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas import (
8
+ CategoricalIndex,
9
+ DataFrame,
10
+ Interval,
11
+ Series,
12
+ isnull,
13
+ )
14
+ import pandas._testing as tm
15
+
16
+
17
class TestDataFrameLogicalOperators:
    """Element-wise logical operators (&, |, ^) on DataFrame/Series."""

    # &, |, ^

    @pytest.mark.parametrize(
        "left, right, op, expected",
        [
            (
                [True, False, np.nan],
                [True, False, True],
                operator.and_,
                [True, False, False],
            ),
            (
                [True, False, True],
                [True, False, np.nan],
                operator.and_,
                [True, False, False],
            ),
            (
                [True, False, np.nan],
                [True, False, True],
                operator.or_,
                [True, False, False],
            ),
            (
                [True, False, True],
                [True, False, np.nan],
                operator.or_,
                [True, False, True],
            ),
        ],
    )
    def test_logical_operators_nans(self, left, right, op, expected, frame_or_series):
        """NaN behaves asymmetrically: NaN on the *left* side acts as False."""
        # GH#13896
        result = op(frame_or_series(left), frame_or_series(right))
        expected = frame_or_series(expected)

        tm.assert_equal(result, expected)

    def test_logical_ops_empty_frame(self):
        """Logical ops on empty frames align and return empty/False frames."""
        # GH#5808
        # empty frames, non-mixed dtype
        df = DataFrame(index=[1])

        result = df & df
        tm.assert_frame_equal(result, df)

        result = df | df
        tm.assert_frame_equal(result, df)

        df2 = DataFrame(index=[1, 2])
        result = df & df2
        tm.assert_frame_equal(result, df2)

        dfa = DataFrame(index=[1], columns=["A"])

        # all-NaN column & itself -> False
        result = dfa & dfa
        expected = DataFrame(False, index=[1], columns=["A"])
        tm.assert_frame_equal(result, expected)

    def test_logical_ops_bool_frame(self):
        """& and | on an all-True bool frame are identity operations."""
        # GH#5808
        df1a_bool = DataFrame(True, index=[1], columns=["A"])

        result = df1a_bool & df1a_bool
        tm.assert_frame_equal(result, df1a_bool)

        result = df1a_bool | df1a_bool
        tm.assert_frame_equal(result, df1a_bool)

    def test_logical_ops_int_frame(self):
        """int | bool coerces to bool, consistent with Series behavior."""
        # GH#5808
        df1a_int = DataFrame(1, index=[1], columns=["A"])
        df1a_bool = DataFrame(True, index=[1], columns=["A"])

        result = df1a_int | df1a_bool
        tm.assert_frame_equal(result, df1a_bool)

        # Check that this matches Series behavior
        res_ser = df1a_int["A"] | df1a_bool["A"]
        tm.assert_series_equal(res_ser, df1a_bool["A"])

    def test_logical_ops_invalid(self, using_infer_string):
        """float|bool and str|bool raise (TypeError or Arrow error)."""
        # GH#5808

        df1 = DataFrame(1.0, index=[1], columns=["A"])
        df2 = DataFrame(True, index=[1], columns=["A"])
        msg = re.escape("unsupported operand type(s) for |: 'float' and 'bool'")
        with pytest.raises(TypeError, match=msg):
            df1 | df2

        df1 = DataFrame("foo", index=[1], columns=["A"])
        df2 = DataFrame(True, index=[1], columns=["A"])
        msg = re.escape("unsupported operand type(s) for |: 'str' and 'bool'")
        if using_infer_string:
            import pyarrow as pa

            # NOTE(review): the leading "|" makes this regex match any
            # message ("" or "has no kernel") — confirm that is intended.
            with pytest.raises(pa.lib.ArrowNotImplementedError, match="|has no kernel"):
                df1 | df2
        else:
            with pytest.raises(TypeError, match=msg):
                df1 | df2

    def test_logical_operators(self):
        """&, |, ^ and ~ on bool frames match the equivalent numpy ops."""

        def _check_bin_op(op):
            # binary op result equals op applied to the underlying ndarrays
            result = op(df1, df2)
            expected = DataFrame(
                op(df1.values, df2.values), index=df1.index, columns=df1.columns
            )
            assert result.values.dtype == np.bool_
            tm.assert_frame_equal(result, expected)

        def _check_unary_op(op):
            result = op(df1)
            expected = DataFrame(op(df1.values), index=df1.index, columns=df1.columns)
            assert result.values.dtype == np.bool_
            tm.assert_frame_equal(result, expected)

        df1 = {
            "a": {"a": True, "b": False, "c": False, "d": True, "e": True},
            "b": {"a": False, "b": True, "c": False, "d": False, "e": False},
            "c": {"a": False, "b": False, "c": True, "d": False, "e": False},
            "d": {"a": True, "b": False, "c": False, "d": True, "e": True},
            "e": {"a": True, "b": False, "c": False, "d": True, "e": True},
        }

        df2 = {
            "a": {"a": True, "b": False, "c": True, "d": False, "e": False},
            "b": {"a": False, "b": True, "c": False, "d": False, "e": False},
            "c": {"a": True, "b": False, "c": True, "d": False, "e": False},
            "d": {"a": False, "b": False, "c": False, "d": True, "e": False},
            "e": {"a": False, "b": False, "c": False, "d": False, "e": True},
        }

        df1 = DataFrame(df1)
        df2 = DataFrame(df2)

        _check_bin_op(operator.and_)
        _check_bin_op(operator.or_)
        _check_bin_op(operator.xor)

        _check_unary_op(operator.inv)  # TODO: belongs elsewhere

    @pytest.mark.filterwarnings("ignore:Downcasting object dtype arrays:FutureWarning")
    def test_logical_with_nas(self):
        """Bool ops with NaN return bool; fillna first gives the usual result."""
        d = DataFrame({"a": [np.nan, False], "b": [True, True]})

        # GH4947
        # bool comparisons should return bool
        result = d["a"] | d["b"]
        expected = Series([False, True])
        tm.assert_series_equal(result, expected)

        # GH4604, automatic casting here
        result = d["a"].fillna(False) | d["b"]
        expected = Series([True, True])
        tm.assert_series_equal(result, expected)

        # the explicit downcast=False path is deprecated
        msg = "The 'downcast' keyword in fillna is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = d["a"].fillna(False, downcast=False) | d["b"]
        expected = Series([True, True])
        tm.assert_series_equal(result, expected)

    def test_logical_ops_categorical_columns(self):
        """| works on frames whose columns are a CategoricalIndex of Intervals."""
        # GH#38367
        intervals = [Interval(1, 2), Interval(3, 4)]
        data = DataFrame(
            [[1, np.nan], [2, np.nan]],
            columns=CategoricalIndex(
                intervals, categories=intervals + [Interval(5, 6)]
            ),
        )
        mask = DataFrame(
            [[False, False], [False, False]], columns=data.columns, dtype=bool
        )
        result = mask | isnull(data)
        expected = DataFrame(
            [[False, True], [False, True]],
            columns=CategoricalIndex(
                intervals, categories=intervals + [Interval(5, 6)]
            ),
        )
        tm.assert_frame_equal(result, expected)

    def test_int_dtype_different_index_not_bool(self):
        """XOR on int frames aligns indexes and stays integer-valued."""
        # GH 52500
        df1 = DataFrame([1, 2, 3], index=[10, 11, 23], columns=["a"])
        df2 = DataFrame([10, 20, 30], index=[11, 10, 23], columns=["a"])
        result = np.bitwise_xor(df1, df2)
        expected = DataFrame([21, 8, 29], index=[10, 11, 23], columns=["a"])
        tm.assert_frame_equal(result, expected)

        result = df1 ^ df2
        tm.assert_frame_equal(result, expected)

    def test_different_dtypes_different_index_raises(self):
        """Mismatched dtypes after alignment raise TypeError."""
        # GH 52538
        df1 = DataFrame([1, 2], index=["a", "b"])
        df2 = DataFrame([3, 4], index=["b", "c"])
        with pytest.raises(TypeError, match="unsupported operand type"):
            df1 & df2
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_nonunique_indexes.py ADDED
@@ -0,0 +1,337 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ DataFrame,
7
+ Series,
8
+ date_range,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ class TestDataFrameNonuniqueIndexes:
14
+ def test_setattr_columns_vs_construct_with_columns(self):
15
+ # assignment
16
+ # GH 3687
17
+ arr = np.random.default_rng(2).standard_normal((3, 2))
18
+ idx = list(range(2))
19
+ df = DataFrame(arr, columns=["A", "A"])
20
+ df.columns = idx
21
+ expected = DataFrame(arr, columns=idx)
22
+ tm.assert_frame_equal(df, expected)
23
+
24
+ def test_setattr_columns_vs_construct_with_columns_datetimeindx(self):
25
+ idx = date_range("20130101", periods=4, freq="QE-NOV")
26
+ df = DataFrame(
27
+ [[1, 1, 1, 5], [1, 1, 2, 5], [2, 1, 3, 5]], columns=["a", "a", "a", "a"]
28
+ )
29
+ df.columns = idx
30
+ expected = DataFrame([[1, 1, 1, 5], [1, 1, 2, 5], [2, 1, 3, 5]], columns=idx)
31
+ tm.assert_frame_equal(df, expected)
32
+
33
+ def test_insert_with_duplicate_columns(self):
34
+ # insert
35
+ df = DataFrame(
36
+ [[1, 1, 1, 5], [1, 1, 2, 5], [2, 1, 3, 5]],
37
+ columns=["foo", "bar", "foo", "hello"],
38
+ )
39
+ df["string"] = "bah"
40
+ expected = DataFrame(
41
+ [[1, 1, 1, 5, "bah"], [1, 1, 2, 5, "bah"], [2, 1, 3, 5, "bah"]],
42
+ columns=["foo", "bar", "foo", "hello", "string"],
43
+ )
44
+ tm.assert_frame_equal(df, expected)
45
+ with pytest.raises(ValueError, match="Length of value"):
46
+ df.insert(0, "AnotherColumn", range(len(df.index) - 1))
47
+
48
+ # insert same dtype
49
+ df["foo2"] = 3
50
+ expected = DataFrame(
51
+ [[1, 1, 1, 5, "bah", 3], [1, 1, 2, 5, "bah", 3], [2, 1, 3, 5, "bah", 3]],
52
+ columns=["foo", "bar", "foo", "hello", "string", "foo2"],
53
+ )
54
+ tm.assert_frame_equal(df, expected)
55
+
56
+ # set (non-dup)
57
+ df["foo2"] = 4
58
+ expected = DataFrame(
59
+ [[1, 1, 1, 5, "bah", 4], [1, 1, 2, 5, "bah", 4], [2, 1, 3, 5, "bah", 4]],
60
+ columns=["foo", "bar", "foo", "hello", "string", "foo2"],
61
+ )
62
+ tm.assert_frame_equal(df, expected)
63
+ df["foo2"] = 3
64
+
65
+ # delete (non dup)
66
+ del df["bar"]
67
+ expected = DataFrame(
68
+ [[1, 1, 5, "bah", 3], [1, 2, 5, "bah", 3], [2, 3, 5, "bah", 3]],
69
+ columns=["foo", "foo", "hello", "string", "foo2"],
70
+ )
71
+ tm.assert_frame_equal(df, expected)
72
+
73
+ # try to delete again (its not consolidated)
74
+ del df["hello"]
75
+ expected = DataFrame(
76
+ [[1, 1, "bah", 3], [1, 2, "bah", 3], [2, 3, "bah", 3]],
77
+ columns=["foo", "foo", "string", "foo2"],
78
+ )
79
+ tm.assert_frame_equal(df, expected)
80
+
81
+ # consolidate
82
+ df = df._consolidate()
83
+ expected = DataFrame(
84
+ [[1, 1, "bah", 3], [1, 2, "bah", 3], [2, 3, "bah", 3]],
85
+ columns=["foo", "foo", "string", "foo2"],
86
+ )
87
+ tm.assert_frame_equal(df, expected)
88
+
89
+ # insert
90
+ df.insert(2, "new_col", 5.0)
91
+ expected = DataFrame(
92
+ [[1, 1, 5.0, "bah", 3], [1, 2, 5.0, "bah", 3], [2, 3, 5.0, "bah", 3]],
93
+ columns=["foo", "foo", "new_col", "string", "foo2"],
94
+ )
95
+ tm.assert_frame_equal(df, expected)
96
+
97
+ # insert a dup
98
+ with pytest.raises(ValueError, match="cannot insert"):
99
+ df.insert(2, "new_col", 4.0)
100
+
101
+ df.insert(2, "new_col", 4.0, allow_duplicates=True)
102
+ expected = DataFrame(
103
+ [
104
+ [1, 1, 4.0, 5.0, "bah", 3],
105
+ [1, 2, 4.0, 5.0, "bah", 3],
106
+ [2, 3, 4.0, 5.0, "bah", 3],
107
+ ],
108
+ columns=["foo", "foo", "new_col", "new_col", "string", "foo2"],
109
+ )
110
+ tm.assert_frame_equal(df, expected)
111
+
112
+ # delete (dup)
113
+ del df["foo"]
114
+ expected = DataFrame(
115
+ [[4.0, 5.0, "bah", 3], [4.0, 5.0, "bah", 3], [4.0, 5.0, "bah", 3]],
116
+ columns=["new_col", "new_col", "string", "foo2"],
117
+ )
118
+ tm.assert_frame_equal(df, expected)
119
+
120
+ def test_dup_across_dtypes(self):
121
+ # dup across dtypes
122
+ df = DataFrame(
123
+ [[1, 1, 1.0, 5], [1, 1, 2.0, 5], [2, 1, 3.0, 5]],
124
+ columns=["foo", "bar", "foo", "hello"],
125
+ )
126
+
127
+ df["foo2"] = 7.0
128
+ expected = DataFrame(
129
+ [[1, 1, 1.0, 5, 7.0], [1, 1, 2.0, 5, 7.0], [2, 1, 3.0, 5, 7.0]],
130
+ columns=["foo", "bar", "foo", "hello", "foo2"],
131
+ )
132
+ tm.assert_frame_equal(df, expected)
133
+
134
+ result = df["foo"]
135
+ expected = DataFrame([[1, 1.0], [1, 2.0], [2, 3.0]], columns=["foo", "foo"])
136
+ tm.assert_frame_equal(result, expected)
137
+
138
+ # multiple replacements
139
+ df["foo"] = "string"
140
+ expected = DataFrame(
141
+ [
142
+ ["string", 1, "string", 5, 7.0],
143
+ ["string", 1, "string", 5, 7.0],
144
+ ["string", 1, "string", 5, 7.0],
145
+ ],
146
+ columns=["foo", "bar", "foo", "hello", "foo2"],
147
+ )
148
+ tm.assert_frame_equal(df, expected)
149
+
150
+ del df["foo"]
151
+ expected = DataFrame(
152
+ [[1, 5, 7.0], [1, 5, 7.0], [1, 5, 7.0]], columns=["bar", "hello", "foo2"]
153
+ )
154
+ tm.assert_frame_equal(df, expected)
155
+
156
+ def test_column_dups_indexes(self):
157
+ # check column dups with index equal and not equal to df's index
158
+ df = DataFrame(
159
+ np.random.default_rng(2).standard_normal((5, 3)),
160
+ index=["a", "b", "c", "d", "e"],
161
+ columns=["A", "B", "A"],
162
+ )
163
+ for index in [df.index, pd.Index(list("edcba"))]:
164
+ this_df = df.copy()
165
+ expected_ser = Series(index.values, index=this_df.index)
166
+ expected_df = DataFrame(
167
+ {"A": expected_ser, "B": this_df["B"]},
168
+ columns=["A", "B", "A"],
169
+ )
170
+ this_df["A"] = index
171
+ tm.assert_frame_equal(this_df, expected_df)
172
+
173
+ def test_changing_dtypes_with_duplicate_columns(self):
174
+ # multiple assignments that change dtypes
175
+ # the location indexer is a slice
176
+ # GH 6120
177
+ df = DataFrame(
178
+ np.random.default_rng(2).standard_normal((5, 2)), columns=["that", "that"]
179
+ )
180
+ expected = DataFrame(1.0, index=range(5), columns=["that", "that"])
181
+
182
+ df["that"] = 1.0
183
+ tm.assert_frame_equal(df, expected)
184
+
185
+ df = DataFrame(
186
+ np.random.default_rng(2).random((5, 2)), columns=["that", "that"]
187
+ )
188
+ expected = DataFrame(1, index=range(5), columns=["that", "that"])
189
+
190
+ df["that"] = 1
191
+ tm.assert_frame_equal(df, expected)
192
+
193
+ def test_dup_columns_comparisons(self):
194
+ # equality
195
+ df1 = DataFrame([[1, 2], [2, np.nan], [3, 4], [4, 4]], columns=["A", "B"])
196
+ df2 = DataFrame([[0, 1], [2, 4], [2, np.nan], [4, 5]], columns=["A", "A"])
197
+
198
+ # not-comparing like-labelled
199
+ msg = (
200
+ r"Can only compare identically-labeled \(both index and columns\) "
201
+ "DataFrame objects"
202
+ )
203
+ with pytest.raises(ValueError, match=msg):
204
+ df1 == df2
205
+
206
+ df1r = df1.reindex_like(df2)
207
+ result = df1r == df2
208
+ expected = DataFrame(
209
+ [[False, True], [True, False], [False, False], [True, False]],
210
+ columns=["A", "A"],
211
+ )
212
+ tm.assert_frame_equal(result, expected)
213
+
214
+ def test_mixed_column_selection(self):
215
+ # mixed column selection
216
+ # GH 5639
217
+ dfbool = DataFrame(
218
+ {
219
+ "one": Series([True, True, False], index=["a", "b", "c"]),
220
+ "two": Series([False, False, True, False], index=["a", "b", "c", "d"]),
221
+ "three": Series([False, True, True, True], index=["a", "b", "c", "d"]),
222
+ }
223
+ )
224
+ expected = pd.concat([dfbool["one"], dfbool["three"], dfbool["one"]], axis=1)
225
+ result = dfbool[["one", "three", "one"]]
226
+ tm.assert_frame_equal(result, expected)
227
+
228
+ def test_multi_axis_dups(self):
229
+ # multi-axis dups
230
+ # GH 6121
231
+ df = DataFrame(
232
+ np.arange(25.0).reshape(5, 5),
233
+ index=["a", "b", "c", "d", "e"],
234
+ columns=["A", "B", "C", "D", "E"],
235
+ )
236
+ z = df[["A", "C", "A"]].copy()
237
+ expected = z.loc[["a", "c", "a"]]
238
+
239
+ df = DataFrame(
240
+ np.arange(25.0).reshape(5, 5),
241
+ index=["a", "b", "c", "d", "e"],
242
+ columns=["A", "B", "C", "D", "E"],
243
+ )
244
+ z = df[["A", "C", "A"]]
245
+ result = z.loc[["a", "c", "a"]]
246
+ tm.assert_frame_equal(result, expected)
247
+
248
+ def test_columns_with_dups(self):
249
+ # GH 3468 related
250
+
251
+ # basic
252
+ df = DataFrame([[1, 2]], columns=["a", "a"])
253
+ df.columns = ["a", "a.1"]
254
+ expected = DataFrame([[1, 2]], columns=["a", "a.1"])
255
+ tm.assert_frame_equal(df, expected)
256
+
257
+ df = DataFrame([[1, 2, 3]], columns=["b", "a", "a"])
258
+ df.columns = ["b", "a", "a.1"]
259
+ expected = DataFrame([[1, 2, 3]], columns=["b", "a", "a.1"])
260
+ tm.assert_frame_equal(df, expected)
261
+
262
+ def test_columns_with_dup_index(self):
263
+ # with a dup index
264
+ df = DataFrame([[1, 2]], columns=["a", "a"])
265
+ df.columns = ["b", "b"]
266
+ expected = DataFrame([[1, 2]], columns=["b", "b"])
267
+ tm.assert_frame_equal(df, expected)
268
+
269
+ def test_multi_dtype(self):
270
+ # multi-dtype
271
+ df = DataFrame(
272
+ [[1, 2, 1.0, 2.0, 3.0, "foo", "bar"]],
273
+ columns=["a", "a", "b", "b", "d", "c", "c"],
274
+ )
275
+ df.columns = list("ABCDEFG")
276
+ expected = DataFrame(
277
+ [[1, 2, 1.0, 2.0, 3.0, "foo", "bar"]], columns=list("ABCDEFG")
278
+ )
279
+ tm.assert_frame_equal(df, expected)
280
+
281
+ def test_multi_dtype2(self):
282
+ df = DataFrame([[1, 2, "foo", "bar"]], columns=["a", "a", "a", "a"])
283
+ df.columns = ["a", "a.1", "a.2", "a.3"]
284
+ expected = DataFrame([[1, 2, "foo", "bar"]], columns=["a", "a.1", "a.2", "a.3"])
285
+ tm.assert_frame_equal(df, expected)
286
+
287
+ def test_dups_across_blocks(self, using_array_manager):
288
+ # dups across blocks
289
+ df_float = DataFrame(
290
+ np.random.default_rng(2).standard_normal((10, 3)), dtype="float64"
291
+ )
292
+ df_int = DataFrame(
293
+ np.random.default_rng(2).standard_normal((10, 3)).astype("int64")
294
+ )
295
+ df_bool = DataFrame(True, index=df_float.index, columns=df_float.columns)
296
+ df_object = DataFrame("foo", index=df_float.index, columns=df_float.columns)
297
+ df_dt = DataFrame(
298
+ pd.Timestamp("20010101"), index=df_float.index, columns=df_float.columns
299
+ )
300
+ df = pd.concat([df_float, df_int, df_bool, df_object, df_dt], axis=1)
301
+
302
+ if not using_array_manager:
303
+ assert len(df._mgr.blknos) == len(df.columns)
304
+ assert len(df._mgr.blklocs) == len(df.columns)
305
+
306
+ # testing iloc
307
+ for i in range(len(df.columns)):
308
+ df.iloc[:, i]
309
+
310
+ def test_dup_columns_across_dtype(self):
311
+ # dup columns across dtype GH 2079/2194
312
+ vals = [[1, -1, 2.0], [2, -2, 3.0]]
313
+ rs = DataFrame(vals, columns=["A", "A", "B"])
314
+ xp = DataFrame(vals)
315
+ xp.columns = ["A", "A", "B"]
316
+ tm.assert_frame_equal(rs, xp)
317
+
318
+ def test_set_value_by_index(self):
319
+ # See gh-12344
320
+ warn = None
321
+ msg = "will attempt to set the values inplace"
322
+
323
+ df = DataFrame(np.arange(9).reshape(3, 3).T)
324
+ df.columns = list("AAA")
325
+ expected = df.iloc[:, 2].copy()
326
+
327
+ with tm.assert_produces_warning(warn, match=msg):
328
+ df.iloc[:, 0] = 3
329
+ tm.assert_series_equal(df.iloc[:, 2], expected)
330
+
331
+ df = DataFrame(np.arange(9).reshape(3, 3).T)
332
+ df.columns = [2, float(2), str(2)]
333
+ expected = df.iloc[:, 1].copy()
334
+
335
+ with tm.assert_produces_warning(warn, match=msg):
336
+ df.iloc[:, 0] = 3
337
+ tm.assert_series_equal(df.iloc[:, 1], expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_npfuncs.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for np.foo applied to DataFrame, not necessarily ufuncs.
3
+ """
4
+ import numpy as np
5
+
6
+ from pandas import (
7
+ Categorical,
8
+ DataFrame,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
+ class TestAsArray:
14
+ def test_asarray_homogeneous(self):
15
+ df = DataFrame({"A": Categorical([1, 2]), "B": Categorical([1, 2])})
16
+ result = np.asarray(df)
17
+ # may change from object in the future
18
+ expected = np.array([[1, 1], [2, 2]], dtype="object")
19
+ tm.assert_numpy_array_equal(result, expected)
20
+
21
+ def test_np_sqrt(self, float_frame):
22
+ with np.errstate(all="ignore"):
23
+ result = np.sqrt(float_frame)
24
+ assert isinstance(result, type(float_frame))
25
+ assert result.index.is_(float_frame.index)
26
+ assert result.columns.is_(float_frame.columns)
27
+
28
+ tm.assert_frame_equal(result, float_frame.apply(np.sqrt))
29
+
30
+ def test_sum_deprecated_axis_behavior(self):
31
+ # GH#52042 deprecated behavior of df.sum(axis=None), which gets
32
+ # called when we do np.sum(df)
33
+
34
+ arr = np.random.default_rng(2).standard_normal((4, 3))
35
+ df = DataFrame(arr)
36
+
37
+ msg = "The behavior of DataFrame.sum with axis=None is deprecated"
38
+ with tm.assert_produces_warning(
39
+ FutureWarning, match=msg, check_stacklevel=False
40
+ ):
41
+ res = np.sum(df)
42
+
43
+ with tm.assert_produces_warning(FutureWarning, match=msg):
44
+ expected = df.sum(axis=None)
45
+ tm.assert_series_equal(res, expected)
46
+
47
+ def test_np_ravel(self):
48
+ # GH26247
49
+ arr = np.array(
50
+ [
51
+ [0.11197053, 0.44361564, -0.92589452],
52
+ [0.05883648, -0.00948922, -0.26469934],
53
+ ]
54
+ )
55
+
56
+ result = np.ravel([DataFrame(batch.reshape(1, 3)) for batch in arr])
57
+ expected = np.array(
58
+ [
59
+ 0.11197053,
60
+ 0.44361564,
61
+ -0.92589452,
62
+ 0.05883648,
63
+ -0.00948922,
64
+ -0.26469934,
65
+ ]
66
+ )
67
+ tm.assert_numpy_array_equal(result, expected)
68
+
69
+ result = np.ravel(DataFrame(arr[0].reshape(1, 3), columns=["x1", "x2", "x3"]))
70
+ expected = np.array([0.11197053, 0.44361564, -0.92589452])
71
+ tm.assert_numpy_array_equal(result, expected)
72
+
73
+ result = np.ravel(
74
+ [
75
+ DataFrame(batch.reshape(1, 3), columns=["x1", "x2", "x3"])
76
+ for batch in arr
77
+ ]
78
+ )
79
+ expected = np.array(
80
+ [
81
+ 0.11197053,
82
+ 0.44361564,
83
+ -0.92589452,
84
+ 0.05883648,
85
+ -0.00948922,
86
+ -0.26469934,
87
+ ]
88
+ )
89
+ tm.assert_numpy_array_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_query_eval.py ADDED
@@ -0,0 +1,1425 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import operator
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.errors import (
7
+ NumExprClobberingError,
8
+ UndefinedVariableError,
9
+ )
10
+ import pandas.util._test_decorators as td
11
+
12
+ import pandas as pd
13
+ from pandas import (
14
+ DataFrame,
15
+ Index,
16
+ MultiIndex,
17
+ Series,
18
+ date_range,
19
+ )
20
+ import pandas._testing as tm
21
+ from pandas.core.computation.check import NUMEXPR_INSTALLED
22
+
23
+
24
+ @pytest.fixture(params=["python", "pandas"], ids=lambda x: x)
25
+ def parser(request):
26
+ return request.param
27
+
28
+
29
+ @pytest.fixture(
30
+ params=["python", pytest.param("numexpr", marks=td.skip_if_no("numexpr"))],
31
+ ids=lambda x: x,
32
+ )
33
+ def engine(request):
34
+ return request.param
35
+
36
+
37
+ def skip_if_no_pandas_parser(parser):
38
+ if parser != "pandas":
39
+ pytest.skip(f"cannot evaluate with parser={parser}")
40
+
41
+
42
+ class TestCompat:
43
+ @pytest.fixture
44
+ def df(self):
45
+ return DataFrame({"A": [1, 2, 3]})
46
+
47
+ @pytest.fixture
48
+ def expected1(self, df):
49
+ return df[df.A > 0]
50
+
51
+ @pytest.fixture
52
+ def expected2(self, df):
53
+ return df.A + 1
54
+
55
+ def test_query_default(self, df, expected1, expected2):
56
+ # GH 12749
57
+ # this should always work, whether NUMEXPR_INSTALLED or not
58
+ result = df.query("A>0")
59
+ tm.assert_frame_equal(result, expected1)
60
+ result = df.eval("A+1")
61
+ tm.assert_series_equal(result, expected2, check_names=False)
62
+
63
+ def test_query_None(self, df, expected1, expected2):
64
+ result = df.query("A>0", engine=None)
65
+ tm.assert_frame_equal(result, expected1)
66
+ result = df.eval("A+1", engine=None)
67
+ tm.assert_series_equal(result, expected2, check_names=False)
68
+
69
+ def test_query_python(self, df, expected1, expected2):
70
+ result = df.query("A>0", engine="python")
71
+ tm.assert_frame_equal(result, expected1)
72
+ result = df.eval("A+1", engine="python")
73
+ tm.assert_series_equal(result, expected2, check_names=False)
74
+
75
+ def test_query_numexpr(self, df, expected1, expected2):
76
+ if NUMEXPR_INSTALLED:
77
+ result = df.query("A>0", engine="numexpr")
78
+ tm.assert_frame_equal(result, expected1)
79
+ result = df.eval("A+1", engine="numexpr")
80
+ tm.assert_series_equal(result, expected2, check_names=False)
81
+ else:
82
+ msg = (
83
+ r"'numexpr' is not installed or an unsupported version. "
84
+ r"Cannot use engine='numexpr' for query/eval if 'numexpr' is "
85
+ r"not installed"
86
+ )
87
+ with pytest.raises(ImportError, match=msg):
88
+ df.query("A>0", engine="numexpr")
89
+ with pytest.raises(ImportError, match=msg):
90
+ df.eval("A+1", engine="numexpr")
91
+
92
+
93
+ class TestDataFrameEval:
94
+ # smaller hits python, larger hits numexpr
95
+ @pytest.mark.parametrize("n", [4, 4000])
96
+ @pytest.mark.parametrize(
97
+ "op_str,op,rop",
98
+ [
99
+ ("+", "__add__", "__radd__"),
100
+ ("-", "__sub__", "__rsub__"),
101
+ ("*", "__mul__", "__rmul__"),
102
+ ("/", "__truediv__", "__rtruediv__"),
103
+ ],
104
+ )
105
+ def test_ops(self, op_str, op, rop, n):
106
+ # tst ops and reversed ops in evaluation
107
+ # GH7198
108
+
109
+ df = DataFrame(1, index=range(n), columns=list("abcd"))
110
+ df.iloc[0] = 2
111
+ m = df.mean()
112
+
113
+ base = DataFrame( # noqa: F841
114
+ np.tile(m.values, n).reshape(n, -1), columns=list("abcd")
115
+ )
116
+
117
+ expected = eval(f"base {op_str} df")
118
+
119
+ # ops as strings
120
+ result = eval(f"m {op_str} df")
121
+ tm.assert_frame_equal(result, expected)
122
+
123
+ # these are commutative
124
+ if op in ["+", "*"]:
125
+ result = getattr(df, op)(m)
126
+ tm.assert_frame_equal(result, expected)
127
+
128
+ # these are not
129
+ elif op in ["-", "/"]:
130
+ result = getattr(df, rop)(m)
131
+ tm.assert_frame_equal(result, expected)
132
+
133
+ def test_dataframe_sub_numexpr_path(self):
134
+ # GH7192: Note we need a large number of rows to ensure this
135
+ # goes through the numexpr path
136
+ df = DataFrame({"A": np.random.default_rng(2).standard_normal(25000)})
137
+ df.iloc[0:5] = np.nan
138
+ expected = 1 - np.isnan(df.iloc[0:25])
139
+ result = (1 - np.isnan(df)).iloc[0:25]
140
+ tm.assert_frame_equal(result, expected)
141
+
142
+ def test_query_non_str(self):
143
+ # GH 11485
144
+ df = DataFrame({"A": [1, 2, 3], "B": ["a", "b", "b"]})
145
+
146
+ msg = "expr must be a string to be evaluated"
147
+ with pytest.raises(ValueError, match=msg):
148
+ df.query(lambda x: x.B == "b")
149
+
150
+ with pytest.raises(ValueError, match=msg):
151
+ df.query(111)
152
+
153
+ def test_query_empty_string(self):
154
+ # GH 13139
155
+ df = DataFrame({"A": [1, 2, 3]})
156
+
157
+ msg = "expr cannot be an empty string"
158
+ with pytest.raises(ValueError, match=msg):
159
+ df.query("")
160
+
161
+ def test_eval_resolvers_as_list(self):
162
+ # GH 14095
163
+ df = DataFrame(
164
+ np.random.default_rng(2).standard_normal((10, 2)), columns=list("ab")
165
+ )
166
+ dict1 = {"a": 1}
167
+ dict2 = {"b": 2}
168
+ assert df.eval("a + b", resolvers=[dict1, dict2]) == dict1["a"] + dict2["b"]
169
+ assert pd.eval("a + b", resolvers=[dict1, dict2]) == dict1["a"] + dict2["b"]
170
+
171
+ def test_eval_resolvers_combined(self):
172
+ # GH 34966
173
+ df = DataFrame(
174
+ np.random.default_rng(2).standard_normal((10, 2)), columns=list("ab")
175
+ )
176
+ dict1 = {"c": 2}
177
+
178
+ # Both input and default index/column resolvers should be usable
179
+ result = df.eval("a + b * c", resolvers=[dict1])
180
+
181
+ expected = df["a"] + df["b"] * dict1["c"]
182
+ tm.assert_series_equal(result, expected)
183
+
184
+ def test_eval_object_dtype_binop(self):
185
+ # GH#24883
186
+ df = DataFrame({"a1": ["Y", "N"]})
187
+ res = df.eval("c = ((a1 == 'Y') & True)")
188
+ expected = DataFrame({"a1": ["Y", "N"], "c": [True, False]})
189
+ tm.assert_frame_equal(res, expected)
190
+
191
+
192
+ class TestDataFrameQueryWithMultiIndex:
193
+ def test_query_with_named_multiindex(self, parser, engine):
194
+ skip_if_no_pandas_parser(parser)
195
+ a = np.random.default_rng(2).choice(["red", "green"], size=10)
196
+ b = np.random.default_rng(2).choice(["eggs", "ham"], size=10)
197
+ index = MultiIndex.from_arrays([a, b], names=["color", "food"])
198
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)), index=index)
199
+ ind = Series(
200
+ df.index.get_level_values("color").values, index=index, name="color"
201
+ )
202
+
203
+ # equality
204
+ res1 = df.query('color == "red"', parser=parser, engine=engine)
205
+ res2 = df.query('"red" == color', parser=parser, engine=engine)
206
+ exp = df[ind == "red"]
207
+ tm.assert_frame_equal(res1, exp)
208
+ tm.assert_frame_equal(res2, exp)
209
+
210
+ # inequality
211
+ res1 = df.query('color != "red"', parser=parser, engine=engine)
212
+ res2 = df.query('"red" != color', parser=parser, engine=engine)
213
+ exp = df[ind != "red"]
214
+ tm.assert_frame_equal(res1, exp)
215
+ tm.assert_frame_equal(res2, exp)
216
+
217
+ # list equality (really just set membership)
218
+ res1 = df.query('color == ["red"]', parser=parser, engine=engine)
219
+ res2 = df.query('["red"] == color', parser=parser, engine=engine)
220
+ exp = df[ind.isin(["red"])]
221
+ tm.assert_frame_equal(res1, exp)
222
+ tm.assert_frame_equal(res2, exp)
223
+
224
+ res1 = df.query('color != ["red"]', parser=parser, engine=engine)
225
+ res2 = df.query('["red"] != color', parser=parser, engine=engine)
226
+ exp = df[~ind.isin(["red"])]
227
+ tm.assert_frame_equal(res1, exp)
228
+ tm.assert_frame_equal(res2, exp)
229
+
230
+ # in/not in ops
231
+ res1 = df.query('["red"] in color', parser=parser, engine=engine)
232
+ res2 = df.query('"red" in color', parser=parser, engine=engine)
233
+ exp = df[ind.isin(["red"])]
234
+ tm.assert_frame_equal(res1, exp)
235
+ tm.assert_frame_equal(res2, exp)
236
+
237
+ res1 = df.query('["red"] not in color', parser=parser, engine=engine)
238
+ res2 = df.query('"red" not in color', parser=parser, engine=engine)
239
+ exp = df[~ind.isin(["red"])]
240
+ tm.assert_frame_equal(res1, exp)
241
+ tm.assert_frame_equal(res2, exp)
242
+
243
+ def test_query_with_unnamed_multiindex(self, parser, engine):
244
+ skip_if_no_pandas_parser(parser)
245
+ a = np.random.default_rng(2).choice(["red", "green"], size=10)
246
+ b = np.random.default_rng(2).choice(["eggs", "ham"], size=10)
247
+ index = MultiIndex.from_arrays([a, b])
248
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)), index=index)
249
+ ind = Series(df.index.get_level_values(0).values, index=index)
250
+
251
+ res1 = df.query('ilevel_0 == "red"', parser=parser, engine=engine)
252
+ res2 = df.query('"red" == ilevel_0', parser=parser, engine=engine)
253
+ exp = df[ind == "red"]
254
+ tm.assert_frame_equal(res1, exp)
255
+ tm.assert_frame_equal(res2, exp)
256
+
257
+ # inequality
258
+ res1 = df.query('ilevel_0 != "red"', parser=parser, engine=engine)
259
+ res2 = df.query('"red" != ilevel_0', parser=parser, engine=engine)
260
+ exp = df[ind != "red"]
261
+ tm.assert_frame_equal(res1, exp)
262
+ tm.assert_frame_equal(res2, exp)
263
+
264
+ # list equality (really just set membership)
265
+ res1 = df.query('ilevel_0 == ["red"]', parser=parser, engine=engine)
266
+ res2 = df.query('["red"] == ilevel_0', parser=parser, engine=engine)
267
+ exp = df[ind.isin(["red"])]
268
+ tm.assert_frame_equal(res1, exp)
269
+ tm.assert_frame_equal(res2, exp)
270
+
271
+ res1 = df.query('ilevel_0 != ["red"]', parser=parser, engine=engine)
272
+ res2 = df.query('["red"] != ilevel_0', parser=parser, engine=engine)
273
+ exp = df[~ind.isin(["red"])]
274
+ tm.assert_frame_equal(res1, exp)
275
+ tm.assert_frame_equal(res2, exp)
276
+
277
+ # in/not in ops
278
+ res1 = df.query('["red"] in ilevel_0', parser=parser, engine=engine)
279
+ res2 = df.query('"red" in ilevel_0', parser=parser, engine=engine)
280
+ exp = df[ind.isin(["red"])]
281
+ tm.assert_frame_equal(res1, exp)
282
+ tm.assert_frame_equal(res2, exp)
283
+
284
+ res1 = df.query('["red"] not in ilevel_0', parser=parser, engine=engine)
285
+ res2 = df.query('"red" not in ilevel_0', parser=parser, engine=engine)
286
+ exp = df[~ind.isin(["red"])]
287
+ tm.assert_frame_equal(res1, exp)
288
+ tm.assert_frame_equal(res2, exp)
289
+
290
+ # ## LEVEL 1
291
+ ind = Series(df.index.get_level_values(1).values, index=index)
292
+ res1 = df.query('ilevel_1 == "eggs"', parser=parser, engine=engine)
293
+ res2 = df.query('"eggs" == ilevel_1', parser=parser, engine=engine)
294
+ exp = df[ind == "eggs"]
295
+ tm.assert_frame_equal(res1, exp)
296
+ tm.assert_frame_equal(res2, exp)
297
+
298
+ # inequality
299
+ res1 = df.query('ilevel_1 != "eggs"', parser=parser, engine=engine)
300
+ res2 = df.query('"eggs" != ilevel_1', parser=parser, engine=engine)
301
+ exp = df[ind != "eggs"]
302
+ tm.assert_frame_equal(res1, exp)
303
+ tm.assert_frame_equal(res2, exp)
304
+
305
+ # list equality (really just set membership)
306
+ res1 = df.query('ilevel_1 == ["eggs"]', parser=parser, engine=engine)
307
+ res2 = df.query('["eggs"] == ilevel_1', parser=parser, engine=engine)
308
+ exp = df[ind.isin(["eggs"])]
309
+ tm.assert_frame_equal(res1, exp)
310
+ tm.assert_frame_equal(res2, exp)
311
+
312
+ res1 = df.query('ilevel_1 != ["eggs"]', parser=parser, engine=engine)
313
+ res2 = df.query('["eggs"] != ilevel_1', parser=parser, engine=engine)
314
+ exp = df[~ind.isin(["eggs"])]
315
+ tm.assert_frame_equal(res1, exp)
316
+ tm.assert_frame_equal(res2, exp)
317
+
318
+ # in/not in ops
319
+ res1 = df.query('["eggs"] in ilevel_1', parser=parser, engine=engine)
320
+ res2 = df.query('"eggs" in ilevel_1', parser=parser, engine=engine)
321
+ exp = df[ind.isin(["eggs"])]
322
+ tm.assert_frame_equal(res1, exp)
323
+ tm.assert_frame_equal(res2, exp)
324
+
325
+ res1 = df.query('["eggs"] not in ilevel_1', parser=parser, engine=engine)
326
+ res2 = df.query('"eggs" not in ilevel_1', parser=parser, engine=engine)
327
+ exp = df[~ind.isin(["eggs"])]
328
+ tm.assert_frame_equal(res1, exp)
329
+ tm.assert_frame_equal(res2, exp)
330
+
331
+ def test_query_with_partially_named_multiindex(self, parser, engine):
332
+ skip_if_no_pandas_parser(parser)
333
+ a = np.random.default_rng(2).choice(["red", "green"], size=10)
334
+ b = np.arange(10)
335
+ index = MultiIndex.from_arrays([a, b])
336
+ index.names = [None, "rating"]
337
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)), index=index)
338
+ res = df.query("rating == 1", parser=parser, engine=engine)
339
+ ind = Series(
340
+ df.index.get_level_values("rating").values, index=index, name="rating"
341
+ )
342
+ exp = df[ind == 1]
343
+ tm.assert_frame_equal(res, exp)
344
+
345
+ res = df.query("rating != 1", parser=parser, engine=engine)
346
+ ind = Series(
347
+ df.index.get_level_values("rating").values, index=index, name="rating"
348
+ )
349
+ exp = df[ind != 1]
350
+ tm.assert_frame_equal(res, exp)
351
+
352
+ res = df.query('ilevel_0 == "red"', parser=parser, engine=engine)
353
+ ind = Series(df.index.get_level_values(0).values, index=index)
354
+ exp = df[ind == "red"]
355
+ tm.assert_frame_equal(res, exp)
356
+
357
+ res = df.query('ilevel_0 != "red"', parser=parser, engine=engine)
358
+ ind = Series(df.index.get_level_values(0).values, index=index)
359
+ exp = df[ind != "red"]
360
+ tm.assert_frame_equal(res, exp)
361
+
362
+ def test_query_multiindex_get_index_resolvers(self):
363
+ df = DataFrame(
364
+ np.ones((10, 3)),
365
+ index=MultiIndex.from_arrays(
366
+ [range(10) for _ in range(2)], names=["spam", "eggs"]
367
+ ),
368
+ )
369
+ resolvers = df._get_index_resolvers()
370
+
371
+ def to_series(mi, level):
372
+ level_values = mi.get_level_values(level)
373
+ s = level_values.to_series()
374
+ s.index = mi
375
+ return s
376
+
377
+ col_series = df.columns.to_series()
378
+ expected = {
379
+ "index": df.index,
380
+ "columns": col_series,
381
+ "spam": to_series(df.index, "spam"),
382
+ "eggs": to_series(df.index, "eggs"),
383
+ "clevel_0": col_series,
384
+ }
385
+ for k, v in resolvers.items():
386
+ if isinstance(v, Index):
387
+ assert v.is_(expected[k])
388
+ elif isinstance(v, Series):
389
+ tm.assert_series_equal(v, expected[k])
390
+ else:
391
+ raise AssertionError("object must be a Series or Index")
392
+
393
+
394
@td.skip_if_no("numexpr")
class TestDataFrameQueryNumExprPandas:
    """DataFrame.query tests run with the numexpr engine and pandas parser."""

    @pytest.fixture
    def engine(self):
        # Evaluation backend under test.
        return "numexpr"

    @pytest.fixture
    def parser(self):
        # Expression parser under test.
        return "pandas"
403
+
404
+ def test_date_query_with_attribute_access(self, engine, parser):
405
+ skip_if_no_pandas_parser(parser)
406
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
407
+ df["dates1"] = date_range("1/1/2012", periods=5)
408
+ df["dates2"] = date_range("1/1/2013", periods=5)
409
+ df["dates3"] = date_range("1/1/2014", periods=5)
410
+ res = df.query(
411
+ "@df.dates1 < 20130101 < @df.dates3", engine=engine, parser=parser
412
+ )
413
+ expec = df[(df.dates1 < "20130101") & ("20130101" < df.dates3)]
414
+ tm.assert_frame_equal(res, expec)
415
+
416
+ def test_date_query_no_attribute_access(self, engine, parser):
417
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
418
+ df["dates1"] = date_range("1/1/2012", periods=5)
419
+ df["dates2"] = date_range("1/1/2013", periods=5)
420
+ df["dates3"] = date_range("1/1/2014", periods=5)
421
+ res = df.query("dates1 < 20130101 < dates3", engine=engine, parser=parser)
422
+ expec = df[(df.dates1 < "20130101") & ("20130101" < df.dates3)]
423
+ tm.assert_frame_equal(res, expec)
424
+
425
+ def test_date_query_with_NaT(self, engine, parser):
426
+ n = 10
427
+ df = DataFrame(np.random.default_rng(2).standard_normal((n, 3)))
428
+ df["dates1"] = date_range("1/1/2012", periods=n)
429
+ df["dates2"] = date_range("1/1/2013", periods=n)
430
+ df["dates3"] = date_range("1/1/2014", periods=n)
431
+ df.loc[np.random.default_rng(2).random(n) > 0.5, "dates1"] = pd.NaT
432
+ df.loc[np.random.default_rng(2).random(n) > 0.5, "dates3"] = pd.NaT
433
+ res = df.query("dates1 < 20130101 < dates3", engine=engine, parser=parser)
434
+ expec = df[(df.dates1 < "20130101") & ("20130101" < df.dates3)]
435
+ tm.assert_frame_equal(res, expec)
436
+
437
+ def test_date_index_query(self, engine, parser):
438
+ n = 10
439
+ df = DataFrame(np.random.default_rng(2).standard_normal((n, 3)))
440
+ df["dates1"] = date_range("1/1/2012", periods=n)
441
+ df["dates3"] = date_range("1/1/2014", periods=n)
442
+ return_value = df.set_index("dates1", inplace=True, drop=True)
443
+ assert return_value is None
444
+ res = df.query("index < 20130101 < dates3", engine=engine, parser=parser)
445
+ expec = df[(df.index < "20130101") & ("20130101" < df.dates3)]
446
+ tm.assert_frame_equal(res, expec)
447
+
448
+ def test_date_index_query_with_NaT(self, engine, parser):
449
+ n = 10
450
+ # Cast to object to avoid implicit cast when setting entry to pd.NaT below
451
+ df = DataFrame(np.random.default_rng(2).standard_normal((n, 3))).astype(
452
+ {0: object}
453
+ )
454
+ df["dates1"] = date_range("1/1/2012", periods=n)
455
+ df["dates3"] = date_range("1/1/2014", periods=n)
456
+ df.iloc[0, 0] = pd.NaT
457
+ return_value = df.set_index("dates1", inplace=True, drop=True)
458
+ assert return_value is None
459
+ res = df.query("index < 20130101 < dates3", engine=engine, parser=parser)
460
+ expec = df[(df.index < "20130101") & ("20130101" < df.dates3)]
461
+ tm.assert_frame_equal(res, expec)
462
+
463
+ def test_date_index_query_with_NaT_duplicates(self, engine, parser):
464
+ n = 10
465
+ d = {}
466
+ d["dates1"] = date_range("1/1/2012", periods=n)
467
+ d["dates3"] = date_range("1/1/2014", periods=n)
468
+ df = DataFrame(d)
469
+ df.loc[np.random.default_rng(2).random(n) > 0.5, "dates1"] = pd.NaT
470
+ return_value = df.set_index("dates1", inplace=True, drop=True)
471
+ assert return_value is None
472
+ res = df.query("dates1 < 20130101 < dates3", engine=engine, parser=parser)
473
+ expec = df[(df.index.to_series() < "20130101") & ("20130101" < df.dates3)]
474
+ tm.assert_frame_equal(res, expec)
475
+
476
def test_date_query_with_non_date(self, engine, parser):
    """Comparing datetimes to ints: ==/!= are well-defined, ordering raises."""
    n = 10
    df = DataFrame(
        {"dates": date_range("1/1/2012", periods=n), "nondate": np.arange(n)}
    )

    result = df.query("dates == nondate", parser=parser, engine=engine)
    assert len(result) == 0

    result = df.query("dates != nondate", parser=parser, engine=engine)
    tm.assert_frame_equal(result, df)

    msg = r"Invalid comparison between dtype=datetime64\[ns\] and ndarray"
    for op in ("<", ">", "<=", ">="):
        with pytest.raises(TypeError, match=msg):
            df.query(f"dates {op} nondate", parser=parser, engine=engine)

def test_query_syntax_error(self, engine, parser):
    """A malformed expression surfaces as SyntaxError."""
    df = DataFrame({"i": range(10), "+": range(3, 13), "r": range(4, 14)})
    msg = "invalid syntax"
    with pytest.raises(SyntaxError, match=msg):
        df.query("i - +", engine=engine, parser=parser)

def test_query_scope(self, engine, parser):
    """Bare names mean columns; '@name' means a Python local."""
    skip_if_no_pandas_parser(parser)

    df = DataFrame(
        np.random.default_rng(2).standard_normal((20, 2)), columns=list("ab")
    )
    a, b = 1, 2  # noqa: F841

    result = df.query("a > b", engine=engine, parser=parser)
    tm.assert_frame_equal(result, df[df.a > df.b])

    result = df.query("@a > b", engine=engine, parser=parser)
    tm.assert_frame_equal(result, df[a > df.b])

    # no local variable c
    with pytest.raises(
        UndefinedVariableError, match="local variable 'c' is not defined"
    ):
        df.query("@a > b > @c", engine=engine, parser=parser)

    # no column named 'c'
    with pytest.raises(UndefinedVariableError, match="name 'c' is not defined"):
        df.query("@a > b > c", engine=engine, parser=parser)

def test_query_doesnt_pickup_local(self, engine, parser):
    """Builtins such as 'sin' are never resolved implicitly."""
    n = m = 10
    df = DataFrame(
        np.random.default_rng(2).integers(m, size=(n, 3)), columns=list("abc")
    )

    # we don't pick up the local 'sin'
    with pytest.raises(UndefinedVariableError, match="name 'sin' is not defined"):
        df.query("sin > 5", engine=engine, parser=parser)
534
+
535
+ def test_query_builtin(self, engine, parser):
536
+ n = m = 10
537
+ df = DataFrame(
538
+ np.random.default_rng(2).integers(m, size=(n, 3)), columns=list("abc")
539
+ )
540
+
541
+ df.index.name = "sin"
542
+ msg = "Variables in expression.+"
543
+ with pytest.raises(NumExprClobberingError, match=msg):
544
+ df.query("sin > 5", engine=engine, parser=parser)
545
+
546
+ def test_query(self, engine, parser):
547
+ df = DataFrame(
548
+ np.random.default_rng(2).standard_normal((10, 3)), columns=["a", "b", "c"]
549
+ )
550
+
551
+ tm.assert_frame_equal(
552
+ df.query("a < b", engine=engine, parser=parser), df[df.a < df.b]
553
+ )
554
+ tm.assert_frame_equal(
555
+ df.query("a + b > b * c", engine=engine, parser=parser),
556
+ df[df.a + df.b > df.b * df.c],
557
+ )
558
+
559
+ def test_query_index_with_name(self, engine, parser):
560
+ df = DataFrame(
561
+ np.random.default_rng(2).integers(10, size=(10, 3)),
562
+ index=Index(range(10), name="blob"),
563
+ columns=["a", "b", "c"],
564
+ )
565
+ res = df.query("(blob < 5) & (a < b)", engine=engine, parser=parser)
566
+ expec = df[(df.index < 5) & (df.a < df.b)]
567
+ tm.assert_frame_equal(res, expec)
568
+
569
+ res = df.query("blob < b", engine=engine, parser=parser)
570
+ expec = df[df.index < df.b]
571
+
572
+ tm.assert_frame_equal(res, expec)
573
+
574
+ def test_query_index_without_name(self, engine, parser):
575
+ df = DataFrame(
576
+ np.random.default_rng(2).integers(10, size=(10, 3)),
577
+ index=range(10),
578
+ columns=["a", "b", "c"],
579
+ )
580
+
581
+ # "index" should refer to the index
582
+ res = df.query("index < b", engine=engine, parser=parser)
583
+ expec = df[df.index < df.b]
584
+ tm.assert_frame_equal(res, expec)
585
+
586
+ # test against a scalar
587
+ res = df.query("index < 5", engine=engine, parser=parser)
588
+ expec = df[df.index < 5]
589
+ tm.assert_frame_equal(res, expec)
590
+
591
def test_nested_scope(self, engine, parser):
    """Locals are visible both via '@' in query and by name in pd.eval."""
    skip_if_no_pandas_parser(parser)

    df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
    df2 = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
    mask = (df > 0) & (df2 > 0)
    expected = df[mask]

    result = df.query("(@df > 0) & (@df2 > 0)", engine=engine, parser=parser)
    tm.assert_frame_equal(result, expected)

    result = pd.eval("df[df > 0 and df2 > 0]", engine=engine, parser=parser)
    tm.assert_frame_equal(result, expected)

    result = pd.eval(
        "df[df > 0 and df2 > 0 and df[df > 0] > 0]", engine=engine, parser=parser
    )
    expected = df[(df > 0) & (df2 > 0) & (df[df > 0] > 0)]
    tm.assert_frame_equal(result, expected)

    result = pd.eval("df[(df>0) & (df2>0)]", engine=engine, parser=parser)
    expected = df.query("(@df>0) & (@df2>0)", engine=engine, parser=parser)
    tm.assert_frame_equal(result, expected)

def test_nested_raises_on_local_self_reference(self, engine, parser):
    df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))

    # can't reference ourself b/c we're a local so @ is necessary
    with pytest.raises(UndefinedVariableError, match="name 'df' is not defined"):
        df.query("df > 0", engine=engine, parser=parser)

def test_local_syntax(self, engine, parser):
    """'@b' picks the local; bare 'b' picks the column of the same name."""
    skip_if_no_pandas_parser(parser)

    df = DataFrame(
        np.random.default_rng(2).standard_normal((100, 10)),
        columns=list("abcdefghij"),
    )
    b = 1
    result = df.query("a < @b", engine=engine, parser=parser)
    tm.assert_frame_equal(result, df[df.a < b])

    result = df.query("a < b", engine=engine, parser=parser)
    tm.assert_frame_equal(result, df[df.a < df.b])

def test_chained_cmp_and_in(self, engine, parser):
    """Chained comparisons combine with 'not in' membership tests."""
    skip_if_no_pandas_parser(parser)
    cols = list("abc")
    df = DataFrame(
        np.random.default_rng(2).standard_normal((100, len(cols))), columns=cols
    )
    result = df.query(
        "a < b < c and a not in b not in c", engine=engine, parser=parser
    )
    mask = (df.a < df.b) & (df.b < df.c) & ~df.b.isin(df.a) & ~df.c.isin(df.b)
    tm.assert_frame_equal(result, df[mask])

def test_local_variable_with_in(self, engine, parser):
    """Membership tests accept both column and '@local' operands."""
    skip_if_no_pandas_parser(parser)
    a = Series(np.random.default_rng(2).integers(3, size=15), name="a")
    b = Series(np.random.default_rng(2).integers(10, size=15), name="b")
    df = DataFrame({"a": a, "b": b})

    expected = df.loc[(df.b - 1).isin(a)]
    result = df.query("b - 1 in a", engine=engine, parser=parser)
    tm.assert_frame_equal(expected, result)

    b = Series(np.random.default_rng(2).integers(10, size=15), name="b")
    expected = df.loc[(b - 1).isin(a)]
    result = df.query("@b - 1 in a", engine=engine, parser=parser)
    tm.assert_frame_equal(expected, result)

def test_at_inside_string(self, engine, parser):
    """A literal '@' inside a quoted string is not treated as a local marker."""
    skip_if_no_pandas_parser(parser)
    c = 1  # noqa: F841
    df = DataFrame({"a": ["a", "a", "b", "b", "@c", "@c"]})
    result = df.query('a == "@c"', engine=engine, parser=parser)
    tm.assert_frame_equal(result, df[df.a == "@c"])
672
+
673
def test_query_undefined_local(self, engine, parser):
    """An '@name' with no matching local raises UndefinedVariableError.

    BUG FIX: this method read ``self.engine``/``self.parser``, but in this
    class ``engine`` and ``parser`` are pytest *fixtures*, not instance
    attributes, so the old attribute access raised ``AttributeError`` before
    the assertion ever ran.  Request the fixtures as parameters instead, as
    every sibling test in the class does.
    """
    skip_if_no_pandas_parser(parser)

    df = DataFrame(np.random.default_rng(2).random((10, 2)), columns=list("ab"))
    with pytest.raises(
        UndefinedVariableError, match="local variable 'c' is not defined"
    ):
        df.query("a == @c", engine=engine, parser=parser)
682
+
683
+ def test_index_resolvers_come_after_columns_with_the_same_name(
684
+ self, engine, parser
685
+ ):
686
+ n = 1 # noqa: F841
687
+ a = np.r_[20:101:20]
688
+
689
+ df = DataFrame(
690
+ {"index": a, "b": np.random.default_rng(2).standard_normal(a.size)}
691
+ )
692
+ df.index.name = "index"
693
+ result = df.query("index > 5", engine=engine, parser=parser)
694
+ expected = df[df["index"] > 5]
695
+ tm.assert_frame_equal(result, expected)
696
+
697
+ df = DataFrame(
698
+ {"index": a, "b": np.random.default_rng(2).standard_normal(a.size)}
699
+ )
700
+ result = df.query("ilevel_0 > 5", engine=engine, parser=parser)
701
+ expected = df.loc[df.index[df.index > 5]]
702
+ tm.assert_frame_equal(result, expected)
703
+
704
+ df = DataFrame({"a": a, "b": np.random.default_rng(2).standard_normal(a.size)})
705
+ df.index.name = "a"
706
+ result = df.query("a > 5", engine=engine, parser=parser)
707
+ expected = df[df.a > 5]
708
+ tm.assert_frame_equal(result, expected)
709
+
710
+ result = df.query("index > 5", engine=engine, parser=parser)
711
+ expected = df.loc[df.index[df.index > 5]]
712
+ tm.assert_frame_equal(result, expected)
713
+
714
@pytest.mark.parametrize("op, f", [["==", operator.eq], ["!=", operator.ne]])
def test_inf(self, op, f, engine, parser):
    """Equality comparisons against the literal 'inf'."""
    n = 10
    df = DataFrame(
        {
            "a": np.random.default_rng(2).random(n),
            "b": np.random.default_rng(2).random(n),
        }
    )
    # NOTE(review): this writes to a *new* column labelled 0 rather than to
    # "a", so df.a never actually contains inf — presumably intentional
    # upstream, but worth confirming.
    df.loc[::2, 0] = np.inf
    expression = f"a {op} inf"
    expected = df[f(df.a, np.inf)]
    result = df.query(expression, engine=engine, parser=parser)
    tm.assert_frame_equal(result, expected)
728
+
729
+ def test_check_tz_aware_index_query(self, tz_aware_fixture):
730
+ # https://github.com/pandas-dev/pandas/issues/29463
731
+ tz = tz_aware_fixture
732
+ df_index = date_range(
733
+ start="2019-01-01", freq="1d", periods=10, tz=tz, name="time"
734
+ )
735
+ expected = DataFrame(index=df_index)
736
+ df = DataFrame(index=df_index)
737
+ result = df.query('"2018-01-03 00:00:00+00" < time')
738
+ tm.assert_frame_equal(result, expected)
739
+
740
+ expected = DataFrame(df_index)
741
+ result = df.reset_index().query('"2018-01-03 00:00:00+00" < time')
742
+ tm.assert_frame_equal(result, expected)
743
+
744
+ def test_method_calls_in_query(self, engine, parser):
745
+ # https://github.com/pandas-dev/pandas/issues/22435
746
+ n = 10
747
+ df = DataFrame(
748
+ {
749
+ "a": 2 * np.random.default_rng(2).random(n),
750
+ "b": np.random.default_rng(2).random(n),
751
+ }
752
+ )
753
+ expected = df[df["a"].astype("int") == 0]
754
+ result = df.query("a.astype('int') == 0", engine=engine, parser=parser)
755
+ tm.assert_frame_equal(result, expected)
756
+
757
+ df = DataFrame(
758
+ {
759
+ "a": np.where(
760
+ np.random.default_rng(2).random(n) < 0.5,
761
+ np.nan,
762
+ np.random.default_rng(2).standard_normal(n),
763
+ ),
764
+ "b": np.random.default_rng(2).standard_normal(n),
765
+ }
766
+ )
767
+ expected = df[df["a"].notnull()]
768
+ result = df.query("a.notnull()", engine=engine, parser=parser)
769
+ tm.assert_frame_equal(result, expected)
770
+
771
+
772
@td.skip_if_no("numexpr")
class TestDataFrameQueryNumExprPython(TestDataFrameQueryNumExprPandas):
    """Re-run the numexpr suite with the stricter 'python' parser.

    Overrides below cover cases where the python parser either needs
    explicit parenthesisation or raises where the pandas parser succeeds.
    """

    @pytest.fixture
    def engine(self):
        return "numexpr"

    @pytest.fixture
    def parser(self):
        return "python"
781
+
782
+ def test_date_query_no_attribute_access(self, engine, parser):
783
+ df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
784
+ df["dates1"] = date_range("1/1/2012", periods=5)
785
+ df["dates2"] = date_range("1/1/2013", periods=5)
786
+ df["dates3"] = date_range("1/1/2014", periods=5)
787
+ res = df.query(
788
+ "(dates1 < 20130101) & (20130101 < dates3)", engine=engine, parser=parser
789
+ )
790
+ expec = df[(df.dates1 < "20130101") & ("20130101" < df.dates3)]
791
+ tm.assert_frame_equal(res, expec)
792
+
793
+ def test_date_query_with_NaT(self, engine, parser):
794
+ n = 10
795
+ df = DataFrame(np.random.default_rng(2).standard_normal((n, 3)))
796
+ df["dates1"] = date_range("1/1/2012", periods=n)
797
+ df["dates2"] = date_range("1/1/2013", periods=n)
798
+ df["dates3"] = date_range("1/1/2014", periods=n)
799
+ df.loc[np.random.default_rng(2).random(n) > 0.5, "dates1"] = pd.NaT
800
+ df.loc[np.random.default_rng(2).random(n) > 0.5, "dates3"] = pd.NaT
801
+ res = df.query(
802
+ "(dates1 < 20130101) & (20130101 < dates3)", engine=engine, parser=parser
803
+ )
804
+ expec = df[(df.dates1 < "20130101") & ("20130101" < df.dates3)]
805
+ tm.assert_frame_equal(res, expec)
806
+
807
+ def test_date_index_query(self, engine, parser):
808
+ n = 10
809
+ df = DataFrame(np.random.default_rng(2).standard_normal((n, 3)))
810
+ df["dates1"] = date_range("1/1/2012", periods=n)
811
+ df["dates3"] = date_range("1/1/2014", periods=n)
812
+ return_value = df.set_index("dates1", inplace=True, drop=True)
813
+ assert return_value is None
814
+ res = df.query(
815
+ "(index < 20130101) & (20130101 < dates3)", engine=engine, parser=parser
816
+ )
817
+ expec = df[(df.index < "20130101") & ("20130101" < df.dates3)]
818
+ tm.assert_frame_equal(res, expec)
819
+
820
+ def test_date_index_query_with_NaT(self, engine, parser):
821
+ n = 10
822
+ # Cast to object to avoid implicit cast when setting entry to pd.NaT below
823
+ df = DataFrame(np.random.default_rng(2).standard_normal((n, 3))).astype(
824
+ {0: object}
825
+ )
826
+ df["dates1"] = date_range("1/1/2012", periods=n)
827
+ df["dates3"] = date_range("1/1/2014", periods=n)
828
+ df.iloc[0, 0] = pd.NaT
829
+ return_value = df.set_index("dates1", inplace=True, drop=True)
830
+ assert return_value is None
831
+ res = df.query(
832
+ "(index < 20130101) & (20130101 < dates3)", engine=engine, parser=parser
833
+ )
834
+ expec = df[(df.index < "20130101") & ("20130101" < df.dates3)]
835
+ tm.assert_frame_equal(res, expec)
836
+
837
+ def test_date_index_query_with_NaT_duplicates(self, engine, parser):
838
+ n = 10
839
+ df = DataFrame(np.random.default_rng(2).standard_normal((n, 3)))
840
+ df["dates1"] = date_range("1/1/2012", periods=n)
841
+ df["dates3"] = date_range("1/1/2014", periods=n)
842
+ df.loc[np.random.default_rng(2).random(n) > 0.5, "dates1"] = pd.NaT
843
+ return_value = df.set_index("dates1", inplace=True, drop=True)
844
+ assert return_value is None
845
+ msg = r"'BoolOp' nodes are not implemented"
846
+ with pytest.raises(NotImplementedError, match=msg):
847
+ df.query("index < 20130101 < dates3", engine=engine, parser=parser)
848
+
849
def test_nested_scope(self, engine, parser):
    """Python parser: no '@' support; locals resolve only through pd.eval."""
    # smoke test
    x = 1  # noqa: F841
    result = pd.eval("x + 1", engine=engine, parser=parser)
    assert result == 2

    df = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))
    df2 = DataFrame(np.random.default_rng(2).standard_normal((5, 3)))

    # don't have the pandas parser
    msg = r"The '@' prefix is only supported by the pandas parser"
    with pytest.raises(SyntaxError, match=msg):
        df.query("(@df>0) & (@df2>0)", engine=engine, parser=parser)

    with pytest.raises(UndefinedVariableError, match="name 'df' is not defined"):
        df.query("(df>0) & (df2>0)", engine=engine, parser=parser)

    expected = df[(df > 0) & (df2 > 0)]
    result = pd.eval("df[(df > 0) & (df2 > 0)]", engine=engine, parser=parser)
    tm.assert_frame_equal(expected, result)

    expected = df[(df > 0) & (df2 > 0) & (df[df > 0] > 0)]
    result = pd.eval(
        "df[(df > 0) & (df2 > 0) & (df[df > 0] > 0)]", engine=engine, parser=parser
    )
    tm.assert_frame_equal(expected, result)

def test_query_numexpr_with_min_and_max_columns(self):
    """Columns named after numexpr builtins are rejected by the default engine."""
    df = DataFrame({"min": [1, 2, 3], "max": [4, 5, 6]})

    regex_to_match = (
        r"Variables in expression \"\(min\) == \(1\)\" "
        r"overlap with builtins: \('min'\)"
    )
    with pytest.raises(NumExprClobberingError, match=regex_to_match):
        df.query("min == 1")

    regex_to_match = (
        r"Variables in expression \"\(max\) == \(1\)\" "
        r"overlap with builtins: \('max'\)"
    )
    with pytest.raises(NumExprClobberingError, match=regex_to_match):
        df.query("max == 1")
891
+
892
+
893
class TestDataFrameQueryPythonPandas(TestDataFrameQueryNumExprPandas):
    """Pandas-parser suite re-run with the pure-python engine."""

    @pytest.fixture
    def engine(self):
        return "python"

    @pytest.fixture
    def parser(self):
        return "pandas"

    def test_query_builtin(self, engine, parser):
        # Unlike numexpr, the python engine happily resolves an index
        # named after a builtin.
        n = m = 10
        df = DataFrame(
            np.random.default_rng(2).integers(m, size=(n, 3)), columns=list("abc")
        )
        df.index.name = "sin"
        result = df.query("sin > 5", engine=engine, parser=parser)
        tm.assert_frame_equal(df[df.index > 5], result)


class TestDataFrameQueryPythonPython(TestDataFrameQueryNumExprPython):
    """Python-parser suite re-run with the pure-python engine."""

    @pytest.fixture
    def engine(self):
        return "python"

    @pytest.fixture
    def parser(self):
        return "python"

    def test_query_builtin(self, engine, parser):
        # Same as the pandas-parser variant: no numexpr clobbering check.
        n = m = 10
        df = DataFrame(
            np.random.default_rng(2).integers(m, size=(n, 3)), columns=list("abc")
        )
        df.index.name = "sin"
        result = df.query("sin > 5", engine=engine, parser=parser)
        tm.assert_frame_equal(df[df.index > 5], result)
933
+
934
+
935
+ class TestDataFrameQueryStrings:
936
+ def test_str_query_method(self, parser, engine):
937
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 1)), columns=["b"])
938
+ df["strings"] = Series(list("aabbccddee"))
939
+ expect = df[df.strings == "a"]
940
+
941
+ if parser != "pandas":
942
+ col = "strings"
943
+ lst = '"a"'
944
+
945
+ lhs = [col] * 2 + [lst] * 2
946
+ rhs = lhs[::-1]
947
+
948
+ eq, ne = "==", "!="
949
+ ops = 2 * ([eq] + [ne])
950
+ msg = r"'(Not)?In' nodes are not implemented"
951
+
952
+ for lhs, op, rhs in zip(lhs, ops, rhs):
953
+ ex = f"{lhs} {op} {rhs}"
954
+ with pytest.raises(NotImplementedError, match=msg):
955
+ df.query(
956
+ ex,
957
+ engine=engine,
958
+ parser=parser,
959
+ local_dict={"strings": df.strings},
960
+ )
961
+ else:
962
+ res = df.query('"a" == strings', engine=engine, parser=parser)
963
+ tm.assert_frame_equal(res, expect)
964
+
965
+ res = df.query('strings == "a"', engine=engine, parser=parser)
966
+ tm.assert_frame_equal(res, expect)
967
+ tm.assert_frame_equal(res, df[df.strings.isin(["a"])])
968
+
969
+ expect = df[df.strings != "a"]
970
+ res = df.query('strings != "a"', engine=engine, parser=parser)
971
+ tm.assert_frame_equal(res, expect)
972
+
973
+ res = df.query('"a" != strings', engine=engine, parser=parser)
974
+ tm.assert_frame_equal(res, expect)
975
+ tm.assert_frame_equal(res, df[~df.strings.isin(["a"])])
976
+
977
+ def test_str_list_query_method(self, parser, engine):
978
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 1)), columns=["b"])
979
+ df["strings"] = Series(list("aabbccddee"))
980
+ expect = df[df.strings.isin(["a", "b"])]
981
+
982
+ if parser != "pandas":
983
+ col = "strings"
984
+ lst = '["a", "b"]'
985
+
986
+ lhs = [col] * 2 + [lst] * 2
987
+ rhs = lhs[::-1]
988
+
989
+ eq, ne = "==", "!="
990
+ ops = 2 * ([eq] + [ne])
991
+ msg = r"'(Not)?In' nodes are not implemented"
992
+
993
+ for lhs, op, rhs in zip(lhs, ops, rhs):
994
+ ex = f"{lhs} {op} {rhs}"
995
+ with pytest.raises(NotImplementedError, match=msg):
996
+ df.query(ex, engine=engine, parser=parser)
997
+ else:
998
+ res = df.query('strings == ["a", "b"]', engine=engine, parser=parser)
999
+ tm.assert_frame_equal(res, expect)
1000
+
1001
+ res = df.query('["a", "b"] == strings', engine=engine, parser=parser)
1002
+ tm.assert_frame_equal(res, expect)
1003
+
1004
+ expect = df[~df.strings.isin(["a", "b"])]
1005
+
1006
+ res = df.query('strings != ["a", "b"]', engine=engine, parser=parser)
1007
+ tm.assert_frame_equal(res, expect)
1008
+
1009
+ res = df.query('["a", "b"] != strings', engine=engine, parser=parser)
1010
+ tm.assert_frame_equal(res, expect)
1011
+
1012
+ def test_query_with_string_columns(self, parser, engine):
1013
+ df = DataFrame(
1014
+ {
1015
+ "a": list("aaaabbbbcccc"),
1016
+ "b": list("aabbccddeeff"),
1017
+ "c": np.random.default_rng(2).integers(5, size=12),
1018
+ "d": np.random.default_rng(2).integers(9, size=12),
1019
+ }
1020
+ )
1021
+ if parser == "pandas":
1022
+ res = df.query("a in b", parser=parser, engine=engine)
1023
+ expec = df[df.a.isin(df.b)]
1024
+ tm.assert_frame_equal(res, expec)
1025
+
1026
+ res = df.query("a in b and c < d", parser=parser, engine=engine)
1027
+ expec = df[df.a.isin(df.b) & (df.c < df.d)]
1028
+ tm.assert_frame_equal(res, expec)
1029
+ else:
1030
+ msg = r"'(Not)?In' nodes are not implemented"
1031
+ with pytest.raises(NotImplementedError, match=msg):
1032
+ df.query("a in b", parser=parser, engine=engine)
1033
+
1034
+ msg = r"'BoolOp' nodes are not implemented"
1035
+ with pytest.raises(NotImplementedError, match=msg):
1036
+ df.query("a in b and c < d", parser=parser, engine=engine)
1037
+
1038
+ def test_object_array_eq_ne(self, parser, engine, using_infer_string):
1039
+ df = DataFrame(
1040
+ {
1041
+ "a": list("aaaabbbbcccc"),
1042
+ "b": list("aabbccddeeff"),
1043
+ "c": np.random.default_rng(2).integers(5, size=12),
1044
+ "d": np.random.default_rng(2).integers(9, size=12),
1045
+ }
1046
+ )
1047
+ warning = RuntimeWarning if using_infer_string and engine == "numexpr" else None
1048
+ with tm.assert_produces_warning(warning):
1049
+ res = df.query("a == b", parser=parser, engine=engine)
1050
+ exp = df[df.a == df.b]
1051
+ tm.assert_frame_equal(res, exp)
1052
+
1053
+ with tm.assert_produces_warning(warning):
1054
+ res = df.query("a != b", parser=parser, engine=engine)
1055
+ exp = df[df.a != df.b]
1056
+ tm.assert_frame_equal(res, exp)
1057
+
1058
def test_query_with_nested_strings(self, parser, engine):
    """A quoted string containing quotes survives the 'in' rewrite."""
    skip_if_no_pandas_parser(parser)
    events = [
        f"page {n} {act}" for n in range(1, 4) for act in ["load", "exit"]
    ] * 2
    stamps1 = date_range("2014-01-01 0:00:01", freq="30s", periods=6)
    stamps2 = date_range("2014-02-01 1:00:01", freq="30s", periods=6)
    df = DataFrame(
        {
            "id": np.arange(1, 7).repeat(2),
            "event": events,
            "timestamp": stamps1.append(stamps2),
        }
    )

    expected = df[df.event == '"page 1 load"']
    result = df.query("""'"page 1 load"' in event""", parser=parser, engine=engine)
    tm.assert_frame_equal(expected, result)

def test_query_with_nested_special_character(self, parser, engine):
    """'&' inside a string literal is data, not an operator."""
    skip_if_no_pandas_parser(parser)
    df = DataFrame({"a": ["a", "b", "test & test"], "b": [1, 2, 3]})
    result = df.query('a == "test & test"', parser=parser, engine=engine)
    tm.assert_frame_equal(result, df[df.a == "test & test"])
1083
+
1084
@pytest.mark.parametrize(
    "op, func",
    [
        ["<", operator.lt],
        [">", operator.gt],
        ["<=", operator.le],
        [">=", operator.ge],
    ],
)
def test_query_lex_compare_strings(
    self, parser, engine, op, func, using_infer_string
):
    """Lexicographic ordering comparisons against a string scalar."""
    letters = Series(np.random.default_rng(2).choice(list("abcde"), 20))
    numbers = Series(np.arange(letters.size))
    df = DataFrame({"X": letters, "Y": numbers})

    # numexpr warns when falling back on inferred-string columns
    warning = RuntimeWarning if using_infer_string and engine == "numexpr" else None
    with tm.assert_produces_warning(warning):
        result = df.query(f'X {op} "d"', engine=engine, parser=parser)
    tm.assert_frame_equal(result, df[func(df.X, "d")])
1106
+ def test_query_single_element_booleans(self, parser, engine):
1107
+ columns = "bid", "bidsize", "ask", "asksize"
1108
+ data = np.random.default_rng(2).integers(2, size=(1, len(columns))).astype(bool)
1109
+ df = DataFrame(data, columns=columns)
1110
+ res = df.query("bid & ask", engine=engine, parser=parser)
1111
+ expected = df[df.bid & df.ask]
1112
+ tm.assert_frame_equal(res, expected)
1113
+
1114
def test_query_string_scalar_variable(self, parser, engine):
    """A '@local' string scalar compares against a string column."""
    skip_if_no_pandas_parser(parser)
    df = DataFrame(
        {
            "Symbol": ["BUD US", "BUD US", "IBM US", "IBM US"],
            "Price": [109.70, 109.72, 183.30, 183.35],
        }
    )
    expected = df[df.Symbol == "BUD US"]
    symb = "BUD US"  # noqa: F841
    result = df.query("Symbol == @symb", parser=parser, engine=engine)
    tm.assert_frame_equal(expected, result)
1126
+
1127
@pytest.mark.parametrize(
    "in_list",
    [
        [None, "asdf", "ghjk"],
        ["asdf", None, "ghjk"],
        ["asdf", "ghjk", None],
        [None, None, "asdf"],
        ["asdf", None, None],
        [None, None, None],
    ],
)
def test_query_string_null_elements(self, in_list):
    # GITHUB ISSUE #31516
    parser = "pandas"
    engine = "python"
    # Only the positions actually holding "asdf" survive the query;
    # nulls must never match == or <=.
    expected = {i: value for i, value in enumerate(in_list) if value == "asdf"}

    df_expected = DataFrame({"a": expected}, dtype="string")
    df_expected.index = df_expected.index.astype("int64")
    df = DataFrame({"a": in_list}, dtype="string")

    res_eq = df.query("a == 'asdf'", parser=parser, engine=engine)
    res_mask = df[df["a"] == "asdf"]
    res_le = df.query("a <= 'asdf'", parser=parser, engine=engine)
    tm.assert_frame_equal(res_eq, df_expected)
    tm.assert_frame_equal(res_eq, res_mask)
    tm.assert_frame_equal(res_eq, res_le)
    tm.assert_frame_equal(res_mask, res_le)
1154
+
1155
+
1156
class TestDataFrameEvalWithFrame:
    """DataFrame.eval on a small random numeric frame."""

    @pytest.fixture
    def frame(self):
        return DataFrame(
            np.random.default_rng(2).standard_normal((10, 3)), columns=list("abc")
        )

    def test_simple_expr(self, frame, parser, engine):
        result = frame.eval("a + b", engine=engine, parser=parser)
        tm.assert_series_equal(result, frame.a + frame.b)

    def test_bool_arith_expr(self, frame, parser, engine):
        # boolean indexing inside the expression
        result = frame.eval("a[a < 1] + b", engine=engine, parser=parser)
        tm.assert_series_equal(result, frame.a[frame.a < 1] + frame.b)

    @pytest.mark.parametrize("op", ["+", "-", "*", "/"])
    def test_invalid_type_for_operator_raises(self, parser, engine, op):
        df = DataFrame({"a": [1, 2], "b": ["c", "d"]})
        msg = r"unsupported operand type\(s\) for .+: '.+' and '.+'|Cannot"

        with pytest.raises(TypeError, match=msg):
            df.eval(f"a {op} b", engine=engine, parser=parser)
1180
+
1181
+
1182
class TestDataFrameQueryBacktickQuoting:
    @pytest.fixture
    def df(self):
        """
        Yields a dataframe with strings that may or may not need escaping
        by backticks. The last two columns cannot be escaped by backticks
        and should raise a ValueError.
        """
        # BUG FIX: the dict literal previously spelled the key "C C" twice,
        # so its first value ([4, 5, 6]) was silently discarded (later dict
        # entries overwrite earlier ones). The second entry is restored as
        # the distinct double-spaced name "C  C", matching upstream pandas;
        # the whitespace was presumably collapsed by a copy/rendering step.
        yield DataFrame(
            {
                "A": [1, 2, 3],
                "B B": [3, 2, 1],
                "C C": [4, 5, 6],
                "C  C": [7, 4, 3],
                "C_C": [8, 9, 10],
                "D_D D": [11, 1, 101],
                "E.E": [6, 3, 5],
                "F-F": [8, 1, 10],
                "1e1": [2, 4, 8],
                "def": [10, 11, 2],
                "A (x)": [4, 1, 3],
                "B(x)": [1, 1, 5],
                "B (x)": [2, 7, 4],
                " &^ :!€$?(} > <++*'' ": [2, 5, 6],
                "": [10, 11, 1],
                " A": [4, 7, 9],
                "  ": [1, 2, 1],
                "it's": [6, 3, 1],
                "that's": [9, 1, 8],
                "☺": [8, 7, 6],
                "foo#bar": [2, 4, 5],
                1: [5, 7, 9],
            }
        )
1216
+
1217
+ def test_single_backtick_variable_query(self, df):
1218
+ res = df.query("1 < `B B`")
1219
+ expect = df[1 < df["B B"]]
1220
+ tm.assert_frame_equal(res, expect)
1221
+
1222
+ def test_two_backtick_variables_query(self, df):
1223
+ res = df.query("1 < `B B` and 4 < `C C`")
1224
+ expect = df[(1 < df["B B"]) & (4 < df["C C"])]
1225
+ tm.assert_frame_equal(res, expect)
1226
+
1227
+ def test_single_backtick_variable_expr(self, df):
1228
+ res = df.eval("A + `B B`")
1229
+ expect = df["A"] + df["B B"]
1230
+ tm.assert_series_equal(res, expect)
1231
+
1232
+ def test_two_backtick_variables_expr(self, df):
1233
+ res = df.eval("`B B` + `C C`")
1234
+ expect = df["B B"] + df["C C"]
1235
+ tm.assert_series_equal(res, expect)
1236
+
1237
+ def test_already_underscore_variable(self, df):
1238
+ res = df.eval("`C_C` + A")
1239
+ expect = df["C_C"] + df["A"]
1240
+ tm.assert_series_equal(res, expect)
1241
+
1242
+ def test_same_name_but_underscores(self, df):
1243
+ res = df.eval("C_C + `C C`")
1244
+ expect = df["C_C"] + df["C C"]
1245
+ tm.assert_series_equal(res, expect)
1246
+
1247
+ def test_mixed_underscores_and_spaces(self, df):
1248
+ res = df.eval("A + `D_D D`")
1249
+ expect = df["A"] + df["D_D D"]
1250
+ tm.assert_series_equal(res, expect)
1251
+
1252
+ def test_backtick_quote_name_with_no_spaces(self, df):
1253
+ res = df.eval("A + `C_C`")
1254
+ expect = df["A"] + df["C_C"]
1255
+ tm.assert_series_equal(res, expect)
1256
+
1257
+ def test_special_characters(self, df):
1258
+ res = df.eval("`E.E` + `F-F` - A")
1259
+ expect = df["E.E"] + df["F-F"] - df["A"]
1260
+ tm.assert_series_equal(res, expect)
1261
+
1262
+ def test_start_with_digit(self, df):
1263
+ res = df.eval("A + `1e1`")
1264
+ expect = df["A"] + df["1e1"]
1265
+ tm.assert_series_equal(res, expect)
1266
+
1267
+ def test_keyword(self, df):
1268
+ res = df.eval("A + `def`")
1269
+ expect = df["A"] + df["def"]
1270
+ tm.assert_series_equal(res, expect)
1271
+
1272
+ def test_unneeded_quoting(self, df):
1273
+ res = df.query("`A` > 2")
1274
+ expect = df[df["A"] > 2]
1275
+ tm.assert_frame_equal(res, expect)
1276
+
1277
+ def test_parenthesis(self, df):
1278
+ res = df.query("`A (x)` > 2")
1279
+ expect = df[df["A (x)"] > 2]
1280
+ tm.assert_frame_equal(res, expect)
1281
+
1282
+ def test_empty_string(self, df):
1283
+ res = df.query("`` > 5")
1284
+ expect = df[df[""] > 5]
1285
+ tm.assert_frame_equal(res, expect)
1286
+
1287
+ def test_multiple_spaces(self, df):
1288
+ res = df.query("`C C` > 5")
1289
+ expect = df[df["C C"] > 5]
1290
+ tm.assert_frame_equal(res, expect)
1291
+
1292
+ def test_start_with_spaces(self, df):
1293
+ res = df.eval("` A` + ` `")
1294
+ expect = df[" A"] + df[" "]
1295
+ tm.assert_series_equal(res, expect)
1296
+
1297
+ def test_lots_of_operators_string(self, df):
1298
+ res = df.query("` &^ :!€$?(} > <++*'' ` > 4")
1299
+ expect = df[df[" &^ :!€$?(} > <++*'' "] > 4]
1300
+ tm.assert_frame_equal(res, expect)
1301
+
1302
+ def test_missing_attribute(self, df):
1303
+ message = "module 'pandas' has no attribute 'thing'"
1304
+ with pytest.raises(AttributeError, match=message):
1305
+ df.eval("@pd.thing")
1306
+
1307
+ def test_failing_quote(self, df):
1308
+ msg = r"(Could not convert ).*( to a valid Python identifier.)"
1309
+ with pytest.raises(SyntaxError, match=msg):
1310
+ df.query("`it's` > `that's`")
1311
+
1312
+ def test_failing_character_outside_range(self, df):
1313
+ msg = r"(Could not convert ).*( to a valid Python identifier.)"
1314
+ with pytest.raises(SyntaxError, match=msg):
1315
+ df.query("`☺` > 4")
1316
+
1317
+ def test_failing_hashtag(self, df):
1318
+ msg = "Failed to parse backticks"
1319
+ with pytest.raises(SyntaxError, match=msg):
1320
+ df.query("`foo#bar` > 4")
1321
+
1322
+ def test_call_non_named_expression(self, df):
1323
+ """
1324
+ Only attributes and variables ('named functions') can be called.
1325
+ .__call__() is not an allowed attribute because that would allow
1326
+ calling anything.
1327
+ https://github.com/pandas-dev/pandas/pull/32460
1328
+ """
1329
+
1330
+ def func(*_):
1331
+ return 1
1332
+
1333
+ funcs = [func] # noqa: F841
1334
+
1335
+ df.eval("@func()")
1336
+
1337
+ with pytest.raises(TypeError, match="Only named functions are supported"):
1338
+ df.eval("@funcs[0]()")
1339
+
1340
+ with pytest.raises(TypeError, match="Only named functions are supported"):
1341
+ df.eval("@funcs[0].__call__()")
1342
+
1343
+ def test_ea_dtypes(self, any_numeric_ea_and_arrow_dtype):
1344
+ # GH#29618
1345
+ df = DataFrame(
1346
+ [[1, 2], [3, 4]], columns=["a", "b"], dtype=any_numeric_ea_and_arrow_dtype
1347
+ )
1348
+ warning = RuntimeWarning if NUMEXPR_INSTALLED else None
1349
+ with tm.assert_produces_warning(warning):
1350
+ result = df.eval("c = b - a")
1351
+ expected = DataFrame(
1352
+ [[1, 2, 1], [3, 4, 1]],
1353
+ columns=["a", "b", "c"],
1354
+ dtype=any_numeric_ea_and_arrow_dtype,
1355
+ )
1356
+ tm.assert_frame_equal(result, expected)
1357
+
1358
+ def test_ea_dtypes_and_scalar(self):
1359
+ # GH#29618
1360
+ df = DataFrame([[1, 2], [3, 4]], columns=["a", "b"], dtype="Float64")
1361
+ warning = RuntimeWarning if NUMEXPR_INSTALLED else None
1362
+ with tm.assert_produces_warning(warning):
1363
+ result = df.eval("c = b - 1")
1364
+ expected = DataFrame(
1365
+ [[1, 2, 1], [3, 4, 3]], columns=["a", "b", "c"], dtype="Float64"
1366
+ )
1367
+ tm.assert_frame_equal(result, expected)
1368
+
1369
+ def test_ea_dtypes_and_scalar_operation(self, any_numeric_ea_and_arrow_dtype):
1370
+ # GH#29618
1371
+ df = DataFrame(
1372
+ [[1, 2], [3, 4]], columns=["a", "b"], dtype=any_numeric_ea_and_arrow_dtype
1373
+ )
1374
+ result = df.eval("c = 2 - 1")
1375
+ expected = DataFrame(
1376
+ {
1377
+ "a": Series([1, 3], dtype=any_numeric_ea_and_arrow_dtype),
1378
+ "b": Series([2, 4], dtype=any_numeric_ea_and_arrow_dtype),
1379
+ "c": Series([1, 1], dtype=result["c"].dtype),
1380
+ }
1381
+ )
1382
+ tm.assert_frame_equal(result, expected)
1383
+
1384
+ @pytest.mark.parametrize("dtype", ["int64", "Int64", "int64[pyarrow]"])
1385
+ def test_query_ea_dtypes(self, dtype):
1386
+ if dtype == "int64[pyarrow]":
1387
+ pytest.importorskip("pyarrow")
1388
+ # GH#50261
1389
+ df = DataFrame({"a": Series([1, 2], dtype=dtype)})
1390
+ ref = {2} # noqa: F841
1391
+ warning = RuntimeWarning if dtype == "Int64" and NUMEXPR_INSTALLED else None
1392
+ with tm.assert_produces_warning(warning):
1393
+ result = df.query("a in @ref")
1394
+ expected = DataFrame({"a": Series([2], dtype=dtype, index=[1])})
1395
+ tm.assert_frame_equal(result, expected)
1396
+
1397
+ @pytest.mark.parametrize("engine", ["python", "numexpr"])
1398
+ @pytest.mark.parametrize("dtype", ["int64", "Int64", "int64[pyarrow]"])
1399
+ def test_query_ea_equality_comparison(self, dtype, engine):
1400
+ # GH#50261
1401
+ warning = RuntimeWarning if engine == "numexpr" else None
1402
+ if engine == "numexpr" and not NUMEXPR_INSTALLED:
1403
+ pytest.skip("numexpr not installed")
1404
+ if dtype == "int64[pyarrow]":
1405
+ pytest.importorskip("pyarrow")
1406
+ df = DataFrame(
1407
+ {"A": Series([1, 1, 2], dtype="Int64"), "B": Series([1, 2, 2], dtype=dtype)}
1408
+ )
1409
+ with tm.assert_produces_warning(warning):
1410
+ result = df.query("A == B", engine=engine)
1411
+ expected = DataFrame(
1412
+ {
1413
+ "A": Series([1, 2], dtype="Int64", index=[0, 2]),
1414
+ "B": Series([1, 2], dtype=dtype, index=[0, 2]),
1415
+ }
1416
+ )
1417
+ tm.assert_frame_equal(result, expected)
1418
+
1419
+ def test_all_nat_in_object(self):
1420
+ # GH#57068
1421
+ now = pd.Timestamp.now("UTC") # noqa: F841
1422
+ df = DataFrame({"a": pd.to_datetime([None, None], utc=True)}, dtype=object)
1423
+ result = df.query("a > @now")
1424
+ expected = DataFrame({"a": []}, dtype=object)
1425
+ tm.assert_frame_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_reductions.py ADDED
@@ -0,0 +1,2157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import timedelta
2
+ from decimal import Decimal
3
+ import re
4
+
5
+ from dateutil.tz import tzlocal
6
+ import numpy as np
7
+ import pytest
8
+
9
+ from pandas._config import using_pyarrow_string_dtype
10
+
11
+ from pandas.compat import (
12
+ IS64,
13
+ is_platform_windows,
14
+ )
15
+ from pandas.compat.numpy import np_version_gt2
16
+ import pandas.util._test_decorators as td
17
+
18
+ import pandas as pd
19
+ from pandas import (
20
+ Categorical,
21
+ CategoricalDtype,
22
+ DataFrame,
23
+ DatetimeIndex,
24
+ Index,
25
+ PeriodIndex,
26
+ RangeIndex,
27
+ Series,
28
+ Timestamp,
29
+ date_range,
30
+ isna,
31
+ notna,
32
+ to_datetime,
33
+ to_timedelta,
34
+ )
35
+ import pandas._testing as tm
36
+ from pandas.core import (
37
+ algorithms,
38
+ nanops,
39
+ )
40
+
41
+ is_windows_np2_or_is32 = (is_platform_windows() and not np_version_gt2) or not IS64
42
+ is_windows_or_is32 = is_platform_windows() or not IS64
43
+
44
+
45
+ def make_skipna_wrapper(alternative, skipna_alternative=None):
46
+ """
47
+ Create a function for calling on an array.
48
+
49
+ Parameters
50
+ ----------
51
+ alternative : function
52
+ The function to be called on the array with no NaNs.
53
+ Only used when 'skipna_alternative' is None.
54
+ skipna_alternative : function
55
+ The function to be called on the original array
56
+
57
+ Returns
58
+ -------
59
+ function
60
+ """
61
+ if skipna_alternative:
62
+
63
+ def skipna_wrapper(x):
64
+ return skipna_alternative(x.values)
65
+
66
+ else:
67
+
68
+ def skipna_wrapper(x):
69
+ nona = x.dropna()
70
+ if len(nona) == 0:
71
+ return np.nan
72
+ return alternative(nona)
73
+
74
+ return skipna_wrapper
75
+
76
+
77
+ def assert_stat_op_calc(
78
+ opname,
79
+ alternative,
80
+ frame,
81
+ has_skipna=True,
82
+ check_dtype=True,
83
+ check_dates=False,
84
+ rtol=1e-5,
85
+ atol=1e-8,
86
+ skipna_alternative=None,
87
+ ):
88
+ """
89
+ Check that operator opname works as advertised on frame
90
+
91
+ Parameters
92
+ ----------
93
+ opname : str
94
+ Name of the operator to test on frame
95
+ alternative : function
96
+ Function that opname is tested against; i.e. "frame.opname()" should
97
+ equal "alternative(frame)".
98
+ frame : DataFrame
99
+ The object that the tests are executed on
100
+ has_skipna : bool, default True
101
+ Whether the method "opname" has the kwarg "skip_na"
102
+ check_dtype : bool, default True
103
+ Whether the dtypes of the result of "frame.opname()" and
104
+ "alternative(frame)" should be checked.
105
+ check_dates : bool, default false
106
+ Whether opname should be tested on a Datetime Series
107
+ rtol : float, default 1e-5
108
+ Relative tolerance.
109
+ atol : float, default 1e-8
110
+ Absolute tolerance.
111
+ skipna_alternative : function, default None
112
+ NaN-safe version of alternative
113
+ """
114
+ f = getattr(frame, opname)
115
+
116
+ if check_dates:
117
+ df = DataFrame({"b": date_range("1/1/2001", periods=2)})
118
+ with tm.assert_produces_warning(None):
119
+ result = getattr(df, opname)()
120
+ assert isinstance(result, Series)
121
+
122
+ df["a"] = range(len(df))
123
+ with tm.assert_produces_warning(None):
124
+ result = getattr(df, opname)()
125
+ assert isinstance(result, Series)
126
+ assert len(result)
127
+
128
+ if has_skipna:
129
+
130
+ def wrapper(x):
131
+ return alternative(x.values)
132
+
133
+ skipna_wrapper = make_skipna_wrapper(alternative, skipna_alternative)
134
+ result0 = f(axis=0, skipna=False)
135
+ result1 = f(axis=1, skipna=False)
136
+ tm.assert_series_equal(
137
+ result0, frame.apply(wrapper), check_dtype=check_dtype, rtol=rtol, atol=atol
138
+ )
139
+ tm.assert_series_equal(
140
+ result1,
141
+ frame.apply(wrapper, axis=1),
142
+ rtol=rtol,
143
+ atol=atol,
144
+ )
145
+ else:
146
+ skipna_wrapper = alternative
147
+
148
+ result0 = f(axis=0)
149
+ result1 = f(axis=1)
150
+ tm.assert_series_equal(
151
+ result0,
152
+ frame.apply(skipna_wrapper),
153
+ check_dtype=check_dtype,
154
+ rtol=rtol,
155
+ atol=atol,
156
+ )
157
+
158
+ if opname in ["sum", "prod"]:
159
+ expected = frame.apply(skipna_wrapper, axis=1)
160
+ tm.assert_series_equal(
161
+ result1, expected, check_dtype=False, rtol=rtol, atol=atol
162
+ )
163
+
164
+ # check dtypes
165
+ if check_dtype:
166
+ lcd_dtype = frame.values.dtype
167
+ assert lcd_dtype == result0.dtype
168
+ assert lcd_dtype == result1.dtype
169
+
170
+ # bad axis
171
+ with pytest.raises(ValueError, match="No axis named 2"):
172
+ f(axis=2)
173
+
174
+ # all NA case
175
+ if has_skipna:
176
+ all_na = frame * np.nan
177
+ r0 = getattr(all_na, opname)(axis=0)
178
+ r1 = getattr(all_na, opname)(axis=1)
179
+ if opname in ["sum", "prod"]:
180
+ unit = 1 if opname == "prod" else 0 # result for empty sum/prod
181
+ expected = Series(unit, index=r0.index, dtype=r0.dtype)
182
+ tm.assert_series_equal(r0, expected)
183
+ expected = Series(unit, index=r1.index, dtype=r1.dtype)
184
+ tm.assert_series_equal(r1, expected)
185
+
186
+
187
+ @pytest.fixture
188
+ def bool_frame_with_na():
189
+ """
190
+ Fixture for DataFrame of booleans with index of unique strings
191
+
192
+ Columns are ['A', 'B', 'C', 'D']; some entries are missing
193
+ """
194
+ df = DataFrame(
195
+ np.concatenate(
196
+ [np.ones((15, 4), dtype=bool), np.zeros((15, 4), dtype=bool)], axis=0
197
+ ),
198
+ index=Index([f"foo_{i}" for i in range(30)], dtype=object),
199
+ columns=Index(list("ABCD"), dtype=object),
200
+ dtype=object,
201
+ )
202
+ # set some NAs
203
+ df.iloc[5:10] = np.nan
204
+ df.iloc[15:20, -2:] = np.nan
205
+ return df
206
+
207
+
208
+ @pytest.fixture
209
+ def float_frame_with_na():
210
+ """
211
+ Fixture for DataFrame of floats with index of unique strings
212
+
213
+ Columns are ['A', 'B', 'C', 'D']; some entries are missing
214
+ """
215
+ df = DataFrame(
216
+ np.random.default_rng(2).standard_normal((30, 4)),
217
+ index=Index([f"foo_{i}" for i in range(30)], dtype=object),
218
+ columns=Index(list("ABCD"), dtype=object),
219
+ )
220
+ # set some NAs
221
+ df.iloc[5:10] = np.nan
222
+ df.iloc[15:20, -2:] = np.nan
223
+ return df
224
+
225
+
226
+ class TestDataFrameAnalytics:
227
+ # ---------------------------------------------------------------------
228
+ # Reductions
229
+ @pytest.mark.parametrize("axis", [0, 1])
230
+ @pytest.mark.parametrize(
231
+ "opname",
232
+ [
233
+ "count",
234
+ "sum",
235
+ "mean",
236
+ "product",
237
+ "median",
238
+ "min",
239
+ "max",
240
+ "nunique",
241
+ "var",
242
+ "std",
243
+ "sem",
244
+ pytest.param("skew", marks=td.skip_if_no("scipy")),
245
+ pytest.param("kurt", marks=td.skip_if_no("scipy")),
246
+ ],
247
+ )
248
+ def test_stat_op_api_float_string_frame(
249
+ self, float_string_frame, axis, opname, using_infer_string
250
+ ):
251
+ if (
252
+ (opname in ("sum", "min", "max") and axis == 0)
253
+ or opname
254
+ in (
255
+ "count",
256
+ "nunique",
257
+ )
258
+ ) and not (using_infer_string and opname == "sum"):
259
+ getattr(float_string_frame, opname)(axis=axis)
260
+ else:
261
+ if opname in ["var", "std", "sem", "skew", "kurt"]:
262
+ msg = "could not convert string to float: 'bar'"
263
+ elif opname == "product":
264
+ if axis == 1:
265
+ msg = "can't multiply sequence by non-int of type 'float'"
266
+ else:
267
+ msg = "can't multiply sequence by non-int of type 'str'"
268
+ elif opname == "sum":
269
+ msg = r"unsupported operand type\(s\) for \+: 'float' and 'str'"
270
+ elif opname == "mean":
271
+ if axis == 0:
272
+ # different message on different builds
273
+ msg = "|".join(
274
+ [
275
+ r"Could not convert \['.*'\] to numeric",
276
+ "Could not convert string '(bar){30}' to numeric",
277
+ ]
278
+ )
279
+ else:
280
+ msg = r"unsupported operand type\(s\) for \+: 'float' and 'str'"
281
+ elif opname in ["min", "max"]:
282
+ msg = "'[><]=' not supported between instances of 'float' and 'str'"
283
+ elif opname == "median":
284
+ msg = re.compile(
285
+ r"Cannot convert \[.*\] to numeric|does not support", flags=re.S
286
+ )
287
+ if not isinstance(msg, re.Pattern):
288
+ msg = msg + "|does not support"
289
+ with pytest.raises(TypeError, match=msg):
290
+ getattr(float_string_frame, opname)(axis=axis)
291
+ if opname != "nunique":
292
+ getattr(float_string_frame, opname)(axis=axis, numeric_only=True)
293
+
294
+ @pytest.mark.parametrize("axis", [0, 1])
295
+ @pytest.mark.parametrize(
296
+ "opname",
297
+ [
298
+ "count",
299
+ "sum",
300
+ "mean",
301
+ "product",
302
+ "median",
303
+ "min",
304
+ "max",
305
+ "var",
306
+ "std",
307
+ "sem",
308
+ pytest.param("skew", marks=td.skip_if_no("scipy")),
309
+ pytest.param("kurt", marks=td.skip_if_no("scipy")),
310
+ ],
311
+ )
312
+ def test_stat_op_api_float_frame(self, float_frame, axis, opname):
313
+ getattr(float_frame, opname)(axis=axis, numeric_only=False)
314
+
315
+ def test_stat_op_calc(self, float_frame_with_na, mixed_float_frame):
316
+ def count(s):
317
+ return notna(s).sum()
318
+
319
+ def nunique(s):
320
+ return len(algorithms.unique1d(s.dropna()))
321
+
322
+ def var(x):
323
+ return np.var(x, ddof=1)
324
+
325
+ def std(x):
326
+ return np.std(x, ddof=1)
327
+
328
+ def sem(x):
329
+ return np.std(x, ddof=1) / np.sqrt(len(x))
330
+
331
+ assert_stat_op_calc(
332
+ "nunique",
333
+ nunique,
334
+ float_frame_with_na,
335
+ has_skipna=False,
336
+ check_dtype=False,
337
+ check_dates=True,
338
+ )
339
+
340
+ # GH#32571: rol needed for flaky CI builds
341
+ # mixed types (with upcasting happening)
342
+ assert_stat_op_calc(
343
+ "sum",
344
+ np.sum,
345
+ mixed_float_frame.astype("float32"),
346
+ check_dtype=False,
347
+ rtol=1e-3,
348
+ )
349
+
350
+ assert_stat_op_calc(
351
+ "sum", np.sum, float_frame_with_na, skipna_alternative=np.nansum
352
+ )
353
+ assert_stat_op_calc("mean", np.mean, float_frame_with_na, check_dates=True)
354
+ assert_stat_op_calc(
355
+ "product", np.prod, float_frame_with_na, skipna_alternative=np.nanprod
356
+ )
357
+
358
+ assert_stat_op_calc("var", var, float_frame_with_na)
359
+ assert_stat_op_calc("std", std, float_frame_with_na)
360
+ assert_stat_op_calc("sem", sem, float_frame_with_na)
361
+
362
+ assert_stat_op_calc(
363
+ "count",
364
+ count,
365
+ float_frame_with_na,
366
+ has_skipna=False,
367
+ check_dtype=False,
368
+ check_dates=True,
369
+ )
370
+
371
+ def test_stat_op_calc_skew_kurtosis(self, float_frame_with_na):
372
+ sp_stats = pytest.importorskip("scipy.stats")
373
+
374
+ def skewness(x):
375
+ if len(x) < 3:
376
+ return np.nan
377
+ return sp_stats.skew(x, bias=False)
378
+
379
+ def kurt(x):
380
+ if len(x) < 4:
381
+ return np.nan
382
+ return sp_stats.kurtosis(x, bias=False)
383
+
384
+ assert_stat_op_calc("skew", skewness, float_frame_with_na)
385
+ assert_stat_op_calc("kurt", kurt, float_frame_with_na)
386
+
387
+ def test_median(self, float_frame_with_na, int_frame):
388
+ def wrapper(x):
389
+ if isna(x).any():
390
+ return np.nan
391
+ return np.median(x)
392
+
393
+ assert_stat_op_calc("median", wrapper, float_frame_with_na, check_dates=True)
394
+ assert_stat_op_calc(
395
+ "median", wrapper, int_frame, check_dtype=False, check_dates=True
396
+ )
397
+
398
+ @pytest.mark.parametrize(
399
+ "method", ["sum", "mean", "prod", "var", "std", "skew", "min", "max"]
400
+ )
401
+ @pytest.mark.parametrize(
402
+ "df",
403
+ [
404
+ DataFrame(
405
+ {
406
+ "a": [
407
+ -0.00049987540199591344,
408
+ -0.0016467257772919831,
409
+ 0.00067695870775883013,
410
+ ],
411
+ "b": [-0, -0, 0.0],
412
+ "c": [
413
+ 0.00031111847529610595,
414
+ 0.0014902627951905339,
415
+ -0.00094099200035979691,
416
+ ],
417
+ },
418
+ index=["foo", "bar", "baz"],
419
+ dtype="O",
420
+ ),
421
+ DataFrame({0: [np.nan, 2], 1: [np.nan, 3], 2: [np.nan, 4]}, dtype=object),
422
+ ],
423
+ )
424
+ @pytest.mark.filterwarnings("ignore:Mismatched null-like values:FutureWarning")
425
+ def test_stat_operators_attempt_obj_array(self, method, df, axis):
426
+ # GH#676
427
+ assert df.values.dtype == np.object_
428
+ result = getattr(df, method)(axis=axis)
429
+ expected = getattr(df.astype("f8"), method)(axis=axis).astype(object)
430
+ if axis in [1, "columns"] and method in ["min", "max"]:
431
+ expected[expected.isna()] = None
432
+ tm.assert_series_equal(result, expected)
433
+
434
+ @pytest.mark.parametrize("op", ["mean", "std", "var", "skew", "kurt", "sem"])
435
+ def test_mixed_ops(self, op):
436
+ # GH#16116
437
+ df = DataFrame(
438
+ {
439
+ "int": [1, 2, 3, 4],
440
+ "float": [1.0, 2.0, 3.0, 4.0],
441
+ "str": ["a", "b", "c", "d"],
442
+ }
443
+ )
444
+ msg = "|".join(
445
+ [
446
+ "Could not convert",
447
+ "could not convert",
448
+ "can't multiply sequence by non-int",
449
+ "does not support",
450
+ ]
451
+ )
452
+ with pytest.raises(TypeError, match=msg):
453
+ getattr(df, op)()
454
+
455
+ with pd.option_context("use_bottleneck", False):
456
+ msg = "|".join(
457
+ [
458
+ "Could not convert",
459
+ "could not convert",
460
+ "can't multiply sequence by non-int",
461
+ "does not support",
462
+ ]
463
+ )
464
+ with pytest.raises(TypeError, match=msg):
465
+ getattr(df, op)()
466
+
467
+ @pytest.mark.xfail(
468
+ using_pyarrow_string_dtype(), reason="sum doesn't work for arrow strings"
469
+ )
470
+ def test_reduce_mixed_frame(self):
471
+ # GH 6806
472
+ df = DataFrame(
473
+ {
474
+ "bool_data": [True, True, False, False, False],
475
+ "int_data": [10, 20, 30, 40, 50],
476
+ "string_data": ["a", "b", "c", "d", "e"],
477
+ }
478
+ )
479
+ df.reindex(columns=["bool_data", "int_data", "string_data"])
480
+ test = df.sum(axis=0)
481
+ tm.assert_numpy_array_equal(
482
+ test.values, np.array([2, 150, "abcde"], dtype=object)
483
+ )
484
+ alt = df.T.sum(axis=1)
485
+ tm.assert_series_equal(test, alt)
486
+
487
+ def test_nunique(self):
488
+ df = DataFrame({"A": [1, 1, 1], "B": [1, 2, 3], "C": [1, np.nan, 3]})
489
+ tm.assert_series_equal(df.nunique(), Series({"A": 1, "B": 3, "C": 2}))
490
+ tm.assert_series_equal(
491
+ df.nunique(dropna=False), Series({"A": 1, "B": 3, "C": 3})
492
+ )
493
+ tm.assert_series_equal(df.nunique(axis=1), Series({0: 1, 1: 2, 2: 2}))
494
+ tm.assert_series_equal(
495
+ df.nunique(axis=1, dropna=False), Series({0: 1, 1: 3, 2: 2})
496
+ )
497
+
498
+ @pytest.mark.parametrize("tz", [None, "UTC"])
499
+ def test_mean_mixed_datetime_numeric(self, tz):
500
+ # https://github.com/pandas-dev/pandas/issues/24752
501
+ df = DataFrame({"A": [1, 1], "B": [Timestamp("2000", tz=tz)] * 2})
502
+ result = df.mean()
503
+ expected = Series([1.0, Timestamp("2000", tz=tz)], index=["A", "B"])
504
+ tm.assert_series_equal(result, expected)
505
+
506
+ @pytest.mark.parametrize("tz", [None, "UTC"])
507
+ def test_mean_includes_datetimes(self, tz):
508
+ # https://github.com/pandas-dev/pandas/issues/24752
509
+ # Behavior in 0.24.0rc1 was buggy.
510
+ # As of 2.0 with numeric_only=None we do *not* drop datetime columns
511
+ df = DataFrame({"A": [Timestamp("2000", tz=tz)] * 2})
512
+ result = df.mean()
513
+
514
+ expected = Series([Timestamp("2000", tz=tz)], index=["A"])
515
+ tm.assert_series_equal(result, expected)
516
+
517
+ def test_mean_mixed_string_decimal(self):
518
+ # GH 11670
519
+ # possible bug when calculating mean of DataFrame?
520
+
521
+ d = [
522
+ {"A": 2, "B": None, "C": Decimal("628.00")},
523
+ {"A": 1, "B": None, "C": Decimal("383.00")},
524
+ {"A": 3, "B": None, "C": Decimal("651.00")},
525
+ {"A": 2, "B": None, "C": Decimal("575.00")},
526
+ {"A": 4, "B": None, "C": Decimal("1114.00")},
527
+ {"A": 1, "B": "TEST", "C": Decimal("241.00")},
528
+ {"A": 2, "B": None, "C": Decimal("572.00")},
529
+ {"A": 4, "B": None, "C": Decimal("609.00")},
530
+ {"A": 3, "B": None, "C": Decimal("820.00")},
531
+ {"A": 5, "B": None, "C": Decimal("1223.00")},
532
+ ]
533
+
534
+ df = DataFrame(d)
535
+
536
+ with pytest.raises(
537
+ TypeError, match="unsupported operand type|does not support"
538
+ ):
539
+ df.mean()
540
+ result = df[["A", "C"]].mean()
541
+ expected = Series([2.7, 681.6], index=["A", "C"], dtype=object)
542
+ tm.assert_series_equal(result, expected)
543
+
544
+ def test_var_std(self, datetime_frame):
545
+ result = datetime_frame.std(ddof=4)
546
+ expected = datetime_frame.apply(lambda x: x.std(ddof=4))
547
+ tm.assert_almost_equal(result, expected)
548
+
549
+ result = datetime_frame.var(ddof=4)
550
+ expected = datetime_frame.apply(lambda x: x.var(ddof=4))
551
+ tm.assert_almost_equal(result, expected)
552
+
553
+ arr = np.repeat(np.random.default_rng(2).random((1, 1000)), 1000, 0)
554
+ result = nanops.nanvar(arr, axis=0)
555
+ assert not (result < 0).any()
556
+
557
+ with pd.option_context("use_bottleneck", False):
558
+ result = nanops.nanvar(arr, axis=0)
559
+ assert not (result < 0).any()
560
+
561
+ @pytest.mark.parametrize("meth", ["sem", "var", "std"])
562
+ def test_numeric_only_flag(self, meth):
563
+ # GH 9201
564
+ df1 = DataFrame(
565
+ np.random.default_rng(2).standard_normal((5, 3)),
566
+ columns=["foo", "bar", "baz"],
567
+ )
568
+ # Cast to object to avoid implicit cast when setting entry to "100" below
569
+ df1 = df1.astype({"foo": object})
570
+ # set one entry to a number in str format
571
+ df1.loc[0, "foo"] = "100"
572
+
573
+ df2 = DataFrame(
574
+ np.random.default_rng(2).standard_normal((5, 3)),
575
+ columns=["foo", "bar", "baz"],
576
+ )
577
+ # Cast to object to avoid implicit cast when setting entry to "a" below
578
+ df2 = df2.astype({"foo": object})
579
+ # set one entry to a non-number str
580
+ df2.loc[0, "foo"] = "a"
581
+
582
+ result = getattr(df1, meth)(axis=1, numeric_only=True)
583
+ expected = getattr(df1[["bar", "baz"]], meth)(axis=1)
584
+ tm.assert_series_equal(expected, result)
585
+
586
+ result = getattr(df2, meth)(axis=1, numeric_only=True)
587
+ expected = getattr(df2[["bar", "baz"]], meth)(axis=1)
588
+ tm.assert_series_equal(expected, result)
589
+
590
+ # df1 has all numbers, df2 has a letter inside
591
+ msg = r"unsupported operand type\(s\) for -: 'float' and 'str'"
592
+ with pytest.raises(TypeError, match=msg):
593
+ getattr(df1, meth)(axis=1, numeric_only=False)
594
+ msg = "could not convert string to float: 'a'"
595
+ with pytest.raises(TypeError, match=msg):
596
+ getattr(df2, meth)(axis=1, numeric_only=False)
597
+
598
+ def test_sem(self, datetime_frame):
599
+ result = datetime_frame.sem(ddof=4)
600
+ expected = datetime_frame.apply(lambda x: x.std(ddof=4) / np.sqrt(len(x)))
601
+ tm.assert_almost_equal(result, expected)
602
+
603
+ arr = np.repeat(np.random.default_rng(2).random((1, 1000)), 1000, 0)
604
+ result = nanops.nansem(arr, axis=0)
605
+ assert not (result < 0).any()
606
+
607
+ with pd.option_context("use_bottleneck", False):
608
+ result = nanops.nansem(arr, axis=0)
609
+ assert not (result < 0).any()
610
+
611
    @pytest.mark.parametrize(
        "dropna, expected",
        [
            # dropna=True: NA values are ignored when counting modes
            (
                True,
                {
                    "A": [12],
                    "B": [10.0],
                    "C": [1.0],
                    "D": ["a"],
                    "E": Categorical(["a"], categories=["a"]),
                    "F": DatetimeIndex(["2000-01-02"], dtype="M8[ns]"),
                    "G": to_timedelta(["1 days"]),
                },
            ),
            # dropna=False: NaN/NaT can itself be the mode
            (
                False,
                {
                    "A": [12],
                    "B": [10.0],
                    "C": [np.nan],
                    "D": np.array([np.nan], dtype=object),
                    "E": Categorical([np.nan], categories=["a"]),
                    "F": DatetimeIndex([pd.NaT], dtype="M8[ns]"),
                    "G": to_timedelta([pd.NaT]),
                },
            ),
            # multimodal columns: result has one row per tied mode,
            # shorter columns padded with NaN/NaT
            (
                True,
                {
                    "H": [8, 9, np.nan, np.nan],
                    "I": [8, 9, np.nan, np.nan],
                    "J": [1, np.nan, np.nan, np.nan],
                    "K": Categorical(["a", np.nan, np.nan, np.nan], categories=["a"]),
                    "L": DatetimeIndex(
                        ["2000-01-02", "NaT", "NaT", "NaT"], dtype="M8[ns]"
                    ),
                    "M": to_timedelta(["1 days", "nan", "nan", "nan"]),
                    "N": [0, 1, 2, 3],
                },
            ),
            (
                False,
                {
                    "H": [8, 9, np.nan, np.nan],
                    "I": [8, 9, np.nan, np.nan],
                    "J": [1, np.nan, np.nan, np.nan],
                    "K": Categorical([np.nan, "a", np.nan, np.nan], categories=["a"]),
                    "L": DatetimeIndex(
                        ["NaT", "2000-01-02", "NaT", "NaT"], dtype="M8[ns]"
                    ),
                    "M": to_timedelta(["nan", "1 days", "nan", "nan"]),
                    "N": [0, 1, 2, 3],
                },
            ),
        ],
    )
    def test_mode_dropna(self, dropna, expected):
        """DataFrame.mode honors ``dropna`` across numeric, object,
        categorical, datetime and timedelta columns."""
        df = DataFrame(
            {
                "A": [12, 12, 19, 11],
                "B": [10, 10, np.nan, 3],
                "C": [1, np.nan, np.nan, np.nan],
                "D": Series([np.nan, np.nan, "a", np.nan], dtype=object),
                "E": Categorical([np.nan, np.nan, "a", np.nan]),
                "F": DatetimeIndex(["NaT", "2000-01-02", "NaT", "NaT"], dtype="M8[ns]"),
                "G": to_timedelta(["1 days", "nan", "nan", "nan"]),
                "H": [8, 8, 9, 9],
                "I": [9, 9, 8, 8],
                "J": [1, 1, np.nan, np.nan],
                "K": Categorical(["a", np.nan, "a", np.nan]),
                "L": DatetimeIndex(
                    ["2000-01-02", "2000-01-02", "NaT", "NaT"], dtype="M8[ns]"
                ),
                "M": to_timedelta(["1 days", "nan", "1 days", "nan"]),
                "N": np.arange(4, dtype="int64"),
            }
        )

        # only the columns named in ``expected`` participate in this case
        result = df[sorted(expected.keys())].mode(dropna=dropna)
        expected = DataFrame(expected)
        tm.assert_frame_equal(result, expected)
693
+
694
    def test_mode_sortwarning(self, using_infer_string):
        """mode() warns (UserWarning) when its result cannot be sorted,
        i.e. an object column mixing NaN and strings; under the future
        string dtype no warning is expected."""
        # Check for the warning that is raised when the mode
        # results cannot be sorted

        df = DataFrame({"A": [np.nan, np.nan, "a", "a"]})
        expected = DataFrame({"A": ["a", np.nan]})

        warning = None if using_infer_string else UserWarning
        with tm.assert_produces_warning(warning):
            result = df.mode(dropna=False)
            result = result.sort_values(by="A").reset_index(drop=True)

        tm.assert_frame_equal(result, expected)
707
+
708
+ def test_mode_empty_df(self):
709
+ df = DataFrame([], columns=["a", "b"])
710
+ result = df.mode()
711
+ expected = DataFrame([], columns=["a", "b"], index=Index([], dtype=np.int64))
712
+ tm.assert_frame_equal(result, expected)
713
+
714
    def test_operators_timedelta64(self):
        """min/max/abs on timedelta64 columns, including mixed-dtype frames,
        plus timedelta arithmetic between datetime columns (GH 3106)."""
        df = DataFrame(
            {
                "A": date_range("2012-1-1", periods=3, freq="D"),
                "B": date_range("2012-1-2", periods=3, freq="D"),
                "C": Timestamp("20120101") - timedelta(minutes=5, seconds=5),
            }
        )

        # A - C is increasing positive, A - B is constant -1 day
        diffs = DataFrame({"A": df["A"] - df["C"], "B": df["A"] - df["B"]})

        # min
        result = diffs.min()
        assert result.iloc[0] == diffs.loc[0, "A"]
        assert result.iloc[1] == diffs.loc[0, "B"]

        result = diffs.min(axis=1)
        assert (result == diffs.loc[0, "B"]).all()

        # max
        result = diffs.max()
        assert result.iloc[0] == diffs.loc[2, "A"]
        assert result.iloc[1] == diffs.loc[2, "B"]

        result = diffs.max(axis=1)
        assert (result == diffs["A"]).all()

        # abs
        result = diffs.abs()
        result2 = abs(diffs)
        expected = DataFrame({"A": df["A"] - df["C"], "B": df["B"] - df["A"]})
        tm.assert_frame_equal(result, expected)
        tm.assert_frame_equal(result2, expected)

        # mixed frame
        mixed = diffs.copy()
        mixed["C"] = "foo"
        mixed["D"] = 1
        mixed["E"] = 1.0
        mixed["F"] = Timestamp("20130101")

        # results in an object array
        result = mixed.min()
        expected = Series(
            [
                pd.Timedelta(timedelta(seconds=5 * 60 + 5)),
                pd.Timedelta(timedelta(days=-1)),
                "foo",
                1,
                1.0,
                Timestamp("20130101"),
            ],
            index=mixed.columns,
        )
        tm.assert_series_equal(result, expected)

        # excludes non-numeric
        result = mixed.min(axis=1, numeric_only=True)
        expected = Series([1, 1, 1.0], index=[0, 1, 2])
        tm.assert_series_equal(result, expected)

        # works when only those columns are selected
        result = mixed[["A", "B"]].min(1)
        expected = Series([timedelta(days=-1)] * 3)
        tm.assert_series_equal(result, expected)

        result = mixed[["A", "B"]].min()
        expected = Series(
            [timedelta(seconds=5 * 60 + 5), timedelta(days=-1)], index=["A", "B"]
        )
        tm.assert_series_equal(result, expected)

        # GH 3106: datetime - datetime columns stay timedelta64[ns],
        # also after block consolidation
        df = DataFrame(
            {
                "time": date_range("20130102", periods=5),
                "time2": date_range("20130105", periods=5),
            }
        )
        df["off1"] = df["time2"] - df["time"]
        assert df["off1"].dtype == "timedelta64[ns]"

        df["off2"] = df["time"] - df["time2"]
        df._consolidate_inplace()
        assert df["off1"].dtype == "timedelta64[ns]"
        assert df["off2"].dtype == "timedelta64[ns]"
800
+
801
+ def test_std_timedelta64_skipna_false(self):
802
+ # GH#37392
803
+ tdi = pd.timedelta_range("1 Day", periods=10)
804
+ df = DataFrame({"A": tdi, "B": tdi}, copy=True)
805
+ df.iloc[-2, -1] = pd.NaT
806
+
807
+ result = df.std(skipna=False)
808
+ expected = Series(
809
+ [df["A"].std(), pd.NaT], index=["A", "B"], dtype="timedelta64[ns]"
810
+ )
811
+ tm.assert_series_equal(result, expected)
812
+
813
+ result = df.std(axis=1, skipna=False)
814
+ expected = Series([pd.Timedelta(0)] * 8 + [pd.NaT, pd.Timedelta(0)])
815
+ tm.assert_series_equal(result, expected)
816
+
817
    @pytest.mark.parametrize(
        "values", [["2022-01-01", "2022-01-02", pd.NaT, "2022-01-03"], 4 * [pd.NaT]]
    )
    def test_std_datetime64_with_nat(
        self, values, skipna, using_array_manager, request, unit
    ):
        """std of a datetime column returns a timedelta in the same unit;
        skipna=False or an all-NaT column yields NaT (GH#51335)."""
        # GH#51335
        if using_array_manager and (
            not skipna or all(value is pd.NaT for value in values)
        ):
            # ArrayManager mis-infers the NaT result type; expected failure
            mark = pytest.mark.xfail(
                reason="GH#51446: Incorrect type inference on NaT in reduction result"
            )
            request.applymarker(mark)
        dti = to_datetime(values).as_unit(unit)
        df = DataFrame({"a": dti})
        result = df.std(skipna=skipna)
        if not skipna or all(value is pd.NaT for value in values):
            expected = Series({"a": pd.NaT}, dtype=f"timedelta64[{unit}]")
        else:
            # 86400000000000ns == 1 day
            expected = Series({"a": 86400000000000}, dtype=f"timedelta64[{unit}]")
        tm.assert_series_equal(result, expected)
840
+
841
+ def test_sum_corner(self):
842
+ empty_frame = DataFrame()
843
+
844
+ axis0 = empty_frame.sum(0)
845
+ axis1 = empty_frame.sum(1)
846
+ assert isinstance(axis0, Series)
847
+ assert isinstance(axis1, Series)
848
+ assert len(axis0) == 0
849
+ assert len(axis1) == 0
850
+
851
    @pytest.mark.parametrize(
        "index",
        [
            RangeIndex(0),
            DatetimeIndex([]),
            Index([], dtype=np.int64),
            Index([], dtype=np.float64),
            DatetimeIndex([], freq="ME"),
            PeriodIndex([], freq="D"),
        ],
    )
    def test_axis_1_empty(self, all_reductions, index):
        """Row-wise reductions on an empty frame return an empty Series whose
        dtype depends on the reduction (bool for any/all, int64 for count,
        object otherwise) — GH53265 family."""
        df = DataFrame(columns=["a"], index=index)
        result = getattr(df, all_reductions)(axis=1)
        if all_reductions in ("any", "all"):
            expected_dtype = "bool"
        elif all_reductions == "count":
            expected_dtype = "int64"
        else:
            expected_dtype = "object"
        expected = Series([], index=index, dtype=expected_dtype)
        tm.assert_series_equal(result, expected)
873
+
874
+ @pytest.mark.parametrize("method, unit", [("sum", 0), ("prod", 1)])
875
+ @pytest.mark.parametrize("numeric_only", [None, True, False])
876
+ def test_sum_prod_nanops(self, method, unit, numeric_only):
877
+ idx = ["a", "b", "c"]
878
+ df = DataFrame({"a": [unit, unit], "b": [unit, np.nan], "c": [np.nan, np.nan]})
879
+ # The default
880
+ result = getattr(df, method)(numeric_only=numeric_only)
881
+ expected = Series([unit, unit, unit], index=idx, dtype="float64")
882
+ tm.assert_series_equal(result, expected)
883
+
884
+ # min_count=1
885
+ result = getattr(df, method)(numeric_only=numeric_only, min_count=1)
886
+ expected = Series([unit, unit, np.nan], index=idx)
887
+ tm.assert_series_equal(result, expected)
888
+
889
+ # min_count=0
890
+ result = getattr(df, method)(numeric_only=numeric_only, min_count=0)
891
+ expected = Series([unit, unit, unit], index=idx, dtype="float64")
892
+ tm.assert_series_equal(result, expected)
893
+
894
+ result = getattr(df.iloc[1:], method)(numeric_only=numeric_only, min_count=1)
895
+ expected = Series([unit, np.nan, np.nan], index=idx)
896
+ tm.assert_series_equal(result, expected)
897
+
898
+ # min_count > 1
899
+ df = DataFrame({"A": [unit] * 10, "B": [unit] * 5 + [np.nan] * 5})
900
+ result = getattr(df, method)(numeric_only=numeric_only, min_count=5)
901
+ expected = Series(result, index=["A", "B"])
902
+ tm.assert_series_equal(result, expected)
903
+
904
+ result = getattr(df, method)(numeric_only=numeric_only, min_count=6)
905
+ expected = Series(result, index=["A", "B"])
906
+ tm.assert_series_equal(result, expected)
907
+
908
+ def test_sum_nanops_timedelta(self):
909
+ # prod isn't defined on timedeltas
910
+ idx = ["a", "b", "c"]
911
+ df = DataFrame({"a": [0, 0], "b": [0, np.nan], "c": [np.nan, np.nan]})
912
+
913
+ df2 = df.apply(to_timedelta)
914
+
915
+ # 0 by default
916
+ result = df2.sum()
917
+ expected = Series([0, 0, 0], dtype="m8[ns]", index=idx)
918
+ tm.assert_series_equal(result, expected)
919
+
920
+ # min_count=0
921
+ result = df2.sum(min_count=0)
922
+ tm.assert_series_equal(result, expected)
923
+
924
+ # min_count=1
925
+ result = df2.sum(min_count=1)
926
+ expected = Series([0, 0, np.nan], dtype="m8[ns]", index=idx)
927
+ tm.assert_series_equal(result, expected)
928
+
929
+ def test_sum_nanops_min_count(self):
930
+ # https://github.com/pandas-dev/pandas/issues/39738
931
+ df = DataFrame({"x": [1, 2, 3], "y": [4, 5, 6]})
932
+ result = df.sum(min_count=10)
933
+ expected = Series([np.nan, np.nan], index=["x", "y"])
934
+ tm.assert_series_equal(result, expected)
935
+
936
    @pytest.mark.parametrize("float_type", ["float16", "float32", "float64"])
    @pytest.mark.parametrize(
        "kwargs, expected_result",
        [
            ({"axis": 1, "min_count": 2}, [3.2, 5.3, np.nan]),
            ({"axis": 1, "min_count": 3}, [np.nan, np.nan, np.nan]),
            ({"axis": 1, "skipna": False}, [3.2, 5.3, np.nan]),
        ],
    )
    def test_sum_nanops_dtype_min_count(self, float_type, kwargs, expected_result):
        """Row-wise sum preserves the input float dtype under
        min_count/skipna (GH#46947)."""
        # GH#46947
        df = DataFrame({"a": [1.0, 2.3, 4.4], "b": [2.2, 3, np.nan]}, dtype=float_type)
        result = df.sum(**kwargs)
        expected = Series(expected_result).astype(float_type)
        tm.assert_series_equal(result, expected)
951
+
952
    @pytest.mark.parametrize("float_type", ["float16", "float32", "float64"])
    @pytest.mark.parametrize(
        "kwargs, expected_result",
        [
            ({"axis": 1, "min_count": 2}, [2.0, 4.0, np.nan]),
            ({"axis": 1, "min_count": 3}, [np.nan, np.nan, np.nan]),
            ({"axis": 1, "skipna": False}, [2.0, 4.0, np.nan]),
        ],
    )
    def test_prod_nanops_dtype_min_count(self, float_type, kwargs, expected_result):
        """Row-wise prod preserves the input float dtype under
        min_count/skipna (GH#46947)."""
        # GH#46947
        df = DataFrame(
            {"a": [1.0, 2.0, 4.4], "b": [2.0, 2.0, np.nan]}, dtype=float_type
        )
        result = df.prod(**kwargs)
        expected = Series(expected_result).astype(float_type)
        tm.assert_series_equal(result, expected)
969
+
970
    def test_sum_object(self, float_frame):
        """Smoke test: summing a frame of timedelta objects does not raise."""
        values = float_frame.values.astype(int)
        frame = DataFrame(values, index=float_frame.index, columns=float_frame.columns)
        # int * timedelta produces timedelta cells
        deltas = frame * timedelta(1)
        deltas.sum()
975
+
976
    def test_sum_bool(self, float_frame):
        """Smoke test: boolean frames can be summed along both axes."""
        # ensure this works, bug report
        bools = np.isnan(float_frame)
        bools.sum(1)
        bools.sum(0)
981
+
982
    def test_sum_mixed_datetime(self):
        """Summing a frame that mixes datetimes with ints raises TypeError
        (GH#30886)."""
        # GH#30886
        df = DataFrame({"A": date_range("2000", periods=4), "B": [1, 2, 3, 4]}).reindex(
            [2, 3, 4]
        )
        with pytest.raises(TypeError, match="does not support reduction 'sum'"):
            df.sum()
989
+
990
    def test_mean_corner(self, float_frame, float_string_frame):
        """mean on mixed object frames raises; boolean columns average
        as 0/1."""
        # unit test when have object data
        msg = "Could not convert|does not support"
        with pytest.raises(TypeError, match=msg):
            float_string_frame.mean(axis=0)

        # xs sum mixed type, just want to know it works...
        with pytest.raises(TypeError, match="unsupported operand type"):
            float_string_frame.mean(axis=1)

        # take mean of boolean column
        float_frame["bool"] = float_frame["A"] > 0
        means = float_frame.mean(0)
        assert means["bool"] == float_frame["bool"].values.mean()
1004
+
1005
    def test_mean_datetimelike(self):
        """numeric_only=True keeps only the numeric column; the default
        raises because PeriodArray defines no mean."""
        # GH#24757 check that datetimelike are excluded by default, handled
        # correctly with numeric_only=True
        # As of 2.0, datetimelike are *not* excluded with numeric_only=None

        df = DataFrame(
            {
                "A": np.arange(3),
                "B": date_range("2016-01-01", periods=3),
                "C": pd.timedelta_range("1D", periods=3),
                "D": pd.period_range("2016", periods=3, freq="Y"),
            }
        )
        result = df.mean(numeric_only=True)
        expected = Series({"A": 1.0})
        tm.assert_series_equal(result, expected)

        with pytest.raises(TypeError, match="mean is not implemented for PeriodArray"):
            df.mean()
1024
+
1025
    def test_mean_datetimelike_numeric_only_false(self):
        """With numeric_only=False, datetime and timedelta means are
        computed; Period columns still raise."""
        df = DataFrame(
            {
                "A": np.arange(3),
                "B": date_range("2016-01-01", periods=3),
                "C": pd.timedelta_range("1D", periods=3),
            }
        )

        # datetime(tz) and timedelta work
        result = df.mean(numeric_only=False)
        expected = Series({"A": 1, "B": df.loc[1, "B"], "C": df.loc[1, "C"]})
        tm.assert_series_equal(result, expected)

        # mean of period is not allowed
        df["D"] = pd.period_range("2016", periods=3, freq="Y")

        with pytest.raises(TypeError, match="mean is not implemented for Period"):
            df.mean(numeric_only=False)
1044
+
1045
+ def test_mean_extensionarray_numeric_only_true(self):
1046
+ # https://github.com/pandas-dev/pandas/issues/33256
1047
+ arr = np.random.default_rng(2).integers(1000, size=(10, 5))
1048
+ df = DataFrame(arr, dtype="Int64")
1049
+ result = df.mean(numeric_only=True)
1050
+ expected = DataFrame(arr).mean().astype("Float64")
1051
+ tm.assert_series_equal(result, expected)
1052
+
1053
    def test_stats_mixed_type(self, float_string_frame):
        """Row-wise numeric reductions on a frame with string columns raise
        TypeError with a conversion message."""
        with pytest.raises(TypeError, match="could not convert"):
            float_string_frame.std(1)
        with pytest.raises(TypeError, match="could not convert"):
            float_string_frame.var(1)
        with pytest.raises(TypeError, match="unsupported operand type"):
            float_string_frame.mean(1)
        with pytest.raises(TypeError, match="could not convert"):
            float_string_frame.skew(1)
1062
+
1063
+ def test_sum_bools(self):
1064
+ df = DataFrame(index=range(1), columns=range(10))
1065
+ bools = isna(df)
1066
+ assert bools.sum(axis=1)[0] == 10
1067
+
1068
    # ----------------------------------------------------------------------
    # Index of max / min

    @pytest.mark.parametrize("skipna", [True, False])
    @pytest.mark.parametrize("axis", [0, 1])
    def test_idxmin(self, float_frame, int_frame, skipna, axis):
        """idxmin matches a per-column/row Series.idxmin apply; all-NA
        slices with skipna=False currently emit a FutureWarning (not for
        the NA-free int frame)."""
        frame = float_frame
        frame.iloc[5:10] = np.nan
        frame.iloc[15:20, -2:] = np.nan
        for df in [frame, int_frame]:
            warn = None
            if skipna is False or axis == 1:
                warn = None if df is int_frame else FutureWarning
            msg = "The behavior of DataFrame.idxmin with all-NA values"
            with tm.assert_produces_warning(warn, match=msg):
                result = df.idxmin(axis=axis, skipna=skipna)

            msg2 = "The behavior of Series.idxmin"
            with tm.assert_produces_warning(warn, match=msg2):
                expected = df.apply(Series.idxmin, axis=axis, skipna=skipna)
            expected = expected.astype(df.index.dtype)
            tm.assert_series_equal(result, expected)
1090
+
1091
    @pytest.mark.parametrize("axis", [0, 1])
    @pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
    def test_idxmin_empty(self, index, skipna, axis):
        """idxmin on an empty axis returns an empty Series of the index
        dtype (GH53265)."""
        # GH53265
        if axis == 0:
            frame = DataFrame(index=index)
        else:
            frame = DataFrame(columns=index)

        result = frame.idxmin(axis=axis, skipna=skipna)
        expected = Series(dtype=index.dtype)
        tm.assert_series_equal(result, expected)
1103
+
1104
    @pytest.mark.parametrize("numeric_only", [True, False])
    def test_idxmin_numeric_only(self, numeric_only):
        """numeric_only=True drops the string column from idxmin."""
        df = DataFrame({"a": [2, 3, 1], "b": [2, 1, 1], "c": list("xyx")})
        result = df.idxmin(numeric_only=numeric_only)
        if numeric_only:
            expected = Series([2, 1], index=["a", "b"])
        else:
            expected = Series([2, 1, 0], index=["a", "b", "c"])
        tm.assert_series_equal(result, expected)
1113
+
1114
    def test_idxmin_axis_2(self, float_frame):
        """An out-of-range axis raises ValueError."""
        frame = float_frame
        msg = "No axis named 2 for object type DataFrame"
        with pytest.raises(ValueError, match=msg):
            frame.idxmin(axis=2)
1119
+
1120
    @pytest.mark.parametrize("skipna", [True, False])
    @pytest.mark.parametrize("axis", [0, 1])
    def test_idxmax(self, float_frame, int_frame, skipna, axis):
        """idxmax matches a per-column/row Series.idxmax apply; all-NA
        slices with skipna=False currently emit a FutureWarning (not for
        the NA-free int frame)."""
        frame = float_frame
        frame.iloc[5:10] = np.nan
        frame.iloc[15:20, -2:] = np.nan
        for df in [frame, int_frame]:
            warn = None
            if skipna is False or axis == 1:
                warn = None if df is int_frame else FutureWarning
            msg = "The behavior of DataFrame.idxmax with all-NA values"
            with tm.assert_produces_warning(warn, match=msg):
                result = df.idxmax(axis=axis, skipna=skipna)

            msg2 = "The behavior of Series.idxmax"
            with tm.assert_produces_warning(warn, match=msg2):
                expected = df.apply(Series.idxmax, axis=axis, skipna=skipna)
            expected = expected.astype(df.index.dtype)
            tm.assert_series_equal(result, expected)
1139
+
1140
    @pytest.mark.parametrize("axis", [0, 1])
    @pytest.mark.filterwarnings(r"ignore:PeriodDtype\[B\] is deprecated:FutureWarning")
    def test_idxmax_empty(self, index, skipna, axis):
        """idxmax on an empty axis returns an empty Series of the index
        dtype (GH53265)."""
        # GH53265
        if axis == 0:
            frame = DataFrame(index=index)
        else:
            frame = DataFrame(columns=index)

        result = frame.idxmax(axis=axis, skipna=skipna)
        expected = Series(dtype=index.dtype)
        tm.assert_series_equal(result, expected)
1152
+
1153
    @pytest.mark.parametrize("numeric_only", [True, False])
    def test_idxmax_numeric_only(self, numeric_only):
        """numeric_only=True drops the string column from idxmax."""
        df = DataFrame({"a": [2, 3, 1], "b": [2, 1, 1], "c": list("xyx")})
        result = df.idxmax(numeric_only=numeric_only)
        if numeric_only:
            expected = Series([1, 0], index=["a", "b"])
        else:
            expected = Series([1, 0, 1], index=["a", "b", "c"])
        tm.assert_series_equal(result, expected)
1162
+
1163
    def test_idxmax_arrow_types(self):
        """idxmax/idxmin work on pyarrow-backed int and string columns
        (GH#55368); skipped when pyarrow is unavailable."""
        # GH#55368
        pytest.importorskip("pyarrow")

        df = DataFrame({"a": [2, 3, 1], "b": [2, 1, 1]}, dtype="int64[pyarrow]")
        result = df.idxmax()
        expected = Series([1, 0], index=["a", "b"])
        tm.assert_series_equal(result, expected)

        result = df.idxmin()
        expected = Series([2, 1], index=["a", "b"])
        tm.assert_series_equal(result, expected)

        df = DataFrame({"a": ["b", "c", "a"]}, dtype="string[pyarrow]")
        result = df.idxmax(numeric_only=False)
        expected = Series([1], index=["a"])
        tm.assert_series_equal(result, expected)

        result = df.idxmin(numeric_only=False)
        expected = Series([2], index=["a"])
        tm.assert_series_equal(result, expected)
1184
+
1185
    def test_idxmax_axis_2(self, float_frame):
        """An out-of-range axis raises ValueError."""
        frame = float_frame
        msg = "No axis named 2 for object type DataFrame"
        with pytest.raises(ValueError, match=msg):
            frame.idxmax(axis=2)
1190
+
1191
+ def test_idxmax_mixed_dtype(self):
1192
+ # don't cast to object, which would raise in nanops
1193
+ dti = date_range("2016-01-01", periods=3)
1194
+
1195
+ # Copying dti is needed for ArrayManager otherwise when we set
1196
+ # df.loc[0, 3] = pd.NaT below it edits dti
1197
+ df = DataFrame({1: [0, 2, 1], 2: range(3)[::-1], 3: dti.copy(deep=True)})
1198
+
1199
+ result = df.idxmax()
1200
+ expected = Series([1, 0, 2], index=[1, 2, 3])
1201
+ tm.assert_series_equal(result, expected)
1202
+
1203
+ result = df.idxmin()
1204
+ expected = Series([0, 2, 0], index=[1, 2, 3])
1205
+ tm.assert_series_equal(result, expected)
1206
+
1207
+ # with NaTs
1208
+ df.loc[0, 3] = pd.NaT
1209
+ result = df.idxmax()
1210
+ expected = Series([1, 0, 2], index=[1, 2, 3])
1211
+ tm.assert_series_equal(result, expected)
1212
+
1213
+ result = df.idxmin()
1214
+ expected = Series([0, 2, 1], index=[1, 2, 3])
1215
+ tm.assert_series_equal(result, expected)
1216
+
1217
+ # with multi-column dt64 block
1218
+ df[4] = dti[::-1]
1219
+ df._consolidate_inplace()
1220
+
1221
+ result = df.idxmax()
1222
+ expected = Series([1, 0, 2, 0], index=[1, 2, 3, 4])
1223
+ tm.assert_series_equal(result, expected)
1224
+
1225
+ result = df.idxmin()
1226
+ expected = Series([0, 2, 1, 2], index=[1, 2, 3, 4])
1227
+ tm.assert_series_equal(result, expected)
1228
+
1229
    @pytest.mark.parametrize(
        "op, expected_value",
        [("idxmax", [0, 4]), ("idxmin", [0, 5])],
    )
    def test_idxmax_idxmin_convert_dtypes(self, op, expected_value):
        """Groupby idxmax/idxmin preserve the nullable Int64 index dtype
        (GH 40346)."""
        # GH 40346
        df = DataFrame(
            {
                "ID": [100, 100, 100, 200, 200, 200],
                "value": [0, 0, 0, 1, 2, 0],
            },
            dtype="Int64",
        )
        df = df.groupby("ID")

        result = getattr(df, op)()
        expected = DataFrame(
            {"value": expected_value},
            index=Index([100, 200], name="ID", dtype="Int64"),
        )
        tm.assert_frame_equal(result, expected)
1250
+
1251
+ def test_idxmax_dt64_multicolumn_axis1(self):
1252
+ dti = date_range("2016-01-01", periods=3)
1253
+ df = DataFrame({3: dti, 4: dti[::-1]}, copy=True)
1254
+ df.iloc[0, 0] = pd.NaT
1255
+
1256
+ df._consolidate_inplace()
1257
+
1258
+ result = df.idxmax(axis=1)
1259
+ expected = Series([4, 3, 3])
1260
+ tm.assert_series_equal(result, expected)
1261
+
1262
+ result = df.idxmin(axis=1)
1263
+ expected = Series([4, 3, 4])
1264
+ tm.assert_series_equal(result, expected)
1265
+
1266
    # ----------------------------------------------------------------------
    # Logical reductions

    @pytest.mark.parametrize("opname", ["any", "all"])
    @pytest.mark.parametrize("axis", [0, 1])
    @pytest.mark.parametrize("bool_only", [False, True])
    def test_any_all_mixed_float(self, opname, axis, bool_only, float_string_frame):
        """Smoke test: any/all run on a mixed-dtype frame for every
        axis/bool_only combination."""
        # make sure op works on mixed-type frame
        mixed = float_string_frame
        mixed["_bool_"] = np.random.default_rng(2).standard_normal(len(mixed)) > 0.5

        getattr(mixed, opname)(axis=axis, bool_only=bool_only)
1278
+
1279
    @pytest.mark.parametrize("opname", ["any", "all"])
    @pytest.mark.parametrize("axis", [0, 1])
    def test_any_all_bool_with_na(self, opname, axis, bool_frame_with_na):
        """Smoke test: any/all tolerate NA values in boolean frames."""
        getattr(bool_frame_with_na, opname)(axis=axis, bool_only=False)
1283
+
1284
    @pytest.mark.filterwarnings("ignore:Downcasting object dtype arrays:FutureWarning")
    @pytest.mark.parametrize("opname", ["any", "all"])
    def test_any_all_bool_frame(self, opname, bool_frame_with_na):
        """DataFrame.any/all agree with the numpy equivalents column- and
        row-wise, reject invalid axes, and reduce all-NA frames to the
        op's identity."""
        # GH#12863: numpy gives back non-boolean data for object type
        # so fill NaNs to compare with pandas behavior
        frame = bool_frame_with_na.fillna(True)
        alternative = getattr(np, opname)
        f = getattr(frame, opname)

        def skipna_wrapper(x):
            # mimic skipna=True: reduce only the non-NA values
            nona = x.dropna().values
            return alternative(nona)

        def wrapper(x):
            # mimic skipna=False: reduce the raw values
            return alternative(x.values)

        result0 = f(axis=0, skipna=False)
        result1 = f(axis=1, skipna=False)

        tm.assert_series_equal(result0, frame.apply(wrapper))
        tm.assert_series_equal(result1, frame.apply(wrapper, axis=1))

        result0 = f(axis=0)
        result1 = f(axis=1)

        tm.assert_series_equal(result0, frame.apply(skipna_wrapper))
        tm.assert_series_equal(
            result1, frame.apply(skipna_wrapper, axis=1), check_dtype=False
        )

        # bad axis
        with pytest.raises(ValueError, match="No axis named 2"):
            f(axis=2)

        # all NA case: any -> all False, all -> all True
        all_na = frame * np.nan
        r0 = getattr(all_na, opname)(axis=0)
        r1 = getattr(all_na, opname)(axis=1)
        if opname == "any":
            assert not r0.any()
            assert not r1.any()
        else:
            assert r0.all()
            assert r1.all()
1328
+
1329
+ def test_any_all_extra(self):
1330
+ df = DataFrame(
1331
+ {
1332
+ "A": [True, False, False],
1333
+ "B": [True, True, False],
1334
+ "C": [True, True, True],
1335
+ },
1336
+ index=["a", "b", "c"],
1337
+ )
1338
+ result = df[["A", "B"]].any(axis=1)
1339
+ expected = Series([True, True, False], index=["a", "b", "c"])
1340
+ tm.assert_series_equal(result, expected)
1341
+
1342
+ result = df[["A", "B"]].any(axis=1, bool_only=True)
1343
+ tm.assert_series_equal(result, expected)
1344
+
1345
+ result = df.all(1)
1346
+ expected = Series([True, False, False], index=["a", "b", "c"])
1347
+ tm.assert_series_equal(result, expected)
1348
+
1349
+ result = df.all(1, bool_only=True)
1350
+ tm.assert_series_equal(result, expected)
1351
+
1352
+ # Axis is None
1353
+ result = df.all(axis=None).item()
1354
+ assert result is False
1355
+
1356
+ result = df.any(axis=None).item()
1357
+ assert result is True
1358
+
1359
+ result = df[["C"]].all(axis=None).item()
1360
+ assert result is True
1361
+
1362
    @pytest.mark.parametrize("axis", [0, 1])
    @pytest.mark.parametrize("bool_agg_func", ["any", "all"])
    @pytest.mark.parametrize("skipna", [True, False])
    def test_any_all_object_dtype(
        self, axis, bool_agg_func, skipna, using_infer_string
    ):
        """any/all on object columns treat NaN as truthy (GH#35450), except
        under the future string dtype where it is falsy."""
        # GH#35450
        df = DataFrame(
            data=[
                [1, np.nan, np.nan, True],
                [np.nan, 2, np.nan, True],
                [np.nan, np.nan, np.nan, True],
                [np.nan, np.nan, "5", np.nan],
            ]
        )
        if using_infer_string:
            # na in object is True while in string pyarrow numpy it's false
            val = not axis == 0 and not skipna and bool_agg_func == "all"
        else:
            val = True
        result = getattr(df, bool_agg_func)(axis=axis, skipna=skipna)
        expected = Series([True, True, val, True])
        tm.assert_series_equal(result, expected)
1385
+
1386
    # GH#50947 deprecates this but it is not emitting a warning in some builds.
    @pytest.mark.filterwarnings(
        "ignore:'any' with datetime64 dtypes is deprecated.*:FutureWarning"
    )
    def test_any_datetime(self):
        """Row-wise any treats NaT as falsy and real timestamps as truthy
        (GH 23070)."""
        # GH 23070
        float_data = [1, np.nan, 3, np.nan]
        datetime_data = [
            Timestamp("1960-02-15"),
            Timestamp("1960-02-16"),
            pd.NaT,
            pd.NaT,
        ]
        df = DataFrame({"A": float_data, "B": datetime_data})

        result = df.any(axis=1)

        expected = Series([True, True, True, False])
        tm.assert_series_equal(result, expected)
1405
+
1406
    def test_any_all_bool_only(self):
        """bool_only=True restricts all() to genuinely boolean columns;
        with no bool column the result is empty (GH 25101)."""
        # GH 25101
        df = DataFrame(
            {"col1": [1, 2, 3], "col2": [4, 5, 6], "col3": [None, None, None]},
            columns=Index(["col1", "col2", "col3"], dtype=object),
        )

        # no boolean column -> empty bool Series
        result = df.all(bool_only=True)
        expected = Series(dtype=np.bool_, index=[])
        tm.assert_series_equal(result, expected)

        df = DataFrame(
            {
                "col1": [1, 2, 3],
                "col2": [4, 5, 6],
                "col3": [None, None, None],
                "col4": [False, False, True],
            }
        )

        # only the bool column participates
        result = df.all(bool_only=True)
        expected = Series({"col4": False})
        tm.assert_series_equal(result, expected)
1429
+
1430
    @pytest.mark.parametrize(
        "func, data, expected",
        [
            (np.any, {}, False),
            (np.all, {}, True),
            (np.any, {"A": []}, False),
            (np.all, {"A": []}, True),
            (np.any, {"A": [False, False]}, False),
            (np.all, {"A": [False, False]}, False),
            (np.any, {"A": [True, False]}, True),
            (np.all, {"A": [True, False]}, False),
            (np.any, {"A": [True, True]}, True),
            (np.all, {"A": [True, True]}, True),
            (np.any, {"A": [False], "B": [False]}, False),
            (np.all, {"A": [False], "B": [False]}, False),
            (np.any, {"A": [False, False], "B": [False, True]}, True),
            (np.all, {"A": [False, False], "B": [False, True]}, False),
            # other types
            (np.all, {"A": Series([0.0, 1.0], dtype="float")}, False),
            (np.any, {"A": Series([0.0, 1.0], dtype="float")}, True),
            (np.all, {"A": Series([0, 1], dtype=int)}, False),
            (np.any, {"A": Series([0, 1], dtype=int)}, True),
            pytest.param(np.all, {"A": Series([0, 1], dtype="M8[ns]")}, False),
            pytest.param(np.all, {"A": Series([0, 1], dtype="M8[ns, UTC]")}, False),
            pytest.param(np.any, {"A": Series([0, 1], dtype="M8[ns]")}, True),
            pytest.param(np.any, {"A": Series([0, 1], dtype="M8[ns, UTC]")}, True),
            pytest.param(np.all, {"A": Series([1, 2], dtype="M8[ns]")}, True),
            pytest.param(np.all, {"A": Series([1, 2], dtype="M8[ns, UTC]")}, True),
            pytest.param(np.any, {"A": Series([1, 2], dtype="M8[ns]")}, True),
            pytest.param(np.any, {"A": Series([1, 2], dtype="M8[ns, UTC]")}, True),
            pytest.param(np.all, {"A": Series([0, 1], dtype="m8[ns]")}, False),
            pytest.param(np.any, {"A": Series([0, 1], dtype="m8[ns]")}, True),
            pytest.param(np.all, {"A": Series([1, 2], dtype="m8[ns]")}, True),
            pytest.param(np.any, {"A": Series([1, 2], dtype="m8[ns]")}, True),
            # np.all on Categorical raises, so the reduction drops the
            # column, so all is being done on an empty Series, so is True
            (np.all, {"A": Series([0, 1], dtype="category")}, True),
            (np.any, {"A": Series([0, 1], dtype="category")}, False),
            (np.all, {"A": Series([1, 2], dtype="category")}, True),
            (np.any, {"A": Series([1, 2], dtype="category")}, False),
            # Mix GH#21484
            pytest.param(
                np.all,
                {
                    "A": Series([10, 20], dtype="M8[ns]"),
                    "B": Series([10, 20], dtype="m8[ns]"),
                },
                True,
            ),
        ],
    )
    def test_any_all_np_func(self, func, data, expected):
        """np.any/np.all dispatch to DataFrame.any/all (GH 19976); datetime
        dtypes emit a deprecation warning, Categorical raises TypeError."""
        # GH 19976
        data = DataFrame(data)

        if any(isinstance(x, CategoricalDtype) for x in data.dtypes):
            with pytest.raises(
                TypeError, match="dtype category does not support reduction"
            ):
                func(data)

            # method version
            with pytest.raises(
                TypeError, match="dtype category does not support reduction"
            ):
                getattr(DataFrame(data), func.__name__)(axis=None)
        else:
            msg = "'(any|all)' with datetime64 dtypes is deprecated"
            if data.dtypes.apply(lambda x: x.kind == "M").any():
                warn = FutureWarning
            else:
                warn = None

            with tm.assert_produces_warning(warn, match=msg, check_stacklevel=False):
                # GH#34479
                result = func(data)
            assert isinstance(result, np.bool_)
            assert result.item() is expected

            # method version
            with tm.assert_produces_warning(warn, match=msg):
                # GH#34479
                result = getattr(DataFrame(data), func.__name__)(axis=None)
            assert isinstance(result, np.bool_)
            assert result.item() is expected
1515
+
1516
+ def test_any_all_object(self):
1517
+ # GH 19976
1518
+ result = np.all(DataFrame(columns=["a", "b"])).item()
1519
+ assert result is True
1520
+
1521
+ result = np.any(DataFrame(columns=["a", "b"])).item()
1522
+ assert result is False
1523
+
1524
    def test_any_all_object_bool_only(self):
        """bool_only selects only genuine bool columns; Categorical[bool]
        and object-cast columns are excluded (exercised both through
        DataFrame._get_bool_data and through any/all)."""
        df = DataFrame({"A": ["foo", 2], "B": [True, False]}).astype(object)
        df._consolidate_inplace()
        df["C"] = Series([True, True])

        # Categorical of bools is _not_ considered booly
        df["D"] = df["C"].astype("category")

        # The underlying bug is in DataFrame._get_bool_data, so we check
        # that while we're here
        res = df._get_bool_data()
        expected = df[["C"]]
        tm.assert_frame_equal(res, expected)

        res = df.all(bool_only=True, axis=0)
        expected = Series([True], index=["C"])
        tm.assert_series_equal(res, expected)

        # operating on a subset of columns should not produce a _larger_ Series
        res = df[["B", "C"]].all(bool_only=True, axis=0)
        tm.assert_series_equal(res, expected)

        assert df.all(bool_only=True, axis=None)

        res = df.any(bool_only=True, axis=0)
        expected = Series([True], index=["C"])
        tm.assert_series_equal(res, expected)

        # operating on a subset of columns should not produce a _larger_ Series
        res = df[["C"]].any(bool_only=True, axis=0)
        tm.assert_series_equal(res, expected)

        assert df.any(bool_only=True, axis=None)
1557
+
1558
+ # ---------------------------------------------------------------------
1559
+ # Unsorted
1560
+
1561
+ def test_series_broadcasting(self):
1562
+ # smoke test for numpy warnings
1563
+ # GH 16378, GH 16306
1564
+ df = DataFrame([1.0, 1.0, 1.0])
1565
+ df_nan = DataFrame({"A": [np.nan, 2.0, np.nan]})
1566
+ s = Series([1, 1, 1])
1567
+ s_nan = Series([np.nan, np.nan, 1])
1568
+
1569
+ with tm.assert_produces_warning(None):
1570
+ df_nan.clip(lower=s, axis=0)
1571
+ for op in ["lt", "le", "gt", "ge", "eq", "ne"]:
1572
+ getattr(df, op)(s_nan, axis=0)
1573
+
1574
+
1575
class TestDataFrameReductions:
    """Reductions (min/max/any/sum) on datetime64/timedelta64 DataFrames,
    including NaT handling, skipna semantics and unit preservation.
    """

    def test_min_max_dt64_with_NaT(self):
        # Both NaT and Timestamp are in DataFrame.
        df = DataFrame({"foo": [pd.NaT, pd.NaT, Timestamp("2012-05-01")]})

        res = df.min()
        exp = Series([Timestamp("2012-05-01")], index=["foo"])
        tm.assert_series_equal(res, exp)

        res = df.max()
        exp = Series([Timestamp("2012-05-01")], index=["foo"])
        tm.assert_series_equal(res, exp)

        # GH12941, only NaTs are in DataFrame.
        df = DataFrame({"foo": [pd.NaT, pd.NaT]})

        res = df.min()
        exp = Series([pd.NaT], index=["foo"])
        tm.assert_series_equal(res, exp)

        res = df.max()
        exp = Series([pd.NaT], index=["foo"])
        tm.assert_series_equal(res, exp)

    def test_min_max_dt64_with_NaT_skipna_false(self, request, tz_naive_fixture):
        # GH#36907: with skipna=False a NaT in the row poisons the result.
        tz = tz_naive_fixture
        if isinstance(tz, tzlocal) and is_platform_windows():
            pytest.skip(
                "GH#37659 OSError raised within tzlocal bc Windows "
                "chokes in times before 1970-01-01"
            )

        df = DataFrame(
            {
                "a": [
                    Timestamp("2020-01-01 08:00:00", tz=tz),
                    Timestamp("1920-02-01 09:00:00", tz=tz),
                ],
                "b": [Timestamp("2020-02-01 08:00:00", tz=tz), pd.NaT],
            }
        )
        res = df.min(axis=1, skipna=False)
        expected = Series([df.loc[0, "a"], pd.NaT])
        assert expected.dtype == df["a"].dtype

        tm.assert_series_equal(res, expected)

        res = df.max(axis=1, skipna=False)
        expected = Series([df.loc[0, "b"], pd.NaT])
        assert expected.dtype == df["a"].dtype

        tm.assert_series_equal(res, expected)

    def test_min_max_dt64_api_consistency_with_NaT(self):
        # Calling the following sum functions returned an error for dataframes but
        # returned NaT for series. These tests check that the API is consistent in
        # min/max calls on empty Series/DataFrames. See GH:33704 for more
        # information
        df = DataFrame({"x": to_datetime([])})
        expected_dt_series = Series(to_datetime([]))
        # check axis 0
        assert (df.min(axis=0).x is pd.NaT) == (expected_dt_series.min() is pd.NaT)
        assert (df.max(axis=0).x is pd.NaT) == (expected_dt_series.max() is pd.NaT)

        # check axis 1
        tm.assert_series_equal(df.min(axis=1), expected_dt_series)
        tm.assert_series_equal(df.max(axis=1), expected_dt_series)

    def test_min_max_dt64_api_consistency_empty_df(self):
        # check DataFrame/Series api consistency when calling min/max on an empty
        # DataFrame/Series.
        df = DataFrame({"x": []})
        expected_float_series = Series([], dtype=float)
        # check axis 0
        assert np.isnan(df.min(axis=0).x) == np.isnan(expected_float_series.min())
        assert np.isnan(df.max(axis=0).x) == np.isnan(expected_float_series.max())
        # check axis 1
        tm.assert_series_equal(df.min(axis=1), expected_float_series)
        # BUGFIX: this previously repeated df.min(axis=1); max on axis 1 was
        # never exercised, unlike the axis-0 checks above.
        tm.assert_series_equal(df.max(axis=1), expected_float_series)

    @pytest.mark.parametrize(
        "initial",
        ["2018-10-08 13:36:45+00:00", "2018-10-08 13:36:45+03:00"],  # Non-UTC timezone
    )
    @pytest.mark.parametrize("method", ["min", "max"])
    def test_preserve_timezone(self, initial: str, method):
        # GH 28552: row-wise min/max must keep the timezone of the inputs
        initial_dt = to_datetime(initial)
        expected = Series([initial_dt])
        df = DataFrame([expected])
        result = getattr(df, method)(axis=1)
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("method", ["min", "max"])
    def test_minmax_tzaware_skipna_axis_1(self, method, skipna):
        # GH#51242
        val = to_datetime("1900-01-01", utc=True)
        df = DataFrame(
            {"a": Series([pd.NaT, pd.NaT, val]), "b": Series([pd.NaT, val, val])}
        )
        op = getattr(df, method)
        result = op(axis=1, skipna=skipna)
        if skipna:
            expected = Series([pd.NaT, val, val])
        else:
            expected = Series([pd.NaT, pd.NaT, val])
        tm.assert_series_equal(result, expected)

    def test_frame_any_with_timedelta(self):
        # GH#17667: a zero timedelta is falsy, a nonzero one is truthy
        df = DataFrame(
            {
                "a": Series([0, 0]),
                "t": Series([to_timedelta(0, "s"), to_timedelta(1, "ms")]),
            }
        )

        result = df.any(axis=0)
        expected = Series(data=[False, True], index=["a", "t"])
        tm.assert_series_equal(result, expected)

        result = df.any(axis=1)
        expected = Series(data=[False, True])
        tm.assert_series_equal(result, expected)

    def test_reductions_skipna_none_raises(
        self, request, frame_or_series, all_reductions
    ):
        # skipna must be a real bool; None is rejected with a clear message
        if all_reductions == "count":
            request.applymarker(
                pytest.mark.xfail(reason="Count does not accept skipna")
            )
        obj = frame_or_series([1, 2, 3])
        msg = 'For argument "skipna" expected type bool, received type NoneType.'
        with pytest.raises(ValueError, match=msg):
            getattr(obj, all_reductions)(skipna=None)

    @td.skip_array_manager_invalid_test
    def test_reduction_timestamp_smallest_unit(self):
        # GH#52524: mixed-unit columns reduce to the smallest (finest) unit
        df = DataFrame(
            {
                "a": Series([Timestamp("2019-12-31")], dtype="datetime64[s]"),
                "b": Series(
                    [Timestamp("2019-12-31 00:00:00.123")], dtype="datetime64[ms]"
                ),
            }
        )
        result = df.max()
        expected = Series(
            [Timestamp("2019-12-31"), Timestamp("2019-12-31 00:00:00.123")],
            dtype="datetime64[ms]",
            index=["a", "b"],
        )
        tm.assert_series_equal(result, expected)

    @td.skip_array_manager_not_yet_implemented
    def test_reduction_timedelta_smallest_unit(self):
        # GH#52524: same unit-promotion rule for timedelta64 columns
        df = DataFrame(
            {
                "a": Series([pd.Timedelta("1 days")], dtype="timedelta64[s]"),
                "b": Series([pd.Timedelta("1 days")], dtype="timedelta64[ms]"),
            }
        )
        result = df.max()
        expected = Series(
            [pd.Timedelta("1 days"), pd.Timedelta("1 days")],
            dtype="timedelta64[ms]",
            index=["a", "b"],
        )
        tm.assert_series_equal(result, expected)
1748
+
1749
+
1750
class TestNuisanceColumns:
    """Reductions over DataFrames containing columns whose dtype does not
    support the operation: the DataFrame must raise exactly like the
    underlying Series does, instead of silently dropping the column.
    """

    @pytest.mark.parametrize("method", ["any", "all"])
    def test_any_all_categorical_dtype_nuisance_column(self, method):
        # GH#36076 DataFrame should match Series behavior
        cat_ser = Series([0, 1], dtype="category", name="A")
        frame = cat_ser.to_frame()

        # The Series itself refuses the reduction ...
        with pytest.raises(TypeError, match="does not support reduction"):
            getattr(cat_ser, method)()

        with pytest.raises(TypeError, match="does not support reduction"):
            getattr(np, method)(cat_ser)

        # ... and so must the DataFrame, however bool_only is spelled.
        for bool_only in [False, None]:
            with pytest.raises(TypeError, match="does not support reduction"):
                getattr(frame, method)(bool_only=bool_only)

        with pytest.raises(TypeError, match="does not support reduction"):
            getattr(np, method)(frame, axis=0)

    def test_median_categorical_dtype_nuisance_column(self):
        # GH#21020 DataFrame.median should match Series.median
        frame = DataFrame({"A": Categorical([1, 2, 2, 2, 3])})
        cat_ser = frame["A"]

        # Series behavior: raise.
        with pytest.raises(TypeError, match="does not support reduction"):
            cat_ser.median()

        # DataFrame behavior must match, with or without numeric_only.
        with pytest.raises(TypeError, match="does not support reduction"):
            frame.median(numeric_only=False)

        with pytest.raises(TypeError, match="does not support reduction"):
            frame.median()

        # same thing, but with an additional non-categorical column
        frame["B"] = frame["A"].astype(int)

        with pytest.raises(TypeError, match="does not support reduction"):
            frame.median(numeric_only=False)

        with pytest.raises(TypeError, match="does not support reduction"):
            frame.median()

        # TODO: np.median(df, axis=0) gives np.array([2.0, 2.0]) instead
        # of expected.values

    @pytest.mark.parametrize("method", ["min", "max"])
    def test_min_max_categorical_dtype_non_ordered_nuisance_column(self, method):
        # GH#28949 DataFrame.min should behave like Series.min
        cat = Categorical(["a", "b", "c", "b"], ordered=False)
        cat_ser = Series(cat)
        frame = cat_ser.to_frame("A")

        # Series behavior: unordered categoricals reject min/max.
        with pytest.raises(TypeError, match="is not ordered for operation"):
            getattr(cat_ser, method)()

        with pytest.raises(TypeError, match="is not ordered for operation"):
            getattr(np, method)(cat_ser)

        # DataFrame behavior must match.
        with pytest.raises(TypeError, match="is not ordered for operation"):
            getattr(frame, method)(numeric_only=False)

        with pytest.raises(TypeError, match="is not ordered for operation"):
            getattr(frame, method)()

        with pytest.raises(TypeError, match="is not ordered for operation"):
            getattr(np, method)(frame, axis=0)

        # same thing, but with an additional non-categorical column
        frame["B"] = frame["A"].astype(object)
        with pytest.raises(TypeError, match="is not ordered for operation"):
            getattr(frame, method)()

        with pytest.raises(TypeError, match="is not ordered for operation"):
            getattr(np, method)(frame, axis=0)
1830
+
1831
+
1832
class TestEmptyDataFrameReductions:
    """Reductions over empty DataFrames: with min_count=0 the result is the
    operation's identity element; with min_count=1 the result is all-NA.
    Expected result dtypes differ between numpy and nullable dtypes and
    between platforms (32-bit / Windows).
    """

    @pytest.mark.parametrize(
        "opname, dtype, exp_value, exp_dtype",
        [
            ("sum", np.int8, 0, np.int64),
            ("prod", np.int8, 1, np.int_),
            ("sum", np.int64, 0, np.int64),
            ("prod", np.int64, 1, np.int64),
            ("sum", np.uint8, 0, np.uint64),
            ("prod", np.uint8, 1, np.uint),
            ("sum", np.uint64, 0, np.uint64),
            ("prod", np.uint64, 1, np.uint64),
            ("sum", np.float32, 0, np.float32),
            ("prod", np.float32, 1, np.float32),
            ("sum", np.float64, 0, np.float64),
        ],
    )
    def test_df_empty_min_count_0(self, opname, dtype, exp_value, exp_dtype):
        # min_count=0: empty reduction yields the identity (0 for sum, 1 for prod)
        df = DataFrame({0: [], 1: []}, dtype=dtype)
        result = getattr(df, opname)(min_count=0)

        expected = Series([exp_value, exp_value], dtype=exp_dtype)
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize(
        "opname, dtype, exp_dtype",
        [
            ("sum", np.int8, np.float64),
            ("prod", np.int8, np.float64),
            ("sum", np.int64, np.float64),
            ("prod", np.int64, np.float64),
            ("sum", np.uint8, np.float64),
            ("prod", np.uint8, np.float64),
            ("sum", np.uint64, np.float64),
            ("prod", np.uint64, np.float64),
            ("sum", np.float32, np.float32),
            ("prod", np.float32, np.float32),
            ("sum", np.float64, np.float64),
        ],
    )
    def test_df_empty_min_count_1(self, opname, dtype, exp_dtype):
        # min_count=1: no values to aggregate, so the result is NaN (hence float)
        df = DataFrame({0: [], 1: []}, dtype=dtype)
        result = getattr(df, opname)(min_count=1)

        expected = Series([np.nan, np.nan], dtype=exp_dtype)
        tm.assert_series_equal(result, expected)

    # NOTE: the original list duplicated the ("prod", "Int8", 1, ...) case;
    # the duplicate has been removed — it ran an identical test twice.
    @pytest.mark.parametrize(
        "opname, dtype, exp_value, exp_dtype",
        [
            ("sum", "Int8", 0, ("Int32" if is_windows_np2_or_is32 else "Int64")),
            ("prod", "Int8", 1, ("Int32" if is_windows_np2_or_is32 else "Int64")),
            ("sum", "Int64", 0, "Int64"),
            ("prod", "Int64", 1, "Int64"),
            ("sum", "UInt8", 0, ("UInt32" if is_windows_np2_or_is32 else "UInt64")),
            ("prod", "UInt8", 1, ("UInt32" if is_windows_np2_or_is32 else "UInt64")),
            ("sum", "UInt64", 0, "UInt64"),
            ("prod", "UInt64", 1, "UInt64"),
            ("sum", "Float32", 0, "Float32"),
            ("prod", "Float32", 1, "Float32"),
            ("sum", "Float64", 0, "Float64"),
        ],
    )
    def test_df_empty_nullable_min_count_0(self, opname, dtype, exp_value, exp_dtype):
        df = DataFrame({0: [], 1: []}, dtype=dtype)
        result = getattr(df, opname)(min_count=0)

        expected = Series([exp_value, exp_value], dtype=exp_dtype)
        tm.assert_series_equal(result, expected)

    # TODO: why does min_count=1 impact the resulting Windows dtype
    # differently than min_count=0?
    @pytest.mark.parametrize(
        "opname, dtype, exp_dtype",
        [
            ("sum", "Int8", ("Int32" if is_windows_or_is32 else "Int64")),
            ("prod", "Int8", ("Int32" if is_windows_or_is32 else "Int64")),
            ("sum", "Int64", "Int64"),
            ("prod", "Int64", "Int64"),
            ("sum", "UInt8", ("UInt32" if is_windows_or_is32 else "UInt64")),
            ("prod", "UInt8", ("UInt32" if is_windows_or_is32 else "UInt64")),
            ("sum", "UInt64", "UInt64"),
            ("prod", "UInt64", "UInt64"),
            ("sum", "Float32", "Float32"),
            ("prod", "Float32", "Float32"),
            ("sum", "Float64", "Float64"),
        ],
    )
    def test_df_empty_nullable_min_count_1(self, opname, dtype, exp_dtype):
        # min_count=1 with nullable dtypes: result is pd.NA, dtype is preserved
        df = DataFrame({0: [], 1: []}, dtype=dtype)
        result = getattr(df, opname)(min_count=1)

        expected = Series([pd.NA, pd.NA], dtype=exp_dtype)
        tm.assert_series_equal(result, expected)
1927
+
1928
+
1929
+ def test_sum_timedelta64_skipna_false(using_array_manager, request):
1930
+ # GH#17235
1931
+ if using_array_manager:
1932
+ mark = pytest.mark.xfail(
1933
+ reason="Incorrect type inference on NaT in reduction result"
1934
+ )
1935
+ request.applymarker(mark)
1936
+
1937
+ arr = np.arange(8).astype(np.int64).view("m8[s]").reshape(4, 2)
1938
+ arr[-1, -1] = "Nat"
1939
+
1940
+ df = DataFrame(arr)
1941
+ assert (df.dtypes == arr.dtype).all()
1942
+
1943
+ result = df.sum(skipna=False)
1944
+ expected = Series([pd.Timedelta(seconds=12), pd.NaT], dtype="m8[s]")
1945
+ tm.assert_series_equal(result, expected)
1946
+
1947
+ result = df.sum(axis=0, skipna=False)
1948
+ tm.assert_series_equal(result, expected)
1949
+
1950
+ result = df.sum(axis=1, skipna=False)
1951
+ expected = Series(
1952
+ [
1953
+ pd.Timedelta(seconds=1),
1954
+ pd.Timedelta(seconds=5),
1955
+ pd.Timedelta(seconds=9),
1956
+ pd.NaT,
1957
+ ],
1958
+ dtype="m8[s]",
1959
+ )
1960
+ tm.assert_series_equal(result, expected)
1961
+
1962
+
1963
@pytest.mark.xfail(
    using_pyarrow_string_dtype(), reason="sum doesn't work with arrow strings"
)
def test_mixed_frame_with_integer_sum():
    # https://github.com/pandas-dev/pandas/issues/34520
    # sum over a frame mixing object and nullable-int columns
    frame = DataFrame([["a", 1]], columns=list("ab")).astype({"b": "Int64"})
    result = frame.sum()
    expected = Series(["a", 1], index=["a", "b"])
    tm.assert_series_equal(result, expected)
1973
+
1974
+
1975
@pytest.mark.parametrize("numeric_only", [True, False, None])
@pytest.mark.parametrize("method", ["min", "max"])
def test_minmax_extensionarray(method, numeric_only):
    # https://github.com/pandas-dev/pandas/issues/32651
    # min/max on Int64 must survive the int64 extremes and ignore the NA
    bounds = np.iinfo("int64")
    ser = Series([bounds.max, None, bounds.min], dtype=pd.Int64Dtype())
    df = DataFrame({"Int64": ser})
    result = getattr(df, method)(numeric_only=numeric_only)
    expected = Series(
        [getattr(bounds, method)],
        dtype="Int64",
        index=Index(["Int64"]),
    )
    tm.assert_series_equal(result, expected)
1989
+
1990
+
1991
@pytest.mark.parametrize("ts_value", [Timestamp("2000-01-01"), pd.NaT])
def test_frame_mixed_numeric_object_with_timestamp(ts_value):
    # GH 13912: summing a frame with numeric, object and datetime columns raises
    mixed = DataFrame({"a": [1], "b": [1.1], "c": ["foo"], "d": [ts_value]})
    with pytest.raises(TypeError, match="does not support reduction"):
        mixed.sum()
1997
+
1998
+
1999
def test_prod_sum_min_count_mixed_object():
    # https://github.com/pandas-dev/pandas/issues/41074
    df = DataFrame([1, "a", True])

    # expected result per GH#41074
    result = df.prod(axis=0, min_count=1, numeric_only=False)
    tm.assert_series_equal(result, Series(["a"], dtype=object))

    # int + str is undefined, so sum must raise
    msg = re.escape("unsupported operand type(s) for +: 'int' and 'str'")
    with pytest.raises(TypeError, match=msg):
        df.sum(axis=0, min_count=1, numeric_only=False)
2010
+
2011
+
2012
@pytest.mark.parametrize("method", ["min", "max", "mean", "median", "skew", "kurt"])
@pytest.mark.parametrize("numeric_only", [True, False])
@pytest.mark.parametrize("dtype", ["float64", "Float64"])
def test_reduction_axis_none_returns_scalar(method, numeric_only, dtype):
    # GH#21597 As of 2.0, axis=None reduces over all axes, yielding a scalar.
    df = DataFrame(np.random.default_rng(2).standard_normal((4, 4)), dtype=dtype)

    result = getattr(df, method)(axis=None, numeric_only=numeric_only)
    values = df.to_numpy(dtype=np.float64)
    if method in {"skew", "kurt"}:
        # scipy provides the reference values for the higher moments
        stats = pytest.importorskip("scipy.stats")
        if method == "kurt":
            method = "kurtosis"
        reference = getattr(stats, method)(values, bias=False, axis=None)
        tm.assert_almost_equal(result, reference)
    else:
        assert result == getattr(np, method)(values, axis=None)
2031
+
2032
+
2033
@pytest.mark.parametrize(
    "kernel",
    [
        "corr",
        "corrwith",
        "cov",
        "idxmax",
        "idxmin",
        "kurt",
        "max",
        "mean",
        "median",
        "min",
        "prod",
        "quantile",
        "sem",
        "skew",
        "std",
        "sum",
        "var",
    ],
)
def test_fails_on_non_numeric(kernel):
    # GH#46852: every numeric reduction/kernel must raise TypeError on an
    # object column rather than silently dropping it.
    df = DataFrame({"a": [1, 2, 3], "b": object})
    # corrwith is the only kernel here that requires a second operand
    args = (df,) if kernel == "corrwith" else ()
    # different kernels raise through different code paths, so accept any of
    # the known messages
    msg = "|".join(
        [
            "not allowed for this dtype",
            "argument must be a string or a number",
            "not supported between instances of",
            "unsupported operand type",
            "argument must be a string or a real number",
        ]
    )
    if kernel == "median":
        # slightly different message on different builds
        msg1 = (
            r"Cannot convert \[\[<class 'object'> <class 'object'> "
            r"<class 'object'>\]\] to numeric"
        )
        msg2 = (
            r"Cannot convert \[<class 'object'> <class 'object'> "
            r"<class 'object'>\] to numeric"
        )
        msg = "|".join([msg1, msg2])
    with pytest.raises(TypeError, match=msg):
        getattr(df, kernel)(*args)
2081
+
2082
+
2083
@pytest.mark.parametrize(
    "method",
    [
        "all",
        "any",
        "count",
        "idxmax",
        "idxmin",
        "kurt",
        "kurtosis",
        "max",
        "mean",
        "median",
        "min",
        "nunique",
        "prod",
        "product",
        "sem",
        "skew",
        "std",
        "sum",
        "var",
    ],
)
@pytest.mark.parametrize("min_count", [0, 2])
def test_numeric_ea_axis_1(method, skipna, min_count, any_numeric_ea_dtype):
    # GH 54341: axis=1 reductions on nullable (masked) dtypes must match the
    # result of the same reduction on the equivalent float/NaN frame, modulo
    # the expected result dtype worked out below.
    df = DataFrame(
        {
            "a": Series([0, 1, 2, 3], dtype=any_numeric_ea_dtype),
            "b": Series([0, 1, pd.NA, 3], dtype=any_numeric_ea_dtype),
        },
    )
    # float/NaN mirror of df used to compute the reference result
    expected_df = DataFrame(
        {
            "a": [0.0, 1.0, 2.0, 3.0],
            "b": [0.0, 1.0, np.nan, 3.0],
        },
    )
    if method in ("count", "nunique"):
        # counting kernels always return plain int64
        expected_dtype = "int64"
    elif method in ("all", "any"):
        expected_dtype = "boolean"
    elif method in (
        "kurt",
        "kurtosis",
        "mean",
        "median",
        "sem",
        "skew",
        "std",
        "var",
    ) and not any_numeric_ea_dtype.startswith("Float"):
        # moment-like kernels promote integer inputs to nullable float
        expected_dtype = "Float64"
    else:
        expected_dtype = any_numeric_ea_dtype

    # build kwargs only from arguments the kernel actually accepts
    kwargs = {}
    if method not in ("count", "nunique", "quantile"):
        kwargs["skipna"] = skipna
    if method in ("prod", "product", "sum"):
        kwargs["min_count"] = min_count

    # idxmax/idxmin on all-NA rows with skipna=False emit a FutureWarning
    warn = None
    msg = None
    if not skipna and method in ("idxmax", "idxmin"):
        warn = FutureWarning
        msg = f"The behavior of DataFrame.{method} with all-NA values"
    with tm.assert_produces_warning(warn, match=msg):
        result = getattr(df, method)(axis=1, **kwargs)
    with tm.assert_produces_warning(warn, match=msg):
        expected = getattr(expected_df, method)(axis=1, **kwargs)
    if method not in ("idxmax", "idxmin"):
        expected = expected.astype(expected_dtype)
    tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_stack_unstack.py ADDED
The diff for this file is too large to render. See raw diff
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_ufunc.py ADDED
@@ -0,0 +1,311 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from functools import partial
2
+ import re
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ import pandas as pd
8
+ import pandas._testing as tm
9
+ from pandas.api.types import is_extension_array_dtype
10
+
11
# dtype configurations shared by the parametrized tests below: a plain numpy
# dtype, a nullable extension dtype, and a per-column mix of the two.
dtypes = [
    "int64",
    "Int64",
    {"A": "int64", "B": "Int64"},
]
16
+
17
+
18
@pytest.mark.parametrize("dtype", dtypes)
def test_unary_unary(dtype):
    # one-input/one-output ufunc (np.positive) round-trips index, columns
    # and dtype
    raw = np.array([[-1, -1], [1, 1]], dtype="int64")
    df = pd.DataFrame(raw, columns=["A", "B"], index=["a", "b"]).astype(dtype=dtype)
    result = np.positive(df)
    expected = pd.DataFrame(
        np.positive(raw), index=df.index, columns=df.columns
    ).astype(dtype)
    tm.assert_frame_equal(result, expected)
28
+
29
+
30
@pytest.mark.parametrize("dtype", dtypes)
def test_unary_binary(request, dtype):
    # one input, two outputs: np.modf returns a 2-tuple of DataFrames
    if is_extension_array_dtype(dtype) or isinstance(dtype, dict):
        request.applymarker(
            pytest.mark.xfail(
                reason="Extension / mixed with multiple outputs not implemented."
            )
        )

    raw = np.array([[-1, -1], [1, 1]], dtype="int64")
    df = pd.DataFrame(raw, columns=["A", "B"], index=["a", "b"]).astype(dtype=dtype)
    outputs = np.modf(df)
    assert isinstance(outputs, tuple)
    assert len(outputs) == 2

    # each output mirrors the corresponding raw numpy output
    for got, raw_out in zip(outputs, np.modf(raw)):
        expected = pd.DataFrame(raw_out, index=df.index, columns=df.columns)
        tm.assert_frame_equal(got, expected)
50
+
51
+
52
@pytest.mark.parametrize("dtype", dtypes)
def test_binary_input_dispatch_binop(dtype):
    # binop ufuncs are dispatched to our dunder methods.
    raw = np.array([[-1, -1], [1, 1]], dtype="int64")
    df = pd.DataFrame(raw, columns=["A", "B"], index=["a", "b"]).astype(dtype=dtype)
    result = np.add(df, df)
    expected = pd.DataFrame(
        np.add(raw, raw), index=df.index, columns=df.columns
    ).astype(dtype)
    tm.assert_frame_equal(result, expected)
62
+
63
+
64
@pytest.mark.parametrize(
    "func,arg,expected",
    [
        (np.add, 1, [2, 3, 4, 5]),
        (
            partial(np.add, where=[[False, True], [True, False]]),
            np.array([[1, 1], [1, 1]]),
            [0, 3, 4, 0],
        ),
        (np.power, np.array([[1, 1], [2, 2]]), [1, 2, 9, 16]),
        (np.subtract, 2, [-1, 0, 1, 2]),
        (
            partial(np.negative, where=np.array([[False, True], [True, False]])),
            None,
            [0, -2, -3, 0],
        ),
    ],
)
def test_ufunc_passes_args(func, arg, expected):
    # GH#40662: extra ufunc kwargs (out=, where=) must be forwarded through
    base = np.array([[1, 2], [3, 4]])
    df = pd.DataFrame(base)
    out_buffer = np.zeros_like(base)

    # arg is None for the single-argument ufuncs
    if arg is None:
        result = func(df, out=out_buffer)
    else:
        result = func(df, arg, out=out_buffer)

    expected = np.array(expected).reshape(2, 2)
    # out= buffer is filled in place ...
    tm.assert_numpy_array_equal(out_buffer, expected)

    # ... and the returned DataFrame carries the same values
    tm.assert_frame_equal(result, pd.DataFrame(expected))
98
+
99
+
100
@pytest.mark.parametrize("dtype_a", dtypes)
@pytest.mark.parametrize("dtype_b", dtypes)
def test_binary_input_aligns_columns(request, dtype_a, dtype_b):
    # Binary ufuncs align DataFrame operands on their columns before applying
    # the ufunc; non-shared columns become NaN on both sides.
    if (
        is_extension_array_dtype(dtype_a)
        or isinstance(dtype_a, dict)
        or is_extension_array_dtype(dtype_b)
        or isinstance(dtype_b, dict)
    ):
        request.applymarker(
            pytest.mark.xfail(
                reason="Extension / mixed with multiple inputs not implemented."
            )
        )

    df1 = pd.DataFrame({"A": [1, 2], "B": [3, 4]}).astype(dtype_a)

    # keep the per-column dtype mapping consistent with df2's columns
    if isinstance(dtype_a, dict) and isinstance(dtype_b, dict):
        dtype_b = dtype_b.copy()
        dtype_b["C"] = dtype_b.pop("B")
    df2 = pd.DataFrame({"A": [1, 2], "C": [3, 4]}).astype(dtype_b)
    # As of 2.0, align first before applying the ufunc
    result = np.heaviside(df1, df2)
    expected = np.heaviside(
        np.array([[1, 3, np.nan], [2, 4, np.nan]]),
        np.array([[1, np.nan, 3], [2, np.nan, 4]]),
    )
    expected = pd.DataFrame(expected, index=[0, 1], columns=["A", "B", "C"])
    tm.assert_frame_equal(result, expected)

    # a raw ndarray operand is taken positionally — no alignment happens
    result = np.heaviside(df1, df2.values)
    expected = pd.DataFrame([[1.0, 1.0], [1.0, 1.0]], columns=["A", "B"])
    tm.assert_frame_equal(result, expected)
133
+
134
+
135
@pytest.mark.parametrize("dtype", dtypes)
def test_binary_input_aligns_index(request, dtype):
    # Binary ufuncs align DataFrame operands on their index before applying
    # the ufunc; non-shared labels become NaN on both sides.
    if is_extension_array_dtype(dtype) or isinstance(dtype, dict):
        request.applymarker(
            pytest.mark.xfail(
                reason="Extension / mixed with multiple inputs not implemented."
            )
        )
    df1 = pd.DataFrame({"A": [1, 2], "B": [3, 4]}, index=["a", "b"]).astype(dtype)
    df2 = pd.DataFrame({"A": [1, 2], "B": [3, 4]}, index=["a", "c"]).astype(dtype)
    result = np.heaviside(df1, df2)
    expected = np.heaviside(
        np.array([[1, 3], [3, 4], [np.nan, np.nan]]),
        np.array([[1, 3], [np.nan, np.nan], [3, 4]]),
    )
    # TODO(FloatArray): this will be Float64Dtype.
    expected = pd.DataFrame(expected, index=["a", "b", "c"], columns=["A", "B"])
    tm.assert_frame_equal(result, expected)

    # a raw ndarray operand is taken positionally — no alignment happens
    result = np.heaviside(df1, df2.values)
    expected = pd.DataFrame(
        [[1.0, 1.0], [1.0, 1.0]], columns=["A", "B"], index=["a", "b"]
    )
    tm.assert_frame_equal(result, expected)
159
+
160
+
161
def test_binary_frame_series_raises():
    # Mixed DataFrame/Series ufunc inputs are not implemented, in either order.
    df = pd.DataFrame({"A": [1, 2]})
    for operands in [(df, df["A"]), (df["A"], df)]:
        with pytest.raises(NotImplementedError, match="logaddexp"):
            np.logaddexp(*operands)
169
+
170
+
171
def test_unary_accumulate_axis():
    # https://github.com/pandas-dev/pandas/issues/39259
    single_col = pd.DataFrame({"a": [1, 3, 2, 4]})
    tm.assert_frame_equal(
        np.maximum.accumulate(single_col), pd.DataFrame({"a": [1, 3, 3, 4]})
    )

    df = pd.DataFrame({"a": [1, 3, 2, 4], "b": [0.1, 4.0, 3.0, 2.0]})
    # in theory could preserve int dtype for default axis=0
    down_cols = pd.DataFrame({"a": [1.0, 3.0, 3.0, 4.0], "b": [0.1, 4.0, 4.0, 4.0]})
    tm.assert_frame_equal(np.maximum.accumulate(df), down_cols)
    tm.assert_frame_equal(np.maximum.accumulate(df, axis=0), down_cols)

    across_rows = pd.DataFrame({"a": [1.0, 3.0, 2.0, 4.0], "b": [1.0, 4.0, 3.0, 4.0]})
    tm.assert_frame_equal(np.maximum.accumulate(df, axis=1), across_rows)
190
+
191
+
192
def test_frame_outer_disallowed():
    # ufunc.outer on DataFrames: deprecation enforced in 2.0, now raises
    df = pd.DataFrame({"A": [1, 2]})
    with pytest.raises(NotImplementedError, match=""):
        np.subtract.outer(df, df)
197
+
198
+
199
def test_alignment_deprecation_enforced():
    # Enforced in 2.0
    # https://github.com/pandas-dev/pandas/issues/39184
    # Ufuncs on misaligned pandas operands now align (NaN-filling the
    # non-shared labels) instead of warning; ndarray operands stay positional.
    df1 = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
    df2 = pd.DataFrame({"b": [1, 2, 3], "c": [4, 5, 6]})
    s1 = pd.Series([1, 2], index=["a", "b"])
    s2 = pd.Series([1, 2], index=["b", "c"])

    # binary dataframe / dataframe
    expected = pd.DataFrame({"a": [2, 4, 6], "b": [8, 10, 12]})

    with tm.assert_produces_warning(None):
        # aligned -> no warning!
        result = np.add(df1, df1)
    tm.assert_frame_equal(result, expected)

    # ndarray operand: combined positionally with df1's labels
    result = np.add(df1, df2.values)
    tm.assert_frame_equal(result, expected)

    # misaligned frames: union of columns, NaN where only one side has data
    result = np.add(df1, df2)
    expected = pd.DataFrame({"a": [np.nan] * 3, "b": [5, 7, 9], "c": [np.nan] * 3})
    tm.assert_frame_equal(result, expected)

    # ndarray first: result takes the DataFrame operand's labels
    result = np.add(df1.values, df2)
    expected = pd.DataFrame({"b": [2, 4, 6], "c": [8, 10, 12]})
    tm.assert_frame_equal(result, expected)

    # binary dataframe / series
    expected = pd.DataFrame({"a": [2, 3, 4], "b": [6, 7, 8]})

    with tm.assert_produces_warning(None):
        # aligned -> no warning!
        result = np.add(df1, s1)
    tm.assert_frame_equal(result, expected)

    result = np.add(df1, s2.values)
    tm.assert_frame_equal(result, expected)

    expected = pd.DataFrame(
        {"a": [np.nan] * 3, "b": [5.0, 6.0, 7.0], "c": [np.nan] * 3}
    )
    result = np.add(df1, s2)
    tm.assert_frame_equal(result, expected)

    # Series-first with a DataFrame second operand is not implemented
    msg = "Cannot apply ufunc <ufunc 'add'> to mixed DataFrame and Series inputs."
    with pytest.raises(NotImplementedError, match=msg):
        np.add(s2, df1)
246
+
247
+
248
def test_alignment_deprecation_many_inputs_enforced():
    # Enforced in 2.0
    # https://github.com/pandas-dev/pandas/issues/39184
    # test that the deprecation also works with > 2 inputs -> using a numba
    # written ufunc for this because numpy itself doesn't have such ufuncs
    numba = pytest.importorskip("numba")

    @numba.vectorize([numba.float64(numba.float64, numba.float64, numba.float64)])
    def my_ufunc(x, y, z):
        return x + y + z

    df1 = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]})
    df2 = pd.DataFrame({"b": [1, 2, 3], "c": [4, 5, 6]})
    df3 = pd.DataFrame({"a": [1, 2, 3], "c": [4, 5, 6]})

    # three misaligned frames: union of columns, every cell has at least one
    # NaN contribution, so the result is all-NaN
    result = my_ufunc(df1, df2, df3)
    expected = pd.DataFrame(np.full((3, 3), np.nan), columns=["a", "b", "c"])
    tm.assert_frame_equal(result, expected)

    # all aligned -> no warning
    with tm.assert_produces_warning(None):
        result = my_ufunc(df1, df1, df1)
    expected = pd.DataFrame([[3.0, 12.0], [6.0, 15.0], [9.0, 18.0]], columns=["a", "b"])
    tm.assert_frame_equal(result, expected)

    # mixed frame / arrays
    msg = (
        r"operands could not be broadcast together with shapes \(3,3\) \(3,3\) \(3,2\)"
    )
    with pytest.raises(ValueError, match=msg):
        my_ufunc(df1, df2, df3.values)

    # single frame -> no warning
    with tm.assert_produces_warning(None):
        result = my_ufunc(df1, df2.values, df3.values)
    tm.assert_frame_equal(result, expected)

    # takes indices of first frame
    msg = (
        r"operands could not be broadcast together with shapes \(3,2\) \(3,3\) \(3,3\)"
    )
    with pytest.raises(ValueError, match=msg):
        my_ufunc(df1.values, df2, df3)
291
+
292
+
293
def test_array_ufuncs_for_many_arguments():
    # GH39853: frompyfunc-built 3-argument ufuncs work on DataFrames
    def add3(x, y, z):
        return x + y + z

    ufunc = np.frompyfunc(add3, 3, 1)
    df = pd.DataFrame([[1, 2], [3, 4]])

    result = ufunc(df, df, 1)
    tm.assert_frame_equal(result, pd.DataFrame([[3, 5], [7, 9]], dtype=object))

    # mixing in a Series operand is not implemented
    ser = pd.Series([1, 2])
    msg = (
        "Cannot apply ufunc <ufunc 'add3 (vectorized)'> "
        "to mixed DataFrame and Series inputs."
    )
    with pytest.raises(NotImplementedError, match=re.escape(msg)):
        ufunc(df, df, ser)
env-llmeval/lib/python3.10/site-packages/pandas/tests/frame/test_unary.py ADDED
@@ -0,0 +1,204 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from decimal import Decimal
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.compat.numpy import np_version_gte1p25
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+
11
+
12
+ class TestDataFrameUnaryOperators:
13
+ # __pos__, __neg__, __invert__
14
+
15
+ @pytest.mark.parametrize(
16
+ "df,expected",
17
+ [
18
+ (pd.DataFrame({"a": [-1, 1]}), pd.DataFrame({"a": [1, -1]})),
19
+ (pd.DataFrame({"a": [False, True]}), pd.DataFrame({"a": [True, False]})),
20
+ (
21
+ pd.DataFrame({"a": pd.Series(pd.to_timedelta([-1, 1]))}),
22
+ pd.DataFrame({"a": pd.Series(pd.to_timedelta([1, -1]))}),
23
+ ),
24
+ ],
25
+ )
26
+ def test_neg_numeric(self, df, expected):
27
+ tm.assert_frame_equal(-df, expected)
28
+ tm.assert_series_equal(-df["a"], expected["a"])
29
+
30
+ @pytest.mark.parametrize(
31
+ "df, expected",
32
+ [
33
+ (np.array([1, 2], dtype=object), np.array([-1, -2], dtype=object)),
34
+ ([Decimal("1.0"), Decimal("2.0")], [Decimal("-1.0"), Decimal("-2.0")]),
35
+ ],
36
+ )
37
+ def test_neg_object(self, df, expected):
38
+ # GH#21380
39
+ df = pd.DataFrame({"a": df})
40
+ expected = pd.DataFrame({"a": expected})
41
+ tm.assert_frame_equal(-df, expected)
42
+ tm.assert_series_equal(-df["a"], expected["a"])
43
+
44
+ @pytest.mark.parametrize(
45
+ "df",
46
+ [
47
+ pd.DataFrame({"a": ["a", "b"]}),
48
+ pd.DataFrame({"a": pd.to_datetime(["2017-01-22", "1970-01-01"])}),
49
+ ],
50
+ )
51
+ def test_neg_raises(self, df, using_infer_string):
52
+ msg = (
53
+ "bad operand type for unary -: 'str'|"
54
+ r"bad operand type for unary -: 'DatetimeArray'"
55
+ )
56
+ if using_infer_string and df.dtypes.iloc[0] == "string":
57
+ import pyarrow as pa
58
+
59
+ msg = "has no kernel"
60
+ with pytest.raises(pa.lib.ArrowNotImplementedError, match=msg):
61
+ (-df)
62
+ with pytest.raises(pa.lib.ArrowNotImplementedError, match=msg):
63
+ (-df["a"])
64
+
65
+ else:
66
+ with pytest.raises(TypeError, match=msg):
67
+ (-df)
68
+ with pytest.raises(TypeError, match=msg):
69
+ (-df["a"])
70
+
71
+ def test_invert(self, float_frame):
72
+ df = float_frame
73
+
74
+ tm.assert_frame_equal(-(df < 0), ~(df < 0))
75
+
76
+ def test_invert_mixed(self):
77
+ shape = (10, 5)
78
+ df = pd.concat(
79
+ [
80
+ pd.DataFrame(np.zeros(shape, dtype="bool")),
81
+ pd.DataFrame(np.zeros(shape, dtype=int)),
82
+ ],
83
+ axis=1,
84
+ ignore_index=True,
85
+ )
86
+ result = ~df
87
+ expected = pd.concat(
88
+ [
89
+ pd.DataFrame(np.ones(shape, dtype="bool")),
90
+ pd.DataFrame(-np.ones(shape, dtype=int)),
91
+ ],
92
+ axis=1,
93
+ ignore_index=True,
94
+ )
95
+ tm.assert_frame_equal(result, expected)
96
+
97
+ def test_invert_empty_not_input(self):
98
+ # GH#51032
99
+ df = pd.DataFrame()
100
+ result = ~df
101
+ tm.assert_frame_equal(df, result)
102
+ assert df is not result
103
+
104
+ @pytest.mark.parametrize(
105
+ "df",
106
+ [
107
+ pd.DataFrame({"a": [-1, 1]}),
108
+ pd.DataFrame({"a": [False, True]}),
109
+ pd.DataFrame({"a": pd.Series(pd.to_timedelta([-1, 1]))}),
110
+ ],
111
+ )
112
+ def test_pos_numeric(self, df):
113
+ # GH#16073
114
+ tm.assert_frame_equal(+df, df)
115
+ tm.assert_series_equal(+df["a"], df["a"])
116
+
117
+ @pytest.mark.parametrize(
118
+ "df",
119
+ [
120
+ pd.DataFrame({"a": np.array([-1, 2], dtype=object)}),
121
+ pd.DataFrame({"a": [Decimal("-1.0"), Decimal("2.0")]}),
122
+ ],
123
+ )
124
+ def test_pos_object(self, df):
125
+ # GH#21380
126
+ tm.assert_frame_equal(+df, df)
127
+ tm.assert_series_equal(+df["a"], df["a"])
128
+
129
+ @pytest.mark.parametrize(
130
+ "df",
131
+ [
132
+ pytest.param(
133
+ pd.DataFrame({"a": ["a", "b"]}),
134
+ # filterwarnings removable once min numpy version is 1.25
135
+ marks=[
136
+ pytest.mark.filterwarnings("ignore:Applying:DeprecationWarning")
137
+ ],
138
+ ),
139
+ ],
140
+ )
141
+ def test_pos_object_raises(self, df):
142
+ # GH#21380
143
+ if np_version_gte1p25:
144
+ with pytest.raises(
145
+ TypeError, match=r"^bad operand type for unary \+: \'str\'$"
146
+ ):
147
+ tm.assert_frame_equal(+df, df)
148
+ else:
149
+ tm.assert_series_equal(+df["a"], df["a"])
150
+
151
+ @pytest.mark.parametrize(
152
+ "df", [pd.DataFrame({"a": pd.to_datetime(["2017-01-22", "1970-01-01"])})]
153
+ )
154
+ def test_pos_raises(self, df):
155
+ msg = r"bad operand type for unary \+: 'DatetimeArray'"
156
+ with pytest.raises(TypeError, match=msg):
157
+ (+df)
158
+ with pytest.raises(TypeError, match=msg):
159
+ (+df["a"])
160
+
161
+ def test_unary_nullable(self):
162
+ df = pd.DataFrame(
163
+ {
164
+ "a": pd.array([1, -2, 3, pd.NA], dtype="Int64"),
165
+ "b": pd.array([4.0, -5.0, 6.0, pd.NA], dtype="Float32"),
166
+ "c": pd.array([True, False, False, pd.NA], dtype="boolean"),
167
+ # include numpy bool to make sure bool-vs-boolean behavior
168
+ # is consistent in non-NA locations
169
+ "d": np.array([True, False, False, True]),
170
+ }
171
+ )
172
+
173
+ result = +df
174
+ res_ufunc = np.positive(df)
175
+ expected = df
176
+ # TODO: assert that we have copies?
177
+ tm.assert_frame_equal(result, expected)
178
+ tm.assert_frame_equal(res_ufunc, expected)
179
+
180
+ result = -df
181
+ res_ufunc = np.negative(df)
182
+ expected = pd.DataFrame(
183
+ {
184
+ "a": pd.array([-1, 2, -3, pd.NA], dtype="Int64"),
185
+ "b": pd.array([-4.0, 5.0, -6.0, pd.NA], dtype="Float32"),
186
+ "c": pd.array([False, True, True, pd.NA], dtype="boolean"),
187
+ "d": np.array([False, True, True, False]),
188
+ }
189
+ )
190
+ tm.assert_frame_equal(result, expected)
191
+ tm.assert_frame_equal(res_ufunc, expected)
192
+
193
+ result = abs(df)
194
+ res_ufunc = np.abs(df)
195
+ expected = pd.DataFrame(
196
+ {
197
+ "a": pd.array([1, 2, 3, pd.NA], dtype="Int64"),
198
+ "b": pd.array([4.0, 5.0, 6.0, pd.NA], dtype="Float32"),
199
+ "c": pd.array([True, False, False, pd.NA], dtype="boolean"),
200
+ "d": np.array([True, False, False, True]),
201
+ }
202
+ )
203
+ tm.assert_frame_equal(result, expected)
204
+ tm.assert_frame_equal(res_ufunc, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (858 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (4.25 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_all_methods.cpython-310.pyc ADDED
Binary file (2.87 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_api.cpython-310.pyc ADDED
Binary file (5.07 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_apply.cpython-310.pyc ADDED
Binary file (52.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_apply_mutate.cpython-310.pyc ADDED
Binary file (5.09 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_bin_groupby.cpython-310.pyc ADDED
Binary file (1.96 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_categorical.cpython-310.pyc ADDED
Binary file (50.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_counting.cpython-310.pyc ADDED
Binary file (14.9 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_cumulative.cpython-310.pyc ADDED
Binary file (9.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_filters.cpython-310.pyc ADDED
Binary file (22.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_groupby.cpython-310.pyc ADDED
Binary file (95.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_groupby_dropna.cpython-310.pyc ADDED
Binary file (16.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_groupby_subclass.cpython-310.pyc ADDED
Binary file (3.68 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_grouping.cpython-310.pyc ADDED
Binary file (38.1 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_index_as_string.cpython-310.pyc ADDED
Binary file (2.11 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_indexing.cpython-310.pyc ADDED
Binary file (9.75 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_libgroupby.cpython-310.pyc ADDED
Binary file (9.96 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_missing.cpython-310.pyc ADDED
Binary file (5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_numba.cpython-310.pyc ADDED
Binary file (3.24 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_numeric_only.cpython-310.pyc ADDED
Binary file (10.5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_pipe.cpython-310.pyc ADDED
Binary file (2.56 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_raises.cpython-310.pyc ADDED
Binary file (14.5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_reductions.cpython-310.pyc ADDED
Binary file (29.4 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/groupby/__pycache__/test_timegrouper.cpython-310.pyc ADDED
Binary file (23.6 kB). View file