applied-ai-018 committed on
Commit
fe9b727
·
verified ·
1 Parent(s): b67c234

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes. See the raw diff.
Files changed (50) hide show
  1. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__init__.py +0 -0
  2. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/__init__.cpython-310.pyc +0 -0
  3. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/common.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/conftest.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_array_ops.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_categorical.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_datetime64.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_interval.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_numeric.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_object.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_period.cpython-310.pyc +0 -0
  12. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_timedelta64.cpython-310.pyc +0 -0
  13. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/common.py +155 -0
  14. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/conftest.py +139 -0
  15. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_array_ops.py +39 -0
  16. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_categorical.py +25 -0
  17. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_datetime64.py +2469 -0
  18. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_interval.py +306 -0
  19. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_numeric.py +1567 -0
  20. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_object.py +420 -0
  21. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_period.py +1675 -0
  22. env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_timedelta64.py +2173 -0
  23. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__init__.py +0 -0
  24. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/__init__.cpython-310.pyc +0 -0
  25. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_odf.cpython-310.pyc +0 -0
  26. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_odswriter.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_openpyxl.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_readers.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_style.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_writers.cpython-310.pyc +0 -0
  31. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_xlrd.cpython-310.pyc +0 -0
  32. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_xlsxwriter.cpython-310.pyc +0 -0
  33. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_odf.py +77 -0
  34. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_odswriter.py +106 -0
  35. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_openpyxl.py +432 -0
  36. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_readers.py +1751 -0
  37. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_style.py +298 -0
  38. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_writers.py +1511 -0
  39. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_xlrd.py +76 -0
  40. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_xlsxwriter.py +86 -0
  41. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/__pycache__/test_console.cpython-310.pyc +0 -0
  42. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/__pycache__/test_eng_formatting.cpython-310.pyc +0 -0
  43. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/__pycache__/test_to_markdown.cpython-310.pyc +0 -0
  44. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__init__.py +0 -0
  45. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/__init__.cpython-310.pyc +0 -0
  46. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_bar.cpython-310.pyc +0 -0
  47. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_exceptions.cpython-310.pyc +0 -0
  48. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_format.cpython-310.pyc +0 -0
  49. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_highlight.cpython-310.pyc +0 -0
  50. env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_html.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (188 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/common.cpython-310.pyc ADDED
Binary file (3.83 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (3.9 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_array_ops.cpython-310.pyc ADDED
Binary file (1.18 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_categorical.cpython-310.pyc ADDED
Binary file (1.19 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_datetime64.cpython-310.pyc ADDED
Binary file (59 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_interval.cpython-310.pyc ADDED
Binary file (8.78 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_numeric.cpython-310.pyc ADDED
Binary file (44.6 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_object.cpython-310.pyc ADDED
Binary file (11.9 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_period.cpython-310.pyc ADDED
Binary file (46.5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/__pycache__/test_timedelta64.cpython-310.pyc ADDED
Binary file (54.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/common.py ADDED
@@ -0,0 +1,155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Assertion helpers for arithmetic tests.
3
+ """
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas import (
8
+ DataFrame,
9
+ Index,
10
+ Series,
11
+ array,
12
+ )
13
+ import pandas._testing as tm
14
+ from pandas.core.arrays import (
15
+ BooleanArray,
16
+ NumpyExtensionArray,
17
+ )
18
+
19
+
def assert_cannot_add(left, right, msg="cannot add"):
    """
    Assert that ``left`` and ``right`` cannot be added, in either order.

    Parameters
    ----------
    left : object
    right : object
    msg : str, default "cannot add"
        Regex the raised TypeError's message must match.
    """
    # Addition must raise regardless of which operand comes first.
    for first, second in [(left, right), (right, left)]:
        with pytest.raises(TypeError, match=msg):
            first + second
34
+
35
+
def assert_invalid_addsub_type(left, right, msg=None):
    """
    Assert that ``left`` and ``right`` can be neither added nor subtracted,
    in either operand order.

    Parameters
    ----------
    left : object
    right : object
    msg : str or None, default None
        Regex the raised TypeError's message must match; ``None`` matches any.
    """
    # Both + and - must raise for both operand orders (four combinations).
    for a, b in [(left, right), (right, left)]:
        with pytest.raises(TypeError, match=msg):
            a + b
        with pytest.raises(TypeError, match=msg):
            a - b
54
+
55
+
def get_upcast_box(left, right, is_cmp: bool = False):
    """
    Get the box to use for 'expected' in an arithmetic or comparison operation.

    Parameters
    ----------
    left : Any
    right : Any
    is_cmp : bool, default False
        Whether the operation is a comparison method.
    """
    operands = (left, right)

    # DataFrame dominates everything else.
    if any(isinstance(obj, DataFrame) for obj in operands):
        return DataFrame

    if any(isinstance(obj, Series) for obj in operands):
        # Index does not defer for comparisons, so a comparison with an
        # Index on the left produces a plain ndarray.
        if is_cmp and isinstance(left, Index):
            return np.array
        return Series

    if any(isinstance(obj, Index) for obj in operands):
        return np.array if is_cmp else Index

    return tm.to_array
79
+
80
+
def assert_invalid_comparison(left, right, box):
    """
    Assert that comparison operations with mismatched types behave correctly.

    ``==`` / ``!=`` must succeed elementwise (all-False / all-True), in both
    operand orders; all four ordering comparisons (<, <=, >, >=) must raise
    TypeError in both operand orders.

    Parameters
    ----------
    left : np.ndarray, ExtensionArray, Index, or Series
    right : object
    box : {pd.DataFrame, pd.Series, pd.Index, pd.array, tm.to_array}
    """
    # Not for tznaive-tzaware comparison

    # Note: not quite the same as how we do this for tm.box_expected
    xbox = box if box not in [Index, array] else np.array

    def xbox2(x):
        # Normalize array-like comparison results to plain ndarrays.
        # Eventually we'd like this to be tighter, but for now we'll
        # just exclude NumpyExtensionArray[bool]
        if isinstance(x, NumpyExtensionArray):
            return x._ndarray
        if isinstance(x, BooleanArray):
            # NB: we are assuming no pd.NAs for now
            return x.astype(bool)
        return x

    # rev_box: box to use for reversed comparisons
    rev_box = xbox
    if isinstance(right, Index) and isinstance(left, Series):
        rev_box = np.array

    # Mismatched types are never equal, so == is all-False...
    result = xbox2(left == right)
    expected = xbox(np.zeros(result.shape, dtype=np.bool_))

    tm.assert_equal(result, expected)

    result = xbox2(right == left)
    tm.assert_equal(result, rev_box(expected))

    # ...and != is all-True, again in both operand orders.
    result = xbox2(left != right)
    tm.assert_equal(result, ~expected)

    result = xbox2(right != left)
    tm.assert_equal(result, rev_box(~expected))

    # Ordering comparisons must raise; accept any of the known messages.
    msg = "|".join(
        [
            "Invalid comparison between",
            "Cannot compare type",
            "not supported between",
            "invalid type promotion",
            (
                # GH#36706 npdev 1.20.0 2020-09-28
                r"The DTypes <class 'numpy.dtype\[datetime64\]'> and "
                r"<class 'numpy.dtype\[int64\]'> do not have a common DType. "
                "For example they cannot be stored in a single array unless the "
                "dtype is `object`."
            ),
        ]
    )
    with pytest.raises(TypeError, match=msg):
        left < right
    with pytest.raises(TypeError, match=msg):
        left <= right
    with pytest.raises(TypeError, match=msg):
        left > right
    with pytest.raises(TypeError, match=msg):
        left >= right
    with pytest.raises(TypeError, match=msg):
        right < left
    with pytest.raises(TypeError, match=msg):
        right <= left
    with pytest.raises(TypeError, match=msg):
        right > left
    with pytest.raises(TypeError, match=msg):
        right >= left
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/conftest.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import Index
6
+
7
+
# Parametrized over a plain int and a zero-dim int64 ndarray, which must
# behave identically in datetimelike arithmetic.
@pytest.fixture(params=[1, np.array(1, dtype=np.int64)])
def one(request):
    """
    Several variants of integer value 1. The zero-dim integer array
    behaves like an integer.

    This fixture can be used to check that datetimelike indexes handle
    addition and subtraction of integers and zero-dimensional arrays
    of integers.

    Examples
    --------
    dti = pd.date_range('2016-01-01', periods=2, freq='h')
    dti
    DatetimeIndex(['2016-01-01 00:00:00', '2016-01-01 01:00:00'],
    dtype='datetime64[ns]', freq='h')
    dti + one
    DatetimeIndex(['2016-01-01 01:00:00', '2016-01-01 02:00:00'],
    dtype='datetime64[ns]', freq='h')
    """
    return request.param
29
+
30
+
# Length-5 vectors of zero in several container types (Index, ndarray,
# pd.array) and numeric dtypes.
zeros = [
    box_cls([0] * 5, dtype=dtype)
    for box_cls in [Index, np.array, pd.array]
    for dtype in [np.int64, np.uint64, np.float64]
]
# Length-5 vectors of negative zero (float only).
zeros.extend([box_cls([-0.0] * 5, dtype=np.float64) for box_cls in [Index, np.array]])
# Zero-dim ndarray scalars for each numeric dtype, plus -0.0.
zeros.extend([np.array(0, dtype=dtype) for dtype in [np.int64, np.uint64, np.float64]])
zeros.extend([np.array(-0.0, dtype=np.float64)])
# Plain Python scalar zeros.
zeros.extend([0, 0.0, -0.0])


@pytest.fixture(params=zeros)
def zero(request):
    """
    Several types of scalar zeros and length 5 vectors of zeros.

    This fixture can be used to check that numeric-dtype indexes handle
    division by any zero numeric-dtype.

    Uses vector of length 5 for broadcasting with `numeric_idx` fixture,
    which creates numeric-dtype vectors also of length 5.

    Examples
    --------
    arr = RangeIndex(5)
    arr / zeros
    Index([nan, inf, inf, inf, inf], dtype='float64')
    """
    return request.param
60
+
61
+
62
+ # ------------------------------------------------------------------
63
+ # Scalar Fixtures
64
+
65
+
@pytest.fixture(
    params=[
        pd.Timedelta("10m7s").to_pytimedelta(),
        pd.Timedelta("10m7s"),
        pd.Timedelta("10m7s").to_timedelta64(),
    ],
    ids=lambda x: type(x).__name__,  # label each param by its type
)
def scalar_td(request):
    """
    Several variants of Timedelta scalars representing 10 minutes and 7 seconds.
    """
    return request.param
80
+
@pytest.fixture(
    params=[
        pd.offsets.Day(3),
        pd.offsets.Hour(72),
        pd.Timedelta(days=3).to_pytimedelta(),
        pd.Timedelta("72:00:00"),
        np.timedelta64(3, "D"),
        np.timedelta64(72, "h"),
    ],
    ids=lambda x: type(x).__name__,  # label each param by its type
)
def three_days(request):
    """
    Several timedelta-like and DateOffset objects that each represent
    a 3-day timedelta
    """
    return request.param
99
+
@pytest.fixture(
    params=[
        pd.offsets.Hour(2),
        pd.offsets.Minute(120),
        pd.Timedelta(hours=2).to_pytimedelta(),
        pd.Timedelta(seconds=2 * 3600),
        np.timedelta64(2, "h"),
        np.timedelta64(120, "m"),
    ],
    ids=lambda x: type(x).__name__,  # label each param by its type
)
def two_hours(request):
    """
    Several timedelta-like and DateOffset objects that each represent
    a 2-hour timedelta
    """
    return request.param
117
+
118
+
# Offsets that are incompatible with every fixed frequency tested below;
# shared across the "not_*" fixtures.
_common_mismatch = [
    pd.offsets.YearBegin(2),
    pd.offsets.MonthBegin(1),
    pd.offsets.Minute(),
]


@pytest.fixture(
    params=[
        np.timedelta64(4, "h"),
        pd.Timedelta(hours=23).to_pytimedelta(),
        pd.Timedelta("23:00:00"),
    ]
    + _common_mismatch
)
def not_daily(request):
    """
    Several timedelta-like and DateOffset instances that are _not_
    compatible with Daily frequencies.
    """
    return request.param
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_array_ops.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import operator
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas._testing as tm
7
+ from pandas.core.ops.array_ops import (
8
+ comparison_op,
9
+ na_logical_op,
10
+ )
11
+
12
+
def test_na_logical_op_2d():
    # na_logical_op should handle a 2D object-dtype operand containing NaN,
    # where the raw numpy operator raises.
    left = np.arange(8).reshape(4, 2)
    right = left.astype(object)
    right[0, 0] = np.nan

    # Check that we fall back to the vec_binop branch
    with pytest.raises(TypeError, match="unsupported operand type"):
        operator.or_(left, right)

    result = na_logical_op(left, right, operator.or_)
    # x | x is x, so the object-dtype operand (NaN included) comes back as-is.
    expected = right
    tm.assert_numpy_array_equal(result, expected)
25
+
26
+
def test_object_comparison_2d():
    # A matrix compared with its transpose is equal exactly on the diagonal.
    arr = np.arange(9).reshape(3, 3).astype(object)
    transposed = arr.T

    expected = np.eye(3).astype(bool)
    tm.assert_numpy_array_equal(comparison_op(arr, transposed, operator.eq), expected)

    # Ensure that cython doesn't raise on non-writeable arg, which
    # we can get from np.broadcast_to
    transposed.flags.writeable = False
    tm.assert_numpy_array_equal(comparison_op(arr, transposed, operator.ne), ~expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_categorical.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas import (
4
+ Categorical,
5
+ Series,
6
+ )
7
+ import pandas._testing as tm
8
+
9
+
class TestCategoricalComparisons:
    def test_categorical_nan_equality(self):
        # A NaN category never compares equal, not even to itself.
        cat = Series(Categorical(["a", "b", "c", np.nan]))
        expected = Series([True, True, True, False])
        tm.assert_series_equal(cat == cat, expected)

    def test_categorical_tuple_equality(self):
        # GH 18050
        ser = Series([(0, 0), (0, 1), (0, 0), (1, 0), (1, 1)])
        expected = Series([True, False, True, False, False])

        # object dtype and categorical dtype must agree on tuple equality
        tm.assert_series_equal(ser == (0, 0), expected)
        tm.assert_series_equal(ser.astype("category") == (0, 0), expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_datetime64.py ADDED
@@ -0,0 +1,2469 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Arithmetic tests for DataFrame/Series/Index/Array classes that should
2
+ # behave identically.
3
+ # Specifically for datetime64 and datetime64tz dtypes
4
+ from datetime import (
5
+ datetime,
6
+ time,
7
+ timedelta,
8
+ )
9
+ from itertools import (
10
+ product,
11
+ starmap,
12
+ )
13
+ import operator
14
+
15
+ import numpy as np
16
+ import pytest
17
+ import pytz
18
+
19
+ from pandas._libs.tslibs.conversion import localize_pydatetime
20
+ from pandas._libs.tslibs.offsets import shift_months
21
+ from pandas.errors import PerformanceWarning
22
+
23
+ import pandas as pd
24
+ from pandas import (
25
+ DateOffset,
26
+ DatetimeIndex,
27
+ NaT,
28
+ Period,
29
+ Series,
30
+ Timedelta,
31
+ TimedeltaIndex,
32
+ Timestamp,
33
+ date_range,
34
+ )
35
+ import pandas._testing as tm
36
+ from pandas.core import roperator
37
+ from pandas.tests.arithmetic.common import (
38
+ assert_cannot_add,
39
+ assert_invalid_addsub_type,
40
+ assert_invalid_comparison,
41
+ get_upcast_box,
42
+ )
43
+
44
+ # ------------------------------------------------------------------
45
+ # Comparisons
46
+
47
+
class TestDatetime64ArrayLikeComparisons:
    # Comparison tests for datetime64 vectors fully parametrized over
    # DataFrame/Series/DatetimeIndex/DatetimeArray. Ideally all comparison
    # tests will eventually end up here.

    def test_compare_zerodim(self, tz_naive_fixture, box_with_array):
        # Test comparison with zero-dimensional array is unboxed
        tz = tz_naive_fixture
        box = box_with_array
        dti = date_range("20130101", periods=3, tz=tz)

        # zero-dim ndarray wrapping the first timestamp
        other = np.array(dti.to_numpy()[0])

        dtarr = tm.box_expected(dti, box)
        xbox = get_upcast_box(dtarr, other, True)
        result = dtarr <= other
        expected = np.array([True, False, False])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(result, expected)

    @pytest.mark.parametrize(
        "other",
        [
            "foo",
            -1,
            99,
            4.0,
            object(),
            timedelta(days=2),
            # GH#19800, GH#19301 datetime.date comparison raises to
            # match DatetimeIndex/Timestamp. This also matches the behavior
            # of stdlib datetime.datetime
            datetime(2001, 1, 1).date(),
            # GH#19301 None and NaN are *not* cast to NaT for comparisons
            None,
            np.nan,
        ],
    )
    def test_dt64arr_cmp_scalar_invalid(self, other, tz_naive_fixture, box_with_array):
        # GH#22074, GH#15966
        tz = tz_naive_fixture

        rng = date_range("1/1/2000", periods=10, tz=tz)
        dtarr = tm.box_expected(rng, box_with_array)
        # ==/!= succeed (all-False/all-True); ordering comparisons raise
        assert_invalid_comparison(dtarr, other, box_with_array)

    @pytest.mark.parametrize(
        "other",
        [
            # GH#4968 invalid date/int comparisons
            list(range(10)),
            np.arange(10),
            np.arange(10).astype(np.float32),
            np.arange(10).astype(object),
            pd.timedelta_range("1ns", periods=10).array,
            np.array(pd.timedelta_range("1ns", periods=10)),
            list(pd.timedelta_range("1ns", periods=10)),
            pd.timedelta_range("1 Day", periods=10).astype(object),
            pd.period_range("1971-01-01", freq="D", periods=10).array,
            pd.period_range("1971-01-01", freq="D", periods=10).astype(object),
        ],
    )
    def test_dt64arr_cmp_arraylike_invalid(
        self, other, tz_naive_fixture, box_with_array
    ):
        tz = tz_naive_fixture

        dta = date_range("1970-01-01", freq="ns", periods=10, tz=tz)._data
        obj = tm.box_expected(dta, box_with_array)
        assert_invalid_comparison(obj, other, box_with_array)

    def test_dt64arr_cmp_mixed_invalid(self, tz_naive_fixture):
        # Mixed object array: ints, one matching Timestamp, one Timedelta.
        tz = tz_naive_fixture

        dta = date_range("1970-01-01", freq="h", periods=5, tz=tz)._data

        other = np.array([0, 1, 2, dta[3], Timedelta(days=1)])
        result = dta == other
        # only the position holding dta[3] itself compares equal
        expected = np.array([False, False, False, True, False])
        tm.assert_numpy_array_equal(result, expected)

        result = dta != other
        tm.assert_numpy_array_equal(result, ~expected)

        # ordering comparisons against the mixed array must raise
        msg = "Invalid comparison between|Cannot compare type|not supported between"
        with pytest.raises(TypeError, match=msg):
            dta < other
        with pytest.raises(TypeError, match=msg):
            dta > other
        with pytest.raises(TypeError, match=msg):
            dta <= other
        with pytest.raises(TypeError, match=msg):
            dta >= other

    def test_dt64arr_nat_comparison(self, tz_naive_fixture, box_with_array):
        # GH#22242, GH#22163 DataFrame considered NaT == ts incorrectly
        tz = tz_naive_fixture
        box = box_with_array

        ts = Timestamp("2021-01-01", tz=tz)
        ser = Series([ts, NaT])

        obj = tm.box_expected(ser, box)
        xbox = get_upcast_box(obj, ts, True)

        # NaT == ts must be False, not NaT-propagating
        expected = Series([True, False], dtype=np.bool_)
        expected = tm.box_expected(expected, xbox)

        result = obj == ts
        tm.assert_equal(result, expected)
158
+
159
+
160
+ class TestDatetime64SeriesComparison:
161
+ # TODO: moved from tests.series.test_operators; needs cleanup
162
+
163
+ @pytest.mark.parametrize(
164
+ "pair",
165
+ [
166
+ (
167
+ [Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
168
+ [NaT, NaT, Timestamp("2011-01-03")],
169
+ ),
170
+ (
171
+ [Timedelta("1 days"), NaT, Timedelta("3 days")],
172
+ [NaT, NaT, Timedelta("3 days")],
173
+ ),
174
+ (
175
+ [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
176
+ [NaT, NaT, Period("2011-03", freq="M")],
177
+ ),
178
+ ],
179
+ )
180
+ @pytest.mark.parametrize("reverse", [True, False])
181
+ @pytest.mark.parametrize("dtype", [None, object])
182
+ @pytest.mark.parametrize(
183
+ "op, expected",
184
+ [
185
+ (operator.eq, Series([False, False, True])),
186
+ (operator.ne, Series([True, True, False])),
187
+ (operator.lt, Series([False, False, False])),
188
+ (operator.gt, Series([False, False, False])),
189
+ (operator.ge, Series([False, False, True])),
190
+ (operator.le, Series([False, False, True])),
191
+ ],
192
+ )
193
+ def test_nat_comparisons(
194
+ self,
195
+ dtype,
196
+ index_or_series,
197
+ reverse,
198
+ pair,
199
+ op,
200
+ expected,
201
+ ):
202
+ box = index_or_series
203
+ lhs, rhs = pair
204
+ if reverse:
205
+ # add lhs / rhs switched data
206
+ lhs, rhs = rhs, lhs
207
+
208
+ left = Series(lhs, dtype=dtype)
209
+ right = box(rhs, dtype=dtype)
210
+
211
+ result = op(left, right)
212
+
213
+ tm.assert_series_equal(result, expected)
214
+
215
+ @pytest.mark.parametrize(
216
+ "data",
217
+ [
218
+ [Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")],
219
+ [Timedelta("1 days"), NaT, Timedelta("3 days")],
220
+ [Period("2011-01", freq="M"), NaT, Period("2011-03", freq="M")],
221
+ ],
222
+ )
223
+ @pytest.mark.parametrize("dtype", [None, object])
224
+ def test_nat_comparisons_scalar(self, dtype, data, box_with_array):
225
+ box = box_with_array
226
+
227
+ left = Series(data, dtype=dtype)
228
+ left = tm.box_expected(left, box)
229
+ xbox = get_upcast_box(left, NaT, True)
230
+
231
+ expected = [False, False, False]
232
+ expected = tm.box_expected(expected, xbox)
233
+ if box is pd.array and dtype is object:
234
+ expected = pd.array(expected, dtype="bool")
235
+
236
+ tm.assert_equal(left == NaT, expected)
237
+ tm.assert_equal(NaT == left, expected)
238
+
239
+ expected = [True, True, True]
240
+ expected = tm.box_expected(expected, xbox)
241
+ if box is pd.array and dtype is object:
242
+ expected = pd.array(expected, dtype="bool")
243
+ tm.assert_equal(left != NaT, expected)
244
+ tm.assert_equal(NaT != left, expected)
245
+
246
+ expected = [False, False, False]
247
+ expected = tm.box_expected(expected, xbox)
248
+ if box is pd.array and dtype is object:
249
+ expected = pd.array(expected, dtype="bool")
250
+ tm.assert_equal(left < NaT, expected)
251
+ tm.assert_equal(NaT > left, expected)
252
+ tm.assert_equal(left <= NaT, expected)
253
+ tm.assert_equal(NaT >= left, expected)
254
+
255
+ tm.assert_equal(left > NaT, expected)
256
+ tm.assert_equal(NaT < left, expected)
257
+ tm.assert_equal(left >= NaT, expected)
258
+ tm.assert_equal(NaT <= left, expected)
259
+
260
+ @pytest.mark.parametrize("val", [datetime(2000, 1, 4), datetime(2000, 1, 5)])
261
+ def test_series_comparison_scalars(self, val):
262
+ series = Series(date_range("1/1/2000", periods=10))
263
+
264
+ result = series > val
265
+ expected = Series([x > val for x in series])
266
+ tm.assert_series_equal(result, expected)
267
+
268
+ @pytest.mark.parametrize(
269
+ "left,right", [("lt", "gt"), ("le", "ge"), ("eq", "eq"), ("ne", "ne")]
270
+ )
271
+ def test_timestamp_compare_series(self, left, right):
272
+ # see gh-4982
273
+ # Make sure we can compare Timestamps on the right AND left hand side.
274
+ ser = Series(date_range("20010101", periods=10), name="dates")
275
+ s_nat = ser.copy(deep=True)
276
+
277
+ ser[0] = Timestamp("nat")
278
+ ser[3] = Timestamp("nat")
279
+
280
+ left_f = getattr(operator, left)
281
+ right_f = getattr(operator, right)
282
+
283
+ # No NaT
284
+ expected = left_f(ser, Timestamp("20010109"))
285
+ result = right_f(Timestamp("20010109"), ser)
286
+ tm.assert_series_equal(result, expected)
287
+
288
+ # NaT
289
+ expected = left_f(ser, Timestamp("nat"))
290
+ result = right_f(Timestamp("nat"), ser)
291
+ tm.assert_series_equal(result, expected)
292
+
293
+ # Compare to Timestamp with series containing NaT
294
+ expected = left_f(s_nat, Timestamp("20010109"))
295
+ result = right_f(Timestamp("20010109"), s_nat)
296
+ tm.assert_series_equal(result, expected)
297
+
298
+ # Compare to NaT with series containing NaT
299
+ expected = left_f(s_nat, NaT)
300
+ result = right_f(NaT, s_nat)
301
+ tm.assert_series_equal(result, expected)
302
+
303
+ def test_dt64arr_timestamp_equality(self, box_with_array):
304
+ # GH#11034
305
+ box = box_with_array
306
+
307
+ ser = Series([Timestamp("2000-01-29 01:59:00"), Timestamp("2000-01-30"), NaT])
308
+ ser = tm.box_expected(ser, box)
309
+ xbox = get_upcast_box(ser, ser, True)
310
+
311
+ result = ser != ser
312
+ expected = tm.box_expected([False, False, True], xbox)
313
+ tm.assert_equal(result, expected)
314
+
315
+ if box is pd.DataFrame:
316
+ # alignment for frame vs series comparisons deprecated
317
+ # in GH#46795 enforced 2.0
318
+ with pytest.raises(ValueError, match="not aligned"):
319
+ ser != ser[0]
320
+
321
+ else:
322
+ result = ser != ser[0]
323
+ expected = tm.box_expected([False, True, True], xbox)
324
+ tm.assert_equal(result, expected)
325
+
326
+ if box is pd.DataFrame:
327
+ # alignment for frame vs series comparisons deprecated
328
+ # in GH#46795 enforced 2.0
329
+ with pytest.raises(ValueError, match="not aligned"):
330
+ ser != ser[2]
331
+ else:
332
+ result = ser != ser[2]
333
+ expected = tm.box_expected([True, True, True], xbox)
334
+ tm.assert_equal(result, expected)
335
+
336
+ result = ser == ser
337
+ expected = tm.box_expected([True, True, False], xbox)
338
+ tm.assert_equal(result, expected)
339
+
340
+ if box is pd.DataFrame:
341
+ # alignment for frame vs series comparisons deprecated
342
+ # in GH#46795 enforced 2.0
343
+ with pytest.raises(ValueError, match="not aligned"):
344
+ ser == ser[0]
345
+ else:
346
+ result = ser == ser[0]
347
+ expected = tm.box_expected([True, False, False], xbox)
348
+ tm.assert_equal(result, expected)
349
+
350
+ if box is pd.DataFrame:
351
+ # alignment for frame vs series comparisons deprecated
352
+ # in GH#46795 enforced 2.0
353
+ with pytest.raises(ValueError, match="not aligned"):
354
+ ser == ser[2]
355
+ else:
356
+ result = ser == ser[2]
357
+ expected = tm.box_expected([False, False, False], xbox)
358
+ tm.assert_equal(result, expected)
359
+
360
+ @pytest.mark.parametrize(
361
+ "datetimelike",
362
+ [
363
+ Timestamp("20130101"),
364
+ datetime(2013, 1, 1),
365
+ np.datetime64("2013-01-01T00:00", "ns"),
366
+ ],
367
+ )
368
+ @pytest.mark.parametrize(
369
+ "op,expected",
370
+ [
371
+ (operator.lt, [True, False, False, False]),
372
+ (operator.le, [True, True, False, False]),
373
+ (operator.eq, [False, True, False, False]),
374
+ (operator.gt, [False, False, False, True]),
375
+ ],
376
+ )
377
+ def test_dt64_compare_datetime_scalar(self, datetimelike, op, expected):
378
+ # GH#17965, test for ability to compare datetime64[ns] columns
379
+ # to datetimelike
380
+ ser = Series(
381
+ [
382
+ Timestamp("20120101"),
383
+ Timestamp("20130101"),
384
+ np.nan,
385
+ Timestamp("20130103"),
386
+ ],
387
+ name="A",
388
+ )
389
+ result = op(ser, datetimelike)
390
+ expected = Series(expected, name="A")
391
+ tm.assert_series_equal(result, expected)
392
+
393
+
394
+ class TestDatetimeIndexComparisons:
395
+ # TODO: moved from tests.indexes.test_base; parametrize and de-duplicate
396
+ def test_comparators(self, comparison_op):
397
+ index = date_range("2020-01-01", periods=10)
398
+ element = index[len(index) // 2]
399
+ element = Timestamp(element).to_datetime64()
400
+
401
+ arr = np.array(index)
402
+ arr_result = comparison_op(arr, element)
403
+ index_result = comparison_op(index, element)
404
+
405
+ assert isinstance(index_result, np.ndarray)
406
+ tm.assert_numpy_array_equal(arr_result, index_result)
407
+
408
+ @pytest.mark.parametrize(
409
+ "other",
410
+ [datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
411
+ )
412
+ def test_dti_cmp_datetimelike(self, other, tz_naive_fixture):
413
+ tz = tz_naive_fixture
414
+ dti = date_range("2016-01-01", periods=2, tz=tz)
415
+ if tz is not None:
416
+ if isinstance(other, np.datetime64):
417
+ pytest.skip(f"{type(other).__name__} is not tz aware")
418
+ other = localize_pydatetime(other, dti.tzinfo)
419
+
420
+ result = dti == other
421
+ expected = np.array([True, False])
422
+ tm.assert_numpy_array_equal(result, expected)
423
+
424
+ result = dti > other
425
+ expected = np.array([False, True])
426
+ tm.assert_numpy_array_equal(result, expected)
427
+
428
+ result = dti >= other
429
+ expected = np.array([True, True])
430
+ tm.assert_numpy_array_equal(result, expected)
431
+
432
+ result = dti < other
433
+ expected = np.array([False, False])
434
+ tm.assert_numpy_array_equal(result, expected)
435
+
436
+ result = dti <= other
437
+ expected = np.array([True, False])
438
+ tm.assert_numpy_array_equal(result, expected)
439
+
440
+ @pytest.mark.parametrize("dtype", [None, object])
441
+ def test_dti_cmp_nat(self, dtype, box_with_array):
442
+ left = DatetimeIndex([Timestamp("2011-01-01"), NaT, Timestamp("2011-01-03")])
443
+ right = DatetimeIndex([NaT, NaT, Timestamp("2011-01-03")])
444
+
445
+ left = tm.box_expected(left, box_with_array)
446
+ right = tm.box_expected(right, box_with_array)
447
+ xbox = get_upcast_box(left, right, True)
448
+
449
+ lhs, rhs = left, right
450
+ if dtype is object:
451
+ lhs, rhs = left.astype(object), right.astype(object)
452
+
453
+ result = rhs == lhs
454
+ expected = np.array([False, False, True])
455
+ expected = tm.box_expected(expected, xbox)
456
+ tm.assert_equal(result, expected)
457
+
458
+ result = lhs != rhs
459
+ expected = np.array([True, True, False])
460
+ expected = tm.box_expected(expected, xbox)
461
+ tm.assert_equal(result, expected)
462
+
463
+ expected = np.array([False, False, False])
464
+ expected = tm.box_expected(expected, xbox)
465
+ tm.assert_equal(lhs == NaT, expected)
466
+ tm.assert_equal(NaT == rhs, expected)
467
+
468
+ expected = np.array([True, True, True])
469
+ expected = tm.box_expected(expected, xbox)
470
+ tm.assert_equal(lhs != NaT, expected)
471
+ tm.assert_equal(NaT != lhs, expected)
472
+
473
+ expected = np.array([False, False, False])
474
+ expected = tm.box_expected(expected, xbox)
475
+ tm.assert_equal(lhs < NaT, expected)
476
+ tm.assert_equal(NaT > lhs, expected)
477
+
478
+ def test_dti_cmp_nat_behaves_like_float_cmp_nan(self):
479
+ fidx1 = pd.Index([1.0, np.nan, 3.0, np.nan, 5.0, 7.0])
480
+ fidx2 = pd.Index([2.0, 3.0, np.nan, np.nan, 6.0, 7.0])
481
+
482
+ didx1 = DatetimeIndex(
483
+ ["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
484
+ )
485
+ didx2 = DatetimeIndex(
486
+ ["2014-02-01", "2014-03-01", NaT, NaT, "2014-06-01", "2014-07-01"]
487
+ )
488
+ darr = np.array(
489
+ [
490
+ np.datetime64("2014-02-01 00:00"),
491
+ np.datetime64("2014-03-01 00:00"),
492
+ np.datetime64("nat"),
493
+ np.datetime64("nat"),
494
+ np.datetime64("2014-06-01 00:00"),
495
+ np.datetime64("2014-07-01 00:00"),
496
+ ]
497
+ )
498
+
499
+ cases = [(fidx1, fidx2), (didx1, didx2), (didx1, darr)]
500
+
501
+ # Check pd.NaT is handles as the same as np.nan
502
+ with tm.assert_produces_warning(None):
503
+ for idx1, idx2 in cases:
504
+ result = idx1 < idx2
505
+ expected = np.array([True, False, False, False, True, False])
506
+ tm.assert_numpy_array_equal(result, expected)
507
+
508
+ result = idx2 > idx1
509
+ expected = np.array([True, False, False, False, True, False])
510
+ tm.assert_numpy_array_equal(result, expected)
511
+
512
+ result = idx1 <= idx2
513
+ expected = np.array([True, False, False, False, True, True])
514
+ tm.assert_numpy_array_equal(result, expected)
515
+
516
+ result = idx2 >= idx1
517
+ expected = np.array([True, False, False, False, True, True])
518
+ tm.assert_numpy_array_equal(result, expected)
519
+
520
+ result = idx1 == idx2
521
+ expected = np.array([False, False, False, False, False, True])
522
+ tm.assert_numpy_array_equal(result, expected)
523
+
524
+ result = idx1 != idx2
525
+ expected = np.array([True, True, True, True, True, False])
526
+ tm.assert_numpy_array_equal(result, expected)
527
+
528
+ with tm.assert_produces_warning(None):
529
+ for idx1, val in [(fidx1, np.nan), (didx1, NaT)]:
530
+ result = idx1 < val
531
+ expected = np.array([False, False, False, False, False, False])
532
+ tm.assert_numpy_array_equal(result, expected)
533
+ result = idx1 > val
534
+ tm.assert_numpy_array_equal(result, expected)
535
+
536
+ result = idx1 <= val
537
+ tm.assert_numpy_array_equal(result, expected)
538
+ result = idx1 >= val
539
+ tm.assert_numpy_array_equal(result, expected)
540
+
541
+ result = idx1 == val
542
+ tm.assert_numpy_array_equal(result, expected)
543
+
544
+ result = idx1 != val
545
+ expected = np.array([True, True, True, True, True, True])
546
+ tm.assert_numpy_array_equal(result, expected)
547
+
548
+ # Check pd.NaT is handles as the same as np.nan
549
+ with tm.assert_produces_warning(None):
550
+ for idx1, val in [(fidx1, 3), (didx1, datetime(2014, 3, 1))]:
551
+ result = idx1 < val
552
+ expected = np.array([True, False, False, False, False, False])
553
+ tm.assert_numpy_array_equal(result, expected)
554
+ result = idx1 > val
555
+ expected = np.array([False, False, False, False, True, True])
556
+ tm.assert_numpy_array_equal(result, expected)
557
+
558
+ result = idx1 <= val
559
+ expected = np.array([True, False, True, False, False, False])
560
+ tm.assert_numpy_array_equal(result, expected)
561
+ result = idx1 >= val
562
+ expected = np.array([False, False, True, False, True, True])
563
+ tm.assert_numpy_array_equal(result, expected)
564
+
565
+ result = idx1 == val
566
+ expected = np.array([False, False, True, False, False, False])
567
+ tm.assert_numpy_array_equal(result, expected)
568
+
569
+ result = idx1 != val
570
+ expected = np.array([True, True, False, True, True, True])
571
+ tm.assert_numpy_array_equal(result, expected)
572
+
573
+ def test_comparison_tzawareness_compat(self, comparison_op, box_with_array):
574
+ # GH#18162
575
+ op = comparison_op
576
+ box = box_with_array
577
+
578
+ dr = date_range("2016-01-01", periods=6)
579
+ dz = dr.tz_localize("US/Pacific")
580
+
581
+ dr = tm.box_expected(dr, box)
582
+ dz = tm.box_expected(dz, box)
583
+
584
+ if box is pd.DataFrame:
585
+ tolist = lambda x: x.astype(object).values.tolist()[0]
586
+ else:
587
+ tolist = list
588
+
589
+ if op not in [operator.eq, operator.ne]:
590
+ msg = (
591
+ r"Invalid comparison between dtype=datetime64\[ns.*\] "
592
+ "and (Timestamp|DatetimeArray|list|ndarray)"
593
+ )
594
+ with pytest.raises(TypeError, match=msg):
595
+ op(dr, dz)
596
+
597
+ with pytest.raises(TypeError, match=msg):
598
+ op(dr, tolist(dz))
599
+ with pytest.raises(TypeError, match=msg):
600
+ op(dr, np.array(tolist(dz), dtype=object))
601
+ with pytest.raises(TypeError, match=msg):
602
+ op(dz, dr)
603
+
604
+ with pytest.raises(TypeError, match=msg):
605
+ op(dz, tolist(dr))
606
+ with pytest.raises(TypeError, match=msg):
607
+ op(dz, np.array(tolist(dr), dtype=object))
608
+
609
+ # The aware==aware and naive==naive comparisons should *not* raise
610
+ assert np.all(dr == dr)
611
+ assert np.all(dr == tolist(dr))
612
+ assert np.all(tolist(dr) == dr)
613
+ assert np.all(np.array(tolist(dr), dtype=object) == dr)
614
+ assert np.all(dr == np.array(tolist(dr), dtype=object))
615
+
616
+ assert np.all(dz == dz)
617
+ assert np.all(dz == tolist(dz))
618
+ assert np.all(tolist(dz) == dz)
619
+ assert np.all(np.array(tolist(dz), dtype=object) == dz)
620
+ assert np.all(dz == np.array(tolist(dz), dtype=object))
621
+
622
+ def test_comparison_tzawareness_compat_scalars(self, comparison_op, box_with_array):
623
+ # GH#18162
624
+ op = comparison_op
625
+
626
+ dr = date_range("2016-01-01", periods=6)
627
+ dz = dr.tz_localize("US/Pacific")
628
+
629
+ dr = tm.box_expected(dr, box_with_array)
630
+ dz = tm.box_expected(dz, box_with_array)
631
+
632
+ # Check comparisons against scalar Timestamps
633
+ ts = Timestamp("2000-03-14 01:59")
634
+ ts_tz = Timestamp("2000-03-14 01:59", tz="Europe/Amsterdam")
635
+
636
+ assert np.all(dr > ts)
637
+ msg = r"Invalid comparison between dtype=datetime64\[ns.*\] and Timestamp"
638
+ if op not in [operator.eq, operator.ne]:
639
+ with pytest.raises(TypeError, match=msg):
640
+ op(dr, ts_tz)
641
+
642
+ assert np.all(dz > ts_tz)
643
+ if op not in [operator.eq, operator.ne]:
644
+ with pytest.raises(TypeError, match=msg):
645
+ op(dz, ts)
646
+
647
+ if op not in [operator.eq, operator.ne]:
648
+ # GH#12601: Check comparison against Timestamps and DatetimeIndex
649
+ with pytest.raises(TypeError, match=msg):
650
+ op(ts, dz)
651
+
652
+ @pytest.mark.parametrize(
653
+ "other",
654
+ [datetime(2016, 1, 1), Timestamp("2016-01-01"), np.datetime64("2016-01-01")],
655
+ )
656
+ # Bug in NumPy? https://github.com/numpy/numpy/issues/13841
657
+ # Raising in __eq__ will fallback to NumPy, which warns, fails,
658
+ # then re-raises the original exception. So we just need to ignore.
659
+ @pytest.mark.filterwarnings("ignore:elementwise comp:DeprecationWarning")
660
+ def test_scalar_comparison_tzawareness(
661
+ self, comparison_op, other, tz_aware_fixture, box_with_array
662
+ ):
663
+ op = comparison_op
664
+ tz = tz_aware_fixture
665
+ dti = date_range("2016-01-01", periods=2, tz=tz)
666
+
667
+ dtarr = tm.box_expected(dti, box_with_array)
668
+ xbox = get_upcast_box(dtarr, other, True)
669
+ if op in [operator.eq, operator.ne]:
670
+ exbool = op is operator.ne
671
+ expected = np.array([exbool, exbool], dtype=bool)
672
+ expected = tm.box_expected(expected, xbox)
673
+
674
+ result = op(dtarr, other)
675
+ tm.assert_equal(result, expected)
676
+
677
+ result = op(other, dtarr)
678
+ tm.assert_equal(result, expected)
679
+ else:
680
+ msg = (
681
+ r"Invalid comparison between dtype=datetime64\[ns, .*\] "
682
+ f"and {type(other).__name__}"
683
+ )
684
+ with pytest.raises(TypeError, match=msg):
685
+ op(dtarr, other)
686
+ with pytest.raises(TypeError, match=msg):
687
+ op(other, dtarr)
688
+
689
+ def test_nat_comparison_tzawareness(self, comparison_op):
690
+ # GH#19276
691
+ # tzaware DatetimeIndex should not raise when compared to NaT
692
+ op = comparison_op
693
+
694
+ dti = DatetimeIndex(
695
+ ["2014-01-01", NaT, "2014-03-01", NaT, "2014-05-01", "2014-07-01"]
696
+ )
697
+ expected = np.array([op == operator.ne] * len(dti))
698
+ result = op(dti, NaT)
699
+ tm.assert_numpy_array_equal(result, expected)
700
+
701
+ result = op(dti.tz_localize("US/Pacific"), NaT)
702
+ tm.assert_numpy_array_equal(result, expected)
703
+
704
+ def test_dti_cmp_str(self, tz_naive_fixture):
705
+ # GH#22074
706
+ # regardless of tz, we expect these comparisons are valid
707
+ tz = tz_naive_fixture
708
+ rng = date_range("1/1/2000", periods=10, tz=tz)
709
+ other = "1/1/2000"
710
+
711
+ result = rng == other
712
+ expected = np.array([True] + [False] * 9)
713
+ tm.assert_numpy_array_equal(result, expected)
714
+
715
+ result = rng != other
716
+ expected = np.array([False] + [True] * 9)
717
+ tm.assert_numpy_array_equal(result, expected)
718
+
719
+ result = rng < other
720
+ expected = np.array([False] * 10)
721
+ tm.assert_numpy_array_equal(result, expected)
722
+
723
+ result = rng <= other
724
+ expected = np.array([True] + [False] * 9)
725
+ tm.assert_numpy_array_equal(result, expected)
726
+
727
+ result = rng > other
728
+ expected = np.array([False] + [True] * 9)
729
+ tm.assert_numpy_array_equal(result, expected)
730
+
731
+ result = rng >= other
732
+ expected = np.array([True] * 10)
733
+ tm.assert_numpy_array_equal(result, expected)
734
+
735
+ def test_dti_cmp_list(self):
736
+ rng = date_range("1/1/2000", periods=10)
737
+
738
+ result = rng == list(rng)
739
+ expected = rng == rng
740
+ tm.assert_numpy_array_equal(result, expected)
741
+
742
+ @pytest.mark.parametrize(
743
+ "other",
744
+ [
745
+ pd.timedelta_range("1D", periods=10),
746
+ pd.timedelta_range("1D", periods=10).to_series(),
747
+ pd.timedelta_range("1D", periods=10).asi8.view("m8[ns]"),
748
+ ],
749
+ ids=lambda x: type(x).__name__,
750
+ )
751
+ def test_dti_cmp_tdi_tzawareness(self, other):
752
+ # GH#22074
753
+ # reversion test that we _don't_ call _assert_tzawareness_compat
754
+ # when comparing against TimedeltaIndex
755
+ dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
756
+
757
+ result = dti == other
758
+ expected = np.array([False] * 10)
759
+ tm.assert_numpy_array_equal(result, expected)
760
+
761
+ result = dti != other
762
+ expected = np.array([True] * 10)
763
+ tm.assert_numpy_array_equal(result, expected)
764
+ msg = "Invalid comparison between"
765
+ with pytest.raises(TypeError, match=msg):
766
+ dti < other
767
+ with pytest.raises(TypeError, match=msg):
768
+ dti <= other
769
+ with pytest.raises(TypeError, match=msg):
770
+ dti > other
771
+ with pytest.raises(TypeError, match=msg):
772
+ dti >= other
773
+
774
+ def test_dti_cmp_object_dtype(self):
775
+ # GH#22074
776
+ dti = date_range("2000-01-01", periods=10, tz="Asia/Tokyo")
777
+
778
+ other = dti.astype("O")
779
+
780
+ result = dti == other
781
+ expected = np.array([True] * 10)
782
+ tm.assert_numpy_array_equal(result, expected)
783
+
784
+ other = dti.tz_localize(None)
785
+ result = dti != other
786
+ tm.assert_numpy_array_equal(result, expected)
787
+
788
+ other = np.array(list(dti[:5]) + [Timedelta(days=1)] * 5)
789
+ result = dti == other
790
+ expected = np.array([True] * 5 + [False] * 5)
791
+ tm.assert_numpy_array_equal(result, expected)
792
+ msg = ">=' not supported between instances of 'Timestamp' and 'Timedelta'"
793
+ with pytest.raises(TypeError, match=msg):
794
+ dti >= other
795
+
796
+
797
+ # ------------------------------------------------------------------
798
+ # Arithmetic
799
+
800
+
801
+ class TestDatetime64Arithmetic:
802
+ # This class is intended for "finished" tests that are fully parametrized
803
+ # over DataFrame/Series/Index/DatetimeArray
804
+
805
+ # -------------------------------------------------------------
806
+ # Addition/Subtraction of timedelta-like
807
+
808
+ @pytest.mark.arm_slow
809
+ def test_dt64arr_add_timedeltalike_scalar(
810
+ self, tz_naive_fixture, two_hours, box_with_array
811
+ ):
812
+ # GH#22005, GH#22163 check DataFrame doesn't raise TypeError
813
+ tz = tz_naive_fixture
814
+
815
+ rng = date_range("2000-01-01", "2000-02-01", tz=tz)
816
+ expected = date_range("2000-01-01 02:00", "2000-02-01 02:00", tz=tz)
817
+
818
+ rng = tm.box_expected(rng, box_with_array)
819
+ expected = tm.box_expected(expected, box_with_array)
820
+
821
+ result = rng + two_hours
822
+ tm.assert_equal(result, expected)
823
+
824
+ result = two_hours + rng
825
+ tm.assert_equal(result, expected)
826
+
827
+ rng += two_hours
828
+ tm.assert_equal(rng, expected)
829
+
830
+ def test_dt64arr_sub_timedeltalike_scalar(
831
+ self, tz_naive_fixture, two_hours, box_with_array
832
+ ):
833
+ tz = tz_naive_fixture
834
+
835
+ rng = date_range("2000-01-01", "2000-02-01", tz=tz)
836
+ expected = date_range("1999-12-31 22:00", "2000-01-31 22:00", tz=tz)
837
+
838
+ rng = tm.box_expected(rng, box_with_array)
839
+ expected = tm.box_expected(expected, box_with_array)
840
+
841
+ result = rng - two_hours
842
+ tm.assert_equal(result, expected)
843
+
844
+ rng -= two_hours
845
+ tm.assert_equal(rng, expected)
846
+
847
+ def test_dt64_array_sub_dt_with_different_timezone(self, box_with_array):
848
+ t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
849
+ t1 = tm.box_expected(t1, box_with_array)
850
+ t2 = Timestamp("20130101").tz_localize("CET")
851
+ tnaive = Timestamp(20130101)
852
+
853
+ result = t1 - t2
854
+ expected = TimedeltaIndex(
855
+ ["0 days 06:00:00", "1 days 06:00:00", "2 days 06:00:00"]
856
+ )
857
+ expected = tm.box_expected(expected, box_with_array)
858
+ tm.assert_equal(result, expected)
859
+
860
+ result = t2 - t1
861
+ expected = TimedeltaIndex(
862
+ ["-1 days +18:00:00", "-2 days +18:00:00", "-3 days +18:00:00"]
863
+ )
864
+ expected = tm.box_expected(expected, box_with_array)
865
+ tm.assert_equal(result, expected)
866
+
867
+ msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
868
+ with pytest.raises(TypeError, match=msg):
869
+ t1 - tnaive
870
+
871
+ with pytest.raises(TypeError, match=msg):
872
+ tnaive - t1
873
+
874
+ def test_dt64_array_sub_dt64_array_with_different_timezone(self, box_with_array):
875
+ t1 = date_range("20130101", periods=3).tz_localize("US/Eastern")
876
+ t1 = tm.box_expected(t1, box_with_array)
877
+ t2 = date_range("20130101", periods=3).tz_localize("CET")
878
+ t2 = tm.box_expected(t2, box_with_array)
879
+ tnaive = date_range("20130101", periods=3)
880
+
881
+ result = t1 - t2
882
+ expected = TimedeltaIndex(
883
+ ["0 days 06:00:00", "0 days 06:00:00", "0 days 06:00:00"]
884
+ )
885
+ expected = tm.box_expected(expected, box_with_array)
886
+ tm.assert_equal(result, expected)
887
+
888
+ result = t2 - t1
889
+ expected = TimedeltaIndex(
890
+ ["-1 days +18:00:00", "-1 days +18:00:00", "-1 days +18:00:00"]
891
+ )
892
+ expected = tm.box_expected(expected, box_with_array)
893
+ tm.assert_equal(result, expected)
894
+
895
+ msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
896
+ with pytest.raises(TypeError, match=msg):
897
+ t1 - tnaive
898
+
899
+ with pytest.raises(TypeError, match=msg):
900
+ tnaive - t1
901
+
902
+ def test_dt64arr_add_sub_td64_nat(self, box_with_array, tz_naive_fixture):
903
+ # GH#23320 special handling for timedelta64("NaT")
904
+ tz = tz_naive_fixture
905
+
906
+ dti = date_range("1994-04-01", periods=9, tz=tz, freq="QS")
907
+ other = np.timedelta64("NaT")
908
+ expected = DatetimeIndex(["NaT"] * 9, tz=tz).as_unit("ns")
909
+
910
+ obj = tm.box_expected(dti, box_with_array)
911
+ expected = tm.box_expected(expected, box_with_array)
912
+
913
+ result = obj + other
914
+ tm.assert_equal(result, expected)
915
+ result = other + obj
916
+ tm.assert_equal(result, expected)
917
+ result = obj - other
918
+ tm.assert_equal(result, expected)
919
+ msg = "cannot subtract"
920
+ with pytest.raises(TypeError, match=msg):
921
+ other - obj
922
+
923
+ def test_dt64arr_add_sub_td64ndarray(self, tz_naive_fixture, box_with_array):
924
+ tz = tz_naive_fixture
925
+ dti = date_range("2016-01-01", periods=3, tz=tz)
926
+ tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"])
927
+ tdarr = tdi.values
928
+
929
+ expected = date_range("2015-12-31", "2016-01-02", periods=3, tz=tz)
930
+
931
+ dtarr = tm.box_expected(dti, box_with_array)
932
+ expected = tm.box_expected(expected, box_with_array)
933
+
934
+ result = dtarr + tdarr
935
+ tm.assert_equal(result, expected)
936
+ result = tdarr + dtarr
937
+ tm.assert_equal(result, expected)
938
+
939
+ expected = date_range("2016-01-02", "2016-01-04", periods=3, tz=tz)
940
+ expected = tm.box_expected(expected, box_with_array)
941
+
942
+ result = dtarr - tdarr
943
+ tm.assert_equal(result, expected)
944
+ msg = "cannot subtract|(bad|unsupported) operand type for unary"
945
+ with pytest.raises(TypeError, match=msg):
946
+ tdarr - dtarr
947
+
948
+ # -----------------------------------------------------------------
949
+ # Subtraction of datetime-like scalars
950
+
951
+ @pytest.mark.parametrize(
952
+ "ts",
953
+ [
954
+ Timestamp("2013-01-01"),
955
+ Timestamp("2013-01-01").to_pydatetime(),
956
+ Timestamp("2013-01-01").to_datetime64(),
957
+ # GH#7996, GH#22163 ensure non-nano datetime64 is converted to nano
958
+ # for DataFrame operation
959
+ np.datetime64("2013-01-01", "D"),
960
+ ],
961
+ )
962
+ def test_dt64arr_sub_dtscalar(self, box_with_array, ts):
963
+ # GH#8554, GH#22163 DataFrame op should _not_ return dt64 dtype
964
+ idx = date_range("2013-01-01", periods=3)._with_freq(None)
965
+ idx = tm.box_expected(idx, box_with_array)
966
+
967
+ expected = TimedeltaIndex(["0 Days", "1 Day", "2 Days"])
968
+ expected = tm.box_expected(expected, box_with_array)
969
+
970
+ result = idx - ts
971
+ tm.assert_equal(result, expected)
972
+
973
+ result = ts - idx
974
+ tm.assert_equal(result, -expected)
975
+ tm.assert_equal(result, -expected)
976
+
977
+ def test_dt64arr_sub_timestamp_tzaware(self, box_with_array):
978
+ ser = date_range("2014-03-17", periods=2, freq="D", tz="US/Eastern")
979
+ ser = ser._with_freq(None)
980
+ ts = ser[0]
981
+
982
+ ser = tm.box_expected(ser, box_with_array)
983
+
984
+ delta_series = Series([np.timedelta64(0, "D"), np.timedelta64(1, "D")])
985
+ expected = tm.box_expected(delta_series, box_with_array)
986
+
987
+ tm.assert_equal(ser - ts, expected)
988
+ tm.assert_equal(ts - ser, -expected)
989
+
990
+ def test_dt64arr_sub_NaT(self, box_with_array, unit):
991
+ # GH#18808
992
+ dti = DatetimeIndex([NaT, Timestamp("19900315")]).as_unit(unit)
993
+ ser = tm.box_expected(dti, box_with_array)
994
+
995
+ result = ser - NaT
996
+ expected = Series([NaT, NaT], dtype=f"timedelta64[{unit}]")
997
+ expected = tm.box_expected(expected, box_with_array)
998
+ tm.assert_equal(result, expected)
999
+
1000
+ dti_tz = dti.tz_localize("Asia/Tokyo")
1001
+ ser_tz = tm.box_expected(dti_tz, box_with_array)
1002
+
1003
+ result = ser_tz - NaT
1004
+ expected = Series([NaT, NaT], dtype=f"timedelta64[{unit}]")
1005
+ expected = tm.box_expected(expected, box_with_array)
1006
+ tm.assert_equal(result, expected)
1007
+
1008
+ # -------------------------------------------------------------
1009
+ # Subtraction of datetime-like array-like
1010
+
1011
+ def test_dt64arr_sub_dt64object_array(self, box_with_array, tz_naive_fixture):
1012
+ dti = date_range("2016-01-01", periods=3, tz=tz_naive_fixture)
1013
+ expected = dti - dti
1014
+
1015
+ obj = tm.box_expected(dti, box_with_array)
1016
+ expected = tm.box_expected(expected, box_with_array).astype(object)
1017
+
1018
+ with tm.assert_produces_warning(PerformanceWarning):
1019
+ result = obj - obj.astype(object)
1020
+ tm.assert_equal(result, expected)
1021
+
1022
+ def test_dt64arr_naive_sub_dt64ndarray(self, box_with_array):
1023
+ dti = date_range("2016-01-01", periods=3, tz=None)
1024
+ dt64vals = dti.values
1025
+
1026
+ dtarr = tm.box_expected(dti, box_with_array)
1027
+
1028
+ expected = dtarr - dtarr
1029
+ result = dtarr - dt64vals
1030
+ tm.assert_equal(result, expected)
1031
+ result = dt64vals - dtarr
1032
+ tm.assert_equal(result, expected)
1033
+
1034
+ def test_dt64arr_aware_sub_dt64ndarray_raises(
1035
+ self, tz_aware_fixture, box_with_array
1036
+ ):
1037
+ tz = tz_aware_fixture
1038
+ dti = date_range("2016-01-01", periods=3, tz=tz)
1039
+ dt64vals = dti.values
1040
+
1041
+ dtarr = tm.box_expected(dti, box_with_array)
1042
+ msg = "Cannot subtract tz-naive and tz-aware datetime"
1043
+ with pytest.raises(TypeError, match=msg):
1044
+ dtarr - dt64vals
1045
+ with pytest.raises(TypeError, match=msg):
1046
+ dt64vals - dtarr
1047
+
1048
+ # -------------------------------------------------------------
1049
+ # Addition of datetime-like others (invalid)
1050
+
1051
+ def test_dt64arr_add_dtlike_raises(self, tz_naive_fixture, box_with_array):
1052
+ # GH#22163 ensure DataFrame doesn't cast Timestamp to i8
1053
+ # GH#9631
1054
+ tz = tz_naive_fixture
1055
+
1056
+ dti = date_range("2016-01-01", periods=3, tz=tz)
1057
+ if tz is None:
1058
+ dti2 = dti.tz_localize("US/Eastern")
1059
+ else:
1060
+ dti2 = dti.tz_localize(None)
1061
+ dtarr = tm.box_expected(dti, box_with_array)
1062
+
1063
+ assert_cannot_add(dtarr, dti.values)
1064
+ assert_cannot_add(dtarr, dti)
1065
+ assert_cannot_add(dtarr, dtarr)
1066
+ assert_cannot_add(dtarr, dti[0])
1067
+ assert_cannot_add(dtarr, dti[0].to_pydatetime())
1068
+ assert_cannot_add(dtarr, dti[0].to_datetime64())
1069
+ assert_cannot_add(dtarr, dti2[0])
1070
+ assert_cannot_add(dtarr, dti2[0].to_pydatetime())
1071
+ assert_cannot_add(dtarr, np.datetime64("2011-01-01", "D"))
1072
+
1073
+ # -------------------------------------------------------------
1074
+ # Other Invalid Addition/Subtraction
1075
+
1076
+ # Note: freq here includes both Tick and non-Tick offsets; this is
1077
+ # relevant because historically integer-addition was allowed if we had
1078
+ # a freq.
1079
+ @pytest.mark.parametrize("freq", ["h", "D", "W", "2ME", "MS", "QE", "B", None])
1080
+ @pytest.mark.parametrize("dtype", [None, "uint8"])
1081
+ def test_dt64arr_addsub_intlike(
1082
+ self, request, dtype, index_or_series_or_array, freq, tz_naive_fixture
1083
+ ):
1084
+ # GH#19959, GH#19123, GH#19012
1085
+ # GH#55860 use index_or_series_or_array instead of box_with_array
1086
+ # bc DataFrame alignment makes it inapplicable
1087
+ tz = tz_naive_fixture
1088
+
1089
+ if freq is None:
1090
+ dti = DatetimeIndex(["NaT", "2017-04-05 06:07:08"], tz=tz)
1091
+ else:
1092
+ dti = date_range("2016-01-01", periods=2, freq=freq, tz=tz)
1093
+
1094
+ obj = index_or_series_or_array(dti)
1095
+ other = np.array([4, -1])
1096
+ if dtype is not None:
1097
+ other = other.astype(dtype)
1098
+
1099
+ msg = "|".join(
1100
+ [
1101
+ "Addition/subtraction of integers",
1102
+ "cannot subtract DatetimeArray from",
1103
+ # IntegerArray
1104
+ "can only perform ops with numeric values",
1105
+ "unsupported operand type.*Categorical",
1106
+ r"unsupported operand type\(s\) for -: 'int' and 'Timestamp'",
1107
+ ]
1108
+ )
1109
+ assert_invalid_addsub_type(obj, 1, msg)
1110
+ assert_invalid_addsub_type(obj, np.int64(2), msg)
1111
+ assert_invalid_addsub_type(obj, np.array(3, dtype=np.int64), msg)
1112
+ assert_invalid_addsub_type(obj, other, msg)
1113
+ assert_invalid_addsub_type(obj, np.array(other), msg)
1114
+ assert_invalid_addsub_type(obj, pd.array(other), msg)
1115
+ assert_invalid_addsub_type(obj, pd.Categorical(other), msg)
1116
+ assert_invalid_addsub_type(obj, pd.Index(other), msg)
1117
+ assert_invalid_addsub_type(obj, Series(other), msg)
1118
+
1119
+ @pytest.mark.parametrize(
1120
+ "other",
1121
+ [
1122
+ 3.14,
1123
+ np.array([2.0, 3.0]),
1124
+ # GH#13078 datetime +/- Period is invalid
1125
+ Period("2011-01-01", freq="D"),
1126
+ # https://github.com/pandas-dev/pandas/issues/10329
1127
+ time(1, 2, 3),
1128
+ ],
1129
+ )
1130
+ @pytest.mark.parametrize("dti_freq", [None, "D"])
1131
+ def test_dt64arr_add_sub_invalid(self, dti_freq, other, box_with_array):
1132
+ dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
1133
+ dtarr = tm.box_expected(dti, box_with_array)
1134
+ msg = "|".join(
1135
+ [
1136
+ "unsupported operand type",
1137
+ "cannot (add|subtract)",
1138
+ "cannot use operands with types",
1139
+ "ufunc '?(add|subtract)'? cannot use operands with types",
1140
+ "Concatenation operation is not implemented for NumPy arrays",
1141
+ ]
1142
+ )
1143
+ assert_invalid_addsub_type(dtarr, other, msg)
1144
+
1145
    @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"])
    @pytest.mark.parametrize("dti_freq", [None, "D"])
    def test_dt64arr_add_sub_parr(
        self, dti_freq, pi_freq, box_with_array, box_with_array2
    ):
        """dt64 array +/- PeriodIndex (any box combination) raises TypeError."""
        # GH#20049 subtracting PeriodIndex should raise TypeError
        dti = DatetimeIndex(["2011-01-01", "2011-01-02"], freq=dti_freq)
        pi = dti.to_period(pi_freq)

        dtarr = tm.box_expected(dti, box_with_array)
        parr = tm.box_expected(pi, box_with_array2)
        # Different box pairings surface different (all-TypeError) messages.
        msg = "|".join(
            [
                "cannot (add|subtract)",
                "unsupported operand",
                "descriptor.*requires",
                "ufunc.*cannot use operands",
            ]
        )
        assert_invalid_addsub_type(dtarr, parr, msg)
1165
+
1166
    @pytest.mark.filterwarnings("ignore::pandas.errors.PerformanceWarning")
    def test_dt64arr_addsub_time_objects_raises(self, box_with_array, tz_naive_fixture):
        """dt64 array +/- a sequence of datetime.time objects raises TypeError."""
        # https://github.com/pandas-dev/pandas/issues/10329

        tz = tz_naive_fixture

        obj1 = date_range("2012-01-01", periods=3, tz=tz)
        obj2 = [time(i, i, i) for i in range(3)]

        obj1 = tm.box_expected(obj1, box_with_array)
        obj2 = tm.box_expected(obj2, box_with_array)

        msg = "|".join(
            [
                "unsupported operand",
                "cannot subtract DatetimeArray from ndarray",
            ]
        )
        # pandas.errors.PerformanceWarning: Non-vectorized DateOffset being
        # applied to Series or DatetimeIndex
        # we aren't testing that here, so ignore.
        assert_invalid_addsub_type(obj1, obj2, msg=msg)
1188
+
1189
+ # -------------------------------------------------------------
1190
+ # Other invalid operations
1191
+
1192
+ @pytest.mark.parametrize(
1193
+ "dt64_series",
1194
+ [
1195
+ Series([Timestamp("19900315"), Timestamp("19900315")]),
1196
+ Series([NaT, Timestamp("19900315")]),
1197
+ Series([NaT, NaT], dtype="datetime64[ns]"),
1198
+ ],
1199
+ )
1200
+ @pytest.mark.parametrize("one", [1, 1.0, np.array(1)])
1201
+ def test_dt64_mul_div_numeric_invalid(self, one, dt64_series, box_with_array):
1202
+ obj = tm.box_expected(dt64_series, box_with_array)
1203
+
1204
+ msg = "cannot perform .* with this index type"
1205
+
1206
+ # multiplication
1207
+ with pytest.raises(TypeError, match=msg):
1208
+ obj * one
1209
+ with pytest.raises(TypeError, match=msg):
1210
+ one * obj
1211
+
1212
+ # division
1213
+ with pytest.raises(TypeError, match=msg):
1214
+ obj / one
1215
+ with pytest.raises(TypeError, match=msg):
1216
+ one / obj
1217
+
1218
+
1219
class TestDatetime64DateOffsetArithmetic:
    """Arithmetic between datetime64 arrays and DateOffset objects/arrays."""

    # -------------------------------------------------------------
    # Tick DateOffsets

    # TODO: parametrize over timezone?
    @pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
    def test_dt64arr_series_add_tick_DateOffset(self, box_with_array, unit):
        """Adding a Tick offset shifts each element; addition is commutative."""
        # GH#4532
        # operate with pd.offsets
        ser = Series(
            [Timestamp("20130101 9:01"), Timestamp("20130101 9:02")]
        ).dt.as_unit(unit)
        expected = Series(
            [Timestamp("20130101 9:01:05"), Timestamp("20130101 9:02:05")]
        ).dt.as_unit(unit)

        ser = tm.box_expected(ser, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = ser + pd.offsets.Second(5)
        tm.assert_equal(result, expected)

        result2 = pd.offsets.Second(5) + ser
        tm.assert_equal(result2, expected)

    def test_dt64arr_series_sub_tick_DateOffset(self, box_with_array):
        """Subtracting a Tick offset shifts back; offset - datetimes raises."""
        # GH#4532
        # operate with pd.offsets
        ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
        expected = Series(
            [Timestamp("20130101 9:00:55"), Timestamp("20130101 9:01:55")]
        )

        ser = tm.box_expected(ser, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = ser - pd.offsets.Second(5)
        tm.assert_equal(result, expected)

        result2 = -pd.offsets.Second(5) + ser
        tm.assert_equal(result2, expected)
        msg = "(bad|unsupported) operand type for unary"
        with pytest.raises(TypeError, match=msg):
            pd.offsets.Second(5) - ser

    @pytest.mark.parametrize(
        "cls_name", ["Day", "Hour", "Minute", "Second", "Milli", "Micro", "Nano"]
    )
    def test_dt64arr_add_sub_tick_DateOffset_smoke(self, cls_name, box_with_array):
        """Smoke test: every Tick subclass can be added/subtracted without error."""
        # GH#4532
        # smoke tests for valid DateOffsets
        ser = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])
        ser = tm.box_expected(ser, box_with_array)

        offset_cls = getattr(pd.offsets, cls_name)
        ser + offset_cls(5)
        offset_cls(5) + ser
        ser - offset_cls(5)

    def test_dti_add_tick_tzaware(self, tz_aware_fixture, box_with_array):
        """Tick/timedelta addition on tz-aware data preserves dtype and roundtrips."""
        # GH#21610, GH#22163 ensure DataFrame doesn't return object-dtype
        tz = tz_aware_fixture
        if tz == "US/Pacific":
            dates = date_range("2012-11-01", periods=3, tz=tz)
            offset = dates + pd.offsets.Hour(5)
            assert dates[0] + pd.offsets.Hour(5) == offset[0]

        dates = date_range("2010-11-01 00:00", periods=3, tz=tz, freq="h")
        expected = DatetimeIndex(
            ["2010-11-01 05:00", "2010-11-01 06:00", "2010-11-01 07:00"],
            freq="h",
            tz=tz,
        ).as_unit("ns")

        dates = tm.box_expected(dates, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        # equivalent scalar shifts: offset, np.timedelta64, stdlib timedelta
        for scalar in [pd.offsets.Hour(5), np.timedelta64(5, "h"), timedelta(hours=5)]:
            offset = dates + scalar
            tm.assert_equal(offset, expected)
            offset = scalar + dates
            tm.assert_equal(offset, expected)

            roundtrip = offset - scalar
            tm.assert_equal(roundtrip, dates)

            msg = "|".join(
                ["bad operand type for unary -", "cannot subtract DatetimeArray"]
            )
            with pytest.raises(TypeError, match=msg):
                scalar - dates

    # -------------------------------------------------------------
    # RelativeDelta DateOffsets

    @pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
    def test_dt64arr_add_sub_relativedelta_offsets(self, box_with_array, unit):
        """Vectorized relativedelta-style DateOffset ops match pointwise scalar ops."""
        # GH#10699
        vec = DatetimeIndex(
            [
                Timestamp("2000-01-05 00:15:00"),
                Timestamp("2000-01-31 00:23:00"),
                Timestamp("2000-01-01"),
                Timestamp("2000-03-31"),
                Timestamp("2000-02-29"),
                Timestamp("2000-12-31"),
                Timestamp("2000-05-15"),
                Timestamp("2001-06-15"),
            ]
        ).as_unit(unit)
        vec = tm.box_expected(vec, box_with_array)
        vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec

        # DateOffset relativedelta fastpath
        relative_kwargs = [
            ("years", 2),
            ("months", 5),
            ("days", 3),
            ("hours", 5),
            ("minutes", 10),
            ("seconds", 2),
            ("microseconds", 5),
        ]
        for i, (offset_unit, value) in enumerate(relative_kwargs):
            off = DateOffset(**{offset_unit: value})

            exp_unit = unit
            if offset_unit == "microseconds" and unit != "ns":
                # microsecond offsets force at least "us" resolution
                exp_unit = "us"

            # TODO(GH#55564): as_unit will be unnecessary
            expected = DatetimeIndex([x + off for x in vec_items]).as_unit(exp_unit)
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec + off)

            expected = DatetimeIndex([x - off for x in vec_items]).as_unit(exp_unit)
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec - off)

            # also combine all kwargs seen so far into one compound offset
            off = DateOffset(**dict(relative_kwargs[: i + 1]))

            expected = DatetimeIndex([x + off for x in vec_items]).as_unit(exp_unit)
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec + off)

            expected = DatetimeIndex([x - off for x in vec_items]).as_unit(exp_unit)
            expected = tm.box_expected(expected, box_with_array)
            tm.assert_equal(expected, vec - off)
            msg = "(bad|unsupported) operand type for unary"
            with pytest.raises(TypeError, match=msg):
                off - vec

    # -------------------------------------------------------------
    # Non-Tick, Non-RelativeDelta DateOffsets

    # TODO: redundant with test_dt64arr_add_sub_DateOffset? that includes
    # tz-aware cases which this does not
    @pytest.mark.filterwarnings("ignore::pandas.errors.PerformanceWarning")
    @pytest.mark.parametrize(
        "cls_and_kwargs",
        [
            "YearBegin",
            ("YearBegin", {"month": 5}),
            "YearEnd",
            ("YearEnd", {"month": 5}),
            "MonthBegin",
            "MonthEnd",
            "SemiMonthEnd",
            "SemiMonthBegin",
            "Week",
            ("Week", {"weekday": 3}),
            "Week",
            ("Week", {"weekday": 6}),
            "BusinessDay",
            "BDay",
            "QuarterEnd",
            "QuarterBegin",
            "CustomBusinessDay",
            "CDay",
            "CBMonthEnd",
            "CBMonthBegin",
            "BMonthBegin",
            "BMonthEnd",
            "BusinessHour",
            "BYearBegin",
            "BYearEnd",
            "BQuarterBegin",
            ("LastWeekOfMonth", {"weekday": 2}),
            (
                "FY5253Quarter",
                {
                    "qtr_with_extra_week": 1,
                    "startingMonth": 1,
                    "weekday": 2,
                    "variation": "nearest",
                },
            ),
            ("FY5253", {"weekday": 0, "startingMonth": 2, "variation": "nearest"}),
            ("WeekOfMonth", {"weekday": 2, "week": 2}),
            "Easter",
            ("DateOffset", {"day": 4}),
            ("DateOffset", {"month": 5}),
        ],
    )
    @pytest.mark.parametrize("normalize", [True, False])
    @pytest.mark.parametrize("n", [0, 5])
    @pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
    @pytest.mark.parametrize("tz", [None, "US/Central"])
    def test_dt64arr_add_sub_DateOffsets(
        self, box_with_array, n, normalize, cls_and_kwargs, unit, tz
    ):
        """Vectorized add/sub of each DateOffset subclass matches pointwise results."""
        # GH#10699
        # assert vectorized operation matches pointwise operations

        if isinstance(cls_and_kwargs, tuple):
            # If cls_name param is a tuple, then 2nd entry is kwargs for
            # the offset constructor
            cls_name, kwargs = cls_and_kwargs
        else:
            cls_name = cls_and_kwargs
            kwargs = {}

        if n == 0 and cls_name in [
            "WeekOfMonth",
            "LastWeekOfMonth",
            "FY5253Quarter",
            "FY5253",
        ]:
            # passing n = 0 is invalid for these offset classes
            return

        vec = (
            DatetimeIndex(
                [
                    Timestamp("2000-01-05 00:15:00"),
                    Timestamp("2000-01-31 00:23:00"),
                    Timestamp("2000-01-01"),
                    Timestamp("2000-03-31"),
                    Timestamp("2000-02-29"),
                    Timestamp("2000-12-31"),
                    Timestamp("2000-05-15"),
                    Timestamp("2001-06-15"),
                ]
            )
            .as_unit(unit)
            .tz_localize(tz)
        )
        vec = tm.box_expected(vec, box_with_array)
        vec_items = vec.iloc[0] if box_with_array is pd.DataFrame else vec

        offset_cls = getattr(pd.offsets, cls_name)
        offset = offset_cls(n, normalize=normalize, **kwargs)

        # TODO(GH#55564): as_unit will be unnecessary
        expected = DatetimeIndex([x + offset for x in vec_items]).as_unit(unit)
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(expected, vec + offset)
        tm.assert_equal(expected, offset + vec)

        expected = DatetimeIndex([x - offset for x in vec_items]).as_unit(unit)
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(expected, vec - offset)

        expected = DatetimeIndex([offset + x for x in vec_items]).as_unit(unit)
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(expected, offset + vec)
        msg = "(bad|unsupported) operand type for unary"
        with pytest.raises(TypeError, match=msg):
            offset - vec

    @pytest.mark.parametrize(
        "other",
        [
            np.array([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)]),
            np.array([pd.offsets.DateOffset(years=1), pd.offsets.MonthEnd()]),
            np.array(  # matching offsets
                [pd.offsets.DateOffset(years=1), pd.offsets.DateOffset(years=1)]
            ),
        ],
    )
    @pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
    def test_dt64arr_add_sub_offset_array(
        self, tz_naive_fixture, box_with_array, op, other
    ):
        """Elementwise ops against an ndarray of offsets produce object-dtype results."""
        # GH#18849
        # GH#10699 array of offsets

        tz = tz_naive_fixture
        dti = date_range("2017-01-01", periods=2, tz=tz)
        dtarr = tm.box_expected(dti, box_with_array)

        expected = DatetimeIndex([op(dti[n], other[n]) for n in range(len(dti))])
        expected = tm.box_expected(expected, box_with_array).astype(object)

        with tm.assert_produces_warning(PerformanceWarning):
            res = op(dtarr, other)
        tm.assert_equal(res, expected)

        # Same thing but boxing other
        other = tm.box_expected(other, box_with_array)
        if box_with_array is pd.array and op is roperator.radd:
            # We expect a NumpyExtensionArray, not ndarray[object] here
            expected = pd.array(expected, dtype=object)
        with tm.assert_produces_warning(PerformanceWarning):
            res = op(dtarr, other)
        tm.assert_equal(res, expected)

    @pytest.mark.parametrize(
        "op, offset, exp, exp_freq",
        [
            (
                "__add__",
                DateOffset(months=3, days=10),
                [
                    Timestamp("2014-04-11"),
                    Timestamp("2015-04-11"),
                    Timestamp("2016-04-11"),
                    Timestamp("2017-04-11"),
                ],
                None,
            ),
            (
                "__add__",
                DateOffset(months=3),
                [
                    Timestamp("2014-04-01"),
                    Timestamp("2015-04-01"),
                    Timestamp("2016-04-01"),
                    Timestamp("2017-04-01"),
                ],
                "YS-APR",
            ),
            (
                "__sub__",
                DateOffset(months=3, days=10),
                [
                    Timestamp("2013-09-21"),
                    Timestamp("2014-09-21"),
                    Timestamp("2015-09-21"),
                    Timestamp("2016-09-21"),
                ],
                None,
            ),
            (
                "__sub__",
                DateOffset(months=3),
                [
                    Timestamp("2013-10-01"),
                    Timestamp("2014-10-01"),
                    Timestamp("2015-10-01"),
                    Timestamp("2016-10-01"),
                ],
                "YS-OCT",
            ),
        ],
    )
    def test_dti_add_sub_nonzero_mth_offset(
        self, op, offset, exp, exp_freq, tz_aware_fixture, box_with_array
    ):
        """Month-based DateOffset add/sub on a yearly tz-aware range gives exact dates."""
        # GH 26258
        tz = tz_aware_fixture
        date = date_range(start="01 Jan 2014", end="01 Jan 2017", freq="YS", tz=tz)
        date = tm.box_expected(date, box_with_array, False)
        mth = getattr(date, op)
        result = mth(offset)

        expected = DatetimeIndex(exp, tz=tz).as_unit("ns")
        expected = tm.box_expected(expected, box_with_array, False)
        tm.assert_equal(result, expected)

    def test_dt64arr_series_add_DateOffset_with_milli(self):
        """DateOffset(milliseconds=...) preserves sub-millisecond precision."""
        # GH 57529
        dti = DatetimeIndex(
            [
                "2000-01-01 00:00:00.012345678",
                "2000-01-31 00:00:00.012345678",
                "2000-02-29 00:00:00.012345678",
            ],
            dtype="datetime64[ns]",
        )
        result = dti + DateOffset(milliseconds=4)
        expected = DatetimeIndex(
            [
                "2000-01-01 00:00:00.016345678",
                "2000-01-31 00:00:00.016345678",
                "2000-02-29 00:00:00.016345678",
            ],
            dtype="datetime64[ns]",
        )
        tm.assert_index_equal(result, expected)

        result = dti + DateOffset(days=1, milliseconds=4)
        expected = DatetimeIndex(
            [
                "2000-01-02 00:00:00.016345678",
                "2000-02-01 00:00:00.016345678",
                "2000-03-01 00:00:00.016345678",
            ],
            dtype="datetime64[ns]",
        )
        tm.assert_index_equal(result, expected)
1620
+
1621
+
1622
class TestDatetime64OverflowHandling:
    """Overflow behavior of datetime64 arithmetic near Timestamp.min/max."""

    # TODO: box + de-duplicate

    def test_dt64_overflow_masking(self, box_with_array):
        """NaT positions are masked out instead of tripping overflow checks."""
        # GH#25317
        left = Series([Timestamp("1969-12-31")], dtype="M8[ns]")
        right = Series([NaT])

        left = tm.box_expected(left, box_with_array)
        right = tm.box_expected(right, box_with_array)

        expected = TimedeltaIndex([NaT], dtype="m8[ns]")
        expected = tm.box_expected(expected, box_with_array)

        result = left - right
        tm.assert_equal(result, expected)

    def test_dt64_series_arith_overflow(self):
        """Out-of-bounds results raise OverflowError unless masked by NaT."""
        # GH#12534, fixed by GH#19024
        dt = Timestamp("1700-01-31")
        td = Timedelta("20000 Days")
        dti = date_range("1949-09-30", freq="100YE", periods=4)
        ser = Series(dti)
        msg = "Overflow in int64 addition"
        with pytest.raises(OverflowError, match=msg):
            ser - dt
        with pytest.raises(OverflowError, match=msg):
            dt - ser
        with pytest.raises(OverflowError, match=msg):
            ser + td
        with pytest.raises(OverflowError, match=msg):
            td + ser

        # NaT-ing the overflowing entry makes the op succeed
        ser.iloc[-1] = NaT
        expected = Series(
            ["2004-10-03", "2104-10-04", "2204-10-04", "NaT"], dtype="datetime64[ns]"
        )
        res = ser + td
        tm.assert_series_equal(res, expected)
        res = td + ser
        tm.assert_series_equal(res, expected)

        ser.iloc[1:] = NaT
        expected = Series(["91279 Days", "NaT", "NaT", "NaT"], dtype="timedelta64[ns]")
        res = ser - dt
        tm.assert_series_equal(res, expected)
        res = dt - ser
        tm.assert_series_equal(res, -expected)

    def test_datetimeindex_sub_timestamp_overflow(self):
        """DatetimeIndex - timestamp-like overflows exactly when int64 math would."""
        dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max]).as_unit("ns")
        dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min]).as_unit("ns")

        tsneg = Timestamp("1950-01-01").as_unit("ns")
        # equivalent representations of the same instant
        ts_neg_variants = [
            tsneg,
            tsneg.to_pydatetime(),
            tsneg.to_datetime64().astype("datetime64[ns]"),
            tsneg.to_datetime64().astype("datetime64[D]"),
        ]

        tspos = Timestamp("1980-01-01").as_unit("ns")
        ts_pos_variants = [
            tspos,
            tspos.to_pydatetime(),
            tspos.to_datetime64().astype("datetime64[ns]"),
            tspos.to_datetime64().astype("datetime64[D]"),
        ]
        msg = "Overflow in int64 addition"
        for variant in ts_neg_variants:
            with pytest.raises(OverflowError, match=msg):
                dtimax - variant

        expected = Timestamp.max._value - tspos._value
        for variant in ts_pos_variants:
            res = dtimax - variant
            assert res[1]._value == expected

        expected = Timestamp.min._value - tsneg._value
        for variant in ts_neg_variants:
            res = dtimin - variant
            assert res[1]._value == expected

        for variant in ts_pos_variants:
            with pytest.raises(OverflowError, match=msg):
                dtimin - variant

    def test_datetimeindex_sub_datetimeindex_overflow(self):
        """DatetimeIndex - DatetimeIndex overflow detection, including edge cases."""
        # GH#22492, GH#22508
        dtimax = pd.to_datetime(["2021-12-28 17:19", Timestamp.max]).as_unit("ns")
        dtimin = pd.to_datetime(["2021-12-28 17:19", Timestamp.min]).as_unit("ns")

        ts_neg = pd.to_datetime(["1950-01-01", "1950-01-01"]).as_unit("ns")
        ts_pos = pd.to_datetime(["1980-01-01", "1980-01-01"]).as_unit("ns")

        # General tests
        expected = Timestamp.max._value - ts_pos[1]._value
        result = dtimax - ts_pos
        assert result[1]._value == expected

        expected = Timestamp.min._value - ts_neg[1]._value
        result = dtimin - ts_neg
        assert result[1]._value == expected
        msg = "Overflow in int64 addition"
        with pytest.raises(OverflowError, match=msg):
            dtimax - ts_neg

        with pytest.raises(OverflowError, match=msg):
            dtimin - ts_pos

        # Edge cases
        tmin = pd.to_datetime([Timestamp.min])
        t1 = tmin + Timedelta.max + Timedelta("1us")
        with pytest.raises(OverflowError, match=msg):
            t1 - tmin

        tmax = pd.to_datetime([Timestamp.max])
        t2 = tmax + Timedelta.min - Timedelta("1us")
        with pytest.raises(OverflowError, match=msg):
            tmax - t2
1742
+
1743
+
1744
class TestTimestampSeriesArithmetic:
    """Arithmetic between datetime64 Series and scalars/arrays, incl. tz cases."""

    def test_empty_series_add_sub(self, box_with_array):
        """Empty dt64 +/- empty m8 stays dt64; m8 - dt64 raises even when empty."""
        # GH#13844
        a = Series(dtype="M8[ns]")
        b = Series(dtype="m8[ns]")
        a = box_with_array(a)
        b = box_with_array(b)
        tm.assert_equal(a, a + b)
        tm.assert_equal(a, a - b)
        tm.assert_equal(a, b + a)
        msg = "cannot subtract"
        with pytest.raises(TypeError, match=msg):
            b - a

    def test_operators_datetimelike(self):
        """Smoke test: dt64/td64 Series combinations evaluate without error."""
        # ## timedelta64 ###
        td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
        td1.iloc[2] = np.nan

        # ## datetime64 ###
        dt1 = Series(
            [
                Timestamp("20111230"),
                Timestamp("20120101"),
                Timestamp("20120103"),
            ]
        )
        dt1.iloc[2] = np.nan
        dt2 = Series(
            [
                Timestamp("20111231"),
                Timestamp("20120102"),
                Timestamp("20120104"),
            ]
        )
        dt1 - dt2
        dt2 - dt1

        # datetime64 with timetimedelta
        dt1 + td1
        td1 + dt1
        dt1 - td1

        # timetimedelta with datetime64
        td1 + dt1
        dt1 + td1

    def test_dt64ser_sub_datetime_dtype(self, unit):
        """Series[dt64] - datetime yields td64 in the finer of the two resolutions."""
        ts = Timestamp(datetime(1993, 1, 7, 13, 30, 00))
        dt = datetime(1993, 6, 22, 13, 30)
        ser = Series([ts], dtype=f"M8[{unit}]")
        result = ser - dt

        # the expected unit is the max of `unit` and the unit imputed to `dt`,
        # which is "us"
        exp_unit = tm.get_finest_unit(unit, "us")
        assert result.dtype == f"timedelta64[{exp_unit}]"

    # -------------------------------------------------------------
    # TODO: This next block of tests came from tests.series.test_operators,
    # needs to be de-duplicated and parametrized over `box` classes

    @pytest.mark.parametrize(
        "left, right, op_fail",
        [
            [
                [Timestamp("20111230"), Timestamp("20120101"), NaT],
                [Timestamp("20111231"), Timestamp("20120102"), Timestamp("20120104")],
                ["__sub__", "__rsub__"],
            ],
            [
                [Timestamp("20111230"), Timestamp("20120101"), NaT],
                [timedelta(minutes=5, seconds=3), timedelta(minutes=5, seconds=3), NaT],
                ["__add__", "__radd__", "__sub__"],
            ],
            [
                [
                    Timestamp("20111230", tz="US/Eastern"),
                    Timestamp("20111230", tz="US/Eastern"),
                    NaT,
                ],
                [timedelta(minutes=5, seconds=3), NaT, timedelta(minutes=5, seconds=3)],
                ["__add__", "__radd__", "__sub__"],
            ],
        ],
    )
    def test_operators_datetimelike_invalid(
        self, left, right, op_fail, all_arithmetic_operators
    ):
        """Ops not listed in `op_fail` must raise TypeError; listed ops must succeed."""
        # these are all TypeError ops
        op_str = all_arithmetic_operators
        arg1 = Series(left)
        arg2 = Series(right)
        # check that we are getting a TypeError
        # with 'operate' (from core/ops.py) for the ops that are not
        # defined
        op = getattr(arg1, op_str, None)
        # Previously, _validate_for_numeric_binop in core/indexes/base.py
        # did this for us.
        if op_str not in op_fail:
            with pytest.raises(
                TypeError, match="operate|[cC]annot|unsupported operand"
            ):
                op(arg2)
        else:
            # Smoke test
            op(arg2)

    def test_sub_single_tz(self, unit):
        """Subtracting same-tz Series gives signed Timedeltas in the same unit."""
        # GH#12290
        s1 = Series([Timestamp("2016-02-10", tz="America/Sao_Paulo")]).dt.as_unit(unit)
        s2 = Series([Timestamp("2016-02-08", tz="America/Sao_Paulo")]).dt.as_unit(unit)
        result = s1 - s2
        expected = Series([Timedelta("2days")]).dt.as_unit(unit)
        tm.assert_series_equal(result, expected)
        result = s2 - s1
        expected = Series([Timedelta("-2days")]).dt.as_unit(unit)
        tm.assert_series_equal(result, expected)

    def test_dt64tz_series_sub_dtitz(self):
        """tz-aware Series minus same-tz DatetimeIndex works in both orders."""
        # GH#19071 subtracting tzaware DatetimeIndex from tzaware Series
        # (with same tz) raises, fixed by #19024
        dti = date_range("1999-09-30", periods=10, tz="US/Pacific")
        ser = Series(dti)
        expected = Series(TimedeltaIndex(["0days"] * 10))

        res = dti - ser
        tm.assert_series_equal(res, expected)
        res = ser - dti
        tm.assert_series_equal(res, expected)

    def test_sub_datetime_compat(self, unit):
        """Series - datetime/Timestamp agree; NaT propagates; unit upcast to 'us'."""
        # see GH#14088
        ser = Series([datetime(2016, 8, 23, 12, tzinfo=pytz.utc), NaT]).dt.as_unit(unit)
        dt = datetime(2016, 8, 22, 12, tzinfo=pytz.utc)
        # The datetime object has "us" so we upcast lower units
        exp_unit = tm.get_finest_unit(unit, "us")
        exp = Series([Timedelta("1 days"), NaT]).dt.as_unit(exp_unit)
        result = ser - dt
        tm.assert_series_equal(result, exp)
        result2 = ser - Timestamp(dt)
        tm.assert_series_equal(result2, exp)

    def test_dt64_series_add_mixed_tick_DateOffset(self):
        """Adding (chains of) Tick offsets of mixed resolution shifts correctly."""
        # GH#4532
        # operate with pd.offsets
        s = Series([Timestamp("20130101 9:01"), Timestamp("20130101 9:02")])

        result = s + pd.offsets.Milli(5)
        result2 = pd.offsets.Milli(5) + s
        expected = Series(
            [Timestamp("20130101 9:01:00.005"), Timestamp("20130101 9:02:00.005")]
        )
        tm.assert_series_equal(result, expected)
        tm.assert_series_equal(result2, expected)

        result = s + pd.offsets.Minute(5) + pd.offsets.Milli(5)
        expected = Series(
            [Timestamp("20130101 9:06:00.005"), Timestamp("20130101 9:07:00.005")]
        )
        tm.assert_series_equal(result, expected)

    def test_datetime64_ops_nat(self, unit):
        """NaT add/sub propagates; unary minus on a dt64 Series raises."""
        # GH#11349
        datetime_series = Series([NaT, Timestamp("19900315")]).dt.as_unit(unit)
        nat_series_dtype_timestamp = Series([NaT, NaT], dtype=f"datetime64[{unit}]")
        single_nat_dtype_datetime = Series([NaT], dtype=f"datetime64[{unit}]")

        # subtraction
        tm.assert_series_equal(-NaT + datetime_series, nat_series_dtype_timestamp)
        msg = "bad operand type for unary -: 'DatetimeArray'"
        with pytest.raises(TypeError, match=msg):
            -single_nat_dtype_datetime + datetime_series

        tm.assert_series_equal(
            -NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
        )
        with pytest.raises(TypeError, match=msg):
            -single_nat_dtype_datetime + nat_series_dtype_timestamp

        # addition
        tm.assert_series_equal(
            nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
        )
        tm.assert_series_equal(
            NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
        )

        tm.assert_series_equal(
            nat_series_dtype_timestamp + NaT, nat_series_dtype_timestamp
        )
        tm.assert_series_equal(
            NaT + nat_series_dtype_timestamp, nat_series_dtype_timestamp
        )

    # -------------------------------------------------------------
    # Timezone-Centric Tests

    def test_operators_datetimelike_with_timezones(self):
        """tz-aware dt64 +/- td64 matches the naive computation re-localized."""
        tz = "US/Eastern"
        dt1 = Series(date_range("2000-01-01 09:00:00", periods=5, tz=tz), name="foo")
        dt2 = dt1.copy()
        dt2.iloc[2] = np.nan

        td1 = Series(pd.timedelta_range("1 days 1 min", periods=5, freq="h"))
        td2 = td1.copy()
        td2.iloc[1] = np.nan
        # setting a NaN clears the inferred freq
        assert td2._values.freq is None

        result = dt1 + td1[0]
        exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 + td2[0]
        exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        # odd numpy behavior with scalar timedeltas
        result = td1[0] + dt1
        exp = (dt1.dt.tz_localize(None) + td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = td2[0] + dt2
        exp = (dt2.dt.tz_localize(None) + td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt1 - td1[0]
        exp = (dt1.dt.tz_localize(None) - td1[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
        msg = "(bad|unsupported) operand type for unary"
        with pytest.raises(TypeError, match=msg):
            td1[0] - dt1

        result = dt2 - td2[0]
        exp = (dt2.dt.tz_localize(None) - td2[0]).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
        with pytest.raises(TypeError, match=msg):
            td2[0] - dt2

        result = dt1 + td1
        exp = (dt1.dt.tz_localize(None) + td1).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 + td2
        exp = (dt2.dt.tz_localize(None) + td2).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt1 - td1
        exp = (dt1.dt.tz_localize(None) - td1).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)

        result = dt2 - td2
        exp = (dt2.dt.tz_localize(None) - td2).dt.tz_localize(tz)
        tm.assert_series_equal(result, exp)
        msg = "cannot (add|subtract)"
        with pytest.raises(TypeError, match=msg):
            td1 - dt1
        with pytest.raises(TypeError, match=msg):
            td2 - dt2
2003
+
2004
+
2005
+ class TestDatetimeIndexArithmetic:
2006
+ # -------------------------------------------------------------
2007
+ # Binary operations DatetimeIndex and TimedeltaIndex/array
2008
+
2009
+ def test_dti_add_tdi(self, tz_naive_fixture):
2010
+ # GH#17558
2011
+ tz = tz_naive_fixture
2012
+ dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
2013
+ tdi = pd.timedelta_range("0 days", periods=10)
2014
+ expected = date_range("2017-01-01", periods=10, tz=tz)
2015
+ expected = expected._with_freq(None)
2016
+
2017
+ # add with TimedeltaIndex
2018
+ result = dti + tdi
2019
+ tm.assert_index_equal(result, expected)
2020
+
2021
+ result = tdi + dti
2022
+ tm.assert_index_equal(result, expected)
2023
+
2024
+ # add with timedelta64 array
2025
+ result = dti + tdi.values
2026
+ tm.assert_index_equal(result, expected)
2027
+
2028
+ result = tdi.values + dti
2029
+ tm.assert_index_equal(result, expected)
2030
+
2031
    def test_dti_iadd_tdi(self, tz_naive_fixture):
        """In-place `+=` between DatetimeIndex and TimedeltaIndex/td64 array works both ways."""
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        tdi = pd.timedelta_range("0 days", periods=10)
        expected = date_range("2017-01-01", periods=10, tz=tz)
        expected = expected._with_freq(None)

        # iadd with TimedeltaIndex
        result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        result += tdi
        tm.assert_index_equal(result, expected)

        result = pd.timedelta_range("0 days", periods=10)
        result += dti
        tm.assert_index_equal(result, expected)

        # iadd with timedelta64 array
        result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        result += tdi.values
        tm.assert_index_equal(result, expected)

        result = pd.timedelta_range("0 days", periods=10)
        result += dti
        tm.assert_index_equal(result, expected)
2056
+
2057
    def test_dti_sub_tdi(self, tz_naive_fixture):
        """dti - tdi works; the reversed subtraction raises TypeError."""
        # GH#17558
        tz = tz_naive_fixture
        dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10)
        tdi = pd.timedelta_range("0 days", periods=10)
        expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D")
        expected = expected._with_freq(None)

        # sub with TimedeltaIndex
        result = dti - tdi
        tm.assert_index_equal(result, expected)

        msg = "cannot subtract .*TimedeltaArray"
        with pytest.raises(TypeError, match=msg):
            tdi - dti

        # sub with timedelta64 array
        result = dti - tdi.values
        tm.assert_index_equal(result, expected)

        msg = "cannot subtract a datelike from a TimedeltaArray"
        with pytest.raises(TypeError, match=msg):
            tdi.values - dti
2080
+
2081
+ def test_dti_isub_tdi(self, tz_naive_fixture, unit):
2082
+ # GH#17558
2083
+ tz = tz_naive_fixture
2084
+ dti = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10).as_unit(unit)
2085
+ tdi = pd.timedelta_range("0 days", periods=10, unit=unit)
2086
+ expected = date_range("2017-01-01", periods=10, tz=tz, freq="-1D", unit=unit)
2087
+ expected = expected._with_freq(None)
2088
+
2089
+ # isub with TimedeltaIndex
2090
+ result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10).as_unit(unit)
2091
+ result -= tdi
2092
+ tm.assert_index_equal(result, expected)
2093
+
2094
+ # DTA.__isub__ GH#43904
2095
+ dta = dti._data.copy()
2096
+ dta -= tdi
2097
+ tm.assert_datetime_array_equal(dta, expected._data)
2098
+
2099
+ out = dti._data.copy()
2100
+ np.subtract(out, tdi, out=out)
2101
+ tm.assert_datetime_array_equal(out, expected._data)
2102
+
2103
+ msg = "cannot subtract a datelike from a TimedeltaArray"
2104
+ with pytest.raises(TypeError, match=msg):
2105
+ tdi -= dti
2106
+
2107
+ # isub with timedelta64 array
2108
+ result = DatetimeIndex([Timestamp("2017-01-01", tz=tz)] * 10).as_unit(unit)
2109
+ result -= tdi.values
2110
+ tm.assert_index_equal(result, expected)
2111
+
2112
+ with pytest.raises(TypeError, match=msg):
2113
+ tdi.values -= dti
2114
+
2115
+ with pytest.raises(TypeError, match=msg):
2116
+ tdi._values -= dti
2117
+
2118
+ # -------------------------------------------------------------
2119
+ # Binary Operations DatetimeIndex and datetime-like
2120
+ # TODO: A couple other tests belong in this section. Move them in
2121
+ # A PR where there isn't already a giant diff.
2122
+
2123
+ # -------------------------------------------------------------
2124
+
2125
+ def test_dta_add_sub_index(self, tz_naive_fixture):
2126
+ # Check that DatetimeArray defers to Index classes
2127
+ dti = date_range("20130101", periods=3, tz=tz_naive_fixture)
2128
+ dta = dti.array
2129
+ result = dta - dti
2130
+ expected = dti - dti
2131
+ tm.assert_index_equal(result, expected)
2132
+
2133
+ tdi = result
2134
+ result = dta + tdi
2135
+ expected = dti + tdi
2136
+ tm.assert_index_equal(result, expected)
2137
+
2138
+ result = dta - tdi
2139
+ expected = dti - tdi
2140
+ tm.assert_index_equal(result, expected)
2141
+
2142
+ def test_sub_dti_dti(self, unit):
2143
+ # previously performed setop (deprecated in 0.16.0), now changed to
2144
+ # return subtraction -> TimeDeltaIndex (GH ...)
2145
+
2146
+ dti = date_range("20130101", periods=3, unit=unit)
2147
+ dti_tz = date_range("20130101", periods=3, unit=unit).tz_localize("US/Eastern")
2148
+ expected = TimedeltaIndex([0, 0, 0]).as_unit(unit)
2149
+
2150
+ result = dti - dti
2151
+ tm.assert_index_equal(result, expected)
2152
+
2153
+ result = dti_tz - dti_tz
2154
+ tm.assert_index_equal(result, expected)
2155
+ msg = "Cannot subtract tz-naive and tz-aware datetime-like objects"
2156
+ with pytest.raises(TypeError, match=msg):
2157
+ dti_tz - dti
2158
+
2159
+ with pytest.raises(TypeError, match=msg):
2160
+ dti - dti_tz
2161
+
2162
+ # isub
2163
+ dti -= dti
2164
+ tm.assert_index_equal(dti, expected)
2165
+
2166
+ # different length raises ValueError
2167
+ dti1 = date_range("20130101", periods=3, unit=unit)
2168
+ dti2 = date_range("20130101", periods=4, unit=unit)
2169
+ msg = "cannot add indices of unequal length"
2170
+ with pytest.raises(ValueError, match=msg):
2171
+ dti1 - dti2
2172
+
2173
+ # NaN propagation
2174
+ dti1 = DatetimeIndex(["2012-01-01", np.nan, "2012-01-03"]).as_unit(unit)
2175
+ dti2 = DatetimeIndex(["2012-01-02", "2012-01-03", np.nan]).as_unit(unit)
2176
+ expected = TimedeltaIndex(["1 days", np.nan, np.nan]).as_unit(unit)
2177
+ result = dti2 - dti1
2178
+ tm.assert_index_equal(result, expected)
2179
+
2180
+ # -------------------------------------------------------------------
2181
+ # TODO: Most of this block is moved from series or frame tests, needs
2182
+ # cleanup, box-parametrization, and de-duplication
2183
+
2184
+ @pytest.mark.parametrize("op", [operator.add, operator.sub])
2185
+ def test_timedelta64_equal_timedelta_supported_ops(self, op, box_with_array):
2186
+ ser = Series(
2187
+ [
2188
+ Timestamp("20130301"),
2189
+ Timestamp("20130228 23:00:00"),
2190
+ Timestamp("20130228 22:00:00"),
2191
+ Timestamp("20130228 21:00:00"),
2192
+ ]
2193
+ )
2194
+ obj = box_with_array(ser)
2195
+
2196
+ intervals = ["D", "h", "m", "s", "us"]
2197
+
2198
+ def timedelta64(*args):
2199
+ # see casting notes in NumPy gh-12927
2200
+ return np.sum(list(starmap(np.timedelta64, zip(args, intervals))))
2201
+
2202
+ for d, h, m, s, us in product(*([range(2)] * 5)):
2203
+ nptd = timedelta64(d, h, m, s, us)
2204
+ pytd = timedelta(days=d, hours=h, minutes=m, seconds=s, microseconds=us)
2205
+ lhs = op(obj, nptd)
2206
+ rhs = op(obj, pytd)
2207
+
2208
+ tm.assert_equal(lhs, rhs)
2209
+
2210
+ def test_ops_nat_mixed_datetime64_timedelta64(self):
2211
+ # GH#11349
2212
+ timedelta_series = Series([NaT, Timedelta("1s")])
2213
+ datetime_series = Series([NaT, Timestamp("19900315")])
2214
+ nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[ns]")
2215
+ nat_series_dtype_timestamp = Series([NaT, NaT], dtype="datetime64[ns]")
2216
+ single_nat_dtype_datetime = Series([NaT], dtype="datetime64[ns]")
2217
+ single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[ns]")
2218
+
2219
+ # subtraction
2220
+ tm.assert_series_equal(
2221
+ datetime_series - single_nat_dtype_datetime, nat_series_dtype_timedelta
2222
+ )
2223
+
2224
+ tm.assert_series_equal(
2225
+ datetime_series - single_nat_dtype_timedelta, nat_series_dtype_timestamp
2226
+ )
2227
+ tm.assert_series_equal(
2228
+ -single_nat_dtype_timedelta + datetime_series, nat_series_dtype_timestamp
2229
+ )
2230
+
2231
+ # without a Series wrapping the NaT, it is ambiguous
2232
+ # whether it is a datetime64 or timedelta64
2233
+ # defaults to interpreting it as timedelta64
2234
+ tm.assert_series_equal(
2235
+ nat_series_dtype_timestamp - single_nat_dtype_datetime,
2236
+ nat_series_dtype_timedelta,
2237
+ )
2238
+
2239
+ tm.assert_series_equal(
2240
+ nat_series_dtype_timestamp - single_nat_dtype_timedelta,
2241
+ nat_series_dtype_timestamp,
2242
+ )
2243
+ tm.assert_series_equal(
2244
+ -single_nat_dtype_timedelta + nat_series_dtype_timestamp,
2245
+ nat_series_dtype_timestamp,
2246
+ )
2247
+ msg = "cannot subtract a datelike"
2248
+ with pytest.raises(TypeError, match=msg):
2249
+ timedelta_series - single_nat_dtype_datetime
2250
+
2251
+ # addition
2252
+ tm.assert_series_equal(
2253
+ nat_series_dtype_timestamp + single_nat_dtype_timedelta,
2254
+ nat_series_dtype_timestamp,
2255
+ )
2256
+ tm.assert_series_equal(
2257
+ single_nat_dtype_timedelta + nat_series_dtype_timestamp,
2258
+ nat_series_dtype_timestamp,
2259
+ )
2260
+
2261
+ tm.assert_series_equal(
2262
+ nat_series_dtype_timestamp + single_nat_dtype_timedelta,
2263
+ nat_series_dtype_timestamp,
2264
+ )
2265
+ tm.assert_series_equal(
2266
+ single_nat_dtype_timedelta + nat_series_dtype_timestamp,
2267
+ nat_series_dtype_timestamp,
2268
+ )
2269
+
2270
+ tm.assert_series_equal(
2271
+ nat_series_dtype_timedelta + single_nat_dtype_datetime,
2272
+ nat_series_dtype_timestamp,
2273
+ )
2274
+ tm.assert_series_equal(
2275
+ single_nat_dtype_datetime + nat_series_dtype_timedelta,
2276
+ nat_series_dtype_timestamp,
2277
+ )
2278
+
2279
+ def test_ufunc_coercions(self, unit):
2280
+ idx = date_range("2011-01-01", periods=3, freq="2D", name="x", unit=unit)
2281
+
2282
+ delta = np.timedelta64(1, "D")
2283
+ exp = date_range("2011-01-02", periods=3, freq="2D", name="x", unit=unit)
2284
+ for result in [idx + delta, np.add(idx, delta)]:
2285
+ assert isinstance(result, DatetimeIndex)
2286
+ tm.assert_index_equal(result, exp)
2287
+ assert result.freq == "2D"
2288
+
2289
+ exp = date_range("2010-12-31", periods=3, freq="2D", name="x", unit=unit)
2290
+
2291
+ for result in [idx - delta, np.subtract(idx, delta)]:
2292
+ assert isinstance(result, DatetimeIndex)
2293
+ tm.assert_index_equal(result, exp)
2294
+ assert result.freq == "2D"
2295
+
2296
+ # When adding/subtracting an ndarray (which has no .freq), the result
2297
+ # does not infer freq
2298
+ idx = idx._with_freq(None)
2299
+ delta = np.array(
2300
+ [np.timedelta64(1, "D"), np.timedelta64(2, "D"), np.timedelta64(3, "D")]
2301
+ )
2302
+ exp = DatetimeIndex(
2303
+ ["2011-01-02", "2011-01-05", "2011-01-08"], name="x"
2304
+ ).as_unit(unit)
2305
+
2306
+ for result in [idx + delta, np.add(idx, delta)]:
2307
+ tm.assert_index_equal(result, exp)
2308
+ assert result.freq == exp.freq
2309
+
2310
+ exp = DatetimeIndex(
2311
+ ["2010-12-31", "2011-01-01", "2011-01-02"], name="x"
2312
+ ).as_unit(unit)
2313
+ for result in [idx - delta, np.subtract(idx, delta)]:
2314
+ assert isinstance(result, DatetimeIndex)
2315
+ tm.assert_index_equal(result, exp)
2316
+ assert result.freq == exp.freq
2317
+
2318
+ def test_dti_add_series(self, tz_naive_fixture, names):
2319
+ # GH#13905
2320
+ tz = tz_naive_fixture
2321
+ index = DatetimeIndex(
2322
+ ["2016-06-28 05:30", "2016-06-28 05:31"], tz=tz, name=names[0]
2323
+ ).as_unit("ns")
2324
+ ser = Series([Timedelta(seconds=5)] * 2, index=index, name=names[1])
2325
+ expected = Series(index + Timedelta(seconds=5), index=index, name=names[2])
2326
+
2327
+ # passing name arg isn't enough when names[2] is None
2328
+ expected.name = names[2]
2329
+ assert expected.dtype == index.dtype
2330
+ result = ser + index
2331
+ tm.assert_series_equal(result, expected)
2332
+ result2 = index + ser
2333
+ tm.assert_series_equal(result2, expected)
2334
+
2335
+ expected = index + Timedelta(seconds=5)
2336
+ result3 = ser.values + index
2337
+ tm.assert_index_equal(result3, expected)
2338
+ result4 = index + ser.values
2339
+ tm.assert_index_equal(result4, expected)
2340
+
2341
+ @pytest.mark.parametrize("op", [operator.add, roperator.radd, operator.sub])
2342
+ def test_dti_addsub_offset_arraylike(
2343
+ self, tz_naive_fixture, names, op, index_or_series
2344
+ ):
2345
+ # GH#18849, GH#19744
2346
+ other_box = index_or_series
2347
+
2348
+ tz = tz_naive_fixture
2349
+ dti = date_range("2017-01-01", periods=2, tz=tz, name=names[0])
2350
+ other = other_box([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)], name=names[1])
2351
+
2352
+ xbox = get_upcast_box(dti, other)
2353
+
2354
+ with tm.assert_produces_warning(PerformanceWarning):
2355
+ res = op(dti, other)
2356
+
2357
+ expected = DatetimeIndex(
2358
+ [op(dti[n], other[n]) for n in range(len(dti))], name=names[2], freq="infer"
2359
+ )
2360
+ expected = tm.box_expected(expected, xbox).astype(object)
2361
+ tm.assert_equal(res, expected)
2362
+
2363
+ @pytest.mark.parametrize("other_box", [pd.Index, np.array])
2364
+ def test_dti_addsub_object_arraylike(
2365
+ self, tz_naive_fixture, box_with_array, other_box
2366
+ ):
2367
+ tz = tz_naive_fixture
2368
+
2369
+ dti = date_range("2017-01-01", periods=2, tz=tz)
2370
+ dtarr = tm.box_expected(dti, box_with_array)
2371
+ other = other_box([pd.offsets.MonthEnd(), Timedelta(days=4)])
2372
+ xbox = get_upcast_box(dtarr, other)
2373
+
2374
+ expected = DatetimeIndex(["2017-01-31", "2017-01-06"], tz=tz_naive_fixture)
2375
+ expected = tm.box_expected(expected, xbox).astype(object)
2376
+
2377
+ with tm.assert_produces_warning(PerformanceWarning):
2378
+ result = dtarr + other
2379
+ tm.assert_equal(result, expected)
2380
+
2381
+ expected = DatetimeIndex(["2016-12-31", "2016-12-29"], tz=tz_naive_fixture)
2382
+ expected = tm.box_expected(expected, xbox).astype(object)
2383
+
2384
+ with tm.assert_produces_warning(PerformanceWarning):
2385
+ result = dtarr - other
2386
+ tm.assert_equal(result, expected)
2387
+
2388
+
2389
+ @pytest.mark.parametrize("years", [-1, 0, 1])
2390
+ @pytest.mark.parametrize("months", [-2, 0, 2])
2391
+ @pytest.mark.parametrize("unit", ["s", "ms", "us", "ns"])
2392
+ def test_shift_months(years, months, unit):
2393
+ dti = DatetimeIndex(
2394
+ [
2395
+ Timestamp("2000-01-05 00:15:00"),
2396
+ Timestamp("2000-01-31 00:23:00"),
2397
+ Timestamp("2000-01-01"),
2398
+ Timestamp("2000-02-29"),
2399
+ Timestamp("2000-12-31"),
2400
+ ]
2401
+ ).as_unit(unit)
2402
+ shifted = shift_months(dti.asi8, years * 12 + months, reso=dti._data._creso)
2403
+ shifted_dt64 = shifted.view(f"M8[{dti.unit}]")
2404
+ actual = DatetimeIndex(shifted_dt64)
2405
+
2406
+ raw = [x + pd.offsets.DateOffset(years=years, months=months) for x in dti]
2407
+ expected = DatetimeIndex(raw).as_unit(dti.unit)
2408
+ tm.assert_index_equal(actual, expected)
2409
+
2410
+
2411
+ def test_dt64arr_addsub_object_dtype_2d():
2412
+ # block-wise DataFrame operations will require operating on 2D
2413
+ # DatetimeArray/TimedeltaArray, so check that specifically.
2414
+ dti = date_range("1994-02-13", freq="2W", periods=4)
2415
+ dta = dti._data.reshape((4, 1))
2416
+
2417
+ other = np.array([[pd.offsets.Day(n)] for n in range(4)])
2418
+ assert other.shape == dta.shape
2419
+
2420
+ with tm.assert_produces_warning(PerformanceWarning):
2421
+ result = dta + other
2422
+ with tm.assert_produces_warning(PerformanceWarning):
2423
+ expected = (dta[:, 0] + other[:, 0]).reshape(-1, 1)
2424
+
2425
+ tm.assert_numpy_array_equal(result, expected)
2426
+
2427
+ with tm.assert_produces_warning(PerformanceWarning):
2428
+ # Case where we expect to get a TimedeltaArray back
2429
+ result2 = dta - dta.astype(object)
2430
+
2431
+ assert result2.shape == (4, 1)
2432
+ assert all(td._value == 0 for td in result2.ravel())
2433
+
2434
+
2435
+ def test_non_nano_dt64_addsub_np_nat_scalars():
2436
+ # GH 52295
2437
+ ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
2438
+ result = ser - np.datetime64("nat", "ms")
2439
+ expected = Series([NaT] * 3, dtype="timedelta64[ms]")
2440
+ tm.assert_series_equal(result, expected)
2441
+
2442
+ result = ser + np.timedelta64("nat", "ms")
2443
+ expected = Series([NaT] * 3, dtype="datetime64[ms]")
2444
+ tm.assert_series_equal(result, expected)
2445
+
2446
+
2447
+ def test_non_nano_dt64_addsub_np_nat_scalars_unitless():
2448
+ # GH 52295
2449
+ # TODO: Can we default to the ser unit?
2450
+ ser = Series([1233242342344, 232432434324, 332434242344], dtype="datetime64[ms]")
2451
+ result = ser - np.datetime64("nat")
2452
+ expected = Series([NaT] * 3, dtype="timedelta64[ns]")
2453
+ tm.assert_series_equal(result, expected)
2454
+
2455
+ result = ser + np.timedelta64("nat")
2456
+ expected = Series([NaT] * 3, dtype="datetime64[ns]")
2457
+ tm.assert_series_equal(result, expected)
2458
+
2459
+
2460
+ def test_non_nano_dt64_addsub_np_nat_scalars_unsupported_unit():
2461
+ # GH 52295
2462
+ ser = Series([12332, 23243, 33243], dtype="datetime64[s]")
2463
+ result = ser - np.datetime64("nat", "D")
2464
+ expected = Series([NaT] * 3, dtype="timedelta64[s]")
2465
+ tm.assert_series_equal(result, expected)
2466
+
2467
+ result = ser + np.timedelta64("nat", "D")
2468
+ expected = Series([NaT] * 3, dtype="datetime64[s]")
2469
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_interval.py ADDED
@@ -0,0 +1,306 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import operator
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.core.dtypes.common import is_list_like
7
+
8
+ import pandas as pd
9
+ from pandas import (
10
+ Categorical,
11
+ Index,
12
+ Interval,
13
+ IntervalIndex,
14
+ Period,
15
+ Series,
16
+ Timedelta,
17
+ Timestamp,
18
+ date_range,
19
+ period_range,
20
+ timedelta_range,
21
+ )
22
+ import pandas._testing as tm
23
+ from pandas.core.arrays import (
24
+ BooleanArray,
25
+ IntervalArray,
26
+ )
27
+ from pandas.tests.arithmetic.common import get_upcast_box
28
+
29
+
30
+ @pytest.fixture(
31
+ params=[
32
+ (Index([0, 2, 4, 4]), Index([1, 3, 5, 8])),
33
+ (Index([0.0, 1.0, 2.0, np.nan]), Index([1.0, 2.0, 3.0, np.nan])),
34
+ (
35
+ timedelta_range("0 days", periods=3).insert(3, pd.NaT),
36
+ timedelta_range("1 day", periods=3).insert(3, pd.NaT),
37
+ ),
38
+ (
39
+ date_range("20170101", periods=3).insert(3, pd.NaT),
40
+ date_range("20170102", periods=3).insert(3, pd.NaT),
41
+ ),
42
+ (
43
+ date_range("20170101", periods=3, tz="US/Eastern").insert(3, pd.NaT),
44
+ date_range("20170102", periods=3, tz="US/Eastern").insert(3, pd.NaT),
45
+ ),
46
+ ],
47
+ ids=lambda x: str(x[0].dtype),
48
+ )
49
+ def left_right_dtypes(request):
50
+ """
51
+ Fixture for building an IntervalArray from various dtypes
52
+ """
53
+ return request.param
54
+
55
+
56
+ @pytest.fixture
57
+ def interval_array(left_right_dtypes):
58
+ """
59
+ Fixture to generate an IntervalArray of various dtypes containing NA if possible
60
+ """
61
+ left, right = left_right_dtypes
62
+ return IntervalArray.from_arrays(left, right)
63
+
64
+
65
+ def create_categorical_intervals(left, right, closed="right"):
66
+ return Categorical(IntervalIndex.from_arrays(left, right, closed))
67
+
68
+
69
+ def create_series_intervals(left, right, closed="right"):
70
+ return Series(IntervalArray.from_arrays(left, right, closed))
71
+
72
+
73
+ def create_series_categorical_intervals(left, right, closed="right"):
74
+ return Series(Categorical(IntervalIndex.from_arrays(left, right, closed)))
75
+
76
+
77
+ class TestComparison:
78
+ @pytest.fixture(params=[operator.eq, operator.ne])
79
+ def op(self, request):
80
+ return request.param
81
+
82
+ @pytest.fixture(
83
+ params=[
84
+ IntervalArray.from_arrays,
85
+ IntervalIndex.from_arrays,
86
+ create_categorical_intervals,
87
+ create_series_intervals,
88
+ create_series_categorical_intervals,
89
+ ],
90
+ ids=[
91
+ "IntervalArray",
92
+ "IntervalIndex",
93
+ "Categorical[Interval]",
94
+ "Series[Interval]",
95
+ "Series[Categorical[Interval]]",
96
+ ],
97
+ )
98
+ def interval_constructor(self, request):
99
+ """
100
+ Fixture for all pandas native interval constructors.
101
+ To be used as the LHS of IntervalArray comparisons.
102
+ """
103
+ return request.param
104
+
105
+ def elementwise_comparison(self, op, interval_array, other):
106
+ """
107
+ Helper that performs elementwise comparisons between `array` and `other`
108
+ """
109
+ other = other if is_list_like(other) else [other] * len(interval_array)
110
+ expected = np.array([op(x, y) for x, y in zip(interval_array, other)])
111
+ if isinstance(other, Series):
112
+ return Series(expected, index=other.index)
113
+ return expected
114
+
115
+ def test_compare_scalar_interval(self, op, interval_array):
116
+ # matches first interval
117
+ other = interval_array[0]
118
+ result = op(interval_array, other)
119
+ expected = self.elementwise_comparison(op, interval_array, other)
120
+ tm.assert_numpy_array_equal(result, expected)
121
+
122
+ # matches on a single endpoint but not both
123
+ other = Interval(interval_array.left[0], interval_array.right[1])
124
+ result = op(interval_array, other)
125
+ expected = self.elementwise_comparison(op, interval_array, other)
126
+ tm.assert_numpy_array_equal(result, expected)
127
+
128
+ def test_compare_scalar_interval_mixed_closed(self, op, closed, other_closed):
129
+ interval_array = IntervalArray.from_arrays(range(2), range(1, 3), closed=closed)
130
+ other = Interval(0, 1, closed=other_closed)
131
+
132
+ result = op(interval_array, other)
133
+ expected = self.elementwise_comparison(op, interval_array, other)
134
+ tm.assert_numpy_array_equal(result, expected)
135
+
136
+ def test_compare_scalar_na(self, op, interval_array, nulls_fixture, box_with_array):
137
+ box = box_with_array
138
+ obj = tm.box_expected(interval_array, box)
139
+ result = op(obj, nulls_fixture)
140
+
141
+ if nulls_fixture is pd.NA:
142
+ # GH#31882
143
+ exp = np.ones(interval_array.shape, dtype=bool)
144
+ expected = BooleanArray(exp, exp)
145
+ else:
146
+ expected = self.elementwise_comparison(op, interval_array, nulls_fixture)
147
+
148
+ if not (box is Index and nulls_fixture is pd.NA):
149
+ # don't cast expected from BooleanArray to ndarray[object]
150
+ xbox = get_upcast_box(obj, nulls_fixture, True)
151
+ expected = tm.box_expected(expected, xbox)
152
+
153
+ tm.assert_equal(result, expected)
154
+
155
+ rev = op(nulls_fixture, obj)
156
+ tm.assert_equal(rev, expected)
157
+
158
+ @pytest.mark.parametrize(
159
+ "other",
160
+ [
161
+ 0,
162
+ 1.0,
163
+ True,
164
+ "foo",
165
+ Timestamp("2017-01-01"),
166
+ Timestamp("2017-01-01", tz="US/Eastern"),
167
+ Timedelta("0 days"),
168
+ Period("2017-01-01", "D"),
169
+ ],
170
+ )
171
+ def test_compare_scalar_other(self, op, interval_array, other):
172
+ result = op(interval_array, other)
173
+ expected = self.elementwise_comparison(op, interval_array, other)
174
+ tm.assert_numpy_array_equal(result, expected)
175
+
176
+ def test_compare_list_like_interval(self, op, interval_array, interval_constructor):
177
+ # same endpoints
178
+ other = interval_constructor(interval_array.left, interval_array.right)
179
+ result = op(interval_array, other)
180
+ expected = self.elementwise_comparison(op, interval_array, other)
181
+ tm.assert_equal(result, expected)
182
+
183
+ # different endpoints
184
+ other = interval_constructor(
185
+ interval_array.left[::-1], interval_array.right[::-1]
186
+ )
187
+ result = op(interval_array, other)
188
+ expected = self.elementwise_comparison(op, interval_array, other)
189
+ tm.assert_equal(result, expected)
190
+
191
+ # all nan endpoints
192
+ other = interval_constructor([np.nan] * 4, [np.nan] * 4)
193
+ result = op(interval_array, other)
194
+ expected = self.elementwise_comparison(op, interval_array, other)
195
+ tm.assert_equal(result, expected)
196
+
197
+ def test_compare_list_like_interval_mixed_closed(
198
+ self, op, interval_constructor, closed, other_closed
199
+ ):
200
+ interval_array = IntervalArray.from_arrays(range(2), range(1, 3), closed=closed)
201
+ other = interval_constructor(range(2), range(1, 3), closed=other_closed)
202
+
203
+ result = op(interval_array, other)
204
+ expected = self.elementwise_comparison(op, interval_array, other)
205
+ tm.assert_equal(result, expected)
206
+
207
+ @pytest.mark.parametrize(
208
+ "other",
209
+ [
210
+ (
211
+ Interval(0, 1),
212
+ Interval(Timedelta("1 day"), Timedelta("2 days")),
213
+ Interval(4, 5, "both"),
214
+ Interval(10, 20, "neither"),
215
+ ),
216
+ (0, 1.5, Timestamp("20170103"), np.nan),
217
+ (
218
+ Timestamp("20170102", tz="US/Eastern"),
219
+ Timedelta("2 days"),
220
+ "baz",
221
+ pd.NaT,
222
+ ),
223
+ ],
224
+ )
225
+ def test_compare_list_like_object(self, op, interval_array, other):
226
+ result = op(interval_array, other)
227
+ expected = self.elementwise_comparison(op, interval_array, other)
228
+ tm.assert_numpy_array_equal(result, expected)
229
+
230
+ def test_compare_list_like_nan(self, op, interval_array, nulls_fixture):
231
+ other = [nulls_fixture] * 4
232
+ result = op(interval_array, other)
233
+ expected = self.elementwise_comparison(op, interval_array, other)
234
+
235
+ tm.assert_equal(result, expected)
236
+
237
+ @pytest.mark.parametrize(
238
+ "other",
239
+ [
240
+ np.arange(4, dtype="int64"),
241
+ np.arange(4, dtype="float64"),
242
+ date_range("2017-01-01", periods=4),
243
+ date_range("2017-01-01", periods=4, tz="US/Eastern"),
244
+ timedelta_range("0 days", periods=4),
245
+ period_range("2017-01-01", periods=4, freq="D"),
246
+ Categorical(list("abab")),
247
+ Categorical(date_range("2017-01-01", periods=4)),
248
+ pd.array(list("abcd")),
249
+ pd.array(["foo", 3.14, None, object()], dtype=object),
250
+ ],
251
+ ids=lambda x: str(x.dtype),
252
+ )
253
+ def test_compare_list_like_other(self, op, interval_array, other):
254
+ result = op(interval_array, other)
255
+ expected = self.elementwise_comparison(op, interval_array, other)
256
+ tm.assert_numpy_array_equal(result, expected)
257
+
258
+ @pytest.mark.parametrize("length", [1, 3, 5])
259
+ @pytest.mark.parametrize("other_constructor", [IntervalArray, list])
260
+ def test_compare_length_mismatch_errors(self, op, other_constructor, length):
261
+ interval_array = IntervalArray.from_arrays(range(4), range(1, 5))
262
+ other = other_constructor([Interval(0, 1)] * length)
263
+ with pytest.raises(ValueError, match="Lengths must match to compare"):
264
+ op(interval_array, other)
265
+
266
+ @pytest.mark.parametrize(
267
+ "constructor, expected_type, assert_func",
268
+ [
269
+ (IntervalIndex, np.array, tm.assert_numpy_array_equal),
270
+ (Series, Series, tm.assert_series_equal),
271
+ ],
272
+ )
273
+ def test_index_series_compat(self, op, constructor, expected_type, assert_func):
274
+ # IntervalIndex/Series that rely on IntervalArray for comparisons
275
+ breaks = range(4)
276
+ index = constructor(IntervalIndex.from_breaks(breaks))
277
+
278
+ # scalar comparisons
279
+ other = index[0]
280
+ result = op(index, other)
281
+ expected = expected_type(self.elementwise_comparison(op, index, other))
282
+ assert_func(result, expected)
283
+
284
+ other = breaks[0]
285
+ result = op(index, other)
286
+ expected = expected_type(self.elementwise_comparison(op, index, other))
287
+ assert_func(result, expected)
288
+
289
+ # list-like comparisons
290
+ other = IntervalArray.from_breaks(breaks)
291
+ result = op(index, other)
292
+ expected = expected_type(self.elementwise_comparison(op, index, other))
293
+ assert_func(result, expected)
294
+
295
+ other = [index[0], breaks[0], "foo"]
296
+ result = op(index, other)
297
+ expected = expected_type(self.elementwise_comparison(op, index, other))
298
+ assert_func(result, expected)
299
+
300
+ @pytest.mark.parametrize("scalars", ["a", False, 1, 1.0, None])
301
+ def test_comparison_operations(self, scalars):
302
+ # GH #28981
303
+ expected = Series([False, False])
304
+ s = Series([Interval(0, 1), Interval(1, 2)], dtype="interval")
305
+ result = s == scalars
306
+ tm.assert_series_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_numeric.py ADDED
@@ -0,0 +1,1567 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Arithmetic tests for DataFrame/Series/Index/Array classes that should
2
+ # behave identically.
3
+ # Specifically for numeric dtypes
4
+ from __future__ import annotations
5
+
6
+ from collections import abc
7
+ from datetime import timedelta
8
+ from decimal import Decimal
9
+ import operator
10
+
11
+ import numpy as np
12
+ import pytest
13
+
14
+ import pandas as pd
15
+ from pandas import (
16
+ Index,
17
+ RangeIndex,
18
+ Series,
19
+ Timedelta,
20
+ TimedeltaIndex,
21
+ array,
22
+ date_range,
23
+ )
24
+ import pandas._testing as tm
25
+ from pandas.core import ops
26
+ from pandas.core.computation import expressions as expr
27
+ from pandas.tests.arithmetic.common import (
28
+ assert_invalid_addsub_type,
29
+ assert_invalid_comparison,
30
+ )
31
+
32
+
33
+ @pytest.fixture(autouse=True, params=[0, 1000000], ids=["numexpr", "python"])
34
+ def switch_numexpr_min_elements(request, monkeypatch):
35
+ with monkeypatch.context() as m:
36
+ m.setattr(expr, "_MIN_ELEMENTS", request.param)
37
+ yield request.param
38
+
39
+
40
+ @pytest.fixture(params=[Index, Series, tm.to_array])
41
+ def box_pandas_1d_array(request):
42
+ """
43
+ Fixture to test behavior for Index, Series and tm.to_array classes
44
+ """
45
+ return request.param
46
+
47
+
48
+ @pytest.fixture(
49
+ params=[
50
+ # TODO: add more dtypes here
51
+ Index(np.arange(5, dtype="float64")),
52
+ Index(np.arange(5, dtype="int64")),
53
+ Index(np.arange(5, dtype="uint64")),
54
+ RangeIndex(5),
55
+ ],
56
+ ids=lambda x: type(x).__name__,
57
+ )
58
+ def numeric_idx(request):
59
+ """
60
+ Several types of numeric-dtypes Index objects
61
+ """
62
+ return request.param
63
+
64
+
65
+ @pytest.fixture(
66
+ params=[Index, Series, tm.to_array, np.array, list], ids=lambda x: x.__name__
67
+ )
68
+ def box_1d_array(request):
69
+ """
70
+ Fixture to test behavior for Index, Series, tm.to_array, numpy Array and list
71
+ classes
72
+ """
73
+ return request.param
74
+
75
+
76
+ def adjust_negative_zero(zero, expected):
77
+ """
78
+ Helper to adjust the expected result if we are dividing by -0.0
79
+ as opposed to 0.0
80
+ """
81
+ if np.signbit(np.array(zero)).any():
82
+ # All entries in the `zero` fixture should be either
83
+ # all-negative or no-negative.
84
+ assert np.signbit(np.array(zero)).all()
85
+
86
+ expected *= -1
87
+
88
+ return expected
89
+
90
+
91
def compare_op(series, other, op):
    """
    Assert that the vectorized result of ``op(series, other)`` matches an
    element-wise evaluation via ``Series.combine``.

    For power ops both operands are made non-negative first, to avoid
    NaN/complex results from negative bases.
    """
    is_pow = op in (ops.rpow, operator.pow)
    left = np.abs(series) if is_pow else series
    right = np.abs(other) if is_pow else other

    fast_result = op(left, right)
    slow_result = left.combine(right, op)
    if isinstance(other, Series) and not other.index.equals(series.index):
        # Alignment of mismatched indexes drops any freq on the result index.
        slow_result.index = slow_result.index._with_freq(None)
    tm.assert_series_equal(fast_result, slow_result)
100
+
101
+
102
+ # TODO: remove this kludge once mypy stops giving false positives here
103
+ # List comprehension has incompatible type List[PandasObject]; expected List[RangeIndex]
104
+ # See GH#29725
105
+ _ldtypes = ["i1", "i2", "i4", "i8", "u1", "u2", "u4", "u8", "f2", "f4", "f8"]
106
+ lefts: list[Index | Series] = [RangeIndex(10, 40, 10)]
107
+ lefts.extend([Series([10, 20, 30], dtype=dtype) for dtype in _ldtypes])
108
+ lefts.extend([Index([10, 20, 30], dtype=dtype) for dtype in _ldtypes if dtype != "f2"])
109
+
110
+ # ------------------------------------------------------------------
111
+ # Comparisons
112
+
113
+
114
class TestNumericComparisons:
    """Comparisons between numeric arrays/Series and scalars or strings."""

    def test_operator_series_comparison_zerorank(self):
        # GH#13006: zero-rank numpy scalars on the left must defer to Series.
        result = np.float64(0) > Series([1, 2, 3])
        expected = 0.0 > Series([1, 2, 3])
        tm.assert_series_equal(result, expected)
        result = Series([1, 2, 3]) < np.float64(0)
        expected = Series([1, 2, 3]) < 0.0
        tm.assert_series_equal(result, expected)
        # NOTE(review): the operands differ ([0, 1, 2] vs [1, 2, 3]) but both
        # comparisons are all-False, so the check still passes — presumably a
        # copy/paste slip upstream; verify intent before "fixing".
        result = np.array([0, 1, 2])[0] > Series([0, 1, 2])
        expected = 0.0 > Series([1, 2, 3])
        tm.assert_series_equal(result, expected)

    def test_df_numeric_cmp_dt64_raises(self, box_with_array, fixed_now_ts):
        # GH#8932, GH#22163: comparing numeric data to a Timestamp must raise
        # for ordering comparisons and return all-False/all-True for ==/!=.
        ts = fixed_now_ts
        obj = np.array(range(5))
        obj = tm.box_expected(obj, box_with_array)

        assert_invalid_comparison(obj, ts, box_with_array)

    def test_compare_invalid(self):
        # GH#8058
        # ops testing
        # NOTE(review): despite the name this exercises division identities
        # (a / b == 1 / (b / a)) with a Timestamp-named Series, not comparisons.
        a = Series(np.random.default_rng(2).standard_normal(5), name=0)
        b = Series(np.random.default_rng(2).standard_normal(5))
        b.name = pd.Timestamp("2000-01-01")
        tm.assert_series_equal(a / b, 1 / (b / a))

    def test_numeric_cmp_string_numexpr_path(self, box_with_array, monkeypatch):
        # GH#36377, GH#35700: float vs str comparisons through the numexpr
        # path (forced by lowering _MIN_ELEMENTS below the 51-element size).
        box = box_with_array
        xbox = box if box is not Index else np.ndarray

        obj = Series(np.random.default_rng(2).standard_normal(51))
        obj = tm.box_expected(obj, box, transpose=False)
        with monkeypatch.context() as m:
            m.setattr(expr, "_MIN_ELEMENTS", 50)
            result = obj == "a"

        # Equality with a string is never True for floats.
        expected = Series(np.zeros(51, dtype=bool))
        expected = tm.box_expected(expected, xbox, transpose=False)
        tm.assert_equal(result, expected)

        with monkeypatch.context() as m:
            m.setattr(expr, "_MIN_ELEMENTS", 50)
            result = obj != "a"
        tm.assert_equal(result, ~expected)

        # Ordering comparisons against a string must raise.
        msg = "Invalid comparison between dtype=float64 and str"
        with pytest.raises(TypeError, match=msg):
            obj < "a"
166
+
167
+
168
+ # ------------------------------------------------------------------
169
+ # Numeric dtypes Arithmetic with Datetime/Timedelta Scalar
170
+
171
+
172
class TestNumericArraylikeArithmeticWithDatetimeLike:
    """
    Arithmetic between numeric array-likes and timedelta64/datetime-like
    operands: multiplication/division by td64 is allowed; +/- must raise.
    """

    # box_cls is applied to the td64 operand; `left` covers many numeric dtypes
    @pytest.mark.parametrize("box_cls", [np.array, Index, Series])
    @pytest.mark.parametrize(
        "left", lefts, ids=lambda x: type(x).__name__ + str(x.dtype)
    )
    def test_mul_td64arr(self, left, box_cls):
        # GH#22390: numeric * td64 yields timedelta64, commutatively.
        right = np.array([1, 2, 3], dtype="m8[s]")
        right = box_cls(right)

        expected = TimedeltaIndex(["10s", "40s", "90s"], dtype=right.dtype)

        if isinstance(left, Series) or box_cls is Series:
            expected = Series(expected)
        assert expected.dtype == right.dtype

        result = left * right
        tm.assert_equal(result, expected)

        result = right * left
        tm.assert_equal(result, expected)

    @pytest.mark.parametrize("box_cls", [np.array, Index, Series])
    @pytest.mark.parametrize(
        "left", lefts, ids=lambda x: type(x).__name__ + str(x.dtype)
    )
    def test_div_td64arr(self, left, box_cls):
        # GH#22390: td64 / numeric works; numeric / td64 must raise.
        right = np.array([10, 40, 90], dtype="m8[s]")
        right = box_cls(right)

        expected = TimedeltaIndex(["1s", "2s", "3s"], dtype=right.dtype)
        if isinstance(left, Series) or box_cls is Series:
            expected = Series(expected)
        assert expected.dtype == right.dtype

        result = right / left
        tm.assert_equal(result, expected)

        result = right // left
        tm.assert_equal(result, expected)

        # (true_) needed for min-versions build 2022-12-26
        msg = "ufunc '(true_)?divide' cannot use operands with types"
        with pytest.raises(TypeError, match=msg):
            left / right

        msg = "ufunc 'floor_divide' cannot use operands with types"
        with pytest.raises(TypeError, match=msg):
            left // right

    # TODO: also test Tick objects;
    # see test_numeric_arr_rdiv_tdscalar for note on these failing
    @pytest.mark.parametrize(
        "scalar_td",
        [
            Timedelta(days=1),
            Timedelta(days=1).to_timedelta64(),
            Timedelta(days=1).to_pytimedelta(),
            Timedelta(days=1).to_timedelta64().astype("timedelta64[s]"),
            Timedelta(days=1).to_timedelta64().astype("timedelta64[ms]"),
        ],
        ids=lambda x: type(x).__name__,
    )
    def test_numeric_arr_mul_tdscalar(self, scalar_td, numeric_idx, box_with_array):
        # GH#19333: numeric array * timedelta scalar, both orders.
        box = box_with_array
        index = numeric_idx
        expected = TimedeltaIndex([Timedelta(days=n) for n in range(len(index))])
        if isinstance(scalar_td, np.timedelta64):
            # Result unit follows the np.timedelta64 scalar's unit.
            dtype = scalar_td.dtype
            expected = expected.astype(dtype)
        elif type(scalar_td) is timedelta:
            # datetime.timedelta has microsecond resolution.
            expected = expected.astype("m8[us]")

        index = tm.box_expected(index, box)
        expected = tm.box_expected(expected, box)

        result = index * scalar_td
        tm.assert_equal(result, expected)

        commute = scalar_td * index
        tm.assert_equal(commute, expected)

    @pytest.mark.parametrize(
        "scalar_td",
        [
            Timedelta(days=1),
            Timedelta(days=1).to_timedelta64(),
            Timedelta(days=1).to_pytimedelta(),
        ],
        ids=lambda x: type(x).__name__,
    )
    @pytest.mark.parametrize("dtype", [np.int64, np.float64])
    def test_numeric_arr_mul_tdscalar_numexpr_path(
        self, dtype, scalar_td, box_with_array
    ):
        # GH#44772 for the float64 case
        # 2*10**4 elements is large enough to take the numexpr path.
        box = box_with_array

        arr_i8 = np.arange(2 * 10**4).astype(np.int64, copy=False)
        arr = arr_i8.astype(dtype, copy=False)
        obj = tm.box_expected(arr, box, transpose=False)

        expected = arr_i8.view("timedelta64[D]").astype("timedelta64[ns]")
        if type(scalar_td) is timedelta:
            expected = expected.astype("timedelta64[us]")

        expected = tm.box_expected(expected, box, transpose=False)

        result = obj * scalar_td
        tm.assert_equal(result, expected)

        result = scalar_td * obj
        tm.assert_equal(result, expected)

    def test_numeric_arr_rdiv_tdscalar(self, three_days, numeric_idx, box_with_array):
        # timedelta scalar / numeric array works; the reverse must raise.
        box = box_with_array

        index = numeric_idx[1:3]

        expected = TimedeltaIndex(["3 Days", "36 Hours"])
        if isinstance(three_days, np.timedelta64):
            dtype = three_days.dtype
            if dtype < np.dtype("m8[s]"):
                # i.e. resolution is lower -> use lowest supported resolution
                dtype = np.dtype("m8[s]")
            expected = expected.astype(dtype)
        elif type(three_days) is timedelta:
            expected = expected.astype("m8[us]")
        elif isinstance(
            three_days,
            (pd.offsets.Day, pd.offsets.Hour, pd.offsets.Minute, pd.offsets.Second),
        ):
            # closest reso is Second
            expected = expected.astype("m8[s]")

        index = tm.box_expected(index, box)
        expected = tm.box_expected(expected, box)

        result = three_days / index
        tm.assert_equal(result, expected)

        msg = "cannot use operands with types dtype"
        with pytest.raises(TypeError, match=msg):
            index / three_days

    @pytest.mark.parametrize(
        "other",
        [
            Timedelta(hours=31),
            Timedelta(hours=31).to_pytimedelta(),
            Timedelta(hours=31).to_timedelta64(),
            Timedelta(hours=31).to_timedelta64().astype("m8[h]"),
            np.timedelta64("NaT"),
            np.timedelta64("NaT", "D"),
            pd.offsets.Minute(3),
            pd.offsets.Second(0),
            # GH#28080 numeric+datetimelike should raise; Timestamp used
            # to raise NullFrequencyError but that behavior was removed in 1.0
            pd.Timestamp("2021-01-01", tz="Asia/Tokyo"),
            pd.Timestamp("2021-01-01"),
            pd.Timestamp("2021-01-01").to_pydatetime(),
            pd.Timestamp("2021-01-01", tz="UTC").to_pydatetime(),
            pd.Timestamp("2021-01-01").to_datetime64(),
            np.datetime64("NaT", "ns"),
            pd.NaT,
        ],
        ids=repr,
    )
    def test_add_sub_datetimedeltalike_invalid(
        self, numeric_idx, other, box_with_array
    ):
        # Adding/subtracting any datetime-like or timedelta-like scalar
        # to/from a numeric array must raise TypeError.
        box = box_with_array

        left = tm.box_expected(numeric_idx, box)
        # The exact message varies by box/operand combination; accept any of
        # the known variants.
        msg = "|".join(
            [
                "unsupported operand type",
                "Addition/subtraction of integers and integer-arrays",
                "Instead of adding/subtracting",
                "cannot use operands with types dtype",
                "Concatenation operation is not implemented for NumPy arrays",
                "Cannot (add|subtract) NaT (to|from) ndarray",
                # pd.array vs np.datetime64 case
                r"operand type\(s\) all returned NotImplemented from __array_ufunc__",
                "can only perform ops with numeric values",
                "cannot subtract DatetimeArray from ndarray",
                # pd.Timedelta(1) + Index([0, 1, 2])
                "Cannot add or subtract Timedelta from integers",
            ]
        )
        assert_invalid_addsub_type(left, other, msg)
365
+
366
+
367
+ # ------------------------------------------------------------------
368
+ # Arithmetic
369
+
370
+
371
class TestDivisionByZero:
    """
    Division, floor-division, modulo and divmod by zero for Index, Series
    and DataFrame: pandas maps x/0 to +/-inf (sign-aware) and x%0 to NaN,
    unlike numpy's integer behavior.
    """

    def test_div_zero(self, zero, numeric_idx):
        idx = numeric_idx

        expected = Index([np.nan, np.inf, np.inf, np.inf, np.inf], dtype=np.float64)
        # We only adjust for Index, because Series does not yet apply
        # the adjustment correctly.
        expected2 = adjust_negative_zero(zero, expected)

        result = idx / zero
        tm.assert_index_equal(result, expected2)
        ser_compat = Series(idx).astype("i8") / np.array(zero).astype("i8")
        tm.assert_series_equal(ser_compat, Series(expected))

    def test_floordiv_zero(self, zero, numeric_idx):
        idx = numeric_idx

        expected = Index([np.nan, np.inf, np.inf, np.inf, np.inf], dtype=np.float64)
        # We only adjust for Index, because Series does not yet apply
        # the adjustment correctly.
        expected2 = adjust_negative_zero(zero, expected)

        result = idx // zero
        tm.assert_index_equal(result, expected2)
        ser_compat = Series(idx).astype("i8") // np.array(zero).astype("i8")
        tm.assert_series_equal(ser_compat, Series(expected))

    def test_mod_zero(self, zero, numeric_idx):
        # x % 0 is NaN for every element, regardless of sign of zero.
        idx = numeric_idx

        expected = Index([np.nan, np.nan, np.nan, np.nan, np.nan], dtype=np.float64)
        result = idx % zero
        tm.assert_index_equal(result, expected)
        ser_compat = Series(idx).astype("i8") % np.array(zero).astype("i8")
        tm.assert_series_equal(ser_compat, Series(result))

    def test_divmod_zero(self, zero, numeric_idx):
        # divmod combines the floordiv (inf) and mod (NaN) conventions.
        idx = numeric_idx

        exleft = Index([np.nan, np.inf, np.inf, np.inf, np.inf], dtype=np.float64)
        exright = Index([np.nan, np.nan, np.nan, np.nan, np.nan], dtype=np.float64)
        exleft = adjust_negative_zero(zero, exleft)

        result = divmod(idx, zero)
        tm.assert_index_equal(result[0], exleft)
        tm.assert_index_equal(result[1], exright)

    @pytest.mark.parametrize("op", [operator.truediv, operator.floordiv])
    def test_div_negative_zero(self, zero, numeric_idx, op):
        # Check that -1 / -0.0 returns np.inf, not -np.inf
        if numeric_idx.dtype == np.uint64:
            pytest.skip(f"Div by negative 0 not relevant for {numeric_idx.dtype}")
        idx = numeric_idx - 3

        expected = Index([-np.inf, -np.inf, -np.inf, np.nan, np.inf], dtype=np.float64)
        expected = adjust_negative_zero(zero, expected)

        result = op(idx, zero)
        tm.assert_index_equal(result, expected)

    # ------------------------------------------------------------------

    @pytest.mark.parametrize("dtype1", [np.int64, np.float64, np.uint64])
    def test_ser_div_ser(
        self,
        switch_numexpr_min_elements,
        dtype1,
        any_real_numpy_dtype,
    ):
        # no longer do integer div for any ops, but deal with the 0's
        dtype2 = any_real_numpy_dtype

        first = Series([3, 4, 5, 8], name="first").astype(dtype1)
        second = Series([0, 0, 0, 3], name="second").astype(dtype2)

        with np.errstate(all="ignore"):
            expected = Series(
                first.values.astype(np.float64) / second.values,
                dtype="float64",
                name=None,
            )
        expected.iloc[0:3] = np.inf
        if first.dtype == "int64" and second.dtype == "float32":
            # when using numexpr, the casting rules are slightly different
            # and int64/float32 combo results in float32 instead of float64
            if expr.USE_NUMEXPR and switch_numexpr_min_elements == 0:
                expected = expected.astype("float32")

        result = first / second
        tm.assert_series_equal(result, expected)
        assert not result.equals(second / first)

    @pytest.mark.parametrize("dtype1", [np.int64, np.float64, np.uint64])
    def test_ser_divmod_zero(self, dtype1, any_real_numpy_dtype):
        # GH#26987
        dtype2 = any_real_numpy_dtype
        left = Series([1, 1]).astype(dtype1)
        right = Series([0, 2]).astype(dtype2)

        # GH#27321 pandas convention is to set 1 // 0 to np.inf, as opposed
        # to numpy which sets to np.nan; patch `expected[0]` below
        expected = left // right, left % right
        expected = list(expected)
        expected[0] = expected[0].astype(np.float64)
        expected[0][0] = np.inf
        result = divmod(left, right)

        tm.assert_series_equal(result[0], expected[0])
        tm.assert_series_equal(result[1], expected[1])

        # rdivmod case
        result = divmod(left.values, right)
        tm.assert_series_equal(result[0], expected[0])
        tm.assert_series_equal(result[1], expected[1])

    def test_ser_divmod_inf(self):
        # divmod with infinities should match floordiv/mod applied separately.
        left = Series([np.inf, 1.0])
        right = Series([np.inf, 2.0])

        expected = left // right, left % right
        result = divmod(left, right)

        tm.assert_series_equal(result[0], expected[0])
        tm.assert_series_equal(result[1], expected[1])

        # rdivmod case
        result = divmod(left.values, right)
        tm.assert_series_equal(result[0], expected[0])
        tm.assert_series_equal(result[1], expected[1])

    def test_rdiv_zero_compat(self):
        # GH#8674: 0 / nonzero is 0.0 for every array/Series combination.
        zero_array = np.array([0] * 5)
        data = np.random.default_rng(2).standard_normal(5)
        expected = Series([0.0] * 5)

        result = zero_array / Series(data)
        tm.assert_series_equal(result, expected)

        result = Series(zero_array) / data
        tm.assert_series_equal(result, expected)

        result = Series(zero_array) / Series(data)
        tm.assert_series_equal(result, expected)

    def test_div_zero_inf_signs(self):
        # GH#9144, inf signing
        ser = Series([-1, 0, 1], name="first")
        expected = Series([-np.inf, np.nan, np.inf], name="first")

        result = ser / 0
        tm.assert_series_equal(result, expected)

    def test_rdiv_zero(self):
        # GH#9144: 0 / 0 is NaN, 0 / nonzero is 0.0.
        ser = Series([-1, 0, 1], name="first")
        expected = Series([0.0, np.nan, 0.0], name="first")

        result = 0 / ser
        tm.assert_series_equal(result, expected)

    def test_floordiv_div(self):
        # GH#9144: floordiv by zero follows the same inf-signing as truediv.
        ser = Series([-1, 0, 1], name="first")

        result = ser // 0
        expected = Series([-np.inf, np.nan, np.inf], name="first")
        tm.assert_series_equal(result, expected)

    def test_df_div_zero_df(self):
        # integer div, but deal with the 0's (GH#9144)
        df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
        result = df / df

        first = Series([1.0, 1.0, 1.0, 1.0])
        second = Series([np.nan, np.nan, np.nan, 1])
        expected = pd.DataFrame({"first": first, "second": second})
        tm.assert_frame_equal(result, expected)

    def test_df_div_zero_array(self):
        # integer div, but deal with the 0's (GH#9144)
        df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})

        first = Series([1.0, 1.0, 1.0, 1.0])
        second = Series([np.nan, np.nan, np.nan, 1])
        expected = pd.DataFrame({"first": first, "second": second})

        with np.errstate(all="ignore"):
            arr = df.values.astype("float") / df.values
        result = pd.DataFrame(arr, index=df.index, columns=df.columns)
        tm.assert_frame_equal(result, expected)

    def test_df_div_zero_int(self):
        # integer div, but deal with the 0's (GH#9144)
        df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})

        result = df / 0
        expected = pd.DataFrame(np.inf, index=df.index, columns=df.columns)
        expected.iloc[0:3, 1] = np.nan
        tm.assert_frame_equal(result, expected)

        # numpy has a slightly different (wrong) treatment
        with np.errstate(all="ignore"):
            arr = df.values.astype("float64") / 0
        result2 = pd.DataFrame(arr, index=df.index, columns=df.columns)
        tm.assert_frame_equal(result2, expected)

    def test_df_div_zero_series_does_not_commute(self):
        # integer div, but deal with the 0's (GH#9144)
        df = pd.DataFrame(np.random.default_rng(2).standard_normal((10, 5)))
        ser = df[0]
        res = ser / df
        res2 = df / ser
        assert not res.fillna(0).equals(res2.fillna(0))

    # ------------------------------------------------------------------
    # Mod By Zero

    def test_df_mod_zero_df(self, using_array_manager):
        # GH#3590, modulo as ints
        df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
        # this is technically wrong, as the integer portion is coerced to float
        first = Series([0, 0, 0, 0])
        if not using_array_manager:
            # INFO(ArrayManager) BlockManager doesn't preserve dtype per column
            # while ArrayManager performs ops column-wise and thus preserves
            # dtype if possible
            first = first.astype("float64")
        second = Series([np.nan, np.nan, np.nan, 0])
        expected = pd.DataFrame({"first": first, "second": second})
        result = df % df
        tm.assert_frame_equal(result, expected)

        # GH#38939 If we dont pass copy=False, df is consolidated and
        # result["first"] is float64 instead of int64
        df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]}, copy=False)
        first = Series([0, 0, 0, 0], dtype="int64")
        second = Series([np.nan, np.nan, np.nan, 0])
        expected = pd.DataFrame({"first": first, "second": second})
        result = df % df
        tm.assert_frame_equal(result, expected)

    def test_df_mod_zero_array(self):
        # GH#3590, modulo as ints
        df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})

        # this is technically wrong, as the integer portion is coerced to float
        # ###
        first = Series([0, 0, 0, 0], dtype="float64")
        second = Series([np.nan, np.nan, np.nan, 0])
        expected = pd.DataFrame({"first": first, "second": second})

        # numpy has a slightly different (wrong) treatment
        with np.errstate(all="ignore"):
            arr = df.values % df.values
        result2 = pd.DataFrame(arr, index=df.index, columns=df.columns, dtype="float64")
        result2.iloc[0:3, 1] = np.nan
        tm.assert_frame_equal(result2, expected)

    def test_df_mod_zero_int(self):
        # GH#3590, modulo as ints
        df = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})

        result = df % 0
        expected = pd.DataFrame(np.nan, index=df.index, columns=df.columns)
        tm.assert_frame_equal(result, expected)

        # numpy has a slightly different (wrong) treatment
        with np.errstate(all="ignore"):
            arr = df.values.astype("float64") % 0
        result2 = pd.DataFrame(arr, index=df.index, columns=df.columns)
        tm.assert_frame_equal(result2, expected)

    def test_df_mod_zero_series_does_not_commute(self):
        # GH#3590, modulo as ints
        # not commutative with series
        df = pd.DataFrame(np.random.default_rng(2).standard_normal((10, 5)))
        ser = df[0]
        res = ser % df
        res2 = df % ser
        assert not res.fillna(0).equals(res2.fillna(0))
652
+
653
+
654
class TestMultiplicationDivision:
    # __mul__, __rmul__, __div__, __rdiv__, __floordiv__, __rfloordiv__
    # for non-timestamp/timedelta/period dtypes

    def test_divide_decimal(self, box_with_array):
        # resolves issue GH#9787
        # Decimal values go through object dtype; / and // both work.
        box = box_with_array
        ser = Series([Decimal(10)])
        expected = Series([Decimal(5)])

        ser = tm.box_expected(ser, box)
        expected = tm.box_expected(expected, box)

        result = ser / Decimal(2)

        tm.assert_equal(result, expected)

        result = ser // Decimal(2)
        tm.assert_equal(result, expected)

    def test_div_equiv_binop(self):
        # Test Series.div as well as Series.__div__
        # float/integer issue
        # GH#7785
        first = Series([1, 0], name="first")
        second = Series([-0.01, -0.02], name="second")
        expected = Series([-0.01, -np.inf])

        result = second.div(first)
        tm.assert_series_equal(result, expected, check_names=False)

        result = second / first
        tm.assert_series_equal(result, expected)

    def test_div_int(self, numeric_idx):
        # True division always produces float64, even for x / 1.
        idx = numeric_idx
        result = idx / 1
        expected = idx.astype("float64")
        tm.assert_index_equal(result, expected)

        result = idx / 2
        expected = Index(idx.values / 2)
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize("op", [operator.mul, ops.rmul, operator.floordiv])
    def test_mul_int_identity(self, op, numeric_idx, box_with_array):
        # Multiplying or floor-dividing by 1 is an identity (dtype preserved).
        idx = numeric_idx
        idx = tm.box_expected(idx, box_with_array)

        result = op(idx, 1)
        tm.assert_equal(result, idx)

    def test_mul_int_array(self, numeric_idx):
        idx = numeric_idx
        didx = idx * idx

        result = idx * np.array(5, dtype="int64")
        tm.assert_index_equal(result, idx * 5)

        # uint64 * int64 would upcast; keep the array dtype compatible.
        arr_dtype = "uint64" if idx.dtype == np.uint64 else "int64"
        result = idx * np.arange(5, dtype=arr_dtype)
        tm.assert_index_equal(result, didx)

    def test_mul_int_series(self, numeric_idx):
        idx = numeric_idx
        didx = idx * idx

        arr_dtype = "uint64" if idx.dtype == np.uint64 else "int64"
        result = idx * Series(np.arange(5, dtype=arr_dtype))
        tm.assert_series_equal(result, Series(didx))

    def test_mul_float_series(self, numeric_idx):
        idx = numeric_idx
        rng5 = np.arange(5, dtype="float64")

        result = idx * Series(rng5 + 0.1)
        expected = Series(rng5 * (rng5 + 0.1))
        tm.assert_series_equal(result, expected)

    def test_mul_index(self, numeric_idx):
        idx = numeric_idx

        result = idx * idx
        tm.assert_index_equal(result, idx**2)

    def test_mul_datelike_raises(self, numeric_idx):
        # Numeric index * DatetimeIndex is not defined.
        idx = numeric_idx
        msg = "cannot perform __rmul__ with this index type"
        with pytest.raises(TypeError, match=msg):
            idx * date_range("20130101", periods=5)

    def test_mul_size_mismatch_raises(self, numeric_idx):
        idx = numeric_idx
        msg = "operands could not be broadcast together"
        with pytest.raises(ValueError, match=msg):
            idx * idx[0:3]
        with pytest.raises(ValueError, match=msg):
            idx * np.array([1, 2])

    @pytest.mark.parametrize("op", [operator.pow, ops.rpow])
    def test_pow_float(self, op, numeric_idx, box_with_array):
        # test power calculations both ways, GH#14973
        box = box_with_array
        idx = numeric_idx
        expected = Index(op(idx.values, 2.0))

        idx = tm.box_expected(idx, box)
        expected = tm.box_expected(expected, box)

        result = op(idx, 2.0)
        tm.assert_equal(result, expected)

    def test_modulo(self, numeric_idx, box_with_array):
        # GH#9244
        box = box_with_array
        idx = numeric_idx
        expected = Index(idx.values % 2)

        idx = tm.box_expected(idx, box)
        expected = tm.box_expected(expected, box)

        result = idx % 2
        tm.assert_equal(result, expected)

    def test_divmod_scalar(self, numeric_idx):
        # divmod on an Index matches numpy's divmod on the underlying values.
        idx = numeric_idx

        result = divmod(idx, 2)
        with np.errstate(all="ignore"):
            div, mod = divmod(idx.values, 2)

        expected = Index(div), Index(mod)
        for r, e in zip(result, expected):
            tm.assert_index_equal(r, e)

    def test_divmod_ndarray(self, numeric_idx):
        idx = numeric_idx
        other = np.ones(idx.values.shape, dtype=idx.values.dtype) * 2

        result = divmod(idx, other)
        with np.errstate(all="ignore"):
            div, mod = divmod(idx.values, other)

        expected = Index(div), Index(mod)
        for r, e in zip(result, expected):
            tm.assert_index_equal(r, e)

    def test_divmod_series(self, numeric_idx):
        idx = numeric_idx
        other = np.ones(idx.values.shape, dtype=idx.values.dtype) * 2

        result = divmod(idx, Series(other))
        with np.errstate(all="ignore"):
            div, mod = divmod(idx.values, other)

        expected = Series(div), Series(mod)
        for r, e in zip(result, expected):
            tm.assert_series_equal(r, e)

    @pytest.mark.parametrize("other", [np.nan, 7, -23, 2.718, -3.14, np.inf])
    def test_ops_np_scalar(self, other):
        # Arithmetic with 0-d numpy scalars matches elementwise numpy results.
        vals = np.random.default_rng(2).standard_normal((5, 3))
        f = lambda x: pd.DataFrame(
            x, index=list("ABCDE"), columns=["jim", "joe", "jolie"]
        )

        df = f(vals)

        tm.assert_frame_equal(df / np.array(other), f(vals / other))
        tm.assert_frame_equal(np.array(other) * df, f(vals * other))
        tm.assert_frame_equal(df + np.array(other), f(vals + other))
        tm.assert_frame_equal(np.array(other) - df, f(other - vals))

    # TODO: This came from series.test.test_operators, needs cleanup
    def test_operators_frame(self):
        # rpow does not work with DataFrame
        ts = Series(
            np.arange(10, dtype=np.float64),
            index=date_range("2020-01-01", periods=10),
            name="ts",
        )
        ts.name = "ts"

        df = pd.DataFrame({"A": ts})

        tm.assert_series_equal(ts + ts, ts + df["A"], check_names=False)
        tm.assert_series_equal(ts**ts, ts ** df["A"], check_names=False)
        tm.assert_series_equal(ts < ts, ts < df["A"], check_names=False)
        tm.assert_series_equal(ts / ts, ts / df["A"], check_names=False)

    # TODO: this came from tests.series.test_analytics, needs cleanup and
    # de-duplication with test_modulo above
    def test_modulo2(self):
        with np.errstate(all="ignore"):
            # GH#3590, modulo as ints
            p = pd.DataFrame({"first": [3, 4, 5, 8], "second": [0, 0, 0, 3]})
            result = p["first"] % p["second"]
            expected = Series(p["first"].values % p["second"].values, dtype="float64")
            expected.iloc[0:3] = np.nan
            tm.assert_series_equal(result, expected)

            result = p["first"] % 0
            expected = Series(np.nan, index=p.index, name="first")
            tm.assert_series_equal(result, expected)

            p = p.astype("float64")
            result = p["first"] % p["second"]
            expected = Series(p["first"].values % p["second"].values)
            tm.assert_series_equal(result, expected)

            p = p.astype("float64")
            result = p["first"] % p["second"]
            result2 = p["second"] % p["first"]
            assert not result.equals(result2)

    def test_modulo_zero_int(self):
        # GH#9144
        with np.errstate(all="ignore"):
            s = Series([0, 1])

            result = s % 0
            expected = Series([np.nan, np.nan])
            tm.assert_series_equal(result, expected)

            result = 0 % s
            expected = Series([np.nan, 0.0])
            tm.assert_series_equal(result, expected)
882
+
883
+ class TestAdditionSubtraction:
884
+ # __add__, __sub__, __radd__, __rsub__, __iadd__, __isub__
885
+ # for non-timestamp/timedelta/period dtypes
886
+
887
+ @pytest.mark.parametrize(
888
+ "first, second, expected",
889
+ [
890
+ (
891
+ Series([1, 2, 3], index=list("ABC"), name="x"),
892
+ Series([2, 2, 2], index=list("ABD"), name="x"),
893
+ Series([3.0, 4.0, np.nan, np.nan], index=list("ABCD"), name="x"),
894
+ ),
895
+ (
896
+ Series([1, 2, 3], index=list("ABC"), name="x"),
897
+ Series([2, 2, 2, 2], index=list("ABCD"), name="x"),
898
+ Series([3, 4, 5, np.nan], index=list("ABCD"), name="x"),
899
+ ),
900
+ ],
901
+ )
902
+ def test_add_series(self, first, second, expected):
903
+ # GH#1134
904
+ tm.assert_series_equal(first + second, expected)
905
+ tm.assert_series_equal(second + first, expected)
906
+
907
    @pytest.mark.parametrize(
        "first, second, expected",
        [
            (
                pd.DataFrame({"x": [1, 2, 3]}, index=list("ABC")),
                pd.DataFrame({"x": [2, 2, 2]}, index=list("ABD")),
                pd.DataFrame({"x": [3.0, 4.0, np.nan, np.nan]}, index=list("ABCD")),
            ),
            (
                pd.DataFrame({"x": [1, 2, 3]}, index=list("ABC")),
                pd.DataFrame({"x": [2, 2, 2, 2]}, index=list("ABCD")),
                pd.DataFrame({"x": [3, 4, 5, np.nan]}, index=list("ABCD")),
            ),
        ],
    )
    def test_add_frames(self, first, second, expected):
        # GH#1134
        # DataFrame addition aligns like the Series case above, and is
        # commutative.
        tm.assert_frame_equal(first + second, expected)
        tm.assert_frame_equal(second + first, expected)
926
+
927
    # TODO: This came from series.test.test_operators, needs cleanup
    def test_series_frame_radd_bug(self, fixed_now_ts):
        # GH#353
        # str + Series/DataFrame of strings concatenates element-wise.
        vals = Series([str(i) for i in range(5)])
        result = "foo_" + vals
        expected = vals.map(lambda x: "foo_" + x)
        tm.assert_series_equal(result, expected)

        frame = pd.DataFrame({"vals": vals})
        result = "foo_" + frame
        expected = pd.DataFrame({"vals": vals.map(lambda x: "foo_" + x)})
        tm.assert_frame_equal(result, expected)

        ts = Series(
            np.arange(10, dtype=np.float64),
            index=date_range("2020-01-01", periods=10),
            name="ts",
        )

        # really raise this time
        # datetime + float Series is invalid in either order.
        fix_now = fixed_now_ts.to_pydatetime()
        msg = "|".join(
            [
                "unsupported operand type",
                # wrong error message, see https://github.com/numpy/numpy/issues/18832
                "Concatenation operation",
            ]
        )
        with pytest.raises(TypeError, match=msg):
            fix_now + ts

        with pytest.raises(TypeError, match=msg):
            ts + fix_now
960
+
961
    # TODO: This came from series.test.test_operators, needs cleanup
    def test_datetime64_with_index(self):
        # arithmetic integer ops with an index
        # ser - ser.index must be equivalent to ser - ser.index.to_series().
        ser = Series(np.random.default_rng(2).standard_normal(5))
        expected = ser - ser.index.to_series()
        result = ser - ser.index
        tm.assert_series_equal(result, expected)

        # GH#4629
        # arithmetic datetime64 ops with an index
        ser = Series(
            date_range("20130101", periods=5),
            index=date_range("20130101", periods=5),
        )
        expected = ser - ser.index.to_series()
        result = ser - ser.index
        tm.assert_series_equal(result, expected)

        msg = "cannot subtract PeriodArray from DatetimeArray"
        with pytest.raises(TypeError, match=msg):
            # GH#18850
            result = ser - ser.index.to_period()

        df = pd.DataFrame(
            np.random.default_rng(2).standard_normal((5, 2)),
            index=date_range("20130101", periods=5),
        )
        df["date"] = pd.Timestamp("20130102")
        df["expected"] = df["date"] - df.index.to_series()
        df["result"] = df["date"] - df.index
        tm.assert_series_equal(df["result"], df["expected"], check_names=False)
992
+
993
+ # TODO: taken from tests.frame.test_operators, needs cleanup
994
+ def test_frame_operators(self, float_frame):
995
+ frame = float_frame
996
+
997
+ garbage = np.random.default_rng(2).random(4)
998
+ colSeries = Series(garbage, index=np.array(frame.columns))
999
+
1000
+ idSum = frame + frame
1001
+ seriesSum = frame + colSeries
1002
+
1003
+ for col, series in idSum.items():
1004
+ for idx, val in series.items():
1005
+ origVal = frame[col][idx] * 2
1006
+ if not np.isnan(val):
1007
+ assert val == origVal
1008
+ else:
1009
+ assert np.isnan(origVal)
1010
+
1011
+ for col, series in seriesSum.items():
1012
+ for idx, val in series.items():
1013
+ origVal = frame[col][idx] + colSeries[col]
1014
+ if not np.isnan(val):
1015
+ assert val == origVal
1016
+ else:
1017
+ assert np.isnan(origVal)
1018
+
1019
+ def test_frame_operators_col_align(self, float_frame):
1020
+ frame2 = pd.DataFrame(float_frame, columns=["D", "C", "B", "A"])
1021
+ added = frame2 + frame2
1022
+ expected = frame2 * 2
1023
+ tm.assert_frame_equal(added, expected)
1024
+
1025
+ def test_frame_operators_none_to_nan(self):
1026
+ df = pd.DataFrame({"a": ["a", None, "b"]})
1027
+ tm.assert_frame_equal(df + df, pd.DataFrame({"a": ["aa", np.nan, "bb"]}))
1028
+
1029
+ @pytest.mark.parametrize("dtype", ("float", "int64"))
1030
+ def test_frame_operators_empty_like(self, dtype):
1031
+ # Test for issue #10181
1032
+ frames = [
1033
+ pd.DataFrame(dtype=dtype),
1034
+ pd.DataFrame(columns=["A"], dtype=dtype),
1035
+ pd.DataFrame(index=[0], dtype=dtype),
1036
+ ]
1037
+ for df in frames:
1038
+ assert (df + df).equals(df)
1039
+ tm.assert_frame_equal(df + df, df)
1040
+
1041
+ @pytest.mark.parametrize(
1042
+ "func",
1043
+ [lambda x: x * 2, lambda x: x[::2], lambda x: 5],
1044
+ ids=["multiply", "slice", "constant"],
1045
+ )
1046
+ def test_series_operators_arithmetic(self, all_arithmetic_functions, func):
1047
+ op = all_arithmetic_functions
1048
+ series = Series(
1049
+ np.arange(10, dtype=np.float64),
1050
+ index=date_range("2020-01-01", periods=10),
1051
+ name="ts",
1052
+ )
1053
+ other = func(series)
1054
+ compare_op(series, other, op)
1055
+
1056
+ @pytest.mark.parametrize(
1057
+ "func", [lambda x: x + 1, lambda x: 5], ids=["add", "constant"]
1058
+ )
1059
+ def test_series_operators_compare(self, comparison_op, func):
1060
+ op = comparison_op
1061
+ series = Series(
1062
+ np.arange(10, dtype=np.float64),
1063
+ index=date_range("2020-01-01", periods=10),
1064
+ name="ts",
1065
+ )
1066
+ other = func(series)
1067
+ compare_op(series, other, op)
1068
+
1069
    @pytest.mark.parametrize(
        "func",
        [lambda x: x * 2, lambda x: x[::2], lambda x: 5],
        ids=["multiply", "slice", "constant"],
    )
    def test_divmod(self, func):
        # divmod on a Series should match numpy's divmod on the underlying
        # values, while preserving the Series name and index.
        series = Series(
            np.arange(10, dtype=np.float64),
            index=date_range("2020-01-01", periods=10),
            name="ts",
        )
        other = func(series)
        results = divmod(series, other)
        if isinstance(other, abc.Iterable) and len(series) != len(other):
            # if the lengths don't match, this is the test where we use
            # `tser[::2]`. Pad every other value in `other_np` with nan.
            other_np = []
            for n in other:
                other_np.append(n)
                other_np.append(np.nan)
        else:
            other_np = other
        other_np = np.asarray(other_np)
        with np.errstate(all="ignore"):
            expecteds = divmod(series.values, np.asarray(other_np))

        for result, expected in zip(results, expecteds):
            # check the values, name, and index separately
            tm.assert_almost_equal(np.asarray(result), expected)

            assert result.name == series.name
            # alignment drops the freq, so compare against a freq-less index
            tm.assert_index_equal(result.index, series.index._with_freq(None))
1101
+
1102
+ def test_series_divmod_zero(self):
1103
+ # Check that divmod uses pandas convention for division by zero,
1104
+ # which does not match numpy.
1105
+ # pandas convention has
1106
+ # 1/0 == np.inf
1107
+ # -1/0 == -np.inf
1108
+ # 1/-0.0 == -np.inf
1109
+ # -1/-0.0 == np.inf
1110
+ tser = Series(
1111
+ np.arange(1, 11, dtype=np.float64),
1112
+ index=date_range("2020-01-01", periods=10),
1113
+ name="ts",
1114
+ )
1115
+ other = tser * 0
1116
+
1117
+ result = divmod(tser, other)
1118
+ exp1 = Series([np.inf] * len(tser), index=tser.index, name="ts")
1119
+ exp2 = Series([np.nan] * len(tser), index=tser.index, name="ts")
1120
+ tm.assert_series_equal(result[0], exp1)
1121
+ tm.assert_series_equal(result[1], exp2)
1122
+
1123
+
1124
class TestUFuncCompat:
    # Numpy ufuncs applied to Index/RangeIndex/Series should return the same
    # box type, with integer dtypes coerced to float64 where the ufunc
    # produces floats.

    # TODO: add more dtypes
    @pytest.mark.parametrize("holder", [Index, RangeIndex, Series])
    @pytest.mark.parametrize("dtype", [np.int64, np.uint64, np.float64])
    def test_ufunc_compat(self, holder, dtype):
        # np.sin returns the same box with the ufunc applied to the
        # underlying values; the name is preserved.
        box = Series if holder is Series else Index

        if holder is RangeIndex:
            if dtype != np.int64:
                pytest.skip(f"dtype {dtype} not relevant for RangeIndex")
            idx = RangeIndex(0, 5, name="foo")
        else:
            idx = holder(np.arange(5, dtype=dtype), name="foo")
        result = np.sin(idx)
        expected = box(np.sin(np.arange(5, dtype=dtype)), name="foo")
        tm.assert_equal(result, expected)

    # TODO: add more dtypes
    @pytest.mark.parametrize("holder", [Index, Series])
    @pytest.mark.parametrize("dtype", [np.int64, np.uint64, np.float64])
    def test_ufunc_coercions(self, holder, dtype):
        # ufuncs and binary float-scalar ops coerce to float64 ("f8") while
        # preserving the box type and name.
        idx = holder([1, 2, 3, 4, 5], dtype=dtype, name="x")
        box = Series if holder is Series else Index

        result = np.sqrt(idx)
        assert result.dtype == "f8" and isinstance(result, box)
        exp = Index(np.sqrt(np.array([1, 2, 3, 4, 5], dtype=np.float64)), name="x")
        exp = tm.box_expected(exp, box)
        tm.assert_equal(result, exp)

        result = np.divide(idx, 2.0)
        assert result.dtype == "f8" and isinstance(result, box)
        exp = Index([0.5, 1.0, 1.5, 2.0, 2.5], dtype=np.float64, name="x")
        exp = tm.box_expected(exp, box)
        tm.assert_equal(result, exp)

        # _evaluate_numeric_binop
        result = idx + 2.0
        assert result.dtype == "f8" and isinstance(result, box)
        exp = Index([3.0, 4.0, 5.0, 6.0, 7.0], dtype=np.float64, name="x")
        exp = tm.box_expected(exp, box)
        tm.assert_equal(result, exp)

        result = idx - 2.0
        assert result.dtype == "f8" and isinstance(result, box)
        exp = Index([-1.0, 0.0, 1.0, 2.0, 3.0], dtype=np.float64, name="x")
        exp = tm.box_expected(exp, box)
        tm.assert_equal(result, exp)

        result = idx * 1.0
        assert result.dtype == "f8" and isinstance(result, box)
        exp = Index([1.0, 2.0, 3.0, 4.0, 5.0], dtype=np.float64, name="x")
        exp = tm.box_expected(exp, box)
        tm.assert_equal(result, exp)

        result = idx / 2.0
        assert result.dtype == "f8" and isinstance(result, box)
        exp = Index([0.5, 1.0, 1.5, 2.0, 2.5], dtype=np.float64, name="x")
        exp = tm.box_expected(exp, box)
        tm.assert_equal(result, exp)

    # TODO: add more dtypes
    @pytest.mark.parametrize("holder", [Index, Series])
    @pytest.mark.parametrize("dtype", [np.int64, np.uint64, np.float64])
    def test_ufunc_multiple_return_values(self, holder, dtype):
        # ufuncs with multiple outputs (np.modf) box each output separately.
        obj = holder([1, 2, 3], dtype=dtype, name="x")
        box = Series if holder is Series else Index

        result = np.modf(obj)
        assert isinstance(result, tuple)
        exp1 = Index([0.0, 0.0, 0.0], dtype=np.float64, name="x")
        exp2 = Index([1.0, 2.0, 3.0], dtype=np.float64, name="x")
        tm.assert_equal(result[0], tm.box_expected(exp1, box))
        tm.assert_equal(result[1], tm.box_expected(exp2, box))

    def test_ufunc_at(self):
        # np.add.at mutates the Series values in place at the given positions.
        s = Series([0, 1, 2], index=[1, 2, 3], name="x")
        np.add.at(s, [0, 2], 10)
        expected = Series([10, 1, 12], index=[1, 2, 3], name="x")
        tm.assert_series_equal(s, expected)
1205
+
1206
class TestObjectDtypeEquivalence:
    # Tests that arithmetic operations match operations executed elementwise

    @pytest.mark.parametrize("dtype", [None, object])
    def test_numarr_with_dtype_add_nan(self, dtype, box_with_array):
        # Adding NaN propagates to every element, for both numeric and
        # object dtype, in any box.
        box = box_with_array
        ser = Series([1, 2, 3], dtype=dtype)
        expected = Series([np.nan, np.nan, np.nan], dtype=dtype)

        ser = tm.box_expected(ser, box)
        expected = tm.box_expected(expected, box)

        result = np.nan + ser
        tm.assert_equal(result, expected)

        result = ser + np.nan
        tm.assert_equal(result, expected)

    @pytest.mark.parametrize("dtype", [None, object])
    def test_numarr_with_dtype_add_int(self, dtype, box_with_array):
        # Scalar integer add/radd is elementwise for numeric and object dtype.
        box = box_with_array
        ser = Series([1, 2, 3], dtype=dtype)
        expected = Series([2, 3, 4], dtype=dtype)

        ser = tm.box_expected(ser, box)
        expected = tm.box_expected(expected, box)

        result = 1 + ser
        tm.assert_equal(result, expected)

        result = ser + 1
        tm.assert_equal(result, expected)

    # TODO: moved from tests.series.test_operators; needs cleanup
    @pytest.mark.parametrize(
        "op",
        [operator.add, operator.sub, operator.mul, operator.truediv, operator.floordiv],
    )
    def test_operators_reverse_object(self, op):
        # GH#56: reversed arithmetic with a float scalar on object dtype
        # matches the float-dtype result.
        arr = Series(
            np.random.default_rng(2).standard_normal(10),
            index=np.arange(10),
            dtype=object,
        )

        result = op(1.0, arr)
        expected = op(1.0, arr.astype(float))
        tm.assert_series_equal(result.astype(float), expected)
1256
+
1257
class TestNumericArithmeticUnsorted:
    # Tests in this class have been moved from type-specific test modules
    # but not yet sorted, parametrized, and de-duplicated
    @pytest.mark.parametrize(
        "op",
        [
            operator.add,
            operator.sub,
            operator.mul,
            operator.floordiv,
            operator.truediv,
        ],
    )
    @pytest.mark.parametrize(
        "idx1",
        [
            RangeIndex(0, 10, 1),
            RangeIndex(0, 20, 2),
            RangeIndex(-10, 10, 2),
            RangeIndex(5, -5, -1),
        ],
    )
    @pytest.mark.parametrize(
        "idx2",
        [
            RangeIndex(0, 10, 1),
            RangeIndex(0, 20, 2),
            RangeIndex(-10, 10, 2),
            RangeIndex(5, -5, -1),
        ],
    )
    def test_binops_index(self, op, idx1, idx2):
        # RangeIndex/RangeIndex binops match the equivalent Index/Index result
        idx1 = idx1._rename("foo")
        idx2 = idx2._rename("bar")
        result = op(idx1, idx2)
        expected = op(Index(idx1.to_numpy()), Index(idx2.to_numpy()))
        tm.assert_index_equal(result, expected, exact="equiv")

    @pytest.mark.parametrize(
        "op",
        [
            operator.add,
            operator.sub,
            operator.mul,
            operator.floordiv,
            operator.truediv,
        ],
    )
    @pytest.mark.parametrize(
        "idx",
        [
            RangeIndex(0, 10, 1),
            RangeIndex(0, 20, 2),
            RangeIndex(-10, 10, 2),
            RangeIndex(5, -5, -1),
        ],
    )
    @pytest.mark.parametrize("scalar", [-1, 1, 2])
    def test_binops_index_scalar(self, op, idx, scalar):
        # RangeIndex/scalar binops match the equivalent Index/scalar result
        result = op(idx, scalar)
        expected = op(Index(idx.to_numpy()), scalar)
        tm.assert_index_equal(result, expected, exact="equiv")

    @pytest.mark.parametrize("idx1", [RangeIndex(0, 10, 1), RangeIndex(0, 20, 2)])
    @pytest.mark.parametrize("idx2", [RangeIndex(0, 10, 1), RangeIndex(0, 20, 2)])
    def test_binops_index_pow(self, idx1, idx2):
        # numpy does not allow powers of negative integers so test separately
        # https://github.com/numpy/numpy/pull/8127
        idx1 = idx1._rename("foo")
        idx2 = idx2._rename("bar")
        result = pow(idx1, idx2)
        expected = pow(Index(idx1.to_numpy()), Index(idx2.to_numpy()))
        tm.assert_index_equal(result, expected, exact="equiv")

    @pytest.mark.parametrize("idx", [RangeIndex(0, 10, 1), RangeIndex(0, 20, 2)])
    @pytest.mark.parametrize("scalar", [1, 2])
    def test_binops_index_scalar_pow(self, idx, scalar):
        # numpy does not allow powers of negative integers so test separately
        # https://github.com/numpy/numpy/pull/8127
        result = pow(idx, scalar)
        expected = pow(Index(idx.to_numpy()), scalar)
        tm.assert_index_equal(result, expected, exact="equiv")

    # TODO: divmod?
    @pytest.mark.parametrize(
        "op",
        [
            operator.add,
            operator.sub,
            operator.mul,
            operator.floordiv,
            operator.truediv,
            operator.pow,
            operator.mod,
        ],
    )
    def test_arithmetic_with_frame_or_series(self, op):
        # check that we return NotImplemented when operating with Series
        # or DataFrame
        index = RangeIndex(5)
        other = Series(np.random.default_rng(2).standard_normal(5))

        expected = op(Series(index), other)
        result = op(index, other)
        tm.assert_series_equal(result, expected)

        other = pd.DataFrame(np.random.default_rng(2).standard_normal((2, 5)))
        expected = op(pd.DataFrame([index, index]), other)
        result = op(index, other)
        tm.assert_frame_equal(result, expected)

    def test_numeric_compat2(self):
        # validate that we are handling the RangeIndex overrides to numeric ops
        # and returning RangeIndex where possible

        idx = RangeIndex(0, 10, 2)

        result = idx * 2
        expected = RangeIndex(0, 20, 4)
        tm.assert_index_equal(result, expected, exact=True)

        result = idx + 2
        expected = RangeIndex(2, 12, 2)
        tm.assert_index_equal(result, expected, exact=True)

        result = idx - 2
        expected = RangeIndex(-2, 8, 2)
        tm.assert_index_equal(result, expected, exact=True)

        # truediv always returns a float-dtype result
        result = idx / 2
        expected = RangeIndex(0, 5, 1).astype("float64")
        tm.assert_index_equal(result, expected, exact=True)

        result = idx / 4
        expected = RangeIndex(0, 10, 2) / 4
        tm.assert_index_equal(result, expected, exact=True)

        result = idx // 1
        expected = idx
        tm.assert_index_equal(result, expected, exact=True)

        # __mul__
        result = idx * idx
        expected = Index(idx.values * idx.values)
        tm.assert_index_equal(result, expected, exact=True)

        # __pow__
        idx = RangeIndex(0, 1000, 2)
        result = idx**2
        expected = Index(idx._values) ** 2
        tm.assert_index_equal(Index(result.values), expected, exact=True)

    @pytest.mark.parametrize(
        "idx, div, expected",
        [
            # TODO: add more dtypes
            (RangeIndex(0, 1000, 2), 2, RangeIndex(0, 500, 1)),
            (RangeIndex(-99, -201, -3), -3, RangeIndex(33, 67, 1)),
            (
                RangeIndex(0, 1000, 1),
                2,
                Index(RangeIndex(0, 1000, 1)._values) // 2,
            ),
            (
                RangeIndex(0, 100, 1),
                2.0,
                Index(RangeIndex(0, 100, 1)._values) // 2.0,
            ),
            (RangeIndex(0), 50, RangeIndex(0)),
            (RangeIndex(2, 4, 2), 3, RangeIndex(0, 1, 1)),
            (RangeIndex(-5, -10, -6), 4, RangeIndex(-2, -1, 1)),
            (RangeIndex(-100, -200, 3), 2, RangeIndex(0)),
        ],
    )
    def test_numeric_compat2_floordiv(self, idx, div, expected):
        # __floordiv__
        tm.assert_index_equal(idx // div, expected, exact=True)

    @pytest.mark.parametrize("dtype", [np.int64, np.float64])
    @pytest.mark.parametrize("delta", [1, 0, -1])
    def test_addsub_arithmetic(self, dtype, delta):
        # GH#8142
        delta = dtype(delta)
        index = Index([10, 11, 12], dtype=dtype)
        result = index + delta
        expected = Index(index.values + delta, dtype=dtype)
        tm.assert_index_equal(result, expected)

        # this subtraction used to fail
        result = index - delta
        expected = Index(index.values - delta, dtype=dtype)
        tm.assert_index_equal(result, expected)

        tm.assert_index_equal(index + index, 2 * index)
        tm.assert_index_equal(index - index, 0 * index)
        assert not (index - index).empty

    def test_pow_nan_with_zero(self, box_with_array):
        # nan ** 0 is 1.0, for every box
        left = Index([np.nan, np.nan, np.nan])
        right = Index([0, 0, 0])
        expected = Index([1.0, 1.0, 1.0])

        left = tm.box_expected(left, box_with_array)
        right = tm.box_expected(right, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = left**right
        tm.assert_equal(result, expected)
1466
+
1467
def test_fill_value_inf_masking():
    """GH#27464: rfloordiv with fill_value must mask 0/1 with Inf, not NaN."""
    left = pd.DataFrame({"A": [0, 1, 2], "B": [1.1, None, 1.1]})
    right = pd.DataFrame({"A": [1.1, 1.2, 1.3]}, index=[0, 2, 3])

    result = left.rfloordiv(right, fill_value=1)

    expected = pd.DataFrame(
        {"A": [np.inf, 1.0, 0.0, 1.0], "B": [0.0, np.nan, 0.0, np.nan]}
    )
    tm.assert_frame_equal(result, expected)
1479
+
1480
+
1481
def test_dataframe_div_silenced():
    """GH#26793: div with fill_value on partially-overlapping frames warns nothing."""
    numerator = pd.DataFrame(
        {
            "A": np.arange(10),
            "B": [np.nan, 1, 2, 3, 4] * 2,
            "C": [np.nan] * 10,
            "D": np.arange(10),
        },
        index=list("abcdefghij"),
        columns=list("ABCD"),
    )
    denominator = pd.DataFrame(
        np.random.default_rng(2).standard_normal((10, 4)),
        index=list("abcdefghjk"),
        columns=list("ABCX"),
    )
    with tm.assert_produces_warning(None):
        numerator.div(denominator, fill_value=0)
1500
+
1501
+
1502
@pytest.mark.parametrize(
    "data, expected_data",
    [([0, 1, 2], [0, 2, 4])],
)
def test_integer_array_add_list_like(
    box_pandas_1d_array, box_1d_array, data, expected_data
):
    # GH22606 Verify operators with IntegerArray and list-likes
    masked = array(data, dtype="Int64")
    container = box_pandas_1d_array(masked)
    left = container + box_1d_array(data)
    right = box_1d_array(data) + container

    # Result box follows a Series > Index > array priority.
    if Series in [box_1d_array, box_pandas_1d_array]:
        expected_cls = Series
    elif Index in [box_1d_array, box_pandas_1d_array]:
        expected_cls = Index
    else:
        expected_cls = array

    expected = expected_cls(expected_data, dtype="Int64")

    tm.assert_equal(left, expected)
    tm.assert_equal(right, expected)
1526
+
1527
+
1528
def test_sub_multiindex_swapped_levels():
    """GH#9952: subtraction aligns on MultiIndex labels even with swapped levels."""
    mi = pd.MultiIndex.from_product(
        [["a", "b"], [0, 1, 2]], names=["levA", "levB"]
    )
    df = pd.DataFrame(
        {"a": np.random.default_rng(2).standard_normal(6)}, index=mi
    )
    swapped = df.copy()
    swapped.index = swapped.index.swaplevel(0, 1)
    result = df - swapped
    expected = pd.DataFrame([0.0] * 6, columns=["a"], index=df.index)
    tm.assert_frame_equal(result, expected)
1541
+
1542
+
1543
@pytest.mark.parametrize("power", [1, 2, 5])
@pytest.mark.parametrize("string_size", [0, 1, 2, 5])
def test_empty_str_comparison(power, string_size):
    """GH#37348: comparing an int64 frame to whitespace strings is all-False."""
    values = np.array(range(10**power))
    frame = pd.DataFrame(values, dtype=np.int64)
    padding = " " * string_size

    result = frame == padding
    expected = pd.DataFrame(np.zeros(frame.shape, dtype=bool))
    tm.assert_frame_equal(result, expected)
1554
+
1555
+
1556
def test_series_add_sub_with_UInt64():
    """GH#22023: int64 +/- UInt64 upcasts to the masked Float64 dtype."""
    plain = Series([1, 2, 3])
    unsigned = Series([2, 1, 3], dtype="UInt64")

    tm.assert_series_equal(plain + unsigned, Series([3, 3, 6], dtype="Float64"))
    tm.assert_series_equal(plain - unsigned, Series([-1, 1, 0], dtype="Float64"))
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_object.py ADDED
@@ -0,0 +1,420 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Arithmetic tests for DataFrame/Series/Index/Array classes that should
2
+ # behave identically.
3
+ # Specifically for object dtype
4
+ import datetime
5
+ from decimal import Decimal
6
+ import operator
7
+
8
+ import numpy as np
9
+ import pytest
10
+
11
+ from pandas._config import using_pyarrow_string_dtype
12
+
13
+ import pandas.util._test_decorators as td
14
+
15
+ import pandas as pd
16
+ from pandas import (
17
+ Series,
18
+ Timestamp,
19
+ option_context,
20
+ )
21
+ import pandas._testing as tm
22
+ from pandas.core import ops
23
+
24
+ # ------------------------------------------------------------------
25
+ # Comparisons
26
+
27
+
28
class TestObjectComparisons:
    # Comparison semantics for object-dtype Series.

    def test_comparison_object_numeric_nas(self, comparison_op):
        # Object-dtype numeric comparisons match the float-dtype result,
        # including NaN propagation from the shifted values.
        ser = Series(np.random.default_rng(2).standard_normal(10), dtype=object)
        shifted = ser.shift(2)

        func = comparison_op

        result = func(ser, shifted)
        expected = func(ser.astype(float), shifted.astype(float))
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize(
        "infer_string", [False, pytest.param(True, marks=td.skip_if_no("pyarrow"))]
    )
    def test_object_comparisons(self, infer_string):
        # Scalar comparisons on a string Series, both with and without the
        # future.infer_string option (pyarrow-backed strings).
        with option_context("future.infer_string", infer_string):
            ser = Series(["a", "b", np.nan, "c", "a"])

            result = ser == "a"
            expected = Series([True, False, False, False, True])
            tm.assert_series_equal(result, expected)

            result = ser < "a"
            expected = Series([False, False, False, False, False])
            tm.assert_series_equal(result, expected)

            result = ser != "a"
            expected = -(ser == "a")
            tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("dtype", [None, object])
    def test_more_na_comparisons(self, dtype):
        # NaN compares unequal to everything, including itself.
        left = Series(["a", np.nan, "c"], dtype=dtype)
        right = Series(["a", np.nan, "d"], dtype=dtype)

        result = left == right
        expected = Series([True, False, False])
        tm.assert_series_equal(result, expected)

        result = left != right
        expected = Series([False, True, True])
        tm.assert_series_equal(result, expected)

        result = left == np.nan
        expected = Series([False, False, False])
        tm.assert_series_equal(result, expected)

        result = left != np.nan
        expected = Series([True, True, True])
        tm.assert_series_equal(result, expected)
78
+
79
+
80
+ # ------------------------------------------------------------------
81
+ # Arithmetic
82
+
83
+
84
class TestArithmetic:
    # Arithmetic on object-dtype Series/Index/arrays.

    def test_add_period_to_array_of_offset(self):
        # GH#50162: adding a Period to an object Index of offsets is
        # elementwise and commutative.
        per = pd.Period("2012-1-1", freq="D")
        pi = pd.period_range("2012-1-1", periods=10, freq="D")
        idx = per - pi

        expected = pd.Index([x + per for x in idx], dtype=object)
        result = idx + per
        tm.assert_index_equal(result, expected)

        result = per + idx
        tm.assert_index_equal(result, expected)

    # TODO: parametrize
    def test_pow_ops_object(self):
        # GH#22922
        # pow is weird with masking & 1, so testing here
        a = Series([1, np.nan, 1, np.nan], dtype=object)
        b = Series([1, np.nan, np.nan, 1], dtype=object)
        result = a**b
        expected = Series(a.values**b.values, dtype=object)
        tm.assert_series_equal(result, expected)

        result = b**a
        expected = Series(b.values**a.values, dtype=object)

        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("op", [operator.add, ops.radd])
    @pytest.mark.parametrize("other", ["category", "Int64"])
    def test_add_extension_scalar(self, other, box_with_array, op):
        # GH#22378
        # Check that scalars satisfying is_extension_array_dtype(obj)
        # do not incorrectly try to dispatch to an ExtensionArray operation
        arr = Series(["a", "b", "c"])
        expected = Series([op(x, other) for x in arr])

        arr = tm.box_expected(arr, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = op(arr, other)
        tm.assert_equal(result, expected)

    def test_objarr_add_str(self, box_with_array):
        # string-Series + scalar str concatenates elementwise; NaN propagates
        ser = Series(["x", np.nan, "x"])
        expected = Series(["xa", np.nan, "xa"])

        ser = tm.box_expected(ser, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = ser + "a"
        tm.assert_equal(result, expected)

    def test_objarr_radd_str(self, box_with_array):
        # scalar str + string-Series prepends elementwise; NaN propagates
        ser = Series(["x", np.nan, "x"])
        expected = Series(["ax", np.nan, "ax"])

        ser = tm.box_expected(ser, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        result = "a" + ser
        tm.assert_equal(result, expected)

    @pytest.mark.parametrize(
        "data",
        [
            [1, 2, 3],
            [1.1, 2.2, 3.3],
            [Timestamp("2011-01-01"), Timestamp("2011-01-02"), pd.NaT],
            ["x", "y", 1],
        ],
    )
    @pytest.mark.parametrize("dtype", [None, object])
    def test_objarr_radd_str_invalid(self, dtype, data, box_with_array):
        # str radd against non-string contents raises TypeError
        ser = Series(data, dtype=dtype)

        ser = tm.box_expected(ser, box_with_array)
        msg = "|".join(
            [
                "can only concatenate str",
                "did not contain a loop with signature matching types",
                "unsupported operand type",
                "must be str",
            ]
        )
        with pytest.raises(TypeError, match=msg):
            "foo_" + ser

    @pytest.mark.parametrize("op", [operator.add, ops.radd, operator.sub, ops.rsub])
    def test_objarr_add_invalid(self, op, box_with_array):
        # invalid ops
        box = box_with_array

        obj_ser = Series(list("abc"), dtype=object, name="objects")

        obj_ser = tm.box_expected(obj_ser, box)
        msg = "|".join(
            [
                "can only concatenate str",
                "unsupported operand type",
                "must be str",
                "has no kernel",
            ]
        )
        with pytest.raises(Exception, match=msg):
            op(obj_ser, 1)
        with pytest.raises(Exception, match=msg):
            op(obj_ser, np.array(1, dtype=np.int64))

    # TODO: Moved from tests.series.test_operators; needs cleanup
    def test_operators_na_handling(self):
        # string concat is elementwise; NaN entries stay NaN
        ser = Series(["foo", "bar", "baz", np.nan])
        result = "prefix_" + ser
        expected = Series(["prefix_foo", "prefix_bar", "prefix_baz", np.nan])
        tm.assert_series_equal(result, expected)

        result = ser + "_suffix"
        expected = Series(["foo_suffix", "bar_suffix", "baz_suffix", np.nan])
        tm.assert_series_equal(result, expected)

    # TODO: parametrize over box
    @pytest.mark.parametrize("dtype", [None, object])
    def test_series_with_dtype_radd_timedelta(self, dtype):
        # note this test is _not_ aimed at timedelta64-dtyped Series
        # as of 2.0 we retain object dtype when ser.dtype == object
        ser = Series(
            [pd.Timedelta("1 days"), pd.Timedelta("2 days"), pd.Timedelta("3 days")],
            dtype=dtype,
        )
        expected = Series(
            [pd.Timedelta("4 days"), pd.Timedelta("5 days"), pd.Timedelta("6 days")],
            dtype=dtype,
        )

        result = pd.Timedelta("3 days") + ser
        tm.assert_series_equal(result, expected)

        result = ser + pd.Timedelta("3 days")
        tm.assert_series_equal(result, expected)

    # TODO: cleanup & parametrize over box
    def test_mixed_timezone_series_ops_object(self):
        # GH#13043: mixed-timezone Timestamps stay object dtype but still
        # support Timedelta arithmetic and elementwise subtraction.
        ser = Series(
            [
                Timestamp("2015-01-01", tz="US/Eastern"),
                Timestamp("2015-01-01", tz="Asia/Tokyo"),
            ],
            name="xxx",
        )
        assert ser.dtype == object

        exp = Series(
            [
                Timestamp("2015-01-02", tz="US/Eastern"),
                Timestamp("2015-01-02", tz="Asia/Tokyo"),
            ],
            name="xxx",
        )
        tm.assert_series_equal(ser + pd.Timedelta("1 days"), exp)
        tm.assert_series_equal(pd.Timedelta("1 days") + ser, exp)

        # object series & object series
        ser2 = Series(
            [
                Timestamp("2015-01-03", tz="US/Eastern"),
                Timestamp("2015-01-05", tz="Asia/Tokyo"),
            ],
            name="xxx",
        )
        assert ser2.dtype == object
        exp = Series(
            [pd.Timedelta("2 days"), pd.Timedelta("4 days")], name="xxx", dtype=object
        )
        tm.assert_series_equal(ser2 - ser, exp)
        tm.assert_series_equal(ser - ser2, -exp)

        ser = Series(
            [pd.Timedelta("01:00:00"), pd.Timedelta("02:00:00")],
            name="xxx",
            dtype=object,
        )
        assert ser.dtype == object

        exp = Series(
            [pd.Timedelta("01:30:00"), pd.Timedelta("02:30:00")],
            name="xxx",
            dtype=object,
        )
        tm.assert_series_equal(ser + pd.Timedelta("00:30:00"), exp)
        tm.assert_series_equal(pd.Timedelta("00:30:00") + ser, exp)

    # TODO: cleanup & parametrize over box
    def test_iadd_preserves_name(self):
        # GH#17067, GH#19723 __iadd__ and __isub__ should preserve index name
        ser = Series([1, 2, 3])
        ser.index.name = "foo"

        ser.index += 1
        assert ser.index.name == "foo"

        ser.index -= 1
        assert ser.index.name == "foo"

    def test_add_string(self):
        # from bug report
        index = pd.Index(["a", "b", "c"])
        index2 = index + "foo"

        assert "a" not in index2
        assert "afoo" in index2

    def test_iadd_string(self):
        index = pd.Index(["a", "b", "c"])
        # doesn't fail test unless there is a check before `+=`
        assert "a" in index

        index += "_x"
        assert "a_x" in index

    @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="add doesn't work")
    def test_add(self):
        # Index + Index / list concatenates elementwise for string contents
        index = pd.Index([str(i) for i in range(10)])
        expected = pd.Index(index.values * 2)
        tm.assert_index_equal(index + index, expected)
        tm.assert_index_equal(index + index.tolist(), expected)
        tm.assert_index_equal(index.tolist() + index, expected)

        # test add and radd
        index = pd.Index(list("abc"))
        expected = pd.Index(["a1", "b1", "c1"])
        tm.assert_index_equal(index + "1", expected)
        expected = pd.Index(["1a", "1b", "1c"])
        tm.assert_index_equal("1" + index, expected)

    def test_sub_fail(self, using_infer_string):
        # string subtraction is never defined; the error type depends on the
        # backing (pyarrow vs numpy object)
        index = pd.Index([str(i) for i in range(10)])

        if using_infer_string:
            import pyarrow as pa

            err = pa.lib.ArrowNotImplementedError
            msg = "has no kernel"
        else:
            err = TypeError
            msg = "unsupported operand type|Cannot broadcast"
        with pytest.raises(err, match=msg):
            index - "a"
        with pytest.raises(err, match=msg):
            index - index
        with pytest.raises(err, match=msg):
            index - index.tolist()
        with pytest.raises(err, match=msg):
            index.tolist() - index

    def test_sub_object(self):
        # GH#19369: object-dtype sub works elementwise for Decimal contents,
        # raises for mismatched types
        index = pd.Index([Decimal(1), Decimal(2)])
        expected = pd.Index([Decimal(0), Decimal(1)])

        result = index - Decimal(1)
        tm.assert_index_equal(result, expected)

        result = index - pd.Index([Decimal(1), Decimal(1)])
        tm.assert_index_equal(result, expected)

        msg = "unsupported operand type"
        with pytest.raises(TypeError, match=msg):
            index - "foo"

        with pytest.raises(TypeError, match=msg):
            index - np.array([2, "foo"], dtype=object)

    def test_rsub_object(self, fixed_now_ts):
        # GH#19369: reversed subtraction mirrors test_sub_object
        index = pd.Index([Decimal(1), Decimal(2)])
        expected = pd.Index([Decimal(1), Decimal(0)])

        result = Decimal(2) - index
        tm.assert_index_equal(result, expected)

        result = np.array([Decimal(2), Decimal(2)]) - index
        tm.assert_index_equal(result, expected)

        msg = "unsupported operand type"
        with pytest.raises(TypeError, match=msg):
            "foo" - index

        with pytest.raises(TypeError, match=msg):
            np.array([True, fixed_now_ts]) - index
377
+
378
+ class MyIndex(pd.Index):
379
+ # Simple index subclass that tracks ops calls.
380
+
381
+ _calls: int
382
+
383
+ @classmethod
384
+ def _simple_new(cls, values, name=None, dtype=None):
385
+ result = object.__new__(cls)
386
+ result._data = values
387
+ result._name = name
388
+ result._calls = 0
389
+ result._reset_identity()
390
+
391
+ return result
392
+
393
+ def __add__(self, other):
394
+ self._calls += 1
395
+ return self._simple_new(self._data)
396
+
397
+ def __radd__(self, other):
398
+ return self.__add__(other)
399
+
400
+
401
@pytest.mark.parametrize(
    "other",
    [
        [datetime.timedelta(1), datetime.timedelta(2)],
        [datetime.datetime(2000, 1, 1), datetime.datetime(2000, 1, 2)],
        [pd.Period("2000"), pd.Period("2001")],
        ["a", "b"],
    ],
    ids=["timedelta", "datetime", "period", "object"],
)
def test_index_ops_defer_to_unknown_subclasses(other):
    # https://github.com/pandas-dev/pandas/issues/31109
    # pd.Index must return NotImplemented so the unknown subclass's
    # __radd__ gets control.
    base_values = np.array(
        [datetime.date(2000, 1, 1), datetime.date(2000, 1, 2)], dtype=object
    )
    tracker = MyIndex._simple_new(base_values)
    lhs = pd.Index(other)
    result = lhs + tracker
    assert isinstance(result, MyIndex)
    assert tracker._calls == 1
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_period.py ADDED
@@ -0,0 +1,1675 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Arithmetic tests for DataFrame/Series/Index/Array classes that should
2
+ # behave identically.
3
+ # Specifically for Period dtype
4
+ import operator
5
+
6
+ import numpy as np
7
+ import pytest
8
+
9
+ from pandas._libs.tslibs import (
10
+ IncompatibleFrequency,
11
+ Period,
12
+ Timestamp,
13
+ to_offset,
14
+ )
15
+ from pandas.errors import PerformanceWarning
16
+
17
+ import pandas as pd
18
+ from pandas import (
19
+ PeriodIndex,
20
+ Series,
21
+ Timedelta,
22
+ TimedeltaIndex,
23
+ period_range,
24
+ )
25
+ import pandas._testing as tm
26
+ from pandas.core import ops
27
+ from pandas.core.arrays import TimedeltaArray
28
+ from pandas.tests.arithmetic.common import (
29
+ assert_invalid_addsub_type,
30
+ assert_invalid_comparison,
31
+ get_upcast_box,
32
+ )
33
+
34
+ _common_mismatch = [
35
+ pd.offsets.YearBegin(2),
36
+ pd.offsets.MonthBegin(1),
37
+ pd.offsets.Minute(),
38
+ ]
39
+
40
+
41
+ @pytest.fixture(
42
+ params=[
43
+ Timedelta(minutes=30).to_pytimedelta(),
44
+ np.timedelta64(30, "s"),
45
+ Timedelta(seconds=30),
46
+ ]
47
+ + _common_mismatch
48
+ )
49
+ def not_hourly(request):
50
+ """
51
+ Several timedelta-like and DateOffset instances that are _not_
52
+ compatible with Hourly frequencies.
53
+ """
54
+ return request.param
55
+
56
+
57
+ @pytest.fixture(
58
+ params=[
59
+ np.timedelta64(365, "D"),
60
+ Timedelta(days=365).to_pytimedelta(),
61
+ Timedelta(days=365),
62
+ ]
63
+ + _common_mismatch
64
+ )
65
+ def mismatched_freq(request):
66
+ """
67
+ Several timedelta-like and DateOffset instances that are _not_
68
+ compatible with Monthly or Annual frequencies.
69
+ """
70
+ return request.param
71
+
72
+
73
+ # ------------------------------------------------------------------
74
+ # Comparisons
75
+
76
+
77
+ class TestPeriodArrayLikeComparisons:
78
+ # Comparison tests for PeriodDtype vectors fully parametrized over
79
+ # DataFrame/Series/PeriodIndex/PeriodArray. Ideally all comparison
80
+ # tests will eventually end up here.
81
+
82
+ @pytest.mark.parametrize("other", ["2017", Period("2017", freq="D")])
83
+ def test_eq_scalar(self, other, box_with_array):
84
+ idx = PeriodIndex(["2017", "2017", "2018"], freq="D")
85
+ idx = tm.box_expected(idx, box_with_array)
86
+ xbox = get_upcast_box(idx, other, True)
87
+
88
+ expected = np.array([True, True, False])
89
+ expected = tm.box_expected(expected, xbox)
90
+
91
+ result = idx == other
92
+
93
+ tm.assert_equal(result, expected)
94
+
95
+ def test_compare_zerodim(self, box_with_array):
96
+ # GH#26689 make sure we unbox zero-dimensional arrays
97
+
98
+ pi = period_range("2000", periods=4)
99
+ other = np.array(pi.to_numpy()[0])
100
+
101
+ pi = tm.box_expected(pi, box_with_array)
102
+ xbox = get_upcast_box(pi, other, True)
103
+
104
+ result = pi <= other
105
+ expected = np.array([True, False, False, False])
106
+ expected = tm.box_expected(expected, xbox)
107
+ tm.assert_equal(result, expected)
108
+
109
+ @pytest.mark.parametrize(
110
+ "scalar",
111
+ [
112
+ "foo",
113
+ Timestamp("2021-01-01"),
114
+ Timedelta(days=4),
115
+ 9,
116
+ 9.5,
117
+ 2000, # specifically don't consider 2000 to match Period("2000", "D")
118
+ False,
119
+ None,
120
+ ],
121
+ )
122
+ def test_compare_invalid_scalar(self, box_with_array, scalar):
123
+ # GH#28980
124
+ # comparison with scalar that cannot be interpreted as a Period
125
+ pi = period_range("2000", periods=4)
126
+ parr = tm.box_expected(pi, box_with_array)
127
+ assert_invalid_comparison(parr, scalar, box_with_array)
128
+
129
+ @pytest.mark.parametrize(
130
+ "other",
131
+ [
132
+ pd.date_range("2000", periods=4).array,
133
+ pd.timedelta_range("1D", periods=4).array,
134
+ np.arange(4),
135
+ np.arange(4).astype(np.float64),
136
+ list(range(4)),
137
+ # match Period semantics by not treating integers as Periods
138
+ [2000, 2001, 2002, 2003],
139
+ np.arange(2000, 2004),
140
+ np.arange(2000, 2004).astype(object),
141
+ pd.Index([2000, 2001, 2002, 2003]),
142
+ ],
143
+ )
144
+ def test_compare_invalid_listlike(self, box_with_array, other):
145
+ pi = period_range("2000", periods=4)
146
+ parr = tm.box_expected(pi, box_with_array)
147
+ assert_invalid_comparison(parr, other, box_with_array)
148
+
149
+ @pytest.mark.parametrize("other_box", [list, np.array, lambda x: x.astype(object)])
150
+ def test_compare_object_dtype(self, box_with_array, other_box):
151
+ pi = period_range("2000", periods=5)
152
+ parr = tm.box_expected(pi, box_with_array)
153
+
154
+ other = other_box(pi)
155
+ xbox = get_upcast_box(parr, other, True)
156
+
157
+ expected = np.array([True, True, True, True, True])
158
+ expected = tm.box_expected(expected, xbox)
159
+
160
+ result = parr == other
161
+ tm.assert_equal(result, expected)
162
+ result = parr <= other
163
+ tm.assert_equal(result, expected)
164
+ result = parr >= other
165
+ tm.assert_equal(result, expected)
166
+
167
+ result = parr != other
168
+ tm.assert_equal(result, ~expected)
169
+ result = parr < other
170
+ tm.assert_equal(result, ~expected)
171
+ result = parr > other
172
+ tm.assert_equal(result, ~expected)
173
+
174
+ other = other_box(pi[::-1])
175
+
176
+ expected = np.array([False, False, True, False, False])
177
+ expected = tm.box_expected(expected, xbox)
178
+ result = parr == other
179
+ tm.assert_equal(result, expected)
180
+
181
+ expected = np.array([True, True, True, False, False])
182
+ expected = tm.box_expected(expected, xbox)
183
+ result = parr <= other
184
+ tm.assert_equal(result, expected)
185
+
186
+ expected = np.array([False, False, True, True, True])
187
+ expected = tm.box_expected(expected, xbox)
188
+ result = parr >= other
189
+ tm.assert_equal(result, expected)
190
+
191
+ expected = np.array([True, True, False, True, True])
192
+ expected = tm.box_expected(expected, xbox)
193
+ result = parr != other
194
+ tm.assert_equal(result, expected)
195
+
196
+ expected = np.array([True, True, False, False, False])
197
+ expected = tm.box_expected(expected, xbox)
198
+ result = parr < other
199
+ tm.assert_equal(result, expected)
200
+
201
+ expected = np.array([False, False, False, True, True])
202
+ expected = tm.box_expected(expected, xbox)
203
+ result = parr > other
204
+ tm.assert_equal(result, expected)
205
+
206
+
207
+ class TestPeriodIndexComparisons:
208
+ # TODO: parameterize over boxes
209
+
210
+ def test_pi_cmp_period(self):
211
+ idx = period_range("2007-01", periods=20, freq="M")
212
+ per = idx[10]
213
+
214
+ result = idx < per
215
+ exp = idx.values < idx.values[10]
216
+ tm.assert_numpy_array_equal(result, exp)
217
+
218
+ # Tests Period.__richcmp__ against ndarray[object, ndim=2]
219
+ result = idx.values.reshape(10, 2) < per
220
+ tm.assert_numpy_array_equal(result, exp.reshape(10, 2))
221
+
222
+ # Tests Period.__richcmp__ against ndarray[object, ndim=0]
223
+ result = idx < np.array(per)
224
+ tm.assert_numpy_array_equal(result, exp)
225
+
226
+ # TODO: moved from test_datetime64; de-duplicate with version below
227
+ def test_parr_cmp_period_scalar2(self, box_with_array):
228
+ pi = period_range("2000-01-01", periods=10, freq="D")
229
+
230
+ val = pi[3]
231
+ expected = [x > val for x in pi]
232
+
233
+ ser = tm.box_expected(pi, box_with_array)
234
+ xbox = get_upcast_box(ser, val, True)
235
+
236
+ expected = tm.box_expected(expected, xbox)
237
+ result = ser > val
238
+ tm.assert_equal(result, expected)
239
+
240
+ val = pi[5]
241
+ result = ser > val
242
+ expected = [x > val for x in pi]
243
+ expected = tm.box_expected(expected, xbox)
244
+ tm.assert_equal(result, expected)
245
+
246
+ @pytest.mark.parametrize("freq", ["M", "2M", "3M"])
247
+ def test_parr_cmp_period_scalar(self, freq, box_with_array):
248
+ # GH#13200
249
+ base = PeriodIndex(["2011-01", "2011-02", "2011-03", "2011-04"], freq=freq)
250
+ base = tm.box_expected(base, box_with_array)
251
+ per = Period("2011-02", freq=freq)
252
+ xbox = get_upcast_box(base, per, True)
253
+
254
+ exp = np.array([False, True, False, False])
255
+ exp = tm.box_expected(exp, xbox)
256
+ tm.assert_equal(base == per, exp)
257
+ tm.assert_equal(per == base, exp)
258
+
259
+ exp = np.array([True, False, True, True])
260
+ exp = tm.box_expected(exp, xbox)
261
+ tm.assert_equal(base != per, exp)
262
+ tm.assert_equal(per != base, exp)
263
+
264
+ exp = np.array([False, False, True, True])
265
+ exp = tm.box_expected(exp, xbox)
266
+ tm.assert_equal(base > per, exp)
267
+ tm.assert_equal(per < base, exp)
268
+
269
+ exp = np.array([True, False, False, False])
270
+ exp = tm.box_expected(exp, xbox)
271
+ tm.assert_equal(base < per, exp)
272
+ tm.assert_equal(per > base, exp)
273
+
274
+ exp = np.array([False, True, True, True])
275
+ exp = tm.box_expected(exp, xbox)
276
+ tm.assert_equal(base >= per, exp)
277
+ tm.assert_equal(per <= base, exp)
278
+
279
+ exp = np.array([True, True, False, False])
280
+ exp = tm.box_expected(exp, xbox)
281
+ tm.assert_equal(base <= per, exp)
282
+ tm.assert_equal(per >= base, exp)
283
+
284
+ @pytest.mark.parametrize("freq", ["M", "2M", "3M"])
285
+ def test_parr_cmp_pi(self, freq, box_with_array):
286
+ # GH#13200
287
+ base = PeriodIndex(["2011-01", "2011-02", "2011-03", "2011-04"], freq=freq)
288
+ base = tm.box_expected(base, box_with_array)
289
+
290
+ # TODO: could also box idx?
291
+ idx = PeriodIndex(["2011-02", "2011-01", "2011-03", "2011-05"], freq=freq)
292
+
293
+ xbox = get_upcast_box(base, idx, True)
294
+
295
+ exp = np.array([False, False, True, False])
296
+ exp = tm.box_expected(exp, xbox)
297
+ tm.assert_equal(base == idx, exp)
298
+
299
+ exp = np.array([True, True, False, True])
300
+ exp = tm.box_expected(exp, xbox)
301
+ tm.assert_equal(base != idx, exp)
302
+
303
+ exp = np.array([False, True, False, False])
304
+ exp = tm.box_expected(exp, xbox)
305
+ tm.assert_equal(base > idx, exp)
306
+
307
+ exp = np.array([True, False, False, True])
308
+ exp = tm.box_expected(exp, xbox)
309
+ tm.assert_equal(base < idx, exp)
310
+
311
+ exp = np.array([False, True, True, False])
312
+ exp = tm.box_expected(exp, xbox)
313
+ tm.assert_equal(base >= idx, exp)
314
+
315
+ exp = np.array([True, False, True, True])
316
+ exp = tm.box_expected(exp, xbox)
317
+ tm.assert_equal(base <= idx, exp)
318
+
319
+ @pytest.mark.parametrize("freq", ["M", "2M", "3M"])
320
+ def test_parr_cmp_pi_mismatched_freq(self, freq, box_with_array):
321
+ # GH#13200
322
+ # different base freq
323
+ base = PeriodIndex(["2011-01", "2011-02", "2011-03", "2011-04"], freq=freq)
324
+ base = tm.box_expected(base, box_with_array)
325
+
326
+ msg = rf"Invalid comparison between dtype=period\[{freq}\] and Period"
327
+ with pytest.raises(TypeError, match=msg):
328
+ base <= Period("2011", freq="Y")
329
+
330
+ with pytest.raises(TypeError, match=msg):
331
+ Period("2011", freq="Y") >= base
332
+
333
+ # TODO: Could parametrize over boxes for idx?
334
+ idx = PeriodIndex(["2011", "2012", "2013", "2014"], freq="Y")
335
+ rev_msg = r"Invalid comparison between dtype=period\[Y-DEC\] and PeriodArray"
336
+ idx_msg = rev_msg if box_with_array in [tm.to_array, pd.array] else msg
337
+ with pytest.raises(TypeError, match=idx_msg):
338
+ base <= idx
339
+
340
+ # Different frequency
341
+ msg = rf"Invalid comparison between dtype=period\[{freq}\] and Period"
342
+ with pytest.raises(TypeError, match=msg):
343
+ base <= Period("2011", freq="4M")
344
+
345
+ with pytest.raises(TypeError, match=msg):
346
+ Period("2011", freq="4M") >= base
347
+
348
+ idx = PeriodIndex(["2011", "2012", "2013", "2014"], freq="4M")
349
+ rev_msg = r"Invalid comparison between dtype=period\[4M\] and PeriodArray"
350
+ idx_msg = rev_msg if box_with_array in [tm.to_array, pd.array] else msg
351
+ with pytest.raises(TypeError, match=idx_msg):
352
+ base <= idx
353
+
354
+ @pytest.mark.parametrize("freq", ["M", "2M", "3M"])
355
+ def test_pi_cmp_nat(self, freq):
356
+ idx1 = PeriodIndex(["2011-01", "2011-02", "NaT", "2011-05"], freq=freq)
357
+ per = idx1[1]
358
+
359
+ result = idx1 > per
360
+ exp = np.array([False, False, False, True])
361
+ tm.assert_numpy_array_equal(result, exp)
362
+ result = per < idx1
363
+ tm.assert_numpy_array_equal(result, exp)
364
+
365
+ result = idx1 == pd.NaT
366
+ exp = np.array([False, False, False, False])
367
+ tm.assert_numpy_array_equal(result, exp)
368
+ result = pd.NaT == idx1
369
+ tm.assert_numpy_array_equal(result, exp)
370
+
371
+ result = idx1 != pd.NaT
372
+ exp = np.array([True, True, True, True])
373
+ tm.assert_numpy_array_equal(result, exp)
374
+ result = pd.NaT != idx1
375
+ tm.assert_numpy_array_equal(result, exp)
376
+
377
+ idx2 = PeriodIndex(["2011-02", "2011-01", "2011-04", "NaT"], freq=freq)
378
+ result = idx1 < idx2
379
+ exp = np.array([True, False, False, False])
380
+ tm.assert_numpy_array_equal(result, exp)
381
+
382
+ result = idx1 == idx2
383
+ exp = np.array([False, False, False, False])
384
+ tm.assert_numpy_array_equal(result, exp)
385
+
386
+ result = idx1 != idx2
387
+ exp = np.array([True, True, True, True])
388
+ tm.assert_numpy_array_equal(result, exp)
389
+
390
+ result = idx1 == idx1
391
+ exp = np.array([True, True, False, True])
392
+ tm.assert_numpy_array_equal(result, exp)
393
+
394
+ result = idx1 != idx1
395
+ exp = np.array([False, False, True, False])
396
+ tm.assert_numpy_array_equal(result, exp)
397
+
398
+ @pytest.mark.parametrize("freq", ["M", "2M", "3M"])
399
+ def test_pi_cmp_nat_mismatched_freq_raises(self, freq):
400
+ idx1 = PeriodIndex(["2011-01", "2011-02", "NaT", "2011-05"], freq=freq)
401
+
402
+ diff = PeriodIndex(["2011-02", "2011-01", "2011-04", "NaT"], freq="4M")
403
+ msg = rf"Invalid comparison between dtype=period\[{freq}\] and PeriodArray"
404
+ with pytest.raises(TypeError, match=msg):
405
+ idx1 > diff
406
+
407
+ result = idx1 == diff
408
+ expected = np.array([False, False, False, False], dtype=bool)
409
+ tm.assert_numpy_array_equal(result, expected)
410
+
411
+ # TODO: De-duplicate with test_pi_cmp_nat
412
+ @pytest.mark.parametrize("dtype", [object, None])
413
+ def test_comp_nat(self, dtype):
414
+ left = PeriodIndex([Period("2011-01-01"), pd.NaT, Period("2011-01-03")])
415
+ right = PeriodIndex([pd.NaT, pd.NaT, Period("2011-01-03")])
416
+
417
+ if dtype is not None:
418
+ left = left.astype(dtype)
419
+ right = right.astype(dtype)
420
+
421
+ result = left == right
422
+ expected = np.array([False, False, True])
423
+ tm.assert_numpy_array_equal(result, expected)
424
+
425
+ result = left != right
426
+ expected = np.array([True, True, False])
427
+ tm.assert_numpy_array_equal(result, expected)
428
+
429
+ expected = np.array([False, False, False])
430
+ tm.assert_numpy_array_equal(left == pd.NaT, expected)
431
+ tm.assert_numpy_array_equal(pd.NaT == right, expected)
432
+
433
+ expected = np.array([True, True, True])
434
+ tm.assert_numpy_array_equal(left != pd.NaT, expected)
435
+ tm.assert_numpy_array_equal(pd.NaT != left, expected)
436
+
437
+ expected = np.array([False, False, False])
438
+ tm.assert_numpy_array_equal(left < pd.NaT, expected)
439
+ tm.assert_numpy_array_equal(pd.NaT > left, expected)
440
+
441
+
442
+ class TestPeriodSeriesComparisons:
443
+ def test_cmp_series_period_series_mixed_freq(self):
444
+ # GH#13200
445
+ base = Series(
446
+ [
447
+ Period("2011", freq="Y"),
448
+ Period("2011-02", freq="M"),
449
+ Period("2013", freq="Y"),
450
+ Period("2011-04", freq="M"),
451
+ ]
452
+ )
453
+
454
+ ser = Series(
455
+ [
456
+ Period("2012", freq="Y"),
457
+ Period("2011-01", freq="M"),
458
+ Period("2013", freq="Y"),
459
+ Period("2011-05", freq="M"),
460
+ ]
461
+ )
462
+
463
+ exp = Series([False, False, True, False])
464
+ tm.assert_series_equal(base == ser, exp)
465
+
466
+ exp = Series([True, True, False, True])
467
+ tm.assert_series_equal(base != ser, exp)
468
+
469
+ exp = Series([False, True, False, False])
470
+ tm.assert_series_equal(base > ser, exp)
471
+
472
+ exp = Series([True, False, False, True])
473
+ tm.assert_series_equal(base < ser, exp)
474
+
475
+ exp = Series([False, True, True, False])
476
+ tm.assert_series_equal(base >= ser, exp)
477
+
478
+ exp = Series([True, False, True, True])
479
+ tm.assert_series_equal(base <= ser, exp)
480
+
481
+
482
+ class TestPeriodIndexSeriesComparisonConsistency:
483
+ """Test PeriodIndex and Period Series Ops consistency"""
484
+
485
+ # TODO: needs parametrization+de-duplication
486
+
487
+ def _check(self, values, func, expected):
488
+ # Test PeriodIndex and Period Series Ops consistency
489
+
490
+ idx = PeriodIndex(values)
491
+ result = func(idx)
492
+
493
+ # check that we don't pass an unwanted type to tm.assert_equal
494
+ assert isinstance(expected, (pd.Index, np.ndarray))
495
+ tm.assert_equal(result, expected)
496
+
497
+ s = Series(values)
498
+ result = func(s)
499
+
500
+ exp = Series(expected, name=values.name)
501
+ tm.assert_series_equal(result, exp)
502
+
503
+ def test_pi_comp_period(self):
504
+ idx = PeriodIndex(
505
+ ["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
506
+ )
507
+ per = idx[2]
508
+
509
+ f = lambda x: x == per
510
+ exp = np.array([False, False, True, False], dtype=np.bool_)
511
+ self._check(idx, f, exp)
512
+ f = lambda x: per == x
513
+ self._check(idx, f, exp)
514
+
515
+ f = lambda x: x != per
516
+ exp = np.array([True, True, False, True], dtype=np.bool_)
517
+ self._check(idx, f, exp)
518
+ f = lambda x: per != x
519
+ self._check(idx, f, exp)
520
+
521
+ f = lambda x: per >= x
522
+ exp = np.array([True, True, True, False], dtype=np.bool_)
523
+ self._check(idx, f, exp)
524
+
525
+ f = lambda x: x > per
526
+ exp = np.array([False, False, False, True], dtype=np.bool_)
527
+ self._check(idx, f, exp)
528
+
529
+ f = lambda x: per >= x
530
+ exp = np.array([True, True, True, False], dtype=np.bool_)
531
+ self._check(idx, f, exp)
532
+
533
+ def test_pi_comp_period_nat(self):
534
+ idx = PeriodIndex(
535
+ ["2011-01", "NaT", "2011-03", "2011-04"], freq="M", name="idx"
536
+ )
537
+ per = idx[2]
538
+
539
+ f = lambda x: x == per
540
+ exp = np.array([False, False, True, False], dtype=np.bool_)
541
+ self._check(idx, f, exp)
542
+ f = lambda x: per == x
543
+ self._check(idx, f, exp)
544
+
545
+ f = lambda x: x == pd.NaT
546
+ exp = np.array([False, False, False, False], dtype=np.bool_)
547
+ self._check(idx, f, exp)
548
+ f = lambda x: pd.NaT == x
549
+ self._check(idx, f, exp)
550
+
551
+ f = lambda x: x != per
552
+ exp = np.array([True, True, False, True], dtype=np.bool_)
553
+ self._check(idx, f, exp)
554
+ f = lambda x: per != x
555
+ self._check(idx, f, exp)
556
+
557
+ f = lambda x: x != pd.NaT
558
+ exp = np.array([True, True, True, True], dtype=np.bool_)
559
+ self._check(idx, f, exp)
560
+ f = lambda x: pd.NaT != x
561
+ self._check(idx, f, exp)
562
+
563
+ f = lambda x: per >= x
564
+ exp = np.array([True, False, True, False], dtype=np.bool_)
565
+ self._check(idx, f, exp)
566
+
567
+ f = lambda x: x < per
568
+ exp = np.array([True, False, False, False], dtype=np.bool_)
569
+ self._check(idx, f, exp)
570
+
571
+ f = lambda x: x > pd.NaT
572
+ exp = np.array([False, False, False, False], dtype=np.bool_)
573
+ self._check(idx, f, exp)
574
+
575
+ f = lambda x: pd.NaT >= x
576
+ exp = np.array([False, False, False, False], dtype=np.bool_)
577
+ self._check(idx, f, exp)
578
+
579
+
580
+ # ------------------------------------------------------------------
581
+ # Arithmetic
582
+
583
+
584
+ class TestPeriodFrameArithmetic:
585
+ def test_ops_frame_period(self):
586
+ # GH#13043
587
+ df = pd.DataFrame(
588
+ {
589
+ "A": [Period("2015-01", freq="M"), Period("2015-02", freq="M")],
590
+ "B": [Period("2014-01", freq="M"), Period("2014-02", freq="M")],
591
+ }
592
+ )
593
+ assert df["A"].dtype == "Period[M]"
594
+ assert df["B"].dtype == "Period[M]"
595
+
596
+ p = Period("2015-03", freq="M")
597
+ off = p.freq
598
+ # dtype will be object because of original dtype
599
+ exp = pd.DataFrame(
600
+ {
601
+ "A": np.array([2 * off, 1 * off], dtype=object),
602
+ "B": np.array([14 * off, 13 * off], dtype=object),
603
+ }
604
+ )
605
+ tm.assert_frame_equal(p - df, exp)
606
+ tm.assert_frame_equal(df - p, -1 * exp)
607
+
608
+ df2 = pd.DataFrame(
609
+ {
610
+ "A": [Period("2015-05", freq="M"), Period("2015-06", freq="M")],
611
+ "B": [Period("2015-05", freq="M"), Period("2015-06", freq="M")],
612
+ }
613
+ )
614
+ assert df2["A"].dtype == "Period[M]"
615
+ assert df2["B"].dtype == "Period[M]"
616
+
617
+ exp = pd.DataFrame(
618
+ {
619
+ "A": np.array([4 * off, 4 * off], dtype=object),
620
+ "B": np.array([16 * off, 16 * off], dtype=object),
621
+ }
622
+ )
623
+ tm.assert_frame_equal(df2 - df, exp)
624
+ tm.assert_frame_equal(df - df2, -1 * exp)
625
+
626
+
627
+ class TestPeriodIndexArithmetic:
628
+ # ---------------------------------------------------------------
629
+ # __add__/__sub__ with PeriodIndex
630
+ # PeriodIndex + other is defined for integers and timedelta-like others
631
+ # PeriodIndex - other is defined for integers, timedelta-like others,
632
+ # and PeriodIndex (with matching freq)
633
+
634
+ def test_parr_add_iadd_parr_raises(self, box_with_array):
635
+ rng = period_range("1/1/2000", freq="D", periods=5)
636
+ other = period_range("1/6/2000", freq="D", periods=5)
637
+ # TODO: parametrize over boxes for other?
638
+
639
+ rng = tm.box_expected(rng, box_with_array)
640
+ # An earlier implementation of PeriodIndex addition performed
641
+ # a set operation (union). This has since been changed to
642
+ # raise a TypeError. See GH#14164 and GH#13077 for historical
643
+ # reference.
644
+ msg = r"unsupported operand type\(s\) for \+: .* and .*"
645
+ with pytest.raises(TypeError, match=msg):
646
+ rng + other
647
+
648
+ with pytest.raises(TypeError, match=msg):
649
+ rng += other
650
+
651
+ def test_pi_sub_isub_pi(self):
652
+ # GH#20049
653
+ # For historical reference see GH#14164, GH#13077.
654
+ # PeriodIndex subtraction originally performed set difference,
655
+ # then changed to raise TypeError before being implemented in GH#20049
656
+ rng = period_range("1/1/2000", freq="D", periods=5)
657
+ other = period_range("1/6/2000", freq="D", periods=5)
658
+
659
+ off = rng.freq
660
+ expected = pd.Index([-5 * off] * 5)
661
+ result = rng - other
662
+ tm.assert_index_equal(result, expected)
663
+
664
+ rng -= other
665
+ tm.assert_index_equal(rng, expected)
666
+
667
+ def test_pi_sub_pi_with_nat(self):
668
+ rng = period_range("1/1/2000", freq="D", periods=5)
669
+ other = rng[1:].insert(0, pd.NaT)
670
+ assert other[1:].equals(rng[1:])
671
+
672
+ result = rng - other
673
+ off = rng.freq
674
+ expected = pd.Index([pd.NaT, 0 * off, 0 * off, 0 * off, 0 * off])
675
+ tm.assert_index_equal(result, expected)
676
+
677
+ def test_parr_sub_pi_mismatched_freq(self, box_with_array, box_with_array2):
678
+ rng = period_range("1/1/2000", freq="D", periods=5)
679
+ other = period_range("1/6/2000", freq="h", periods=5)
680
+
681
+ rng = tm.box_expected(rng, box_with_array)
682
+ other = tm.box_expected(other, box_with_array2)
683
+ msg = r"Input has different freq=[hD] from PeriodArray\(freq=[Dh]\)"
684
+ with pytest.raises(IncompatibleFrequency, match=msg):
685
+ rng - other
686
+
687
+ @pytest.mark.parametrize("n", [1, 2, 3, 4])
688
+ def test_sub_n_gt_1_ticks(self, tick_classes, n):
689
+ # GH 23878
690
+ p1_d = "19910905"
691
+ p2_d = "19920406"
692
+ p1 = PeriodIndex([p1_d], freq=tick_classes(n))
693
+ p2 = PeriodIndex([p2_d], freq=tick_classes(n))
694
+
695
+ expected = PeriodIndex([p2_d], freq=p2.freq.base) - PeriodIndex(
696
+ [p1_d], freq=p1.freq.base
697
+ )
698
+
699
+ tm.assert_index_equal((p2 - p1), expected)
700
+
701
+ @pytest.mark.parametrize("n", [1, 2, 3, 4])
702
+ @pytest.mark.parametrize(
703
+ "offset, kwd_name",
704
+ [
705
+ (pd.offsets.YearEnd, "month"),
706
+ (pd.offsets.QuarterEnd, "startingMonth"),
707
+ (pd.offsets.MonthEnd, None),
708
+ (pd.offsets.Week, "weekday"),
709
+ ],
710
+ )
711
+ def test_sub_n_gt_1_offsets(self, offset, kwd_name, n):
712
+ # GH 23878
713
+ kwds = {kwd_name: 3} if kwd_name is not None else {}
714
+ p1_d = "19910905"
715
+ p2_d = "19920406"
716
+ freq = offset(n, normalize=False, **kwds)
717
+ p1 = PeriodIndex([p1_d], freq=freq)
718
+ p2 = PeriodIndex([p2_d], freq=freq)
719
+
720
+ result = p2 - p1
721
+ expected = PeriodIndex([p2_d], freq=freq.base) - PeriodIndex(
722
+ [p1_d], freq=freq.base
723
+ )
724
+
725
+ tm.assert_index_equal(result, expected)
726
+
727
+ # -------------------------------------------------------------
728
+ # Invalid Operations
729
+
730
+ @pytest.mark.parametrize(
731
+ "other",
732
+ [
733
+ # datetime scalars
734
+ Timestamp("2016-01-01"),
735
+ Timestamp("2016-01-01").to_pydatetime(),
736
+ Timestamp("2016-01-01").to_datetime64(),
737
+ # datetime-like arrays
738
+ pd.date_range("2016-01-01", periods=3, freq="h"),
739
+ pd.date_range("2016-01-01", periods=3, tz="Europe/Brussels"),
740
+ pd.date_range("2016-01-01", periods=3, freq="s")._data,
741
+ pd.date_range("2016-01-01", periods=3, tz="Asia/Tokyo")._data,
742
+ # Miscellaneous invalid types
743
+ 3.14,
744
+ np.array([2.0, 3.0, 4.0]),
745
+ ],
746
+ )
747
+ def test_parr_add_sub_invalid(self, other, box_with_array):
748
+ # GH#23215
749
+ rng = period_range("1/1/2000", freq="D", periods=3)
750
+ rng = tm.box_expected(rng, box_with_array)
751
+
752
+ msg = "|".join(
753
+ [
754
+ r"(:?cannot add PeriodArray and .*)",
755
+ r"(:?cannot subtract .* from (:?a\s)?.*)",
756
+ r"(:?unsupported operand type\(s\) for \+: .* and .*)",
757
+ r"unsupported operand type\(s\) for [+-]: .* and .*",
758
+ ]
759
+ )
760
+ assert_invalid_addsub_type(rng, other, msg)
761
+ with pytest.raises(TypeError, match=msg):
762
+ rng + other
763
+ with pytest.raises(TypeError, match=msg):
764
+ other + rng
765
+ with pytest.raises(TypeError, match=msg):
766
+ rng - other
767
+ with pytest.raises(TypeError, match=msg):
768
+ other - rng
769
+
770
+ # -----------------------------------------------------------------
771
+ # __add__/__sub__ with ndarray[datetime64] and ndarray[timedelta64]
772
+
773
+ def test_pi_add_sub_td64_array_non_tick_raises(self):
774
+ rng = period_range("1/1/2000", freq="Q", periods=3)
775
+ tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"])
776
+ tdarr = tdi.values
777
+
778
+ msg = r"Cannot add or subtract timedelta64\[ns\] dtype from period\[Q-DEC\]"
779
+ with pytest.raises(TypeError, match=msg):
780
+ rng + tdarr
781
+ with pytest.raises(TypeError, match=msg):
782
+ tdarr + rng
783
+
784
+ with pytest.raises(TypeError, match=msg):
785
+ rng - tdarr
786
+ msg = r"cannot subtract PeriodArray from TimedeltaArray"
787
+ with pytest.raises(TypeError, match=msg):
788
+ tdarr - rng
789
+
790
+ def test_pi_add_sub_td64_array_tick(self):
791
+ # PeriodIndex + Timedelta-like is allowed only with
792
+ # tick-like frequencies
793
+ rng = period_range("1/1/2000", freq="90D", periods=3)
794
+ tdi = TimedeltaIndex(["-1 Day", "-1 Day", "-1 Day"])
795
+ tdarr = tdi.values
796
+
797
+ expected = period_range("12/31/1999", freq="90D", periods=3)
798
+ result = rng + tdi
799
+ tm.assert_index_equal(result, expected)
800
+ result = rng + tdarr
801
+ tm.assert_index_equal(result, expected)
802
+ result = tdi + rng
803
+ tm.assert_index_equal(result, expected)
804
+ result = tdarr + rng
805
+ tm.assert_index_equal(result, expected)
806
+
807
+ expected = period_range("1/2/2000", freq="90D", periods=3)
808
+
809
+ result = rng - tdi
810
+ tm.assert_index_equal(result, expected)
811
+ result = rng - tdarr
812
+ tm.assert_index_equal(result, expected)
813
+
814
+ msg = r"cannot subtract .* from .*"
815
+ with pytest.raises(TypeError, match=msg):
816
+ tdarr - rng
817
+
818
+ with pytest.raises(TypeError, match=msg):
819
+ tdi - rng
820
+
821
+ @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"])
822
+ @pytest.mark.parametrize("tdi_freq", [None, "h"])
823
+ def test_parr_sub_td64array(self, box_with_array, tdi_freq, pi_freq):
824
+ box = box_with_array
825
+ xbox = box if box not in [pd.array, tm.to_array] else pd.Index
826
+
827
+ tdi = TimedeltaIndex(["1 hours", "2 hours"], freq=tdi_freq)
828
+ dti = Timestamp("2018-03-07 17:16:40") + tdi
829
+ pi = dti.to_period(pi_freq)
830
+
831
+ # TODO: parametrize over box for pi?
832
+ td64obj = tm.box_expected(tdi, box)
833
+
834
+ if pi_freq == "h":
835
+ result = pi - td64obj
836
+ expected = (pi.to_timestamp("s") - tdi).to_period(pi_freq)
837
+ expected = tm.box_expected(expected, xbox)
838
+ tm.assert_equal(result, expected)
839
+
840
+ # Subtract from scalar
841
+ result = pi[0] - td64obj
842
+ expected = (pi[0].to_timestamp("s") - tdi).to_period(pi_freq)
843
+ expected = tm.box_expected(expected, box)
844
+ tm.assert_equal(result, expected)
845
+
846
+ elif pi_freq == "D":
847
+ # Tick, but non-compatible
848
+ msg = (
849
+ "Cannot add/subtract timedelta-like from PeriodArray that is "
850
+ "not an integer multiple of the PeriodArray's freq."
851
+ )
852
+ with pytest.raises(IncompatibleFrequency, match=msg):
853
+ pi - td64obj
854
+
855
+ with pytest.raises(IncompatibleFrequency, match=msg):
856
+ pi[0] - td64obj
857
+
858
+ else:
859
+ # With non-Tick freq, we could not add timedelta64 array regardless
860
+ # of what its resolution is
861
+ msg = "Cannot add or subtract timedelta64"
862
+ with pytest.raises(TypeError, match=msg):
863
+ pi - td64obj
864
+ with pytest.raises(TypeError, match=msg):
865
+ pi[0] - td64obj
866
+
867
+ # -----------------------------------------------------------------
868
+ # operations with array/Index of DateOffset objects
869
+
870
+ @pytest.mark.parametrize("box", [np.array, pd.Index])
871
+ def test_pi_add_offset_array(self, box):
872
+ # GH#18849
873
+ pi = PeriodIndex([Period("2015Q1"), Period("2016Q2")])
874
+ offs = box(
875
+ [
876
+ pd.offsets.QuarterEnd(n=1, startingMonth=12),
877
+ pd.offsets.QuarterEnd(n=-2, startingMonth=12),
878
+ ]
879
+ )
880
+ expected = PeriodIndex([Period("2015Q2"), Period("2015Q4")]).astype(object)
881
+
882
+ with tm.assert_produces_warning(PerformanceWarning):
883
+ res = pi + offs
884
+ tm.assert_index_equal(res, expected)
885
+
886
+ with tm.assert_produces_warning(PerformanceWarning):
887
+ res2 = offs + pi
888
+ tm.assert_index_equal(res2, expected)
889
+
890
+ unanchored = np.array([pd.offsets.Hour(n=1), pd.offsets.Minute(n=-2)])
891
+ # addition/subtraction ops with incompatible offsets should issue
892
+ # a PerformanceWarning and _then_ raise a TypeError.
893
+ msg = r"Input cannot be converted to Period\(freq=Q-DEC\)"
894
+ with pytest.raises(IncompatibleFrequency, match=msg):
895
+ with tm.assert_produces_warning(PerformanceWarning):
896
+ pi + unanchored
897
+ with pytest.raises(IncompatibleFrequency, match=msg):
898
+ with tm.assert_produces_warning(PerformanceWarning):
899
+ unanchored + pi
900
+
901
+ @pytest.mark.parametrize("box", [np.array, pd.Index])
902
+ def test_pi_sub_offset_array(self, box):
903
+ # GH#18824
904
+ pi = PeriodIndex([Period("2015Q1"), Period("2016Q2")])
905
+ other = box(
906
+ [
907
+ pd.offsets.QuarterEnd(n=1, startingMonth=12),
908
+ pd.offsets.QuarterEnd(n=-2, startingMonth=12),
909
+ ]
910
+ )
911
+
912
+ expected = PeriodIndex([pi[n] - other[n] for n in range(len(pi))])
913
+ expected = expected.astype(object)
914
+
915
+ with tm.assert_produces_warning(PerformanceWarning):
916
+ res = pi - other
917
+ tm.assert_index_equal(res, expected)
918
+
919
+ anchored = box([pd.offsets.MonthEnd(), pd.offsets.Day(n=2)])
920
+
921
+ # addition/subtraction ops with anchored offsets should issue
922
+ # a PerformanceWarning and _then_ raise a TypeError.
923
+ msg = r"Input has different freq=-1M from Period\(freq=Q-DEC\)"
924
+ with pytest.raises(IncompatibleFrequency, match=msg):
925
+ with tm.assert_produces_warning(PerformanceWarning):
926
+ pi - anchored
927
+ with pytest.raises(IncompatibleFrequency, match=msg):
928
+ with tm.assert_produces_warning(PerformanceWarning):
929
+ anchored - pi
930
+
931
+ def test_pi_add_iadd_int(self, one):
932
+ # Variants of `one` for #19012
933
+ rng = period_range("2000-01-01 09:00", freq="h", periods=10)
934
+ result = rng + one
935
+ expected = period_range("2000-01-01 10:00", freq="h", periods=10)
936
+ tm.assert_index_equal(result, expected)
937
+ rng += one
938
+ tm.assert_index_equal(rng, expected)
939
+
940
+ def test_pi_sub_isub_int(self, one):
941
+ """
942
+ PeriodIndex.__sub__ and __isub__ with several representations of
943
+ the integer 1, e.g. int, np.int64, np.uint8, ...
944
+ """
945
+ rng = period_range("2000-01-01 09:00", freq="h", periods=10)
946
+ result = rng - one
947
+ expected = period_range("2000-01-01 08:00", freq="h", periods=10)
948
+ tm.assert_index_equal(result, expected)
949
+ rng -= one
950
+ tm.assert_index_equal(rng, expected)
951
+
952
+ @pytest.mark.parametrize("five", [5, np.array(5, dtype=np.int64)])
953
+ def test_pi_sub_intlike(self, five):
954
+ rng = period_range("2007-01", periods=50)
955
+
956
+ result = rng - five
957
+ exp = rng + (-five)
958
+ tm.assert_index_equal(result, exp)
959
+
960
+ def test_pi_add_sub_int_array_freqn_gt1(self):
961
+ # GH#47209 test adding array of ints when freq.n > 1 matches
962
+ # scalar behavior
963
+ pi = period_range("2016-01-01", periods=10, freq="2D")
964
+ arr = np.arange(10)
965
+ result = pi + arr
966
+ expected = pd.Index([x + y for x, y in zip(pi, arr)])
967
+ tm.assert_index_equal(result, expected)
968
+
969
+ result = pi - arr
970
+ expected = pd.Index([x - y for x, y in zip(pi, arr)])
971
+ tm.assert_index_equal(result, expected)
972
+
973
+ def test_pi_sub_isub_offset(self):
974
+ # offset
975
+ # DateOffset
976
+ rng = period_range("2014", "2024", freq="Y")
977
+ result = rng - pd.offsets.YearEnd(5)
978
+ expected = period_range("2009", "2019", freq="Y")
979
+ tm.assert_index_equal(result, expected)
980
+ rng -= pd.offsets.YearEnd(5)
981
+ tm.assert_index_equal(rng, expected)
982
+
983
+ rng = period_range("2014-01", "2016-12", freq="M")
984
+ result = rng - pd.offsets.MonthEnd(5)
985
+ expected = period_range("2013-08", "2016-07", freq="M")
986
+ tm.assert_index_equal(result, expected)
987
+
988
+ rng -= pd.offsets.MonthEnd(5)
989
+ tm.assert_index_equal(rng, expected)
990
+
991
+ @pytest.mark.parametrize("transpose", [True, False])
992
+ def test_pi_add_offset_n_gt1(self, box_with_array, transpose):
993
+ # GH#23215
994
+ # add offset to PeriodIndex with freq.n > 1
995
+
996
+ per = Period("2016-01", freq="2M")
997
+ pi = PeriodIndex([per])
998
+
999
+ expected = PeriodIndex(["2016-03"], freq="2M")
1000
+
1001
+ pi = tm.box_expected(pi, box_with_array, transpose=transpose)
1002
+ expected = tm.box_expected(expected, box_with_array, transpose=transpose)
1003
+
1004
+ result = pi + per.freq
1005
+ tm.assert_equal(result, expected)
1006
+
1007
+ result = per.freq + pi
1008
+ tm.assert_equal(result, expected)
1009
+
1010
+ def test_pi_add_offset_n_gt1_not_divisible(self, box_with_array):
1011
+ # GH#23215
1012
+ # PeriodIndex with freq.n > 1 add offset with offset.n % freq.n != 0
1013
+ pi = PeriodIndex(["2016-01"], freq="2M")
1014
+ expected = PeriodIndex(["2016-04"], freq="2M")
1015
+
1016
+ pi = tm.box_expected(pi, box_with_array)
1017
+ expected = tm.box_expected(expected, box_with_array)
1018
+
1019
+ result = pi + to_offset("3ME")
1020
+ tm.assert_equal(result, expected)
1021
+
1022
+ result = to_offset("3ME") + pi
1023
+ tm.assert_equal(result, expected)
1024
+
1025
+ # ---------------------------------------------------------------
1026
+ # __add__/__sub__ with integer arrays
1027
+
1028
+ @pytest.mark.parametrize("int_holder", [np.array, pd.Index])
1029
+ @pytest.mark.parametrize("op", [operator.add, ops.radd])
1030
+ def test_pi_add_intarray(self, int_holder, op):
1031
+ # GH#19959
1032
+ pi = PeriodIndex([Period("2015Q1"), Period("NaT")])
1033
+ other = int_holder([4, -1])
1034
+
1035
+ result = op(pi, other)
1036
+ expected = PeriodIndex([Period("2016Q1"), Period("NaT")])
1037
+ tm.assert_index_equal(result, expected)
1038
+
1039
+ @pytest.mark.parametrize("int_holder", [np.array, pd.Index])
1040
+ def test_pi_sub_intarray(self, int_holder):
1041
+ # GH#19959
1042
+ pi = PeriodIndex([Period("2015Q1"), Period("NaT")])
1043
+ other = int_holder([4, -1])
1044
+
1045
+ result = pi - other
1046
+ expected = PeriodIndex([Period("2014Q1"), Period("NaT")])
1047
+ tm.assert_index_equal(result, expected)
1048
+
1049
+ msg = r"bad operand type for unary -: 'PeriodArray'"
1050
+ with pytest.raises(TypeError, match=msg):
1051
+ other - pi
1052
+
1053
+ # ---------------------------------------------------------------
1054
+ # Timedelta-like (timedelta, timedelta64, Timedelta, Tick)
1055
+ # TODO: Some of these are misnomers because of non-Tick DateOffsets
1056
+
1057
+ def test_parr_add_timedeltalike_minute_gt1(self, three_days, box_with_array):
1058
+ # GH#23031 adding a time-delta-like offset to a PeriodArray that has
1059
+ # minute frequency with n != 1. A more general case is tested below
1060
+ # in test_pi_add_timedeltalike_tick_gt1, but here we write out the
1061
+ # expected result more explicitly.
1062
+ other = three_days
1063
+ rng = period_range("2014-05-01", periods=3, freq="2D")
1064
+ rng = tm.box_expected(rng, box_with_array)
1065
+
1066
+ expected = PeriodIndex(["2014-05-04", "2014-05-06", "2014-05-08"], freq="2D")
1067
+ expected = tm.box_expected(expected, box_with_array)
1068
+
1069
+ result = rng + other
1070
+ tm.assert_equal(result, expected)
1071
+
1072
+ result = other + rng
1073
+ tm.assert_equal(result, expected)
1074
+
1075
+ # subtraction
1076
+ expected = PeriodIndex(["2014-04-28", "2014-04-30", "2014-05-02"], freq="2D")
1077
+ expected = tm.box_expected(expected, box_with_array)
1078
+ result = rng - other
1079
+ tm.assert_equal(result, expected)
1080
+
1081
+ msg = "|".join(
1082
+ [
1083
+ r"bad operand type for unary -: 'PeriodArray'",
1084
+ r"cannot subtract PeriodArray from timedelta64\[[hD]\]",
1085
+ ]
1086
+ )
1087
+ with pytest.raises(TypeError, match=msg):
1088
+ other - rng
1089
+
1090
+ @pytest.mark.parametrize("freqstr", ["5ns", "5us", "5ms", "5s", "5min", "5h", "5d"])
1091
+ def test_parr_add_timedeltalike_tick_gt1(self, three_days, freqstr, box_with_array):
1092
+ # GH#23031 adding a time-delta-like offset to a PeriodArray that has
1093
+ # tick-like frequency with n != 1
1094
+ other = three_days
1095
+ rng = period_range("2014-05-01", periods=6, freq=freqstr)
1096
+ first = rng[0]
1097
+ rng = tm.box_expected(rng, box_with_array)
1098
+
1099
+ expected = period_range(first + other, periods=6, freq=freqstr)
1100
+ expected = tm.box_expected(expected, box_with_array)
1101
+
1102
+ result = rng + other
1103
+ tm.assert_equal(result, expected)
1104
+
1105
+ result = other + rng
1106
+ tm.assert_equal(result, expected)
1107
+
1108
+ # subtraction
1109
+ expected = period_range(first - other, periods=6, freq=freqstr)
1110
+ expected = tm.box_expected(expected, box_with_array)
1111
+ result = rng - other
1112
+ tm.assert_equal(result, expected)
1113
+ msg = "|".join(
1114
+ [
1115
+ r"bad operand type for unary -: 'PeriodArray'",
1116
+ r"cannot subtract PeriodArray from timedelta64\[[hD]\]",
1117
+ ]
1118
+ )
1119
+ with pytest.raises(TypeError, match=msg):
1120
+ other - rng
1121
+
1122
+ def test_pi_add_iadd_timedeltalike_daily(self, three_days):
1123
+ # Tick
1124
+ other = three_days
1125
+ rng = period_range("2014-05-01", "2014-05-15", freq="D")
1126
+ expected = period_range("2014-05-04", "2014-05-18", freq="D")
1127
+
1128
+ result = rng + other
1129
+ tm.assert_index_equal(result, expected)
1130
+
1131
+ rng += other
1132
+ tm.assert_index_equal(rng, expected)
1133
+
1134
+ def test_pi_sub_isub_timedeltalike_daily(self, three_days):
1135
+ # Tick-like 3 Days
1136
+ other = three_days
1137
+ rng = period_range("2014-05-01", "2014-05-15", freq="D")
1138
+ expected = period_range("2014-04-28", "2014-05-12", freq="D")
1139
+
1140
+ result = rng - other
1141
+ tm.assert_index_equal(result, expected)
1142
+
1143
+ rng -= other
1144
+ tm.assert_index_equal(rng, expected)
1145
+
1146
+ def test_parr_add_sub_timedeltalike_freq_mismatch_daily(
1147
+ self, not_daily, box_with_array
1148
+ ):
1149
+ other = not_daily
1150
+ rng = period_range("2014-05-01", "2014-05-15", freq="D")
1151
+ rng = tm.box_expected(rng, box_with_array)
1152
+
1153
+ msg = "|".join(
1154
+ [
1155
+ # non-timedelta-like DateOffset
1156
+ "Input has different freq(=.+)? from Period.*?\\(freq=D\\)",
1157
+ # timedelta/td64/Timedelta but not a multiple of 24H
1158
+ "Cannot add/subtract timedelta-like from PeriodArray that is "
1159
+ "not an integer multiple of the PeriodArray's freq.",
1160
+ ]
1161
+ )
1162
+ with pytest.raises(IncompatibleFrequency, match=msg):
1163
+ rng + other
1164
+ with pytest.raises(IncompatibleFrequency, match=msg):
1165
+ rng += other
1166
+ with pytest.raises(IncompatibleFrequency, match=msg):
1167
+ rng - other
1168
+ with pytest.raises(IncompatibleFrequency, match=msg):
1169
+ rng -= other
1170
+
1171
+ def test_pi_add_iadd_timedeltalike_hourly(self, two_hours):
1172
+ other = two_hours
1173
+ rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h")
1174
+ expected = period_range("2014-01-01 12:00", "2014-01-05 12:00", freq="h")
1175
+
1176
+ result = rng + other
1177
+ tm.assert_index_equal(result, expected)
1178
+
1179
+ rng += other
1180
+ tm.assert_index_equal(rng, expected)
1181
+
1182
+ def test_parr_add_timedeltalike_mismatched_freq_hourly(
1183
+ self, not_hourly, box_with_array
1184
+ ):
1185
+ other = not_hourly
1186
+ rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h")
1187
+ rng = tm.box_expected(rng, box_with_array)
1188
+ msg = "|".join(
1189
+ [
1190
+ # non-timedelta-like DateOffset
1191
+ "Input has different freq(=.+)? from Period.*?\\(freq=h\\)",
1192
+ # timedelta/td64/Timedelta but not a multiple of 24H
1193
+ "Cannot add/subtract timedelta-like from PeriodArray that is "
1194
+ "not an integer multiple of the PeriodArray's freq.",
1195
+ ]
1196
+ )
1197
+
1198
+ with pytest.raises(IncompatibleFrequency, match=msg):
1199
+ rng + other
1200
+
1201
+ with pytest.raises(IncompatibleFrequency, match=msg):
1202
+ rng += other
1203
+
1204
+ def test_pi_sub_isub_timedeltalike_hourly(self, two_hours):
1205
+ other = two_hours
1206
+ rng = period_range("2014-01-01 10:00", "2014-01-05 10:00", freq="h")
1207
+ expected = period_range("2014-01-01 08:00", "2014-01-05 08:00", freq="h")
1208
+
1209
+ result = rng - other
1210
+ tm.assert_index_equal(result, expected)
1211
+
1212
+ rng -= other
1213
+ tm.assert_index_equal(rng, expected)
1214
+
1215
+ def test_add_iadd_timedeltalike_annual(self):
1216
+ # offset
1217
+ # DateOffset
1218
+ rng = period_range("2014", "2024", freq="Y")
1219
+ result = rng + pd.offsets.YearEnd(5)
1220
+ expected = period_range("2019", "2029", freq="Y")
1221
+ tm.assert_index_equal(result, expected)
1222
+ rng += pd.offsets.YearEnd(5)
1223
+ tm.assert_index_equal(rng, expected)
1224
+
1225
+ def test_pi_add_sub_timedeltalike_freq_mismatch_annual(self, mismatched_freq):
1226
+ other = mismatched_freq
1227
+ rng = period_range("2014", "2024", freq="Y")
1228
+ msg = "Input has different freq(=.+)? from Period.*?\\(freq=Y-DEC\\)"
1229
+ with pytest.raises(IncompatibleFrequency, match=msg):
1230
+ rng + other
1231
+ with pytest.raises(IncompatibleFrequency, match=msg):
1232
+ rng += other
1233
+ with pytest.raises(IncompatibleFrequency, match=msg):
1234
+ rng - other
1235
+ with pytest.raises(IncompatibleFrequency, match=msg):
1236
+ rng -= other
1237
+
1238
+ def test_pi_add_iadd_timedeltalike_M(self):
1239
+ rng = period_range("2014-01", "2016-12", freq="M")
1240
+ expected = period_range("2014-06", "2017-05", freq="M")
1241
+
1242
+ result = rng + pd.offsets.MonthEnd(5)
1243
+ tm.assert_index_equal(result, expected)
1244
+
1245
+ rng += pd.offsets.MonthEnd(5)
1246
+ tm.assert_index_equal(rng, expected)
1247
+
1248
+ def test_pi_add_sub_timedeltalike_freq_mismatch_monthly(self, mismatched_freq):
1249
+ other = mismatched_freq
1250
+ rng = period_range("2014-01", "2016-12", freq="M")
1251
+ msg = "Input has different freq(=.+)? from Period.*?\\(freq=M\\)"
1252
+ with pytest.raises(IncompatibleFrequency, match=msg):
1253
+ rng + other
1254
+ with pytest.raises(IncompatibleFrequency, match=msg):
1255
+ rng += other
1256
+ with pytest.raises(IncompatibleFrequency, match=msg):
1257
+ rng - other
1258
+ with pytest.raises(IncompatibleFrequency, match=msg):
1259
+ rng -= other
1260
+
1261
+ @pytest.mark.parametrize("transpose", [True, False])
1262
+ def test_parr_add_sub_td64_nat(self, box_with_array, transpose):
1263
+ # GH#23320 special handling for timedelta64("NaT")
1264
+ pi = period_range("1994-04-01", periods=9, freq="19D")
1265
+ other = np.timedelta64("NaT")
1266
+ expected = PeriodIndex(["NaT"] * 9, freq="19D")
1267
+
1268
+ obj = tm.box_expected(pi, box_with_array, transpose=transpose)
1269
+ expected = tm.box_expected(expected, box_with_array, transpose=transpose)
1270
+
1271
+ result = obj + other
1272
+ tm.assert_equal(result, expected)
1273
+ result = other + obj
1274
+ tm.assert_equal(result, expected)
1275
+ result = obj - other
1276
+ tm.assert_equal(result, expected)
1277
+ msg = r"cannot subtract .* from .*"
1278
+ with pytest.raises(TypeError, match=msg):
1279
+ other - obj
1280
+
1281
+ @pytest.mark.parametrize(
1282
+ "other",
1283
+ [
1284
+ np.array(["NaT"] * 9, dtype="m8[ns]"),
1285
+ TimedeltaArray._from_sequence(["NaT"] * 9, dtype="m8[ns]"),
1286
+ ],
1287
+ )
1288
+ def test_parr_add_sub_tdt64_nat_array(self, box_with_array, other):
1289
+ pi = period_range("1994-04-01", periods=9, freq="19D")
1290
+ expected = PeriodIndex(["NaT"] * 9, freq="19D")
1291
+
1292
+ obj = tm.box_expected(pi, box_with_array)
1293
+ expected = tm.box_expected(expected, box_with_array)
1294
+
1295
+ result = obj + other
1296
+ tm.assert_equal(result, expected)
1297
+ result = other + obj
1298
+ tm.assert_equal(result, expected)
1299
+ result = obj - other
1300
+ tm.assert_equal(result, expected)
1301
+ msg = r"cannot subtract .* from .*"
1302
+ with pytest.raises(TypeError, match=msg):
1303
+ other - obj
1304
+
1305
+ # some but not *all* NaT
1306
+ other = other.copy()
1307
+ other[0] = np.timedelta64(0, "ns")
1308
+ expected = PeriodIndex([pi[0]] + ["NaT"] * 8, freq="19D")
1309
+ expected = tm.box_expected(expected, box_with_array)
1310
+
1311
+ result = obj + other
1312
+ tm.assert_equal(result, expected)
1313
+ result = other + obj
1314
+ tm.assert_equal(result, expected)
1315
+ result = obj - other
1316
+ tm.assert_equal(result, expected)
1317
+ with pytest.raises(TypeError, match=msg):
1318
+ other - obj
1319
+
1320
+ # ---------------------------------------------------------------
1321
+ # Unsorted
1322
+
1323
+ def test_parr_add_sub_index(self):
1324
+ # Check that PeriodArray defers to Index on arithmetic ops
1325
+ pi = period_range("2000-12-31", periods=3)
1326
+ parr = pi.array
1327
+
1328
+ result = parr - pi
1329
+ expected = pi - pi
1330
+ tm.assert_index_equal(result, expected)
1331
+
1332
+ def test_parr_add_sub_object_array(self):
1333
+ pi = period_range("2000-12-31", periods=3, freq="D")
1334
+ parr = pi.array
1335
+
1336
+ other = np.array([Timedelta(days=1), pd.offsets.Day(2), 3])
1337
+
1338
+ with tm.assert_produces_warning(PerformanceWarning):
1339
+ result = parr + other
1340
+
1341
+ expected = PeriodIndex(
1342
+ ["2001-01-01", "2001-01-03", "2001-01-05"], freq="D"
1343
+ )._data.astype(object)
1344
+ tm.assert_equal(result, expected)
1345
+
1346
+ with tm.assert_produces_warning(PerformanceWarning):
1347
+ result = parr - other
1348
+
1349
+ expected = PeriodIndex(["2000-12-30"] * 3, freq="D")._data.astype(object)
1350
+ tm.assert_equal(result, expected)
1351
+
1352
+ def test_period_add_timestamp_raises(self, box_with_array):
1353
+ # GH#17983
1354
+ ts = Timestamp("2017")
1355
+ per = Period("2017", freq="M")
1356
+
1357
+ arr = pd.Index([per], dtype="Period[M]")
1358
+ arr = tm.box_expected(arr, box_with_array)
1359
+
1360
+ msg = "cannot add PeriodArray and Timestamp"
1361
+ with pytest.raises(TypeError, match=msg):
1362
+ arr + ts
1363
+ with pytest.raises(TypeError, match=msg):
1364
+ ts + arr
1365
+ msg = "cannot add PeriodArray and DatetimeArray"
1366
+ with pytest.raises(TypeError, match=msg):
1367
+ arr + Series([ts])
1368
+ with pytest.raises(TypeError, match=msg):
1369
+ Series([ts]) + arr
1370
+ with pytest.raises(TypeError, match=msg):
1371
+ arr + pd.Index([ts])
1372
+ with pytest.raises(TypeError, match=msg):
1373
+ pd.Index([ts]) + arr
1374
+
1375
+ if box_with_array is pd.DataFrame:
1376
+ msg = "cannot add PeriodArray and DatetimeArray"
1377
+ else:
1378
+ msg = r"unsupported operand type\(s\) for \+: 'Period' and 'DatetimeArray"
1379
+ with pytest.raises(TypeError, match=msg):
1380
+ arr + pd.DataFrame([ts])
1381
+ if box_with_array is pd.DataFrame:
1382
+ msg = "cannot add PeriodArray and DatetimeArray"
1383
+ else:
1384
+ msg = r"unsupported operand type\(s\) for \+: 'DatetimeArray' and 'Period'"
1385
+ with pytest.raises(TypeError, match=msg):
1386
+ pd.DataFrame([ts]) + arr
1387
+
1388
+
1389
+ class TestPeriodSeriesArithmetic:
1390
+ def test_parr_add_timedeltalike_scalar(self, three_days, box_with_array):
1391
+ # GH#13043
1392
+ ser = Series(
1393
+ [Period("2015-01-01", freq="D"), Period("2015-01-02", freq="D")],
1394
+ name="xxx",
1395
+ )
1396
+ assert ser.dtype == "Period[D]"
1397
+
1398
+ expected = Series(
1399
+ [Period("2015-01-04", freq="D"), Period("2015-01-05", freq="D")],
1400
+ name="xxx",
1401
+ )
1402
+
1403
+ obj = tm.box_expected(ser, box_with_array)
1404
+ if box_with_array is pd.DataFrame:
1405
+ assert (obj.dtypes == "Period[D]").all()
1406
+
1407
+ expected = tm.box_expected(expected, box_with_array)
1408
+
1409
+ result = obj + three_days
1410
+ tm.assert_equal(result, expected)
1411
+
1412
+ result = three_days + obj
1413
+ tm.assert_equal(result, expected)
1414
+
1415
+ def test_ops_series_period(self):
1416
+ # GH#13043
1417
+ ser = Series(
1418
+ [Period("2015-01-01", freq="D"), Period("2015-01-02", freq="D")],
1419
+ name="xxx",
1420
+ )
1421
+ assert ser.dtype == "Period[D]"
1422
+
1423
+ per = Period("2015-01-10", freq="D")
1424
+ off = per.freq
1425
+ # dtype will be object because of original dtype
1426
+ expected = Series([9 * off, 8 * off], name="xxx", dtype=object)
1427
+ tm.assert_series_equal(per - ser, expected)
1428
+ tm.assert_series_equal(ser - per, -1 * expected)
1429
+
1430
+ s2 = Series(
1431
+ [Period("2015-01-05", freq="D"), Period("2015-01-04", freq="D")],
1432
+ name="xxx",
1433
+ )
1434
+ assert s2.dtype == "Period[D]"
1435
+
1436
+ expected = Series([4 * off, 2 * off], name="xxx", dtype=object)
1437
+ tm.assert_series_equal(s2 - ser, expected)
1438
+ tm.assert_series_equal(ser - s2, -1 * expected)
1439
+
1440
+
1441
+ class TestPeriodIndexSeriesMethods:
1442
+ """Test PeriodIndex and Period Series Ops consistency"""
1443
+
1444
+ def _check(self, values, func, expected):
1445
+ idx = PeriodIndex(values)
1446
+ result = func(idx)
1447
+ tm.assert_equal(result, expected)
1448
+
1449
+ ser = Series(values)
1450
+ result = func(ser)
1451
+
1452
+ exp = Series(expected, name=values.name)
1453
+ tm.assert_series_equal(result, exp)
1454
+
1455
+ def test_pi_ops(self):
1456
+ idx = PeriodIndex(
1457
+ ["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
1458
+ )
1459
+
1460
+ expected = PeriodIndex(
1461
+ ["2011-03", "2011-04", "2011-05", "2011-06"], freq="M", name="idx"
1462
+ )
1463
+
1464
+ self._check(idx, lambda x: x + 2, expected)
1465
+ self._check(idx, lambda x: 2 + x, expected)
1466
+
1467
+ self._check(idx + 2, lambda x: x - 2, idx)
1468
+
1469
+ result = idx - Period("2011-01", freq="M")
1470
+ off = idx.freq
1471
+ exp = pd.Index([0 * off, 1 * off, 2 * off, 3 * off], name="idx")
1472
+ tm.assert_index_equal(result, exp)
1473
+
1474
+ result = Period("2011-01", freq="M") - idx
1475
+ exp = pd.Index([0 * off, -1 * off, -2 * off, -3 * off], name="idx")
1476
+ tm.assert_index_equal(result, exp)
1477
+
1478
+ @pytest.mark.parametrize("ng", ["str", 1.5])
1479
+ @pytest.mark.parametrize(
1480
+ "func",
1481
+ [
1482
+ lambda obj, ng: obj + ng,
1483
+ lambda obj, ng: ng + obj,
1484
+ lambda obj, ng: obj - ng,
1485
+ lambda obj, ng: ng - obj,
1486
+ lambda obj, ng: np.add(obj, ng),
1487
+ lambda obj, ng: np.add(ng, obj),
1488
+ lambda obj, ng: np.subtract(obj, ng),
1489
+ lambda obj, ng: np.subtract(ng, obj),
1490
+ ],
1491
+ )
1492
+ def test_parr_ops_errors(self, ng, func, box_with_array):
1493
+ idx = PeriodIndex(
1494
+ ["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
1495
+ )
1496
+ obj = tm.box_expected(idx, box_with_array)
1497
+ msg = "|".join(
1498
+ [
1499
+ r"unsupported operand type\(s\)",
1500
+ "can only concatenate",
1501
+ r"must be str",
1502
+ "object to str implicitly",
1503
+ ]
1504
+ )
1505
+
1506
+ with pytest.raises(TypeError, match=msg):
1507
+ func(obj, ng)
1508
+
1509
+ def test_pi_ops_nat(self):
1510
+ idx = PeriodIndex(
1511
+ ["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
1512
+ )
1513
+ expected = PeriodIndex(
1514
+ ["2011-03", "2011-04", "NaT", "2011-06"], freq="M", name="idx"
1515
+ )
1516
+
1517
+ self._check(idx, lambda x: x + 2, expected)
1518
+ self._check(idx, lambda x: 2 + x, expected)
1519
+ self._check(idx, lambda x: np.add(x, 2), expected)
1520
+
1521
+ self._check(idx + 2, lambda x: x - 2, idx)
1522
+ self._check(idx + 2, lambda x: np.subtract(x, 2), idx)
1523
+
1524
+ # freq with mult
1525
+ idx = PeriodIndex(
1526
+ ["2011-01", "2011-02", "NaT", "2011-04"], freq="2M", name="idx"
1527
+ )
1528
+ expected = PeriodIndex(
1529
+ ["2011-07", "2011-08", "NaT", "2011-10"], freq="2M", name="idx"
1530
+ )
1531
+
1532
+ self._check(idx, lambda x: x + 3, expected)
1533
+ self._check(idx, lambda x: 3 + x, expected)
1534
+ self._check(idx, lambda x: np.add(x, 3), expected)
1535
+
1536
+ self._check(idx + 3, lambda x: x - 3, idx)
1537
+ self._check(idx + 3, lambda x: np.subtract(x, 3), idx)
1538
+
1539
+ def test_pi_ops_array_int(self):
1540
+ idx = PeriodIndex(
1541
+ ["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
1542
+ )
1543
+ f = lambda x: x + np.array([1, 2, 3, 4])
1544
+ exp = PeriodIndex(
1545
+ ["2011-02", "2011-04", "NaT", "2011-08"], freq="M", name="idx"
1546
+ )
1547
+ self._check(idx, f, exp)
1548
+
1549
+ f = lambda x: np.add(x, np.array([4, -1, 1, 2]))
1550
+ exp = PeriodIndex(
1551
+ ["2011-05", "2011-01", "NaT", "2011-06"], freq="M", name="idx"
1552
+ )
1553
+ self._check(idx, f, exp)
1554
+
1555
+ f = lambda x: x - np.array([1, 2, 3, 4])
1556
+ exp = PeriodIndex(
1557
+ ["2010-12", "2010-12", "NaT", "2010-12"], freq="M", name="idx"
1558
+ )
1559
+ self._check(idx, f, exp)
1560
+
1561
+ f = lambda x: np.subtract(x, np.array([3, 2, 3, -2]))
1562
+ exp = PeriodIndex(
1563
+ ["2010-10", "2010-12", "NaT", "2011-06"], freq="M", name="idx"
1564
+ )
1565
+ self._check(idx, f, exp)
1566
+
1567
+ def test_pi_ops_offset(self):
1568
+ idx = PeriodIndex(
1569
+ ["2011-01-01", "2011-02-01", "2011-03-01", "2011-04-01"],
1570
+ freq="D",
1571
+ name="idx",
1572
+ )
1573
+ f = lambda x: x + pd.offsets.Day()
1574
+ exp = PeriodIndex(
1575
+ ["2011-01-02", "2011-02-02", "2011-03-02", "2011-04-02"],
1576
+ freq="D",
1577
+ name="idx",
1578
+ )
1579
+ self._check(idx, f, exp)
1580
+
1581
+ f = lambda x: x + pd.offsets.Day(2)
1582
+ exp = PeriodIndex(
1583
+ ["2011-01-03", "2011-02-03", "2011-03-03", "2011-04-03"],
1584
+ freq="D",
1585
+ name="idx",
1586
+ )
1587
+ self._check(idx, f, exp)
1588
+
1589
+ f = lambda x: x - pd.offsets.Day(2)
1590
+ exp = PeriodIndex(
1591
+ ["2010-12-30", "2011-01-30", "2011-02-27", "2011-03-30"],
1592
+ freq="D",
1593
+ name="idx",
1594
+ )
1595
+ self._check(idx, f, exp)
1596
+
1597
+ def test_pi_offset_errors(self):
1598
+ idx = PeriodIndex(
1599
+ ["2011-01-01", "2011-02-01", "2011-03-01", "2011-04-01"],
1600
+ freq="D",
1601
+ name="idx",
1602
+ )
1603
+ ser = Series(idx)
1604
+
1605
+ msg = (
1606
+ "Cannot add/subtract timedelta-like from PeriodArray that is not "
1607
+ "an integer multiple of the PeriodArray's freq"
1608
+ )
1609
+ for obj in [idx, ser]:
1610
+ with pytest.raises(IncompatibleFrequency, match=msg):
1611
+ obj + pd.offsets.Hour(2)
1612
+
1613
+ with pytest.raises(IncompatibleFrequency, match=msg):
1614
+ pd.offsets.Hour(2) + obj
1615
+
1616
+ with pytest.raises(IncompatibleFrequency, match=msg):
1617
+ obj - pd.offsets.Hour(2)
1618
+
1619
+ def test_pi_sub_period(self):
1620
+ # GH#13071
1621
+ idx = PeriodIndex(
1622
+ ["2011-01", "2011-02", "2011-03", "2011-04"], freq="M", name="idx"
1623
+ )
1624
+
1625
+ result = idx - Period("2012-01", freq="M")
1626
+ off = idx.freq
1627
+ exp = pd.Index([-12 * off, -11 * off, -10 * off, -9 * off], name="idx")
1628
+ tm.assert_index_equal(result, exp)
1629
+
1630
+ result = np.subtract(idx, Period("2012-01", freq="M"))
1631
+ tm.assert_index_equal(result, exp)
1632
+
1633
+ result = Period("2012-01", freq="M") - idx
1634
+ exp = pd.Index([12 * off, 11 * off, 10 * off, 9 * off], name="idx")
1635
+ tm.assert_index_equal(result, exp)
1636
+
1637
+ result = np.subtract(Period("2012-01", freq="M"), idx)
1638
+ tm.assert_index_equal(result, exp)
1639
+
1640
+ exp = TimedeltaIndex([np.nan, np.nan, np.nan, np.nan], name="idx")
1641
+ result = idx - Period("NaT", freq="M")
1642
+ tm.assert_index_equal(result, exp)
1643
+ assert result.freq == exp.freq
1644
+
1645
+ result = Period("NaT", freq="M") - idx
1646
+ tm.assert_index_equal(result, exp)
1647
+ assert result.freq == exp.freq
1648
+
1649
+ def test_pi_sub_pdnat(self):
1650
+ # GH#13071, GH#19389
1651
+ idx = PeriodIndex(
1652
+ ["2011-01", "2011-02", "NaT", "2011-04"], freq="M", name="idx"
1653
+ )
1654
+ exp = TimedeltaIndex([pd.NaT] * 4, name="idx")
1655
+ tm.assert_index_equal(pd.NaT - idx, exp)
1656
+ tm.assert_index_equal(idx - pd.NaT, exp)
1657
+
1658
+ def test_pi_sub_period_nat(self):
1659
+ # GH#13071
1660
+ idx = PeriodIndex(
1661
+ ["2011-01", "NaT", "2011-03", "2011-04"], freq="M", name="idx"
1662
+ )
1663
+
1664
+ result = idx - Period("2012-01", freq="M")
1665
+ off = idx.freq
1666
+ exp = pd.Index([-12 * off, pd.NaT, -10 * off, -9 * off], name="idx")
1667
+ tm.assert_index_equal(result, exp)
1668
+
1669
+ result = Period("2012-01", freq="M") - idx
1670
+ exp = pd.Index([12 * off, pd.NaT, 10 * off, 9 * off], name="idx")
1671
+ tm.assert_index_equal(result, exp)
1672
+
1673
+ exp = TimedeltaIndex([np.nan, np.nan, np.nan, np.nan], name="idx")
1674
+ tm.assert_index_equal(idx - Period("NaT", freq="M"), exp)
1675
+ tm.assert_index_equal(Period("NaT", freq="M") - idx, exp)
env-llmeval/lib/python3.10/site-packages/pandas/tests/arithmetic/test_timedelta64.py ADDED
@@ -0,0 +1,2173 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Arithmetic tests for DataFrame/Series/Index/Array classes that should
2
+ # behave identically.
3
+ from datetime import (
4
+ datetime,
5
+ timedelta,
6
+ )
7
+
8
+ import numpy as np
9
+ import pytest
10
+
11
+ from pandas.errors import (
12
+ OutOfBoundsDatetime,
13
+ PerformanceWarning,
14
+ )
15
+
16
+ import pandas as pd
17
+ from pandas import (
18
+ DataFrame,
19
+ DatetimeIndex,
20
+ Index,
21
+ NaT,
22
+ Series,
23
+ Timedelta,
24
+ TimedeltaIndex,
25
+ Timestamp,
26
+ offsets,
27
+ timedelta_range,
28
+ )
29
+ import pandas._testing as tm
30
+ from pandas.core.arrays import NumpyExtensionArray
31
+ from pandas.tests.arithmetic.common import (
32
+ assert_invalid_addsub_type,
33
+ assert_invalid_comparison,
34
+ get_upcast_box,
35
+ )
36
+
37
+
38
def assert_dtype(obj, expected_dtype):
    """Assert that *obj* (Series, Index, or single-column DataFrame) has *expected_dtype*."""
    actual = tm.get_dtype(obj)
    assert actual == expected_dtype
45
+
46
+
47
def get_expected_name(box, names):
    """Return the name expected on the result of a binary op for the given box.

    ``names`` is a 3-tuple/list: ``names[0]`` for DataFrame (the other
    operand's name is discarded), ``names[1]`` for array boxes, and
    ``names[2]`` for Series/Index.
    """
    if box is DataFrame:
        # Operating DataFrame-with-non-DataFrame casts the non-DataFrame
        # to Series and ignores its name.
        return names[0]
    if box in (tm.to_array, pd.array):
        return names[1]
    return names[2]
57
+
58
+
59
+ # ------------------------------------------------------------------
60
+ # Timedelta64[ns] dtype Comparisons
61
+
62
+
63
class TestTimedelta64ArrayLikeComparisons:
    # Comparison tests for timedelta64[ns] vectors fully parametrized over
    # DataFrame/Series/TimedeltaIndex/TimedeltaArray. Ideally all comparison
    # tests will eventually end up here.

    def test_compare_timedelta64_zerodim(self, box_with_array):
        # GH#26689 should unbox when comparing with zerodim array
        box = box_with_array
        # Index/ExtensionArray boxes return a plain ndarray from comparisons,
        # hence the separate "expected box" (xbox).
        xbox = box_with_array if box_with_array not in [Index, pd.array] else np.ndarray

        tdi = timedelta_range("2h", periods=4)
        # zero-dim np.timedelta64 array wrapping the first element
        other = np.array(tdi.to_numpy()[0])

        tdi = tm.box_expected(tdi, box)
        res = tdi <= other
        expected = np.array([True, False, False, False])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(res, expected)

    @pytest.mark.parametrize(
        "td_scalar",
        [
            timedelta(days=1),
            Timedelta(days=1),
            Timedelta(days=1).to_timedelta64(),
            offsets.Hour(24),
        ],
    )
    def test_compare_timedeltalike_scalar(self, box_with_array, td_scalar):
        # regression test for GH#5963
        box = box_with_array
        xbox = box if box not in [Index, pd.array] else np.ndarray

        ser = Series([timedelta(days=1), timedelta(days=2)])
        ser = tm.box_expected(ser, box)
        actual = ser > td_scalar
        expected = Series([False, True])
        expected = tm.box_expected(expected, xbox)
        tm.assert_equal(actual, expected)

    @pytest.mark.parametrize(
        "invalid",
        [
            345600000000000,
            "a",
            Timestamp("2021-01-01"),
            Timestamp("2021-01-01").now("UTC"),
            Timestamp("2021-01-01").now().to_datetime64(),
            Timestamp("2021-01-01").now().to_pydatetime(),
            Timestamp("2021-01-01").date(),
            np.array(4),  # zero-dim mismatched dtype
        ],
    )
    def test_td64_comparisons_invalid(self, box_with_array, invalid):
        # GH#13624 for str
        box = box_with_array

        rng = timedelta_range("1 days", periods=10)
        obj = tm.box_expected(rng, box)

        # == / != should return all-False / all-True; ordering comparisons
        # should raise TypeError (checked by the shared helper).
        assert_invalid_comparison(obj, invalid, box)

    @pytest.mark.parametrize(
        "other",
        [
            list(range(10)),
            np.arange(10),
            np.arange(10).astype(np.float32),
            np.arange(10).astype(object),
            pd.date_range("1970-01-01", periods=10, tz="UTC").array,
            np.array(pd.date_range("1970-01-01", periods=10)),
            list(pd.date_range("1970-01-01", periods=10)),
            pd.date_range("1970-01-01", periods=10).astype(object),
            pd.period_range("1971-01-01", freq="D", periods=10).array,
            pd.period_range("1971-01-01", freq="D", periods=10).astype(object),
        ],
    )
    def test_td64arr_cmp_arraylike_invalid(self, other, box_with_array):
        # We don't parametrize this over box_with_array because listlike
        # other plays poorly with assert_invalid_comparison reversed checks

        rng = timedelta_range("1 days", periods=10)._data
        rng = tm.box_expected(rng, box_with_array)
        assert_invalid_comparison(rng, other, box_with_array)

    def test_td64arr_cmp_mixed_invalid(self):
        # Object array mixing ints, a Timedelta, and a Timestamp: equality
        # works elementwise, ordering comparisons raise.
        rng = timedelta_range("1 days", periods=5)._data
        other = np.array([0, 1, 2, rng[3], Timestamp("2021-01-01")])

        result = rng == other
        expected = np.array([False, False, False, True, False])
        tm.assert_numpy_array_equal(result, expected)

        result = rng != other
        tm.assert_numpy_array_equal(result, ~expected)

        msg = "Invalid comparison between|Cannot compare type|not supported between"
        with pytest.raises(TypeError, match=msg):
            rng < other
        with pytest.raises(TypeError, match=msg):
            rng > other
        with pytest.raises(TypeError, match=msg):
            rng <= other
        with pytest.raises(TypeError, match=msg):
            rng >= other
168
+
169
+
170
class TestTimedelta64ArrayComparisons:
    # TODO: All of these need to be parametrized over box

    @pytest.mark.parametrize("dtype", [None, object])
    def test_comp_nat(self, dtype):
        # NaT entries compare False for ==/</>, True for !=, both for
        # td64 dtype and after casting to object.
        left = TimedeltaIndex([Timedelta("1 days"), NaT, Timedelta("3 days")])
        right = TimedeltaIndex([NaT, NaT, Timedelta("3 days")])

        lhs, rhs = left, right
        if dtype is object:
            lhs, rhs = left.astype(object), right.astype(object)

        result = rhs == lhs
        expected = np.array([False, False, True])
        tm.assert_numpy_array_equal(result, expected)

        result = rhs != lhs
        expected = np.array([True, True, False])
        tm.assert_numpy_array_equal(result, expected)

        expected = np.array([False, False, False])
        tm.assert_numpy_array_equal(lhs == NaT, expected)
        tm.assert_numpy_array_equal(NaT == rhs, expected)

        expected = np.array([True, True, True])
        tm.assert_numpy_array_equal(lhs != NaT, expected)
        tm.assert_numpy_array_equal(NaT != lhs, expected)

        expected = np.array([False, False, False])
        tm.assert_numpy_array_equal(lhs < NaT, expected)
        tm.assert_numpy_array_equal(NaT > lhs, expected)

    @pytest.mark.parametrize(
        "idx2",
        [
            TimedeltaIndex(
                ["2 day", "2 day", NaT, NaT, "1 day 00:00:02", "5 days 00:00:03"]
            ),
            np.array(
                [
                    np.timedelta64(2, "D"),
                    np.timedelta64(2, "D"),
                    np.timedelta64("nat"),
                    np.timedelta64("nat"),
                    np.timedelta64(1, "D") + np.timedelta64(2, "s"),
                    np.timedelta64(5, "D") + np.timedelta64(3, "s"),
                ]
            ),
        ],
    )
    def test_comparisons_nat(self, idx2):
        idx1 = TimedeltaIndex(
            [
                "1 day",
                NaT,
                "1 day 00:00:01",
                NaT,
                "1 day 00:00:01",
                "5 day 00:00:03",
            ]
        )
        # Check pd.NaT is handled the same as np.nan (NaT positions yield
        # False for every comparison).
        result = idx1 < idx2
        expected = np.array([True, False, False, False, True, False])
        tm.assert_numpy_array_equal(result, expected)

        result = idx2 > idx1
        expected = np.array([True, False, False, False, True, False])
        tm.assert_numpy_array_equal(result, expected)

        result = idx1 <= idx2
        expected = np.array([True, False, False, False, True, True])
        tm.assert_numpy_array_equal(result, expected)

        result = idx2 >= idx1
        expected = np.array([True, False, False, False, True, True])
        tm.assert_numpy_array_equal(result, expected)

        result = idx1 == idx2
        expected = np.array([False, False, False, False, False, True])
        tm.assert_numpy_array_equal(result, expected)

        result = idx1 != idx2
        expected = np.array([True, True, True, True, True, False])
        tm.assert_numpy_array_equal(result, expected)

    # TODO: better name
    def test_comparisons_coverage(self):
        rng = timedelta_range("1 days", periods=10)

        # comparison against a scalar element of the range
        result = rng < rng[3]
        expected = np.array([True, True, True] + [False] * 7)
        tm.assert_numpy_array_equal(result, expected)

        # comparing against a list should behave like comparing against
        # the equivalent Index
        result = rng == list(rng)
        exp = rng == rng
        tm.assert_numpy_array_equal(result, exp)
267
+
268
+
269
+ # ------------------------------------------------------------------
270
+ # Timedelta64[ns] dtype Arithmetic Operations
271
+
272
+
273
class TestTimedelta64ArithmeticUnsorted:
    # Tests moved from type-specific test files but not
    # yet sorted/parametrized/de-duplicated

    def test_ufunc_coercions(self):
        # normal ops are also tested in tseries/test_timedeltas.py
        # Operator and equivalent-ufunc forms must give identical results,
        # and freq must propagate (or be dropped, for abs).
        idx = TimedeltaIndex(["2h", "4h", "6h", "8h", "10h"], freq="2h", name="x")

        for result in [idx * 2, np.multiply(idx, 2)]:
            assert isinstance(result, TimedeltaIndex)
            exp = TimedeltaIndex(["4h", "8h", "12h", "16h", "20h"], freq="4h", name="x")
            tm.assert_index_equal(result, exp)
            assert result.freq == "4h"

        for result in [idx / 2, np.divide(idx, 2)]:
            assert isinstance(result, TimedeltaIndex)
            exp = TimedeltaIndex(["1h", "2h", "3h", "4h", "5h"], freq="h", name="x")
            tm.assert_index_equal(result, exp)
            assert result.freq == "h"

        for result in [-idx, np.negative(idx)]:
            assert isinstance(result, TimedeltaIndex)
            exp = TimedeltaIndex(
                ["-2h", "-4h", "-6h", "-8h", "-10h"], freq="-2h", name="x"
            )
            tm.assert_index_equal(result, exp)
            assert result.freq == "-2h"

        idx = TimedeltaIndex(["-2h", "-1h", "0h", "1h", "2h"], freq="h", name="x")
        for result in [abs(idx), np.absolute(idx)]:
            assert isinstance(result, TimedeltaIndex)
            # abs is not monotonic, so freq cannot be preserved
            exp = TimedeltaIndex(["2h", "1h", "0h", "1h", "2h"], freq=None, name="x")
            tm.assert_index_equal(result, exp)
            assert result.freq is None

    def test_subtraction_ops(self):
        # with datetimes/timedelta and tdi/dti
        tdi = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
        dti = pd.date_range("20130101", periods=3, name="bar")
        td = Timedelta("1 days")
        dt = Timestamp("20130101")

        # datelike - from - timedelta-like array is undefined
        msg = "cannot subtract a datelike from a TimedeltaArray"
        with pytest.raises(TypeError, match=msg):
            tdi - dt
        with pytest.raises(TypeError, match=msg):
            tdi - dti

        msg = r"unsupported operand type\(s\) for -"
        with pytest.raises(TypeError, match=msg):
            td - dt

        msg = "(bad|unsupported) operand type for unary"
        with pytest.raises(TypeError, match=msg):
            td - dti

        result = dt - dti
        expected = TimedeltaIndex(["0 days", "-1 days", "-2 days"], name="bar")
        tm.assert_index_equal(result, expected)

        result = dti - dt
        expected = TimedeltaIndex(["0 days", "1 days", "2 days"], name="bar")
        tm.assert_index_equal(result, expected)

        result = tdi - td
        expected = TimedeltaIndex(["0 days", NaT, "1 days"], name="foo")
        tm.assert_index_equal(result, expected)

        result = td - tdi
        expected = TimedeltaIndex(["0 days", NaT, "-1 days"], name="foo")
        tm.assert_index_equal(result, expected)

        result = dti - td
        expected = DatetimeIndex(
            ["20121231", "20130101", "20130102"], dtype="M8[ns]", freq="D", name="bar"
        )
        tm.assert_index_equal(result, expected)

        result = dt - tdi
        expected = DatetimeIndex(
            ["20121231", NaT, "20121230"], dtype="M8[ns]", name="foo"
        )
        tm.assert_index_equal(result, expected)

    def test_subtraction_ops_with_tz(self, box_with_array):
        # check that dt/dti subtraction ops with tz are validated
        dti = pd.date_range("20130101", periods=3)
        dti = tm.box_expected(dti, box_with_array)
        ts = Timestamp("20130101")
        dt = ts.to_pydatetime()
        dti_tz = pd.date_range("20130101", periods=3).tz_localize("US/Eastern")
        dti_tz = tm.box_expected(dti_tz, box_with_array)
        ts_tz = Timestamp("20130101").tz_localize("US/Eastern")
        ts_tz2 = Timestamp("20130101").tz_localize("CET")
        dt_tz = ts_tz.to_pydatetime()
        td = Timedelta("1 days")

        def _check(result, expected):
            # scalar subtraction must return a Timedelta, not np.timedelta64
            assert result == expected
            assert isinstance(result, Timedelta)

        # scalars
        result = ts - ts
        expected = Timedelta("0 days")
        _check(result, expected)

        result = dt_tz - ts_tz
        expected = Timedelta("0 days")
        _check(result, expected)

        result = ts_tz - dt_tz
        expected = Timedelta("0 days")
        _check(result, expected)

        # tz mismatches
        # NOTE: the message differs depending on whether the pandas or the
        # stdlib datetime implementation raises first.
        msg = "Cannot subtract tz-naive and tz-aware datetime-like objects."
        with pytest.raises(TypeError, match=msg):
            dt_tz - ts
        msg = "can't subtract offset-naive and offset-aware datetimes"
        with pytest.raises(TypeError, match=msg):
            dt_tz - dt
        msg = "can't subtract offset-naive and offset-aware datetimes"
        with pytest.raises(TypeError, match=msg):
            dt - dt_tz
        msg = "Cannot subtract tz-naive and tz-aware datetime-like objects."
        with pytest.raises(TypeError, match=msg):
            ts - dt_tz
        with pytest.raises(TypeError, match=msg):
            ts_tz2 - ts
        with pytest.raises(TypeError, match=msg):
            ts_tz2 - dt

        msg = "Cannot subtract tz-naive and tz-aware"
        # with dti
        with pytest.raises(TypeError, match=msg):
            dti - ts_tz
        with pytest.raises(TypeError, match=msg):
            dti_tz - ts

        result = dti_tz - dt_tz
        expected = TimedeltaIndex(["0 days", "1 days", "2 days"])
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(result, expected)

        result = dt_tz - dti_tz
        expected = TimedeltaIndex(["0 days", "-1 days", "-2 days"])
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(result, expected)

        result = dti_tz - ts_tz
        expected = TimedeltaIndex(["0 days", "1 days", "2 days"])
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(result, expected)

        result = ts_tz - dti_tz
        expected = TimedeltaIndex(["0 days", "-1 days", "-2 days"])
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(result, expected)

        result = td - td
        expected = Timedelta("0 days")
        _check(result, expected)

        result = dti_tz - td
        expected = DatetimeIndex(
            ["20121231", "20130101", "20130102"], tz="US/Eastern"
        ).as_unit("ns")
        expected = tm.box_expected(expected, box_with_array)
        tm.assert_equal(result, expected)

    def test_dti_tdi_numeric_ops(self):
        # These are normally union/diff set-like ops
        tdi = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
        dti = pd.date_range("20130101", periods=3, name="bar")

        result = tdi - tdi
        expected = TimedeltaIndex(["0 days", NaT, "0 days"], name="foo")
        tm.assert_index_equal(result, expected)

        result = tdi + tdi
        expected = TimedeltaIndex(["2 days", NaT, "4 days"], name="foo")
        tm.assert_index_equal(result, expected)

        result = dti - tdi  # name will be reset
        expected = DatetimeIndex(["20121231", NaT, "20130101"], dtype="M8[ns]")
        tm.assert_index_equal(result, expected)

    def test_addition_ops(self):
        # with datetimes/timedelta and tdi/dti
        tdi = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
        dti = pd.date_range("20130101", periods=3, name="bar")
        td = Timedelta("1 days")
        dt = Timestamp("20130101")

        result = tdi + dt
        expected = DatetimeIndex(
            ["20130102", NaT, "20130103"], dtype="M8[ns]", name="foo"
        )
        tm.assert_index_equal(result, expected)

        result = dt + tdi
        expected = DatetimeIndex(
            ["20130102", NaT, "20130103"], dtype="M8[ns]", name="foo"
        )
        tm.assert_index_equal(result, expected)

        result = td + tdi
        expected = TimedeltaIndex(["2 days", NaT, "3 days"], name="foo")
        tm.assert_index_equal(result, expected)

        result = tdi + td
        expected = TimedeltaIndex(["2 days", NaT, "3 days"], name="foo")
        tm.assert_index_equal(result, expected)

        # unequal length
        msg = "cannot add indices of unequal length"
        with pytest.raises(ValueError, match=msg):
            tdi + dti[0:1]
        with pytest.raises(ValueError, match=msg):
            tdi[0:1] + dti

        # random indexes
        msg = "Addition/subtraction of integers and integer-arrays"
        with pytest.raises(TypeError, match=msg):
            tdi + Index([1, 2, 3], dtype=np.int64)

        # this is a union!
        # FIXME: don't leave commented-out
        # pytest.raises(TypeError, lambda : Index([1,2,3]) + tdi)

        result = tdi + dti  # name will be reset
        expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[ns]")
        tm.assert_index_equal(result, expected)

        result = dti + tdi  # name will be reset
        expected = DatetimeIndex(["20130102", NaT, "20130105"], dtype="M8[ns]")
        tm.assert_index_equal(result, expected)

        result = dt + td
        expected = Timestamp("20130102")
        assert result == expected

        result = td + dt
        expected = Timestamp("20130102")
        assert result == expected

    # TODO: Needs more informative name, probably split up into
    # more targeted tests
    @pytest.mark.parametrize("freq", ["D", "B"])
    def test_timedelta(self, freq):
        index = pd.date_range("1/1/2000", periods=50, freq=freq)

        # shifting by +1/-1 day should round-trip (freq re-inferred)
        shifted = index + timedelta(1)
        back = shifted + timedelta(-1)
        back = back._with_freq("infer")
        tm.assert_index_equal(index, back)

        if freq == "D":
            expected = pd.tseries.offsets.Day(1)
            assert index.freq == expected
            assert shifted.freq == expected
            assert back.freq == expected
        else:  # freq == 'B'
            # shifting a business-day index by a non-business offset
            # invalidates the freq
            assert index.freq == pd.tseries.offsets.BusinessDay(1)
            assert shifted.freq is None
            assert back.freq == pd.tseries.offsets.BusinessDay(1)

        result = index - timedelta(1)
        expected = index + timedelta(-1)
        tm.assert_index_equal(result, expected)

    def test_timedelta_tick_arithmetic(self):
        # GH#4134, buggy with timedeltas
        rng = pd.date_range("2013", "2014")
        s = Series(rng)
        result1 = rng - offsets.Hour(1)
        result2 = DatetimeIndex(s - np.timedelta64(100000000))
        result3 = rng - np.timedelta64(100000000)
        result4 = DatetimeIndex(s - offsets.Hour(1))

        # Index and Series paths must agree; Tick subtraction keeps freq
        assert result1.freq == rng.freq
        result1 = result1._with_freq(None)
        tm.assert_index_equal(result1, result4)

        assert result3.freq == rng.freq
        result3 = result3._with_freq(None)
        tm.assert_index_equal(result2, result3)

    def test_tda_add_sub_index(self):
        # Check that TimedeltaArray defers to Index on arithmetic ops
        tdi = TimedeltaIndex(["1 days", NaT, "2 days"])
        tda = tdi.array

        dti = pd.date_range("1999-12-31", periods=3, freq="D")

        result = tda + dti
        expected = tdi + dti
        tm.assert_index_equal(result, expected)

        result = tda + tdi
        expected = tdi + tdi
        tm.assert_index_equal(result, expected)

        result = tda - tdi
        expected = tdi - tdi
        tm.assert_index_equal(result, expected)

    def test_tda_add_dt64_object_array(self, box_with_array, tz_naive_fixture):
        # Result should be cast back to DatetimeArray
        box = box_with_array

        dti = pd.date_range("2016-01-01", periods=3, tz=tz_naive_fixture)
        dti = dti._with_freq(None)
        tdi = dti - dti  # all-zero timedeltas, so obj + other == other

        obj = tm.box_expected(tdi, box)
        other = tm.box_expected(dti, box)

        # object-dtype arithmetic is elementwise, hence the warning
        with tm.assert_produces_warning(PerformanceWarning):
            result = obj + other.astype(object)
        tm.assert_equal(result, other.astype(object))

    # -------------------------------------------------------------
    # Binary operations TimedeltaIndex and timedelta-like

    def test_tdi_iadd_timedeltalike(self, two_hours, box_with_array):
        # only test adding/sub offsets as + is now numeric
        rng = timedelta_range("1 days", "10 days")
        expected = timedelta_range("1 days 02:00:00", "10 days 02:00:00", freq="D")

        rng = tm.box_expected(rng, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        orig_rng = rng
        rng += two_hours
        tm.assert_equal(rng, expected)
        if box_with_array is not Index:
            # Check that operation is actually inplace
            tm.assert_equal(orig_rng, expected)

    def test_tdi_isub_timedeltalike(self, two_hours, box_with_array):
        # only test adding/sub offsets as - is now numeric
        rng = timedelta_range("1 days", "10 days")
        expected = timedelta_range("0 days 22:00:00", "9 days 22:00:00")

        rng = tm.box_expected(rng, box_with_array)
        expected = tm.box_expected(expected, box_with_array)

        orig_rng = rng
        rng -= two_hours
        tm.assert_equal(rng, expected)
        if box_with_array is not Index:
            # Check that operation is actually inplace
            tm.assert_equal(orig_rng, expected)

    # -------------------------------------------------------------

    def test_tdi_ops_attributes(self):
        # freq propagation through +/-/*/'/'/neg/abs
        rng = timedelta_range("2 days", periods=5, freq="2D", name="x")

        result = rng + 1 * rng.freq
        exp = timedelta_range("4 days", periods=5, freq="2D", name="x")
        tm.assert_index_equal(result, exp)
        assert result.freq == "2D"

        result = rng - 2 * rng.freq
        exp = timedelta_range("-2 days", periods=5, freq="2D", name="x")
        tm.assert_index_equal(result, exp)
        assert result.freq == "2D"

        result = rng * 2
        exp = timedelta_range("4 days", periods=5, freq="4D", name="x")
        tm.assert_index_equal(result, exp)
        assert result.freq == "4D"

        result = rng / 2
        exp = timedelta_range("1 days", periods=5, freq="D", name="x")
        tm.assert_index_equal(result, exp)
        assert result.freq == "D"

        result = -rng
        exp = timedelta_range("-2 days", periods=5, freq="-2D", name="x")
        tm.assert_index_equal(result, exp)
        assert result.freq == "-2D"

        rng = timedelta_range("-2 days", periods=5, freq="D", name="x")

        result = abs(rng)
        # abs is not monotonic -> freq is dropped
        exp = TimedeltaIndex(
            ["2 days", "1 days", "0 days", "1 days", "2 days"], name="x"
        )
        tm.assert_index_equal(result, exp)
        assert result.freq is None
666
+
667
+
668
class TestAddSubNaTMasking:
    # TODO: parametrize over boxes

    @pytest.mark.parametrize("str_ts", ["1950-01-01", "1980-01-01"])
    def test_tdarr_add_timestamp_nat_masking(self, box_with_array, str_ts):
        # GH#17991 checking for overflow-masking with NaT
        # The first element would overflow datetime64[ns] bounds when added
        # to the timestamp; the NaT element must stay NaT, not raise.
        tdinat = pd.to_timedelta(["24658 days 11:15:00", "NaT"])
        tdobj = tm.box_expected(tdinat, box_with_array)

        ts = Timestamp(str_ts)
        ts_variants = [
            ts,
            ts.to_pydatetime(),
            ts.to_datetime64().astype("datetime64[ns]"),
            ts.to_datetime64().astype("datetime64[D]"),
        ]

        for variant in ts_variants:
            res = tdobj + variant
            if box_with_array is DataFrame:
                assert res.iloc[1, 1] is NaT
            else:
                assert res[1] is NaT

    def test_tdi_add_overflow(self):
        # See GH#14068
        # preliminary test scalar analogue of vectorized tests below
        # TODO: Make raised error message more informative and test
        with pytest.raises(OutOfBoundsDatetime, match="10155196800000000000"):
            pd.to_timedelta(106580, "D") + Timestamp("2000")
        with pytest.raises(OutOfBoundsDatetime, match="10155196800000000000"):
            Timestamp("2000") + pd.to_timedelta(106580, "D")

        # one past NaT's internal int64 sentinel, so arithmetic overflows
        _NaT = NaT._value + 1
        msg = "Overflow in int64 addition"
        with pytest.raises(OverflowError, match=msg):
            pd.to_timedelta([106580], "D") + Timestamp("2000")
        with pytest.raises(OverflowError, match=msg):
            Timestamp("2000") + pd.to_timedelta([106580], "D")
        with pytest.raises(OverflowError, match=msg):
            pd.to_timedelta([_NaT]) - Timedelta("1 days")
        with pytest.raises(OverflowError, match=msg):
            pd.to_timedelta(["5 days", _NaT]) - Timedelta("1 days")
        with pytest.raises(OverflowError, match=msg):
            (
                pd.to_timedelta([_NaT, "5 days", "1 hours"])
                - pd.to_timedelta(["7 seconds", _NaT, "4 hours"])
            )

        # These should not overflow!
        exp = TimedeltaIndex([NaT])
        result = pd.to_timedelta([NaT]) - Timedelta("1 days")
        tm.assert_index_equal(result, exp)

        exp = TimedeltaIndex(["4 days", NaT])
        result = pd.to_timedelta(["5 days", NaT]) - Timedelta("1 days")
        tm.assert_index_equal(result, exp)

        exp = TimedeltaIndex([NaT, NaT, "5 hours"])
        result = pd.to_timedelta([NaT, "5 days", "1 hours"]) + pd.to_timedelta(
            ["7 seconds", NaT, "4 hours"]
        )
        tm.assert_index_equal(result, exp)
731
+
732
+
733
+ class TestTimedeltaArraylikeAddSubOps:
734
+ # Tests for timedelta64[ns] __add__, __sub__, __radd__, __rsub__
735
+
736
    def test_sub_nat_retain_unit(self):
        # Subtracting NaT from a non-nanosecond td64 Series keeps the unit.
        ser = pd.to_timedelta(Series(["00:00:01"])).astype("m8[s]")

        result = ser - NaT
        expected = Series([NaT], dtype="m8[s]")
        tm.assert_series_equal(result, expected)
742
+
743
    # TODO: moved from tests.indexes.timedeltas.test_arithmetic; needs
    # parametrization+de-duplication
    def test_timedelta_ops_with_missing_values(self):
        # setup: s1 + s1 == s2, and sn/dfn are the all-NaT analogues
        s1 = pd.to_timedelta(Series(["00:00:01"]))
        s2 = pd.to_timedelta(Series(["00:00:02"]))

        sn = pd.to_timedelta(Series([NaT], dtype="m8[ns]"))

        df1 = DataFrame(["00:00:01"]).apply(pd.to_timedelta)
        df2 = DataFrame(["00:00:02"]).apply(pd.to_timedelta)

        dfn = DataFrame([NaT._value]).apply(pd.to_timedelta)

        scalar1 = pd.to_timedelta("00:00:01")
        scalar2 = pd.to_timedelta("00:00:02")
        timedelta_NaT = pd.to_timedelta("NaT")

        # scalar <-> scalar
        actual = scalar1 + scalar1
        assert actual == scalar2
        actual = scalar2 - scalar1
        assert actual == scalar1

        # Series <-> Series
        actual = s1 + s1
        tm.assert_series_equal(actual, s2)
        actual = s2 - s1
        tm.assert_series_equal(actual, s1)

        # Series <-> scalar
        actual = s1 + scalar1
        tm.assert_series_equal(actual, s2)
        actual = scalar1 + s1
        tm.assert_series_equal(actual, s2)
        actual = s2 - scalar1
        tm.assert_series_equal(actual, s1)
        actual = -scalar1 + s2
        tm.assert_series_equal(actual, s1)

        # td64 NaT propagates
        actual = s1 + timedelta_NaT
        tm.assert_series_equal(actual, sn)
        actual = timedelta_NaT + s1
        tm.assert_series_equal(actual, sn)
        actual = s1 - timedelta_NaT
        tm.assert_series_equal(actual, sn)
        actual = -timedelta_NaT + s1
        tm.assert_series_equal(actual, sn)

        # np.nan is NOT treated as a missing timedelta
        msg = "unsupported operand type"
        with pytest.raises(TypeError, match=msg):
            s1 + np.nan
        with pytest.raises(TypeError, match=msg):
            np.nan + s1
        with pytest.raises(TypeError, match=msg):
            s1 - np.nan
        with pytest.raises(TypeError, match=msg):
            -np.nan + s1

        actual = s1 + NaT
        tm.assert_series_equal(actual, sn)
        actual = s2 - NaT
        tm.assert_series_equal(actual, sn)

        # Series <-> DataFrame broadcasts
        actual = s1 + df1
        tm.assert_frame_equal(actual, df2)
        actual = s2 - df1
        tm.assert_frame_equal(actual, df1)
        actual = df1 + s1
        tm.assert_frame_equal(actual, df2)
        actual = df2 - s1
        tm.assert_frame_equal(actual, df1)

        # DataFrame <-> DataFrame
        actual = df1 + df1
        tm.assert_frame_equal(actual, df2)
        actual = df2 - df1
        tm.assert_frame_equal(actual, df1)

        # DataFrame <-> scalar
        actual = df1 + scalar1
        tm.assert_frame_equal(actual, df2)
        actual = df2 - scalar1
        tm.assert_frame_equal(actual, df1)

        actual = df1 + timedelta_NaT
        tm.assert_frame_equal(actual, dfn)
        actual = df1 - timedelta_NaT
        tm.assert_frame_equal(actual, dfn)

        msg = "cannot subtract a datelike from|unsupported operand type"
        with pytest.raises(TypeError, match=msg):
            df1 + np.nan
        with pytest.raises(TypeError, match=msg):
            df1 - np.nan

        actual = df1 + NaT  # NaT is datetime, not timedelta
        tm.assert_frame_equal(actual, dfn)
        actual = df1 - NaT
        tm.assert_frame_equal(actual, dfn)
838
+
839
    # TODO: moved from tests.series.test_operators, needs splitting, cleanup,
    # de-duplication, box-parametrization...
    def test_operators_timedelta64(self):
        # series ops
        v1 = pd.date_range("2012-1-1", periods=3, freq="D")
        v2 = pd.date_range("2012-1-2", periods=3, freq="D")
        rs = Series(v2) - Series(v1)
        # one day, expressed in nanoseconds
        xp = Series(1e9 * 3600 * 24, rs.index).astype("int64").astype("timedelta64[ns]")
        tm.assert_series_equal(rs, xp)
        assert rs.dtype == "timedelta64[ns]"

        df = DataFrame({"A": v1})
        td = Series([timedelta(days=i) for i in range(3)])
        assert td.dtype == "timedelta64[ns]"

        # series on the rhs
        result = df["A"] - df["A"].shift()
        assert result.dtype == "timedelta64[ns]"

        result = df["A"] + td
        assert result.dtype == "M8[ns]"

        # scalar Timestamp on rhs
        maxa = df["A"].max()
        assert isinstance(maxa, Timestamp)

        resultb = df["A"] - df["A"].max()
        assert resultb.dtype == "timedelta64[ns]"

        # timestamp on lhs
        result = resultb + df["A"]
        values = [Timestamp("20111230"), Timestamp("20120101"), Timestamp("20120103")]
        expected = Series(values, dtype="M8[ns]", name="A")
        tm.assert_series_equal(result, expected)

        # datetimes on rhs
        result = df["A"] - datetime(2001, 1, 1)
        expected = Series([timedelta(days=4017 + i) for i in range(3)], name="A")
        tm.assert_series_equal(result, expected)
        assert result.dtype == "m8[ns]"

        d = datetime(2001, 1, 1, 3, 4)
        resulta = df["A"] - d
        assert resulta.dtype == "m8[ns]"

        # roundtrip
        resultb = resulta + d
        tm.assert_series_equal(df["A"], resultb)

        # timedeltas on rhs
        td = timedelta(days=1)
        resulta = df["A"] + td
        resultb = resulta - td
        tm.assert_series_equal(resultb, df["A"])
        assert resultb.dtype == "M8[ns]"

        # roundtrip
        td = timedelta(minutes=5, seconds=3)
        resulta = df["A"] + td
        resultb = resulta - td
        tm.assert_series_equal(df["A"], resultb)
        assert resultb.dtype == "M8[ns]"

        # inplace
        value = rs[2] + np.timedelta64(timedelta(minutes=5, seconds=1))
        rs[2] += np.timedelta64(timedelta(minutes=5, seconds=1))
        assert rs[2] == value
906
+
907
+ def test_timedelta64_ops_nat(self):
908
+ # GH 11349
909
+ timedelta_series = Series([NaT, Timedelta("1s")])
910
+ nat_series_dtype_timedelta = Series([NaT, NaT], dtype="timedelta64[ns]")
911
+ single_nat_dtype_timedelta = Series([NaT], dtype="timedelta64[ns]")
912
+
913
+ # subtraction
914
+ tm.assert_series_equal(timedelta_series - NaT, nat_series_dtype_timedelta)
915
+ tm.assert_series_equal(-NaT + timedelta_series, nat_series_dtype_timedelta)
916
+
917
+ tm.assert_series_equal(
918
+ timedelta_series - single_nat_dtype_timedelta, nat_series_dtype_timedelta
919
+ )
920
+ tm.assert_series_equal(
921
+ -single_nat_dtype_timedelta + timedelta_series, nat_series_dtype_timedelta
922
+ )
923
+
924
+ # addition
925
+ tm.assert_series_equal(
926
+ nat_series_dtype_timedelta + NaT, nat_series_dtype_timedelta
927
+ )
928
+ tm.assert_series_equal(
929
+ NaT + nat_series_dtype_timedelta, nat_series_dtype_timedelta
930
+ )
931
+
932
+ tm.assert_series_equal(
933
+ nat_series_dtype_timedelta + single_nat_dtype_timedelta,
934
+ nat_series_dtype_timedelta,
935
+ )
936
+ tm.assert_series_equal(
937
+ single_nat_dtype_timedelta + nat_series_dtype_timedelta,
938
+ nat_series_dtype_timedelta,
939
+ )
940
+
941
+ tm.assert_series_equal(timedelta_series + NaT, nat_series_dtype_timedelta)
942
+ tm.assert_series_equal(NaT + timedelta_series, nat_series_dtype_timedelta)
943
+
944
+ tm.assert_series_equal(
945
+ timedelta_series + single_nat_dtype_timedelta, nat_series_dtype_timedelta
946
+ )
947
+ tm.assert_series_equal(
948
+ single_nat_dtype_timedelta + timedelta_series, nat_series_dtype_timedelta
949
+ )
950
+
951
+ tm.assert_series_equal(
952
+ nat_series_dtype_timedelta + NaT, nat_series_dtype_timedelta
953
+ )
954
+ tm.assert_series_equal(
955
+ NaT + nat_series_dtype_timedelta, nat_series_dtype_timedelta
956
+ )
957
+
958
+ tm.assert_series_equal(
959
+ nat_series_dtype_timedelta + single_nat_dtype_timedelta,
960
+ nat_series_dtype_timedelta,
961
+ )
962
+ tm.assert_series_equal(
963
+ single_nat_dtype_timedelta + nat_series_dtype_timedelta,
964
+ nat_series_dtype_timedelta,
965
+ )
966
+
967
+ # multiplication
968
+ tm.assert_series_equal(
969
+ nat_series_dtype_timedelta * 1.0, nat_series_dtype_timedelta
970
+ )
971
+ tm.assert_series_equal(
972
+ 1.0 * nat_series_dtype_timedelta, nat_series_dtype_timedelta
973
+ )
974
+
975
+ tm.assert_series_equal(timedelta_series * 1, timedelta_series)
976
+ tm.assert_series_equal(1 * timedelta_series, timedelta_series)
977
+
978
+ tm.assert_series_equal(timedelta_series * 1.5, Series([NaT, Timedelta("1.5s")]))
979
+ tm.assert_series_equal(1.5 * timedelta_series, Series([NaT, Timedelta("1.5s")]))
980
+
981
+ tm.assert_series_equal(timedelta_series * np.nan, nat_series_dtype_timedelta)
982
+ tm.assert_series_equal(np.nan * timedelta_series, nat_series_dtype_timedelta)
983
+
984
+ # division
985
+ tm.assert_series_equal(timedelta_series / 2, Series([NaT, Timedelta("0.5s")]))
986
+ tm.assert_series_equal(timedelta_series / 2.0, Series([NaT, Timedelta("0.5s")]))
987
+ tm.assert_series_equal(timedelta_series / np.nan, nat_series_dtype_timedelta)
988
+
989
+ # -------------------------------------------------------------
990
+ # Binary operations td64 arraylike and datetime-like
991
+
992
+ @pytest.mark.parametrize("cls", [Timestamp, datetime, np.datetime64])
993
+ def test_td64arr_add_sub_datetimelike_scalar(
994
+ self, cls, box_with_array, tz_naive_fixture
995
+ ):
996
+ # GH#11925, GH#29558, GH#23215
997
+ tz = tz_naive_fixture
998
+
999
+ dt_scalar = Timestamp("2012-01-01", tz=tz)
1000
+ if cls is datetime:
1001
+ ts = dt_scalar.to_pydatetime()
1002
+ elif cls is np.datetime64:
1003
+ if tz_naive_fixture is not None:
1004
+ pytest.skip(f"{cls} doesn support {tz_naive_fixture}")
1005
+ ts = dt_scalar.to_datetime64()
1006
+ else:
1007
+ ts = dt_scalar
1008
+
1009
+ tdi = timedelta_range("1 day", periods=3)
1010
+ expected = pd.date_range("2012-01-02", periods=3, tz=tz)
1011
+
1012
+ tdarr = tm.box_expected(tdi, box_with_array)
1013
+ expected = tm.box_expected(expected, box_with_array)
1014
+
1015
+ tm.assert_equal(ts + tdarr, expected)
1016
+ tm.assert_equal(tdarr + ts, expected)
1017
+
1018
+ expected2 = pd.date_range("2011-12-31", periods=3, freq="-1D", tz=tz)
1019
+ expected2 = tm.box_expected(expected2, box_with_array)
1020
+
1021
+ tm.assert_equal(ts - tdarr, expected2)
1022
+ tm.assert_equal(ts + (-tdarr), expected2)
1023
+
1024
+ msg = "cannot subtract a datelike"
1025
+ with pytest.raises(TypeError, match=msg):
1026
+ tdarr - ts
1027
+
1028
+ def test_td64arr_add_datetime64_nat(self, box_with_array):
1029
+ # GH#23215
1030
+ other = np.datetime64("NaT")
1031
+
1032
+ tdi = timedelta_range("1 day", periods=3)
1033
+ expected = DatetimeIndex(["NaT", "NaT", "NaT"], dtype="M8[ns]")
1034
+
1035
+ tdser = tm.box_expected(tdi, box_with_array)
1036
+ expected = tm.box_expected(expected, box_with_array)
1037
+
1038
+ tm.assert_equal(tdser + other, expected)
1039
+ tm.assert_equal(other + tdser, expected)
1040
+
1041
+ def test_td64arr_sub_dt64_array(self, box_with_array):
1042
+ dti = pd.date_range("2016-01-01", periods=3)
1043
+ tdi = TimedeltaIndex(["-1 Day"] * 3)
1044
+ dtarr = dti.values
1045
+ expected = DatetimeIndex(dtarr) - tdi
1046
+
1047
+ tdi = tm.box_expected(tdi, box_with_array)
1048
+ expected = tm.box_expected(expected, box_with_array)
1049
+
1050
+ msg = "cannot subtract a datelike from"
1051
+ with pytest.raises(TypeError, match=msg):
1052
+ tdi - dtarr
1053
+
1054
+ # TimedeltaIndex.__rsub__
1055
+ result = dtarr - tdi
1056
+ tm.assert_equal(result, expected)
1057
+
1058
+ def test_td64arr_add_dt64_array(self, box_with_array):
1059
+ dti = pd.date_range("2016-01-01", periods=3)
1060
+ tdi = TimedeltaIndex(["-1 Day"] * 3)
1061
+ dtarr = dti.values
1062
+ expected = DatetimeIndex(dtarr) + tdi
1063
+
1064
+ tdi = tm.box_expected(tdi, box_with_array)
1065
+ expected = tm.box_expected(expected, box_with_array)
1066
+
1067
+ result = tdi + dtarr
1068
+ tm.assert_equal(result, expected)
1069
+ result = dtarr + tdi
1070
+ tm.assert_equal(result, expected)
1071
+
1072
+ # ------------------------------------------------------------------
1073
+ # Invalid __add__/__sub__ operations
1074
+
1075
+ @pytest.mark.parametrize("pi_freq", ["D", "W", "Q", "h"])
1076
+ @pytest.mark.parametrize("tdi_freq", [None, "h"])
1077
+ def test_td64arr_sub_periodlike(
1078
+ self, box_with_array, box_with_array2, tdi_freq, pi_freq
1079
+ ):
1080
+ # GH#20049 subtracting PeriodIndex should raise TypeError
1081
+ tdi = TimedeltaIndex(["1 hours", "2 hours"], freq=tdi_freq)
1082
+ dti = Timestamp("2018-03-07 17:16:40") + tdi
1083
+ pi = dti.to_period(pi_freq)
1084
+ per = pi[0]
1085
+
1086
+ tdi = tm.box_expected(tdi, box_with_array)
1087
+ pi = tm.box_expected(pi, box_with_array2)
1088
+ msg = "cannot subtract|unsupported operand type"
1089
+ with pytest.raises(TypeError, match=msg):
1090
+ tdi - pi
1091
+
1092
+ # GH#13078 subtraction of Period scalar not supported
1093
+ with pytest.raises(TypeError, match=msg):
1094
+ tdi - per
1095
+
1096
+ @pytest.mark.parametrize(
1097
+ "other",
1098
+ [
1099
+ # GH#12624 for str case
1100
+ "a",
1101
+ # GH#19123
1102
+ 1,
1103
+ 1.5,
1104
+ np.array(2),
1105
+ ],
1106
+ )
1107
+ def test_td64arr_addsub_numeric_scalar_invalid(self, box_with_array, other):
1108
+ # vector-like others are tested in test_td64arr_add_sub_numeric_arr_invalid
1109
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
1110
+ tdarr = tm.box_expected(tdser, box_with_array)
1111
+
1112
+ assert_invalid_addsub_type(tdarr, other)
1113
+
1114
+ @pytest.mark.parametrize(
1115
+ "vec",
1116
+ [
1117
+ np.array([1, 2, 3]),
1118
+ Index([1, 2, 3]),
1119
+ Series([1, 2, 3]),
1120
+ DataFrame([[1, 2, 3]]),
1121
+ ],
1122
+ ids=lambda x: type(x).__name__,
1123
+ )
1124
+ def test_td64arr_addsub_numeric_arr_invalid(
1125
+ self, box_with_array, vec, any_real_numpy_dtype
1126
+ ):
1127
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
1128
+ tdarr = tm.box_expected(tdser, box_with_array)
1129
+
1130
+ vector = vec.astype(any_real_numpy_dtype)
1131
+ assert_invalid_addsub_type(tdarr, vector)
1132
+
1133
+ def test_td64arr_add_sub_int(self, box_with_array, one):
1134
+ # Variants of `one` for #19012, deprecated GH#22535
1135
+ rng = timedelta_range("1 days 09:00:00", freq="h", periods=10)
1136
+ tdarr = tm.box_expected(rng, box_with_array)
1137
+
1138
+ msg = "Addition/subtraction of integers"
1139
+ assert_invalid_addsub_type(tdarr, one, msg)
1140
+
1141
+ # TODO: get inplace ops into assert_invalid_addsub_type
1142
+ with pytest.raises(TypeError, match=msg):
1143
+ tdarr += one
1144
+ with pytest.raises(TypeError, match=msg):
1145
+ tdarr -= one
1146
+
1147
+ def test_td64arr_add_sub_integer_array(self, box_with_array):
1148
+ # GH#19959, deprecated GH#22535
1149
+ # GH#22696 for DataFrame case, check that we don't dispatch to numpy
1150
+ # implementation, which treats int64 as m8[ns]
1151
+ box = box_with_array
1152
+ xbox = np.ndarray if box is pd.array else box
1153
+
1154
+ rng = timedelta_range("1 days 09:00:00", freq="h", periods=3)
1155
+ tdarr = tm.box_expected(rng, box)
1156
+ other = tm.box_expected([4, 3, 2], xbox)
1157
+
1158
+ msg = "Addition/subtraction of integers and integer-arrays"
1159
+ assert_invalid_addsub_type(tdarr, other, msg)
1160
+
1161
+ def test_td64arr_addsub_integer_array_no_freq(self, box_with_array):
1162
+ # GH#19959
1163
+ box = box_with_array
1164
+ xbox = np.ndarray if box is pd.array else box
1165
+
1166
+ tdi = TimedeltaIndex(["1 Day", "NaT", "3 Hours"])
1167
+ tdarr = tm.box_expected(tdi, box)
1168
+ other = tm.box_expected([14, -1, 16], xbox)
1169
+
1170
+ msg = "Addition/subtraction of integers"
1171
+ assert_invalid_addsub_type(tdarr, other, msg)
1172
+
1173
+ # ------------------------------------------------------------------
1174
+ # Operations with timedelta-like others
1175
+
1176
+ def test_td64arr_add_sub_td64_array(self, box_with_array):
1177
+ box = box_with_array
1178
+ dti = pd.date_range("2016-01-01", periods=3)
1179
+ tdi = dti - dti.shift(1)
1180
+ tdarr = tdi.values
1181
+
1182
+ expected = 2 * tdi
1183
+ tdi = tm.box_expected(tdi, box)
1184
+ expected = tm.box_expected(expected, box)
1185
+
1186
+ result = tdi + tdarr
1187
+ tm.assert_equal(result, expected)
1188
+ result = tdarr + tdi
1189
+ tm.assert_equal(result, expected)
1190
+
1191
+ expected_sub = 0 * tdi
1192
+ result = tdi - tdarr
1193
+ tm.assert_equal(result, expected_sub)
1194
+ result = tdarr - tdi
1195
+ tm.assert_equal(result, expected_sub)
1196
+
1197
+ def test_td64arr_add_sub_tdi(self, box_with_array, names):
1198
+ # GH#17250 make sure result dtype is correct
1199
+ # GH#19043 make sure names are propagated correctly
1200
+ box = box_with_array
1201
+ exname = get_expected_name(box, names)
1202
+
1203
+ tdi = TimedeltaIndex(["0 days", "1 day"], name=names[1])
1204
+ tdi = np.array(tdi) if box in [tm.to_array, pd.array] else tdi
1205
+ ser = Series([Timedelta(hours=3), Timedelta(hours=4)], name=names[0])
1206
+ expected = Series([Timedelta(hours=3), Timedelta(days=1, hours=4)], name=exname)
1207
+
1208
+ ser = tm.box_expected(ser, box)
1209
+ expected = tm.box_expected(expected, box)
1210
+
1211
+ result = tdi + ser
1212
+ tm.assert_equal(result, expected)
1213
+ assert_dtype(result, "timedelta64[ns]")
1214
+
1215
+ result = ser + tdi
1216
+ tm.assert_equal(result, expected)
1217
+ assert_dtype(result, "timedelta64[ns]")
1218
+
1219
+ expected = Series(
1220
+ [Timedelta(hours=-3), Timedelta(days=1, hours=-4)], name=exname
1221
+ )
1222
+ expected = tm.box_expected(expected, box)
1223
+
1224
+ result = tdi - ser
1225
+ tm.assert_equal(result, expected)
1226
+ assert_dtype(result, "timedelta64[ns]")
1227
+
1228
+ result = ser - tdi
1229
+ tm.assert_equal(result, -expected)
1230
+ assert_dtype(result, "timedelta64[ns]")
1231
+
1232
+ @pytest.mark.parametrize("tdnat", [np.timedelta64("NaT"), NaT])
1233
+ def test_td64arr_add_sub_td64_nat(self, box_with_array, tdnat):
1234
+ # GH#18808, GH#23320 special handling for timedelta64("NaT")
1235
+ box = box_with_array
1236
+ tdi = TimedeltaIndex([NaT, Timedelta("1s")])
1237
+ expected = TimedeltaIndex(["NaT"] * 2)
1238
+
1239
+ obj = tm.box_expected(tdi, box)
1240
+ expected = tm.box_expected(expected, box)
1241
+
1242
+ result = obj + tdnat
1243
+ tm.assert_equal(result, expected)
1244
+ result = tdnat + obj
1245
+ tm.assert_equal(result, expected)
1246
+ result = obj - tdnat
1247
+ tm.assert_equal(result, expected)
1248
+ result = tdnat - obj
1249
+ tm.assert_equal(result, expected)
1250
+
1251
+ def test_td64arr_add_timedeltalike(self, two_hours, box_with_array):
1252
+ # only test adding/sub offsets as + is now numeric
1253
+ # GH#10699 for Tick cases
1254
+ box = box_with_array
1255
+ rng = timedelta_range("1 days", "10 days")
1256
+ expected = timedelta_range("1 days 02:00:00", "10 days 02:00:00", freq="D")
1257
+ rng = tm.box_expected(rng, box)
1258
+ expected = tm.box_expected(expected, box)
1259
+
1260
+ result = rng + two_hours
1261
+ tm.assert_equal(result, expected)
1262
+
1263
+ result = two_hours + rng
1264
+ tm.assert_equal(result, expected)
1265
+
1266
+ def test_td64arr_sub_timedeltalike(self, two_hours, box_with_array):
1267
+ # only test adding/sub offsets as - is now numeric
1268
+ # GH#10699 for Tick cases
1269
+ box = box_with_array
1270
+ rng = timedelta_range("1 days", "10 days")
1271
+ expected = timedelta_range("0 days 22:00:00", "9 days 22:00:00")
1272
+
1273
+ rng = tm.box_expected(rng, box)
1274
+ expected = tm.box_expected(expected, box)
1275
+
1276
+ result = rng - two_hours
1277
+ tm.assert_equal(result, expected)
1278
+
1279
+ result = two_hours - rng
1280
+ tm.assert_equal(result, -expected)
1281
+
1282
+ # ------------------------------------------------------------------
1283
+ # __add__/__sub__ with DateOffsets and arrays of DateOffsets
1284
+
1285
+ def test_td64arr_add_sub_offset_index(self, names, box_with_array):
1286
+ # GH#18849, GH#19744
1287
+ box = box_with_array
1288
+ exname = get_expected_name(box, names)
1289
+
1290
+ tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"], name=names[0])
1291
+ other = Index([offsets.Hour(n=1), offsets.Minute(n=-2)], name=names[1])
1292
+ other = np.array(other) if box in [tm.to_array, pd.array] else other
1293
+
1294
+ expected = TimedeltaIndex(
1295
+ [tdi[n] + other[n] for n in range(len(tdi))], freq="infer", name=exname
1296
+ )
1297
+ expected_sub = TimedeltaIndex(
1298
+ [tdi[n] - other[n] for n in range(len(tdi))], freq="infer", name=exname
1299
+ )
1300
+
1301
+ tdi = tm.box_expected(tdi, box)
1302
+ expected = tm.box_expected(expected, box).astype(object, copy=False)
1303
+ expected_sub = tm.box_expected(expected_sub, box).astype(object, copy=False)
1304
+
1305
+ with tm.assert_produces_warning(PerformanceWarning):
1306
+ res = tdi + other
1307
+ tm.assert_equal(res, expected)
1308
+
1309
+ with tm.assert_produces_warning(PerformanceWarning):
1310
+ res2 = other + tdi
1311
+ tm.assert_equal(res2, expected)
1312
+
1313
+ with tm.assert_produces_warning(PerformanceWarning):
1314
+ res_sub = tdi - other
1315
+ tm.assert_equal(res_sub, expected_sub)
1316
+
1317
+ def test_td64arr_add_sub_offset_array(self, box_with_array):
1318
+ # GH#18849, GH#18824
1319
+ box = box_with_array
1320
+ tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"])
1321
+ other = np.array([offsets.Hour(n=1), offsets.Minute(n=-2)])
1322
+
1323
+ expected = TimedeltaIndex(
1324
+ [tdi[n] + other[n] for n in range(len(tdi))], freq="infer"
1325
+ )
1326
+ expected_sub = TimedeltaIndex(
1327
+ [tdi[n] - other[n] for n in range(len(tdi))], freq="infer"
1328
+ )
1329
+
1330
+ tdi = tm.box_expected(tdi, box)
1331
+ expected = tm.box_expected(expected, box).astype(object)
1332
+
1333
+ with tm.assert_produces_warning(PerformanceWarning):
1334
+ res = tdi + other
1335
+ tm.assert_equal(res, expected)
1336
+
1337
+ with tm.assert_produces_warning(PerformanceWarning):
1338
+ res2 = other + tdi
1339
+ tm.assert_equal(res2, expected)
1340
+
1341
+ expected_sub = tm.box_expected(expected_sub, box_with_array).astype(object)
1342
+ with tm.assert_produces_warning(PerformanceWarning):
1343
+ res_sub = tdi - other
1344
+ tm.assert_equal(res_sub, expected_sub)
1345
+
1346
+ def test_td64arr_with_offset_series(self, names, box_with_array):
1347
+ # GH#18849
1348
+ box = box_with_array
1349
+ box2 = Series if box in [Index, tm.to_array, pd.array] else box
1350
+ exname = get_expected_name(box, names)
1351
+
1352
+ tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"], name=names[0])
1353
+ other = Series([offsets.Hour(n=1), offsets.Minute(n=-2)], name=names[1])
1354
+
1355
+ expected_add = Series(
1356
+ [tdi[n] + other[n] for n in range(len(tdi))], name=exname, dtype=object
1357
+ )
1358
+ obj = tm.box_expected(tdi, box)
1359
+ expected_add = tm.box_expected(expected_add, box2).astype(object)
1360
+
1361
+ with tm.assert_produces_warning(PerformanceWarning):
1362
+ res = obj + other
1363
+ tm.assert_equal(res, expected_add)
1364
+
1365
+ with tm.assert_produces_warning(PerformanceWarning):
1366
+ res2 = other + obj
1367
+ tm.assert_equal(res2, expected_add)
1368
+
1369
+ expected_sub = Series(
1370
+ [tdi[n] - other[n] for n in range(len(tdi))], name=exname, dtype=object
1371
+ )
1372
+ expected_sub = tm.box_expected(expected_sub, box2).astype(object)
1373
+
1374
+ with tm.assert_produces_warning(PerformanceWarning):
1375
+ res3 = obj - other
1376
+ tm.assert_equal(res3, expected_sub)
1377
+
1378
+ @pytest.mark.parametrize("obox", [np.array, Index, Series])
1379
+ def test_td64arr_addsub_anchored_offset_arraylike(self, obox, box_with_array):
1380
+ # GH#18824
1381
+ tdi = TimedeltaIndex(["1 days 00:00:00", "3 days 04:00:00"])
1382
+ tdi = tm.box_expected(tdi, box_with_array)
1383
+
1384
+ anchored = obox([offsets.MonthEnd(), offsets.Day(n=2)])
1385
+
1386
+ # addition/subtraction ops with anchored offsets should issue
1387
+ # a PerformanceWarning and _then_ raise a TypeError.
1388
+ msg = "has incorrect type|cannot add the type MonthEnd"
1389
+ with pytest.raises(TypeError, match=msg):
1390
+ with tm.assert_produces_warning(PerformanceWarning):
1391
+ tdi + anchored
1392
+ with pytest.raises(TypeError, match=msg):
1393
+ with tm.assert_produces_warning(PerformanceWarning):
1394
+ anchored + tdi
1395
+ with pytest.raises(TypeError, match=msg):
1396
+ with tm.assert_produces_warning(PerformanceWarning):
1397
+ tdi - anchored
1398
+ with pytest.raises(TypeError, match=msg):
1399
+ with tm.assert_produces_warning(PerformanceWarning):
1400
+ anchored - tdi
1401
+
1402
+ # ------------------------------------------------------------------
1403
+ # Unsorted
1404
+
1405
+ def test_td64arr_add_sub_object_array(self, box_with_array):
1406
+ box = box_with_array
1407
+ xbox = np.ndarray if box is pd.array else box
1408
+
1409
+ tdi = timedelta_range("1 day", periods=3, freq="D")
1410
+ tdarr = tm.box_expected(tdi, box)
1411
+
1412
+ other = np.array([Timedelta(days=1), offsets.Day(2), Timestamp("2000-01-04")])
1413
+
1414
+ with tm.assert_produces_warning(PerformanceWarning):
1415
+ result = tdarr + other
1416
+
1417
+ expected = Index(
1418
+ [Timedelta(days=2), Timedelta(days=4), Timestamp("2000-01-07")]
1419
+ )
1420
+ expected = tm.box_expected(expected, xbox).astype(object)
1421
+ tm.assert_equal(result, expected)
1422
+
1423
+ msg = "unsupported operand type|cannot subtract a datelike"
1424
+ with pytest.raises(TypeError, match=msg):
1425
+ with tm.assert_produces_warning(PerformanceWarning):
1426
+ tdarr - other
1427
+
1428
+ with tm.assert_produces_warning(PerformanceWarning):
1429
+ result = other - tdarr
1430
+
1431
+ expected = Index([Timedelta(0), Timedelta(0), Timestamp("2000-01-01")])
1432
+ expected = tm.box_expected(expected, xbox).astype(object)
1433
+ tm.assert_equal(result, expected)
1434
+
1435
+
1436
+ class TestTimedeltaArraylikeMulDivOps:
1437
+ # Tests for timedelta64[ns]
1438
+ # __mul__, __rmul__, __div__, __rdiv__, __floordiv__, __rfloordiv__
1439
+
1440
+ # ------------------------------------------------------------------
1441
+ # Multiplication
1442
+ # organized with scalar others first, then array-like
1443
+
1444
+ def test_td64arr_mul_int(self, box_with_array):
1445
+ idx = TimedeltaIndex(np.arange(5, dtype="int64"))
1446
+ idx = tm.box_expected(idx, box_with_array)
1447
+
1448
+ result = idx * 1
1449
+ tm.assert_equal(result, idx)
1450
+
1451
+ result = 1 * idx
1452
+ tm.assert_equal(result, idx)
1453
+
1454
+ def test_td64arr_mul_tdlike_scalar_raises(self, two_hours, box_with_array):
1455
+ rng = timedelta_range("1 days", "10 days", name="foo")
1456
+ rng = tm.box_expected(rng, box_with_array)
1457
+ msg = "argument must be an integer|cannot use operands with types dtype"
1458
+ with pytest.raises(TypeError, match=msg):
1459
+ rng * two_hours
1460
+
1461
+ def test_tdi_mul_int_array_zerodim(self, box_with_array):
1462
+ rng5 = np.arange(5, dtype="int64")
1463
+ idx = TimedeltaIndex(rng5)
1464
+ expected = TimedeltaIndex(rng5 * 5)
1465
+
1466
+ idx = tm.box_expected(idx, box_with_array)
1467
+ expected = tm.box_expected(expected, box_with_array)
1468
+
1469
+ result = idx * np.array(5, dtype="int64")
1470
+ tm.assert_equal(result, expected)
1471
+
1472
+ def test_tdi_mul_int_array(self, box_with_array):
1473
+ rng5 = np.arange(5, dtype="int64")
1474
+ idx = TimedeltaIndex(rng5)
1475
+ expected = TimedeltaIndex(rng5**2)
1476
+
1477
+ idx = tm.box_expected(idx, box_with_array)
1478
+ expected = tm.box_expected(expected, box_with_array)
1479
+
1480
+ result = idx * rng5
1481
+ tm.assert_equal(result, expected)
1482
+
1483
+ def test_tdi_mul_int_series(self, box_with_array):
1484
+ box = box_with_array
1485
+ xbox = Series if box in [Index, tm.to_array, pd.array] else box
1486
+
1487
+ idx = TimedeltaIndex(np.arange(5, dtype="int64"))
1488
+ expected = TimedeltaIndex(np.arange(5, dtype="int64") ** 2)
1489
+
1490
+ idx = tm.box_expected(idx, box)
1491
+ expected = tm.box_expected(expected, xbox)
1492
+
1493
+ result = idx * Series(np.arange(5, dtype="int64"))
1494
+ tm.assert_equal(result, expected)
1495
+
1496
+ def test_tdi_mul_float_series(self, box_with_array):
1497
+ box = box_with_array
1498
+ xbox = Series if box in [Index, tm.to_array, pd.array] else box
1499
+
1500
+ idx = TimedeltaIndex(np.arange(5, dtype="int64"))
1501
+ idx = tm.box_expected(idx, box)
1502
+
1503
+ rng5f = np.arange(5, dtype="float64")
1504
+ expected = TimedeltaIndex(rng5f * (rng5f + 1.0))
1505
+ expected = tm.box_expected(expected, xbox)
1506
+
1507
+ result = idx * Series(rng5f + 1.0)
1508
+ tm.assert_equal(result, expected)
1509
+
1510
+ # TODO: Put Series/DataFrame in others?
1511
+ @pytest.mark.parametrize(
1512
+ "other",
1513
+ [
1514
+ np.arange(1, 11),
1515
+ Index(np.arange(1, 11), np.int64),
1516
+ Index(range(1, 11), np.uint64),
1517
+ Index(range(1, 11), np.float64),
1518
+ pd.RangeIndex(1, 11),
1519
+ ],
1520
+ ids=lambda x: type(x).__name__,
1521
+ )
1522
+ def test_tdi_rmul_arraylike(self, other, box_with_array):
1523
+ box = box_with_array
1524
+
1525
+ tdi = TimedeltaIndex(["1 Day"] * 10)
1526
+ expected = timedelta_range("1 days", "10 days")._with_freq(None)
1527
+
1528
+ tdi = tm.box_expected(tdi, box)
1529
+ xbox = get_upcast_box(tdi, other)
1530
+
1531
+ expected = tm.box_expected(expected, xbox)
1532
+
1533
+ result = other * tdi
1534
+ tm.assert_equal(result, expected)
1535
+ commute = tdi * other
1536
+ tm.assert_equal(commute, expected)
1537
+
1538
+ # ------------------------------------------------------------------
1539
+ # __div__, __rdiv__
1540
+
1541
+ def test_td64arr_div_nat_invalid(self, box_with_array):
1542
+ # don't allow division by NaT (maybe could in the future)
1543
+ rng = timedelta_range("1 days", "10 days", name="foo")
1544
+ rng = tm.box_expected(rng, box_with_array)
1545
+
1546
+ with pytest.raises(TypeError, match="unsupported operand type"):
1547
+ rng / NaT
1548
+ with pytest.raises(TypeError, match="Cannot divide NaTType by"):
1549
+ NaT / rng
1550
+
1551
+ dt64nat = np.datetime64("NaT", "ns")
1552
+ msg = "|".join(
1553
+ [
1554
+ # 'divide' on npdev as of 2021-12-18
1555
+ "ufunc '(true_divide|divide)' cannot use operands",
1556
+ "cannot perform __r?truediv__",
1557
+ "Cannot divide datetime64 by TimedeltaArray",
1558
+ ]
1559
+ )
1560
+ with pytest.raises(TypeError, match=msg):
1561
+ rng / dt64nat
1562
+ with pytest.raises(TypeError, match=msg):
1563
+ dt64nat / rng
1564
+
1565
+ def test_td64arr_div_td64nat(self, box_with_array):
1566
+ # GH#23829
1567
+ box = box_with_array
1568
+ xbox = np.ndarray if box is pd.array else box
1569
+
1570
+ rng = timedelta_range("1 days", "10 days")
1571
+ rng = tm.box_expected(rng, box)
1572
+
1573
+ other = np.timedelta64("NaT")
1574
+
1575
+ expected = np.array([np.nan] * 10)
1576
+ expected = tm.box_expected(expected, xbox)
1577
+
1578
+ result = rng / other
1579
+ tm.assert_equal(result, expected)
1580
+
1581
+ result = other / rng
1582
+ tm.assert_equal(result, expected)
1583
+
1584
+ def test_td64arr_div_int(self, box_with_array):
1585
+ idx = TimedeltaIndex(np.arange(5, dtype="int64"))
1586
+ idx = tm.box_expected(idx, box_with_array)
1587
+
1588
+ result = idx / 1
1589
+ tm.assert_equal(result, idx)
1590
+
1591
+ with pytest.raises(TypeError, match="Cannot divide"):
1592
+ # GH#23829
1593
+ 1 / idx
1594
+
1595
+ def test_td64arr_div_tdlike_scalar(self, two_hours, box_with_array):
1596
+ # GH#20088, GH#22163 ensure DataFrame returns correct dtype
1597
+ box = box_with_array
1598
+ xbox = np.ndarray if box is pd.array else box
1599
+
1600
+ rng = timedelta_range("1 days", "10 days", name="foo")
1601
+ expected = Index((np.arange(10) + 1) * 12, dtype=np.float64, name="foo")
1602
+
1603
+ rng = tm.box_expected(rng, box)
1604
+ expected = tm.box_expected(expected, xbox)
1605
+
1606
+ result = rng / two_hours
1607
+ tm.assert_equal(result, expected)
1608
+
1609
+ result = two_hours / rng
1610
+ expected = 1 / expected
1611
+ tm.assert_equal(result, expected)
1612
+
1613
+ @pytest.mark.parametrize("m", [1, 3, 10])
1614
+ @pytest.mark.parametrize("unit", ["D", "h", "m", "s", "ms", "us", "ns"])
1615
+ def test_td64arr_div_td64_scalar(self, m, unit, box_with_array):
1616
+ box = box_with_array
1617
+ xbox = np.ndarray if box is pd.array else box
1618
+
1619
+ ser = Series([Timedelta(days=59)] * 3)
1620
+ ser[2] = np.nan
1621
+ flat = ser
1622
+ ser = tm.box_expected(ser, box)
1623
+
1624
+ # op
1625
+ expected = Series([x / np.timedelta64(m, unit) for x in flat])
1626
+ expected = tm.box_expected(expected, xbox)
1627
+ result = ser / np.timedelta64(m, unit)
1628
+ tm.assert_equal(result, expected)
1629
+
1630
+ # reverse op
1631
+ expected = Series([Timedelta(np.timedelta64(m, unit)) / x for x in flat])
1632
+ expected = tm.box_expected(expected, xbox)
1633
+ result = np.timedelta64(m, unit) / ser
1634
+ tm.assert_equal(result, expected)
1635
+
1636
+ def test_td64arr_div_tdlike_scalar_with_nat(self, two_hours, box_with_array):
1637
+ box = box_with_array
1638
+ xbox = np.ndarray if box is pd.array else box
1639
+
1640
+ rng = TimedeltaIndex(["1 days", NaT, "2 days"], name="foo")
1641
+ expected = Index([12, np.nan, 24], dtype=np.float64, name="foo")
1642
+
1643
+ rng = tm.box_expected(rng, box)
1644
+ expected = tm.box_expected(expected, xbox)
1645
+
1646
+ result = rng / two_hours
1647
+ tm.assert_equal(result, expected)
1648
+
1649
+ result = two_hours / rng
1650
+ expected = 1 / expected
1651
+ tm.assert_equal(result, expected)
1652
+
1653
+ def test_td64arr_div_td64_ndarray(self, box_with_array):
1654
+ # GH#22631
1655
+ box = box_with_array
1656
+ xbox = np.ndarray if box is pd.array else box
1657
+
1658
+ rng = TimedeltaIndex(["1 days", NaT, "2 days"])
1659
+ expected = Index([12, np.nan, 24], dtype=np.float64)
1660
+
1661
+ rng = tm.box_expected(rng, box)
1662
+ expected = tm.box_expected(expected, xbox)
1663
+
1664
+ other = np.array([2, 4, 2], dtype="m8[h]")
1665
+ result = rng / other
1666
+ tm.assert_equal(result, expected)
1667
+
1668
+ result = rng / tm.box_expected(other, box)
1669
+ tm.assert_equal(result, expected)
1670
+
1671
+ result = rng / other.astype(object)
1672
+ tm.assert_equal(result, expected.astype(object))
1673
+
1674
+ result = rng / list(other)
1675
+ tm.assert_equal(result, expected)
1676
+
1677
+ # reversed op
1678
+ expected = 1 / expected
1679
+ result = other / rng
1680
+ tm.assert_equal(result, expected)
1681
+
1682
+ result = tm.box_expected(other, box) / rng
1683
+ tm.assert_equal(result, expected)
1684
+
1685
+ result = other.astype(object) / rng
1686
+ tm.assert_equal(result, expected)
1687
+
1688
+ result = list(other) / rng
1689
+ tm.assert_equal(result, expected)
1690
+
1691
+ def test_tdarr_div_length_mismatch(self, box_with_array):
1692
+ rng = TimedeltaIndex(["1 days", NaT, "2 days"])
1693
+ mismatched = [1, 2, 3, 4]
1694
+
1695
+ rng = tm.box_expected(rng, box_with_array)
1696
+ msg = "Cannot divide vectors|Unable to coerce to Series"
1697
+ for obj in [mismatched, mismatched[:2]]:
1698
+ # one shorter, one longer
1699
+ for other in [obj, np.array(obj), Index(obj)]:
1700
+ with pytest.raises(ValueError, match=msg):
1701
+ rng / other
1702
+ with pytest.raises(ValueError, match=msg):
1703
+ other / rng
1704
+
1705
+ def test_td64_div_object_mixed_result(self, box_with_array):
1706
+ # Case where we having a NaT in the result inseat of timedelta64("NaT")
1707
+ # is misleading
1708
+ orig = timedelta_range("1 Day", periods=3).insert(1, NaT)
1709
+ tdi = tm.box_expected(orig, box_with_array, transpose=False)
1710
+
1711
+ other = np.array([orig[0], 1.5, 2.0, orig[2]], dtype=object)
1712
+ other = tm.box_expected(other, box_with_array, transpose=False)
1713
+
1714
+ res = tdi / other
1715
+
1716
+ expected = Index([1.0, np.timedelta64("NaT", "ns"), orig[0], 1.5], dtype=object)
1717
+ expected = tm.box_expected(expected, box_with_array, transpose=False)
1718
+ if isinstance(expected, NumpyExtensionArray):
1719
+ expected = expected.to_numpy()
1720
+ tm.assert_equal(res, expected)
1721
+ if box_with_array is DataFrame:
1722
+ # We have a np.timedelta64(NaT), not pd.NaT
1723
+ assert isinstance(res.iloc[1, 0], np.timedelta64)
1724
+
1725
+ res = tdi // other
1726
+
1727
+ expected = Index([1, np.timedelta64("NaT", "ns"), orig[0], 1], dtype=object)
1728
+ expected = tm.box_expected(expected, box_with_array, transpose=False)
1729
+ if isinstance(expected, NumpyExtensionArray):
1730
+ expected = expected.to_numpy()
1731
+ tm.assert_equal(res, expected)
1732
+ if box_with_array is DataFrame:
1733
+ # We have a np.timedelta64(NaT), not pd.NaT
1734
+ assert isinstance(res.iloc[1, 0], np.timedelta64)
1735
+
1736
+ # ------------------------------------------------------------------
1737
+ # __floordiv__, __rfloordiv__
1738
+
1739
+ def test_td64arr_floordiv_td64arr_with_nat(
1740
+ self, box_with_array, using_array_manager
1741
+ ):
1742
+ # GH#35529
1743
+ box = box_with_array
1744
+ xbox = np.ndarray if box is pd.array else box
1745
+
1746
+ left = Series([1000, 222330, 30], dtype="timedelta64[ns]")
1747
+ right = Series([1000, 222330, None], dtype="timedelta64[ns]")
1748
+
1749
+ left = tm.box_expected(left, box)
1750
+ right = tm.box_expected(right, box)
1751
+
1752
+ expected = np.array([1.0, 1.0, np.nan], dtype=np.float64)
1753
+ expected = tm.box_expected(expected, xbox)
1754
+ if box is DataFrame and using_array_manager:
1755
+ # INFO(ArrayManager) floordiv returns integer, and ArrayManager
1756
+ # performs ops column-wise and thus preserves int64 dtype for
1757
+ # columns without missing values
1758
+ expected[[0, 1]] = expected[[0, 1]].astype("int64")
1759
+
1760
+ with tm.maybe_produces_warning(
1761
+ RuntimeWarning, box is pd.array, check_stacklevel=False
1762
+ ):
1763
+ result = left // right
1764
+
1765
+ tm.assert_equal(result, expected)
1766
+
1767
+ # case that goes through __rfloordiv__ with arraylike
1768
+ with tm.maybe_produces_warning(
1769
+ RuntimeWarning, box is pd.array, check_stacklevel=False
1770
+ ):
1771
+ result = np.asarray(left) // right
1772
+ tm.assert_equal(result, expected)
1773
+
1774
+ @pytest.mark.filterwarnings("ignore:invalid value encountered:RuntimeWarning")
1775
+ def test_td64arr_floordiv_tdscalar(self, box_with_array, scalar_td):
1776
+ # GH#18831, GH#19125
1777
+ box = box_with_array
1778
+ xbox = np.ndarray if box is pd.array else box
1779
+ td = Timedelta("5m3s") # i.e. (scalar_td - 1sec) / 2
1780
+
1781
+ td1 = Series([td, td, NaT], dtype="m8[ns]")
1782
+ td1 = tm.box_expected(td1, box, transpose=False)
1783
+
1784
+ expected = Series([0, 0, np.nan])
1785
+ expected = tm.box_expected(expected, xbox, transpose=False)
1786
+
1787
+ result = td1 // scalar_td
1788
+ tm.assert_equal(result, expected)
1789
+
1790
+ # Reversed op
1791
+ expected = Series([2, 2, np.nan])
1792
+ expected = tm.box_expected(expected, xbox, transpose=False)
1793
+
1794
+ result = scalar_td // td1
1795
+ tm.assert_equal(result, expected)
1796
+
1797
+ # same thing buts let's be explicit about calling __rfloordiv__
1798
+ result = td1.__rfloordiv__(scalar_td)
1799
+ tm.assert_equal(result, expected)
1800
+
1801
+ def test_td64arr_floordiv_int(self, box_with_array):
1802
+ idx = TimedeltaIndex(np.arange(5, dtype="int64"))
1803
+ idx = tm.box_expected(idx, box_with_array)
1804
+ result = idx // 1
1805
+ tm.assert_equal(result, idx)
1806
+
1807
+ pattern = "floor_divide cannot use operands|Cannot divide int by Timedelta*"
1808
+ with pytest.raises(TypeError, match=pattern):
1809
+ 1 // idx
1810
+
1811
+ # ------------------------------------------------------------------
1812
+ # mod, divmod
1813
+ # TODO: operations with timedelta-like arrays, numeric arrays,
1814
+ # reversed ops
1815
+
1816
+ def test_td64arr_mod_tdscalar(self, box_with_array, three_days):
1817
+ tdi = timedelta_range("1 Day", "9 days")
1818
+ tdarr = tm.box_expected(tdi, box_with_array)
1819
+
1820
+ expected = TimedeltaIndex(["1 Day", "2 Days", "0 Days"] * 3)
1821
+ expected = tm.box_expected(expected, box_with_array)
1822
+
1823
+ result = tdarr % three_days
1824
+ tm.assert_equal(result, expected)
1825
+
1826
+ warn = None
1827
+ if box_with_array is DataFrame and isinstance(three_days, pd.DateOffset):
1828
+ warn = PerformanceWarning
1829
+ # TODO: making expected be object here a result of DataFrame.__divmod__
1830
+ # being defined in a naive way that does not dispatch to the underlying
1831
+ # array's __divmod__
1832
+ expected = expected.astype(object)
1833
+
1834
+ with tm.assert_produces_warning(warn):
1835
+ result = divmod(tdarr, three_days)
1836
+
1837
+ tm.assert_equal(result[1], expected)
1838
+ tm.assert_equal(result[0], tdarr // three_days)
1839
+
1840
+ def test_td64arr_mod_int(self, box_with_array):
1841
+ tdi = timedelta_range("1 ns", "10 ns", periods=10)
1842
+ tdarr = tm.box_expected(tdi, box_with_array)
1843
+
1844
+ expected = TimedeltaIndex(["1 ns", "0 ns"] * 5)
1845
+ expected = tm.box_expected(expected, box_with_array)
1846
+
1847
+ result = tdarr % 2
1848
+ tm.assert_equal(result, expected)
1849
+
1850
+ msg = "Cannot divide int by"
1851
+ with pytest.raises(TypeError, match=msg):
1852
+ 2 % tdarr
1853
+
1854
+ result = divmod(tdarr, 2)
1855
+ tm.assert_equal(result[1], expected)
1856
+ tm.assert_equal(result[0], tdarr // 2)
1857
+
1858
+ def test_td64arr_rmod_tdscalar(self, box_with_array, three_days):
1859
+ tdi = timedelta_range("1 Day", "9 days")
1860
+ tdarr = tm.box_expected(tdi, box_with_array)
1861
+
1862
+ expected = ["0 Days", "1 Day", "0 Days"] + ["3 Days"] * 6
1863
+ expected = TimedeltaIndex(expected)
1864
+ expected = tm.box_expected(expected, box_with_array)
1865
+
1866
+ result = three_days % tdarr
1867
+ tm.assert_equal(result, expected)
1868
+
1869
+ result = divmod(three_days, tdarr)
1870
+ tm.assert_equal(result[1], expected)
1871
+ tm.assert_equal(result[0], three_days // tdarr)
1872
+
1873
+ # ------------------------------------------------------------------
1874
+ # Operations with invalid others
1875
+
1876
+ def test_td64arr_mul_tdscalar_invalid(self, box_with_array, scalar_td):
1877
+ td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
1878
+ td1.iloc[2] = np.nan
1879
+
1880
+ td1 = tm.box_expected(td1, box_with_array)
1881
+
1882
+ # check that we are getting a TypeError
1883
+ # with 'operate' (from core/ops.py) for the ops that are not
1884
+ # defined
1885
+ pattern = "operate|unsupported|cannot|not supported"
1886
+ with pytest.raises(TypeError, match=pattern):
1887
+ td1 * scalar_td
1888
+ with pytest.raises(TypeError, match=pattern):
1889
+ scalar_td * td1
1890
+
1891
+ def test_td64arr_mul_too_short_raises(self, box_with_array):
1892
+ idx = TimedeltaIndex(np.arange(5, dtype="int64"))
1893
+ idx = tm.box_expected(idx, box_with_array)
1894
+ msg = "|".join(
1895
+ [
1896
+ "cannot use operands with types dtype",
1897
+ "Cannot multiply with unequal lengths",
1898
+ "Unable to coerce to Series",
1899
+ ]
1900
+ )
1901
+ with pytest.raises(TypeError, match=msg):
1902
+ # length check before dtype check
1903
+ idx * idx[:3]
1904
+ with pytest.raises(ValueError, match=msg):
1905
+ idx * np.array([1, 2])
1906
+
1907
+ def test_td64arr_mul_td64arr_raises(self, box_with_array):
1908
+ idx = TimedeltaIndex(np.arange(5, dtype="int64"))
1909
+ idx = tm.box_expected(idx, box_with_array)
1910
+ msg = "cannot use operands with types dtype"
1911
+ with pytest.raises(TypeError, match=msg):
1912
+ idx * idx
1913
+
1914
+ # ------------------------------------------------------------------
1915
+ # Operations with numeric others
1916
+
1917
+ def test_td64arr_mul_numeric_scalar(self, box_with_array, one):
1918
+ # GH#4521
1919
+ # divide/multiply by integers
1920
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
1921
+ expected = Series(["-59 Days", "-59 Days", "NaT"], dtype="timedelta64[ns]")
1922
+
1923
+ tdser = tm.box_expected(tdser, box_with_array)
1924
+ expected = tm.box_expected(expected, box_with_array)
1925
+
1926
+ result = tdser * (-one)
1927
+ tm.assert_equal(result, expected)
1928
+ result = (-one) * tdser
1929
+ tm.assert_equal(result, expected)
1930
+
1931
+ expected = Series(["118 Days", "118 Days", "NaT"], dtype="timedelta64[ns]")
1932
+ expected = tm.box_expected(expected, box_with_array)
1933
+
1934
+ result = tdser * (2 * one)
1935
+ tm.assert_equal(result, expected)
1936
+ result = (2 * one) * tdser
1937
+ tm.assert_equal(result, expected)
1938
+
1939
+ @pytest.mark.parametrize("two", [2, 2.0, np.array(2), np.array(2.0)])
1940
+ def test_td64arr_div_numeric_scalar(self, box_with_array, two):
1941
+ # GH#4521
1942
+ # divide/multiply by integers
1943
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
1944
+ expected = Series(["29.5D", "29.5D", "NaT"], dtype="timedelta64[ns]")
1945
+
1946
+ tdser = tm.box_expected(tdser, box_with_array)
1947
+ expected = tm.box_expected(expected, box_with_array)
1948
+
1949
+ result = tdser / two
1950
+ tm.assert_equal(result, expected)
1951
+
1952
+ with pytest.raises(TypeError, match="Cannot divide"):
1953
+ two / tdser
1954
+
1955
+ @pytest.mark.parametrize("two", [2, 2.0, np.array(2), np.array(2.0)])
1956
+ def test_td64arr_floordiv_numeric_scalar(self, box_with_array, two):
1957
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
1958
+ expected = Series(["29.5D", "29.5D", "NaT"], dtype="timedelta64[ns]")
1959
+
1960
+ tdser = tm.box_expected(tdser, box_with_array)
1961
+ expected = tm.box_expected(expected, box_with_array)
1962
+
1963
+ result = tdser // two
1964
+ tm.assert_equal(result, expected)
1965
+
1966
+ with pytest.raises(TypeError, match="Cannot divide"):
1967
+ two // tdser
1968
+
1969
+ @pytest.mark.parametrize(
1970
+ "vector",
1971
+ [np.array([20, 30, 40]), Index([20, 30, 40]), Series([20, 30, 40])],
1972
+ ids=lambda x: type(x).__name__,
1973
+ )
1974
+ def test_td64arr_rmul_numeric_array(
1975
+ self,
1976
+ box_with_array,
1977
+ vector,
1978
+ any_real_numpy_dtype,
1979
+ ):
1980
+ # GH#4521
1981
+ # divide/multiply by integers
1982
+
1983
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
1984
+ vector = vector.astype(any_real_numpy_dtype)
1985
+
1986
+ expected = Series(["1180 Days", "1770 Days", "NaT"], dtype="timedelta64[ns]")
1987
+
1988
+ tdser = tm.box_expected(tdser, box_with_array)
1989
+ xbox = get_upcast_box(tdser, vector)
1990
+
1991
+ expected = tm.box_expected(expected, xbox)
1992
+
1993
+ result = tdser * vector
1994
+ tm.assert_equal(result, expected)
1995
+
1996
+ result = vector * tdser
1997
+ tm.assert_equal(result, expected)
1998
+
1999
+ @pytest.mark.parametrize(
2000
+ "vector",
2001
+ [np.array([20, 30, 40]), Index([20, 30, 40]), Series([20, 30, 40])],
2002
+ ids=lambda x: type(x).__name__,
2003
+ )
2004
+ def test_td64arr_div_numeric_array(
2005
+ self, box_with_array, vector, any_real_numpy_dtype
2006
+ ):
2007
+ # GH#4521
2008
+ # divide/multiply by integers
2009
+
2010
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="m8[ns]")
2011
+ vector = vector.astype(any_real_numpy_dtype)
2012
+
2013
+ expected = Series(["2.95D", "1D 23h 12m", "NaT"], dtype="timedelta64[ns]")
2014
+
2015
+ tdser = tm.box_expected(tdser, box_with_array)
2016
+ xbox = get_upcast_box(tdser, vector)
2017
+ expected = tm.box_expected(expected, xbox)
2018
+
2019
+ result = tdser / vector
2020
+ tm.assert_equal(result, expected)
2021
+
2022
+ pattern = "|".join(
2023
+ [
2024
+ "true_divide'? cannot use operands",
2025
+ "cannot perform __div__",
2026
+ "cannot perform __truediv__",
2027
+ "unsupported operand",
2028
+ "Cannot divide",
2029
+ "ufunc 'divide' cannot use operands with types",
2030
+ ]
2031
+ )
2032
+ with pytest.raises(TypeError, match=pattern):
2033
+ vector / tdser
2034
+
2035
+ result = tdser / vector.astype(object)
2036
+ if box_with_array is DataFrame:
2037
+ expected = [tdser.iloc[0, n] / vector[n] for n in range(len(vector))]
2038
+ expected = tm.box_expected(expected, xbox).astype(object)
2039
+ # We specifically expect timedelta64("NaT") here, not pd.NA
2040
+ msg = "The 'downcast' keyword in fillna"
2041
+ with tm.assert_produces_warning(FutureWarning, match=msg):
2042
+ expected[2] = expected[2].fillna(
2043
+ np.timedelta64("NaT", "ns"), downcast=False
2044
+ )
2045
+ else:
2046
+ expected = [tdser[n] / vector[n] for n in range(len(tdser))]
2047
+ expected = [
2048
+ x if x is not NaT else np.timedelta64("NaT", "ns") for x in expected
2049
+ ]
2050
+ if xbox is tm.to_array:
2051
+ expected = tm.to_array(expected).astype(object)
2052
+ else:
2053
+ expected = xbox(expected, dtype=object)
2054
+
2055
+ tm.assert_equal(result, expected)
2056
+
2057
+ with pytest.raises(TypeError, match=pattern):
2058
+ vector.astype(object) / tdser
2059
+
2060
+ def test_td64arr_mul_int_series(self, box_with_array, names):
2061
+ # GH#19042 test for correct name attachment
2062
+ box = box_with_array
2063
+ exname = get_expected_name(box, names)
2064
+
2065
+ tdi = TimedeltaIndex(
2066
+ ["0days", "1day", "2days", "3days", "4days"], name=names[0]
2067
+ )
2068
+ # TODO: Should we be parametrizing over types for `ser` too?
2069
+ ser = Series([0, 1, 2, 3, 4], dtype=np.int64, name=names[1])
2070
+
2071
+ expected = Series(
2072
+ ["0days", "1day", "4days", "9days", "16days"],
2073
+ dtype="timedelta64[ns]",
2074
+ name=exname,
2075
+ )
2076
+
2077
+ tdi = tm.box_expected(tdi, box)
2078
+ xbox = get_upcast_box(tdi, ser)
2079
+
2080
+ expected = tm.box_expected(expected, xbox)
2081
+
2082
+ result = ser * tdi
2083
+ tm.assert_equal(result, expected)
2084
+
2085
+ result = tdi * ser
2086
+ tm.assert_equal(result, expected)
2087
+
2088
+ # TODO: Should we be parametrizing over types for `ser` too?
2089
+ def test_float_series_rdiv_td64arr(self, box_with_array, names):
2090
+ # GH#19042 test for correct name attachment
2091
+ box = box_with_array
2092
+ tdi = TimedeltaIndex(
2093
+ ["0days", "1day", "2days", "3days", "4days"], name=names[0]
2094
+ )
2095
+ ser = Series([1.5, 3, 4.5, 6, 7.5], dtype=np.float64, name=names[1])
2096
+
2097
+ xname = names[2] if box not in [tm.to_array, pd.array] else names[1]
2098
+ expected = Series(
2099
+ [tdi[n] / ser[n] for n in range(len(ser))],
2100
+ dtype="timedelta64[ns]",
2101
+ name=xname,
2102
+ )
2103
+
2104
+ tdi = tm.box_expected(tdi, box)
2105
+ xbox = get_upcast_box(tdi, ser)
2106
+ expected = tm.box_expected(expected, xbox)
2107
+
2108
+ result = ser.__rtruediv__(tdi)
2109
+ if box is DataFrame:
2110
+ assert result is NotImplemented
2111
+ else:
2112
+ tm.assert_equal(result, expected)
2113
+
2114
+ def test_td64arr_all_nat_div_object_dtype_numeric(self, box_with_array):
2115
+ # GH#39750 make sure we infer the result as td64
2116
+ tdi = TimedeltaIndex([NaT, NaT])
2117
+
2118
+ left = tm.box_expected(tdi, box_with_array)
2119
+ right = np.array([2, 2.0], dtype=object)
2120
+
2121
+ tdnat = np.timedelta64("NaT", "ns")
2122
+ expected = Index([tdnat] * 2, dtype=object)
2123
+ if box_with_array is not Index:
2124
+ expected = tm.box_expected(expected, box_with_array).astype(object)
2125
+ if box_with_array in [Series, DataFrame]:
2126
+ msg = "The 'downcast' keyword in fillna is deprecated"
2127
+ with tm.assert_produces_warning(FutureWarning, match=msg):
2128
+ expected = expected.fillna(tdnat, downcast=False) # GH#18463
2129
+
2130
+ result = left / right
2131
+ tm.assert_equal(result, expected)
2132
+
2133
+ result = left // right
2134
+ tm.assert_equal(result, expected)
2135
+
2136
+
2137
+ class TestTimedelta64ArrayLikeArithmetic:
2138
+ # Arithmetic tests for timedelta64[ns] vectors fully parametrized over
2139
+ # DataFrame/Series/TimedeltaIndex/TimedeltaArray. Ideally all arithmetic
2140
+ # tests will eventually end up here.
2141
+
2142
+ def test_td64arr_pow_invalid(self, scalar_td, box_with_array):
2143
+ td1 = Series([timedelta(minutes=5, seconds=3)] * 3)
2144
+ td1.iloc[2] = np.nan
2145
+
2146
+ td1 = tm.box_expected(td1, box_with_array)
2147
+
2148
+ # check that we are getting a TypeError
2149
+ # with 'operate' (from core/ops.py) for the ops that are not
2150
+ # defined
2151
+ pattern = "operate|unsupported|cannot|not supported"
2152
+ with pytest.raises(TypeError, match=pattern):
2153
+ scalar_td**td1
2154
+
2155
+ with pytest.raises(TypeError, match=pattern):
2156
+ td1**scalar_td
2157
+
2158
+
2159
+ def test_add_timestamp_to_timedelta():
2160
+ # GH: 35897
2161
+ timestamp = Timestamp("2021-01-01")
2162
+ result = timestamp + timedelta_range("0s", "1s", periods=31)
2163
+ expected = DatetimeIndex(
2164
+ [
2165
+ timestamp
2166
+ + (
2167
+ pd.to_timedelta("0.033333333s") * i
2168
+ + pd.to_timedelta("0.000000001s") * divmod(i, 3)[0]
2169
+ )
2170
+ for i in range(31)
2171
+ ]
2172
+ )
2173
+ tm.assert_index_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (186 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_odf.cpython-310.pyc ADDED
Binary file (2.34 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_odswriter.cpython-310.pyc ADDED
Binary file (3.33 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_openpyxl.cpython-310.pyc ADDED
Binary file (12.4 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_readers.cpython-310.pyc ADDED
Binary file (48.8 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_style.cpython-310.pyc ADDED
Binary file (8.72 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_writers.cpython-310.pyc ADDED
Binary file (46.1 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_xlrd.cpython-310.pyc ADDED
Binary file (2.35 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/__pycache__/test_xlsxwriter.cpython-310.pyc ADDED
Binary file (2.74 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_odf.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.compat import is_platform_windows
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+
11
+ pytest.importorskip("odf")
12
+
13
+ if is_platform_windows():
14
+ pytestmark = pytest.mark.single_cpu
15
+
16
+
17
+ @pytest.fixture(autouse=True)
18
+ def cd_and_set_engine(monkeypatch, datapath):
19
+ func = functools.partial(pd.read_excel, engine="odf")
20
+ monkeypatch.setattr(pd, "read_excel", func)
21
+ monkeypatch.chdir(datapath("io", "data", "excel"))
22
+
23
+
24
+ def test_read_invalid_types_raises():
25
+ # the invalid_value_type.ods required manually editing
26
+ # of the included content.xml file
27
+ with pytest.raises(ValueError, match="Unrecognized type awesome_new_type"):
28
+ pd.read_excel("invalid_value_type.ods")
29
+
30
+
31
+ def test_read_writer_table():
32
+ # Also test reading tables from an text OpenDocument file
33
+ # (.odt)
34
+ index = pd.Index(["Row 1", "Row 2", "Row 3"], name="Header")
35
+ expected = pd.DataFrame(
36
+ [[1, np.nan, 7], [2, np.nan, 8], [3, np.nan, 9]],
37
+ index=index,
38
+ columns=["Column 1", "Unnamed: 2", "Column 3"],
39
+ )
40
+
41
+ result = pd.read_excel("writertable.odt", sheet_name="Table1", index_col=0)
42
+
43
+ tm.assert_frame_equal(result, expected)
44
+
45
+
46
+ def test_read_newlines_between_xml_elements_table():
47
+ # GH#45598
48
+ expected = pd.DataFrame(
49
+ [[1.0, 4.0, 7], [np.nan, np.nan, 8], [3.0, 6.0, 9]],
50
+ columns=["Column 1", "Column 2", "Column 3"],
51
+ )
52
+
53
+ result = pd.read_excel("test_newlines.ods")
54
+
55
+ tm.assert_frame_equal(result, expected)
56
+
57
+
58
+ def test_read_unempty_cells():
59
+ expected = pd.DataFrame(
60
+ [1, np.nan, 3, np.nan, 5],
61
+ columns=["Column 1"],
62
+ )
63
+
64
+ result = pd.read_excel("test_unempty_cells.ods")
65
+
66
+ tm.assert_frame_equal(result, expected)
67
+
68
+
69
+ def test_read_cell_annotation():
70
+ expected = pd.DataFrame(
71
+ ["test", np.nan, "test 3"],
72
+ columns=["Column 1"],
73
+ )
74
+
75
+ result = pd.read_excel("test_cell_annotation.ods")
76
+
77
+ tm.assert_frame_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_odswriter.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ date,
3
+ datetime,
4
+ )
5
+ import re
6
+
7
+ import pytest
8
+
9
+ from pandas.compat import is_platform_windows
10
+
11
+ import pandas as pd
12
+ import pandas._testing as tm
13
+
14
+ from pandas.io.excel import ExcelWriter
15
+
16
+ odf = pytest.importorskip("odf")
17
+
18
+ if is_platform_windows():
19
+ pytestmark = pytest.mark.single_cpu
20
+
21
+
22
+ @pytest.fixture
23
+ def ext():
24
+ return ".ods"
25
+
26
+
27
+ def test_write_append_mode_raises(ext):
28
+ msg = "Append mode is not supported with odf!"
29
+
30
+ with tm.ensure_clean(ext) as f:
31
+ with pytest.raises(ValueError, match=msg):
32
+ ExcelWriter(f, engine="odf", mode="a")
33
+
34
+
35
+ @pytest.mark.parametrize("engine_kwargs", [None, {"kwarg": 1}])
36
+ def test_engine_kwargs(ext, engine_kwargs):
37
+ # GH 42286
38
+ # GH 43445
39
+ # test for error: OpenDocumentSpreadsheet does not accept any arguments
40
+ with tm.ensure_clean(ext) as f:
41
+ if engine_kwargs is not None:
42
+ error = re.escape(
43
+ "OpenDocumentSpreadsheet() got an unexpected keyword argument 'kwarg'"
44
+ )
45
+ with pytest.raises(
46
+ TypeError,
47
+ match=error,
48
+ ):
49
+ ExcelWriter(f, engine="odf", engine_kwargs=engine_kwargs)
50
+ else:
51
+ with ExcelWriter(f, engine="odf", engine_kwargs=engine_kwargs) as _:
52
+ pass
53
+
54
+
55
+ def test_book_and_sheets_consistent(ext):
56
+ # GH#45687 - Ensure sheets is updated if user modifies book
57
+ with tm.ensure_clean(ext) as f:
58
+ with ExcelWriter(f) as writer:
59
+ assert writer.sheets == {}
60
+ table = odf.table.Table(name="test_name")
61
+ writer.book.spreadsheet.addElement(table)
62
+ assert writer.sheets == {"test_name": table}
63
+
64
+
65
+ @pytest.mark.parametrize(
66
+ ["value", "cell_value_type", "cell_value_attribute", "cell_value"],
67
+ argvalues=[
68
+ (True, "boolean", "boolean-value", "true"),
69
+ ("test string", "string", "string-value", "test string"),
70
+ (1, "float", "value", "1"),
71
+ (1.5, "float", "value", "1.5"),
72
+ (
73
+ datetime(2010, 10, 10, 10, 10, 10),
74
+ "date",
75
+ "date-value",
76
+ "2010-10-10T10:10:10",
77
+ ),
78
+ (date(2010, 10, 10), "date", "date-value", "2010-10-10"),
79
+ ],
80
+ )
81
+ def test_cell_value_type(ext, value, cell_value_type, cell_value_attribute, cell_value):
82
+ # GH#54994 ODS: cell attributes should follow specification
83
+ # http://docs.oasis-open.org/office/v1.2/os/OpenDocument-v1.2-os-part1.html#refTable13
84
+ from odf.namespaces import OFFICENS
85
+ from odf.table import (
86
+ TableCell,
87
+ TableRow,
88
+ )
89
+
90
+ table_cell_name = TableCell().qname
91
+
92
+ with tm.ensure_clean(ext) as f:
93
+ pd.DataFrame([[value]]).to_excel(f, header=False, index=False)
94
+
95
+ with pd.ExcelFile(f) as wb:
96
+ sheet = wb._reader.get_sheet_by_index(0)
97
+ sheet_rows = sheet.getElementsByType(TableRow)
98
+ sheet_cells = [
99
+ x
100
+ for x in sheet_rows[0].childNodes
101
+ if hasattr(x, "qname") and x.qname == table_cell_name
102
+ ]
103
+
104
+ cell = sheet_cells[0]
105
+ assert cell.attributes.get((OFFICENS, "value-type")) == cell_value_type
106
+ assert cell.attributes.get((OFFICENS, cell_value_attribute)) == cell_value
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_openpyxl.py ADDED
@@ -0,0 +1,432 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ from pathlib import Path
3
+ import re
4
+
5
+ import numpy as np
6
+ import pytest
7
+
8
+ from pandas.compat import is_platform_windows
9
+
10
+ import pandas as pd
11
+ from pandas import DataFrame
12
+ import pandas._testing as tm
13
+
14
+ from pandas.io.excel import (
15
+ ExcelWriter,
16
+ _OpenpyxlWriter,
17
+ )
18
+ from pandas.io.excel._openpyxl import OpenpyxlReader
19
+
20
+ openpyxl = pytest.importorskip("openpyxl")
21
+
22
+ if is_platform_windows():
23
+ pytestmark = pytest.mark.single_cpu
24
+
25
+
26
+ @pytest.fixture
27
+ def ext():
28
+ return ".xlsx"
29
+
30
+
31
+ def test_to_excel_styleconverter():
32
+ from openpyxl import styles
33
+
34
+ hstyle = {
35
+ "font": {"color": "00FF0000", "bold": True},
36
+ "borders": {"top": "thin", "right": "thin", "bottom": "thin", "left": "thin"},
37
+ "alignment": {"horizontal": "center", "vertical": "top"},
38
+ "fill": {"patternType": "solid", "fgColor": {"rgb": "006666FF", "tint": 0.3}},
39
+ "number_format": {"format_code": "0.00"},
40
+ "protection": {"locked": True, "hidden": False},
41
+ }
42
+
43
+ font_color = styles.Color("00FF0000")
44
+ font = styles.Font(bold=True, color=font_color)
45
+ side = styles.Side(style=styles.borders.BORDER_THIN)
46
+ border = styles.Border(top=side, right=side, bottom=side, left=side)
47
+ alignment = styles.Alignment(horizontal="center", vertical="top")
48
+ fill_color = styles.Color(rgb="006666FF", tint=0.3)
49
+ fill = styles.PatternFill(patternType="solid", fgColor=fill_color)
50
+
51
+ number_format = "0.00"
52
+
53
+ protection = styles.Protection(locked=True, hidden=False)
54
+
55
+ kw = _OpenpyxlWriter._convert_to_style_kwargs(hstyle)
56
+ assert kw["font"] == font
57
+ assert kw["border"] == border
58
+ assert kw["alignment"] == alignment
59
+ assert kw["fill"] == fill
60
+ assert kw["number_format"] == number_format
61
+ assert kw["protection"] == protection
62
+
63
+
64
+ def test_write_cells_merge_styled(ext):
65
+ from pandas.io.formats.excel import ExcelCell
66
+
67
+ sheet_name = "merge_styled"
68
+
69
+ sty_b1 = {"font": {"color": "00FF0000"}}
70
+ sty_a2 = {"font": {"color": "0000FF00"}}
71
+
72
+ initial_cells = [
73
+ ExcelCell(col=1, row=0, val=42, style=sty_b1),
74
+ ExcelCell(col=0, row=1, val=99, style=sty_a2),
75
+ ]
76
+
77
+ sty_merged = {"font": {"color": "000000FF", "bold": True}}
78
+ sty_kwargs = _OpenpyxlWriter._convert_to_style_kwargs(sty_merged)
79
+ openpyxl_sty_merged = sty_kwargs["font"]
80
+ merge_cells = [
81
+ ExcelCell(
82
+ col=0, row=0, val="pandas", mergestart=1, mergeend=1, style=sty_merged
83
+ )
84
+ ]
85
+
86
+ with tm.ensure_clean(ext) as path:
87
+ with _OpenpyxlWriter(path) as writer:
88
+ writer._write_cells(initial_cells, sheet_name=sheet_name)
89
+ writer._write_cells(merge_cells, sheet_name=sheet_name)
90
+
91
+ wks = writer.sheets[sheet_name]
92
+ xcell_b1 = wks["B1"]
93
+ xcell_a2 = wks["A2"]
94
+ assert xcell_b1.font == openpyxl_sty_merged
95
+ assert xcell_a2.font == openpyxl_sty_merged
96
+
97
+
98
+ @pytest.mark.parametrize("iso_dates", [True, False])
99
+ def test_engine_kwargs_write(ext, iso_dates):
100
+ # GH 42286 GH 43445
101
+ engine_kwargs = {"iso_dates": iso_dates}
102
+ with tm.ensure_clean(ext) as f:
103
+ with ExcelWriter(f, engine="openpyxl", engine_kwargs=engine_kwargs) as writer:
104
+ assert writer.book.iso_dates == iso_dates
105
+ # ExcelWriter won't allow us to close without writing something
106
+ DataFrame().to_excel(writer)
107
+
108
+
109
+ def test_engine_kwargs_append_invalid(ext):
110
+ # GH 43445
111
+ # test whether an invalid engine kwargs actually raises
112
+ with tm.ensure_clean(ext) as f:
113
+ DataFrame(["hello", "world"]).to_excel(f)
114
+ with pytest.raises(
115
+ TypeError,
116
+ match=re.escape(
117
+ "load_workbook() got an unexpected keyword argument 'apple_banana'"
118
+ ),
119
+ ):
120
+ with ExcelWriter(
121
+ f, engine="openpyxl", mode="a", engine_kwargs={"apple_banana": "fruit"}
122
+ ) as writer:
123
+ # ExcelWriter needs us to write something to close properly
124
+ DataFrame(["good"]).to_excel(writer, sheet_name="Sheet2")
125
+
126
+
127
+ @pytest.mark.parametrize("data_only, expected", [(True, 0), (False, "=1+1")])
128
+ def test_engine_kwargs_append_data_only(ext, data_only, expected):
129
+ # GH 43445
130
+ # tests whether the data_only engine_kwarg actually works well for
131
+ # openpyxl's load_workbook
132
+ with tm.ensure_clean(ext) as f:
133
+ DataFrame(["=1+1"]).to_excel(f)
134
+ with ExcelWriter(
135
+ f, engine="openpyxl", mode="a", engine_kwargs={"data_only": data_only}
136
+ ) as writer:
137
+ assert writer.sheets["Sheet1"]["B2"].value == expected
138
+ # ExcelWriter needs us to writer something to close properly?
139
+ DataFrame().to_excel(writer, sheet_name="Sheet2")
140
+
141
+ # ensure that data_only also works for reading
142
+ # and that formulas/values roundtrip
143
+ assert (
144
+ pd.read_excel(
145
+ f,
146
+ sheet_name="Sheet1",
147
+ engine="openpyxl",
148
+ engine_kwargs={"data_only": data_only},
149
+ ).iloc[0, 1]
150
+ == expected
151
+ )
152
+
153
+
154
+ @pytest.mark.parametrize("kwarg_name", ["read_only", "data_only"])
155
+ @pytest.mark.parametrize("kwarg_value", [True, False])
156
+ def test_engine_kwargs_append_reader(datapath, ext, kwarg_name, kwarg_value):
157
+ # GH 55027
158
+ # test that `read_only` and `data_only` can be passed to
159
+ # `openpyxl.reader.excel.load_workbook` via `engine_kwargs`
160
+ filename = datapath("io", "data", "excel", "test1" + ext)
161
+ with contextlib.closing(
162
+ OpenpyxlReader(filename, engine_kwargs={kwarg_name: kwarg_value})
163
+ ) as reader:
164
+ assert getattr(reader.book, kwarg_name) == kwarg_value
165
+
166
+
167
+ @pytest.mark.parametrize(
168
+ "mode,expected", [("w", ["baz"]), ("a", ["foo", "bar", "baz"])]
169
+ )
170
+ def test_write_append_mode(ext, mode, expected):
171
+ df = DataFrame([1], columns=["baz"])
172
+
173
+ with tm.ensure_clean(ext) as f:
174
+ wb = openpyxl.Workbook()
175
+ wb.worksheets[0].title = "foo"
176
+ wb.worksheets[0]["A1"].value = "foo"
177
+ wb.create_sheet("bar")
178
+ wb.worksheets[1]["A1"].value = "bar"
179
+ wb.save(f)
180
+
181
+ with ExcelWriter(f, engine="openpyxl", mode=mode) as writer:
182
+ df.to_excel(writer, sheet_name="baz", index=False)
183
+
184
+ with contextlib.closing(openpyxl.load_workbook(f)) as wb2:
185
+ result = [sheet.title for sheet in wb2.worksheets]
186
+ assert result == expected
187
+
188
+ for index, cell_value in enumerate(expected):
189
+ assert wb2.worksheets[index]["A1"].value == cell_value
190
+
191
+
192
@pytest.mark.parametrize(
    "if_sheet_exists,num_sheets,expected",
    [
        ("new", 2, ["apple", "banana"]),
        ("replace", 1, ["pear"]),
        ("overlay", 1, ["pear", "banana"]),
    ],
)
def test_if_sheet_exists_append_modes(ext, if_sheet_exists, num_sheets, expected):
    # GH 40230
    # Appending to an existing sheet honors the if_sheet_exists policy.
    df1 = DataFrame({"fruit": ["apple", "banana"]})
    df2 = DataFrame({"fruit": ["pear"]})

    with tm.ensure_clean(ext) as path:
        df1.to_excel(path, engine="openpyxl", sheet_name="foo", index=False)
        writer_cm = ExcelWriter(
            path, engine="openpyxl", mode="a", if_sheet_exists=if_sheet_exists
        )
        with writer_cm as writer:
            df2.to_excel(writer, sheet_name="foo", index=False)

        with contextlib.closing(openpyxl.load_workbook(path)) as wb:
            assert len(wb.sheetnames) == num_sheets
            assert wb.sheetnames[0] == "foo"
            result = pd.read_excel(wb, "foo", engine="openpyxl")
            assert list(result["fruit"]) == expected
            if len(wb.sheetnames) == 2:
                # "new" mode wrote df2 under an auto-generated sheet name
                result = pd.read_excel(wb, wb.sheetnames[1], engine="openpyxl")
                tm.assert_frame_equal(result, df2)
+
221
+
222
@pytest.mark.parametrize(
    "startrow, startcol, greeting, goodbye",
    [
        (0, 0, ["poop", "world"], ["goodbye", "people"]),
        (0, 1, ["hello", "world"], ["poop", "people"]),
        (1, 0, ["hello", "poop"], ["goodbye", "people"]),
        (1, 1, ["hello", "world"], ["goodbye", "poop"]),
    ],
)
def test_append_overlay_startrow_startcol(ext, startrow, startcol, greeting, goodbye):
    # Overlay mode writes a single cell on top of df1 at (startrow, startcol);
    # the expected columns show which original value gets clobbered.
    df1 = DataFrame({"greeting": ["hello", "world"], "goodbye": ["goodbye", "people"]})
    df2 = DataFrame(["poop"])

    with tm.ensure_clean(ext) as path:
        df1.to_excel(path, engine="openpyxl", sheet_name="poo", index=False)
        writer_cm = ExcelWriter(
            path, engine="openpyxl", mode="a", if_sheet_exists="overlay"
        )
        with writer_cm as writer:
            # use startrow+1 because we don't have a header
            df2.to_excel(
                writer,
                index=False,
                header=False,
                startrow=startrow + 1,
                startcol=startcol,
                sheet_name="poo",
            )

        result = pd.read_excel(path, sheet_name="poo", engine="openpyxl")
        expected = DataFrame({"greeting": greeting, "goodbye": goodbye})
        tm.assert_frame_equal(result, expected)
+
254
+
255
@pytest.mark.parametrize(
    "if_sheet_exists,msg",
    [
        (
            "invalid",
            "'invalid' is not valid for if_sheet_exists. Valid options "
            "are 'error', 'new', 'replace' and 'overlay'.",
        ),
        (
            "error",
            "Sheet 'foo' already exists and if_sheet_exists is set to 'error'.",
        ),
        (
            None,
            "Sheet 'foo' already exists and if_sheet_exists is set to 'error'.",
        ),
    ],
)
def test_if_sheet_exists_raises(ext, if_sheet_exists, msg):
    # GH 40230
    # Invalid policies, and collisions under the default/error policy, raise.
    df = DataFrame({"fruit": ["pear"]})
    with tm.ensure_clean(ext) as path:
        with pytest.raises(ValueError, match=re.escape(msg)):
            df.to_excel(path, sheet_name="foo", engine="openpyxl")
            with ExcelWriter(
                path, engine="openpyxl", mode="a", if_sheet_exists=if_sheet_exists
            ) as writer:
                df.to_excel(writer, sheet_name="foo")
+
284
+
285
def test_to_excel_with_openpyxl_engine(ext):
    # GH 29854
    # Smoke test: a styled frame can be written through the openpyxl engine.
    with tm.ensure_clean(ext) as filename:
        left = DataFrame({"A": np.linspace(1, 10, 10)})
        right = DataFrame({"B": np.linspace(1, 20, 10)})
        df = pd.concat([left, right], axis=1)
        styled = df.style.map(
            lambda val: f"color: {'red' if val < 0 else 'black'}"
        ).highlight_max()

        styled.to_excel(filename, engine="openpyxl")
+
297
+
298
@pytest.mark.parametrize("read_only", [True, False])
def test_read_workbook(datapath, ext, read_only):
    # GH 39528
    # read_excel accepts an already-loaded openpyxl Workbook object.
    filename = datapath("io", "data", "excel", "test1" + ext)
    with contextlib.closing(
        openpyxl.load_workbook(filename, read_only=read_only)
    ) as wb:
        result = pd.read_excel(wb, engine="openpyxl")
    expected = pd.read_excel(filename)
    tm.assert_frame_equal(result, expected)
+
309
+
310
@pytest.mark.parametrize(
    "header, expected_data",
    [
        (
            0,
            {
                "Title": [np.nan, "A", 1, 2, 3],
                "Unnamed: 1": [np.nan, "B", 4, 5, 6],
                "Unnamed: 2": [np.nan, "C", 7, 8, 9],
            },
        ),
        (2, {"A": [1, 2, 3], "B": [4, 5, 6], "C": [7, 8, 9]}),
    ],
)
@pytest.mark.parametrize(
    "filename", ["dimension_missing", "dimension_small", "dimension_large"]
)
# When read_only is None, use read_excel instead of a workbook
@pytest.mark.parametrize("read_only", [True, False, None])
def test_read_with_bad_dimension(
    datapath, ext, header, expected_data, filename, read_only
):
    # GH 38956, 39001 - no/incorrect dimension information
    # BUG FIX: the path previously used the literal "(unknown)" instead of the
    # parametrized `filename`, so every case opened a nonexistent file. Build
    # the path from the fixture so each dimension variant is actually read.
    path = datapath("io", "data", "excel", f"{filename}{ext}")
    if read_only is None:
        result = pd.read_excel(path, header=header)
    else:
        with contextlib.closing(
            openpyxl.load_workbook(path, read_only=read_only)
        ) as wb:
            result = pd.read_excel(wb, engine="openpyxl", header=header)
    expected = DataFrame(expected_data)
    tm.assert_frame_equal(result, expected)
+
344
+
345
def test_append_mode_file(ext):
    # GH 39576
    # Appending must rewrite the zip container, not concatenate two zips.
    df = DataFrame()

    with tm.ensure_clean(ext) as path:
        df.to_excel(path, engine="openpyxl")

        with ExcelWriter(
            path, mode="a", engine="openpyxl", if_sheet_exists="new"
        ) as writer:
            df.to_excel(writer)

        # make sure that zip files are not concatenated by making sure that
        # "docProps/app.xml" only occurs twice in the file
        data = Path(path).read_bytes()
        first = data.find(b"docProps/app.xml")
        second = data.find(b"docProps/app.xml", first + 1)
        third = data.find(b"docProps/app.xml", second + 1)
        assert second != -1 and third == -1
+
365
+
366
# When read_only is None, use read_excel instead of a workbook
@pytest.mark.parametrize("read_only", [True, False, None])
def test_read_with_empty_trailing_rows(datapath, ext, read_only):
    # GH 39181
    # Trailing empty rows in the sheet must not change the parsed frame.
    path = datapath("io", "data", "excel", f"empty_trailing_rows{ext}")
    if read_only is None:
        result = pd.read_excel(path)
    else:
        with contextlib.closing(
            openpyxl.load_workbook(path, read_only=read_only)
        ) as wb:
            result = pd.read_excel(wb, engine="openpyxl")
    expected = DataFrame(
        {
            "Title": [np.nan, "A", 1, 2, 3],
            "Unnamed: 1": [np.nan, "B", 4, 5, 6],
            "Unnamed: 2": [np.nan, "C", 7, 8, 9],
        }
    )
    tm.assert_frame_equal(result, expected)
+
387
+
388
# When read_only is None, use read_excel instead of a workbook
@pytest.mark.parametrize("read_only", [True, False, None])
def test_read_empty_with_blank_row(datapath, ext, read_only):
    # GH 39547 - empty excel file with a row that has no data
    path = datapath("io", "data", "excel", f"empty_with_blank_row{ext}")
    if read_only is None:
        result = pd.read_excel(path)
    else:
        with contextlib.closing(
            openpyxl.load_workbook(path, read_only=read_only)
        ) as wb:
            result = pd.read_excel(wb, engine="openpyxl")
    # a data-less row must not produce any columns or rows
    expected = DataFrame()
    tm.assert_frame_equal(result, expected)
+
403
+
404
def test_book_and_sheets_consistent(ext):
    # GH#45687 - Ensure sheets is updated if user modifies book
    with tm.ensure_clean(ext) as path:
        with ExcelWriter(path, engine="openpyxl") as writer:
            assert writer.sheets == {}
            # mutating the underlying book directly must be reflected
            # in writer.sheets
            sheet = writer.book.create_sheet("test_name", 0)
            assert writer.sheets == {"test_name": sheet}
+
412
+
413
def test_ints_spelled_with_decimals(datapath, ext):
    # GH 46988 - openpyxl returns this sheet with floats
    path = datapath("io", "data", "excel", f"ints_spelled_with_decimals{ext}")
    result = pd.read_excel(path)
    # values like "2.0" in the sheet should round-trip as integers
    expected = DataFrame(range(2, 12), columns=[1])
    tm.assert_frame_equal(result, expected)
+
420
+
421
def test_read_multiindex_header_no_index_names(datapath, ext):
    # GH#47487
    # MultiIndex header plus MultiIndex index_col, with no index names in file.
    path = datapath("io", "data", "excel", f"multiindex_no_index_names{ext}")
    result = pd.read_excel(path, index_col=[0, 1, 2], header=[0, 1, 2])
    columns = pd.MultiIndex.from_tuples(
        [("X", "Y", "A1"), ("X", "Y", "A2"), ("XX", "YY", "B1"), ("XX", "YY", "B2")]
    )
    index = pd.MultiIndex.from_tuples([("A", "AA", "AAA"), ("A", "BB", "BBB")])
    expected = DataFrame(
        [[np.nan, "x", "x", "x"], ["x", np.nan, np.nan, np.nan]],
        columns=columns,
        index=index,
    )
    tm.assert_frame_equal(result, expected)
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_readers.py ADDED
@@ -0,0 +1,1751 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from datetime import (
4
+ datetime,
5
+ time,
6
+ )
7
+ from functools import partial
8
+ from io import BytesIO
9
+ import os
10
+ from pathlib import Path
11
+ import platform
12
+ import re
13
+ from urllib.error import URLError
14
+ from zipfile import BadZipFile
15
+
16
+ import numpy as np
17
+ import pytest
18
+
19
+ from pandas._config import using_pyarrow_string_dtype
20
+
21
+ from pandas.compat import is_platform_windows
22
+ import pandas.util._test_decorators as td
23
+
24
+ import pandas as pd
25
+ from pandas import (
26
+ DataFrame,
27
+ Index,
28
+ MultiIndex,
29
+ Series,
30
+ read_csv,
31
+ )
32
+ import pandas._testing as tm
33
+ from pandas.core.arrays import (
34
+ ArrowStringArray,
35
+ StringArray,
36
+ )
37
+
38
if is_platform_windows():
    # networked/parallel IO on Windows CI is flaky; force single-cpu
    pytestmark = pytest.mark.single_cpu

read_ext_params = [".xls", ".xlsx", ".xlsm", ".xlsb", ".ods"]
engine_params = [
    # Add any engines to test here
    # When defusedxml is installed it triggers deprecation warnings for
    # xlrd and openpyxl, so catch those here
    pytest.param("xlrd", marks=[td.skip_if_no("xlrd")]),
    pytest.param("openpyxl", marks=[td.skip_if_no("openpyxl")]),
    pytest.param(None, marks=[td.skip_if_no("xlrd")]),
    pytest.param("pyxlsb", marks=td.skip_if_no("pyxlsb")),
    pytest.param("odf", marks=td.skip_if_no("odf")),
    pytest.param("calamine", marks=td.skip_if_no("python_calamine")),
]
+
69
+
70
+ def _is_valid_engine_ext_pair(engine, read_ext: str) -> bool:
71
+ """
72
+ Filter out invalid (engine, ext) pairs instead of skipping, as that
73
+ produces 500+ pytest.skips.
74
+ """
75
+ engine = engine.values[0]
76
+ if engine == "openpyxl" and read_ext == ".xls":
77
+ return False
78
+ if engine == "odf" and read_ext != ".ods":
79
+ return False
80
+ if read_ext == ".ods" and engine not in {"odf", "calamine"}:
81
+ return False
82
+ if engine == "pyxlsb" and read_ext != ".xlsb":
83
+ return False
84
+ if read_ext == ".xlsb" and engine not in {"pyxlsb", "calamine"}:
85
+ return False
86
+ if engine == "xlrd" and read_ext != ".xls":
87
+ return False
88
+ return True
89
+
90
+
91
def _transfer_marks(engine, read_ext):
    """
    engine gives us a pytest.param object with some marks, read_ext is just
    a string. We need to generate a new pytest.param inheriting the marks.
    """
    combined = engine.values + (read_ext,)
    return pytest.param(combined, marks=engine.marks)
+
100
+
101
@pytest.fixture(
    params=[
        _transfer_marks(eng, ext)
        for eng in engine_params
        for ext in read_ext_params
        if _is_valid_engine_ext_pair(eng, ext)
    ],
    ids=str,
)
def engine_and_read_ext(request):
    """
    Fixture for Excel reader engine and read_ext, only including valid pairs.
    """
    return request.param
+
116
+
117
@pytest.fixture
def engine(engine_and_read_ext):
    # first element of the (engine, read_ext) pair
    return engine_and_read_ext[0]
+
122
+
123
@pytest.fixture
def read_ext(engine_and_read_ext):
    # second element of the (engine, read_ext) pair
    return engine_and_read_ext[1]
+
128
+
129
@pytest.fixture
def df_ref(datapath):
    """
    Obtain the reference data from read_csv with the Python engine.
    """
    filepath = datapath("io", "data", "csv", "test1.csv")
    return read_csv(filepath, index_col=0, parse_dates=True, engine="python")
+
138
+
139
def get_exp_unit(read_ext: str, engine: str | None) -> str:
    """Expected datetime64 resolution for frames read back from Excel."""
    return "ns"
+
142
+
143
def adjust_expected(expected: DataFrame, read_ext: str, engine: str) -> None:
    """Normalize a reference frame in place to match read_excel output."""
    expected.index.name = None
    unit = get_exp_unit(read_ext, engine)
    # error: "Index" has no attribute "as_unit"
    expected.index = expected.index.as_unit(unit)  # type: ignore[attr-defined]
+
149
+
150
def xfail_datetimes_with_pyxlsb(engine, request):
    """Mark the running test as xfail when pyxlsb would have to read datetimes."""
    if engine != "pyxlsb":
        return
    request.applymarker(
        pytest.mark.xfail(
            reason="Sheets containing datetimes not supported by pyxlsb"
        )
    )
+
158
+
159
+ class TestReaders:
160
+ @pytest.fixture(autouse=True)
161
+ def cd_and_set_engine(self, engine, datapath, monkeypatch):
162
+ """
163
+ Change directory and set engine for read_excel calls.
164
+ """
165
+ func = partial(pd.read_excel, engine=engine)
166
+ monkeypatch.chdir(datapath("io", "data", "excel"))
167
+ monkeypatch.setattr(pd, "read_excel", func)
168
+
169
+ def test_engine_used(self, read_ext, engine, monkeypatch):
170
+ # GH 38884
171
+ def parser(self, *args, **kwargs):
172
+ return self.engine
173
+
174
+ monkeypatch.setattr(pd.ExcelFile, "parse", parser)
175
+
176
+ expected_defaults = {
177
+ "xlsx": "openpyxl",
178
+ "xlsm": "openpyxl",
179
+ "xlsb": "pyxlsb",
180
+ "xls": "xlrd",
181
+ "ods": "odf",
182
+ }
183
+
184
+ with open("test1" + read_ext, "rb") as f:
185
+ result = pd.read_excel(f)
186
+
187
+ if engine is not None:
188
+ expected = engine
189
+ else:
190
+ expected = expected_defaults[read_ext[1:]]
191
+ assert result == expected
192
+
193
+ def test_engine_kwargs(self, read_ext, engine):
194
+ # GH#52214
195
+ expected_defaults = {
196
+ "xlsx": {"foo": "abcd"},
197
+ "xlsm": {"foo": 123},
198
+ "xlsb": {"foo": "True"},
199
+ "xls": {"foo": True},
200
+ "ods": {"foo": "abcd"},
201
+ }
202
+
203
+ if engine in {"xlrd", "pyxlsb"}:
204
+ msg = re.escape(r"open_workbook() got an unexpected keyword argument 'foo'")
205
+ elif engine == "odf":
206
+ msg = re.escape(r"load() got an unexpected keyword argument 'foo'")
207
+ else:
208
+ msg = re.escape(r"load_workbook() got an unexpected keyword argument 'foo'")
209
+
210
+ if engine is not None:
211
+ with pytest.raises(TypeError, match=msg):
212
+ pd.read_excel(
213
+ "test1" + read_ext,
214
+ sheet_name="Sheet1",
215
+ index_col=0,
216
+ engine_kwargs=expected_defaults[read_ext[1:]],
217
+ )
218
+
219
+ def test_usecols_int(self, read_ext):
220
+ # usecols as int
221
+ msg = "Passing an integer for `usecols`"
222
+ with pytest.raises(ValueError, match=msg):
223
+ pd.read_excel(
224
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, usecols=3
225
+ )
226
+
227
+ # usecols as int
228
+ with pytest.raises(ValueError, match=msg):
229
+ pd.read_excel(
230
+ "test1" + read_ext,
231
+ sheet_name="Sheet2",
232
+ skiprows=[1],
233
+ index_col=0,
234
+ usecols=3,
235
+ )
236
+
237
+ def test_usecols_list(self, request, engine, read_ext, df_ref):
238
+ xfail_datetimes_with_pyxlsb(engine, request)
239
+
240
+ expected = df_ref[["B", "C"]]
241
+ adjust_expected(expected, read_ext, engine)
242
+
243
+ df1 = pd.read_excel(
244
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, usecols=[0, 2, 3]
245
+ )
246
+ df2 = pd.read_excel(
247
+ "test1" + read_ext,
248
+ sheet_name="Sheet2",
249
+ skiprows=[1],
250
+ index_col=0,
251
+ usecols=[0, 2, 3],
252
+ )
253
+
254
+ # TODO add index to xls file)
255
+ tm.assert_frame_equal(df1, expected)
256
+ tm.assert_frame_equal(df2, expected)
257
+
258
+ def test_usecols_str(self, request, engine, read_ext, df_ref):
259
+ xfail_datetimes_with_pyxlsb(engine, request)
260
+
261
+ expected = df_ref[["A", "B", "C"]]
262
+ adjust_expected(expected, read_ext, engine)
263
+
264
+ df2 = pd.read_excel(
265
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, usecols="A:D"
266
+ )
267
+ df3 = pd.read_excel(
268
+ "test1" + read_ext,
269
+ sheet_name="Sheet2",
270
+ skiprows=[1],
271
+ index_col=0,
272
+ usecols="A:D",
273
+ )
274
+
275
+ # TODO add index to xls, read xls ignores index name ?
276
+ tm.assert_frame_equal(df2, expected)
277
+ tm.assert_frame_equal(df3, expected)
278
+
279
+ expected = df_ref[["B", "C"]]
280
+ adjust_expected(expected, read_ext, engine)
281
+
282
+ df2 = pd.read_excel(
283
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, usecols="A,C,D"
284
+ )
285
+ df3 = pd.read_excel(
286
+ "test1" + read_ext,
287
+ sheet_name="Sheet2",
288
+ skiprows=[1],
289
+ index_col=0,
290
+ usecols="A,C,D",
291
+ )
292
+ # TODO add index to xls file
293
+ tm.assert_frame_equal(df2, expected)
294
+ tm.assert_frame_equal(df3, expected)
295
+
296
+ df2 = pd.read_excel(
297
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, usecols="A,C:D"
298
+ )
299
+ df3 = pd.read_excel(
300
+ "test1" + read_ext,
301
+ sheet_name="Sheet2",
302
+ skiprows=[1],
303
+ index_col=0,
304
+ usecols="A,C:D",
305
+ )
306
+ tm.assert_frame_equal(df2, expected)
307
+ tm.assert_frame_equal(df3, expected)
308
+
309
+ @pytest.mark.parametrize(
310
+ "usecols", [[0, 1, 3], [0, 3, 1], [1, 0, 3], [1, 3, 0], [3, 0, 1], [3, 1, 0]]
311
+ )
312
+ def test_usecols_diff_positional_int_columns_order(
313
+ self, request, engine, read_ext, usecols, df_ref
314
+ ):
315
+ xfail_datetimes_with_pyxlsb(engine, request)
316
+
317
+ expected = df_ref[["A", "C"]]
318
+ adjust_expected(expected, read_ext, engine)
319
+
320
+ result = pd.read_excel(
321
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, usecols=usecols
322
+ )
323
+ tm.assert_frame_equal(result, expected)
324
+
325
+ @pytest.mark.parametrize("usecols", [["B", "D"], ["D", "B"]])
326
+ def test_usecols_diff_positional_str_columns_order(self, read_ext, usecols, df_ref):
327
+ expected = df_ref[["B", "D"]]
328
+ expected.index = range(len(expected))
329
+
330
+ result = pd.read_excel("test1" + read_ext, sheet_name="Sheet1", usecols=usecols)
331
+ tm.assert_frame_equal(result, expected)
332
+
333
+ def test_read_excel_without_slicing(self, request, engine, read_ext, df_ref):
334
+ xfail_datetimes_with_pyxlsb(engine, request)
335
+
336
+ expected = df_ref
337
+ adjust_expected(expected, read_ext, engine)
338
+
339
+ result = pd.read_excel("test1" + read_ext, sheet_name="Sheet1", index_col=0)
340
+ tm.assert_frame_equal(result, expected)
341
+
342
+ def test_usecols_excel_range_str(self, request, engine, read_ext, df_ref):
343
+ xfail_datetimes_with_pyxlsb(engine, request)
344
+
345
+ expected = df_ref[["C", "D"]]
346
+ adjust_expected(expected, read_ext, engine)
347
+
348
+ result = pd.read_excel(
349
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, usecols="A,D:E"
350
+ )
351
+ tm.assert_frame_equal(result, expected)
352
+
353
+ def test_usecols_excel_range_str_invalid(self, read_ext):
354
+ msg = "Invalid column name: E1"
355
+
356
+ with pytest.raises(ValueError, match=msg):
357
+ pd.read_excel("test1" + read_ext, sheet_name="Sheet1", usecols="D:E1")
358
+
359
+ def test_index_col_label_error(self, read_ext):
360
+ msg = "list indices must be integers.*, not str"
361
+
362
+ with pytest.raises(TypeError, match=msg):
363
+ pd.read_excel(
364
+ "test1" + read_ext,
365
+ sheet_name="Sheet1",
366
+ index_col=["A"],
367
+ usecols=["A", "C"],
368
+ )
369
+
370
+ def test_index_col_str(self, read_ext):
371
+ # see gh-52716
372
+ result = pd.read_excel("test1" + read_ext, sheet_name="Sheet3", index_col="A")
373
+ expected = DataFrame(
374
+ columns=["B", "C", "D", "E", "F"], index=Index([], name="A")
375
+ )
376
+ tm.assert_frame_equal(result, expected)
377
+
378
+ def test_index_col_empty(self, read_ext):
379
+ # see gh-9208
380
+ result = pd.read_excel(
381
+ "test1" + read_ext, sheet_name="Sheet3", index_col=["A", "B", "C"]
382
+ )
383
+ expected = DataFrame(
384
+ columns=["D", "E", "F"],
385
+ index=MultiIndex(levels=[[]] * 3, codes=[[]] * 3, names=["A", "B", "C"]),
386
+ )
387
+ tm.assert_frame_equal(result, expected)
388
+
389
+ @pytest.mark.parametrize("index_col", [None, 2])
390
+ def test_index_col_with_unnamed(self, read_ext, index_col):
391
+ # see gh-18792
392
+ result = pd.read_excel(
393
+ "test1" + read_ext, sheet_name="Sheet4", index_col=index_col
394
+ )
395
+ expected = DataFrame(
396
+ [["i1", "a", "x"], ["i2", "b", "y"]], columns=["Unnamed: 0", "col1", "col2"]
397
+ )
398
+ if index_col:
399
+ expected = expected.set_index(expected.columns[index_col])
400
+
401
+ tm.assert_frame_equal(result, expected)
402
+
403
+ def test_usecols_pass_non_existent_column(self, read_ext):
404
+ msg = (
405
+ "Usecols do not match columns, "
406
+ "columns expected but not found: "
407
+ r"\['E'\]"
408
+ )
409
+
410
+ with pytest.raises(ValueError, match=msg):
411
+ pd.read_excel("test1" + read_ext, usecols=["E"])
412
+
413
+ def test_usecols_wrong_type(self, read_ext):
414
+ msg = (
415
+ "'usecols' must either be list-like of "
416
+ "all strings, all unicode, all integers or a callable."
417
+ )
418
+
419
+ with pytest.raises(ValueError, match=msg):
420
+ pd.read_excel("test1" + read_ext, usecols=["E1", 0])
421
+
422
+ def test_excel_stop_iterator(self, read_ext):
423
+ parsed = pd.read_excel("test2" + read_ext, sheet_name="Sheet1")
424
+ expected = DataFrame([["aaaa", "bbbbb"]], columns=["Test", "Test1"])
425
+ tm.assert_frame_equal(parsed, expected)
426
+
427
+ def test_excel_cell_error_na(self, request, engine, read_ext):
428
+ xfail_datetimes_with_pyxlsb(engine, request)
429
+
430
+ # https://github.com/tafia/calamine/issues/355
431
+ if engine == "calamine" and read_ext == ".ods":
432
+ request.applymarker(
433
+ pytest.mark.xfail(reason="Calamine can't extract error from ods files")
434
+ )
435
+
436
+ parsed = pd.read_excel("test3" + read_ext, sheet_name="Sheet1")
437
+ expected = DataFrame([[np.nan]], columns=["Test"])
438
+ tm.assert_frame_equal(parsed, expected)
439
+
440
+ def test_excel_table(self, request, engine, read_ext, df_ref):
441
+ xfail_datetimes_with_pyxlsb(engine, request)
442
+
443
+ expected = df_ref
444
+ adjust_expected(expected, read_ext, engine)
445
+
446
+ df1 = pd.read_excel("test1" + read_ext, sheet_name="Sheet1", index_col=0)
447
+ df2 = pd.read_excel(
448
+ "test1" + read_ext, sheet_name="Sheet2", skiprows=[1], index_col=0
449
+ )
450
+ # TODO add index to file
451
+ tm.assert_frame_equal(df1, expected)
452
+ tm.assert_frame_equal(df2, expected)
453
+
454
+ df3 = pd.read_excel(
455
+ "test1" + read_ext, sheet_name="Sheet1", index_col=0, skipfooter=1
456
+ )
457
+ tm.assert_frame_equal(df3, df1.iloc[:-1])
458
+
459
+ def test_reader_special_dtypes(self, request, engine, read_ext):
460
+ xfail_datetimes_with_pyxlsb(engine, request)
461
+
462
+ unit = get_exp_unit(read_ext, engine)
463
+ expected = DataFrame.from_dict(
464
+ {
465
+ "IntCol": [1, 2, -3, 4, 0],
466
+ "FloatCol": [1.25, 2.25, 1.83, 1.92, 0.0000000005],
467
+ "BoolCol": [True, False, True, True, False],
468
+ "StrCol": [1, 2, 3, 4, 5],
469
+ "Str2Col": ["a", 3, "c", "d", "e"],
470
+ "DateCol": Index(
471
+ [
472
+ datetime(2013, 10, 30),
473
+ datetime(2013, 10, 31),
474
+ datetime(1905, 1, 1),
475
+ datetime(2013, 12, 14),
476
+ datetime(2015, 3, 14),
477
+ ],
478
+ dtype=f"M8[{unit}]",
479
+ ),
480
+ },
481
+ )
482
+ basename = "test_types"
483
+
484
+ # should read in correctly and infer types
485
+ actual = pd.read_excel(basename + read_ext, sheet_name="Sheet1")
486
+ tm.assert_frame_equal(actual, expected)
487
+
488
+ # if not coercing number, then int comes in as float
489
+ float_expected = expected.copy()
490
+ float_expected.loc[float_expected.index[1], "Str2Col"] = 3.0
491
+ actual = pd.read_excel(basename + read_ext, sheet_name="Sheet1")
492
+ tm.assert_frame_equal(actual, float_expected)
493
+
494
+ # check setting Index (assuming xls and xlsx are the same here)
495
+ for icol, name in enumerate(expected.columns):
496
+ actual = pd.read_excel(
497
+ basename + read_ext, sheet_name="Sheet1", index_col=icol
498
+ )
499
+ exp = expected.set_index(name)
500
+ tm.assert_frame_equal(actual, exp)
501
+
502
+ expected["StrCol"] = expected["StrCol"].apply(str)
503
+ actual = pd.read_excel(
504
+ basename + read_ext, sheet_name="Sheet1", converters={"StrCol": str}
505
+ )
506
+ tm.assert_frame_equal(actual, expected)
507
+
508
+ # GH8212 - support for converters and missing values
509
+ def test_reader_converters(self, read_ext):
510
+ basename = "test_converters"
511
+
512
+ expected = DataFrame.from_dict(
513
+ {
514
+ "IntCol": [1, 2, -3, -1000, 0],
515
+ "FloatCol": [12.5, np.nan, 18.3, 19.2, 0.000000005],
516
+ "BoolCol": ["Found", "Found", "Found", "Not found", "Found"],
517
+ "StrCol": ["1", np.nan, "3", "4", "5"],
518
+ }
519
+ )
520
+
521
+ converters = {
522
+ "IntCol": lambda x: int(x) if x != "" else -1000,
523
+ "FloatCol": lambda x: 10 * x if x else np.nan,
524
+ 2: lambda x: "Found" if x != "" else "Not found",
525
+ 3: lambda x: str(x) if x else "",
526
+ }
527
+
528
+ # should read in correctly and set types of single cells (not array
529
+ # dtypes)
530
+ actual = pd.read_excel(
531
+ basename + read_ext, sheet_name="Sheet1", converters=converters
532
+ )
533
+ tm.assert_frame_equal(actual, expected)
534
+
535
+ def test_reader_dtype(self, read_ext):
536
+ # GH 8212
537
+ basename = "testdtype"
538
+ actual = pd.read_excel(basename + read_ext)
539
+
540
+ expected = DataFrame(
541
+ {
542
+ "a": [1, 2, 3, 4],
543
+ "b": [2.5, 3.5, 4.5, 5.5],
544
+ "c": [1, 2, 3, 4],
545
+ "d": [1.0, 2.0, np.nan, 4.0],
546
+ }
547
+ )
548
+
549
+ tm.assert_frame_equal(actual, expected)
550
+
551
+ actual = pd.read_excel(
552
+ basename + read_ext, dtype={"a": "float64", "b": "float32", "c": str}
553
+ )
554
+
555
+ expected["a"] = expected["a"].astype("float64")
556
+ expected["b"] = expected["b"].astype("float32")
557
+ expected["c"] = Series(["001", "002", "003", "004"], dtype=object)
558
+ tm.assert_frame_equal(actual, expected)
559
+
560
+ msg = "Unable to convert column d to type int64"
561
+ with pytest.raises(ValueError, match=msg):
562
+ pd.read_excel(basename + read_ext, dtype={"d": "int64"})
563
+
564
+ @pytest.mark.parametrize(
565
+ "dtype,expected",
566
+ [
567
+ (
568
+ None,
569
+ DataFrame(
570
+ {
571
+ "a": [1, 2, 3, 4],
572
+ "b": [2.5, 3.5, 4.5, 5.5],
573
+ "c": [1, 2, 3, 4],
574
+ "d": [1.0, 2.0, np.nan, 4.0],
575
+ }
576
+ ),
577
+ ),
578
+ (
579
+ {"a": "float64", "b": "float32", "c": str, "d": str},
580
+ DataFrame(
581
+ {
582
+ "a": Series([1, 2, 3, 4], dtype="float64"),
583
+ "b": Series([2.5, 3.5, 4.5, 5.5], dtype="float32"),
584
+ "c": Series(["001", "002", "003", "004"], dtype=object),
585
+ "d": Series(["1", "2", np.nan, "4"], dtype=object),
586
+ }
587
+ ),
588
+ ),
589
+ ],
590
+ )
591
+ def test_reader_dtype_str(self, read_ext, dtype, expected):
592
+ # see gh-20377
593
+ basename = "testdtype"
594
+
595
+ actual = pd.read_excel(basename + read_ext, dtype=dtype)
596
+ tm.assert_frame_equal(actual, expected)
597
+
598
+ def test_dtype_backend(self, read_ext, dtype_backend, engine):
599
+ # GH#36712
600
+ if read_ext in (".xlsb", ".xls"):
601
+ pytest.skip(f"No engine for filetype: '{read_ext}'")
602
+
603
+ df = DataFrame(
604
+ {
605
+ "a": Series([1, 3], dtype="Int64"),
606
+ "b": Series([2.5, 4.5], dtype="Float64"),
607
+ "c": Series([True, False], dtype="boolean"),
608
+ "d": Series(["a", "b"], dtype="string"),
609
+ "e": Series([pd.NA, 6], dtype="Int64"),
610
+ "f": Series([pd.NA, 7.5], dtype="Float64"),
611
+ "g": Series([pd.NA, True], dtype="boolean"),
612
+ "h": Series([pd.NA, "a"], dtype="string"),
613
+ "i": Series([pd.Timestamp("2019-12-31")] * 2),
614
+ "j": Series([pd.NA, pd.NA], dtype="Int64"),
615
+ }
616
+ )
617
+ with tm.ensure_clean(read_ext) as file_path:
618
+ df.to_excel(file_path, sheet_name="test", index=False)
619
+ result = pd.read_excel(
620
+ file_path, sheet_name="test", dtype_backend=dtype_backend
621
+ )
622
+ if dtype_backend == "pyarrow":
623
+ import pyarrow as pa
624
+
625
+ from pandas.arrays import ArrowExtensionArray
626
+
627
+ expected = DataFrame(
628
+ {
629
+ col: ArrowExtensionArray(pa.array(df[col], from_pandas=True))
630
+ for col in df.columns
631
+ }
632
+ )
633
+ # pyarrow by default infers timestamp resolution as us, not ns
634
+ expected["i"] = ArrowExtensionArray(
635
+ expected["i"].array._pa_array.cast(pa.timestamp(unit="us"))
636
+ )
637
+ # pyarrow supports a null type, so don't have to default to Int64
638
+ expected["j"] = ArrowExtensionArray(pa.array([None, None]))
639
+ else:
640
+ expected = df
641
+ unit = get_exp_unit(read_ext, engine)
642
+ expected["i"] = expected["i"].astype(f"M8[{unit}]")
643
+
644
+ tm.assert_frame_equal(result, expected)
645
+
646
+ def test_dtype_backend_and_dtype(self, read_ext):
647
+ # GH#36712
648
+ if read_ext in (".xlsb", ".xls"):
649
+ pytest.skip(f"No engine for filetype: '{read_ext}'")
650
+
651
+ df = DataFrame({"a": [np.nan, 1.0], "b": [2.5, np.nan]})
652
+ with tm.ensure_clean(read_ext) as file_path:
653
+ df.to_excel(file_path, sheet_name="test", index=False)
654
+ result = pd.read_excel(
655
+ file_path,
656
+ sheet_name="test",
657
+ dtype_backend="numpy_nullable",
658
+ dtype="float64",
659
+ )
660
+ tm.assert_frame_equal(result, df)
661
+
662
+ @pytest.mark.xfail(
663
+ using_pyarrow_string_dtype(), reason="infer_string takes precedence"
664
+ )
665
+ def test_dtype_backend_string(self, read_ext, string_storage):
666
+ # GH#36712
667
+ if read_ext in (".xlsb", ".xls"):
668
+ pytest.skip(f"No engine for filetype: '{read_ext}'")
669
+
670
+ pa = pytest.importorskip("pyarrow")
671
+
672
+ with pd.option_context("mode.string_storage", string_storage):
673
+ df = DataFrame(
674
+ {
675
+ "a": np.array(["a", "b"], dtype=np.object_),
676
+ "b": np.array(["x", pd.NA], dtype=np.object_),
677
+ }
678
+ )
679
+ with tm.ensure_clean(read_ext) as file_path:
680
+ df.to_excel(file_path, sheet_name="test", index=False)
681
+ result = pd.read_excel(
682
+ file_path, sheet_name="test", dtype_backend="numpy_nullable"
683
+ )
684
+
685
+ if string_storage == "python":
686
+ expected = DataFrame(
687
+ {
688
+ "a": StringArray(np.array(["a", "b"], dtype=np.object_)),
689
+ "b": StringArray(np.array(["x", pd.NA], dtype=np.object_)),
690
+ }
691
+ )
692
+ else:
693
+ expected = DataFrame(
694
+ {
695
+ "a": ArrowStringArray(pa.array(["a", "b"])),
696
+ "b": ArrowStringArray(pa.array(["x", None])),
697
+ }
698
+ )
699
+ tm.assert_frame_equal(result, expected)
700
+
701
+ @pytest.mark.parametrize("dtypes, exp_value", [({}, 1), ({"a.1": "int64"}, 1)])
702
+ def test_dtype_mangle_dup_cols(self, read_ext, dtypes, exp_value):
703
+ # GH#35211
704
+ basename = "df_mangle_dup_col_dtypes"
705
+ dtype_dict = {"a": object, **dtypes}
706
+ dtype_dict_copy = dtype_dict.copy()
707
+ # GH#42462
708
+ result = pd.read_excel(basename + read_ext, dtype=dtype_dict)
709
+ expected = DataFrame(
710
+ {
711
+ "a": Series([1], dtype=object),
712
+ "a.1": Series([exp_value], dtype=object if not dtypes else None),
713
+ }
714
+ )
715
+ assert dtype_dict == dtype_dict_copy, "dtype dict changed"
716
+ tm.assert_frame_equal(result, expected)
717
+
718
+ def test_reader_spaces(self, read_ext):
719
+ # see gh-32207
720
+ basename = "test_spaces"
721
+
722
+ actual = pd.read_excel(basename + read_ext)
723
+ expected = DataFrame(
724
+ {
725
+ "testcol": [
726
+ "this is great",
727
+ "4 spaces",
728
+ "1 trailing ",
729
+ " 1 leading",
730
+ "2 spaces multiple times",
731
+ ]
732
+ }
733
+ )
734
+ tm.assert_frame_equal(actual, expected)
735
+
736
+ # gh-36122, gh-35802
737
+ @pytest.mark.parametrize(
738
+ "basename,expected",
739
+ [
740
+ ("gh-35802", DataFrame({"COLUMN": ["Test (1)"]})),
741
+ ("gh-36122", DataFrame(columns=["got 2nd sa"])),
742
+ ],
743
+ )
744
+ def test_read_excel_ods_nested_xml(self, engine, read_ext, basename, expected):
745
+ # see gh-35802
746
+ if engine != "odf":
747
+ pytest.skip(f"Skipped for engine: {engine}")
748
+
749
+ actual = pd.read_excel(basename + read_ext)
750
+ tm.assert_frame_equal(actual, expected)
751
+
752
+ def test_reading_all_sheets(self, read_ext):
753
+ # Test reading all sheet names by setting sheet_name to None,
754
+ # Ensure a dict is returned.
755
+ # See PR #9450
756
+ basename = "test_multisheet"
757
+ dfs = pd.read_excel(basename + read_ext, sheet_name=None)
758
+ # ensure this is not alphabetical to test order preservation
759
+ expected_keys = ["Charlie", "Alpha", "Beta"]
760
+ tm.assert_contains_all(expected_keys, dfs.keys())
761
+ # Issue 9930
762
+ # Ensure sheet order is preserved
763
+ assert expected_keys == list(dfs.keys())
764
+
765
+ def test_reading_multiple_specific_sheets(self, read_ext):
766
+ # Test reading specific sheet names by specifying a mixed list
767
+ # of integers and strings, and confirm that duplicated sheet
768
+ # references (positions/names) are removed properly.
769
+ # Ensure a dict is returned
770
+ # See PR #9450
771
+ basename = "test_multisheet"
772
+ # Explicitly request duplicates. Only the set should be returned.
773
+ expected_keys = [2, "Charlie", "Charlie"]
774
+ dfs = pd.read_excel(basename + read_ext, sheet_name=expected_keys)
775
+ expected_keys = list(set(expected_keys))
776
+ tm.assert_contains_all(expected_keys, dfs.keys())
777
+ assert len(expected_keys) == len(dfs.keys())
778
+
779
+ def test_reading_all_sheets_with_blank(self, read_ext):
780
+ # Test reading all sheet names by setting sheet_name to None,
781
+ # In the case where some sheets are blank.
782
+ # Issue #11711
783
+ basename = "blank_with_header"
784
+ dfs = pd.read_excel(basename + read_ext, sheet_name=None)
785
+ expected_keys = ["Sheet1", "Sheet2", "Sheet3"]
786
+ tm.assert_contains_all(expected_keys, dfs.keys())
787
+
788
+ # GH6403
789
+ def test_read_excel_blank(self, read_ext):
790
+ actual = pd.read_excel("blank" + read_ext, sheet_name="Sheet1")
791
+ tm.assert_frame_equal(actual, DataFrame())
792
+
793
+ def test_read_excel_blank_with_header(self, read_ext):
794
+ expected = DataFrame(columns=["col_1", "col_2"])
795
+ actual = pd.read_excel("blank_with_header" + read_ext, sheet_name="Sheet1")
796
+ tm.assert_frame_equal(actual, expected)
797
+
798
+ def test_exception_message_includes_sheet_name(self, read_ext):
799
+ # GH 48706
800
+ with pytest.raises(ValueError, match=r" \(sheet: Sheet1\)$"):
801
+ pd.read_excel("blank_with_header" + read_ext, header=[1], sheet_name=None)
802
+ with pytest.raises(ZeroDivisionError, match=r" \(sheet: Sheet1\)$"):
803
+ pd.read_excel("test1" + read_ext, usecols=lambda x: 1 / 0, sheet_name=None)
804
+
805
+ @pytest.mark.filterwarnings("ignore:Cell A4 is marked:UserWarning:openpyxl")
806
+ def test_date_conversion_overflow(self, request, engine, read_ext):
807
+ # GH 10001 : pandas.ExcelFile ignore parse_dates=False
808
+ xfail_datetimes_with_pyxlsb(engine, request)
809
+
810
+ expected = DataFrame(
811
+ [
812
+ [pd.Timestamp("2016-03-12"), "Marc Johnson"],
813
+ [pd.Timestamp("2016-03-16"), "Jack Black"],
814
+ [1e20, "Timothy Brown"],
815
+ ],
816
+ columns=["DateColWithBigInt", "StringCol"],
817
+ )
818
+
819
+ if engine == "openpyxl":
820
+ request.applymarker(
821
+ pytest.mark.xfail(reason="Maybe not supported by openpyxl")
822
+ )
823
+
824
+ if engine is None and read_ext in (".xlsx", ".xlsm"):
825
+ # GH 35029
826
+ request.applymarker(
827
+ pytest.mark.xfail(reason="Defaults to openpyxl, maybe not supported")
828
+ )
829
+
830
+ result = pd.read_excel("testdateoverflow" + read_ext)
831
+ tm.assert_frame_equal(result, expected)
832
+
833
+ def test_sheet_name(self, request, read_ext, engine, df_ref):
834
+ xfail_datetimes_with_pyxlsb(engine, request)
835
+
836
+ filename = "test1"
837
+ sheet_name = "Sheet1"
838
+
839
+ expected = df_ref
840
+ adjust_expected(expected, read_ext, engine)
841
+
842
+ df1 = pd.read_excel(
843
+ filename + read_ext, sheet_name=sheet_name, index_col=0
844
+ ) # doc
845
+ df2 = pd.read_excel(filename + read_ext, index_col=0, sheet_name=sheet_name)
846
+
847
+ tm.assert_frame_equal(df1, expected)
848
+ tm.assert_frame_equal(df2, expected)
849
+
850
+ def test_excel_read_buffer(self, read_ext):
851
+ pth = "test1" + read_ext
852
+ expected = pd.read_excel(pth, sheet_name="Sheet1", index_col=0)
853
+ with open(pth, "rb") as f:
854
+ actual = pd.read_excel(f, sheet_name="Sheet1", index_col=0)
855
+ tm.assert_frame_equal(expected, actual)
856
+
857
+ def test_bad_engine_raises(self):
858
+ bad_engine = "foo"
859
+ with pytest.raises(ValueError, match="Unknown engine: foo"):
860
+ pd.read_excel("", engine=bad_engine)
861
+
862
+ @pytest.mark.parametrize(
863
+ "sheet_name",
864
+ [3, [0, 3], [3, 0], "Sheet4", ["Sheet1", "Sheet4"], ["Sheet4", "Sheet1"]],
865
+ )
866
+ def test_bad_sheetname_raises(self, read_ext, sheet_name):
867
+ # GH 39250
868
+ msg = "Worksheet index 3 is invalid|Worksheet named 'Sheet4' not found"
869
+ with pytest.raises(ValueError, match=msg):
870
+ pd.read_excel("blank" + read_ext, sheet_name=sheet_name)
871
+
872
+ def test_missing_file_raises(self, read_ext):
873
+ bad_file = f"foo{read_ext}"
874
+ # CI tests with other languages, translates to "No such file or directory"
875
+ match = "|".join(
876
+ [
877
+ "(No such file or directory",
878
+ "没有那个文件或目录",
879
+ "File o directory non esistente)",
880
+ ]
881
+ )
882
+ with pytest.raises(FileNotFoundError, match=match):
883
+ pd.read_excel(bad_file)
884
+
885
+ def test_corrupt_bytes_raises(self, engine):
886
+ bad_stream = b"foo"
887
+ if engine is None:
888
+ error = ValueError
889
+ msg = (
890
+ "Excel file format cannot be determined, you must "
891
+ "specify an engine manually."
892
+ )
893
+ elif engine == "xlrd":
894
+ from xlrd import XLRDError
895
+
896
+ error = XLRDError
897
+ msg = (
898
+ "Unsupported format, or corrupt file: Expected BOF "
899
+ "record; found b'foo'"
900
+ )
901
+ elif engine == "calamine":
902
+ from python_calamine import CalamineError
903
+
904
+ error = CalamineError
905
+ msg = "Cannot detect file format"
906
+ else:
907
+ error = BadZipFile
908
+ msg = "File is not a zip file"
909
+ with pytest.raises(error, match=msg):
910
+ pd.read_excel(BytesIO(bad_stream))
911
+
912
+ @pytest.mark.network
913
+ @pytest.mark.single_cpu
914
+ def test_read_from_http_url(self, httpserver, read_ext):
915
+ with open("test1" + read_ext, "rb") as f:
916
+ httpserver.serve_content(content=f.read())
917
+ url_table = pd.read_excel(httpserver.url)
918
+ local_table = pd.read_excel("test1" + read_ext)
919
+ tm.assert_frame_equal(url_table, local_table)
920
+
921
+ @td.skip_if_not_us_locale
922
+ @pytest.mark.single_cpu
923
+ def test_read_from_s3_url(self, read_ext, s3_public_bucket, s3so):
924
+ # Bucket created in tests/io/conftest.py
925
+ with open("test1" + read_ext, "rb") as f:
926
+ s3_public_bucket.put_object(Key="test1" + read_ext, Body=f)
927
+
928
+ url = f"s3://{s3_public_bucket.name}/test1" + read_ext
929
+
930
+ url_table = pd.read_excel(url, storage_options=s3so)
931
+ local_table = pd.read_excel("test1" + read_ext)
932
+ tm.assert_frame_equal(url_table, local_table)
933
+
934
+ @pytest.mark.single_cpu
935
+ def test_read_from_s3_object(self, read_ext, s3_public_bucket, s3so):
936
+ # GH 38788
937
+ # Bucket created in tests/io/conftest.py
938
+ with open("test1" + read_ext, "rb") as f:
939
+ s3_public_bucket.put_object(Key="test1" + read_ext, Body=f)
940
+
941
+ import s3fs
942
+
943
+ s3 = s3fs.S3FileSystem(**s3so)
944
+
945
+ with s3.open(f"s3://{s3_public_bucket.name}/test1" + read_ext) as f:
946
+ url_table = pd.read_excel(f)
947
+
948
+ local_table = pd.read_excel("test1" + read_ext)
949
+ tm.assert_frame_equal(url_table, local_table)
950
+
951
+ @pytest.mark.slow
952
+ def test_read_from_file_url(self, read_ext, datapath):
953
+ # FILE
954
+ localtable = os.path.join(datapath("io", "data", "excel"), "test1" + read_ext)
955
+ local_table = pd.read_excel(localtable)
956
+
957
+ try:
958
+ url_table = pd.read_excel("file://localhost/" + localtable)
959
+ except URLError:
960
+ # fails on some systems
961
+ platform_info = " ".join(platform.uname()).strip()
962
+ pytest.skip(f"failing on {platform_info}")
963
+
964
+ tm.assert_frame_equal(url_table, local_table)
965
+
966
+ def test_read_from_pathlib_path(self, read_ext):
967
+ # GH12655
968
+ str_path = "test1" + read_ext
969
+ expected = pd.read_excel(str_path, sheet_name="Sheet1", index_col=0)
970
+
971
+ path_obj = Path("test1" + read_ext)
972
+ actual = pd.read_excel(path_obj, sheet_name="Sheet1", index_col=0)
973
+
974
+ tm.assert_frame_equal(expected, actual)
975
+
976
+ @td.skip_if_no("py.path")
977
+ def test_read_from_py_localpath(self, read_ext):
978
+ # GH12655
979
+ from py.path import local as LocalPath
980
+
981
+ str_path = os.path.join("test1" + read_ext)
982
+ expected = pd.read_excel(str_path, sheet_name="Sheet1", index_col=0)
983
+
984
+ path_obj = LocalPath().join("test1" + read_ext)
985
+ actual = pd.read_excel(path_obj, sheet_name="Sheet1", index_col=0)
986
+
987
+ tm.assert_frame_equal(expected, actual)
988
+
989
+ def test_close_from_py_localpath(self, read_ext):
990
+ # GH31467
991
+ str_path = os.path.join("test1" + read_ext)
992
+ with open(str_path, "rb") as f:
993
+ x = pd.read_excel(f, sheet_name="Sheet1", index_col=0)
994
+ del x
995
+ # should not throw an exception because the passed file was closed
996
+ f.read()
997
+
998
+ def test_reader_seconds(self, request, engine, read_ext):
999
+ xfail_datetimes_with_pyxlsb(engine, request)
1000
+
1001
+ # GH 55045
1002
+ if engine == "calamine" and read_ext == ".ods":
1003
+ request.applymarker(
1004
+ pytest.mark.xfail(
1005
+ reason="ODS file contains bad datetime (seconds as text)"
1006
+ )
1007
+ )
1008
+
1009
+ # Test reading times with and without milliseconds. GH5945.
1010
+ expected = DataFrame.from_dict(
1011
+ {
1012
+ "Time": [
1013
+ time(1, 2, 3),
1014
+ time(2, 45, 56, 100000),
1015
+ time(4, 29, 49, 200000),
1016
+ time(6, 13, 42, 300000),
1017
+ time(7, 57, 35, 400000),
1018
+ time(9, 41, 28, 500000),
1019
+ time(11, 25, 21, 600000),
1020
+ time(13, 9, 14, 700000),
1021
+ time(14, 53, 7, 800000),
1022
+ time(16, 37, 0, 900000),
1023
+ time(18, 20, 54),
1024
+ ]
1025
+ }
1026
+ )
1027
+
1028
+ actual = pd.read_excel("times_1900" + read_ext, sheet_name="Sheet1")
1029
+ tm.assert_frame_equal(actual, expected)
1030
+
1031
+ actual = pd.read_excel("times_1904" + read_ext, sheet_name="Sheet1")
1032
+ tm.assert_frame_equal(actual, expected)
1033
+
1034
+ def test_read_excel_multiindex(self, request, engine, read_ext):
1035
+ # see gh-4679
1036
+ xfail_datetimes_with_pyxlsb(engine, request)
1037
+
1038
+ unit = get_exp_unit(read_ext, engine)
1039
+
1040
+ mi = MultiIndex.from_product([["foo", "bar"], ["a", "b"]])
1041
+ mi_file = "testmultiindex" + read_ext
1042
+
1043
+ # "mi_column" sheet
1044
+ expected = DataFrame(
1045
+ [
1046
+ [1, 2.5, pd.Timestamp("2015-01-01"), True],
1047
+ [2, 3.5, pd.Timestamp("2015-01-02"), False],
1048
+ [3, 4.5, pd.Timestamp("2015-01-03"), False],
1049
+ [4, 5.5, pd.Timestamp("2015-01-04"), True],
1050
+ ],
1051
+ columns=mi,
1052
+ )
1053
+ expected[mi[2]] = expected[mi[2]].astype(f"M8[{unit}]")
1054
+
1055
+ actual = pd.read_excel(
1056
+ mi_file, sheet_name="mi_column", header=[0, 1], index_col=0
1057
+ )
1058
+ tm.assert_frame_equal(actual, expected)
1059
+
1060
+ # "mi_index" sheet
1061
+ expected.index = mi
1062
+ expected.columns = ["a", "b", "c", "d"]
1063
+
1064
+ actual = pd.read_excel(mi_file, sheet_name="mi_index", index_col=[0, 1])
1065
+ tm.assert_frame_equal(actual, expected)
1066
+
1067
+ # "both" sheet
1068
+ expected.columns = mi
1069
+
1070
+ actual = pd.read_excel(
1071
+ mi_file, sheet_name="both", index_col=[0, 1], header=[0, 1]
1072
+ )
1073
+ tm.assert_frame_equal(actual, expected)
1074
+
1075
+ # "mi_index_name" sheet
1076
+ expected.columns = ["a", "b", "c", "d"]
1077
+ expected.index = mi.set_names(["ilvl1", "ilvl2"])
1078
+
1079
+ actual = pd.read_excel(mi_file, sheet_name="mi_index_name", index_col=[0, 1])
1080
+ tm.assert_frame_equal(actual, expected)
1081
+
1082
+ # "mi_column_name" sheet
1083
+ expected.index = list(range(4))
1084
+ expected.columns = mi.set_names(["c1", "c2"])
1085
+ actual = pd.read_excel(
1086
+ mi_file, sheet_name="mi_column_name", header=[0, 1], index_col=0
1087
+ )
1088
+ tm.assert_frame_equal(actual, expected)
1089
+
1090
+ # see gh-11317
1091
+ # "name_with_int" sheet
1092
+ expected.columns = mi.set_levels([1, 2], level=1).set_names(["c1", "c2"])
1093
+
1094
+ actual = pd.read_excel(
1095
+ mi_file, sheet_name="name_with_int", index_col=0, header=[0, 1]
1096
+ )
1097
+ tm.assert_frame_equal(actual, expected)
1098
+
1099
+ # "both_name" sheet
1100
+ expected.columns = mi.set_names(["c1", "c2"])
1101
+ expected.index = mi.set_names(["ilvl1", "ilvl2"])
1102
+
1103
+ actual = pd.read_excel(
1104
+ mi_file, sheet_name="both_name", index_col=[0, 1], header=[0, 1]
1105
+ )
1106
+ tm.assert_frame_equal(actual, expected)
1107
+
1108
+ # "both_skiprows" sheet
1109
+ actual = pd.read_excel(
1110
+ mi_file,
1111
+ sheet_name="both_name_skiprows",
1112
+ index_col=[0, 1],
1113
+ header=[0, 1],
1114
+ skiprows=2,
1115
+ )
1116
+ tm.assert_frame_equal(actual, expected)
1117
+
1118
+ @pytest.mark.parametrize(
1119
+ "sheet_name,idx_lvl2",
1120
+ [
1121
+ ("both_name_blank_after_mi_name", [np.nan, "b", "a", "b"]),
1122
+ ("both_name_multiple_blanks", [np.nan] * 4),
1123
+ ],
1124
+ )
1125
+ def test_read_excel_multiindex_blank_after_name(
1126
+ self, request, engine, read_ext, sheet_name, idx_lvl2
1127
+ ):
1128
+ # GH34673
1129
+ xfail_datetimes_with_pyxlsb(engine, request)
1130
+
1131
+ mi_file = "testmultiindex" + read_ext
1132
+ mi = MultiIndex.from_product([["foo", "bar"], ["a", "b"]], names=["c1", "c2"])
1133
+
1134
+ unit = get_exp_unit(read_ext, engine)
1135
+
1136
+ expected = DataFrame(
1137
+ [
1138
+ [1, 2.5, pd.Timestamp("2015-01-01"), True],
1139
+ [2, 3.5, pd.Timestamp("2015-01-02"), False],
1140
+ [3, 4.5, pd.Timestamp("2015-01-03"), False],
1141
+ [4, 5.5, pd.Timestamp("2015-01-04"), True],
1142
+ ],
1143
+ columns=mi,
1144
+ index=MultiIndex.from_arrays(
1145
+ (["foo", "foo", "bar", "bar"], idx_lvl2),
1146
+ names=["ilvl1", "ilvl2"],
1147
+ ),
1148
+ )
1149
+ expected[mi[2]] = expected[mi[2]].astype(f"M8[{unit}]")
1150
+ result = pd.read_excel(
1151
+ mi_file,
1152
+ sheet_name=sheet_name,
1153
+ index_col=[0, 1],
1154
+ header=[0, 1],
1155
+ )
1156
+ tm.assert_frame_equal(result, expected)
1157
+
1158
+ def test_read_excel_multiindex_header_only(self, read_ext):
1159
+ # see gh-11733.
1160
+ #
1161
+ # Don't try to parse a header name if there isn't one.
1162
+ mi_file = "testmultiindex" + read_ext
1163
+ result = pd.read_excel(mi_file, sheet_name="index_col_none", header=[0, 1])
1164
+
1165
+ exp_columns = MultiIndex.from_product([("A", "B"), ("key", "val")])
1166
+ expected = DataFrame([[1, 2, 3, 4]] * 2, columns=exp_columns)
1167
+ tm.assert_frame_equal(result, expected)
1168
+
1169
+ def test_excel_old_index_format(self, read_ext):
1170
+ # see gh-4679
1171
+ filename = "test_index_name_pre17" + read_ext
1172
+
1173
+ # We detect headers to determine if index names exist, so
1174
+ # that "index" name in the "names" version of the data will
1175
+ # now be interpreted as rows that include null data.
1176
+ data = np.array(
1177
+ [
1178
+ [np.nan, np.nan, np.nan, np.nan, np.nan],
1179
+ ["R0C0", "R0C1", "R0C2", "R0C3", "R0C4"],
1180
+ ["R1C0", "R1C1", "R1C2", "R1C3", "R1C4"],
1181
+ ["R2C0", "R2C1", "R2C2", "R2C3", "R2C4"],
1182
+ ["R3C0", "R3C1", "R3C2", "R3C3", "R3C4"],
1183
+ ["R4C0", "R4C1", "R4C2", "R4C3", "R4C4"],
1184
+ ],
1185
+ dtype=object,
1186
+ )
1187
+ columns = ["C_l0_g0", "C_l0_g1", "C_l0_g2", "C_l0_g3", "C_l0_g4"]
1188
+ mi = MultiIndex(
1189
+ levels=[
1190
+ ["R0", "R_l0_g0", "R_l0_g1", "R_l0_g2", "R_l0_g3", "R_l0_g4"],
1191
+ ["R1", "R_l1_g0", "R_l1_g1", "R_l1_g2", "R_l1_g3", "R_l1_g4"],
1192
+ ],
1193
+ codes=[[0, 1, 2, 3, 4, 5], [0, 1, 2, 3, 4, 5]],
1194
+ names=[None, None],
1195
+ )
1196
+ si = Index(
1197
+ ["R0", "R_l0_g0", "R_l0_g1", "R_l0_g2", "R_l0_g3", "R_l0_g4"], name=None
1198
+ )
1199
+
1200
+ expected = DataFrame(data, index=si, columns=columns)
1201
+
1202
+ actual = pd.read_excel(filename, sheet_name="single_names", index_col=0)
1203
+ tm.assert_frame_equal(actual, expected)
1204
+
1205
+ expected.index = mi
1206
+
1207
+ actual = pd.read_excel(filename, sheet_name="multi_names", index_col=[0, 1])
1208
+ tm.assert_frame_equal(actual, expected)
1209
+
1210
+ # The analogous versions of the "names" version data
1211
+ # where there are explicitly no names for the indices.
1212
+ data = np.array(
1213
+ [
1214
+ ["R0C0", "R0C1", "R0C2", "R0C3", "R0C4"],
1215
+ ["R1C0", "R1C1", "R1C2", "R1C3", "R1C4"],
1216
+ ["R2C0", "R2C1", "R2C2", "R2C3", "R2C4"],
1217
+ ["R3C0", "R3C1", "R3C2", "R3C3", "R3C4"],
1218
+ ["R4C0", "R4C1", "R4C2", "R4C3", "R4C4"],
1219
+ ]
1220
+ )
1221
+ columns = ["C_l0_g0", "C_l0_g1", "C_l0_g2", "C_l0_g3", "C_l0_g4"]
1222
+ mi = MultiIndex(
1223
+ levels=[
1224
+ ["R_l0_g0", "R_l0_g1", "R_l0_g2", "R_l0_g3", "R_l0_g4"],
1225
+ ["R_l1_g0", "R_l1_g1", "R_l1_g2", "R_l1_g3", "R_l1_g4"],
1226
+ ],
1227
+ codes=[[0, 1, 2, 3, 4], [0, 1, 2, 3, 4]],
1228
+ names=[None, None],
1229
+ )
1230
+ si = Index(["R_l0_g0", "R_l0_g1", "R_l0_g2", "R_l0_g3", "R_l0_g4"], name=None)
1231
+
1232
+ expected = DataFrame(data, index=si, columns=columns)
1233
+
1234
+ actual = pd.read_excel(filename, sheet_name="single_no_names", index_col=0)
1235
+ tm.assert_frame_equal(actual, expected)
1236
+
1237
+ expected.index = mi
1238
+
1239
+ actual = pd.read_excel(filename, sheet_name="multi_no_names", index_col=[0, 1])
1240
+ tm.assert_frame_equal(actual, expected)
1241
+
1242
+ def test_read_excel_bool_header_arg(self, read_ext):
1243
+ # GH 6114
1244
+ msg = "Passing a bool to header is invalid"
1245
+ for arg in [True, False]:
1246
+ with pytest.raises(TypeError, match=msg):
1247
+ pd.read_excel("test1" + read_ext, header=arg)
1248
+
1249
+ def test_read_excel_skiprows(self, request, engine, read_ext):
1250
+ # GH 4903
1251
+ xfail_datetimes_with_pyxlsb(engine, request)
1252
+
1253
+ unit = get_exp_unit(read_ext, engine)
1254
+
1255
+ actual = pd.read_excel(
1256
+ "testskiprows" + read_ext, sheet_name="skiprows_list", skiprows=[0, 2]
1257
+ )
1258
+ expected = DataFrame(
1259
+ [
1260
+ [1, 2.5, pd.Timestamp("2015-01-01"), True],
1261
+ [2, 3.5, pd.Timestamp("2015-01-02"), False],
1262
+ [3, 4.5, pd.Timestamp("2015-01-03"), False],
1263
+ [4, 5.5, pd.Timestamp("2015-01-04"), True],
1264
+ ],
1265
+ columns=["a", "b", "c", "d"],
1266
+ )
1267
+ expected["c"] = expected["c"].astype(f"M8[{unit}]")
1268
+ tm.assert_frame_equal(actual, expected)
1269
+
1270
+ actual = pd.read_excel(
1271
+ "testskiprows" + read_ext,
1272
+ sheet_name="skiprows_list",
1273
+ skiprows=np.array([0, 2]),
1274
+ )
1275
+ tm.assert_frame_equal(actual, expected)
1276
+
1277
+ # GH36435
1278
+ actual = pd.read_excel(
1279
+ "testskiprows" + read_ext,
1280
+ sheet_name="skiprows_list",
1281
+ skiprows=lambda x: x in [0, 2],
1282
+ )
1283
+ tm.assert_frame_equal(actual, expected)
1284
+
1285
+ actual = pd.read_excel(
1286
+ "testskiprows" + read_ext,
1287
+ sheet_name="skiprows_list",
1288
+ skiprows=3,
1289
+ names=["a", "b", "c", "d"],
1290
+ )
1291
+ expected = DataFrame(
1292
+ [
1293
+ # [1, 2.5, pd.Timestamp("2015-01-01"), True],
1294
+ [2, 3.5, pd.Timestamp("2015-01-02"), False],
1295
+ [3, 4.5, pd.Timestamp("2015-01-03"), False],
1296
+ [4, 5.5, pd.Timestamp("2015-01-04"), True],
1297
+ ],
1298
+ columns=["a", "b", "c", "d"],
1299
+ )
1300
+ expected["c"] = expected["c"].astype(f"M8[{unit}]")
1301
+ tm.assert_frame_equal(actual, expected)
1302
+
1303
+ def test_read_excel_skiprows_callable_not_in(self, request, engine, read_ext):
1304
+ # GH 4903
1305
+ xfail_datetimes_with_pyxlsb(engine, request)
1306
+ unit = get_exp_unit(read_ext, engine)
1307
+
1308
+ actual = pd.read_excel(
1309
+ "testskiprows" + read_ext,
1310
+ sheet_name="skiprows_list",
1311
+ skiprows=lambda x: x not in [1, 3, 5],
1312
+ )
1313
+ expected = DataFrame(
1314
+ [
1315
+ [1, 2.5, pd.Timestamp("2015-01-01"), True],
1316
+ # [2, 3.5, pd.Timestamp("2015-01-02"), False],
1317
+ [3, 4.5, pd.Timestamp("2015-01-03"), False],
1318
+ # [4, 5.5, pd.Timestamp("2015-01-04"), True],
1319
+ ],
1320
+ columns=["a", "b", "c", "d"],
1321
+ )
1322
+ expected["c"] = expected["c"].astype(f"M8[{unit}]")
1323
+ tm.assert_frame_equal(actual, expected)
1324
+
1325
+ def test_read_excel_nrows(self, read_ext):
1326
+ # GH 16645
1327
+ num_rows_to_pull = 5
1328
+ actual = pd.read_excel("test1" + read_ext, nrows=num_rows_to_pull)
1329
+ expected = pd.read_excel("test1" + read_ext)
1330
+ expected = expected[:num_rows_to_pull]
1331
+ tm.assert_frame_equal(actual, expected)
1332
+
1333
+ def test_read_excel_nrows_greater_than_nrows_in_file(self, read_ext):
1334
+ # GH 16645
1335
+ expected = pd.read_excel("test1" + read_ext)
1336
+ num_records_in_file = len(expected)
1337
+ num_rows_to_pull = num_records_in_file + 10
1338
+ actual = pd.read_excel("test1" + read_ext, nrows=num_rows_to_pull)
1339
+ tm.assert_frame_equal(actual, expected)
1340
+
1341
+ def test_read_excel_nrows_non_integer_parameter(self, read_ext):
1342
+ # GH 16645
1343
+ msg = "'nrows' must be an integer >=0"
1344
+ with pytest.raises(ValueError, match=msg):
1345
+ pd.read_excel("test1" + read_ext, nrows="5")
1346
+
1347
+ @pytest.mark.parametrize(
1348
+ "filename,sheet_name,header,index_col,skiprows",
1349
+ [
1350
+ ("testmultiindex", "mi_column", [0, 1], 0, None),
1351
+ ("testmultiindex", "mi_index", None, [0, 1], None),
1352
+ ("testmultiindex", "both", [0, 1], [0, 1], None),
1353
+ ("testmultiindex", "mi_column_name", [0, 1], 0, None),
1354
+ ("testskiprows", "skiprows_list", None, None, [0, 2]),
1355
+ ("testskiprows", "skiprows_list", None, None, lambda x: x in (0, 2)),
1356
+ ],
1357
+ )
1358
+ def test_read_excel_nrows_params(
1359
+ self, read_ext, filename, sheet_name, header, index_col, skiprows
1360
+ ):
1361
+ """
1362
+ For various parameters, we should get the same result whether we
1363
+ limit the rows during load (nrows=3) or after (df.iloc[:3]).
1364
+ """
1365
+ # GH 46894
1366
+ expected = pd.read_excel(
1367
+ filename + read_ext,
1368
+ sheet_name=sheet_name,
1369
+ header=header,
1370
+ index_col=index_col,
1371
+ skiprows=skiprows,
1372
+ ).iloc[:3]
1373
+ actual = pd.read_excel(
1374
+ filename + read_ext,
1375
+ sheet_name=sheet_name,
1376
+ header=header,
1377
+ index_col=index_col,
1378
+ skiprows=skiprows,
1379
+ nrows=3,
1380
+ )
1381
+ tm.assert_frame_equal(actual, expected)
1382
+
1383
+ def test_deprecated_kwargs(self, read_ext):
1384
+ with pytest.raises(TypeError, match="but 3 positional arguments"):
1385
+ pd.read_excel("test1" + read_ext, "Sheet1", 0)
1386
+
1387
+ def test_no_header_with_list_index_col(self, read_ext):
1388
+ # GH 31783
1389
+ file_name = "testmultiindex" + read_ext
1390
+ data = [("B", "B"), ("key", "val"), (3, 4), (3, 4)]
1391
+ idx = MultiIndex.from_tuples(
1392
+ [("A", "A"), ("key", "val"), (1, 2), (1, 2)], names=(0, 1)
1393
+ )
1394
+ expected = DataFrame(data, index=idx, columns=(2, 3))
1395
+ result = pd.read_excel(
1396
+ file_name, sheet_name="index_col_none", index_col=[0, 1], header=None
1397
+ )
1398
+ tm.assert_frame_equal(expected, result)
1399
+
1400
+ def test_one_col_noskip_blank_line(self, read_ext):
1401
+ # GH 39808
1402
+ file_name = "one_col_blank_line" + read_ext
1403
+ data = [0.5, np.nan, 1, 2]
1404
+ expected = DataFrame(data, columns=["numbers"])
1405
+ result = pd.read_excel(file_name)
1406
+ tm.assert_frame_equal(result, expected)
1407
+
1408
+ def test_multiheader_two_blank_lines(self, read_ext):
1409
+ # GH 40442
1410
+ file_name = "testmultiindex" + read_ext
1411
+ columns = MultiIndex.from_tuples([("a", "A"), ("b", "B")])
1412
+ data = [[np.nan, np.nan], [np.nan, np.nan], [1, 3], [2, 4]]
1413
+ expected = DataFrame(data, columns=columns)
1414
+ result = pd.read_excel(
1415
+ file_name, sheet_name="mi_column_empty_rows", header=[0, 1]
1416
+ )
1417
+ tm.assert_frame_equal(result, expected)
1418
+
1419
+ def test_trailing_blanks(self, read_ext):
1420
+ """
1421
+ Sheets can contain blank cells with no data. Some of our readers
1422
+ were including those cells, creating many empty rows and columns
1423
+ """
1424
+ file_name = "trailing_blanks" + read_ext
1425
+ result = pd.read_excel(file_name)
1426
+ assert result.shape == (3, 3)
1427
+
1428
+ def test_ignore_chartsheets_by_str(self, request, engine, read_ext):
1429
+ # GH 41448
1430
+ if read_ext == ".ods":
1431
+ pytest.skip("chartsheets do not exist in the ODF format")
1432
+ if engine == "pyxlsb":
1433
+ request.applymarker(
1434
+ pytest.mark.xfail(
1435
+ reason="pyxlsb can't distinguish chartsheets from worksheets"
1436
+ )
1437
+ )
1438
+ with pytest.raises(ValueError, match="Worksheet named 'Chart1' not found"):
1439
+ pd.read_excel("chartsheet" + read_ext, sheet_name="Chart1")
1440
+
1441
+ def test_ignore_chartsheets_by_int(self, request, engine, read_ext):
1442
+ # GH 41448
1443
+ if read_ext == ".ods":
1444
+ pytest.skip("chartsheets do not exist in the ODF format")
1445
+ if engine == "pyxlsb":
1446
+ request.applymarker(
1447
+ pytest.mark.xfail(
1448
+ reason="pyxlsb can't distinguish chartsheets from worksheets"
1449
+ )
1450
+ )
1451
+ with pytest.raises(
1452
+ ValueError, match="Worksheet index 1 is invalid, 1 worksheets found"
1453
+ ):
1454
+ pd.read_excel("chartsheet" + read_ext, sheet_name=1)
1455
+
1456
+ def test_euro_decimal_format(self, read_ext):
1457
+ # copied from read_csv
1458
+ result = pd.read_excel("test_decimal" + read_ext, decimal=",", skiprows=1)
1459
+ expected = DataFrame(
1460
+ [
1461
+ [1, 1521.1541, 187101.9543, "ABC", "poi", 4.738797819],
1462
+ [2, 121.12, 14897.76, "DEF", "uyt", 0.377320872],
1463
+ [3, 878.158, 108013.434, "GHI", "rez", 2.735694704],
1464
+ ],
1465
+ columns=["Id", "Number1", "Number2", "Text1", "Text2", "Number3"],
1466
+ )
1467
+ tm.assert_frame_equal(result, expected)
1468
+
1469
+
1470
+ class TestExcelFileRead:
1471
+ def test_deprecate_bytes_input(self, engine, read_ext):
1472
+ # GH 53830
1473
+ msg = (
1474
+ "Passing bytes to 'read_excel' is deprecated and "
1475
+ "will be removed in a future version. To read from a "
1476
+ "byte string, wrap it in a `BytesIO` object."
1477
+ )
1478
+
1479
+ with tm.assert_produces_warning(
1480
+ FutureWarning, match=msg, raise_on_extra_warnings=False
1481
+ ):
1482
+ with open("test1" + read_ext, "rb") as f:
1483
+ pd.read_excel(f.read(), engine=engine)
1484
+
1485
+ @pytest.fixture(autouse=True)
1486
+ def cd_and_set_engine(self, engine, datapath, monkeypatch):
1487
+ """
1488
+ Change directory and set engine for ExcelFile objects.
1489
+ """
1490
+ func = partial(pd.ExcelFile, engine=engine)
1491
+ monkeypatch.chdir(datapath("io", "data", "excel"))
1492
+ monkeypatch.setattr(pd, "ExcelFile", func)
1493
+
1494
+ def test_engine_used(self, read_ext, engine):
1495
+ expected_defaults = {
1496
+ "xlsx": "openpyxl",
1497
+ "xlsm": "openpyxl",
1498
+ "xlsb": "pyxlsb",
1499
+ "xls": "xlrd",
1500
+ "ods": "odf",
1501
+ }
1502
+
1503
+ with pd.ExcelFile("test1" + read_ext) as excel:
1504
+ result = excel.engine
1505
+
1506
+ if engine is not None:
1507
+ expected = engine
1508
+ else:
1509
+ expected = expected_defaults[read_ext[1:]]
1510
+ assert result == expected
1511
+
1512
+ def test_excel_passes_na(self, read_ext):
1513
+ with pd.ExcelFile("test4" + read_ext) as excel:
1514
+ parsed = pd.read_excel(
1515
+ excel, sheet_name="Sheet1", keep_default_na=False, na_values=["apple"]
1516
+ )
1517
+ expected = DataFrame(
1518
+ [["NA"], [1], ["NA"], [np.nan], ["rabbit"]], columns=["Test"]
1519
+ )
1520
+ tm.assert_frame_equal(parsed, expected)
1521
+
1522
+ with pd.ExcelFile("test4" + read_ext) as excel:
1523
+ parsed = pd.read_excel(
1524
+ excel, sheet_name="Sheet1", keep_default_na=True, na_values=["apple"]
1525
+ )
1526
+ expected = DataFrame(
1527
+ [[np.nan], [1], [np.nan], [np.nan], ["rabbit"]], columns=["Test"]
1528
+ )
1529
+ tm.assert_frame_equal(parsed, expected)
1530
+
1531
+ # 13967
1532
+ with pd.ExcelFile("test5" + read_ext) as excel:
1533
+ parsed = pd.read_excel(
1534
+ excel, sheet_name="Sheet1", keep_default_na=False, na_values=["apple"]
1535
+ )
1536
+ expected = DataFrame(
1537
+ [["1.#QNAN"], [1], ["nan"], [np.nan], ["rabbit"]], columns=["Test"]
1538
+ )
1539
+ tm.assert_frame_equal(parsed, expected)
1540
+
1541
+ with pd.ExcelFile("test5" + read_ext) as excel:
1542
+ parsed = pd.read_excel(
1543
+ excel, sheet_name="Sheet1", keep_default_na=True, na_values=["apple"]
1544
+ )
1545
+ expected = DataFrame(
1546
+ [[np.nan], [1], [np.nan], [np.nan], ["rabbit"]], columns=["Test"]
1547
+ )
1548
+ tm.assert_frame_equal(parsed, expected)
1549
+
1550
+ @pytest.mark.parametrize("na_filter", [None, True, False])
1551
+ def test_excel_passes_na_filter(self, read_ext, na_filter):
1552
+ # gh-25453
1553
+ kwargs = {}
1554
+
1555
+ if na_filter is not None:
1556
+ kwargs["na_filter"] = na_filter
1557
+
1558
+ with pd.ExcelFile("test5" + read_ext) as excel:
1559
+ parsed = pd.read_excel(
1560
+ excel,
1561
+ sheet_name="Sheet1",
1562
+ keep_default_na=True,
1563
+ na_values=["apple"],
1564
+ **kwargs,
1565
+ )
1566
+
1567
+ if na_filter is False:
1568
+ expected = [["1.#QNAN"], [1], ["nan"], ["apple"], ["rabbit"]]
1569
+ else:
1570
+ expected = [[np.nan], [1], [np.nan], [np.nan], ["rabbit"]]
1571
+
1572
+ expected = DataFrame(expected, columns=["Test"])
1573
+ tm.assert_frame_equal(parsed, expected)
1574
+
1575
+ def test_excel_table_sheet_by_index(self, request, engine, read_ext, df_ref):
1576
+ xfail_datetimes_with_pyxlsb(engine, request)
1577
+
1578
+ expected = df_ref
1579
+ adjust_expected(expected, read_ext, engine)
1580
+
1581
+ with pd.ExcelFile("test1" + read_ext) as excel:
1582
+ df1 = pd.read_excel(excel, sheet_name=0, index_col=0)
1583
+ df2 = pd.read_excel(excel, sheet_name=1, skiprows=[1], index_col=0)
1584
+ tm.assert_frame_equal(df1, expected)
1585
+ tm.assert_frame_equal(df2, expected)
1586
+
1587
+ with pd.ExcelFile("test1" + read_ext) as excel:
1588
+ df1 = excel.parse(0, index_col=0)
1589
+ df2 = excel.parse(1, skiprows=[1], index_col=0)
1590
+ tm.assert_frame_equal(df1, expected)
1591
+ tm.assert_frame_equal(df2, expected)
1592
+
1593
+ with pd.ExcelFile("test1" + read_ext) as excel:
1594
+ df3 = pd.read_excel(excel, sheet_name=0, index_col=0, skipfooter=1)
1595
+ tm.assert_frame_equal(df3, df1.iloc[:-1])
1596
+
1597
+ with pd.ExcelFile("test1" + read_ext) as excel:
1598
+ df3 = excel.parse(0, index_col=0, skipfooter=1)
1599
+
1600
+ tm.assert_frame_equal(df3, df1.iloc[:-1])
1601
+
1602
+ def test_sheet_name(self, request, engine, read_ext, df_ref):
1603
+ xfail_datetimes_with_pyxlsb(engine, request)
1604
+
1605
+ expected = df_ref
1606
+ adjust_expected(expected, read_ext, engine)
1607
+
1608
+ filename = "test1"
1609
+ sheet_name = "Sheet1"
1610
+
1611
+ with pd.ExcelFile(filename + read_ext) as excel:
1612
+ df1_parse = excel.parse(sheet_name=sheet_name, index_col=0) # doc
1613
+
1614
+ with pd.ExcelFile(filename + read_ext) as excel:
1615
+ df2_parse = excel.parse(index_col=0, sheet_name=sheet_name)
1616
+
1617
+ tm.assert_frame_equal(df1_parse, expected)
1618
+ tm.assert_frame_equal(df2_parse, expected)
1619
+
1620
+ @pytest.mark.parametrize(
1621
+ "sheet_name",
1622
+ [3, [0, 3], [3, 0], "Sheet4", ["Sheet1", "Sheet4"], ["Sheet4", "Sheet1"]],
1623
+ )
1624
+ def test_bad_sheetname_raises(self, read_ext, sheet_name):
1625
+ # GH 39250
1626
+ msg = "Worksheet index 3 is invalid|Worksheet named 'Sheet4' not found"
1627
+ with pytest.raises(ValueError, match=msg):
1628
+ with pd.ExcelFile("blank" + read_ext) as excel:
1629
+ excel.parse(sheet_name=sheet_name)
1630
+
1631
+ def test_excel_read_buffer(self, engine, read_ext):
1632
+ pth = "test1" + read_ext
1633
+ expected = pd.read_excel(pth, sheet_name="Sheet1", index_col=0, engine=engine)
1634
+
1635
+ with open(pth, "rb") as f:
1636
+ with pd.ExcelFile(f) as xls:
1637
+ actual = pd.read_excel(xls, sheet_name="Sheet1", index_col=0)
1638
+
1639
+ tm.assert_frame_equal(expected, actual)
1640
+
1641
+ def test_reader_closes_file(self, engine, read_ext):
1642
+ with open("test1" + read_ext, "rb") as f:
1643
+ with pd.ExcelFile(f) as xlsx:
1644
+ # parses okay
1645
+ pd.read_excel(xlsx, sheet_name="Sheet1", index_col=0, engine=engine)
1646
+
1647
+ assert f.closed
1648
+
1649
+ def test_conflicting_excel_engines(self, read_ext):
1650
+ # GH 26566
1651
+ msg = "Engine should not be specified when passing an ExcelFile"
1652
+
1653
+ with pd.ExcelFile("test1" + read_ext) as xl:
1654
+ with pytest.raises(ValueError, match=msg):
1655
+ pd.read_excel(xl, engine="foo")
1656
+
1657
+ def test_excel_read_binary(self, engine, read_ext):
1658
+ # GH 15914
1659
+ expected = pd.read_excel("test1" + read_ext, engine=engine)
1660
+
1661
+ with open("test1" + read_ext, "rb") as f:
1662
+ data = f.read()
1663
+
1664
+ actual = pd.read_excel(BytesIO(data), engine=engine)
1665
+ tm.assert_frame_equal(expected, actual)
1666
+
1667
+ def test_excel_read_binary_via_read_excel(self, read_ext, engine):
1668
+ # GH 38424
1669
+ with open("test1" + read_ext, "rb") as f:
1670
+ result = pd.read_excel(f, engine=engine)
1671
+ expected = pd.read_excel("test1" + read_ext, engine=engine)
1672
+ tm.assert_frame_equal(result, expected)
1673
+
1674
+ def test_read_excel_header_index_out_of_range(self, engine):
1675
+ # GH#43143
1676
+ with open("df_header_oob.xlsx", "rb") as f:
1677
+ with pytest.raises(ValueError, match="exceeds maximum"):
1678
+ pd.read_excel(f, header=[0, 1])
1679
+
1680
+ @pytest.mark.parametrize("filename", ["df_empty.xlsx", "df_equals.xlsx"])
1681
+ def test_header_with_index_col(self, filename):
1682
+ # GH 33476
1683
+ idx = Index(["Z"], name="I2")
1684
+ cols = MultiIndex.from_tuples([("A", "B"), ("A", "B.1")], names=["I11", "I12"])
1685
+ expected = DataFrame([[1, 3]], index=idx, columns=cols, dtype="int64")
1686
+ result = pd.read_excel(
1687
+ filename, sheet_name="Sheet1", index_col=0, header=[0, 1]
1688
+ )
1689
+ tm.assert_frame_equal(expected, result)
1690
+
1691
+ def test_read_datetime_multiindex(self, request, engine, read_ext):
1692
+ # GH 34748
1693
+ xfail_datetimes_with_pyxlsb(engine, request)
1694
+
1695
+ f = "test_datetime_mi" + read_ext
1696
+ with pd.ExcelFile(f) as excel:
1697
+ actual = pd.read_excel(excel, header=[0, 1], index_col=0, engine=engine)
1698
+
1699
+ unit = get_exp_unit(read_ext, engine)
1700
+ dti = pd.DatetimeIndex(["2020-02-29", "2020-03-01"], dtype=f"M8[{unit}]")
1701
+ expected_column_index = MultiIndex.from_arrays(
1702
+ [dti[:1], dti[1:]],
1703
+ names=[
1704
+ dti[0].to_pydatetime(),
1705
+ dti[1].to_pydatetime(),
1706
+ ],
1707
+ )
1708
+ expected = DataFrame([], index=[], columns=expected_column_index)
1709
+
1710
+ tm.assert_frame_equal(expected, actual)
1711
+
1712
+ def test_engine_invalid_option(self, read_ext):
1713
+ # read_ext includes the '.' hence the weird formatting
1714
+ with pytest.raises(ValueError, match="Value must be one of *"):
1715
+ with pd.option_context(f"io.excel{read_ext}.reader", "abc"):
1716
+ pass
1717
+
1718
+ def test_ignore_chartsheets(self, request, engine, read_ext):
1719
+ # GH 41448
1720
+ if read_ext == ".ods":
1721
+ pytest.skip("chartsheets do not exist in the ODF format")
1722
+ if engine == "pyxlsb":
1723
+ request.applymarker(
1724
+ pytest.mark.xfail(
1725
+ reason="pyxlsb can't distinguish chartsheets from worksheets"
1726
+ )
1727
+ )
1728
+ with pd.ExcelFile("chartsheet" + read_ext) as excel:
1729
+ assert excel.sheet_names == ["Sheet1"]
1730
+
1731
+ def test_corrupt_files_closed(self, engine, read_ext):
1732
+ # GH41778
1733
+ errors = (BadZipFile,)
1734
+ if engine is None:
1735
+ pytest.skip(f"Invalid test for engine={engine}")
1736
+ elif engine == "xlrd":
1737
+ import xlrd
1738
+
1739
+ errors = (BadZipFile, xlrd.biffh.XLRDError)
1740
+ elif engine == "calamine":
1741
+ from python_calamine import CalamineError
1742
+
1743
+ errors = (CalamineError,)
1744
+
1745
+ with tm.ensure_clean(f"corrupt{read_ext}") as file:
1746
+ Path(file).write_text("corrupt", encoding="utf-8")
1747
+ with tm.assert_produces_warning(False):
1748
+ try:
1749
+ pd.ExcelFile(file, engine=engine)
1750
+ except errors:
1751
+ pass
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_style.py ADDED
@@ -0,0 +1,298 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+ import time
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas.compat import is_platform_windows
8
+ import pandas.util._test_decorators as td
9
+
10
+ from pandas import (
11
+ DataFrame,
12
+ read_excel,
13
+ )
14
+ import pandas._testing as tm
15
+
16
+ from pandas.io.excel import ExcelWriter
17
+ from pandas.io.formats.excel import ExcelFormatter
18
+
19
+ pytest.importorskip("jinja2")
20
+ # jinja2 is currently required for Styler.__init__(). Technically Styler.to_excel
21
+ # could compute styles and render to excel without jinja2, since there is no
22
+ # 'template' file, but this needs the import error to delayed until render time.
23
+
24
+ if is_platform_windows():
25
+ pytestmark = pytest.mark.single_cpu
26
+
27
+
28
+ def assert_equal_cell_styles(cell1, cell2):
29
+ # TODO: should find a better way to check equality
30
+ assert cell1.alignment.__dict__ == cell2.alignment.__dict__
31
+ assert cell1.border.__dict__ == cell2.border.__dict__
32
+ assert cell1.fill.__dict__ == cell2.fill.__dict__
33
+ assert cell1.font.__dict__ == cell2.font.__dict__
34
+ assert cell1.number_format == cell2.number_format
35
+ assert cell1.protection.__dict__ == cell2.protection.__dict__
36
+
37
+
38
+ @pytest.mark.parametrize(
39
+ "engine",
40
+ ["xlsxwriter", "openpyxl"],
41
+ )
42
+ def test_styler_to_excel_unstyled(engine):
43
+ # compare DataFrame.to_excel and Styler.to_excel when no styles applied
44
+ pytest.importorskip(engine)
45
+ df = DataFrame(np.random.default_rng(2).standard_normal((2, 2)))
46
+ with tm.ensure_clean(".xlsx") as path:
47
+ with ExcelWriter(path, engine=engine) as writer:
48
+ df.to_excel(writer, sheet_name="dataframe")
49
+ df.style.to_excel(writer, sheet_name="unstyled")
50
+
51
+ openpyxl = pytest.importorskip("openpyxl") # test loading only with openpyxl
52
+ with contextlib.closing(openpyxl.load_workbook(path)) as wb:
53
+ for col1, col2 in zip(wb["dataframe"].columns, wb["unstyled"].columns):
54
+ assert len(col1) == len(col2)
55
+ for cell1, cell2 in zip(col1, col2):
56
+ assert cell1.value == cell2.value
57
+ assert_equal_cell_styles(cell1, cell2)
58
+
59
+
60
+ shared_style_params = [
61
+ (
62
+ "background-color: #111222",
63
+ ["fill", "fgColor", "rgb"],
64
+ {"xlsxwriter": "FF111222", "openpyxl": "00111222"},
65
+ ),
66
+ (
67
+ "color: #111222",
68
+ ["font", "color", "value"],
69
+ {"xlsxwriter": "FF111222", "openpyxl": "00111222"},
70
+ ),
71
+ ("font-family: Arial;", ["font", "name"], "arial"),
72
+ ("font-weight: bold;", ["font", "b"], True),
73
+ ("font-style: italic;", ["font", "i"], True),
74
+ ("text-decoration: underline;", ["font", "u"], "single"),
75
+ ("number-format: $??,???.00;", ["number_format"], "$??,???.00"),
76
+ ("text-align: left;", ["alignment", "horizontal"], "left"),
77
+ (
78
+ "vertical-align: bottom;",
79
+ ["alignment", "vertical"],
80
+ {"xlsxwriter": None, "openpyxl": "bottom"}, # xlsxwriter Fails
81
+ ),
82
+ ("vertical-align: middle;", ["alignment", "vertical"], "center"),
83
+ # Border widths
84
+ ("border-left: 2pt solid red", ["border", "left", "style"], "medium"),
85
+ ("border-left: 1pt dotted red", ["border", "left", "style"], "dotted"),
86
+ ("border-left: 2pt dotted red", ["border", "left", "style"], "mediumDashDotDot"),
87
+ ("border-left: 1pt dashed red", ["border", "left", "style"], "dashed"),
88
+ ("border-left: 2pt dashed red", ["border", "left", "style"], "mediumDashed"),
89
+ ("border-left: 1pt solid red", ["border", "left", "style"], "thin"),
90
+ ("border-left: 3pt solid red", ["border", "left", "style"], "thick"),
91
+ # Border expansion
92
+ (
93
+ "border-left: 2pt solid #111222",
94
+ ["border", "left", "color", "rgb"],
95
+ {"xlsxwriter": "FF111222", "openpyxl": "00111222"},
96
+ ),
97
+ ("border: 1pt solid red", ["border", "top", "style"], "thin"),
98
+ (
99
+ "border: 1pt solid #111222",
100
+ ["border", "top", "color", "rgb"],
101
+ {"xlsxwriter": "FF111222", "openpyxl": "00111222"},
102
+ ),
103
+ ("border: 1pt solid red", ["border", "right", "style"], "thin"),
104
+ (
105
+ "border: 1pt solid #111222",
106
+ ["border", "right", "color", "rgb"],
107
+ {"xlsxwriter": "FF111222", "openpyxl": "00111222"},
108
+ ),
109
+ ("border: 1pt solid red", ["border", "bottom", "style"], "thin"),
110
+ (
111
+ "border: 1pt solid #111222",
112
+ ["border", "bottom", "color", "rgb"],
113
+ {"xlsxwriter": "FF111222", "openpyxl": "00111222"},
114
+ ),
115
+ ("border: 1pt solid red", ["border", "left", "style"], "thin"),
116
+ (
117
+ "border: 1pt solid #111222",
118
+ ["border", "left", "color", "rgb"],
119
+ {"xlsxwriter": "FF111222", "openpyxl": "00111222"},
120
+ ),
121
+ # Border styles
122
+ (
123
+ "border-left-style: hair; border-left-color: black",
124
+ ["border", "left", "style"],
125
+ "hair",
126
+ ),
127
+ ]
128
+
129
+
130
+ @pytest.mark.parametrize(
131
+ "engine",
132
+ ["xlsxwriter", "openpyxl"],
133
+ )
134
+ @pytest.mark.parametrize("css, attrs, expected", shared_style_params)
135
+ def test_styler_to_excel_basic(engine, css, attrs, expected):
136
+ pytest.importorskip(engine)
137
+ df = DataFrame(np.random.default_rng(2).standard_normal((1, 1)))
138
+ styler = df.style.map(lambda x: css)
139
+
140
+ with tm.ensure_clean(".xlsx") as path:
141
+ with ExcelWriter(path, engine=engine) as writer:
142
+ df.to_excel(writer, sheet_name="dataframe")
143
+ styler.to_excel(writer, sheet_name="styled")
144
+
145
+ openpyxl = pytest.importorskip("openpyxl") # test loading only with openpyxl
146
+ with contextlib.closing(openpyxl.load_workbook(path)) as wb:
147
+ # test unstyled data cell does not have expected styles
148
+ # test styled cell has expected styles
149
+ u_cell, s_cell = wb["dataframe"].cell(2, 2), wb["styled"].cell(2, 2)
150
+ for attr in attrs:
151
+ u_cell, s_cell = getattr(u_cell, attr, None), getattr(s_cell, attr)
152
+
153
+ if isinstance(expected, dict):
154
+ assert u_cell is None or u_cell != expected[engine]
155
+ assert s_cell == expected[engine]
156
+ else:
157
+ assert u_cell is None or u_cell != expected
158
+ assert s_cell == expected
159
+
160
+
161
+ @pytest.mark.parametrize(
162
+ "engine",
163
+ ["xlsxwriter", "openpyxl"],
164
+ )
165
+ @pytest.mark.parametrize("css, attrs, expected", shared_style_params)
166
+ def test_styler_to_excel_basic_indexes(engine, css, attrs, expected):
167
+ pytest.importorskip(engine)
168
+ df = DataFrame(np.random.default_rng(2).standard_normal((1, 1)))
169
+
170
+ styler = df.style
171
+ styler.map_index(lambda x: css, axis=0)
172
+ styler.map_index(lambda x: css, axis=1)
173
+
174
+ null_styler = df.style
175
+ null_styler.map(lambda x: "null: css;")
176
+ null_styler.map_index(lambda x: "null: css;", axis=0)
177
+ null_styler.map_index(lambda x: "null: css;", axis=1)
178
+
179
+ with tm.ensure_clean(".xlsx") as path:
180
+ with ExcelWriter(path, engine=engine) as writer:
181
+ null_styler.to_excel(writer, sheet_name="null_styled")
182
+ styler.to_excel(writer, sheet_name="styled")
183
+
184
+ openpyxl = pytest.importorskip("openpyxl") # test loading only with openpyxl
185
+ with contextlib.closing(openpyxl.load_workbook(path)) as wb:
186
+ # test null styled index cells does not have expected styles
187
+ # test styled cell has expected styles
188
+ ui_cell, si_cell = wb["null_styled"].cell(2, 1), wb["styled"].cell(2, 1)
189
+ uc_cell, sc_cell = wb["null_styled"].cell(1, 2), wb["styled"].cell(1, 2)
190
+ for attr in attrs:
191
+ ui_cell, si_cell = getattr(ui_cell, attr, None), getattr(si_cell, attr)
192
+ uc_cell, sc_cell = getattr(uc_cell, attr, None), getattr(sc_cell, attr)
193
+
194
+ if isinstance(expected, dict):
195
+ assert ui_cell is None or ui_cell != expected[engine]
196
+ assert si_cell == expected[engine]
197
+ assert uc_cell is None or uc_cell != expected[engine]
198
+ assert sc_cell == expected[engine]
199
+ else:
200
+ assert ui_cell is None or ui_cell != expected
201
+ assert si_cell == expected
202
+ assert uc_cell is None or uc_cell != expected
203
+ assert sc_cell == expected
204
+
205
+
206
+ # From https://openpyxl.readthedocs.io/en/stable/api/openpyxl.styles.borders.html
207
+ # Note: Leaving behavior of "width"-type styles undefined; user should use border-width
208
+ # instead
209
+ excel_border_styles = [
210
+ # "thin",
211
+ "dashed",
212
+ "mediumDashDot",
213
+ "dashDotDot",
214
+ "hair",
215
+ "dotted",
216
+ "mediumDashDotDot",
217
+ # "medium",
218
+ "double",
219
+ "dashDot",
220
+ "slantDashDot",
221
+ # "thick",
222
+ "mediumDashed",
223
+ ]
224
+
225
+
226
+ @pytest.mark.parametrize(
227
+ "engine",
228
+ ["xlsxwriter", "openpyxl"],
229
+ )
230
+ @pytest.mark.parametrize("border_style", excel_border_styles)
231
+ def test_styler_to_excel_border_style(engine, border_style):
232
+ css = f"border-left: {border_style} black thin"
233
+ attrs = ["border", "left", "style"]
234
+ expected = border_style
235
+
236
+ pytest.importorskip(engine)
237
+ df = DataFrame(np.random.default_rng(2).standard_normal((1, 1)))
238
+ styler = df.style.map(lambda x: css)
239
+
240
+ with tm.ensure_clean(".xlsx") as path:
241
+ with ExcelWriter(path, engine=engine) as writer:
242
+ df.to_excel(writer, sheet_name="dataframe")
243
+ styler.to_excel(writer, sheet_name="styled")
244
+
245
+ openpyxl = pytest.importorskip("openpyxl") # test loading only with openpyxl
246
+ with contextlib.closing(openpyxl.load_workbook(path)) as wb:
247
+ # test unstyled data cell does not have expected styles
248
+ # test styled cell has expected styles
249
+ u_cell, s_cell = wb["dataframe"].cell(2, 2), wb["styled"].cell(2, 2)
250
+ for attr in attrs:
251
+ u_cell, s_cell = getattr(u_cell, attr, None), getattr(s_cell, attr)
252
+
253
+ if isinstance(expected, dict):
254
+ assert u_cell is None or u_cell != expected[engine]
255
+ assert s_cell == expected[engine]
256
+ else:
257
+ assert u_cell is None or u_cell != expected
258
+ assert s_cell == expected
259
+
260
+
261
+ def test_styler_custom_converter():
262
+ openpyxl = pytest.importorskip("openpyxl")
263
+
264
+ def custom_converter(css):
265
+ return {"font": {"color": {"rgb": "111222"}}}
266
+
267
+ df = DataFrame(np.random.default_rng(2).standard_normal((1, 1)))
268
+ styler = df.style.map(lambda x: "color: #888999")
269
+ with tm.ensure_clean(".xlsx") as path:
270
+ with ExcelWriter(path, engine="openpyxl") as writer:
271
+ ExcelFormatter(styler, style_converter=custom_converter).write(
272
+ writer, sheet_name="custom"
273
+ )
274
+
275
+ with contextlib.closing(openpyxl.load_workbook(path)) as wb:
276
+ assert wb["custom"].cell(2, 2).font.color.value == "00111222"
277
+
278
+
279
+ @pytest.mark.single_cpu
280
+ @td.skip_if_not_us_locale
281
+ def test_styler_to_s3(s3_public_bucket, s3so):
282
+ # GH#46381
283
+
284
+ mock_bucket_name, target_file = s3_public_bucket.name, "test.xlsx"
285
+ df = DataFrame({"x": [1, 2, 3], "y": [2, 4, 6]})
286
+ styler = df.style.set_sticky(axis="index")
287
+ styler.to_excel(f"s3://{mock_bucket_name}/{target_file}", storage_options=s3so)
288
+ timeout = 5
289
+ while True:
290
+ if target_file in (obj.key for obj in s3_public_bucket.objects.all()):
291
+ break
292
+ time.sleep(0.1)
293
+ timeout -= 0.1
294
+ assert timeout > 0, "Timed out waiting for file to appear on moto"
295
+ result = read_excel(
296
+ f"s3://{mock_bucket_name}/{target_file}", index_col=0, storage_options=s3so
297
+ )
298
+ tm.assert_frame_equal(result, df)
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_writers.py ADDED
@@ -0,0 +1,1511 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ date,
3
+ datetime,
4
+ timedelta,
5
+ )
6
+ from functools import partial
7
+ from io import BytesIO
8
+ import os
9
+ import re
10
+
11
+ import numpy as np
12
+ import pytest
13
+
14
+ from pandas.compat import is_platform_windows
15
+ from pandas.compat._constants import PY310
16
+ from pandas.compat._optional import import_optional_dependency
17
+ import pandas.util._test_decorators as td
18
+
19
+ import pandas as pd
20
+ from pandas import (
21
+ DataFrame,
22
+ Index,
23
+ MultiIndex,
24
+ date_range,
25
+ option_context,
26
+ )
27
+ import pandas._testing as tm
28
+
29
+ from pandas.io.excel import (
30
+ ExcelFile,
31
+ ExcelWriter,
32
+ _OpenpyxlWriter,
33
+ _XlsxWriter,
34
+ register_writer,
35
+ )
36
+ from pandas.io.excel._util import _writers
37
+
38
+ if is_platform_windows():
39
+ pytestmark = pytest.mark.single_cpu
40
+
41
+
42
+ def get_exp_unit(path: str) -> str:
43
+ return "ns"
44
+
45
+
46
+ @pytest.fixture
47
+ def frame(float_frame):
48
+ """
49
+ Returns the first ten items in fixture "float_frame".
50
+ """
51
+ return float_frame[:10]
52
+
53
+
54
+ @pytest.fixture(params=[True, False])
55
+ def merge_cells(request):
56
+ return request.param
57
+
58
+
59
+ @pytest.fixture
60
+ def path(ext):
61
+ """
62
+ Fixture to open file for use in each test case.
63
+ """
64
+ with tm.ensure_clean(ext) as file_path:
65
+ yield file_path
66
+
67
+
68
+ @pytest.fixture
69
+ def set_engine(engine, ext):
70
+ """
71
+ Fixture to set engine for use in each test case.
72
+
73
+ Rather than requiring `engine=...` to be provided explicitly as an
74
+ argument in each test, this fixture sets a global option to dictate
75
+ which engine should be used to write Excel files. After executing
76
+ the test it rolls back said change to the global option.
77
+ """
78
+ option_name = f"io.excel.{ext.strip('.')}.writer"
79
+ with option_context(option_name, engine):
80
+ yield
81
+
82
+
83
+ @pytest.mark.parametrize(
84
+ "ext",
85
+ [
86
+ pytest.param(".xlsx", marks=[td.skip_if_no("openpyxl"), td.skip_if_no("xlrd")]),
87
+ pytest.param(".xlsm", marks=[td.skip_if_no("openpyxl"), td.skip_if_no("xlrd")]),
88
+ pytest.param(
89
+ ".xlsx", marks=[td.skip_if_no("xlsxwriter"), td.skip_if_no("xlrd")]
90
+ ),
91
+ pytest.param(".ods", marks=td.skip_if_no("odf")),
92
+ ],
93
+ )
94
+ class TestRoundTrip:
95
+ @pytest.mark.parametrize(
96
+ "header,expected",
97
+ [(None, DataFrame([np.nan] * 4)), (0, DataFrame({"Unnamed: 0": [np.nan] * 3}))],
98
+ )
99
+ def test_read_one_empty_col_no_header(self, ext, header, expected):
100
+ # xref gh-12292
101
+ filename = "no_header"
102
+ df = DataFrame([["", 1, 100], ["", 2, 200], ["", 3, 300], ["", 4, 400]])
103
+
104
+ with tm.ensure_clean(ext) as path:
105
+ df.to_excel(path, sheet_name=filename, index=False, header=False)
106
+ result = pd.read_excel(
107
+ path, sheet_name=filename, usecols=[0], header=header
108
+ )
109
+
110
+ tm.assert_frame_equal(result, expected)
111
+
112
+ @pytest.mark.parametrize(
113
+ "header,expected",
114
+ [(None, DataFrame([0] + [np.nan] * 4)), (0, DataFrame([np.nan] * 4))],
115
+ )
116
+ def test_read_one_empty_col_with_header(self, ext, header, expected):
117
+ filename = "with_header"
118
+ df = DataFrame([["", 1, 100], ["", 2, 200], ["", 3, 300], ["", 4, 400]])
119
+
120
+ with tm.ensure_clean(ext) as path:
121
+ df.to_excel(path, sheet_name="with_header", index=False, header=True)
122
+ result = pd.read_excel(
123
+ path, sheet_name=filename, usecols=[0], header=header
124
+ )
125
+
126
+ tm.assert_frame_equal(result, expected)
127
+
128
+ def test_set_column_names_in_parameter(self, ext):
129
+ # GH 12870 : pass down column names associated with
130
+ # keyword argument names
131
+ refdf = DataFrame([[1, "foo"], [2, "bar"], [3, "baz"]], columns=["a", "b"])
132
+
133
+ with tm.ensure_clean(ext) as pth:
134
+ with ExcelWriter(pth) as writer:
135
+ refdf.to_excel(
136
+ writer, sheet_name="Data_no_head", header=False, index=False
137
+ )
138
+ refdf.to_excel(writer, sheet_name="Data_with_head", index=False)
139
+
140
+ refdf.columns = ["A", "B"]
141
+
142
+ with ExcelFile(pth) as reader:
143
+ xlsdf_no_head = pd.read_excel(
144
+ reader, sheet_name="Data_no_head", header=None, names=["A", "B"]
145
+ )
146
+ xlsdf_with_head = pd.read_excel(
147
+ reader,
148
+ sheet_name="Data_with_head",
149
+ index_col=None,
150
+ names=["A", "B"],
151
+ )
152
+
153
+ tm.assert_frame_equal(xlsdf_no_head, refdf)
154
+ tm.assert_frame_equal(xlsdf_with_head, refdf)
155
+
156
+ def test_creating_and_reading_multiple_sheets(self, ext):
157
+ # see gh-9450
158
+ #
159
+ # Test reading multiple sheets, from a runtime
160
+ # created Excel file with multiple sheets.
161
+ def tdf(col_sheet_name):
162
+ d, i = [11, 22, 33], [1, 2, 3]
163
+ return DataFrame(d, i, columns=[col_sheet_name])
164
+
165
+ sheets = ["AAA", "BBB", "CCC"]
166
+
167
+ dfs = [tdf(s) for s in sheets]
168
+ dfs = dict(zip(sheets, dfs))
169
+
170
+ with tm.ensure_clean(ext) as pth:
171
+ with ExcelWriter(pth) as ew:
172
+ for sheetname, df in dfs.items():
173
+ df.to_excel(ew, sheet_name=sheetname)
174
+
175
+ dfs_returned = pd.read_excel(pth, sheet_name=sheets, index_col=0)
176
+
177
+ for s in sheets:
178
+ tm.assert_frame_equal(dfs[s], dfs_returned[s])
179
+
180
+ def test_read_excel_multiindex_empty_level(self, ext):
181
+ # see gh-12453
182
+ with tm.ensure_clean(ext) as path:
183
+ df = DataFrame(
184
+ {
185
+ ("One", "x"): {0: 1},
186
+ ("Two", "X"): {0: 3},
187
+ ("Two", "Y"): {0: 7},
188
+ ("Zero", ""): {0: 0},
189
+ }
190
+ )
191
+
192
+ expected = DataFrame(
193
+ {
194
+ ("One", "x"): {0: 1},
195
+ ("Two", "X"): {0: 3},
196
+ ("Two", "Y"): {0: 7},
197
+ ("Zero", "Unnamed: 4_level_1"): {0: 0},
198
+ }
199
+ )
200
+
201
+ df.to_excel(path)
202
+ actual = pd.read_excel(path, header=[0, 1], index_col=0)
203
+ tm.assert_frame_equal(actual, expected)
204
+
205
+ df = DataFrame(
206
+ {
207
+ ("Beg", ""): {0: 0},
208
+ ("Middle", "x"): {0: 1},
209
+ ("Tail", "X"): {0: 3},
210
+ ("Tail", "Y"): {0: 7},
211
+ }
212
+ )
213
+
214
+ expected = DataFrame(
215
+ {
216
+ ("Beg", "Unnamed: 1_level_1"): {0: 0},
217
+ ("Middle", "x"): {0: 1},
218
+ ("Tail", "X"): {0: 3},
219
+ ("Tail", "Y"): {0: 7},
220
+ }
221
+ )
222
+
223
+ df.to_excel(path)
224
+ actual = pd.read_excel(path, header=[0, 1], index_col=0)
225
+ tm.assert_frame_equal(actual, expected)
226
+
227
+ @pytest.mark.parametrize("c_idx_names", ["a", None])
228
+ @pytest.mark.parametrize("r_idx_names", ["b", None])
229
+ @pytest.mark.parametrize("c_idx_levels", [1, 3])
230
+ @pytest.mark.parametrize("r_idx_levels", [1, 3])
231
+ def test_excel_multindex_roundtrip(
232
+ self, ext, c_idx_names, r_idx_names, c_idx_levels, r_idx_levels, request
233
+ ):
234
+ # see gh-4679
235
+ with tm.ensure_clean(ext) as pth:
236
+ # Empty name case current read in as
237
+ # unnamed levels, not Nones.
238
+ check_names = bool(r_idx_names) or r_idx_levels <= 1
239
+
240
+ if c_idx_levels == 1:
241
+ columns = Index(list("abcde"))
242
+ else:
243
+ columns = MultiIndex.from_arrays(
244
+ [range(5) for _ in range(c_idx_levels)],
245
+ names=[f"{c_idx_names}-{i}" for i in range(c_idx_levels)],
246
+ )
247
+ if r_idx_levels == 1:
248
+ index = Index(list("ghijk"))
249
+ else:
250
+ index = MultiIndex.from_arrays(
251
+ [range(5) for _ in range(r_idx_levels)],
252
+ names=[f"{r_idx_names}-{i}" for i in range(r_idx_levels)],
253
+ )
254
+ df = DataFrame(
255
+ 1.1 * np.ones((5, 5)),
256
+ columns=columns,
257
+ index=index,
258
+ )
259
+ df.to_excel(pth)
260
+
261
+ act = pd.read_excel(
262
+ pth,
263
+ index_col=list(range(r_idx_levels)),
264
+ header=list(range(c_idx_levels)),
265
+ )
266
+ tm.assert_frame_equal(df, act, check_names=check_names)
267
+
268
+ df.iloc[0, :] = np.nan
269
+ df.to_excel(pth)
270
+
271
+ act = pd.read_excel(
272
+ pth,
273
+ index_col=list(range(r_idx_levels)),
274
+ header=list(range(c_idx_levels)),
275
+ )
276
+ tm.assert_frame_equal(df, act, check_names=check_names)
277
+
278
+ df.iloc[-1, :] = np.nan
279
+ df.to_excel(pth)
280
+ act = pd.read_excel(
281
+ pth,
282
+ index_col=list(range(r_idx_levels)),
283
+ header=list(range(c_idx_levels)),
284
+ )
285
+ tm.assert_frame_equal(df, act, check_names=check_names)
286
+
287
+ def test_read_excel_parse_dates(self, ext):
288
+ # see gh-11544, gh-12051
289
+ df = DataFrame(
290
+ {"col": [1, 2, 3], "date_strings": date_range("2012-01-01", periods=3)}
291
+ )
292
+ df2 = df.copy()
293
+ df2["date_strings"] = df2["date_strings"].dt.strftime("%m/%d/%Y")
294
+
295
+ with tm.ensure_clean(ext) as pth:
296
+ df2.to_excel(pth)
297
+
298
+ res = pd.read_excel(pth, index_col=0)
299
+ tm.assert_frame_equal(df2, res)
300
+
301
+ res = pd.read_excel(pth, parse_dates=["date_strings"], index_col=0)
302
+ tm.assert_frame_equal(df, res)
303
+
304
+ date_parser = lambda x: datetime.strptime(x, "%m/%d/%Y")
305
+ with tm.assert_produces_warning(
306
+ FutureWarning,
307
+ match="use 'date_format' instead",
308
+ raise_on_extra_warnings=False,
309
+ ):
310
+ res = pd.read_excel(
311
+ pth,
312
+ parse_dates=["date_strings"],
313
+ date_parser=date_parser,
314
+ index_col=0,
315
+ )
316
+ tm.assert_frame_equal(df, res)
317
+ res = pd.read_excel(
318
+ pth, parse_dates=["date_strings"], date_format="%m/%d/%Y", index_col=0
319
+ )
320
+ tm.assert_frame_equal(df, res)
321
+
322
+ def test_multiindex_interval_datetimes(self, ext):
323
+ # GH 30986
324
+ midx = MultiIndex.from_arrays(
325
+ [
326
+ range(4),
327
+ pd.interval_range(
328
+ start=pd.Timestamp("2020-01-01"), periods=4, freq="6ME"
329
+ ),
330
+ ]
331
+ )
332
+ df = DataFrame(range(4), index=midx)
333
+ with tm.ensure_clean(ext) as pth:
334
+ df.to_excel(pth)
335
+ result = pd.read_excel(pth, index_col=[0, 1])
336
+ expected = DataFrame(
337
+ range(4),
338
+ MultiIndex.from_arrays(
339
+ [
340
+ range(4),
341
+ [
342
+ "(2020-01-31 00:00:00, 2020-07-31 00:00:00]",
343
+ "(2020-07-31 00:00:00, 2021-01-31 00:00:00]",
344
+ "(2021-01-31 00:00:00, 2021-07-31 00:00:00]",
345
+ "(2021-07-31 00:00:00, 2022-01-31 00:00:00]",
346
+ ],
347
+ ]
348
+ ),
349
+ )
350
+ tm.assert_frame_equal(result, expected)
351
+
352
+
353
+ @pytest.mark.parametrize(
354
+ "engine,ext",
355
+ [
356
+ pytest.param(
357
+ "openpyxl",
358
+ ".xlsx",
359
+ marks=[td.skip_if_no("openpyxl"), td.skip_if_no("xlrd")],
360
+ ),
361
+ pytest.param(
362
+ "openpyxl",
363
+ ".xlsm",
364
+ marks=[td.skip_if_no("openpyxl"), td.skip_if_no("xlrd")],
365
+ ),
366
+ pytest.param(
367
+ "xlsxwriter",
368
+ ".xlsx",
369
+ marks=[td.skip_if_no("xlsxwriter"), td.skip_if_no("xlrd")],
370
+ ),
371
+ pytest.param("odf", ".ods", marks=td.skip_if_no("odf")),
372
+ ],
373
+ )
374
+ @pytest.mark.usefixtures("set_engine")
375
+ class TestExcelWriter:
376
+ def test_excel_sheet_size(self, path):
377
+ # GH 26080
378
+ breaking_row_count = 2**20 + 1
379
+ breaking_col_count = 2**14 + 1
380
+ # purposely using two arrays to prevent memory issues while testing
381
+ row_arr = np.zeros(shape=(breaking_row_count, 1))
382
+ col_arr = np.zeros(shape=(1, breaking_col_count))
383
+ row_df = DataFrame(row_arr)
384
+ col_df = DataFrame(col_arr)
385
+
386
+ msg = "sheet is too large"
387
+ with pytest.raises(ValueError, match=msg):
388
+ row_df.to_excel(path)
389
+
390
+ with pytest.raises(ValueError, match=msg):
391
+ col_df.to_excel(path)
392
+
393
+ def test_excel_sheet_by_name_raise(self, path):
394
+ gt = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
395
+ gt.to_excel(path)
396
+
397
+ with ExcelFile(path) as xl:
398
+ df = pd.read_excel(xl, sheet_name=0, index_col=0)
399
+
400
+ tm.assert_frame_equal(gt, df)
401
+
402
+ msg = "Worksheet named '0' not found"
403
+ with pytest.raises(ValueError, match=msg):
404
+ pd.read_excel(xl, "0")
405
+
406
+ def test_excel_writer_context_manager(self, frame, path):
407
+ with ExcelWriter(path) as writer:
408
+ frame.to_excel(writer, sheet_name="Data1")
409
+ frame2 = frame.copy()
410
+ frame2.columns = frame.columns[::-1]
411
+ frame2.to_excel(writer, sheet_name="Data2")
412
+
413
+ with ExcelFile(path) as reader:
414
+ found_df = pd.read_excel(reader, sheet_name="Data1", index_col=0)
415
+ found_df2 = pd.read_excel(reader, sheet_name="Data2", index_col=0)
416
+
417
+ tm.assert_frame_equal(found_df, frame)
418
+ tm.assert_frame_equal(found_df2, frame2)
419
+
420
+ def test_roundtrip(self, frame, path):
421
+ frame = frame.copy()
422
+ frame.iloc[:5, frame.columns.get_loc("A")] = np.nan
423
+
424
+ frame.to_excel(path, sheet_name="test1")
425
+ frame.to_excel(path, sheet_name="test1", columns=["A", "B"])
426
+ frame.to_excel(path, sheet_name="test1", header=False)
427
+ frame.to_excel(path, sheet_name="test1", index=False)
428
+
429
+ # test roundtrip
430
+ frame.to_excel(path, sheet_name="test1")
431
+ recons = pd.read_excel(path, sheet_name="test1", index_col=0)
432
+ tm.assert_frame_equal(frame, recons)
433
+
434
+ frame.to_excel(path, sheet_name="test1", index=False)
435
+ recons = pd.read_excel(path, sheet_name="test1", index_col=None)
436
+ recons.index = frame.index
437
+ tm.assert_frame_equal(frame, recons)
438
+
439
+ frame.to_excel(path, sheet_name="test1", na_rep="NA")
440
+ recons = pd.read_excel(path, sheet_name="test1", index_col=0, na_values=["NA"])
441
+ tm.assert_frame_equal(frame, recons)
442
+
443
+ # GH 3611
444
+ frame.to_excel(path, sheet_name="test1", na_rep="88")
445
+ recons = pd.read_excel(path, sheet_name="test1", index_col=0, na_values=["88"])
446
+ tm.assert_frame_equal(frame, recons)
447
+
448
+ frame.to_excel(path, sheet_name="test1", na_rep="88")
449
+ recons = pd.read_excel(
450
+ path, sheet_name="test1", index_col=0, na_values=[88, 88.0]
451
+ )
452
+ tm.assert_frame_equal(frame, recons)
453
+
454
+ # GH 6573
455
+ frame.to_excel(path, sheet_name="Sheet1")
456
+ recons = pd.read_excel(path, index_col=0)
457
+ tm.assert_frame_equal(frame, recons)
458
+
459
+ frame.to_excel(path, sheet_name="0")
460
+ recons = pd.read_excel(path, index_col=0)
461
+ tm.assert_frame_equal(frame, recons)
462
+
463
+ # GH 8825 Pandas Series should provide to_excel method
464
+ s = frame["A"]
465
+ s.to_excel(path)
466
+ recons = pd.read_excel(path, index_col=0)
467
+ tm.assert_frame_equal(s.to_frame(), recons)
468
+
469
+ def test_mixed(self, frame, path):
470
+ mixed_frame = frame.copy()
471
+ mixed_frame["foo"] = "bar"
472
+
473
+ mixed_frame.to_excel(path, sheet_name="test1")
474
+ with ExcelFile(path) as reader:
475
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
476
+ tm.assert_frame_equal(mixed_frame, recons)
477
+
478
+ def test_ts_frame(self, path):
479
+ unit = get_exp_unit(path)
480
+ df = DataFrame(
481
+ np.random.default_rng(2).standard_normal((5, 4)),
482
+ columns=Index(list("ABCD")),
483
+ index=date_range("2000-01-01", periods=5, freq="B"),
484
+ )
485
+
486
+ # freq doesn't round-trip
487
+ index = pd.DatetimeIndex(np.asarray(df.index), freq=None)
488
+ df.index = index
489
+
490
+ expected = df[:]
491
+ expected.index = expected.index.as_unit(unit)
492
+
493
+ df.to_excel(path, sheet_name="test1")
494
+ with ExcelFile(path) as reader:
495
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
496
+ tm.assert_frame_equal(expected, recons)
497
+
498
+ def test_basics_with_nan(self, frame, path):
499
+ frame = frame.copy()
500
+ frame.iloc[:5, frame.columns.get_loc("A")] = np.nan
501
+ frame.to_excel(path, sheet_name="test1")
502
+ frame.to_excel(path, sheet_name="test1", columns=["A", "B"])
503
+ frame.to_excel(path, sheet_name="test1", header=False)
504
+ frame.to_excel(path, sheet_name="test1", index=False)
505
+
506
+ @pytest.mark.parametrize("np_type", [np.int8, np.int16, np.int32, np.int64])
507
+ def test_int_types(self, np_type, path):
508
+ # Test np.int values read come back as int
509
+ # (rather than float which is Excel's format).
510
+ df = DataFrame(
511
+ np.random.default_rng(2).integers(-10, 10, size=(10, 2)), dtype=np_type
512
+ )
513
+ df.to_excel(path, sheet_name="test1")
514
+
515
+ with ExcelFile(path) as reader:
516
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
517
+
518
+ int_frame = df.astype(np.int64)
519
+ tm.assert_frame_equal(int_frame, recons)
520
+
521
+ recons2 = pd.read_excel(path, sheet_name="test1", index_col=0)
522
+ tm.assert_frame_equal(int_frame, recons2)
523
+
524
+ @pytest.mark.parametrize("np_type", [np.float16, np.float32, np.float64])
525
+ def test_float_types(self, np_type, path):
526
+ # Test np.float values read come back as float.
527
+ df = DataFrame(np.random.default_rng(2).random(10), dtype=np_type)
528
+ df.to_excel(path, sheet_name="test1")
529
+
530
+ with ExcelFile(path) as reader:
531
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0).astype(
532
+ np_type
533
+ )
534
+
535
+ tm.assert_frame_equal(df, recons)
536
+
537
+ def test_bool_types(self, path):
538
+ # Test np.bool_ values read come back as float.
539
+ df = DataFrame([1, 0, True, False], dtype=np.bool_)
540
+ df.to_excel(path, sheet_name="test1")
541
+
542
+ with ExcelFile(path) as reader:
543
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0).astype(
544
+ np.bool_
545
+ )
546
+
547
+ tm.assert_frame_equal(df, recons)
548
+
549
+ def test_inf_roundtrip(self, path):
550
+ df = DataFrame([(1, np.inf), (2, 3), (5, -np.inf)])
551
+ df.to_excel(path, sheet_name="test1")
552
+
553
+ with ExcelFile(path) as reader:
554
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
555
+
556
+ tm.assert_frame_equal(df, recons)
557
+
558
+ def test_sheets(self, frame, path):
559
+ # freq doesn't round-trip
560
+ unit = get_exp_unit(path)
561
+ tsframe = DataFrame(
562
+ np.random.default_rng(2).standard_normal((5, 4)),
563
+ columns=Index(list("ABCD")),
564
+ index=date_range("2000-01-01", periods=5, freq="B"),
565
+ )
566
+ index = pd.DatetimeIndex(np.asarray(tsframe.index), freq=None)
567
+ tsframe.index = index
568
+
569
+ expected = tsframe[:]
570
+ expected.index = expected.index.as_unit(unit)
571
+
572
+ frame = frame.copy()
573
+ frame.iloc[:5, frame.columns.get_loc("A")] = np.nan
574
+
575
+ frame.to_excel(path, sheet_name="test1")
576
+ frame.to_excel(path, sheet_name="test1", columns=["A", "B"])
577
+ frame.to_excel(path, sheet_name="test1", header=False)
578
+ frame.to_excel(path, sheet_name="test1", index=False)
579
+
580
+ # Test writing to separate sheets
581
+ with ExcelWriter(path) as writer:
582
+ frame.to_excel(writer, sheet_name="test1")
583
+ tsframe.to_excel(writer, sheet_name="test2")
584
+ with ExcelFile(path) as reader:
585
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
586
+ tm.assert_frame_equal(frame, recons)
587
+ recons = pd.read_excel(reader, sheet_name="test2", index_col=0)
588
+ tm.assert_frame_equal(expected, recons)
589
+ assert 2 == len(reader.sheet_names)
590
+ assert "test1" == reader.sheet_names[0]
591
+ assert "test2" == reader.sheet_names[1]
592
+
593
+ def test_colaliases(self, frame, path):
594
+ frame = frame.copy()
595
+ frame.iloc[:5, frame.columns.get_loc("A")] = np.nan
596
+
597
+ frame.to_excel(path, sheet_name="test1")
598
+ frame.to_excel(path, sheet_name="test1", columns=["A", "B"])
599
+ frame.to_excel(path, sheet_name="test1", header=False)
600
+ frame.to_excel(path, sheet_name="test1", index=False)
601
+
602
+ # column aliases
603
+ col_aliases = Index(["AA", "X", "Y", "Z"])
604
+ frame.to_excel(path, sheet_name="test1", header=col_aliases)
605
+ with ExcelFile(path) as reader:
606
+ rs = pd.read_excel(reader, sheet_name="test1", index_col=0)
607
+ xp = frame.copy()
608
+ xp.columns = col_aliases
609
+ tm.assert_frame_equal(xp, rs)
610
+
611
+ def test_roundtrip_indexlabels(self, merge_cells, frame, path):
612
+ frame = frame.copy()
613
+ frame.iloc[:5, frame.columns.get_loc("A")] = np.nan
614
+
615
+ frame.to_excel(path, sheet_name="test1")
616
+ frame.to_excel(path, sheet_name="test1", columns=["A", "B"])
617
+ frame.to_excel(path, sheet_name="test1", header=False)
618
+ frame.to_excel(path, sheet_name="test1", index=False)
619
+
620
+ # test index_label
621
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2))) >= 0
622
+ df.to_excel(
623
+ path, sheet_name="test1", index_label=["test"], merge_cells=merge_cells
624
+ )
625
+ with ExcelFile(path) as reader:
626
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0).astype(
627
+ np.int64
628
+ )
629
+ df.index.names = ["test"]
630
+ assert df.index.names == recons.index.names
631
+
632
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2))) >= 0
633
+ df.to_excel(
634
+ path,
635
+ sheet_name="test1",
636
+ index_label=["test", "dummy", "dummy2"],
637
+ merge_cells=merge_cells,
638
+ )
639
+ with ExcelFile(path) as reader:
640
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0).astype(
641
+ np.int64
642
+ )
643
+ df.index.names = ["test"]
644
+ assert df.index.names == recons.index.names
645
+
646
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2))) >= 0
647
+ df.to_excel(
648
+ path, sheet_name="test1", index_label="test", merge_cells=merge_cells
649
+ )
650
+ with ExcelFile(path) as reader:
651
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0).astype(
652
+ np.int64
653
+ )
654
+ df.index.names = ["test"]
655
+ tm.assert_frame_equal(df, recons.astype(bool))
656
+
657
+ frame.to_excel(
658
+ path,
659
+ sheet_name="test1",
660
+ columns=["A", "B", "C", "D"],
661
+ index=False,
662
+ merge_cells=merge_cells,
663
+ )
664
+ # take 'A' and 'B' as indexes (same row as cols 'C', 'D')
665
+ df = frame.copy()
666
+ df = df.set_index(["A", "B"])
667
+
668
+ with ExcelFile(path) as reader:
669
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=[0, 1])
670
+ tm.assert_frame_equal(df, recons)
671
+
672
+ def test_excel_roundtrip_indexname(self, merge_cells, path):
673
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 4)))
674
+ df.index.name = "foo"
675
+
676
+ df.to_excel(path, merge_cells=merge_cells)
677
+
678
+ with ExcelFile(path) as xf:
679
+ result = pd.read_excel(xf, sheet_name=xf.sheet_names[0], index_col=0)
680
+
681
+ tm.assert_frame_equal(result, df)
682
+ assert result.index.name == "foo"
683
+
684
+ def test_excel_roundtrip_datetime(self, merge_cells, path):
685
+ # datetime.date, not sure what to test here exactly
686
+ unit = get_exp_unit(path)
687
+
688
+ # freq does not round-trip
689
+ tsframe = DataFrame(
690
+ np.random.default_rng(2).standard_normal((5, 4)),
691
+ columns=Index(list("ABCD")),
692
+ index=date_range("2000-01-01", periods=5, freq="B"),
693
+ )
694
+ index = pd.DatetimeIndex(np.asarray(tsframe.index), freq=None)
695
+ tsframe.index = index
696
+
697
+ tsf = tsframe.copy()
698
+
699
+ tsf.index = [x.date() for x in tsframe.index]
700
+ tsf.to_excel(path, sheet_name="test1", merge_cells=merge_cells)
701
+
702
+ with ExcelFile(path) as reader:
703
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
704
+
705
+ expected = tsframe[:]
706
+ expected.index = expected.index.as_unit(unit)
707
+ tm.assert_frame_equal(expected, recons)
708
+
709
+ def test_excel_date_datetime_format(self, ext, path):
710
+ # see gh-4133
711
+ #
712
+ # Excel output format strings
713
+ unit = get_exp_unit(path)
714
+
715
+ df = DataFrame(
716
+ [
717
+ [date(2014, 1, 31), date(1999, 9, 24)],
718
+ [datetime(1998, 5, 26, 23, 33, 4), datetime(2014, 2, 28, 13, 5, 13)],
719
+ ],
720
+ index=["DATE", "DATETIME"],
721
+ columns=["X", "Y"],
722
+ )
723
+ df_expected = DataFrame(
724
+ [
725
+ [datetime(2014, 1, 31), datetime(1999, 9, 24)],
726
+ [datetime(1998, 5, 26, 23, 33, 4), datetime(2014, 2, 28, 13, 5, 13)],
727
+ ],
728
+ index=["DATE", "DATETIME"],
729
+ columns=["X", "Y"],
730
+ )
731
+ df_expected = df_expected.astype(f"M8[{unit}]")
732
+
733
+ with tm.ensure_clean(ext) as filename2:
734
+ with ExcelWriter(path) as writer1:
735
+ df.to_excel(writer1, sheet_name="test1")
736
+
737
+ with ExcelWriter(
738
+ filename2,
739
+ date_format="DD.MM.YYYY",
740
+ datetime_format="DD.MM.YYYY HH-MM-SS",
741
+ ) as writer2:
742
+ df.to_excel(writer2, sheet_name="test1")
743
+
744
+ with ExcelFile(path) as reader1:
745
+ rs1 = pd.read_excel(reader1, sheet_name="test1", index_col=0)
746
+
747
+ with ExcelFile(filename2) as reader2:
748
+ rs2 = pd.read_excel(reader2, sheet_name="test1", index_col=0)
749
+
750
+ tm.assert_frame_equal(rs1, rs2)
751
+
752
+ # Since the reader returns a datetime object for dates,
753
+ # we need to use df_expected to check the result.
754
+ tm.assert_frame_equal(rs2, df_expected)
755
+
756
+ def test_to_excel_interval_no_labels(self, path, using_infer_string):
757
+ # see gh-19242
758
+ #
759
+ # Test writing Interval without labels.
760
+ df = DataFrame(
761
+ np.random.default_rng(2).integers(-10, 10, size=(20, 1)), dtype=np.int64
762
+ )
763
+ expected = df.copy()
764
+
765
+ df["new"] = pd.cut(df[0], 10)
766
+ expected["new"] = pd.cut(expected[0], 10).astype(
767
+ str if not using_infer_string else "string[pyarrow_numpy]"
768
+ )
769
+
770
+ df.to_excel(path, sheet_name="test1")
771
+ with ExcelFile(path) as reader:
772
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
773
+ tm.assert_frame_equal(expected, recons)
774
+
775
+ def test_to_excel_interval_labels(self, path):
776
+ # see gh-19242
777
+ #
778
+ # Test writing Interval with labels.
779
+ df = DataFrame(
780
+ np.random.default_rng(2).integers(-10, 10, size=(20, 1)), dtype=np.int64
781
+ )
782
+ expected = df.copy()
783
+ intervals = pd.cut(
784
+ df[0], 10, labels=["A", "B", "C", "D", "E", "F", "G", "H", "I", "J"]
785
+ )
786
+ df["new"] = intervals
787
+ expected["new"] = pd.Series(list(intervals))
788
+
789
+ df.to_excel(path, sheet_name="test1")
790
+ with ExcelFile(path) as reader:
791
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
792
+ tm.assert_frame_equal(expected, recons)
793
+
794
+ def test_to_excel_timedelta(self, path):
795
+ # see gh-19242, gh-9155
796
+ #
797
+ # Test writing timedelta to xls.
798
+ df = DataFrame(
799
+ np.random.default_rng(2).integers(-10, 10, size=(20, 1)),
800
+ columns=["A"],
801
+ dtype=np.int64,
802
+ )
803
+ expected = df.copy()
804
+
805
+ df["new"] = df["A"].apply(lambda x: timedelta(seconds=x))
806
+ expected["new"] = expected["A"].apply(
807
+ lambda x: timedelta(seconds=x).total_seconds() / 86400
808
+ )
809
+
810
+ df.to_excel(path, sheet_name="test1")
811
+ with ExcelFile(path) as reader:
812
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=0)
813
+ tm.assert_frame_equal(expected, recons)
814
+
815
+ def test_to_excel_periodindex(self, path):
816
+ # xp has a PeriodIndex
817
+ df = DataFrame(
818
+ np.random.default_rng(2).standard_normal((5, 4)),
819
+ columns=Index(list("ABCD")),
820
+ index=date_range("2000-01-01", periods=5, freq="B"),
821
+ )
822
+ xp = df.resample("ME").mean().to_period("M")
823
+
824
+ xp.to_excel(path, sheet_name="sht1")
825
+
826
+ with ExcelFile(path) as reader:
827
+ rs = pd.read_excel(reader, sheet_name="sht1", index_col=0)
828
+ tm.assert_frame_equal(xp, rs.to_period("M"))
829
+
830
+ def test_to_excel_multiindex(self, merge_cells, frame, path):
831
+ arrays = np.arange(len(frame.index) * 2, dtype=np.int64).reshape(2, -1)
832
+ new_index = MultiIndex.from_arrays(arrays, names=["first", "second"])
833
+ frame.index = new_index
834
+
835
+ frame.to_excel(path, sheet_name="test1", header=False)
836
+ frame.to_excel(path, sheet_name="test1", columns=["A", "B"])
837
+
838
+ # round trip
839
+ frame.to_excel(path, sheet_name="test1", merge_cells=merge_cells)
840
+ with ExcelFile(path) as reader:
841
+ df = pd.read_excel(reader, sheet_name="test1", index_col=[0, 1])
842
+ tm.assert_frame_equal(frame, df)
843
+
844
+ # GH13511
845
+ def test_to_excel_multiindex_nan_label(self, merge_cells, path):
846
+ df = DataFrame(
847
+ {
848
+ "A": [None, 2, 3],
849
+ "B": [10, 20, 30],
850
+ "C": np.random.default_rng(2).random(3),
851
+ }
852
+ )
853
+ df = df.set_index(["A", "B"])
854
+
855
+ df.to_excel(path, merge_cells=merge_cells)
856
+ df1 = pd.read_excel(path, index_col=[0, 1])
857
+ tm.assert_frame_equal(df, df1)
858
+
859
+ # Test for Issue 11328. If column indices are integers, make
860
+ # sure they are handled correctly for either setting of
861
+ # merge_cells
862
+ def test_to_excel_multiindex_cols(self, merge_cells, frame, path):
863
+ arrays = np.arange(len(frame.index) * 2, dtype=np.int64).reshape(2, -1)
864
+ new_index = MultiIndex.from_arrays(arrays, names=["first", "second"])
865
+ frame.index = new_index
866
+
867
+ new_cols_index = MultiIndex.from_tuples([(40, 1), (40, 2), (50, 1), (50, 2)])
868
+ frame.columns = new_cols_index
869
+ header = [0, 1]
870
+ if not merge_cells:
871
+ header = 0
872
+
873
+ # round trip
874
+ frame.to_excel(path, sheet_name="test1", merge_cells=merge_cells)
875
+ with ExcelFile(path) as reader:
876
+ df = pd.read_excel(
877
+ reader, sheet_name="test1", header=header, index_col=[0, 1]
878
+ )
879
+ if not merge_cells:
880
+ fm = frame.columns._format_multi(sparsify=False, include_names=False)
881
+ frame.columns = [".".join(map(str, q)) for q in zip(*fm)]
882
+ tm.assert_frame_equal(frame, df)
883
+
884
+ def test_to_excel_multiindex_dates(self, merge_cells, path):
885
+ # try multiindex with dates
886
+ unit = get_exp_unit(path)
887
+ tsframe = DataFrame(
888
+ np.random.default_rng(2).standard_normal((5, 4)),
889
+ columns=Index(list("ABCD")),
890
+ index=date_range("2000-01-01", periods=5, freq="B"),
891
+ )
892
+ tsframe.index = MultiIndex.from_arrays(
893
+ [
894
+ tsframe.index.as_unit(unit),
895
+ np.arange(len(tsframe.index), dtype=np.int64),
896
+ ],
897
+ names=["time", "foo"],
898
+ )
899
+
900
+ tsframe.to_excel(path, sheet_name="test1", merge_cells=merge_cells)
901
+ with ExcelFile(path) as reader:
902
+ recons = pd.read_excel(reader, sheet_name="test1", index_col=[0, 1])
903
+
904
+ tm.assert_frame_equal(tsframe, recons)
905
+ assert recons.index.names == ("time", "foo")
906
+
907
+ def test_to_excel_multiindex_no_write_index(self, path):
908
+ # Test writing and re-reading a MI without the index. GH 5616.
909
+
910
+ # Initial non-MI frame.
911
+ frame1 = DataFrame({"a": [10, 20], "b": [30, 40], "c": [50, 60]})
912
+
913
+ # Add a MI.
914
+ frame2 = frame1.copy()
915
+ multi_index = MultiIndex.from_tuples([(70, 80), (90, 100)])
916
+ frame2.index = multi_index
917
+
918
+ # Write out to Excel without the index.
919
+ frame2.to_excel(path, sheet_name="test1", index=False)
920
+
921
+ # Read it back in.
922
+ with ExcelFile(path) as reader:
923
+ frame3 = pd.read_excel(reader, sheet_name="test1")
924
+
925
+ # Test that it is the same as the initial frame.
926
+ tm.assert_frame_equal(frame1, frame3)
927
+
928
+ def test_to_excel_empty_multiindex(self, path):
929
+ # GH 19543.
930
+ expected = DataFrame([], columns=[0, 1, 2])
931
+
932
+ df = DataFrame([], index=MultiIndex.from_tuples([], names=[0, 1]), columns=[2])
933
+ df.to_excel(path, sheet_name="test1")
934
+
935
+ with ExcelFile(path) as reader:
936
+ result = pd.read_excel(reader, sheet_name="test1")
937
+ tm.assert_frame_equal(
938
+ result, expected, check_index_type=False, check_dtype=False
939
+ )
940
+
941
+ def test_to_excel_float_format(self, path):
942
+ df = DataFrame(
943
+ [[0.123456, 0.234567, 0.567567], [12.32112, 123123.2, 321321.2]],
944
+ index=["A", "B"],
945
+ columns=["X", "Y", "Z"],
946
+ )
947
+ df.to_excel(path, sheet_name="test1", float_format="%.2f")
948
+
949
+ with ExcelFile(path) as reader:
950
+ result = pd.read_excel(reader, sheet_name="test1", index_col=0)
951
+
952
+ expected = DataFrame(
953
+ [[0.12, 0.23, 0.57], [12.32, 123123.20, 321321.20]],
954
+ index=["A", "B"],
955
+ columns=["X", "Y", "Z"],
956
+ )
957
+ tm.assert_frame_equal(result, expected)
958
+
959
+ def test_to_excel_output_encoding(self, ext):
960
+ # Avoid mixed inferred_type.
961
+ df = DataFrame(
962
+ [["\u0192", "\u0193", "\u0194"], ["\u0195", "\u0196", "\u0197"]],
963
+ index=["A\u0192", "B"],
964
+ columns=["X\u0193", "Y", "Z"],
965
+ )
966
+
967
+ with tm.ensure_clean("__tmp_to_excel_float_format__." + ext) as filename:
968
+ df.to_excel(filename, sheet_name="TestSheet")
969
+ result = pd.read_excel(filename, sheet_name="TestSheet", index_col=0)
970
+ tm.assert_frame_equal(result, df)
971
+
972
+ def test_to_excel_unicode_filename(self, ext):
973
+ with tm.ensure_clean("\u0192u." + ext) as filename:
974
+ try:
975
+ with open(filename, "wb"):
976
+ pass
977
+ except UnicodeEncodeError:
978
+ pytest.skip("No unicode file names on this system")
979
+
980
+ df = DataFrame(
981
+ [[0.123456, 0.234567, 0.567567], [12.32112, 123123.2, 321321.2]],
982
+ index=["A", "B"],
983
+ columns=["X", "Y", "Z"],
984
+ )
985
+ df.to_excel(filename, sheet_name="test1", float_format="%.2f")
986
+
987
+ with ExcelFile(filename) as reader:
988
+ result = pd.read_excel(reader, sheet_name="test1", index_col=0)
989
+
990
+ expected = DataFrame(
991
+ [[0.12, 0.23, 0.57], [12.32, 123123.20, 321321.20]],
992
+ index=["A", "B"],
993
+ columns=["X", "Y", "Z"],
994
+ )
995
+ tm.assert_frame_equal(result, expected)
996
+
997
+ @pytest.mark.parametrize("use_headers", [True, False])
998
+ @pytest.mark.parametrize("r_idx_nlevels", [1, 2, 3])
999
+ @pytest.mark.parametrize("c_idx_nlevels", [1, 2, 3])
1000
+ def test_excel_010_hemstring(
1001
+ self, merge_cells, c_idx_nlevels, r_idx_nlevels, use_headers, path
1002
+ ):
1003
+ def roundtrip(data, header=True, parser_hdr=0, index=True):
1004
+ data.to_excel(path, header=header, merge_cells=merge_cells, index=index)
1005
+
1006
+ with ExcelFile(path) as xf:
1007
+ return pd.read_excel(
1008
+ xf, sheet_name=xf.sheet_names[0], header=parser_hdr
1009
+ )
1010
+
1011
+ # Basic test.
1012
+ parser_header = 0 if use_headers else None
1013
+ res = roundtrip(DataFrame([0]), use_headers, parser_header)
1014
+
1015
+ assert res.shape == (1, 2)
1016
+ assert res.iloc[0, 0] is not np.nan
1017
+
1018
+ # More complex tests with multi-index.
1019
+ nrows = 5
1020
+ ncols = 3
1021
+
1022
+ # ensure limited functionality in 0.10
1023
+ # override of gh-2370 until sorted out in 0.11
1024
+
1025
+ if c_idx_nlevels == 1:
1026
+ columns = Index([f"a-{i}" for i in range(ncols)], dtype=object)
1027
+ else:
1028
+ columns = MultiIndex.from_arrays(
1029
+ [range(ncols) for _ in range(c_idx_nlevels)],
1030
+ names=[f"i-{i}" for i in range(c_idx_nlevels)],
1031
+ )
1032
+ if r_idx_nlevels == 1:
1033
+ index = Index([f"b-{i}" for i in range(nrows)], dtype=object)
1034
+ else:
1035
+ index = MultiIndex.from_arrays(
1036
+ [range(nrows) for _ in range(r_idx_nlevels)],
1037
+ names=[f"j-{i}" for i in range(r_idx_nlevels)],
1038
+ )
1039
+
1040
+ df = DataFrame(
1041
+ np.ones((nrows, ncols)),
1042
+ columns=columns,
1043
+ index=index,
1044
+ )
1045
+
1046
+ # This if will be removed once multi-column Excel writing
1047
+ # is implemented. For now fixing gh-9794.
1048
+ if c_idx_nlevels > 1:
1049
+ msg = (
1050
+ "Writing to Excel with MultiIndex columns and no index "
1051
+ "\\('index'=False\\) is not yet implemented."
1052
+ )
1053
+ with pytest.raises(NotImplementedError, match=msg):
1054
+ roundtrip(df, use_headers, index=False)
1055
+ else:
1056
+ res = roundtrip(df, use_headers)
1057
+
1058
+ if use_headers:
1059
+ assert res.shape == (nrows, ncols + r_idx_nlevels)
1060
+ else:
1061
+ # First row taken as columns.
1062
+ assert res.shape == (nrows - 1, ncols + r_idx_nlevels)
1063
+
1064
+ # No NaNs.
1065
+ for r in range(len(res.index)):
1066
+ for c in range(len(res.columns)):
1067
+ assert res.iloc[r, c] is not np.nan
1068
+
1069
+ def test_duplicated_columns(self, path):
1070
+ # see gh-5235
1071
+ df = DataFrame([[1, 2, 3], [1, 2, 3], [1, 2, 3]], columns=["A", "B", "B"])
1072
+ df.to_excel(path, sheet_name="test1")
1073
+ expected = DataFrame(
1074
+ [[1, 2, 3], [1, 2, 3], [1, 2, 3]], columns=["A", "B", "B.1"]
1075
+ )
1076
+
1077
+ # By default, we mangle.
1078
+ result = pd.read_excel(path, sheet_name="test1", index_col=0)
1079
+ tm.assert_frame_equal(result, expected)
1080
+
1081
+ # see gh-11007, gh-10970
1082
+ df = DataFrame([[1, 2, 3, 4], [5, 6, 7, 8]], columns=["A", "B", "A", "B"])
1083
+ df.to_excel(path, sheet_name="test1")
1084
+
1085
+ result = pd.read_excel(path, sheet_name="test1", index_col=0)
1086
+ expected = DataFrame(
1087
+ [[1, 2, 3, 4], [5, 6, 7, 8]], columns=["A", "B", "A.1", "B.1"]
1088
+ )
1089
+ tm.assert_frame_equal(result, expected)
1090
+
1091
+ # see gh-10982
1092
+ df.to_excel(path, sheet_name="test1", index=False, header=False)
1093
+ result = pd.read_excel(path, sheet_name="test1", header=None)
1094
+
1095
+ expected = DataFrame([[1, 2, 3, 4], [5, 6, 7, 8]])
1096
+ tm.assert_frame_equal(result, expected)
1097
+
1098
+ def test_swapped_columns(self, path):
1099
+ # Test for issue #5427.
1100
+ write_frame = DataFrame({"A": [1, 1, 1], "B": [2, 2, 2]})
1101
+ write_frame.to_excel(path, sheet_name="test1", columns=["B", "A"])
1102
+
1103
+ read_frame = pd.read_excel(path, sheet_name="test1", header=0)
1104
+
1105
+ tm.assert_series_equal(write_frame["A"], read_frame["A"])
1106
+ tm.assert_series_equal(write_frame["B"], read_frame["B"])
1107
+
1108
+ def test_invalid_columns(self, path):
1109
+ # see gh-10982
1110
+ write_frame = DataFrame({"A": [1, 1, 1], "B": [2, 2, 2]})
1111
+
1112
+ with pytest.raises(KeyError, match="Not all names specified"):
1113
+ write_frame.to_excel(path, sheet_name="test1", columns=["B", "C"])
1114
+
1115
+ with pytest.raises(
1116
+ KeyError, match="'passes columns are not ALL present dataframe'"
1117
+ ):
1118
+ write_frame.to_excel(path, sheet_name="test1", columns=["C", "D"])
1119
+
1120
+ @pytest.mark.parametrize(
1121
+ "to_excel_index,read_excel_index_col",
1122
+ [
1123
+ (True, 0), # Include index in write to file
1124
+ (False, None), # Dont include index in write to file
1125
+ ],
1126
+ )
1127
+ def test_write_subset_columns(self, path, to_excel_index, read_excel_index_col):
1128
+ # GH 31677
1129
+ write_frame = DataFrame({"A": [1, 1, 1], "B": [2, 2, 2], "C": [3, 3, 3]})
1130
+ write_frame.to_excel(
1131
+ path, sheet_name="col_subset_bug", columns=["A", "B"], index=to_excel_index
1132
+ )
1133
+
1134
+ expected = write_frame[["A", "B"]]
1135
+ read_frame = pd.read_excel(
1136
+ path, sheet_name="col_subset_bug", index_col=read_excel_index_col
1137
+ )
1138
+
1139
+ tm.assert_frame_equal(expected, read_frame)
1140
+
1141
+ def test_comment_arg(self, path):
1142
+ # see gh-18735
1143
+ #
1144
+ # Test the comment argument functionality to pd.read_excel.
1145
+
1146
+ # Create file to read in.
1147
+ df = DataFrame({"A": ["one", "#one", "one"], "B": ["two", "two", "#two"]})
1148
+ df.to_excel(path, sheet_name="test_c")
1149
+
1150
+ # Read file without comment arg.
1151
+ result1 = pd.read_excel(path, sheet_name="test_c", index_col=0)
1152
+
1153
+ result1.iloc[1, 0] = None
1154
+ result1.iloc[1, 1] = None
1155
+ result1.iloc[2, 1] = None
1156
+
1157
+ result2 = pd.read_excel(path, sheet_name="test_c", comment="#", index_col=0)
1158
+ tm.assert_frame_equal(result1, result2)
1159
+
1160
+ def test_comment_default(self, path):
1161
+ # Re issue #18735
1162
+ # Test the comment argument default to pd.read_excel
1163
+
1164
+ # Create file to read in
1165
+ df = DataFrame({"A": ["one", "#one", "one"], "B": ["two", "two", "#two"]})
1166
+ df.to_excel(path, sheet_name="test_c")
1167
+
1168
+ # Read file with default and explicit comment=None
1169
+ result1 = pd.read_excel(path, sheet_name="test_c")
1170
+ result2 = pd.read_excel(path, sheet_name="test_c", comment=None)
1171
+ tm.assert_frame_equal(result1, result2)
1172
+
1173
+ def test_comment_used(self, path):
1174
+ # see gh-18735
1175
+ #
1176
+ # Test the comment argument is working as expected when used.
1177
+
1178
+ # Create file to read in.
1179
+ df = DataFrame({"A": ["one", "#one", "one"], "B": ["two", "two", "#two"]})
1180
+ df.to_excel(path, sheet_name="test_c")
1181
+
1182
+ # Test read_frame_comment against manually produced expected output.
1183
+ expected = DataFrame({"A": ["one", None, "one"], "B": ["two", None, None]})
1184
+ result = pd.read_excel(path, sheet_name="test_c", comment="#", index_col=0)
1185
+ tm.assert_frame_equal(result, expected)
1186
+
1187
+ def test_comment_empty_line(self, path):
1188
+ # Re issue #18735
1189
+ # Test that pd.read_excel ignores commented lines at the end of file
1190
+
1191
+ df = DataFrame({"a": ["1", "#2"], "b": ["2", "3"]})
1192
+ df.to_excel(path, index=False)
1193
+
1194
+ # Test that all-comment lines at EoF are ignored
1195
+ expected = DataFrame({"a": [1], "b": [2]})
1196
+ result = pd.read_excel(path, comment="#")
1197
+ tm.assert_frame_equal(result, expected)
1198
+
1199
+ def test_datetimes(self, path):
1200
+ # Test writing and reading datetimes. For issue #9139. (xref #9185)
1201
+ unit = get_exp_unit(path)
1202
+ datetimes = [
1203
+ datetime(2013, 1, 13, 1, 2, 3),
1204
+ datetime(2013, 1, 13, 2, 45, 56),
1205
+ datetime(2013, 1, 13, 4, 29, 49),
1206
+ datetime(2013, 1, 13, 6, 13, 42),
1207
+ datetime(2013, 1, 13, 7, 57, 35),
1208
+ datetime(2013, 1, 13, 9, 41, 28),
1209
+ datetime(2013, 1, 13, 11, 25, 21),
1210
+ datetime(2013, 1, 13, 13, 9, 14),
1211
+ datetime(2013, 1, 13, 14, 53, 7),
1212
+ datetime(2013, 1, 13, 16, 37, 0),
1213
+ datetime(2013, 1, 13, 18, 20, 52),
1214
+ ]
1215
+
1216
+ write_frame = DataFrame({"A": datetimes})
1217
+ write_frame.to_excel(path, sheet_name="Sheet1")
1218
+ read_frame = pd.read_excel(path, sheet_name="Sheet1", header=0)
1219
+
1220
+ expected = write_frame.astype(f"M8[{unit}]")
1221
+ tm.assert_series_equal(expected["A"], read_frame["A"])
1222
+
1223
+ def test_bytes_io(self, engine):
1224
+ # see gh-7074
1225
+ with BytesIO() as bio:
1226
+ df = DataFrame(np.random.default_rng(2).standard_normal((10, 2)))
1227
+
1228
+ # Pass engine explicitly, as there is no file path to infer from.
1229
+ with ExcelWriter(bio, engine=engine) as writer:
1230
+ df.to_excel(writer)
1231
+
1232
+ bio.seek(0)
1233
+ reread_df = pd.read_excel(bio, index_col=0)
1234
+ tm.assert_frame_equal(df, reread_df)
1235
+
1236
+ def test_engine_kwargs(self, engine, path):
1237
+ # GH#52368
1238
+ df = DataFrame([{"A": 1, "B": 2}, {"A": 3, "B": 4}])
1239
+
1240
+ msgs = {
1241
+ "odf": r"OpenDocumentSpreadsheet() got an unexpected keyword "
1242
+ r"argument 'foo'",
1243
+ "openpyxl": r"__init__() got an unexpected keyword argument 'foo'",
1244
+ "xlsxwriter": r"__init__() got an unexpected keyword argument 'foo'",
1245
+ }
1246
+
1247
+ if PY310:
1248
+ msgs[
1249
+ "openpyxl"
1250
+ ] = "Workbook.__init__() got an unexpected keyword argument 'foo'"
1251
+ msgs[
1252
+ "xlsxwriter"
1253
+ ] = "Workbook.__init__() got an unexpected keyword argument 'foo'"
1254
+
1255
+ # Handle change in error message for openpyxl (write and append mode)
1256
+ if engine == "openpyxl" and not os.path.exists(path):
1257
+ msgs[
1258
+ "openpyxl"
1259
+ ] = r"load_workbook() got an unexpected keyword argument 'foo'"
1260
+
1261
+ with pytest.raises(TypeError, match=re.escape(msgs[engine])):
1262
+ df.to_excel(
1263
+ path,
1264
+ engine=engine,
1265
+ engine_kwargs={"foo": "bar"},
1266
+ )
1267
+
1268
+ def test_write_lists_dict(self, path):
1269
+ # see gh-8188.
1270
+ df = DataFrame(
1271
+ {
1272
+ "mixed": ["a", ["b", "c"], {"d": "e", "f": 2}],
1273
+ "numeric": [1, 2, 3.0],
1274
+ "str": ["apple", "banana", "cherry"],
1275
+ }
1276
+ )
1277
+ df.to_excel(path, sheet_name="Sheet1")
1278
+ read = pd.read_excel(path, sheet_name="Sheet1", header=0, index_col=0)
1279
+
1280
+ expected = df.copy()
1281
+ expected.mixed = expected.mixed.apply(str)
1282
+ expected.numeric = expected.numeric.astype("int64")
1283
+
1284
+ tm.assert_frame_equal(read, expected)
1285
+
1286
+ def test_render_as_column_name(self, path):
1287
+ # see gh-34331
1288
+ df = DataFrame({"render": [1, 2], "data": [3, 4]})
1289
+ df.to_excel(path, sheet_name="Sheet1")
1290
+ read = pd.read_excel(path, "Sheet1", index_col=0)
1291
+ expected = df
1292
+ tm.assert_frame_equal(read, expected)
1293
+
1294
+ def test_true_and_false_value_options(self, path):
1295
+ # see gh-13347
1296
+ df = DataFrame([["foo", "bar"]], columns=["col1", "col2"], dtype=object)
1297
+ with option_context("future.no_silent_downcasting", True):
1298
+ expected = df.replace({"foo": True, "bar": False}).astype("bool")
1299
+
1300
+ df.to_excel(path)
1301
+ read_frame = pd.read_excel(
1302
+ path, true_values=["foo"], false_values=["bar"], index_col=0
1303
+ )
1304
+ tm.assert_frame_equal(read_frame, expected)
1305
+
1306
+ def test_freeze_panes(self, path):
1307
+ # see gh-15160
1308
+ expected = DataFrame([[1, 2], [3, 4]], columns=["col1", "col2"])
1309
+ expected.to_excel(path, sheet_name="Sheet1", freeze_panes=(1, 1))
1310
+
1311
+ result = pd.read_excel(path, index_col=0)
1312
+ tm.assert_frame_equal(result, expected)
1313
+
1314
+ def test_path_path_lib(self, engine, ext):
1315
+ df = DataFrame(
1316
+ 1.1 * np.arange(120).reshape((30, 4)),
1317
+ columns=Index(list("ABCD")),
1318
+ index=Index([f"i-{i}" for i in range(30)], dtype=object),
1319
+ )
1320
+ writer = partial(df.to_excel, engine=engine)
1321
+
1322
+ reader = partial(pd.read_excel, index_col=0)
1323
+ result = tm.round_trip_pathlib(writer, reader, path=f"foo{ext}")
1324
+ tm.assert_frame_equal(result, df)
1325
+
1326
+ def test_path_local_path(self, engine, ext):
1327
+ df = DataFrame(
1328
+ 1.1 * np.arange(120).reshape((30, 4)),
1329
+ columns=Index(list("ABCD")),
1330
+ index=Index([f"i-{i}" for i in range(30)]),
1331
+ )
1332
+ writer = partial(df.to_excel, engine=engine)
1333
+
1334
+ reader = partial(pd.read_excel, index_col=0)
1335
+ result = tm.round_trip_localpath(writer, reader, path=f"foo{ext}")
1336
+ tm.assert_frame_equal(result, df)
1337
+
1338
+ def test_merged_cell_custom_objects(self, path):
1339
+ # see GH-27006
1340
+ mi = MultiIndex.from_tuples(
1341
+ [
1342
+ (pd.Period("2018"), pd.Period("2018Q1")),
1343
+ (pd.Period("2018"), pd.Period("2018Q2")),
1344
+ ]
1345
+ )
1346
+ expected = DataFrame(np.ones((2, 2), dtype="int64"), columns=mi)
1347
+ expected.to_excel(path)
1348
+ result = pd.read_excel(path, header=[0, 1], index_col=0)
1349
+ # need to convert PeriodIndexes to standard Indexes for assert equal
1350
+ expected.columns = expected.columns.set_levels(
1351
+ [[str(i) for i in mi.levels[0]], [str(i) for i in mi.levels[1]]],
1352
+ level=[0, 1],
1353
+ )
1354
+ tm.assert_frame_equal(result, expected)
1355
+
1356
+ @pytest.mark.parametrize("dtype", [None, object])
1357
+ def test_raise_when_saving_timezones(self, dtype, tz_aware_fixture, path):
1358
+ # GH 27008, GH 7056
1359
+ tz = tz_aware_fixture
1360
+ data = pd.Timestamp("2019", tz=tz)
1361
+ df = DataFrame([data], dtype=dtype)
1362
+ with pytest.raises(ValueError, match="Excel does not support"):
1363
+ df.to_excel(path)
1364
+
1365
+ data = data.to_pydatetime()
1366
+ df = DataFrame([data], dtype=dtype)
1367
+ with pytest.raises(ValueError, match="Excel does not support"):
1368
+ df.to_excel(path)
1369
+
1370
+ def test_excel_duplicate_columns_with_names(self, path):
1371
+ # GH#39695
1372
+ df = DataFrame({"A": [0, 1], "B": [10, 11]})
1373
+ df.to_excel(path, columns=["A", "B", "A"], index=False)
1374
+
1375
+ result = pd.read_excel(path)
1376
+ expected = DataFrame([[0, 10, 0], [1, 11, 1]], columns=["A", "B", "A.1"])
1377
+ tm.assert_frame_equal(result, expected)
1378
+
1379
+ def test_if_sheet_exists_raises(self, ext):
1380
+ # GH 40230
1381
+ msg = "if_sheet_exists is only valid in append mode (mode='a')"
1382
+
1383
+ with tm.ensure_clean(ext) as f:
1384
+ with pytest.raises(ValueError, match=re.escape(msg)):
1385
+ ExcelWriter(f, if_sheet_exists="replace")
1386
+
1387
+ def test_excel_writer_empty_frame(self, engine, ext):
1388
+ # GH#45793
1389
+ with tm.ensure_clean(ext) as path:
1390
+ with ExcelWriter(path, engine=engine) as writer:
1391
+ DataFrame().to_excel(writer)
1392
+ result = pd.read_excel(path)
1393
+ expected = DataFrame()
1394
+ tm.assert_frame_equal(result, expected)
1395
+
1396
+ def test_to_excel_empty_frame(self, engine, ext):
1397
+ # GH#45793
1398
+ with tm.ensure_clean(ext) as path:
1399
+ DataFrame().to_excel(path, engine=engine)
1400
+ result = pd.read_excel(path)
1401
+ expected = DataFrame()
1402
+ tm.assert_frame_equal(result, expected)
1403
+
1404
+
1405
+ class TestExcelWriterEngineTests:
1406
+ @pytest.mark.parametrize(
1407
+ "klass,ext",
1408
+ [
1409
+ pytest.param(_XlsxWriter, ".xlsx", marks=td.skip_if_no("xlsxwriter")),
1410
+ pytest.param(_OpenpyxlWriter, ".xlsx", marks=td.skip_if_no("openpyxl")),
1411
+ ],
1412
+ )
1413
+ def test_ExcelWriter_dispatch(self, klass, ext):
1414
+ with tm.ensure_clean(ext) as path:
1415
+ with ExcelWriter(path) as writer:
1416
+ if ext == ".xlsx" and bool(
1417
+ import_optional_dependency("xlsxwriter", errors="ignore")
1418
+ ):
1419
+ # xlsxwriter has preference over openpyxl if both installed
1420
+ assert isinstance(writer, _XlsxWriter)
1421
+ else:
1422
+ assert isinstance(writer, klass)
1423
+
1424
+ def test_ExcelWriter_dispatch_raises(self):
1425
+ with pytest.raises(ValueError, match="No engine"):
1426
+ ExcelWriter("nothing")
1427
+
1428
+ def test_register_writer(self):
1429
+ class DummyClass(ExcelWriter):
1430
+ called_save = False
1431
+ called_write_cells = False
1432
+ called_sheets = False
1433
+ _supported_extensions = ("xlsx", "xls")
1434
+ _engine = "dummy"
1435
+
1436
+ def book(self):
1437
+ pass
1438
+
1439
+ def _save(self):
1440
+ type(self).called_save = True
1441
+
1442
+ def _write_cells(self, *args, **kwargs):
1443
+ type(self).called_write_cells = True
1444
+
1445
+ @property
1446
+ def sheets(self):
1447
+ type(self).called_sheets = True
1448
+
1449
+ @classmethod
1450
+ def assert_called_and_reset(cls):
1451
+ assert cls.called_save
1452
+ assert cls.called_write_cells
1453
+ assert not cls.called_sheets
1454
+ cls.called_save = False
1455
+ cls.called_write_cells = False
1456
+
1457
+ register_writer(DummyClass)
1458
+
1459
+ with option_context("io.excel.xlsx.writer", "dummy"):
1460
+ path = "something.xlsx"
1461
+ with tm.ensure_clean(path) as filepath:
1462
+ with ExcelWriter(filepath) as writer:
1463
+ assert isinstance(writer, DummyClass)
1464
+ df = DataFrame(
1465
+ ["a"],
1466
+ columns=Index(["b"], name="foo"),
1467
+ index=Index(["c"], name="bar"),
1468
+ )
1469
+ df.to_excel(filepath)
1470
+ DummyClass.assert_called_and_reset()
1471
+
1472
+ with tm.ensure_clean("something.xls") as filepath:
1473
+ df.to_excel(filepath, engine="dummy")
1474
+ DummyClass.assert_called_and_reset()
1475
+
1476
+
1477
+ @td.skip_if_no("xlrd")
1478
+ @td.skip_if_no("openpyxl")
1479
+ class TestFSPath:
1480
+ def test_excelfile_fspath(self):
1481
+ with tm.ensure_clean("foo.xlsx") as path:
1482
+ df = DataFrame({"A": [1, 2]})
1483
+ df.to_excel(path)
1484
+ with ExcelFile(path) as xl:
1485
+ result = os.fspath(xl)
1486
+ assert result == path
1487
+
1488
+ def test_excelwriter_fspath(self):
1489
+ with tm.ensure_clean("foo.xlsx") as path:
1490
+ with ExcelWriter(path) as writer:
1491
+ assert os.fspath(writer) == str(path)
1492
+
1493
+ def test_to_excel_pos_args_deprecation(self):
1494
+ # GH-54229
1495
+ df = DataFrame({"a": [1, 2, 3]})
1496
+ msg = (
1497
+ r"Starting with pandas version 3.0 all arguments of to_excel except "
1498
+ r"for the argument 'excel_writer' will be keyword-only."
1499
+ )
1500
+ with tm.assert_produces_warning(FutureWarning, match=msg):
1501
+ buf = BytesIO()
1502
+ writer = ExcelWriter(buf)
1503
+ df.to_excel(writer, "Sheet_name_1")
1504
+
1505
+
1506
+ @pytest.mark.parametrize("klass", _writers.values())
1507
+ def test_subclass_attr(klass):
1508
+ # testing that subclasses of ExcelWriter don't have public attributes (issue 49602)
1509
+ attrs_base = {name for name in dir(ExcelWriter) if not name.startswith("_")}
1510
+ attrs_klass = {name for name in dir(klass) if not name.startswith("_")}
1511
+ assert not attrs_base.symmetric_difference(attrs_klass)
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_xlrd.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import io
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.compat import is_platform_windows
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+
11
+ from pandas.io.excel import ExcelFile
12
+ from pandas.io.excel._base import inspect_excel_format
13
+
14
+ xlrd = pytest.importorskip("xlrd")
15
+
16
+ if is_platform_windows():
17
+ pytestmark = pytest.mark.single_cpu
18
+
19
+
20
+ @pytest.fixture(params=[".xls"])
21
+ def read_ext_xlrd(request):
22
+ """
23
+ Valid extensions for reading Excel files with xlrd.
24
+
25
+ Similar to read_ext, but excludes .ods, .xlsb, and for xlrd>2 .xlsx, .xlsm
26
+ """
27
+ return request.param
28
+
29
+
30
+ def test_read_xlrd_book(read_ext_xlrd, datapath):
31
+ engine = "xlrd"
32
+ sheet_name = "Sheet1"
33
+ pth = datapath("io", "data", "excel", "test1.xls")
34
+ with xlrd.open_workbook(pth) as book:
35
+ with ExcelFile(book, engine=engine) as xl:
36
+ result = pd.read_excel(xl, sheet_name=sheet_name, index_col=0)
37
+
38
+ expected = pd.read_excel(
39
+ book, sheet_name=sheet_name, engine=engine, index_col=0
40
+ )
41
+ tm.assert_frame_equal(result, expected)
42
+
43
+
44
+ def test_read_xlsx_fails(datapath):
45
+ # GH 29375
46
+ from xlrd.biffh import XLRDError
47
+
48
+ path = datapath("io", "data", "excel", "test1.xlsx")
49
+ with pytest.raises(XLRDError, match="Excel xlsx file; not supported"):
50
+ pd.read_excel(path, engine="xlrd")
51
+
52
+
53
+ def test_nan_in_xls(datapath):
54
+ # GH 54564
55
+ path = datapath("io", "data", "excel", "test6.xls")
56
+
57
+ expected = pd.DataFrame({0: np.r_[0, 2].astype("int64"), 1: np.r_[1, np.nan]})
58
+
59
+ result = pd.read_excel(path, header=None)
60
+
61
+ tm.assert_frame_equal(result, expected)
62
+
63
+
64
+ @pytest.mark.parametrize(
65
+ "file_header",
66
+ [
67
+ b"\x09\x00\x04\x00\x07\x00\x10\x00",
68
+ b"\x09\x02\x06\x00\x00\x00\x10\x00",
69
+ b"\x09\x04\x06\x00\x00\x00\x10\x00",
70
+ b"\xd0\xcf\x11\xe0\xa1\xb1\x1a\xe1",
71
+ ],
72
+ )
73
+ def test_read_old_xls_files(file_header):
74
+ # GH 41226
75
+ f = io.BytesIO(file_header)
76
+ assert inspect_excel_format(f) == "xls"
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/excel/test_xlsxwriter.py ADDED
@@ -0,0 +1,86 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import contextlib
2
+
3
+ import pytest
4
+
5
+ from pandas.compat import is_platform_windows
6
+
7
+ from pandas import DataFrame
8
+ import pandas._testing as tm
9
+
10
+ from pandas.io.excel import ExcelWriter
11
+
12
+ xlsxwriter = pytest.importorskip("xlsxwriter")
13
+
14
+ if is_platform_windows():
15
+ pytestmark = pytest.mark.single_cpu
16
+
17
+
18
+ @pytest.fixture
19
+ def ext():
20
+ return ".xlsx"
21
+
22
+
23
+ def test_column_format(ext):
24
+ # Test that column formats are applied to cells. Test for issue #9167.
25
+ # Applicable to xlsxwriter only.
26
+ openpyxl = pytest.importorskip("openpyxl")
27
+
28
+ with tm.ensure_clean(ext) as path:
29
+ frame = DataFrame({"A": [123456, 123456], "B": [123456, 123456]})
30
+
31
+ with ExcelWriter(path) as writer:
32
+ frame.to_excel(writer)
33
+
34
+ # Add a number format to col B and ensure it is applied to cells.
35
+ num_format = "#,##0"
36
+ write_workbook = writer.book
37
+ write_worksheet = write_workbook.worksheets()[0]
38
+ col_format = write_workbook.add_format({"num_format": num_format})
39
+ write_worksheet.set_column("B:B", None, col_format)
40
+
41
+ with contextlib.closing(openpyxl.load_workbook(path)) as read_workbook:
42
+ try:
43
+ read_worksheet = read_workbook["Sheet1"]
44
+ except TypeError:
45
+ # compat
46
+ read_worksheet = read_workbook.get_sheet_by_name(name="Sheet1")
47
+
48
+ # Get the number format from the cell.
49
+ try:
50
+ cell = read_worksheet["B2"]
51
+ except TypeError:
52
+ # compat
53
+ cell = read_worksheet.cell("B2")
54
+
55
+ try:
56
+ read_num_format = cell.number_format
57
+ except AttributeError:
58
+ read_num_format = cell.style.number_format._format_code
59
+
60
+ assert read_num_format == num_format
61
+
62
+
63
+ def test_write_append_mode_raises(ext):
64
+ msg = "Append mode is not supported with xlsxwriter!"
65
+
66
+ with tm.ensure_clean(ext) as f:
67
+ with pytest.raises(ValueError, match=msg):
68
+ ExcelWriter(f, engine="xlsxwriter", mode="a")
69
+
70
+
71
+ @pytest.mark.parametrize("nan_inf_to_errors", [True, False])
72
+ def test_engine_kwargs(ext, nan_inf_to_errors):
73
+ # GH 42286
74
+ engine_kwargs = {"options": {"nan_inf_to_errors": nan_inf_to_errors}}
75
+ with tm.ensure_clean(ext) as f:
76
+ with ExcelWriter(f, engine="xlsxwriter", engine_kwargs=engine_kwargs) as writer:
77
+ assert writer.book.nan_inf_to_errors == nan_inf_to_errors
78
+
79
+
80
+ def test_book_and_sheets_consistent(ext):
81
+ # GH#45687 - Ensure sheets is updated if user modifies book
82
+ with tm.ensure_clean(ext) as f:
83
+ with ExcelWriter(f, engine="xlsxwriter") as writer:
84
+ assert writer.sheets == {}
85
+ sheet = writer.book.add_worksheet("test_name")
86
+ assert writer.sheets == {"test_name": sheet}
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/__pycache__/test_console.cpython-310.pyc ADDED
Binary file (2.81 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/__pycache__/test_eng_formatting.cpython-310.pyc ADDED
Binary file (7.86 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/__pycache__/test_to_markdown.cpython-310.pyc ADDED
Binary file (3.12 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (194 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_bar.cpython-310.pyc ADDED
Binary file (9.1 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_exceptions.cpython-310.pyc ADDED
Binary file (1.68 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_format.cpython-310.pyc ADDED
Binary file (18.6 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_highlight.cpython-310.pyc ADDED
Binary file (5.5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pandas/tests/io/formats/style/__pycache__/test_html.cpython-310.pyc ADDED
Binary file (29.8 kB). View file