applied-ai-018 commited on
Commit
246e212
·
verified ·
1 Parent(s): 9a859f5

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_106_mp_rank_02_optim_states.pt +3 -0
  2. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_161_mp_rank_00_optim_states.pt +3 -0
  3. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_214_mp_rank_03_optim_states.pt +3 -0
  4. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_22_mp_rank_01_optim_states.pt +3 -0
  5. ckpts/llama-3b/global_step100/bf16_zero_pp_rank_45_mp_rank_01_optim_states.pt +3 -0
  6. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc +0 -0
  7. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc +0 -0
  8. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc +0 -0
  9. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc +0 -0
  10. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_numba.cpython-310.pyc +0 -0
  11. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc +0 -0
  12. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc +0 -0
  13. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc +0 -0
  14. venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_str.cpython-310.pyc +0 -0
  15. venv/lib/python3.10/site-packages/pandas/tests/extension/__init__.py +0 -0
  16. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/__init__.cpython-310.pyc +0 -0
  17. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/conftest.cpython-310.pyc +0 -0
  18. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_arrow.cpython-310.pyc +0 -0
  19. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_categorical.cpython-310.pyc +0 -0
  20. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_common.cpython-310.pyc +0 -0
  21. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_datetime.cpython-310.pyc +0 -0
  22. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_extension.cpython-310.pyc +0 -0
  23. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_interval.cpython-310.pyc +0 -0
  24. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_masked.cpython-310.pyc +0 -0
  25. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_numpy.cpython-310.pyc +0 -0
  26. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_period.cpython-310.pyc +0 -0
  27. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_sparse.cpython-310.pyc +0 -0
  28. venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_string.cpython-310.pyc +0 -0
  29. venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__init__.py +6 -0
  30. venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__pycache__/__init__.cpython-310.pyc +0 -0
  31. venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__pycache__/array.cpython-310.pyc +0 -0
  32. venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__pycache__/test_array_with_attr.cpython-310.pyc +0 -0
  33. venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/array.py +89 -0
  34. venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/test_array_with_attr.py +33 -0
  35. venv/lib/python3.10/site-packages/pandas/tests/extension/base/__init__.py +131 -0
  36. venv/lib/python3.10/site-packages/pandas/tests/extension/base/accumulate.py +39 -0
  37. venv/lib/python3.10/site-packages/pandas/tests/extension/base/constructors.py +142 -0
  38. venv/lib/python3.10/site-packages/pandas/tests/extension/base/dim2.py +345 -0
  39. venv/lib/python3.10/site-packages/pandas/tests/extension/base/dtype.py +123 -0
  40. venv/lib/python3.10/site-packages/pandas/tests/extension/base/getitem.py +469 -0
  41. venv/lib/python3.10/site-packages/pandas/tests/extension/base/interface.py +137 -0
  42. venv/lib/python3.10/site-packages/pandas/tests/extension/base/methods.py +720 -0
  43. venv/lib/python3.10/site-packages/pandas/tests/extension/base/ops.py +299 -0
  44. venv/lib/python3.10/site-packages/pandas/tests/extension/base/printing.py +41 -0
  45. venv/lib/python3.10/site-packages/pandas/tests/extension/base/reduce.py +153 -0
  46. venv/lib/python3.10/site-packages/pandas/tests/extension/base/setitem.py +451 -0
  47. venv/lib/python3.10/site-packages/pandas/tests/extension/conftest.py +230 -0
  48. venv/lib/python3.10/site-packages/pandas/tests/extension/date/__init__.py +6 -0
  49. venv/lib/python3.10/site-packages/pandas/tests/extension/date/__pycache__/__init__.cpython-310.pyc +0 -0
  50. venv/lib/python3.10/site-packages/pandas/tests/extension/date/__pycache__/array.cpython-310.pyc +0 -0
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_106_mp_rank_02_optim_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:184b8875a8ba1554242e5269de52401a886c7b6f5822981e036b0bfe12811f88
3
+ size 41830340
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_161_mp_rank_00_optim_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:be46368f78105a5943c0060a7246c65d229ba051c44aa8ebc3dd4a83ae629bc8
3
+ size 41830148
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_214_mp_rank_03_optim_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0115cf83389031b1b8fc41f2181bf1d82fd661faa4a009ed64f2cbc44676b111
3
+ size 41830340
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_22_mp_rank_01_optim_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:76b5df43c2fb740b403a1a962e8e53d82e295bc29277e75333e0e7a5b8f928b5
3
+ size 41830202
ckpts/llama-3b/global_step100/bf16_zero_pp_rank_45_mp_rank_01_optim_states.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:444e7ab501d02346484010fdb912ef82cb69385b5a1ce92ed8f33f770a27c55a
3
+ size 41830202
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (186 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_apply_relabeling.cpython-310.pyc ADDED
Binary file (3.3 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_frame_transform.cpython-310.pyc ADDED
Binary file (8.03 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_invalid_arg.cpython-310.pyc ADDED
Binary file (11.8 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_numba.cpython-310.pyc ADDED
Binary file (5.3 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply.cpython-310.pyc ADDED
Binary file (23.7 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_apply_relabeling.cpython-310.pyc ADDED
Binary file (1.45 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_series_transform.cpython-310.pyc ADDED
Binary file (3.24 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/apply/__pycache__/test_str.cpython-310.pyc ADDED
Binary file (7.25 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (190 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (6.09 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_arrow.cpython-310.pyc ADDED
Binary file (90.3 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_categorical.cpython-310.pyc ADDED
Binary file (7.3 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_common.cpython-310.pyc ADDED
Binary file (3.79 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_datetime.cpython-310.pyc ADDED
Binary file (5.47 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_extension.cpython-310.pyc ADDED
Binary file (1.26 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_interval.cpython-310.pyc ADDED
Binary file (3.86 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_masked.cpython-310.pyc ADDED
Binary file (10.9 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_numpy.cpython-310.pyc ADDED
Binary file (15.1 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_period.cpython-310.pyc ADDED
Binary file (4.65 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_sparse.cpython-310.pyc ADDED
Binary file (17.1 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/__pycache__/test_string.cpython-310.pyc ADDED
Binary file (7.91 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from pandas.tests.extension.array_with_attr.array import (
2
+ FloatAttrArray,
3
+ FloatAttrDtype,
4
+ )
5
+
6
+ __all__ = ["FloatAttrArray", "FloatAttrDtype"]
venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (343 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__pycache__/array.cpython-310.pyc ADDED
Binary file (3.47 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/__pycache__/test_array_with_attr.cpython-310.pyc ADDED
Binary file (1.21 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/array.py ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Test extension array that has custom attribute information (not stored on the dtype).
3
+
4
+ """
5
+ from __future__ import annotations
6
+
7
+ import numbers
8
+ from typing import TYPE_CHECKING
9
+
10
+ import numpy as np
11
+
12
+ from pandas.core.dtypes.base import ExtensionDtype
13
+
14
+ import pandas as pd
15
+ from pandas.core.arrays import ExtensionArray
16
+
17
+ if TYPE_CHECKING:
18
+ from pandas._typing import type_t
19
+
20
+
21
+ class FloatAttrDtype(ExtensionDtype):
22
+ type = float
23
+ name = "float_attr"
24
+ na_value = np.nan
25
+
26
+ @classmethod
27
+ def construct_array_type(cls) -> type_t[FloatAttrArray]:
28
+ """
29
+ Return the array type associated with this dtype.
30
+
31
+ Returns
32
+ -------
33
+ type
34
+ """
35
+ return FloatAttrArray
36
+
37
+
38
+ class FloatAttrArray(ExtensionArray):
39
+ dtype = FloatAttrDtype()
40
+ __array_priority__ = 1000
41
+
42
+ def __init__(self, values, attr=None) -> None:
43
+ if not isinstance(values, np.ndarray):
44
+ raise TypeError("Need to pass a numpy array of float64 dtype as values")
45
+ if not values.dtype == "float64":
46
+ raise TypeError("Need to pass a numpy array of float64 dtype as values")
47
+ self.data = values
48
+ self.attr = attr
49
+
50
+ @classmethod
51
+ def _from_sequence(cls, scalars, *, dtype=None, copy=False):
52
+ if not copy:
53
+ data = np.asarray(scalars, dtype="float64")
54
+ else:
55
+ data = np.array(scalars, dtype="float64", copy=copy)
56
+ return cls(data)
57
+
58
+ def __getitem__(self, item):
59
+ if isinstance(item, numbers.Integral):
60
+ return self.data[item]
61
+ else:
62
+ # slice, list-like, mask
63
+ item = pd.api.indexers.check_array_indexer(self, item)
64
+ return type(self)(self.data[item], self.attr)
65
+
66
+ def __len__(self) -> int:
67
+ return len(self.data)
68
+
69
+ def isna(self):
70
+ return np.isnan(self.data)
71
+
72
+ def take(self, indexer, allow_fill=False, fill_value=None):
73
+ from pandas.api.extensions import take
74
+
75
+ data = self.data
76
+ if allow_fill and fill_value is None:
77
+ fill_value = self.dtype.na_value
78
+
79
+ result = take(data, indexer, fill_value=fill_value, allow_fill=allow_fill)
80
+ return type(self)(result, self.attr)
81
+
82
+ def copy(self):
83
+ return type(self)(self.data.copy(), self.attr)
84
+
85
+ @classmethod
86
+ def _concat_same_type(cls, to_concat):
87
+ data = np.concatenate([x.data for x in to_concat])
88
+ attr = to_concat[0].attr if len(to_concat) else None
89
+ return cls(data, attr)
venv/lib/python3.10/site-packages/pandas/tests/extension/array_with_attr/test_array_with_attr.py ADDED
@@ -0,0 +1,33 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ import pandas as pd
4
+ import pandas._testing as tm
5
+ from pandas.tests.extension.array_with_attr import FloatAttrArray
6
+
7
+
8
+ def test_concat_with_all_na():
9
+ # https://github.com/pandas-dev/pandas/pull/47762
10
+ # ensure that attribute of the column array is preserved (when it gets
11
+ # preserved in reindexing the array) during merge/concat
12
+ arr = FloatAttrArray(np.array([np.nan, np.nan], dtype="float64"), attr="test")
13
+
14
+ df1 = pd.DataFrame({"col": arr, "key": [0, 1]})
15
+ df2 = pd.DataFrame({"key": [0, 1], "col2": [1, 2]})
16
+ result = pd.merge(df1, df2, on="key")
17
+ expected = pd.DataFrame({"col": arr, "key": [0, 1], "col2": [1, 2]})
18
+ tm.assert_frame_equal(result, expected)
19
+ assert result["col"].array.attr == "test"
20
+
21
+ df1 = pd.DataFrame({"col": arr, "key": [0, 1]})
22
+ df2 = pd.DataFrame({"key": [0, 2], "col2": [1, 2]})
23
+ result = pd.merge(df1, df2, on="key")
24
+ expected = pd.DataFrame({"col": arr.take([0]), "key": [0], "col2": [1]})
25
+ tm.assert_frame_equal(result, expected)
26
+ assert result["col"].array.attr == "test"
27
+
28
+ result = pd.concat([df1.set_index("key"), df2.set_index("key")], axis=1)
29
+ expected = pd.DataFrame(
30
+ {"col": arr.take([0, 1, -1]), "col2": [1, np.nan, 2], "key": [0, 1, 2]}
31
+ ).set_index("key")
32
+ tm.assert_frame_equal(result, expected)
33
+ assert result["col"].array.attr == "test"
venv/lib/python3.10/site-packages/pandas/tests/extension/base/__init__.py ADDED
@@ -0,0 +1,131 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Base test suite for extension arrays.
3
+
4
+ These tests are intended for third-party libraries to subclass to validate
5
+ that their extension arrays and dtypes satisfy the interface. Moving or
6
+ renaming the tests should not be done lightly.
7
+
8
+ Libraries are expected to implement a few pytest fixtures to provide data
9
+ for the tests. The fixtures may be located in either
10
+
11
+ * The same module as your test class.
12
+ * A ``conftest.py`` in the same directory as your test class.
13
+
14
+ The full list of fixtures may be found in the ``conftest.py`` next to this
15
+ file.
16
+
17
+ .. code-block:: python
18
+
19
+ import pytest
20
+ from pandas.tests.extension.base import BaseDtypeTests
21
+
22
+
23
+ @pytest.fixture
24
+ def dtype():
25
+ return MyDtype()
26
+
27
+
28
+ class TestMyDtype(BaseDtypeTests):
29
+ pass
30
+
31
+
32
+ Your class ``TestDtype`` will inherit all the tests defined on
33
+ ``BaseDtypeTests``. pytest's fixture discover will supply your ``dtype``
34
+ wherever the test requires it. You're free to implement additional tests.
35
+
36
+ """
37
+ from pandas.tests.extension.base.accumulate import BaseAccumulateTests
38
+ from pandas.tests.extension.base.casting import BaseCastingTests
39
+ from pandas.tests.extension.base.constructors import BaseConstructorsTests
40
+ from pandas.tests.extension.base.dim2 import ( # noqa: F401
41
+ Dim2CompatTests,
42
+ NDArrayBacked2DTests,
43
+ )
44
+ from pandas.tests.extension.base.dtype import BaseDtypeTests
45
+ from pandas.tests.extension.base.getitem import BaseGetitemTests
46
+ from pandas.tests.extension.base.groupby import BaseGroupbyTests
47
+ from pandas.tests.extension.base.index import BaseIndexTests
48
+ from pandas.tests.extension.base.interface import BaseInterfaceTests
49
+ from pandas.tests.extension.base.io import BaseParsingTests
50
+ from pandas.tests.extension.base.methods import BaseMethodsTests
51
+ from pandas.tests.extension.base.missing import BaseMissingTests
52
+ from pandas.tests.extension.base.ops import ( # noqa: F401
53
+ BaseArithmeticOpsTests,
54
+ BaseComparisonOpsTests,
55
+ BaseOpsUtil,
56
+ BaseUnaryOpsTests,
57
+ )
58
+ from pandas.tests.extension.base.printing import BasePrintingTests
59
+ from pandas.tests.extension.base.reduce import BaseReduceTests
60
+ from pandas.tests.extension.base.reshaping import BaseReshapingTests
61
+ from pandas.tests.extension.base.setitem import BaseSetitemTests
62
+
63
+
64
+ # One test class that you can inherit as an alternative to inheriting all the
65
+ # test classes above.
66
+ # Note 1) this excludes Dim2CompatTests and NDArrayBacked2DTests.
67
+ # Note 2) this uses BaseReduceTests and and _not_ BaseBooleanReduceTests,
68
+ # BaseNoReduceTests, or BaseNumericReduceTests
69
+ class ExtensionTests(
70
+ BaseAccumulateTests,
71
+ BaseCastingTests,
72
+ BaseConstructorsTests,
73
+ BaseDtypeTests,
74
+ BaseGetitemTests,
75
+ BaseGroupbyTests,
76
+ BaseIndexTests,
77
+ BaseInterfaceTests,
78
+ BaseParsingTests,
79
+ BaseMethodsTests,
80
+ BaseMissingTests,
81
+ BaseArithmeticOpsTests,
82
+ BaseComparisonOpsTests,
83
+ BaseUnaryOpsTests,
84
+ BasePrintingTests,
85
+ BaseReduceTests,
86
+ BaseReshapingTests,
87
+ BaseSetitemTests,
88
+ Dim2CompatTests,
89
+ ):
90
+ pass
91
+
92
+
93
+ def __getattr__(name: str):
94
+ import warnings
95
+
96
+ if name == "BaseNoReduceTests":
97
+ warnings.warn(
98
+ "BaseNoReduceTests is deprecated and will be removed in a "
99
+ "future version. Use BaseReduceTests and override "
100
+ "`_supports_reduction` instead.",
101
+ FutureWarning,
102
+ )
103
+ from pandas.tests.extension.base.reduce import BaseNoReduceTests
104
+
105
+ return BaseNoReduceTests
106
+
107
+ elif name == "BaseNumericReduceTests":
108
+ warnings.warn(
109
+ "BaseNumericReduceTests is deprecated and will be removed in a "
110
+ "future version. Use BaseReduceTests and override "
111
+ "`_supports_reduction` instead.",
112
+ FutureWarning,
113
+ )
114
+ from pandas.tests.extension.base.reduce import BaseNumericReduceTests
115
+
116
+ return BaseNumericReduceTests
117
+
118
+ elif name == "BaseBooleanReduceTests":
119
+ warnings.warn(
120
+ "BaseBooleanReduceTests is deprecated and will be removed in a "
121
+ "future version. Use BaseReduceTests and override "
122
+ "`_supports_reduction` instead.",
123
+ FutureWarning,
124
+ )
125
+ from pandas.tests.extension.base.reduce import BaseBooleanReduceTests
126
+
127
+ return BaseBooleanReduceTests
128
+
129
+ raise AttributeError(
130
+ f"module 'pandas.tests.extension.base' has no attribute '{name}'"
131
+ )
venv/lib/python3.10/site-packages/pandas/tests/extension/base/accumulate.py ADDED
@@ -0,0 +1,39 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import pandas as pd
4
+ import pandas._testing as tm
5
+
6
+
7
+ class BaseAccumulateTests:
8
+ """
9
+ Accumulation specific tests. Generally these only
10
+ make sense for numeric/boolean operations.
11
+ """
12
+
13
+ def _supports_accumulation(self, ser: pd.Series, op_name: str) -> bool:
14
+ # Do we expect this accumulation to be supported for this dtype?
15
+ # We default to assuming "no"; subclass authors should override here.
16
+ return False
17
+
18
+ def check_accumulate(self, ser: pd.Series, op_name: str, skipna: bool):
19
+ try:
20
+ alt = ser.astype("float64")
21
+ except TypeError:
22
+ # e.g. Period can't be cast to float64
23
+ alt = ser.astype(object)
24
+
25
+ result = getattr(ser, op_name)(skipna=skipna)
26
+ expected = getattr(alt, op_name)(skipna=skipna)
27
+ tm.assert_series_equal(result, expected, check_dtype=False)
28
+
29
+ @pytest.mark.parametrize("skipna", [True, False])
30
+ def test_accumulate_series(self, data, all_numeric_accumulations, skipna):
31
+ op_name = all_numeric_accumulations
32
+ ser = pd.Series(data)
33
+
34
+ if self._supports_accumulation(ser, op_name):
35
+ self.check_accumulate(ser, op_name, skipna)
36
+ else:
37
+ with pytest.raises((NotImplementedError, TypeError)):
38
+ # TODO: require TypeError for things that will _never_ work?
39
+ getattr(ser, op_name)(skipna=skipna)
venv/lib/python3.10/site-packages/pandas/tests/extension/base/constructors.py ADDED
@@ -0,0 +1,142 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+ from pandas.api.extensions import ExtensionArray
7
+ from pandas.core.internals.blocks import EABackedBlock
8
+
9
+
10
+ class BaseConstructorsTests:
11
+ def test_from_sequence_from_cls(self, data):
12
+ result = type(data)._from_sequence(data, dtype=data.dtype)
13
+ tm.assert_extension_array_equal(result, data)
14
+
15
+ data = data[:0]
16
+ result = type(data)._from_sequence(data, dtype=data.dtype)
17
+ tm.assert_extension_array_equal(result, data)
18
+
19
+ def test_array_from_scalars(self, data):
20
+ scalars = [data[0], data[1], data[2]]
21
+ result = data._from_sequence(scalars, dtype=data.dtype)
22
+ assert isinstance(result, type(data))
23
+
24
+ def test_series_constructor(self, data):
25
+ result = pd.Series(data, copy=False)
26
+ assert result.dtype == data.dtype
27
+ assert len(result) == len(data)
28
+ if hasattr(result._mgr, "blocks"):
29
+ assert isinstance(result._mgr.blocks[0], EABackedBlock)
30
+ assert result._mgr.array is data
31
+
32
+ # Series[EA] is unboxed / boxed correctly
33
+ result2 = pd.Series(result)
34
+ assert result2.dtype == data.dtype
35
+ if hasattr(result._mgr, "blocks"):
36
+ assert isinstance(result2._mgr.blocks[0], EABackedBlock)
37
+
38
+ def test_series_constructor_no_data_with_index(self, dtype, na_value):
39
+ result = pd.Series(index=[1, 2, 3], dtype=dtype)
40
+ expected = pd.Series([na_value] * 3, index=[1, 2, 3], dtype=dtype)
41
+ tm.assert_series_equal(result, expected)
42
+
43
+ # GH 33559 - empty index
44
+ result = pd.Series(index=[], dtype=dtype)
45
+ expected = pd.Series([], index=pd.Index([], dtype="object"), dtype=dtype)
46
+ tm.assert_series_equal(result, expected)
47
+
48
+ def test_series_constructor_scalar_na_with_index(self, dtype, na_value):
49
+ result = pd.Series(na_value, index=[1, 2, 3], dtype=dtype)
50
+ expected = pd.Series([na_value] * 3, index=[1, 2, 3], dtype=dtype)
51
+ tm.assert_series_equal(result, expected)
52
+
53
+ def test_series_constructor_scalar_with_index(self, data, dtype):
54
+ scalar = data[0]
55
+ result = pd.Series(scalar, index=[1, 2, 3], dtype=dtype)
56
+ expected = pd.Series([scalar] * 3, index=[1, 2, 3], dtype=dtype)
57
+ tm.assert_series_equal(result, expected)
58
+
59
+ result = pd.Series(scalar, index=["foo"], dtype=dtype)
60
+ expected = pd.Series([scalar], index=["foo"], dtype=dtype)
61
+ tm.assert_series_equal(result, expected)
62
+
63
+ @pytest.mark.parametrize("from_series", [True, False])
64
+ def test_dataframe_constructor_from_dict(self, data, from_series):
65
+ if from_series:
66
+ data = pd.Series(data)
67
+ result = pd.DataFrame({"A": data})
68
+ assert result.dtypes["A"] == data.dtype
69
+ assert result.shape == (len(data), 1)
70
+ if hasattr(result._mgr, "blocks"):
71
+ assert isinstance(result._mgr.blocks[0], EABackedBlock)
72
+ assert isinstance(result._mgr.arrays[0], ExtensionArray)
73
+
74
+ def test_dataframe_from_series(self, data):
75
+ result = pd.DataFrame(pd.Series(data))
76
+ assert result.dtypes[0] == data.dtype
77
+ assert result.shape == (len(data), 1)
78
+ if hasattr(result._mgr, "blocks"):
79
+ assert isinstance(result._mgr.blocks[0], EABackedBlock)
80
+ assert isinstance(result._mgr.arrays[0], ExtensionArray)
81
+
82
+ def test_series_given_mismatched_index_raises(self, data):
83
+ msg = r"Length of values \(3\) does not match length of index \(5\)"
84
+ with pytest.raises(ValueError, match=msg):
85
+ pd.Series(data[:3], index=[0, 1, 2, 3, 4])
86
+
87
+ def test_from_dtype(self, data):
88
+ # construct from our dtype & string dtype
89
+ dtype = data.dtype
90
+
91
+ expected = pd.Series(data)
92
+ result = pd.Series(list(data), dtype=dtype)
93
+ tm.assert_series_equal(result, expected)
94
+
95
+ result = pd.Series(list(data), dtype=str(dtype))
96
+ tm.assert_series_equal(result, expected)
97
+
98
+ # gh-30280
99
+
100
+ expected = pd.DataFrame(data).astype(dtype)
101
+ result = pd.DataFrame(list(data), dtype=dtype)
102
+ tm.assert_frame_equal(result, expected)
103
+
104
+ result = pd.DataFrame(list(data), dtype=str(dtype))
105
+ tm.assert_frame_equal(result, expected)
106
+
107
+ def test_pandas_array(self, data):
108
+ # pd.array(extension_array) should be idempotent...
109
+ result = pd.array(data)
110
+ tm.assert_extension_array_equal(result, data)
111
+
112
+ def test_pandas_array_dtype(self, data):
113
+ # ... but specifying dtype will override idempotency
114
+ result = pd.array(data, dtype=np.dtype(object))
115
+ expected = pd.arrays.NumpyExtensionArray(np.asarray(data, dtype=object))
116
+ tm.assert_equal(result, expected)
117
+
118
+ def test_construct_empty_dataframe(self, dtype):
119
+ # GH 33623
120
+ result = pd.DataFrame(columns=["a"], dtype=dtype)
121
+ expected = pd.DataFrame(
122
+ {"a": pd.array([], dtype=dtype)}, index=pd.RangeIndex(0)
123
+ )
124
+ tm.assert_frame_equal(result, expected)
125
+
126
+ def test_empty(self, dtype):
127
+ cls = dtype.construct_array_type()
128
+ result = cls._empty((4,), dtype=dtype)
129
+ assert isinstance(result, cls)
130
+ assert result.dtype == dtype
131
+ assert result.shape == (4,)
132
+
133
+ # GH#19600 method on ExtensionDtype
134
+ result2 = dtype.empty((4,))
135
+ assert isinstance(result2, cls)
136
+ assert result2.dtype == dtype
137
+ assert result2.shape == (4,)
138
+
139
+ result2 = dtype.empty(4)
140
+ assert isinstance(result2, cls)
141
+ assert result2.dtype == dtype
142
+ assert result2.shape == (4,)
venv/lib/python3.10/site-packages/pandas/tests/extension/base/dim2.py ADDED
@@ -0,0 +1,345 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Tests for 2D compatibility.
3
+ """
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas._libs.missing import is_matching_na
8
+
9
+ from pandas.core.dtypes.common import (
10
+ is_bool_dtype,
11
+ is_integer_dtype,
12
+ )
13
+
14
+ import pandas as pd
15
+ import pandas._testing as tm
16
+ from pandas.core.arrays.integer import NUMPY_INT_TO_DTYPE
17
+
18
+
19
+ class Dim2CompatTests:
20
+ # Note: these are ONLY for ExtensionArray subclasses that support 2D arrays.
21
+ # i.e. not for pyarrow-backed EAs.
22
+
23
+ @pytest.fixture(autouse=True)
24
+ def skip_if_doesnt_support_2d(self, dtype, request):
25
+ if not dtype._supports_2d:
26
+ node = request.node
27
+ # In cases where we are mixed in to ExtensionTests, we only want to
28
+ # skip tests that are defined in Dim2CompatTests
29
+ test_func = node._obj
30
+ if test_func.__qualname__.startswith("Dim2CompatTests"):
31
+ # TODO: is there a less hacky way of checking this?
32
+ pytest.skip(f"{dtype} does not support 2D.")
33
+
34
+ def test_transpose(self, data):
35
+ arr2d = data.repeat(2).reshape(-1, 2)
36
+ shape = arr2d.shape
37
+ assert shape[0] != shape[-1] # otherwise the rest of the test is useless
38
+
39
+ assert arr2d.T.shape == shape[::-1]
40
+
41
+ def test_frame_from_2d_array(self, data):
42
+ arr2d = data.repeat(2).reshape(-1, 2)
43
+
44
+ df = pd.DataFrame(arr2d)
45
+ expected = pd.DataFrame({0: arr2d[:, 0], 1: arr2d[:, 1]})
46
+ tm.assert_frame_equal(df, expected)
47
+
48
+ def test_swapaxes(self, data):
49
+ arr2d = data.repeat(2).reshape(-1, 2)
50
+
51
+ result = arr2d.swapaxes(0, 1)
52
+ expected = arr2d.T
53
+ tm.assert_extension_array_equal(result, expected)
54
+
55
+ def test_delete_2d(self, data):
56
+ arr2d = data.repeat(3).reshape(-1, 3)
57
+
58
+ # axis = 0
59
+ result = arr2d.delete(1, axis=0)
60
+ expected = data.delete(1).repeat(3).reshape(-1, 3)
61
+ tm.assert_extension_array_equal(result, expected)
62
+
63
+ # axis = 1
64
+ result = arr2d.delete(1, axis=1)
65
+ expected = data.repeat(2).reshape(-1, 2)
66
+ tm.assert_extension_array_equal(result, expected)
67
+
68
+ def test_take_2d(self, data):
69
+ arr2d = data.reshape(-1, 1)
70
+
71
+ result = arr2d.take([0, 0, -1], axis=0)
72
+
73
+ expected = data.take([0, 0, -1]).reshape(-1, 1)
74
+ tm.assert_extension_array_equal(result, expected)
75
+
76
+ def test_repr_2d(self, data):
77
+ # this could fail in a corner case where an element contained the name
78
+ res = repr(data.reshape(1, -1))
79
+ assert res.count(f"<{type(data).__name__}") == 1
80
+
81
+ res = repr(data.reshape(-1, 1))
82
+ assert res.count(f"<{type(data).__name__}") == 1
83
+
84
+ def test_reshape(self, data):
85
+ arr2d = data.reshape(-1, 1)
86
+ assert arr2d.shape == (data.size, 1)
87
+ assert len(arr2d) == len(data)
88
+
89
+ arr2d = data.reshape((-1, 1))
90
+ assert arr2d.shape == (data.size, 1)
91
+ assert len(arr2d) == len(data)
92
+
93
+ with pytest.raises(ValueError):
94
+ data.reshape((data.size, 2))
95
+ with pytest.raises(ValueError):
96
+ data.reshape(data.size, 2)
97
+
98
+ def test_getitem_2d(self, data):
99
+ arr2d = data.reshape(1, -1)
100
+
101
+ result = arr2d[0]
102
+ tm.assert_extension_array_equal(result, data)
103
+
104
+ with pytest.raises(IndexError):
105
+ arr2d[1]
106
+
107
+ with pytest.raises(IndexError):
108
+ arr2d[-2]
109
+
110
+ result = arr2d[:]
111
+ tm.assert_extension_array_equal(result, arr2d)
112
+
113
+ result = arr2d[:, :]
114
+ tm.assert_extension_array_equal(result, arr2d)
115
+
116
+ result = arr2d[:, 0]
117
+ expected = data[[0]]
118
+ tm.assert_extension_array_equal(result, expected)
119
+
120
+ # dimension-expanding getitem on 1D
121
+ result = data[:, np.newaxis]
122
+ tm.assert_extension_array_equal(result, arr2d.T)
123
+
124
+ def test_iter_2d(self, data):
125
+ arr2d = data.reshape(1, -1)
126
+
127
+ objs = list(iter(arr2d))
128
+ assert len(objs) == arr2d.shape[0]
129
+
130
+ for obj in objs:
131
+ assert isinstance(obj, type(data))
132
+ assert obj.dtype == data.dtype
133
+ assert obj.ndim == 1
134
+ assert len(obj) == arr2d.shape[1]
135
+
136
+ def test_tolist_2d(self, data):
137
+ arr2d = data.reshape(1, -1)
138
+
139
+ result = arr2d.tolist()
140
+ expected = [data.tolist()]
141
+
142
+ assert isinstance(result, list)
143
+ assert all(isinstance(x, list) for x in result)
144
+
145
+ assert result == expected
146
+
147
+ def test_concat_2d(self, data):
148
+ left = type(data)._concat_same_type([data, data]).reshape(-1, 2)
149
+ right = left.copy()
150
+
151
+ # axis=0
152
+ result = left._concat_same_type([left, right], axis=0)
153
+ expected = data._concat_same_type([data] * 4).reshape(-1, 2)
154
+ tm.assert_extension_array_equal(result, expected)
155
+
156
+ # axis=1
157
+ result = left._concat_same_type([left, right], axis=1)
158
+ assert result.shape == (len(data), 4)
159
+ tm.assert_extension_array_equal(result[:, :2], left)
160
+ tm.assert_extension_array_equal(result[:, 2:], right)
161
+
162
+ # axis > 1 -> invalid
163
+ msg = "axis 2 is out of bounds for array of dimension 2"
164
+ with pytest.raises(ValueError, match=msg):
165
+ left._concat_same_type([left, right], axis=2)
166
+
167
+ @pytest.mark.parametrize("method", ["backfill", "pad"])
168
+ def test_fillna_2d_method(self, data_missing, method):
169
+ # pad_or_backfill is always along axis=0
170
+ arr = data_missing.repeat(2).reshape(2, 2)
171
+ assert arr[0].isna().all()
172
+ assert not arr[1].isna().any()
173
+
174
+ result = arr._pad_or_backfill(method=method, limit=None)
175
+
176
+ expected = data_missing._pad_or_backfill(method=method).repeat(2).reshape(2, 2)
177
+ tm.assert_extension_array_equal(result, expected)
178
+
179
+ # Reverse so that backfill is not a no-op.
180
+ arr2 = arr[::-1]
181
+ assert not arr2[0].isna().any()
182
+ assert arr2[1].isna().all()
183
+
184
+ result2 = arr2._pad_or_backfill(method=method, limit=None)
185
+
186
+ expected2 = (
187
+ data_missing[::-1]._pad_or_backfill(method=method).repeat(2).reshape(2, 2)
188
+ )
189
+ tm.assert_extension_array_equal(result2, expected2)
190
+
191
+ @pytest.mark.parametrize("method", ["mean", "median", "var", "std", "sum", "prod"])
192
+ def test_reductions_2d_axis_none(self, data, method):
193
+ arr2d = data.reshape(1, -1)
194
+
195
+ err_expected = None
196
+ err_result = None
197
+ try:
198
+ expected = getattr(data, method)()
199
+ except Exception as err:
200
+ # if the 1D reduction is invalid, the 2D reduction should be as well
201
+ err_expected = err
202
+ try:
203
+ result = getattr(arr2d, method)(axis=None)
204
+ except Exception as err2:
205
+ err_result = err2
206
+
207
+ else:
208
+ result = getattr(arr2d, method)(axis=None)
209
+
210
+ if err_result is not None or err_expected is not None:
211
+ assert type(err_result) == type(err_expected)
212
+ return
213
+
214
+ assert is_matching_na(result, expected) or result == expected
215
+
216
+ @pytest.mark.parametrize("method", ["mean", "median", "var", "std", "sum", "prod"])
217
+ @pytest.mark.parametrize("min_count", [0, 1])
218
+ def test_reductions_2d_axis0(self, data, method, min_count):
219
+ if min_count == 1 and method not in ["sum", "prod"]:
220
+ pytest.skip(f"min_count not relevant for {method}")
221
+
222
+ arr2d = data.reshape(1, -1)
223
+
224
+ kwargs = {}
225
+ if method in ["std", "var"]:
226
+ # pass ddof=0 so we get all-zero std instead of all-NA std
227
+ kwargs["ddof"] = 0
228
+ elif method in ["prod", "sum"]:
229
+ kwargs["min_count"] = min_count
230
+
231
+ try:
232
+ result = getattr(arr2d, method)(axis=0, **kwargs)
233
+ except Exception as err:
234
+ try:
235
+ getattr(data, method)()
236
+ except Exception as err2:
237
+ assert type(err) == type(err2)
238
+ return
239
+ else:
240
+ raise AssertionError("Both reductions should raise or neither")
241
+
242
+ def get_reduction_result_dtype(dtype):
243
+ # windows and 32bit builds will in some cases have int32/uint32
244
+ # where other builds will have int64/uint64.
245
+ if dtype.itemsize == 8:
246
+ return dtype
247
+ elif dtype.kind in "ib":
248
+ return NUMPY_INT_TO_DTYPE[np.dtype(int)]
249
+ else:
250
+ # i.e. dtype.kind == "u"
251
+ return NUMPY_INT_TO_DTYPE[np.dtype("uint")]
252
+
253
+ if method in ["sum", "prod"]:
254
+ # std and var are not dtype-preserving
255
+ expected = data
256
+ if data.dtype.kind in "iub":
257
+ dtype = get_reduction_result_dtype(data.dtype)
258
+ expected = data.astype(dtype)
259
+ assert dtype == expected.dtype
260
+
261
+ if min_count == 0:
262
+ fill_value = 1 if method == "prod" else 0
263
+ expected = expected.fillna(fill_value)
264
+
265
+ tm.assert_extension_array_equal(result, expected)
266
+ elif method == "median":
267
+ # std and var are not dtype-preserving
268
+ expected = data
269
+ tm.assert_extension_array_equal(result, expected)
270
+ elif method in ["mean", "std", "var"]:
271
+ if is_integer_dtype(data) or is_bool_dtype(data):
272
+ data = data.astype("Float64")
273
+ if method == "mean":
274
+ tm.assert_extension_array_equal(result, data)
275
+ else:
276
+ tm.assert_extension_array_equal(result, data - data)
277
+
278
+ @pytest.mark.parametrize("method", ["mean", "median", "var", "std", "sum", "prod"])
279
+ def test_reductions_2d_axis1(self, data, method):
280
+ arr2d = data.reshape(1, -1)
281
+
282
+ try:
283
+ result = getattr(arr2d, method)(axis=1)
284
+ except Exception as err:
285
+ try:
286
+ getattr(data, method)()
287
+ except Exception as err2:
288
+ assert type(err) == type(err2)
289
+ return
290
+ else:
291
+ raise AssertionError("Both reductions should raise or neither")
292
+
293
+ # not necessarily type/dtype-preserving, so weaker assertions
294
+ assert result.shape == (1,)
295
+ expected_scalar = getattr(data, method)()
296
+ res = result[0]
297
+ assert is_matching_na(res, expected_scalar) or res == expected_scalar
298
+
299
+
300
+ class NDArrayBacked2DTests(Dim2CompatTests):
301
+ # More specific tests for NDArrayBackedExtensionArray subclasses
302
+
303
+ def test_copy_order(self, data):
304
+ # We should be matching numpy semantics for the "order" keyword in 'copy'
305
+ arr2d = data.repeat(2).reshape(-1, 2)
306
+ assert arr2d._ndarray.flags["C_CONTIGUOUS"]
307
+
308
+ res = arr2d.copy()
309
+ assert res._ndarray.flags["C_CONTIGUOUS"]
310
+
311
+ res = arr2d[::2, ::2].copy()
312
+ assert res._ndarray.flags["C_CONTIGUOUS"]
313
+
314
+ res = arr2d.copy("F")
315
+ assert not res._ndarray.flags["C_CONTIGUOUS"]
316
+ assert res._ndarray.flags["F_CONTIGUOUS"]
317
+
318
+ res = arr2d.copy("K")
319
+ assert res._ndarray.flags["C_CONTIGUOUS"]
320
+
321
+ res = arr2d.T.copy("K")
322
+ assert not res._ndarray.flags["C_CONTIGUOUS"]
323
+ assert res._ndarray.flags["F_CONTIGUOUS"]
324
+
325
+ # order not accepted by numpy
326
+ msg = r"order must be one of 'C', 'F', 'A', or 'K' \(got 'Q'\)"
327
+ with pytest.raises(ValueError, match=msg):
328
+ arr2d.copy("Q")
329
+
330
+ # neither contiguity
331
+ arr_nc = arr2d[::2]
332
+ assert not arr_nc._ndarray.flags["C_CONTIGUOUS"]
333
+ assert not arr_nc._ndarray.flags["F_CONTIGUOUS"]
334
+
335
+ assert arr_nc.copy()._ndarray.flags["C_CONTIGUOUS"]
336
+ assert not arr_nc.copy()._ndarray.flags["F_CONTIGUOUS"]
337
+
338
+ assert arr_nc.copy("C")._ndarray.flags["C_CONTIGUOUS"]
339
+ assert not arr_nc.copy("C")._ndarray.flags["F_CONTIGUOUS"]
340
+
341
+ assert not arr_nc.copy("F")._ndarray.flags["C_CONTIGUOUS"]
342
+ assert arr_nc.copy("F")._ndarray.flags["F_CONTIGUOUS"]
343
+
344
+ assert arr_nc.copy("K")._ndarray.flags["C_CONTIGUOUS"]
345
+ assert not arr_nc.copy("K")._ndarray.flags["F_CONTIGUOUS"]
venv/lib/python3.10/site-packages/pandas/tests/extension/base/dtype.py ADDED
@@ -0,0 +1,123 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+ from pandas.api.types import (
7
+ infer_dtype,
8
+ is_object_dtype,
9
+ is_string_dtype,
10
+ )
11
+
12
+
13
+ class BaseDtypeTests:
14
+ """Base class for ExtensionDtype classes"""
15
+
16
+ def test_name(self, dtype):
17
+ assert isinstance(dtype.name, str)
18
+
19
+ def test_kind(self, dtype):
20
+ valid = set("biufcmMOSUV")
21
+ assert dtype.kind in valid
22
+
23
+ def test_is_dtype_from_name(self, dtype):
24
+ result = type(dtype).is_dtype(dtype.name)
25
+ assert result is True
26
+
27
+ def test_is_dtype_unboxes_dtype(self, data, dtype):
28
+ assert dtype.is_dtype(data) is True
29
+
30
+ def test_is_dtype_from_self(self, dtype):
31
+ result = type(dtype).is_dtype(dtype)
32
+ assert result is True
33
+
34
+ def test_is_dtype_other_input(self, dtype):
35
+ assert dtype.is_dtype([1, 2, 3]) is False
36
+
37
+ def test_is_not_string_type(self, dtype):
38
+ assert not is_string_dtype(dtype)
39
+
40
+ def test_is_not_object_type(self, dtype):
41
+ assert not is_object_dtype(dtype)
42
+
43
+ def test_eq_with_str(self, dtype):
44
+ assert dtype == dtype.name
45
+ assert dtype != dtype.name + "-suffix"
46
+
47
+ def test_eq_with_numpy_object(self, dtype):
48
+ assert dtype != np.dtype("object")
49
+
50
+ def test_eq_with_self(self, dtype):
51
+ assert dtype == dtype
52
+ assert dtype != object()
53
+
54
+ def test_array_type(self, data, dtype):
55
+ assert dtype.construct_array_type() is type(data)
56
+
57
+ def test_check_dtype(self, data):
58
+ dtype = data.dtype
59
+
60
+ # check equivalency for using .dtypes
61
+ df = pd.DataFrame(
62
+ {
63
+ "A": pd.Series(data, dtype=dtype),
64
+ "B": data,
65
+ "C": pd.Series(["foo"] * len(data), dtype=object),
66
+ "D": 1,
67
+ }
68
+ )
69
+ result = df.dtypes == str(dtype)
70
+ assert np.dtype("int64") != "Int64"
71
+
72
+ expected = pd.Series([True, True, False, False], index=list("ABCD"))
73
+
74
+ tm.assert_series_equal(result, expected)
75
+
76
+ expected = pd.Series([True, True, False, False], index=list("ABCD"))
77
+ result = df.dtypes.apply(str) == str(dtype)
78
+ tm.assert_series_equal(result, expected)
79
+
80
+ def test_hashable(self, dtype):
81
+ hash(dtype) # no error
82
+
83
+ def test_str(self, dtype):
84
+ assert str(dtype) == dtype.name
85
+
86
+ def test_eq(self, dtype):
87
+ assert dtype == dtype.name
88
+ assert dtype != "anonther_type"
89
+
90
+ def test_construct_from_string_own_name(self, dtype):
91
+ result = dtype.construct_from_string(dtype.name)
92
+ assert type(result) is type(dtype)
93
+
94
+ # check OK as classmethod
95
+ result = type(dtype).construct_from_string(dtype.name)
96
+ assert type(result) is type(dtype)
97
+
98
+ def test_construct_from_string_another_type_raises(self, dtype):
99
+ msg = f"Cannot construct a '{type(dtype).__name__}' from 'another_type'"
100
+ with pytest.raises(TypeError, match=msg):
101
+ type(dtype).construct_from_string("another_type")
102
+
103
+ def test_construct_from_string_wrong_type_raises(self, dtype):
104
+ with pytest.raises(
105
+ TypeError,
106
+ match="'construct_from_string' expects a string, got <class 'int'>",
107
+ ):
108
+ type(dtype).construct_from_string(0)
109
+
110
+ def test_get_common_dtype(self, dtype):
111
+ # in practice we will not typically call this with a 1-length list
112
+ # (we shortcut to just use that dtype as the common dtype), but
113
+ # still testing as good practice to have this working (and it is the
114
+ # only case we can test in general)
115
+ assert dtype._get_common_dtype([dtype]) == dtype
116
+
117
+ @pytest.mark.parametrize("skipna", [True, False])
118
+ def test_infer_dtype(self, data, data_missing, skipna):
119
+ # only testing that this works without raising an error
120
+ res = infer_dtype(data, skipna=skipna)
121
+ assert isinstance(res, str)
122
+ res = infer_dtype(data_missing, skipna=skipna)
123
+ assert isinstance(res, str)
venv/lib/python3.10/site-packages/pandas/tests/extension/base/getitem.py ADDED
@@ -0,0 +1,469 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+
7
+
8
+ class BaseGetitemTests:
9
+ """Tests for ExtensionArray.__getitem__."""
10
+
11
+ def test_iloc_series(self, data):
12
+ ser = pd.Series(data)
13
+ result = ser.iloc[:4]
14
+ expected = pd.Series(data[:4])
15
+ tm.assert_series_equal(result, expected)
16
+
17
+ result = ser.iloc[[0, 1, 2, 3]]
18
+ tm.assert_series_equal(result, expected)
19
+
20
+ def test_iloc_frame(self, data):
21
+ df = pd.DataFrame({"A": data, "B": np.arange(len(data), dtype="int64")})
22
+ expected = pd.DataFrame({"A": data[:4]})
23
+
24
+ # slice -> frame
25
+ result = df.iloc[:4, [0]]
26
+ tm.assert_frame_equal(result, expected)
27
+
28
+ # sequence -> frame
29
+ result = df.iloc[[0, 1, 2, 3], [0]]
30
+ tm.assert_frame_equal(result, expected)
31
+
32
+ expected = pd.Series(data[:4], name="A")
33
+
34
+ # slice -> series
35
+ result = df.iloc[:4, 0]
36
+ tm.assert_series_equal(result, expected)
37
+
38
+ # sequence -> series
39
+ result = df.iloc[:4, 0]
40
+ tm.assert_series_equal(result, expected)
41
+
42
+ # GH#32959 slice columns with step
43
+ result = df.iloc[:, ::2]
44
+ tm.assert_frame_equal(result, df[["A"]])
45
+ result = df[["B", "A"]].iloc[:, ::2]
46
+ tm.assert_frame_equal(result, df[["B"]])
47
+
48
+ def test_iloc_frame_single_block(self, data):
49
+ # GH#32959 null slice along index, slice along columns with single-block
50
+ df = pd.DataFrame({"A": data})
51
+
52
+ result = df.iloc[:, :]
53
+ tm.assert_frame_equal(result, df)
54
+
55
+ result = df.iloc[:, :1]
56
+ tm.assert_frame_equal(result, df)
57
+
58
+ result = df.iloc[:, :2]
59
+ tm.assert_frame_equal(result, df)
60
+
61
+ result = df.iloc[:, ::2]
62
+ tm.assert_frame_equal(result, df)
63
+
64
+ result = df.iloc[:, 1:2]
65
+ tm.assert_frame_equal(result, df.iloc[:, :0])
66
+
67
+ result = df.iloc[:, -1:]
68
+ tm.assert_frame_equal(result, df)
69
+
70
+ def test_loc_series(self, data):
71
+ ser = pd.Series(data)
72
+ result = ser.loc[:3]
73
+ expected = pd.Series(data[:4])
74
+ tm.assert_series_equal(result, expected)
75
+
76
+ result = ser.loc[[0, 1, 2, 3]]
77
+ tm.assert_series_equal(result, expected)
78
+
79
+ def test_loc_frame(self, data):
80
+ df = pd.DataFrame({"A": data, "B": np.arange(len(data), dtype="int64")})
81
+ expected = pd.DataFrame({"A": data[:4]})
82
+
83
+ # slice -> frame
84
+ result = df.loc[:3, ["A"]]
85
+ tm.assert_frame_equal(result, expected)
86
+
87
+ # sequence -> frame
88
+ result = df.loc[[0, 1, 2, 3], ["A"]]
89
+ tm.assert_frame_equal(result, expected)
90
+
91
+ expected = pd.Series(data[:4], name="A")
92
+
93
+ # slice -> series
94
+ result = df.loc[:3, "A"]
95
+ tm.assert_series_equal(result, expected)
96
+
97
+ # sequence -> series
98
+ result = df.loc[:3, "A"]
99
+ tm.assert_series_equal(result, expected)
100
+
101
+ def test_loc_iloc_frame_single_dtype(self, data):
102
+ # GH#27110 bug in ExtensionBlock.iget caused df.iloc[n] to incorrectly
103
+ # return a scalar
104
+ df = pd.DataFrame({"A": data})
105
+ expected = pd.Series([data[2]], index=["A"], name=2, dtype=data.dtype)
106
+
107
+ result = df.loc[2]
108
+ tm.assert_series_equal(result, expected)
109
+
110
+ expected = pd.Series(
111
+ [data[-1]], index=["A"], name=len(data) - 1, dtype=data.dtype
112
+ )
113
+ result = df.iloc[-1]
114
+ tm.assert_series_equal(result, expected)
115
+
116
+ def test_getitem_scalar(self, data):
117
+ result = data[0]
118
+ assert isinstance(result, data.dtype.type)
119
+
120
+ result = pd.Series(data)[0]
121
+ assert isinstance(result, data.dtype.type)
122
+
123
+ def test_getitem_invalid(self, data):
124
+ # TODO: box over scalar, [scalar], (scalar,)?
125
+
126
+ msg = (
127
+ r"only integers, slices \(`:`\), ellipsis \(`...`\), numpy.newaxis "
128
+ r"\(`None`\) and integer or boolean arrays are valid indices"
129
+ )
130
+ with pytest.raises(IndexError, match=msg):
131
+ data["foo"]
132
+ with pytest.raises(IndexError, match=msg):
133
+ data[2.5]
134
+
135
+ ub = len(data)
136
+ msg = "|".join(
137
+ [
138
+ "list index out of range", # json
139
+ "index out of bounds", # pyarrow
140
+ "Out of bounds access", # Sparse
141
+ f"loc must be an integer between -{ub} and {ub}", # Sparse
142
+ f"index {ub+1} is out of bounds for axis 0 with size {ub}",
143
+ f"index -{ub+1} is out of bounds for axis 0 with size {ub}",
144
+ ]
145
+ )
146
+ with pytest.raises(IndexError, match=msg):
147
+ data[ub + 1]
148
+ with pytest.raises(IndexError, match=msg):
149
+ data[-ub - 1]
150
+
151
+ def test_getitem_scalar_na(self, data_missing, na_cmp, na_value):
152
+ result = data_missing[0]
153
+ assert na_cmp(result, na_value)
154
+
155
+ def test_getitem_empty(self, data):
156
+ # Indexing with empty list
157
+ result = data[[]]
158
+ assert len(result) == 0
159
+ assert isinstance(result, type(data))
160
+
161
+ expected = data[np.array([], dtype="int64")]
162
+ tm.assert_extension_array_equal(result, expected)
163
+
164
+ def test_getitem_mask(self, data):
165
+ # Empty mask, raw array
166
+ mask = np.zeros(len(data), dtype=bool)
167
+ result = data[mask]
168
+ assert len(result) == 0
169
+ assert isinstance(result, type(data))
170
+
171
+ # Empty mask, in series
172
+ mask = np.zeros(len(data), dtype=bool)
173
+ result = pd.Series(data)[mask]
174
+ assert len(result) == 0
175
+ assert result.dtype == data.dtype
176
+
177
+ # non-empty mask, raw array
178
+ mask[0] = True
179
+ result = data[mask]
180
+ assert len(result) == 1
181
+ assert isinstance(result, type(data))
182
+
183
+ # non-empty mask, in series
184
+ result = pd.Series(data)[mask]
185
+ assert len(result) == 1
186
+ assert result.dtype == data.dtype
187
+
188
+ def test_getitem_mask_raises(self, data):
189
+ mask = np.array([True, False])
190
+ msg = f"Boolean index has wrong length: 2 instead of {len(data)}"
191
+ with pytest.raises(IndexError, match=msg):
192
+ data[mask]
193
+
194
+ mask = pd.array(mask, dtype="boolean")
195
+ with pytest.raises(IndexError, match=msg):
196
+ data[mask]
197
+
198
+ def test_getitem_boolean_array_mask(self, data):
199
+ mask = pd.array(np.zeros(data.shape, dtype="bool"), dtype="boolean")
200
+ result = data[mask]
201
+ assert len(result) == 0
202
+ assert isinstance(result, type(data))
203
+
204
+ result = pd.Series(data)[mask]
205
+ assert len(result) == 0
206
+ assert result.dtype == data.dtype
207
+
208
+ mask[:5] = True
209
+ expected = data.take([0, 1, 2, 3, 4])
210
+ result = data[mask]
211
+ tm.assert_extension_array_equal(result, expected)
212
+
213
+ expected = pd.Series(expected)
214
+ result = pd.Series(data)[mask]
215
+ tm.assert_series_equal(result, expected)
216
+
217
+ def test_getitem_boolean_na_treated_as_false(self, data):
218
+ # https://github.com/pandas-dev/pandas/issues/31503
219
+ mask = pd.array(np.zeros(data.shape, dtype="bool"), dtype="boolean")
220
+ mask[:2] = pd.NA
221
+ mask[2:4] = True
222
+
223
+ result = data[mask]
224
+ expected = data[mask.fillna(False)]
225
+
226
+ tm.assert_extension_array_equal(result, expected)
227
+
228
+ s = pd.Series(data)
229
+
230
+ result = s[mask]
231
+ expected = s[mask.fillna(False)]
232
+
233
+ tm.assert_series_equal(result, expected)
234
+
235
+ @pytest.mark.parametrize(
236
+ "idx",
237
+ [[0, 1, 2], pd.array([0, 1, 2], dtype="Int64"), np.array([0, 1, 2])],
238
+ ids=["list", "integer-array", "numpy-array"],
239
+ )
240
+ def test_getitem_integer_array(self, data, idx):
241
+ result = data[idx]
242
+ assert len(result) == 3
243
+ assert isinstance(result, type(data))
244
+ expected = data.take([0, 1, 2])
245
+ tm.assert_extension_array_equal(result, expected)
246
+
247
+ expected = pd.Series(expected)
248
+ result = pd.Series(data)[idx]
249
+ tm.assert_series_equal(result, expected)
250
+
251
+ @pytest.mark.parametrize(
252
+ "idx",
253
+ [[0, 1, 2, pd.NA], pd.array([0, 1, 2, pd.NA], dtype="Int64")],
254
+ ids=["list", "integer-array"],
255
+ )
256
+ def test_getitem_integer_with_missing_raises(self, data, idx):
257
+ msg = "Cannot index with an integer indexer containing NA values"
258
+ with pytest.raises(ValueError, match=msg):
259
+ data[idx]
260
+
261
+ @pytest.mark.xfail(
262
+ reason="Tries label-based and raises KeyError; "
263
+ "in some cases raises when calling np.asarray"
264
+ )
265
+ @pytest.mark.parametrize(
266
+ "idx",
267
+ [[0, 1, 2, pd.NA], pd.array([0, 1, 2, pd.NA], dtype="Int64")],
268
+ ids=["list", "integer-array"],
269
+ )
270
+ def test_getitem_series_integer_with_missing_raises(self, data, idx):
271
+ msg = "Cannot index with an integer indexer containing NA values"
272
+ # TODO: this raises KeyError about labels not found (it tries label-based)
273
+
274
+ ser = pd.Series(data, index=[chr(100 + i) for i in range(len(data))])
275
+ with pytest.raises(ValueError, match=msg):
276
+ ser[idx]
277
+
278
+ def test_getitem_slice(self, data):
279
+ # getitem[slice] should return an array
280
+ result = data[slice(0)] # empty
281
+ assert isinstance(result, type(data))
282
+
283
+ result = data[slice(1)] # scalar
284
+ assert isinstance(result, type(data))
285
+
286
+ def test_getitem_ellipsis_and_slice(self, data):
287
+ # GH#40353 this is called from slice_block_rows
288
+ result = data[..., :]
289
+ tm.assert_extension_array_equal(result, data)
290
+
291
+ result = data[:, ...]
292
+ tm.assert_extension_array_equal(result, data)
293
+
294
+ result = data[..., :3]
295
+ tm.assert_extension_array_equal(result, data[:3])
296
+
297
+ result = data[:3, ...]
298
+ tm.assert_extension_array_equal(result, data[:3])
299
+
300
+ result = data[..., ::2]
301
+ tm.assert_extension_array_equal(result, data[::2])
302
+
303
+ result = data[::2, ...]
304
+ tm.assert_extension_array_equal(result, data[::2])
305
+
306
+ def test_get(self, data):
307
+ # GH 20882
308
+ s = pd.Series(data, index=[2 * i for i in range(len(data))])
309
+ assert s.get(4) == s.iloc[2]
310
+
311
+ result = s.get([4, 6])
312
+ expected = s.iloc[[2, 3]]
313
+ tm.assert_series_equal(result, expected)
314
+
315
+ result = s.get(slice(2))
316
+ expected = s.iloc[[0, 1]]
317
+ tm.assert_series_equal(result, expected)
318
+
319
+ assert s.get(-1) is None
320
+ assert s.get(s.index.max() + 1) is None
321
+
322
+ s = pd.Series(data[:6], index=list("abcdef"))
323
+ assert s.get("c") == s.iloc[2]
324
+
325
+ result = s.get(slice("b", "d"))
326
+ expected = s.iloc[[1, 2, 3]]
327
+ tm.assert_series_equal(result, expected)
328
+
329
+ result = s.get("Z")
330
+ assert result is None
331
+
332
+ msg = "Series.__getitem__ treating keys as positions is deprecated"
333
+ with tm.assert_produces_warning(FutureWarning, match=msg):
334
+ assert s.get(4) == s.iloc[4]
335
+ assert s.get(-1) == s.iloc[-1]
336
+ assert s.get(len(s)) is None
337
+
338
+ # GH 21257
339
+ s = pd.Series(data)
340
+ with tm.assert_produces_warning(None):
341
+ # GH#45324 make sure we aren't giving a spurious FutureWarning
342
+ s2 = s[::2]
343
+ assert s2.get(1) is None
344
+
345
+ def test_take_sequence(self, data):
346
+ result = pd.Series(data)[[0, 1, 3]]
347
+ assert result.iloc[0] == data[0]
348
+ assert result.iloc[1] == data[1]
349
+ assert result.iloc[2] == data[3]
350
+
351
+ def test_take(self, data, na_value, na_cmp):
352
+ result = data.take([0, -1])
353
+ assert result.dtype == data.dtype
354
+ assert result[0] == data[0]
355
+ assert result[1] == data[-1]
356
+
357
+ result = data.take([0, -1], allow_fill=True, fill_value=na_value)
358
+ assert result[0] == data[0]
359
+ assert na_cmp(result[1], na_value)
360
+
361
+ with pytest.raises(IndexError, match="out of bounds"):
362
+ data.take([len(data) + 1])
363
+
364
+ def test_take_empty(self, data, na_value, na_cmp):
365
+ empty = data[:0]
366
+
367
+ result = empty.take([-1], allow_fill=True)
368
+ assert na_cmp(result[0], na_value)
369
+
370
+ msg = "cannot do a non-empty take from an empty axes|out of bounds"
371
+
372
+ with pytest.raises(IndexError, match=msg):
373
+ empty.take([-1])
374
+
375
+ with pytest.raises(IndexError, match="cannot do a non-empty take"):
376
+ empty.take([0, 1])
377
+
378
+ def test_take_negative(self, data):
379
+ # https://github.com/pandas-dev/pandas/issues/20640
380
+ n = len(data)
381
+ result = data.take([0, -n, n - 1, -1])
382
+ expected = data.take([0, 0, n - 1, n - 1])
383
+ tm.assert_extension_array_equal(result, expected)
384
+
385
+ def test_take_non_na_fill_value(self, data_missing):
386
+ fill_value = data_missing[1] # valid
387
+ na = data_missing[0]
388
+
389
+ arr = data_missing._from_sequence(
390
+ [na, fill_value, na], dtype=data_missing.dtype
391
+ )
392
+ result = arr.take([-1, 1], fill_value=fill_value, allow_fill=True)
393
+ expected = arr.take([1, 1])
394
+ tm.assert_extension_array_equal(result, expected)
395
+
396
+ def test_take_pandas_style_negative_raises(self, data, na_value):
397
+ with pytest.raises(ValueError, match=""):
398
+ data.take([0, -2], fill_value=na_value, allow_fill=True)
399
+
400
+ @pytest.mark.parametrize("allow_fill", [True, False])
401
+ def test_take_out_of_bounds_raises(self, data, allow_fill):
402
+ arr = data[:3]
403
+
404
+ with pytest.raises(IndexError, match="out of bounds|out-of-bounds"):
405
+ arr.take(np.asarray([0, 3]), allow_fill=allow_fill)
406
+
407
+ def test_take_series(self, data):
408
+ s = pd.Series(data)
409
+ result = s.take([0, -1])
410
+ expected = pd.Series(
411
+ data._from_sequence([data[0], data[len(data) - 1]], dtype=s.dtype),
412
+ index=[0, len(data) - 1],
413
+ )
414
+ tm.assert_series_equal(result, expected)
415
+
416
+ def test_reindex(self, data, na_value):
417
+ s = pd.Series(data)
418
+ result = s.reindex([0, 1, 3])
419
+ expected = pd.Series(data.take([0, 1, 3]), index=[0, 1, 3])
420
+ tm.assert_series_equal(result, expected)
421
+
422
+ n = len(data)
423
+ result = s.reindex([-1, 0, n])
424
+ expected = pd.Series(
425
+ data._from_sequence([na_value, data[0], na_value], dtype=s.dtype),
426
+ index=[-1, 0, n],
427
+ )
428
+ tm.assert_series_equal(result, expected)
429
+
430
+ result = s.reindex([n, n + 1])
431
+ expected = pd.Series(
432
+ data._from_sequence([na_value, na_value], dtype=s.dtype), index=[n, n + 1]
433
+ )
434
+ tm.assert_series_equal(result, expected)
435
+
436
+ def test_reindex_non_na_fill_value(self, data_missing):
437
+ valid = data_missing[1]
438
+ na = data_missing[0]
439
+
440
+ arr = data_missing._from_sequence([na, valid], dtype=data_missing.dtype)
441
+ ser = pd.Series(arr)
442
+ result = ser.reindex([0, 1, 2], fill_value=valid)
443
+ expected = pd.Series(
444
+ data_missing._from_sequence([na, valid, valid], dtype=data_missing.dtype)
445
+ )
446
+
447
+ tm.assert_series_equal(result, expected)
448
+
449
+ def test_loc_len1(self, data):
450
+ # see GH-27785 take_nd with indexer of len 1 resulting in wrong ndim
451
+ df = pd.DataFrame({"A": data})
452
+ res = df.loc[[0], "A"]
453
+ assert res.ndim == 1
454
+ assert res._mgr.arrays[0].ndim == 1
455
+ if hasattr(res._mgr, "blocks"):
456
+ assert res._mgr._block.ndim == 1
457
+
458
+ def test_item(self, data):
459
+ # https://github.com/pandas-dev/pandas/pull/30175
460
+ s = pd.Series(data)
461
+ result = s[:1].item()
462
+ assert result == data[0]
463
+
464
+ msg = "can only convert an array of size 1 to a Python scalar"
465
+ with pytest.raises(ValueError, match=msg):
466
+ s[:0].item()
467
+
468
+ with pytest.raises(ValueError, match=msg):
469
+ s.item()
venv/lib/python3.10/site-packages/pandas/tests/extension/base/interface.py ADDED
@@ -0,0 +1,137 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.core.dtypes.cast import construct_1d_object_array_from_listlike
5
+ from pandas.core.dtypes.common import is_extension_array_dtype
6
+ from pandas.core.dtypes.dtypes import ExtensionDtype
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+
11
+
12
+ class BaseInterfaceTests:
13
+ """Tests that the basic interface is satisfied."""
14
+
15
+ # ------------------------------------------------------------------------
16
+ # Interface
17
+ # ------------------------------------------------------------------------
18
+
19
+ def test_len(self, data):
20
+ assert len(data) == 100
21
+
22
+ def test_size(self, data):
23
+ assert data.size == 100
24
+
25
+ def test_ndim(self, data):
26
+ assert data.ndim == 1
27
+
28
+ def test_can_hold_na_valid(self, data):
29
+ # GH-20761
30
+ assert data._can_hold_na is True
31
+
32
+ def test_contains(self, data, data_missing):
33
+ # GH-37867
34
+ # Tests for membership checks. Membership checks for nan-likes is tricky and
35
+ # the settled on rule is: `nan_like in arr` is True if nan_like is
36
+ # arr.dtype.na_value and arr.isna().any() is True. Else the check returns False.
37
+
38
+ na_value = data.dtype.na_value
39
+ # ensure data without missing values
40
+ data = data[~data.isna()]
41
+
42
+ # first elements are non-missing
43
+ assert data[0] in data
44
+ assert data_missing[0] in data_missing
45
+
46
+ # check the presence of na_value
47
+ assert na_value in data_missing
48
+ assert na_value not in data
49
+
50
+ # the data can never contain other nan-likes than na_value
51
+ for na_value_obj in tm.NULL_OBJECTS:
52
+ if na_value_obj is na_value or type(na_value_obj) == type(na_value):
53
+ # type check for e.g. two instances of Decimal("NAN")
54
+ continue
55
+ assert na_value_obj not in data
56
+ assert na_value_obj not in data_missing
57
+
58
+ def test_memory_usage(self, data):
59
+ s = pd.Series(data)
60
+ result = s.memory_usage(index=False)
61
+ assert result == s.nbytes
62
+
63
+ def test_array_interface(self, data):
64
+ result = np.array(data)
65
+ assert result[0] == data[0]
66
+
67
+ result = np.array(data, dtype=object)
68
+ expected = np.array(list(data), dtype=object)
69
+ if expected.ndim > 1:
70
+ # nested data, explicitly construct as 1D
71
+ expected = construct_1d_object_array_from_listlike(list(data))
72
+ tm.assert_numpy_array_equal(result, expected)
73
+
74
+ def test_is_extension_array_dtype(self, data):
75
+ assert is_extension_array_dtype(data)
76
+ assert is_extension_array_dtype(data.dtype)
77
+ assert is_extension_array_dtype(pd.Series(data))
78
+ assert isinstance(data.dtype, ExtensionDtype)
79
+
80
+ def test_no_values_attribute(self, data):
81
+ # GH-20735: EA's with .values attribute give problems with internal
82
+ # code, disallowing this for now until solved
83
+ assert not hasattr(data, "values")
84
+ assert not hasattr(data, "_values")
85
+
86
+ def test_is_numeric_honored(self, data):
87
+ result = pd.Series(data)
88
+ if hasattr(result._mgr, "blocks"):
89
+ assert result._mgr.blocks[0].is_numeric is data.dtype._is_numeric
90
+
91
+ def test_isna_extension_array(self, data_missing):
92
+ # If your `isna` returns an ExtensionArray, you must also implement
93
+ # _reduce. At the *very* least, you must implement any and all
94
+ na = data_missing.isna()
95
+ if is_extension_array_dtype(na):
96
+ assert na._reduce("any")
97
+ assert na.any()
98
+
99
+ assert not na._reduce("all")
100
+ assert not na.all()
101
+
102
+ assert na.dtype._is_boolean
103
+
104
+ def test_copy(self, data):
105
+ # GH#27083 removing deep keyword from EA.copy
106
+ assert data[0] != data[1]
107
+ result = data.copy()
108
+
109
+ if data.dtype._is_immutable:
110
+ pytest.skip(f"test_copy assumes mutability and {data.dtype} is immutable")
111
+
112
+ data[1] = data[0]
113
+ assert result[1] != result[0]
114
+
115
+ def test_view(self, data):
116
+ # view with no dtype should return a shallow copy, *not* the same
117
+ # object
118
+ assert data[1] != data[0]
119
+
120
+ result = data.view()
121
+ assert result is not data
122
+ assert type(result) == type(data)
123
+
124
+ if data.dtype._is_immutable:
125
+ pytest.skip(f"test_view assumes mutability and {data.dtype} is immutable")
126
+
127
+ result[1] = result[0]
128
+ assert data[1] == data[0]
129
+
130
+ # check specifically that the `dtype` kwarg is accepted
131
+ data.view(dtype=None)
132
+
133
+ def test_tolist(self, data):
134
+ result = data.tolist()
135
+ expected = list(data)
136
+ assert isinstance(result, list)
137
+ assert result == expected
venv/lib/python3.10/site-packages/pandas/tests/extension/base/methods.py ADDED
@@ -0,0 +1,720 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import inspect
2
+ import operator
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas._typing import Dtype
8
+
9
+ from pandas.core.dtypes.common import is_bool_dtype
10
+ from pandas.core.dtypes.dtypes import NumpyEADtype
11
+ from pandas.core.dtypes.missing import na_value_for_dtype
12
+
13
+ import pandas as pd
14
+ import pandas._testing as tm
15
+ from pandas.core.sorting import nargsort
16
+
17
+
18
+ class BaseMethodsTests:
19
+ """Various Series and DataFrame methods."""
20
+
21
+ def test_hash_pandas_object(self, data):
22
+ # _hash_pandas_object should return a uint64 ndarray of the same length
23
+ # as the data
24
+ from pandas.core.util.hashing import _default_hash_key
25
+
26
+ res = data._hash_pandas_object(
27
+ encoding="utf-8", hash_key=_default_hash_key, categorize=False
28
+ )
29
+ assert res.dtype == np.uint64
30
+ assert res.shape == data.shape
31
+
32
+ def test_value_counts_default_dropna(self, data):
33
+ # make sure we have consistent default dropna kwarg
34
+ if not hasattr(data, "value_counts"):
35
+ pytest.skip(f"value_counts is not implemented for {type(data)}")
36
+ sig = inspect.signature(data.value_counts)
37
+ kwarg = sig.parameters["dropna"]
38
+ assert kwarg.default is True
39
+
40
+ @pytest.mark.parametrize("dropna", [True, False])
41
+ def test_value_counts(self, all_data, dropna):
42
+ all_data = all_data[:10]
43
+ if dropna:
44
+ other = all_data[~all_data.isna()]
45
+ else:
46
+ other = all_data
47
+
48
+ result = pd.Series(all_data).value_counts(dropna=dropna).sort_index()
49
+ expected = pd.Series(other).value_counts(dropna=dropna).sort_index()
50
+
51
+ tm.assert_series_equal(result, expected)
52
+
53
+ def test_value_counts_with_normalize(self, data):
54
+ # GH 33172
55
+ data = data[:10].unique()
56
+ values = np.array(data[~data.isna()])
57
+ ser = pd.Series(data, dtype=data.dtype)
58
+
59
+ result = ser.value_counts(normalize=True).sort_index()
60
+
61
+ if not isinstance(data, pd.Categorical):
62
+ expected = pd.Series(
63
+ [1 / len(values)] * len(values), index=result.index, name="proportion"
64
+ )
65
+ else:
66
+ expected = pd.Series(0.0, index=result.index, name="proportion")
67
+ expected[result > 0] = 1 / len(values)
68
+
69
+ if getattr(data.dtype, "storage", "") == "pyarrow" or isinstance(
70
+ data.dtype, pd.ArrowDtype
71
+ ):
72
+ # TODO: avoid special-casing
73
+ expected = expected.astype("double[pyarrow]")
74
+ elif getattr(data.dtype, "storage", "") == "pyarrow_numpy":
75
+ # TODO: avoid special-casing
76
+ expected = expected.astype("float64")
77
+ elif na_value_for_dtype(data.dtype) is pd.NA:
78
+ # TODO(GH#44692): avoid special-casing
79
+ expected = expected.astype("Float64")
80
+
81
+ tm.assert_series_equal(result, expected)
82
+
83
+ def test_count(self, data_missing):
84
+ df = pd.DataFrame({"A": data_missing})
85
+ result = df.count(axis="columns")
86
+ expected = pd.Series([0, 1])
87
+ tm.assert_series_equal(result, expected)
88
+
89
+ def test_series_count(self, data_missing):
90
+ # GH#26835
91
+ ser = pd.Series(data_missing)
92
+ result = ser.count()
93
+ expected = 1
94
+ assert result == expected
95
+
96
+ def test_apply_simple_series(self, data):
97
+ result = pd.Series(data).apply(id)
98
+ assert isinstance(result, pd.Series)
99
+
100
+ @pytest.mark.parametrize("na_action", [None, "ignore"])
101
+ def test_map(self, data_missing, na_action):
102
+ result = data_missing.map(lambda x: x, na_action=na_action)
103
+ expected = data_missing.to_numpy()
104
+ tm.assert_numpy_array_equal(result, expected)
105
+
106
+ def test_argsort(self, data_for_sorting):
107
+ result = pd.Series(data_for_sorting).argsort()
108
+ # argsort result gets passed to take, so should be np.intp
109
+ expected = pd.Series(np.array([2, 0, 1], dtype=np.intp))
110
+ tm.assert_series_equal(result, expected)
111
+
112
+ def test_argsort_missing_array(self, data_missing_for_sorting):
113
+ result = data_missing_for_sorting.argsort()
114
+ # argsort result gets passed to take, so should be np.intp
115
+ expected = np.array([2, 0, 1], dtype=np.intp)
116
+ tm.assert_numpy_array_equal(result, expected)
117
+
118
+ def test_argsort_missing(self, data_missing_for_sorting):
119
+ msg = "The behavior of Series.argsort in the presence of NA values"
120
+ with tm.assert_produces_warning(FutureWarning, match=msg):
121
+ result = pd.Series(data_missing_for_sorting).argsort()
122
+ expected = pd.Series(np.array([1, -1, 0], dtype=np.intp))
123
+ tm.assert_series_equal(result, expected)
124
+
125
+ def test_argmin_argmax(self, data_for_sorting, data_missing_for_sorting, na_value):
126
+ # GH 24382
127
+ is_bool = data_for_sorting.dtype._is_boolean
128
+
129
+ exp_argmax = 1
130
+ exp_argmax_repeated = 3
131
+ if is_bool:
132
+ # See data_for_sorting docstring
133
+ exp_argmax = 0
134
+ exp_argmax_repeated = 1
135
+
136
+ # data_for_sorting -> [B, C, A] with A < B < C
137
+ assert data_for_sorting.argmax() == exp_argmax
138
+ assert data_for_sorting.argmin() == 2
139
+
140
+ # with repeated values -> first occurrence
141
+ data = data_for_sorting.take([2, 0, 0, 1, 1, 2])
142
+ assert data.argmax() == exp_argmax_repeated
143
+ assert data.argmin() == 0
144
+
145
+ # with missing values
146
+ # data_missing_for_sorting -> [B, NA, A] with A < B and NA missing.
147
+ assert data_missing_for_sorting.argmax() == 0
148
+ assert data_missing_for_sorting.argmin() == 2
149
+
150
+ @pytest.mark.parametrize("method", ["argmax", "argmin"])
151
+ def test_argmin_argmax_empty_array(self, method, data):
152
+ # GH 24382
153
+ err_msg = "attempt to get"
154
+ with pytest.raises(ValueError, match=err_msg):
155
+ getattr(data[:0], method)()
156
+
157
+ @pytest.mark.parametrize("method", ["argmax", "argmin"])
158
+ def test_argmin_argmax_all_na(self, method, data, na_value):
159
+ # all missing with skipna=True is the same as empty
160
+ err_msg = "attempt to get"
161
+ data_na = type(data)._from_sequence([na_value, na_value], dtype=data.dtype)
162
+ with pytest.raises(ValueError, match=err_msg):
163
+ getattr(data_na, method)()
164
+
165
+ @pytest.mark.parametrize(
166
+ "op_name, skipna, expected",
167
+ [
168
+ ("idxmax", True, 0),
169
+ ("idxmin", True, 2),
170
+ ("argmax", True, 0),
171
+ ("argmin", True, 2),
172
+ ("idxmax", False, np.nan),
173
+ ("idxmin", False, np.nan),
174
+ ("argmax", False, -1),
175
+ ("argmin", False, -1),
176
+ ],
177
+ )
178
+ def test_argreduce_series(
179
+ self, data_missing_for_sorting, op_name, skipna, expected
180
+ ):
181
+ # data_missing_for_sorting -> [B, NA, A] with A < B and NA missing.
182
+ warn = None
183
+ msg = "The behavior of Series.argmax/argmin"
184
+ if op_name.startswith("arg") and expected == -1:
185
+ warn = FutureWarning
186
+ if op_name.startswith("idx") and np.isnan(expected):
187
+ warn = FutureWarning
188
+ msg = f"The behavior of Series.{op_name}"
189
+ ser = pd.Series(data_missing_for_sorting)
190
+ with tm.assert_produces_warning(warn, match=msg):
191
+ result = getattr(ser, op_name)(skipna=skipna)
192
+ tm.assert_almost_equal(result, expected)
193
+
194
+ def test_argmax_argmin_no_skipna_notimplemented(self, data_missing_for_sorting):
195
+ # GH#38733
196
+ data = data_missing_for_sorting
197
+
198
+ with pytest.raises(NotImplementedError, match=""):
199
+ data.argmin(skipna=False)
200
+
201
+ with pytest.raises(NotImplementedError, match=""):
202
+ data.argmax(skipna=False)
203
+
204
+ @pytest.mark.parametrize(
205
+ "na_position, expected",
206
+ [
207
+ ("last", np.array([2, 0, 1], dtype=np.dtype("intp"))),
208
+ ("first", np.array([1, 2, 0], dtype=np.dtype("intp"))),
209
+ ],
210
+ )
211
+ def test_nargsort(self, data_missing_for_sorting, na_position, expected):
212
+ # GH 25439
213
+ result = nargsort(data_missing_for_sorting, na_position=na_position)
214
+ tm.assert_numpy_array_equal(result, expected)
215
+
216
+ @pytest.mark.parametrize("ascending", [True, False])
217
+ def test_sort_values(self, data_for_sorting, ascending, sort_by_key):
218
+ ser = pd.Series(data_for_sorting)
219
+ result = ser.sort_values(ascending=ascending, key=sort_by_key)
220
+ expected = ser.iloc[[2, 0, 1]]
221
+ if not ascending:
222
+ # GH 35922. Expect stable sort
223
+ if ser.nunique() == 2:
224
+ expected = ser.iloc[[0, 1, 2]]
225
+ else:
226
+ expected = ser.iloc[[1, 0, 2]]
227
+
228
+ tm.assert_series_equal(result, expected)
229
+
230
+ @pytest.mark.parametrize("ascending", [True, False])
231
+ def test_sort_values_missing(
232
+ self, data_missing_for_sorting, ascending, sort_by_key
233
+ ):
234
+ ser = pd.Series(data_missing_for_sorting)
235
+ result = ser.sort_values(ascending=ascending, key=sort_by_key)
236
+ if ascending:
237
+ expected = ser.iloc[[2, 0, 1]]
238
+ else:
239
+ expected = ser.iloc[[0, 2, 1]]
240
+ tm.assert_series_equal(result, expected)
241
+
242
+ @pytest.mark.parametrize("ascending", [True, False])
243
+ def test_sort_values_frame(self, data_for_sorting, ascending):
244
+ df = pd.DataFrame({"A": [1, 2, 1], "B": data_for_sorting})
245
+ result = df.sort_values(["A", "B"])
246
+ expected = pd.DataFrame(
247
+ {"A": [1, 1, 2], "B": data_for_sorting.take([2, 0, 1])}, index=[2, 0, 1]
248
+ )
249
+ tm.assert_frame_equal(result, expected)
250
+
251
+ @pytest.mark.parametrize("keep", ["first", "last", False])
252
+ def test_duplicated(self, data, keep):
253
+ arr = data.take([0, 1, 0, 1])
254
+ result = arr.duplicated(keep=keep)
255
+ if keep == "first":
256
+ expected = np.array([False, False, True, True])
257
+ elif keep == "last":
258
+ expected = np.array([True, True, False, False])
259
+ else:
260
+ expected = np.array([True, True, True, True])
261
+ tm.assert_numpy_array_equal(result, expected)
262
+
263
+ @pytest.mark.parametrize("box", [pd.Series, lambda x: x])
264
+ @pytest.mark.parametrize("method", [lambda x: x.unique(), pd.unique])
265
+ def test_unique(self, data, box, method):
266
+ duplicated = box(data._from_sequence([data[0], data[0]], dtype=data.dtype))
267
+
268
+ result = method(duplicated)
269
+
270
+ assert len(result) == 1
271
+ assert isinstance(result, type(data))
272
+ assert result[0] == duplicated[0]
273
+
274
+ def test_factorize(self, data_for_grouping):
275
+ codes, uniques = pd.factorize(data_for_grouping, use_na_sentinel=True)
276
+
277
+ is_bool = data_for_grouping.dtype._is_boolean
278
+ if is_bool:
279
+ # only 2 unique values
280
+ expected_codes = np.array([0, 0, -1, -1, 1, 1, 0, 0], dtype=np.intp)
281
+ expected_uniques = data_for_grouping.take([0, 4])
282
+ else:
283
+ expected_codes = np.array([0, 0, -1, -1, 1, 1, 0, 2], dtype=np.intp)
284
+ expected_uniques = data_for_grouping.take([0, 4, 7])
285
+
286
+ tm.assert_numpy_array_equal(codes, expected_codes)
287
+ tm.assert_extension_array_equal(uniques, expected_uniques)
288
+
289
+ def test_factorize_equivalence(self, data_for_grouping):
290
+ codes_1, uniques_1 = pd.factorize(data_for_grouping, use_na_sentinel=True)
291
+ codes_2, uniques_2 = data_for_grouping.factorize(use_na_sentinel=True)
292
+
293
+ tm.assert_numpy_array_equal(codes_1, codes_2)
294
+ tm.assert_extension_array_equal(uniques_1, uniques_2)
295
+ assert len(uniques_1) == len(pd.unique(uniques_1))
296
+ assert uniques_1.dtype == data_for_grouping.dtype
297
+
298
+ def test_factorize_empty(self, data):
299
+ codes, uniques = pd.factorize(data[:0])
300
+ expected_codes = np.array([], dtype=np.intp)
301
+ expected_uniques = type(data)._from_sequence([], dtype=data[:0].dtype)
302
+
303
+ tm.assert_numpy_array_equal(codes, expected_codes)
304
+ tm.assert_extension_array_equal(uniques, expected_uniques)
305
+
306
+ def test_fillna_copy_frame(self, data_missing):
307
+ arr = data_missing.take([1, 1])
308
+ df = pd.DataFrame({"A": arr})
309
+ df_orig = df.copy()
310
+
311
+ filled_val = df.iloc[0, 0]
312
+ result = df.fillna(filled_val)
313
+
314
+ result.iloc[0, 0] = filled_val
315
+
316
+ tm.assert_frame_equal(df, df_orig)
317
+
318
+ def test_fillna_copy_series(self, data_missing):
319
+ arr = data_missing.take([1, 1])
320
+ ser = pd.Series(arr, copy=False)
321
+ ser_orig = ser.copy()
322
+
323
+ filled_val = ser[0]
324
+ result = ser.fillna(filled_val)
325
+ result.iloc[0] = filled_val
326
+
327
+ tm.assert_series_equal(ser, ser_orig)
328
+
329
+ def test_fillna_length_mismatch(self, data_missing):
330
+ msg = "Length of 'value' does not match."
331
+ with pytest.raises(ValueError, match=msg):
332
+ data_missing.fillna(data_missing.take([1]))
333
+
334
+ # Subclasses can override if we expect e.g Sparse[bool], boolean, pyarrow[bool]
335
+ _combine_le_expected_dtype: Dtype = NumpyEADtype("bool")
336
+
337
+ def test_combine_le(self, data_repeated):
338
+ # GH 20825
339
+ # Test that combine works when doing a <= (le) comparison
340
+ orig_data1, orig_data2 = data_repeated(2)
341
+ s1 = pd.Series(orig_data1)
342
+ s2 = pd.Series(orig_data2)
343
+ result = s1.combine(s2, lambda x1, x2: x1 <= x2)
344
+ expected = pd.Series(
345
+ pd.array(
346
+ [a <= b for (a, b) in zip(list(orig_data1), list(orig_data2))],
347
+ dtype=self._combine_le_expected_dtype,
348
+ )
349
+ )
350
+ tm.assert_series_equal(result, expected)
351
+
352
+ val = s1.iloc[0]
353
+ result = s1.combine(val, lambda x1, x2: x1 <= x2)
354
+ expected = pd.Series(
355
+ pd.array(
356
+ [a <= val for a in list(orig_data1)],
357
+ dtype=self._combine_le_expected_dtype,
358
+ )
359
+ )
360
+ tm.assert_series_equal(result, expected)
361
+
362
+ def test_combine_add(self, data_repeated):
363
+ # GH 20825
364
+ orig_data1, orig_data2 = data_repeated(2)
365
+ s1 = pd.Series(orig_data1)
366
+ s2 = pd.Series(orig_data2)
367
+
368
+ # Check if the operation is supported pointwise for our scalars. If not,
369
+ # we will expect Series.combine to raise as well.
370
+ try:
371
+ with np.errstate(over="ignore"):
372
+ expected = pd.Series(
373
+ orig_data1._from_sequence(
374
+ [a + b for (a, b) in zip(list(orig_data1), list(orig_data2))]
375
+ )
376
+ )
377
+ except TypeError:
378
+ # If the operation is not supported pointwise for our scalars,
379
+ # then Series.combine should also raise
380
+ with pytest.raises(TypeError):
381
+ s1.combine(s2, lambda x1, x2: x1 + x2)
382
+ return
383
+
384
+ result = s1.combine(s2, lambda x1, x2: x1 + x2)
385
+ tm.assert_series_equal(result, expected)
386
+
387
+ val = s1.iloc[0]
388
+ result = s1.combine(val, lambda x1, x2: x1 + x2)
389
+ expected = pd.Series(
390
+ orig_data1._from_sequence([a + val for a in list(orig_data1)])
391
+ )
392
+ tm.assert_series_equal(result, expected)
393
+
394
+ def test_combine_first(self, data):
395
+ # https://github.com/pandas-dev/pandas/issues/24147
396
+ a = pd.Series(data[:3])
397
+ b = pd.Series(data[2:5], index=[2, 3, 4])
398
+ result = a.combine_first(b)
399
+ expected = pd.Series(data[:5])
400
+ tm.assert_series_equal(result, expected)
401
+
402
+ @pytest.mark.parametrize("frame", [True, False])
403
+ @pytest.mark.parametrize(
404
+ "periods, indices",
405
+ [(-2, [2, 3, 4, -1, -1]), (0, [0, 1, 2, 3, 4]), (2, [-1, -1, 0, 1, 2])],
406
+ )
407
+ def test_container_shift(self, data, frame, periods, indices):
408
+ # https://github.com/pandas-dev/pandas/issues/22386
409
+ subset = data[:5]
410
+ data = pd.Series(subset, name="A")
411
+ expected = pd.Series(subset.take(indices, allow_fill=True), name="A")
412
+
413
+ if frame:
414
+ result = data.to_frame(name="A").assign(B=1).shift(periods)
415
+ expected = pd.concat(
416
+ [expected, pd.Series([1] * 5, name="B").shift(periods)], axis=1
417
+ )
418
+ compare = tm.assert_frame_equal
419
+ else:
420
+ result = data.shift(periods)
421
+ compare = tm.assert_series_equal
422
+
423
+ compare(result, expected)
424
+
425
+ def test_shift_0_periods(self, data):
426
+ # GH#33856 shifting with periods=0 should return a copy, not same obj
427
+ result = data.shift(0)
428
+ assert data[0] != data[1] # otherwise below is invalid
429
+ data[0] = data[1]
430
+ assert result[0] != result[1] # i.e. not the same object/view
431
+
432
+ @pytest.mark.parametrize("periods", [1, -2])
433
+ def test_diff(self, data, periods):
434
+ data = data[:5]
435
+ if is_bool_dtype(data.dtype):
436
+ op = operator.xor
437
+ else:
438
+ op = operator.sub
439
+ try:
440
+ # does this array implement ops?
441
+ op(data, data)
442
+ except Exception:
443
+ pytest.skip(f"{type(data)} does not support diff")
444
+ s = pd.Series(data)
445
+ result = s.diff(periods)
446
+ expected = pd.Series(op(data, data.shift(periods)))
447
+ tm.assert_series_equal(result, expected)
448
+
449
+ df = pd.DataFrame({"A": data, "B": [1.0] * 5})
450
+ result = df.diff(periods)
451
+ if periods == 1:
452
+ b = [np.nan, 0, 0, 0, 0]
453
+ else:
454
+ b = [0, 0, 0, np.nan, np.nan]
455
+ expected = pd.DataFrame({"A": expected, "B": b})
456
+ tm.assert_frame_equal(result, expected)
457
+
458
+ @pytest.mark.parametrize(
459
+ "periods, indices",
460
+ [[-4, [-1, -1]], [-1, [1, -1]], [0, [0, 1]], [1, [-1, 0]], [4, [-1, -1]]],
461
+ )
462
+ def test_shift_non_empty_array(self, data, periods, indices):
463
+ # https://github.com/pandas-dev/pandas/issues/23911
464
+ subset = data[:2]
465
+ result = subset.shift(periods)
466
+ expected = subset.take(indices, allow_fill=True)
467
+ tm.assert_extension_array_equal(result, expected)
468
+
469
+ @pytest.mark.parametrize("periods", [-4, -1, 0, 1, 4])
470
+ def test_shift_empty_array(self, data, periods):
471
+ # https://github.com/pandas-dev/pandas/issues/23911
472
+ empty = data[:0]
473
+ result = empty.shift(periods)
474
+ expected = empty
475
+ tm.assert_extension_array_equal(result, expected)
476
+
477
+ def test_shift_zero_copies(self, data):
478
+ # GH#31502
479
+ result = data.shift(0)
480
+ assert result is not data
481
+
482
+ result = data[:0].shift(2)
483
+ assert result is not data
484
+
485
+ def test_shift_fill_value(self, data):
486
+ arr = data[:4]
487
+ fill_value = data[0]
488
+ result = arr.shift(1, fill_value=fill_value)
489
+ expected = data.take([0, 0, 1, 2])
490
+ tm.assert_extension_array_equal(result, expected)
491
+
492
+ result = arr.shift(-2, fill_value=fill_value)
493
+ expected = data.take([2, 3, 0, 0])
494
+ tm.assert_extension_array_equal(result, expected)
495
+
496
+ def test_not_hashable(self, data):
497
+ # We are in general mutable, so not hashable
498
+ with pytest.raises(TypeError, match="unhashable type"):
499
+ hash(data)
500
+
501
+ def test_hash_pandas_object_works(self, data, as_frame):
502
+ # https://github.com/pandas-dev/pandas/issues/23066
503
+ data = pd.Series(data)
504
+ if as_frame:
505
+ data = data.to_frame()
506
+ a = pd.util.hash_pandas_object(data)
507
+ b = pd.util.hash_pandas_object(data)
508
+ tm.assert_equal(a, b)
509
+
510
+ def test_searchsorted(self, data_for_sorting, as_series):
511
+ if data_for_sorting.dtype._is_boolean:
512
+ return self._test_searchsorted_bool_dtypes(data_for_sorting, as_series)
513
+
514
+ b, c, a = data_for_sorting
515
+ arr = data_for_sorting.take([2, 0, 1]) # to get [a, b, c]
516
+
517
+ if as_series:
518
+ arr = pd.Series(arr)
519
+ assert arr.searchsorted(a) == 0
520
+ assert arr.searchsorted(a, side="right") == 1
521
+
522
+ assert arr.searchsorted(b) == 1
523
+ assert arr.searchsorted(b, side="right") == 2
524
+
525
+ assert arr.searchsorted(c) == 2
526
+ assert arr.searchsorted(c, side="right") == 3
527
+
528
+ result = arr.searchsorted(arr.take([0, 2]))
529
+ expected = np.array([0, 2], dtype=np.intp)
530
+
531
+ tm.assert_numpy_array_equal(result, expected)
532
+
533
+ # sorter
534
+ sorter = np.array([1, 2, 0])
535
+ assert data_for_sorting.searchsorted(a, sorter=sorter) == 0
536
+
537
+ def _test_searchsorted_bool_dtypes(self, data_for_sorting, as_series):
538
+ # We call this from test_searchsorted in cases where we have a
539
+ # boolean-like dtype. The non-bool test assumes we have more than 2
540
+ # unique values.
541
+ dtype = data_for_sorting.dtype
542
+ data_for_sorting = pd.array([True, False], dtype=dtype)
543
+ b, a = data_for_sorting
544
+ arr = type(data_for_sorting)._from_sequence([a, b])
545
+
546
+ if as_series:
547
+ arr = pd.Series(arr)
548
+ assert arr.searchsorted(a) == 0
549
+ assert arr.searchsorted(a, side="right") == 1
550
+
551
+ assert arr.searchsorted(b) == 1
552
+ assert arr.searchsorted(b, side="right") == 2
553
+
554
+ result = arr.searchsorted(arr.take([0, 1]))
555
+ expected = np.array([0, 1], dtype=np.intp)
556
+
557
+ tm.assert_numpy_array_equal(result, expected)
558
+
559
+ # sorter
560
+ sorter = np.array([1, 0])
561
+ assert data_for_sorting.searchsorted(a, sorter=sorter) == 0
562
+
563
+ def test_where_series(self, data, na_value, as_frame):
564
+ assert data[0] != data[1]
565
+ cls = type(data)
566
+ a, b = data[:2]
567
+
568
+ orig = pd.Series(cls._from_sequence([a, a, b, b], dtype=data.dtype))
569
+ ser = orig.copy()
570
+ cond = np.array([True, True, False, False])
571
+
572
+ if as_frame:
573
+ ser = ser.to_frame(name="a")
574
+ cond = cond.reshape(-1, 1)
575
+
576
+ result = ser.where(cond)
577
+ expected = pd.Series(
578
+ cls._from_sequence([a, a, na_value, na_value], dtype=data.dtype)
579
+ )
580
+
581
+ if as_frame:
582
+ expected = expected.to_frame(name="a")
583
+ tm.assert_equal(result, expected)
584
+
585
+ ser.mask(~cond, inplace=True)
586
+ tm.assert_equal(ser, expected)
587
+
588
+ # array other
589
+ ser = orig.copy()
590
+ if as_frame:
591
+ ser = ser.to_frame(name="a")
592
+ cond = np.array([True, False, True, True])
593
+ other = cls._from_sequence([a, b, a, b], dtype=data.dtype)
594
+ if as_frame:
595
+ other = pd.DataFrame({"a": other})
596
+ cond = pd.DataFrame({"a": cond})
597
+ result = ser.where(cond, other)
598
+ expected = pd.Series(cls._from_sequence([a, b, b, b], dtype=data.dtype))
599
+ if as_frame:
600
+ expected = expected.to_frame(name="a")
601
+ tm.assert_equal(result, expected)
602
+
603
+ ser.mask(~cond, other, inplace=True)
604
+ tm.assert_equal(ser, expected)
605
+
606
+ @pytest.mark.parametrize("repeats", [0, 1, 2, [1, 2, 3]])
607
+ def test_repeat(self, data, repeats, as_series, use_numpy):
608
+ arr = type(data)._from_sequence(data[:3], dtype=data.dtype)
609
+ if as_series:
610
+ arr = pd.Series(arr)
611
+
612
+ result = np.repeat(arr, repeats) if use_numpy else arr.repeat(repeats)
613
+
614
+ repeats = [repeats] * 3 if isinstance(repeats, int) else repeats
615
+ expected = [x for x, n in zip(arr, repeats) for _ in range(n)]
616
+ expected = type(data)._from_sequence(expected, dtype=data.dtype)
617
+ if as_series:
618
+ expected = pd.Series(expected, index=arr.index.repeat(repeats))
619
+
620
+ tm.assert_equal(result, expected)
621
+
622
+ @pytest.mark.parametrize(
623
+ "repeats, kwargs, error, msg",
624
+ [
625
+ (2, {"axis": 1}, ValueError, "axis"),
626
+ (-1, {}, ValueError, "negative"),
627
+ ([1, 2], {}, ValueError, "shape"),
628
+ (2, {"foo": "bar"}, TypeError, "'foo'"),
629
+ ],
630
+ )
631
+ def test_repeat_raises(self, data, repeats, kwargs, error, msg, use_numpy):
632
+ with pytest.raises(error, match=msg):
633
+ if use_numpy:
634
+ np.repeat(data, repeats, **kwargs)
635
+ else:
636
+ data.repeat(repeats, **kwargs)
637
+
638
+ def test_delete(self, data):
639
+ result = data.delete(0)
640
+ expected = data[1:]
641
+ tm.assert_extension_array_equal(result, expected)
642
+
643
+ result = data.delete([1, 3])
644
+ expected = data._concat_same_type([data[[0]], data[[2]], data[4:]])
645
+ tm.assert_extension_array_equal(result, expected)
646
+
647
+ def test_insert(self, data):
648
+ # insert at the beginning
649
+ result = data[1:].insert(0, data[0])
650
+ tm.assert_extension_array_equal(result, data)
651
+
652
+ result = data[1:].insert(-len(data[1:]), data[0])
653
+ tm.assert_extension_array_equal(result, data)
654
+
655
+ # insert at the middle
656
+ result = data[:-1].insert(4, data[-1])
657
+
658
+ taker = np.arange(len(data))
659
+ taker[5:] = taker[4:-1]
660
+ taker[4] = len(data) - 1
661
+ expected = data.take(taker)
662
+ tm.assert_extension_array_equal(result, expected)
663
+
664
+ def test_insert_invalid(self, data, invalid_scalar):
665
+ item = invalid_scalar
666
+
667
+ with pytest.raises((TypeError, ValueError)):
668
+ data.insert(0, item)
669
+
670
+ with pytest.raises((TypeError, ValueError)):
671
+ data.insert(4, item)
672
+
673
+ with pytest.raises((TypeError, ValueError)):
674
+ data.insert(len(data) - 1, item)
675
+
676
+ def test_insert_invalid_loc(self, data):
677
+ ub = len(data)
678
+
679
+ with pytest.raises(IndexError):
680
+ data.insert(ub + 1, data[0])
681
+
682
+ with pytest.raises(IndexError):
683
+ data.insert(-ub - 1, data[0])
684
+
685
+ with pytest.raises(TypeError):
686
+ # we expect TypeError here instead of IndexError to match np.insert
687
+ data.insert(1.5, data[0])
688
+
689
+ @pytest.mark.parametrize("box", [pd.array, pd.Series, pd.DataFrame])
690
+ def test_equals(self, data, na_value, as_series, box):
691
+ data2 = type(data)._from_sequence([data[0]] * len(data), dtype=data.dtype)
692
+ data_na = type(data)._from_sequence([na_value] * len(data), dtype=data.dtype)
693
+
694
+ data = tm.box_expected(data, box, transpose=False)
695
+ data2 = tm.box_expected(data2, box, transpose=False)
696
+ data_na = tm.box_expected(data_na, box, transpose=False)
697
+
698
+ # we are asserting with `is True/False` explicitly, to test that the
699
+ # result is an actual Python bool, and not something "truthy"
700
+
701
+ assert data.equals(data) is True
702
+ assert data.equals(data.copy()) is True
703
+
704
+ # unequal other data
705
+ assert data.equals(data2) is False
706
+ assert data.equals(data_na) is False
707
+
708
+ # different length
709
+ assert data[:2].equals(data[:3]) is False
710
+
711
+ # empty are equal
712
+ assert data[:0].equals(data[:0]) is True
713
+
714
+ # other types
715
+ assert data.equals(None) is False
716
+ assert data[[0]].equals(data[0]) is False
717
+
718
+ def test_equals_same_data_different_object(self, data):
719
+ # https://github.com/pandas-dev/pandas/issues/34660
720
+ assert pd.Series(data).equals(pd.Series(data))
venv/lib/python3.10/site-packages/pandas/tests/extension/base/ops.py ADDED
@@ -0,0 +1,299 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ from typing import final
4
+
5
+ import numpy as np
6
+ import pytest
7
+
8
+ from pandas._config import using_pyarrow_string_dtype
9
+
10
+ from pandas.core.dtypes.common import is_string_dtype
11
+
12
+ import pandas as pd
13
+ import pandas._testing as tm
14
+ from pandas.core import ops
15
+
16
+
17
+ class BaseOpsUtil:
18
+ series_scalar_exc: type[Exception] | None = TypeError
19
+ frame_scalar_exc: type[Exception] | None = TypeError
20
+ series_array_exc: type[Exception] | None = TypeError
21
+ divmod_exc: type[Exception] | None = TypeError
22
+
23
+ def _get_expected_exception(
24
+ self, op_name: str, obj, other
25
+ ) -> type[Exception] | None:
26
+ # Find the Exception, if any we expect to raise calling
27
+ # obj.__op_name__(other)
28
+
29
+ # The self.obj_bar_exc pattern isn't great in part because it can depend
30
+ # on op_name or dtypes, but we use it here for backward-compatibility.
31
+ if op_name in ["__divmod__", "__rdivmod__"]:
32
+ result = self.divmod_exc
33
+ elif isinstance(obj, pd.Series) and isinstance(other, pd.Series):
34
+ result = self.series_array_exc
35
+ elif isinstance(obj, pd.Series):
36
+ result = self.series_scalar_exc
37
+ else:
38
+ result = self.frame_scalar_exc
39
+
40
+ if using_pyarrow_string_dtype() and result is not None:
41
+ import pyarrow as pa
42
+
43
+ result = ( # type: ignore[assignment]
44
+ result,
45
+ pa.lib.ArrowNotImplementedError,
46
+ NotImplementedError,
47
+ )
48
+ return result
49
+
50
+ def _cast_pointwise_result(self, op_name: str, obj, other, pointwise_result):
51
+ # In _check_op we check that the result of a pointwise operation
52
+ # (found via _combine) matches the result of the vectorized
53
+ # operation obj.__op_name__(other).
54
+ # In some cases pandas dtype inference on the scalar result may not
55
+ # give a matching dtype even if both operations are behaving "correctly".
56
+ # In these cases, do extra required casting here.
57
+ return pointwise_result
58
+
59
+ def get_op_from_name(self, op_name: str):
60
+ return tm.get_op_from_name(op_name)
61
+
62
+ # Subclasses are not expected to need to override check_opname, _check_op,
63
+ # _check_divmod_op, or _combine.
64
+ # Ideally any relevant overriding can be done in _cast_pointwise_result,
65
+ # get_op_from_name, and the specification of `exc`. If you find a use
66
+ # case that still requires overriding _check_op or _combine, please let
67
+ # us know at github.com/pandas-dev/pandas/issues
68
+ @final
69
+ def check_opname(self, ser: pd.Series, op_name: str, other):
70
+ exc = self._get_expected_exception(op_name, ser, other)
71
+ op = self.get_op_from_name(op_name)
72
+
73
+ self._check_op(ser, op, other, op_name, exc)
74
+
75
+ # see comment on check_opname
76
+ @final
77
+ def _combine(self, obj, other, op):
78
+ if isinstance(obj, pd.DataFrame):
79
+ if len(obj.columns) != 1:
80
+ raise NotImplementedError
81
+ expected = obj.iloc[:, 0].combine(other, op).to_frame()
82
+ else:
83
+ expected = obj.combine(other, op)
84
+ return expected
85
+
86
+ # see comment on check_opname
87
+ @final
88
+ def _check_op(
89
+ self, ser: pd.Series, op, other, op_name: str, exc=NotImplementedError
90
+ ):
91
+ # Check that the Series/DataFrame arithmetic/comparison method matches
92
+ # the pointwise result from _combine.
93
+
94
+ if exc is None:
95
+ result = op(ser, other)
96
+ expected = self._combine(ser, other, op)
97
+ expected = self._cast_pointwise_result(op_name, ser, other, expected)
98
+ assert isinstance(result, type(ser))
99
+ tm.assert_equal(result, expected)
100
+ else:
101
+ with pytest.raises(exc):
102
+ op(ser, other)
103
+
104
+ # see comment on check_opname
105
+ @final
106
+ def _check_divmod_op(self, ser: pd.Series, op, other):
107
+ # check that divmod behavior matches behavior of floordiv+mod
108
+ if op is divmod:
109
+ exc = self._get_expected_exception("__divmod__", ser, other)
110
+ else:
111
+ exc = self._get_expected_exception("__rdivmod__", ser, other)
112
+ if exc is None:
113
+ result_div, result_mod = op(ser, other)
114
+ if op is divmod:
115
+ expected_div, expected_mod = ser // other, ser % other
116
+ else:
117
+ expected_div, expected_mod = other // ser, other % ser
118
+ tm.assert_series_equal(result_div, expected_div)
119
+ tm.assert_series_equal(result_mod, expected_mod)
120
+ else:
121
+ with pytest.raises(exc):
122
+ divmod(ser, other)
123
+
124
+
125
+ class BaseArithmeticOpsTests(BaseOpsUtil):
126
+ """
127
+ Various Series and DataFrame arithmetic ops methods.
128
+
129
+ Subclasses supporting various ops should set the class variables
130
+ to indicate that they support ops of that kind
131
+
132
+ * series_scalar_exc = TypeError
133
+ * frame_scalar_exc = TypeError
134
+ * series_array_exc = TypeError
135
+ * divmod_exc = TypeError
136
+ """
137
+
138
+ series_scalar_exc: type[Exception] | None = TypeError
139
+ frame_scalar_exc: type[Exception] | None = TypeError
140
+ series_array_exc: type[Exception] | None = TypeError
141
+ divmod_exc: type[Exception] | None = TypeError
142
+
143
+ def test_arith_series_with_scalar(self, data, all_arithmetic_operators):
144
+ # series & scalar
145
+ if all_arithmetic_operators == "__rmod__" and is_string_dtype(data.dtype):
146
+ pytest.skip("Skip testing Python string formatting")
147
+
148
+ op_name = all_arithmetic_operators
149
+ ser = pd.Series(data)
150
+ self.check_opname(ser, op_name, ser.iloc[0])
151
+
152
+ def test_arith_frame_with_scalar(self, data, all_arithmetic_operators):
153
+ # frame & scalar
154
+ if all_arithmetic_operators == "__rmod__" and is_string_dtype(data.dtype):
155
+ pytest.skip("Skip testing Python string formatting")
156
+
157
+ op_name = all_arithmetic_operators
158
+ df = pd.DataFrame({"A": data})
159
+ self.check_opname(df, op_name, data[0])
160
+
161
+ def test_arith_series_with_array(self, data, all_arithmetic_operators):
162
+ # ndarray & other series
163
+ op_name = all_arithmetic_operators
164
+ ser = pd.Series(data)
165
+ self.check_opname(ser, op_name, pd.Series([ser.iloc[0]] * len(ser)))
166
+
167
+ def test_divmod(self, data):
168
+ ser = pd.Series(data)
169
+ self._check_divmod_op(ser, divmod, 1)
170
+ self._check_divmod_op(1, ops.rdivmod, ser)
171
+
172
+ def test_divmod_series_array(self, data, data_for_twos):
173
+ ser = pd.Series(data)
174
+ self._check_divmod_op(ser, divmod, data)
175
+
176
+ other = data_for_twos
177
+ self._check_divmod_op(other, ops.rdivmod, ser)
178
+
179
+ other = pd.Series(other)
180
+ self._check_divmod_op(other, ops.rdivmod, ser)
181
+
182
+ def test_add_series_with_extension_array(self, data):
183
+ # Check adding an ExtensionArray to a Series of the same dtype matches
184
+ # the behavior of adding the arrays directly and then wrapping in a
185
+ # Series.
186
+
187
+ ser = pd.Series(data)
188
+
189
+ exc = self._get_expected_exception("__add__", ser, data)
190
+ if exc is not None:
191
+ with pytest.raises(exc):
192
+ ser + data
193
+ return
194
+
195
+ result = ser + data
196
+ expected = pd.Series(data + data)
197
+ tm.assert_series_equal(result, expected)
198
+
199
+ @pytest.mark.parametrize("box", [pd.Series, pd.DataFrame, pd.Index])
200
+ @pytest.mark.parametrize(
201
+ "op_name",
202
+ [
203
+ x
204
+ for x in tm.arithmetic_dunder_methods + tm.comparison_dunder_methods
205
+ if not x.startswith("__r")
206
+ ],
207
+ )
208
+ def test_direct_arith_with_ndframe_returns_not_implemented(
209
+ self, data, box, op_name
210
+ ):
211
+ # EAs should return NotImplemented for ops with Series/DataFrame/Index
212
+ # Pandas takes care of unboxing the series and calling the EA's op.
213
+ other = box(data)
214
+
215
+ if hasattr(data, op_name):
216
+ result = getattr(data, op_name)(other)
217
+ assert result is NotImplemented
218
+
219
+
220
+ class BaseComparisonOpsTests(BaseOpsUtil):
221
+ """Various Series and DataFrame comparison ops methods."""
222
+
223
+ def _compare_other(self, ser: pd.Series, data, op, other):
224
+ if op.__name__ in ["eq", "ne"]:
225
+ # comparison should match point-wise comparisons
226
+ result = op(ser, other)
227
+ expected = ser.combine(other, op)
228
+ expected = self._cast_pointwise_result(op.__name__, ser, other, expected)
229
+ tm.assert_series_equal(result, expected)
230
+
231
+ else:
232
+ exc = None
233
+ try:
234
+ result = op(ser, other)
235
+ except Exception as err:
236
+ exc = err
237
+
238
+ if exc is None:
239
+ # Didn't error, then should match pointwise behavior
240
+ expected = ser.combine(other, op)
241
+ expected = self._cast_pointwise_result(
242
+ op.__name__, ser, other, expected
243
+ )
244
+ tm.assert_series_equal(result, expected)
245
+ else:
246
+ with pytest.raises(type(exc)):
247
+ ser.combine(other, op)
248
+
249
+ def test_compare_scalar(self, data, comparison_op):
250
+ ser = pd.Series(data)
251
+ self._compare_other(ser, data, comparison_op, 0)
252
+
253
+ def test_compare_array(self, data, comparison_op):
254
+ ser = pd.Series(data)
255
+ other = pd.Series([data[0]] * len(data), dtype=data.dtype)
256
+ self._compare_other(ser, data, comparison_op, other)
257
+
258
+
259
+ class BaseUnaryOpsTests(BaseOpsUtil):
260
+ def test_invert(self, data):
261
+ ser = pd.Series(data, name="name")
262
+ try:
263
+ # 10 is an arbitrary choice here, just avoid iterating over
264
+ # the whole array to trim test runtime
265
+ [~x for x in data[:10]]
266
+ except TypeError:
267
+ # scalars don't support invert -> we don't expect the vectorized
268
+ # operation to succeed
269
+ with pytest.raises(TypeError):
270
+ ~ser
271
+ with pytest.raises(TypeError):
272
+ ~data
273
+ else:
274
+ # Note we do not reuse the pointwise result to construct expected
275
+ # because python semantics for negating bools are weird see GH#54569
276
+ result = ~ser
277
+ expected = pd.Series(~data, name="name")
278
+ tm.assert_series_equal(result, expected)
279
+
280
+ @pytest.mark.parametrize("ufunc", [np.positive, np.negative, np.abs])
281
+ def test_unary_ufunc_dunder_equivalence(self, data, ufunc):
282
+ # the dunder __pos__ works if and only if np.positive works,
283
+ # same for __neg__/np.negative and __abs__/np.abs
284
+ attr = {np.positive: "__pos__", np.negative: "__neg__", np.abs: "__abs__"}[
285
+ ufunc
286
+ ]
287
+
288
+ exc = None
289
+ try:
290
+ result = getattr(data, attr)()
291
+ except Exception as err:
292
+ exc = err
293
+
294
+ # if __pos__ raised, then so should the ufunc
295
+ with pytest.raises((type(exc), TypeError)):
296
+ ufunc(data)
297
+ else:
298
+ alt = ufunc(data)
299
+ tm.assert_extension_array_equal(result, alt)
venv/lib/python3.10/site-packages/pandas/tests/extension/base/printing.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import io
2
+
3
+ import pytest
4
+
5
+ import pandas as pd
6
+
7
+
8
+ class BasePrintingTests:
9
+ """Tests checking the formatting of your EA when printed."""
10
+
11
+ @pytest.mark.parametrize("size", ["big", "small"])
12
+ def test_array_repr(self, data, size):
13
+ if size == "small":
14
+ data = data[:5]
15
+ else:
16
+ data = type(data)._concat_same_type([data] * 5)
17
+
18
+ result = repr(data)
19
+ assert type(data).__name__ in result
20
+ assert f"Length: {len(data)}" in result
21
+ assert str(data.dtype) in result
22
+ if size == "big":
23
+ assert "..." in result
24
+
25
+ def test_array_repr_unicode(self, data):
26
+ result = str(data)
27
+ assert isinstance(result, str)
28
+
29
+ def test_series_repr(self, data):
30
+ ser = pd.Series(data)
31
+ assert data.dtype.name in repr(ser)
32
+
33
+ def test_dataframe_repr(self, data):
34
+ df = pd.DataFrame({"A": data})
35
+ repr(df)
36
+
37
+ def test_dtype_name_in_info(self, data):
38
+ buf = io.StringIO()
39
+ pd.DataFrame({"A": data}).info(buf=buf)
40
+ result = buf.getvalue()
41
+ assert data.dtype.name in result
venv/lib/python3.10/site-packages/pandas/tests/extension/base/reduce.py ADDED
@@ -0,0 +1,153 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import final
2
+
3
+ import pytest
4
+
5
+ import pandas as pd
6
+ import pandas._testing as tm
7
+ from pandas.api.types import is_numeric_dtype
8
+
9
+
10
+ class BaseReduceTests:
11
+ """
12
+ Reduction specific tests. Generally these only
13
+ make sense for numeric/boolean operations.
14
+ """
15
+
16
+ def _supports_reduction(self, ser: pd.Series, op_name: str) -> bool:
17
+ # Specify if we expect this reduction to succeed.
18
+ return False
19
+
20
+ def check_reduce(self, ser: pd.Series, op_name: str, skipna: bool):
21
+ # We perform the same operation on the np.float64 data and check
22
+ # that the results match. Override if you need to cast to something
23
+ # other than float64.
24
+ res_op = getattr(ser, op_name)
25
+
26
+ try:
27
+ alt = ser.astype("float64")
28
+ except (TypeError, ValueError):
29
+ # e.g. Interval can't cast (TypeError), StringArray can't cast
30
+ # (ValueError), so let's cast to object and do
31
+ # the reduction pointwise
32
+ alt = ser.astype(object)
33
+
34
+ exp_op = getattr(alt, op_name)
35
+ if op_name == "count":
36
+ result = res_op()
37
+ expected = exp_op()
38
+ else:
39
+ result = res_op(skipna=skipna)
40
+ expected = exp_op(skipna=skipna)
41
+ tm.assert_almost_equal(result, expected)
42
+
43
+ def _get_expected_reduction_dtype(self, arr, op_name: str, skipna: bool):
44
+ # Find the expected dtype when the given reduction is done on a DataFrame
45
+ # column with this array. The default assumes float64-like behavior,
46
+ # i.e. retains the dtype.
47
+ return arr.dtype
48
+
49
+ # We anticipate that authors should not need to override check_reduce_frame,
50
+ # but should be able to do any necessary overriding in
51
+ # _get_expected_reduction_dtype. If you have a use case where this
52
+ # does not hold, please let us know at github.com/pandas-dev/pandas/issues.
53
+ @final
54
+ def check_reduce_frame(self, ser: pd.Series, op_name: str, skipna: bool):
55
+ # Check that the 2D reduction done in a DataFrame reduction "looks like"
56
+ # a wrapped version of the 1D reduction done by Series.
57
+ arr = ser.array
58
+ df = pd.DataFrame({"a": arr})
59
+
60
+ kwargs = {"ddof": 1} if op_name in ["var", "std"] else {}
61
+
62
+ cmp_dtype = self._get_expected_reduction_dtype(arr, op_name, skipna)
63
+
64
+ # The DataFrame method just calls arr._reduce with keepdims=True,
65
+ # so this first check is perfunctory.
66
+ result1 = arr._reduce(op_name, skipna=skipna, keepdims=True, **kwargs)
67
+ result2 = getattr(df, op_name)(skipna=skipna, **kwargs).array
68
+ tm.assert_extension_array_equal(result1, result2)
69
+
70
+ # Check that the 2D reduction looks like a wrapped version of the
71
+ # 1D reduction
72
+ if not skipna and ser.isna().any():
73
+ expected = pd.array([pd.NA], dtype=cmp_dtype)
74
+ else:
75
+ exp_value = getattr(ser.dropna(), op_name)()
76
+ expected = pd.array([exp_value], dtype=cmp_dtype)
77
+
78
+ tm.assert_extension_array_equal(result1, expected)
79
+
80
+ @pytest.mark.parametrize("skipna", [True, False])
81
+ def test_reduce_series_boolean(self, data, all_boolean_reductions, skipna):
82
+ op_name = all_boolean_reductions
83
+ ser = pd.Series(data)
84
+
85
+ if not self._supports_reduction(ser, op_name):
86
+ # TODO: the message being checked here isn't actually checking anything
87
+ msg = (
88
+ "[Cc]annot perform|Categorical is not ordered for operation|"
89
+ "does not support reduction|"
90
+ )
91
+
92
+ with pytest.raises(TypeError, match=msg):
93
+ getattr(ser, op_name)(skipna=skipna)
94
+
95
+ else:
96
+ self.check_reduce(ser, op_name, skipna)
97
+
98
+ @pytest.mark.filterwarnings("ignore::RuntimeWarning")
99
+ @pytest.mark.parametrize("skipna", [True, False])
100
+ def test_reduce_series_numeric(self, data, all_numeric_reductions, skipna):
101
+ op_name = all_numeric_reductions
102
+ ser = pd.Series(data)
103
+
104
+ if not self._supports_reduction(ser, op_name):
105
+ # TODO: the message being checked here isn't actually checking anything
106
+ msg = (
107
+ "[Cc]annot perform|Categorical is not ordered for operation|"
108
+ "does not support reduction|"
109
+ )
110
+
111
+ with pytest.raises(TypeError, match=msg):
112
+ getattr(ser, op_name)(skipna=skipna)
113
+
114
+ else:
115
+ # min/max with empty produce numpy warnings
116
+ self.check_reduce(ser, op_name, skipna)
117
+
118
+ @pytest.mark.parametrize("skipna", [True, False])
119
+ def test_reduce_frame(self, data, all_numeric_reductions, skipna):
120
+ op_name = all_numeric_reductions
121
+ ser = pd.Series(data)
122
+ if not is_numeric_dtype(ser.dtype):
123
+ pytest.skip(f"{ser.dtype} is not numeric dtype")
124
+
125
+ if op_name in ["count", "kurt", "sem"]:
126
+ pytest.skip(f"{op_name} not an array method")
127
+
128
+ if not self._supports_reduction(ser, op_name):
129
+ pytest.skip(f"Reduction {op_name} not supported for this dtype")
130
+
131
+ self.check_reduce_frame(ser, op_name, skipna)
132
+
133
+
134
+ # TODO(3.0): remove BaseNoReduceTests, BaseNumericReduceTests,
135
+ # BaseBooleanReduceTests
136
+ class BaseNoReduceTests(BaseReduceTests):
137
+ """we don't define any reductions"""
138
+
139
+
140
+ class BaseNumericReduceTests(BaseReduceTests):
141
+ # For backward compatibility only, this only runs the numeric reductions
142
+ def _supports_reduction(self, ser: pd.Series, op_name: str) -> bool:
143
+ if op_name in ["any", "all"]:
144
+ pytest.skip("These are tested in BaseBooleanReduceTests")
145
+ return True
146
+
147
+
148
+ class BaseBooleanReduceTests(BaseReduceTests):
149
+ # For backward compatibility only, this only runs the numeric reductions
150
+ def _supports_reduction(self, ser: pd.Series, op_name: str) -> bool:
151
+ if op_name not in ["any", "all"]:
152
+ pytest.skip("These are tested in BaseNumericReduceTests")
153
+ return True
venv/lib/python3.10/site-packages/pandas/tests/extension/base/setitem.py ADDED
@@ -0,0 +1,451 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+
7
+
8
+ class BaseSetitemTests:
9
+ @pytest.fixture(
10
+ params=[
11
+ lambda x: x.index,
12
+ lambda x: list(x.index),
13
+ lambda x: slice(None),
14
+ lambda x: slice(0, len(x)),
15
+ lambda x: range(len(x)),
16
+ lambda x: list(range(len(x))),
17
+ lambda x: np.ones(len(x), dtype=bool),
18
+ ],
19
+ ids=[
20
+ "index",
21
+ "list[index]",
22
+ "null_slice",
23
+ "full_slice",
24
+ "range",
25
+ "list(range)",
26
+ "mask",
27
+ ],
28
+ )
29
+ def full_indexer(self, request):
30
+ """
31
+ Fixture for an indexer to pass to obj.loc to get/set the full length of the
32
+ object.
33
+
34
+ In some cases, assumes that obj.index is the default RangeIndex.
35
+ """
36
+ return request.param
37
+
38
+ @pytest.fixture(autouse=True)
39
+ def skip_if_immutable(self, dtype, request):
40
+ if dtype._is_immutable:
41
+ node = request.node
42
+ if node.name.split("[")[0] == "test_is_immutable":
43
+ # This fixture is auto-used, but we want to not-skip
44
+ # test_is_immutable.
45
+ return
46
+
47
+ # When BaseSetitemTests is mixed into ExtensionTests, we only
48
+ # want this fixture to operate on the tests defined in this
49
+ # class/file.
50
+ defined_in = node.function.__qualname__.split(".")[0]
51
+ if defined_in == "BaseSetitemTests":
52
+ pytest.skip("__setitem__ test not applicable with immutable dtype")
53
+
54
+ def test_is_immutable(self, data):
55
+ if data.dtype._is_immutable:
56
+ with pytest.raises(TypeError):
57
+ data[0] = data[0]
58
+ else:
59
+ data[0] = data[1]
60
+ assert data[0] == data[1]
61
+
62
+ def test_setitem_scalar_series(self, data, box_in_series):
63
+ if box_in_series:
64
+ data = pd.Series(data)
65
+ data[0] = data[1]
66
+ assert data[0] == data[1]
67
+
68
+ def test_setitem_sequence(self, data, box_in_series):
69
+ if box_in_series:
70
+ data = pd.Series(data)
71
+ original = data.copy()
72
+
73
+ data[[0, 1]] = [data[1], data[0]]
74
+ assert data[0] == original[1]
75
+ assert data[1] == original[0]
76
+
77
+ def test_setitem_sequence_mismatched_length_raises(self, data, as_array):
78
+ ser = pd.Series(data)
79
+ original = ser.copy()
80
+ value = [data[0]]
81
+ if as_array:
82
+ value = data._from_sequence(value, dtype=data.dtype)
83
+
84
+ xpr = "cannot set using a {} indexer with a different length"
85
+ with pytest.raises(ValueError, match=xpr.format("list-like")):
86
+ ser[[0, 1]] = value
87
+ # Ensure no modifications made before the exception
88
+ tm.assert_series_equal(ser, original)
89
+
90
+ with pytest.raises(ValueError, match=xpr.format("slice")):
91
+ ser[slice(3)] = value
92
+ tm.assert_series_equal(ser, original)
93
+
94
+ def test_setitem_empty_indexer(self, data, box_in_series):
95
+ if box_in_series:
96
+ data = pd.Series(data)
97
+ original = data.copy()
98
+ data[np.array([], dtype=int)] = []
99
+ tm.assert_equal(data, original)
100
+
101
+ def test_setitem_sequence_broadcasts(self, data, box_in_series):
102
+ if box_in_series:
103
+ data = pd.Series(data)
104
+ data[[0, 1]] = data[2]
105
+ assert data[0] == data[2]
106
+ assert data[1] == data[2]
107
+
108
+ @pytest.mark.parametrize("setter", ["loc", "iloc"])
109
+ def test_setitem_scalar(self, data, setter):
110
+ arr = pd.Series(data)
111
+ setter = getattr(arr, setter)
112
+ setter[0] = data[1]
113
+ assert arr[0] == data[1]
114
+
115
+ def test_setitem_loc_scalar_mixed(self, data):
116
+ df = pd.DataFrame({"A": np.arange(len(data)), "B": data})
117
+ df.loc[0, "B"] = data[1]
118
+ assert df.loc[0, "B"] == data[1]
119
+
120
+ def test_setitem_loc_scalar_single(self, data):
121
+ df = pd.DataFrame({"B": data})
122
+ df.loc[10, "B"] = data[1]
123
+ assert df.loc[10, "B"] == data[1]
124
+
125
+ def test_setitem_loc_scalar_multiple_homogoneous(self, data):
126
+ df = pd.DataFrame({"A": data, "B": data})
127
+ df.loc[10, "B"] = data[1]
128
+ assert df.loc[10, "B"] == data[1]
129
+
130
+ def test_setitem_iloc_scalar_mixed(self, data):
131
+ df = pd.DataFrame({"A": np.arange(len(data)), "B": data})
132
+ df.iloc[0, 1] = data[1]
133
+ assert df.loc[0, "B"] == data[1]
134
+
135
+ def test_setitem_iloc_scalar_single(self, data):
136
+ df = pd.DataFrame({"B": data})
137
+ df.iloc[10, 0] = data[1]
138
+ assert df.loc[10, "B"] == data[1]
139
+
140
+ def test_setitem_iloc_scalar_multiple_homogoneous(self, data):
141
+ df = pd.DataFrame({"A": data, "B": data})
142
+ df.iloc[10, 1] = data[1]
143
+ assert df.loc[10, "B"] == data[1]
144
+
145
+ @pytest.mark.parametrize(
146
+ "mask",
147
+ [
148
+ np.array([True, True, True, False, False]),
149
+ pd.array([True, True, True, False, False], dtype="boolean"),
150
+ pd.array([True, True, True, pd.NA, pd.NA], dtype="boolean"),
151
+ ],
152
+ ids=["numpy-array", "boolean-array", "boolean-array-na"],
153
+ )
154
+ def test_setitem_mask(self, data, mask, box_in_series):
155
+ arr = data[:5].copy()
156
+ expected = arr.take([0, 0, 0, 3, 4])
157
+ if box_in_series:
158
+ arr = pd.Series(arr)
159
+ expected = pd.Series(expected)
160
+ arr[mask] = data[0]
161
+ tm.assert_equal(expected, arr)
162
+
163
+ def test_setitem_mask_raises(self, data, box_in_series):
164
+ # wrong length
165
+ mask = np.array([True, False])
166
+
167
+ if box_in_series:
168
+ data = pd.Series(data)
169
+
170
+ with pytest.raises(IndexError, match="wrong length"):
171
+ data[mask] = data[0]
172
+
173
+ mask = pd.array(mask, dtype="boolean")
174
+ with pytest.raises(IndexError, match="wrong length"):
175
+ data[mask] = data[0]
176
+
177
+ def test_setitem_mask_boolean_array_with_na(self, data, box_in_series):
178
+ mask = pd.array(np.zeros(data.shape, dtype="bool"), dtype="boolean")
179
+ mask[:3] = True
180
+ mask[3:5] = pd.NA
181
+
182
+ if box_in_series:
183
+ data = pd.Series(data)
184
+
185
+ data[mask] = data[0]
186
+
187
+ assert (data[:3] == data[0]).all()
188
+
189
+ @pytest.mark.parametrize(
190
+ "idx",
191
+ [[0, 1, 2], pd.array([0, 1, 2], dtype="Int64"), np.array([0, 1, 2])],
192
+ ids=["list", "integer-array", "numpy-array"],
193
+ )
194
+ def test_setitem_integer_array(self, data, idx, box_in_series):
195
+ arr = data[:5].copy()
196
+ expected = data.take([0, 0, 0, 3, 4])
197
+
198
+ if box_in_series:
199
+ arr = pd.Series(arr)
200
+ expected = pd.Series(expected)
201
+
202
+ arr[idx] = arr[0]
203
+ tm.assert_equal(arr, expected)
204
+
205
+ @pytest.mark.parametrize(
206
+ "idx, box_in_series",
207
+ [
208
+ ([0, 1, 2, pd.NA], False),
209
+ pytest.param(
210
+ [0, 1, 2, pd.NA], True, marks=pytest.mark.xfail(reason="GH-31948")
211
+ ),
212
+ (pd.array([0, 1, 2, pd.NA], dtype="Int64"), False),
213
+ (pd.array([0, 1, 2, pd.NA], dtype="Int64"), False),
214
+ ],
215
+ ids=["list-False", "list-True", "integer-array-False", "integer-array-True"],
216
+ )
217
+ def test_setitem_integer_with_missing_raises(self, data, idx, box_in_series):
218
+ arr = data.copy()
219
+
220
+ # TODO(xfail) this raises KeyError about labels not found (it tries label-based)
221
+ # for list of labels with Series
222
+ if box_in_series:
223
+ arr = pd.Series(data, index=[chr(100 + i) for i in range(len(data))])
224
+
225
+ msg = "Cannot index with an integer indexer containing NA values"
226
+ with pytest.raises(ValueError, match=msg):
227
+ arr[idx] = arr[0]
228
+
229
+ @pytest.mark.parametrize("as_callable", [True, False])
230
+ @pytest.mark.parametrize("setter", ["loc", None])
231
+ def test_setitem_mask_aligned(self, data, as_callable, setter):
232
+ ser = pd.Series(data)
233
+ mask = np.zeros(len(data), dtype=bool)
234
+ mask[:2] = True
235
+
236
+ if as_callable:
237
+ mask2 = lambda x: mask
238
+ else:
239
+ mask2 = mask
240
+
241
+ if setter:
242
+ # loc
243
+ target = getattr(ser, setter)
244
+ else:
245
+ # Series.__setitem__
246
+ target = ser
247
+
248
+ target[mask2] = data[5:7]
249
+
250
+ ser[mask2] = data[5:7]
251
+ assert ser[0] == data[5]
252
+ assert ser[1] == data[6]
253
+
254
+ @pytest.mark.parametrize("setter", ["loc", None])
255
+ def test_setitem_mask_broadcast(self, data, setter):
256
+ ser = pd.Series(data)
257
+ mask = np.zeros(len(data), dtype=bool)
258
+ mask[:2] = True
259
+
260
+ if setter: # loc
261
+ target = getattr(ser, setter)
262
+ else: # __setitem__
263
+ target = ser
264
+
265
+ target[mask] = data[10]
266
+ assert ser[0] == data[10]
267
+ assert ser[1] == data[10]
268
+
269
+ def test_setitem_expand_columns(self, data):
270
+ df = pd.DataFrame({"A": data})
271
+ result = df.copy()
272
+ result["B"] = 1
273
+ expected = pd.DataFrame({"A": data, "B": [1] * len(data)})
274
+ tm.assert_frame_equal(result, expected)
275
+
276
+ result = df.copy()
277
+ result.loc[:, "B"] = 1
278
+ tm.assert_frame_equal(result, expected)
279
+
280
+ # overwrite with new type
281
+ result["B"] = data
282
+ expected = pd.DataFrame({"A": data, "B": data})
283
+ tm.assert_frame_equal(result, expected)
284
+
285
+ def test_setitem_expand_with_extension(self, data):
286
+ df = pd.DataFrame({"A": [1] * len(data)})
287
+ result = df.copy()
288
+ result["B"] = data
289
+ expected = pd.DataFrame({"A": [1] * len(data), "B": data})
290
+ tm.assert_frame_equal(result, expected)
291
+
292
+ result = df.copy()
293
+ result.loc[:, "B"] = data
294
+ tm.assert_frame_equal(result, expected)
295
+
296
+ def test_setitem_frame_invalid_length(self, data):
297
+ df = pd.DataFrame({"A": [1] * len(data)})
298
+ xpr = (
299
+ rf"Length of values \({len(data[:5])}\) "
300
+ rf"does not match length of index \({len(df)}\)"
301
+ )
302
+ with pytest.raises(ValueError, match=xpr):
303
+ df["B"] = data[:5]
304
+
305
+ def test_setitem_tuple_index(self, data):
306
+ ser = pd.Series(data[:2], index=[(0, 0), (0, 1)])
307
+ expected = pd.Series(data.take([1, 1]), index=ser.index)
308
+ ser[(0, 0)] = data[1]
309
+ tm.assert_series_equal(ser, expected)
310
+
311
+ def test_setitem_slice(self, data, box_in_series):
312
+ arr = data[:5].copy()
313
+ expected = data.take([0, 0, 0, 3, 4])
314
+ if box_in_series:
315
+ arr = pd.Series(arr)
316
+ expected = pd.Series(expected)
317
+
318
+ arr[:3] = data[0]
319
+ tm.assert_equal(arr, expected)
320
+
321
+ def test_setitem_loc_iloc_slice(self, data):
322
+ arr = data[:5].copy()
323
+ s = pd.Series(arr, index=["a", "b", "c", "d", "e"])
324
+ expected = pd.Series(data.take([0, 0, 0, 3, 4]), index=s.index)
325
+
326
+ result = s.copy()
327
+ result.iloc[:3] = data[0]
328
+ tm.assert_equal(result, expected)
329
+
330
+ result = s.copy()
331
+ result.loc[:"c"] = data[0]
332
+ tm.assert_equal(result, expected)
333
+
334
+ def test_setitem_slice_mismatch_length_raises(self, data):
335
+ arr = data[:5]
336
+ with pytest.raises(ValueError):
337
+ arr[:1] = arr[:2]
338
+
339
+ def test_setitem_slice_array(self, data):
340
+ arr = data[:5].copy()
341
+ arr[:5] = data[-5:]
342
+ tm.assert_extension_array_equal(arr, data[-5:])
343
+
344
+ def test_setitem_scalar_key_sequence_raise(self, data):
345
+ arr = data[:5].copy()
346
+ with pytest.raises(ValueError):
347
+ arr[0] = arr[[0, 1]]
348
+
349
+ def test_setitem_preserves_views(self, data):
350
+ # GH#28150 setitem shouldn't swap the underlying data
351
+ view1 = data.view()
352
+ view2 = data[:]
353
+
354
+ data[0] = data[1]
355
+ assert view1[0] == data[1]
356
+ assert view2[0] == data[1]
357
+
358
+ def test_setitem_with_expansion_dataframe_column(self, data, full_indexer):
359
+ # https://github.com/pandas-dev/pandas/issues/32395
360
+ df = expected = pd.DataFrame({0: pd.Series(data)})
361
+ result = pd.DataFrame(index=df.index)
362
+
363
+ key = full_indexer(df)
364
+ result.loc[key, 0] = df[0]
365
+
366
+ tm.assert_frame_equal(result, expected)
367
+
368
+ def test_setitem_with_expansion_row(self, data, na_value):
369
+ df = pd.DataFrame({"data": data[:1]})
370
+
371
+ df.loc[1, "data"] = data[1]
372
+ expected = pd.DataFrame({"data": data[:2]})
373
+ tm.assert_frame_equal(df, expected)
374
+
375
+ # https://github.com/pandas-dev/pandas/issues/47284
376
+ df.loc[2, "data"] = na_value
377
+ expected = pd.DataFrame(
378
+ {"data": pd.Series([data[0], data[1], na_value], dtype=data.dtype)}
379
+ )
380
+ tm.assert_frame_equal(df, expected)
381
+
382
+ def test_setitem_series(self, data, full_indexer):
383
+ # https://github.com/pandas-dev/pandas/issues/32395
384
+ ser = pd.Series(data, name="data")
385
+ result = pd.Series(index=ser.index, dtype=object, name="data")
386
+
387
+ # because result has object dtype, the attempt to do setting inplace
388
+ # is successful, and object dtype is retained
389
+ key = full_indexer(ser)
390
+ result.loc[key] = ser
391
+
392
+ expected = pd.Series(
393
+ data.astype(object), index=ser.index, name="data", dtype=object
394
+ )
395
+ tm.assert_series_equal(result, expected)
396
+
397
+ def test_setitem_frame_2d_values(self, data):
398
+ # GH#44514
399
+ df = pd.DataFrame({"A": data})
400
+
401
+ # Avoiding using_array_manager fixture
402
+ # https://github.com/pandas-dev/pandas/pull/44514#discussion_r754002410
403
+ using_array_manager = isinstance(df._mgr, pd.core.internals.ArrayManager)
404
+ using_copy_on_write = pd.options.mode.copy_on_write
405
+
406
+ blk_data = df._mgr.arrays[0]
407
+
408
+ orig = df.copy()
409
+
410
+ df.iloc[:] = df.copy()
411
+ tm.assert_frame_equal(df, orig)
412
+
413
+ df.iloc[:-1] = df.iloc[:-1].copy()
414
+ tm.assert_frame_equal(df, orig)
415
+
416
+ df.iloc[:] = df.values
417
+ tm.assert_frame_equal(df, orig)
418
+ if not using_array_manager and not using_copy_on_write:
419
+ # GH#33457 Check that this setting occurred in-place
420
+ # FIXME(ArrayManager): this should work there too
421
+ assert df._mgr.arrays[0] is blk_data
422
+
423
+ df.iloc[:-1] = df.values[:-1]
424
+ tm.assert_frame_equal(df, orig)
425
+
426
+ def test_delitem_series(self, data):
427
+ # GH#40763
428
+ ser = pd.Series(data, name="data")
429
+
430
+ taker = np.arange(len(ser))
431
+ taker = np.delete(taker, 1)
432
+
433
+ expected = ser[taker]
434
+ del ser[1]
435
+ tm.assert_series_equal(ser, expected)
436
+
437
+ def test_setitem_invalid(self, data, invalid_scalar):
438
+ msg = "" # messages vary by subclass, so we do not test it
439
+ with pytest.raises((ValueError, TypeError), match=msg):
440
+ data[0] = invalid_scalar
441
+
442
+ with pytest.raises((ValueError, TypeError), match=msg):
443
+ data[:] = invalid_scalar
444
+
445
+ def test_setitem_2d_values(self, data):
446
+ # GH50085
447
+ original = data.copy()
448
+ df = pd.DataFrame({"a": data, "b": data})
449
+ df.loc[[0, 1], :] = df.loc[[1, 0], :].values
450
+ assert (df.loc[0, :] == original[1]).all()
451
+ assert (df.loc[1, :] == original[0]).all()
venv/lib/python3.10/site-packages/pandas/tests/extension/conftest.py ADDED
@@ -0,0 +1,230 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import operator
2
+
3
+ import pytest
4
+
5
+ from pandas._config.config import _get_option
6
+
7
+ from pandas import (
8
+ Series,
9
+ options,
10
+ )
11
+
12
+
13
+ @pytest.fixture
14
+ def dtype():
15
+ """A fixture providing the ExtensionDtype to validate."""
16
+ raise NotImplementedError
17
+
18
+
19
+ @pytest.fixture
20
+ def data():
21
+ """
22
+ Length-100 array for this type.
23
+
24
+ * data[0] and data[1] should both be non missing
25
+ * data[0] and data[1] should not be equal
26
+ """
27
+ raise NotImplementedError
28
+
29
+
30
+ @pytest.fixture
31
+ def data_for_twos(dtype):
32
+ """
33
+ Length-100 array in which all the elements are two.
34
+
35
+ Call pytest.skip in your fixture if the dtype does not support divmod.
36
+ """
37
+ if not (dtype._is_numeric or dtype.kind == "m"):
38
+ # Object-dtypes may want to allow this, but for the most part
39
+ # only numeric and timedelta-like dtypes will need to implement this.
40
+ pytest.skip(f"{dtype} is not a numeric dtype")
41
+
42
+ raise NotImplementedError
43
+
44
+
45
+ @pytest.fixture
46
+ def data_missing():
47
+ """Length-2 array with [NA, Valid]"""
48
+ raise NotImplementedError
49
+
50
+
51
+ @pytest.fixture(params=["data", "data_missing"])
52
+ def all_data(request, data, data_missing):
53
+ """Parametrized fixture giving 'data' and 'data_missing'"""
54
+ if request.param == "data":
55
+ return data
56
+ elif request.param == "data_missing":
57
+ return data_missing
58
+
59
+
60
@pytest.fixture
def data_repeated(data):
    """
    Provide a factory producing many copies of the same dataset.

    Parameters
    ----------
    data : fixture implementing `data`

    Returns
    -------
    Callable[[int], Generator]:
        A callable taking a `count` argument and returning a generator
        that yields ``data`` exactly `count` times.
    """

    def make(count):
        # A generator expression is equivalent to the yield-loop form.
        return (data for _ in range(count))

    return make
81
+
82
+
83
@pytest.fixture
def data_for_sorting():
    """
    Return a length-3 array whose sort order is known.

    The three items should be [B, C, A] with A < B < C.

    Boolean dtypes, which only have two distinct values available,
    should use B=C=True.
    """
    raise NotImplementedError
95
+
96
+
97
@pytest.fixture
def data_missing_for_sorting():
    """
    Return a length-3 array with one missing value and a known sort order.

    The three items should be [B, NA, A] with A < B and NA missing.
    """
    raise NotImplementedError
106
+
107
+
108
@pytest.fixture
def na_cmp():
    """
    Return a binary predicate for comparing NA values.

    The returned function accepts two arguments and reports True when
    both are (scalar) NA for the dtype under test.

    Defaults to ``operator.is_``.
    """
    return operator.is_
119
+
120
+
121
@pytest.fixture
def na_value(dtype):
    """
    Return the scalar missing value for this type, i.e. ``dtype.na_value``.

    TODO: can be removed in 3.x (see https://github.com/pandas-dev/pandas/pull/54930)
    """
    return dtype.na_value
129
+
130
+
131
@pytest.fixture
def data_for_grouping():
    """
    Return data for factorization, grouping, and unique tests.

    Expected layout is [B, B, NA, NA, A, A, B, C], where A < B < C and
    NA is missing.

    Dtypes with ``_is_boolean = True`` (only 2 unique non-NA entries)
    should set C=B.
    """
    raise NotImplementedError
144
+
145
+
146
@pytest.fixture(params=[True, False])
def box_in_series(request):
    """Boolean fixture: whether to wrap the data in a Series."""
    return request.param
150
+
151
+
152
@pytest.fixture(
    params=[
        lambda x: 1,
        lambda x: [1] * len(x),
        lambda x: Series([1] * len(x)),
        lambda x: x,
    ],
    ids=["scalar", "list", "series", "object"],
)
def groupby_apply_op(request):
    """Parametrized fixture of callables to exercise via groupby.apply()."""
    return request.param
166
+
167
+
168
@pytest.fixture(params=[True, False])
def as_frame(request):
    """
    Boolean fixture supporting comparison tests of Series vs
    Series.to_frame().
    """
    return request.param
174
+
175
+
176
@pytest.fixture(params=[True, False])
def as_series(request):
    """
    Boolean fixture supporting comparison tests of a raw array vs
    Series(arr).
    """
    return request.param
182
+
183
+
184
@pytest.fixture(params=[True, False])
def use_numpy(request):
    """
    Boolean fixture supporting comparison tests of an ExtensionDtype
    array against a numpy array.
    """
    return request.param
191
+
192
+
193
@pytest.fixture(params=["ffill", "bfill"])
def fillna_method(request):
    """
    Parametrized fixture supplying the 'ffill' and 'bfill' method
    arguments for Series.fillna(method=<method>) testing.
    """
    return request.param
200
+
201
+
202
@pytest.fixture(params=[True, False])
def as_array(request):
    """
    Boolean fixture supporting ExtensionDtype ``_from_sequence`` method
    testing.
    """
    return request.param
208
+
209
+
210
@pytest.fixture
def invalid_scalar(data):
    """
    Return a scalar that *cannot* be held by this ExtensionArray.

    The default (a bare ``object`` instance) works for most subclasses,
    but is not guaranteed.  Arrays that can hold any item (i.e. object
    dtype) should use pytest.skip instead.
    """
    return object.__new__(object)
220
+
221
+
222
@pytest.fixture
def using_copy_on_write() -> bool:
    """
    Report whether Copy-on-Write mode is enabled.
    """
    # Guard clause preserves the original short-circuit: the data_manager
    # option is only consulted when copy_on_write is explicitly True.
    if options.mode.copy_on_write is not True:
        return False
    return _get_option("mode.data_manager", silent=True) == "block"
venv/lib/python3.10/site-packages/pandas/tests/extension/date/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from pandas.tests.extension.date.array import (
2
+ DateArray,
3
+ DateDtype,
4
+ )
5
+
6
+ __all__ = ["DateArray", "DateDtype"]
venv/lib/python3.10/site-packages/pandas/tests/extension/date/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (311 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/extension/date/__pycache__/array.cpython-310.pyc ADDED
Binary file (6.21 kB). View file