applied-ai-018 commited on
Commit
f9aa8a3
·
verified ·
1 Parent(s): a16a983

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. llmeval-env/lib/python3.10/site-packages/pandas/_libs/ops_dispatch.pyi +5 -0
  2. llmeval-env/lib/python3.10/site-packages/pandas/compat/__init__.py +197 -0
  3. llmeval-env/lib/python3.10/site-packages/pandas/compat/_constants.py +30 -0
  4. llmeval-env/lib/python3.10/site-packages/pandas/compat/_optional.py +168 -0
  5. llmeval-env/lib/python3.10/site-packages/pandas/compat/compressors.py +77 -0
  6. llmeval-env/lib/python3.10/site-packages/pandas/compat/numpy/__init__.py +53 -0
  7. llmeval-env/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-310.pyc +0 -0
  8. llmeval-env/lib/python3.10/site-packages/pandas/compat/numpy/function.py +418 -0
  9. llmeval-env/lib/python3.10/site-packages/pandas/compat/pickle_compat.py +262 -0
  10. llmeval-env/lib/python3.10/site-packages/pandas/compat/pyarrow.py +27 -0
  11. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/__init__.py +0 -0
  12. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__init__.py +0 -0
  13. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/__init__.cpython-310.pyc +0 -0
  14. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_cat_accessor.cpython-310.pyc +0 -0
  15. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_dt_accessor.cpython-310.pyc +0 -0
  16. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_list_accessor.cpython-310.pyc +0 -0
  17. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_sparse_accessor.cpython-310.pyc +0 -0
  18. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_str_accessor.cpython-310.pyc +0 -0
  19. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_struct_accessor.cpython-310.pyc +0 -0
  20. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_cat_accessor.py +258 -0
  21. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_dt_accessor.py +843 -0
  22. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_list_accessor.py +129 -0
  23. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_sparse_accessor.py +9 -0
  24. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_str_accessor.py +25 -0
  25. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_struct_accessor.py +196 -0
  26. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/__init__.py +7 -0
  27. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_argsort.py +84 -0
  28. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_asof.py +205 -0
  29. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_astype.py +683 -0
  30. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_clip.py +146 -0
  31. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_copy.py +91 -0
  32. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_describe.py +203 -0
  33. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop.py +99 -0
  34. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_dtypes.py +7 -0
  35. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_equals.py +145 -0
  36. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_explode.py +175 -0
  37. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_fillna.py +1155 -0
  38. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_get_numeric_data.py +38 -0
  39. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_infer_objects.py +56 -0
  40. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_isin.py +252 -0
  41. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_isna.py +35 -0
  42. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_reindex_like.py +41 -0
  43. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_rename_axis.py +47 -0
  44. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_replace.py +813 -0
  45. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_set_name.py +21 -0
  46. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_size.py +22 -0
  47. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_to_frame.py +63 -0
  48. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_unstack.py +169 -0
  49. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_values.py +29 -0
  50. llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_view.py +61 -0
llmeval-env/lib/python3.10/site-packages/pandas/_libs/ops_dispatch.pyi ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
def maybe_dispatch_ufunc_to_dunder_op(
    self,
    ufunc: np.ufunc,
    method: str,
    *inputs,
    **kwargs,
): ...
llmeval-env/lib/python3.10/site-packages/pandas/compat/__init__.py ADDED
@@ -0,0 +1,197 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ compat
3
+ ======
4
+
5
+ Cross-compatible functions for different versions of Python.
6
+
7
+ Other items:
8
+ * platform checker
9
+ """
10
+ from __future__ import annotations
11
+
12
+ import os
13
+ import platform
14
+ import sys
15
+ from typing import TYPE_CHECKING
16
+
17
+ from pandas.compat._constants import (
18
+ IS64,
19
+ ISMUSL,
20
+ PY310,
21
+ PY311,
22
+ PY312,
23
+ PYPY,
24
+ )
25
+ import pandas.compat.compressors
26
+ from pandas.compat.numpy import is_numpy_dev
27
+ from pandas.compat.pyarrow import (
28
+ pa_version_under10p1,
29
+ pa_version_under11p0,
30
+ pa_version_under13p0,
31
+ pa_version_under14p0,
32
+ pa_version_under14p1,
33
+ pa_version_under16p0,
34
+ )
35
+
36
+ if TYPE_CHECKING:
37
+ from pandas._typing import F
38
+
39
+
40
def set_function_name(f: F, name: str, cls: type) -> F:
    """
    Bind the name/qualname/module attributes of ``f`` as if it were
    defined as ``name`` on ``cls``.

    The function object is mutated in place and also returned for
    convenience.
    """
    qualified = f"{cls.__name__}.{name}"
    f.__name__ = name
    f.__qualname__ = qualified
    f.__module__ = cls.__module__
    return f
48
+
49
+
50
def is_platform_little_endian() -> bool:
    """
    Check whether the running platform is little endian.

    Returns
    -------
    bool
        True if the native byte order is little endian.
    """
    byte_order = sys.byteorder
    return byte_order == "little"
60
+
61
+
62
def is_platform_windows() -> bool:
    """
    Check whether the running platform is Windows.

    Returns
    -------
    bool
        True when ``sys.platform`` reports "win32" or "cygwin".
    """
    return sys.platform in ("win32", "cygwin")
72
+
73
+
74
def is_platform_linux() -> bool:
    """
    Check whether the running platform is Linux.

    Returns
    -------
    bool
        True when ``sys.platform`` reports "linux".
    """
    current = sys.platform
    return current == "linux"
84
+
85
+
86
def is_platform_mac() -> bool:
    """
    Check whether the running platform is macOS.

    Returns
    -------
    bool
        True when ``sys.platform`` reports "darwin".
    """
    current = sys.platform
    return current == "darwin"
96
+
97
+
98
def is_platform_arm() -> bool:
    """
    Check whether the running platform uses an ARM architecture.

    Returns
    -------
    bool
        True for 64-bit ARM ("arm64"/"aarch64") or any 32-bit "armv*"
        machine string.
    """
    machine = platform.machine()
    return machine in ("arm64", "aarch64") or machine.startswith("armv")
110
+
111
+
112
def is_platform_power() -> bool:
    """
    Checking if the running platform use Power architecture.

    Returns
    -------
    bool
        True if the running platform uses Power architecture.
    """
    # ppc64 (big endian) and ppc64le (little endian) machine strings.
    return platform.machine() in ("ppc64", "ppc64le")
122
+
123
+
124
def is_ci_environment() -> bool:
    """
    Check whether we are running in a continuous-integration environment.

    Detection relies solely on the ``PANDAS_CI`` environment variable.

    Returns
    -------
    bool
        True if ``PANDAS_CI`` is set to ``"1"``.
    """
    flag = os.environ.get("PANDAS_CI", "0")
    return flag == "1"
135
+
136
+
137
def get_lzma_file() -> type[pandas.compat.compressors.LZMAFile]:
    """
    Return the patched ``LZMAFile`` class from ``pandas.compat.compressors``.

    Returns
    -------
    class
        The ``LZMAFile`` class.

    Raises
    ------
    RuntimeError
        If the underlying ``lzma`` module could not be imported.
    """
    if pandas.compat.compressors.has_lzma:
        return pandas.compat.compressors.LZMAFile
    raise RuntimeError(
        "lzma module not available. "
        "A Python re-install with the proper dependencies, "
        "might be required to solve this issue."
    )
158
+
159
+
160
+ def get_bz2_file() -> type[pandas.compat.compressors.BZ2File]:
161
+ """
162
+ Importing the `BZ2File` class from the `bz2` module.
163
+
164
+ Returns
165
+ -------
166
+ class
167
+ The `BZ2File` class from the `bz2` module.
168
+
169
+ Raises
170
+ ------
171
+ RuntimeError
172
+ If the `bz2` module was not imported correctly, or didn't exist.
173
+ """
174
+ if not pandas.compat.compressors.has_bz2:
175
+ raise RuntimeError(
176
+ "bz2 module not available. "
177
+ "A Python re-install with the proper dependencies, "
178
+ "might be required to solve this issue."
179
+ )
180
+ return pandas.compat.compressors.BZ2File
181
+
182
+
183
+ __all__ = [
184
+ "is_numpy_dev",
185
+ "pa_version_under10p1",
186
+ "pa_version_under11p0",
187
+ "pa_version_under13p0",
188
+ "pa_version_under14p0",
189
+ "pa_version_under14p1",
190
+ "pa_version_under16p0",
191
+ "IS64",
192
+ "ISMUSL",
193
+ "PY310",
194
+ "PY311",
195
+ "PY312",
196
+ "PYPY",
197
+ ]
llmeval-env/lib/python3.10/site-packages/pandas/compat/_constants.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ _constants
3
+ ======
4
+
5
+ Constants relevant for the Python implementation.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import platform
11
+ import sys
12
+ import sysconfig
13
+
14
+ IS64 = sys.maxsize > 2**32
15
+
16
+ PY310 = sys.version_info >= (3, 10)
17
+ PY311 = sys.version_info >= (3, 11)
18
+ PY312 = sys.version_info >= (3, 12)
19
+ PYPY = platform.python_implementation() == "PyPy"
20
+ ISMUSL = "musl" in (sysconfig.get_config_var("HOST_GNU_TYPE") or "")
21
+ REF_COUNT = 2 if PY311 else 3
22
+
23
+ __all__ = [
24
+ "IS64",
25
+ "ISMUSL",
26
+ "PY310",
27
+ "PY311",
28
+ "PY312",
29
+ "PYPY",
30
+ ]
llmeval-env/lib/python3.10/site-packages/pandas/compat/_optional.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import importlib
4
+ import sys
5
+ from typing import TYPE_CHECKING
6
+ import warnings
7
+
8
+ from pandas.util._exceptions import find_stack_level
9
+
10
+ from pandas.util.version import Version
11
+
12
+ if TYPE_CHECKING:
13
+ import types
14
+
15
+ # Update install.rst & setup.cfg when updating versions!
16
+
17
+ VERSIONS = {
18
+ "adbc-driver-postgresql": "0.8.0",
19
+ "adbc-driver-sqlite": "0.8.0",
20
+ "bs4": "4.11.2",
21
+ "blosc": "1.21.3",
22
+ "bottleneck": "1.3.6",
23
+ "dataframe-api-compat": "0.1.7",
24
+ "fastparquet": "2022.12.0",
25
+ "fsspec": "2022.11.0",
26
+ "html5lib": "1.1",
27
+ "hypothesis": "6.46.1",
28
+ "gcsfs": "2022.11.0",
29
+ "jinja2": "3.1.2",
30
+ "lxml.etree": "4.9.2",
31
+ "matplotlib": "3.6.3",
32
+ "numba": "0.56.4",
33
+ "numexpr": "2.8.4",
34
+ "odfpy": "1.4.1",
35
+ "openpyxl": "3.1.0",
36
+ "pandas_gbq": "0.19.0",
37
+ "psycopg2": "2.9.6", # (dt dec pq3 ext lo64)
38
+ "pymysql": "1.0.2",
39
+ "pyarrow": "10.0.1",
40
+ "pyreadstat": "1.2.0",
41
+ "pytest": "7.3.2",
42
+ "python-calamine": "0.1.7",
43
+ "pyxlsb": "1.0.10",
44
+ "s3fs": "2022.11.0",
45
+ "scipy": "1.10.0",
46
+ "sqlalchemy": "2.0.0",
47
+ "tables": "3.8.0",
48
+ "tabulate": "0.9.0",
49
+ "xarray": "2022.12.0",
50
+ "xlrd": "2.0.1",
51
+ "xlsxwriter": "3.0.5",
52
+ "zstandard": "0.19.0",
53
+ "tzdata": "2022.7",
54
+ "qtpy": "2.3.0",
55
+ "pyqt5": "5.15.9",
56
+ }
57
+
58
+ # A mapping from import name to package name (on PyPI) for packages where
59
+ # these two names are different.
60
+
61
+ INSTALL_MAPPING = {
62
+ "bs4": "beautifulsoup4",
63
+ "bottleneck": "Bottleneck",
64
+ "jinja2": "Jinja2",
65
+ "lxml.etree": "lxml",
66
+ "odf": "odfpy",
67
+ "pandas_gbq": "pandas-gbq",
68
+ "python_calamine": "python-calamine",
69
+ "sqlalchemy": "SQLAlchemy",
70
+ "tables": "pytables",
71
+ }
72
+
73
+
74
def get_version(module: types.ModuleType) -> str:
    """
    Return the version string reported by an imported module.

    Parameters
    ----------
    module : types.ModuleType
        The module whose ``__version__`` attribute is read.

    Returns
    -------
    str
        The module's version.

    Raises
    ------
    ImportError
        If the module does not expose a ``__version__`` attribute.
    """
    version = getattr(module, "__version__", None)

    if version is None:
        raise ImportError(f"Can't determine version for {module.__name__}")
    if module.__name__ == "psycopg2":
        # psycopg2 appends " (dt dec pq3 ext lo64)" to its version string;
        # keep only the leading version number.
        version = version.split()[0]
    return version
83
+
84
+
85
def import_optional_dependency(
    name: str,
    extra: str = "",
    errors: str = "raise",
    min_version: str | None = None,
):
    """
    Import an optional dependency.

    By default, if a dependency is missing an ImportError with a nice
    message will be raised. If a dependency is present, but too old,
    we raise.

    Parameters
    ----------
    name : str
        The module name.
    extra : str
        Additional text to include in the ImportError message.
    errors : str {'raise', 'warn', 'ignore'}
        What to do when a dependency is not found or its version is too old.

        * raise : Raise an ImportError
        * warn : Only applicable when a module's version is too old.
          Warns that the version is too old and returns None
        * ignore: If the module is not installed, return None, otherwise,
          return the module, even if the version is too old.
          It's expected that users validate the version locally when
          using ``errors="ignore"`` (see. ``io/html.py``)
    min_version : str, default None
        Specify a minimum version that is different from the global pandas
        minimum version required.

    Returns
    -------
    maybe_module : Optional[ModuleType]
        The imported module, when found and the version is correct.
        None is returned when the package is not found and `errors`
        is False, or when the package's version is too old and `errors`
        is ``'warn'`` or ``'ignore'``.
    """
    assert errors in {"warn", "raise", "ignore"}

    # Prefer the PyPI package name in user-facing messages when it differs
    # from the import name.
    install_name = INSTALL_MAPPING.get(name, name)
    msg = (
        f"Missing optional dependency '{install_name}'. {extra} "
        f"Use pip or conda to install {install_name}."
    )
    try:
        module = importlib.import_module(name)
    except ImportError:
        if errors == "raise":
            raise ImportError(msg)
        return None

    # For submodules (e.g. "lxml.etree") the version lives on the parent
    # package, which importlib has already placed in sys.modules.
    parent = name.split(".")[0]
    module_to_get = sys.modules[parent] if parent != name else module

    minimum_version = min_version if min_version is not None else VERSIONS.get(parent)
    if not minimum_version:
        return module

    version = get_version(module_to_get)
    if version and Version(version) < Version(minimum_version):
        msg = (
            f"Pandas requires version '{minimum_version}' or newer of '{parent}' "
            f"(version '{version}' currently installed)."
        )
        if errors == "warn":
            warnings.warn(
                msg,
                UserWarning,
                stacklevel=find_stack_level(),
            )
            return None
        elif errors == "raise":
            raise ImportError(msg)
        else:
            return None

    return module
llmeval-env/lib/python3.10/site-packages/pandas/compat/compressors.py ADDED
@@ -0,0 +1,77 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Patched ``BZ2File`` and ``LZMAFile`` to handle pickle protocol 5.
3
+ """
4
+
5
+ from __future__ import annotations
6
+
7
+ from pickle import PickleBuffer
8
+
9
+ from pandas.compat._constants import PY310
10
+
11
+ try:
12
+ import bz2
13
+
14
+ has_bz2 = True
15
+ except ImportError:
16
+ has_bz2 = False
17
+
18
+ try:
19
+ import lzma
20
+
21
+ has_lzma = True
22
+ except ImportError:
23
+ has_lzma = False
24
+
25
+
26
def flatten_buffer(
    b: bytes | bytearray | memoryview | PickleBuffer,
) -> bytes | bytearray | memoryview:
    """
    Return some 1-D `uint8` typed buffer.

    Coerces anything that does not match that description to one that does
    without copying if possible (otherwise will copy).
    """
    # bytes/bytearray are already flat byte buffers — return unchanged.
    if isinstance(b, (bytes, bytearray)):
        return b

    pb = b if isinstance(b, PickleBuffer) else PickleBuffer(b)

    try:
        # Zero-copy coercion to a 1-D uint8 C-contiguous memoryview.
        return pb.raw()
    except BufferError:
        # Non-contiguous buffers cannot be viewed flat; copy instead.
        return memoryview(pb).tobytes("A")
48
+
49
+
50
if has_bz2:

    class BZ2File(bz2.BZ2File):
        # On Python < 3.10, ``bz2.BZ2File.write`` relies on ``len(b)``
        # returning the number of bytes in ``b``; flatten the buffer first
        # (with minimal copying) so that constraint holds.  Python 3.10
        # fixed this upstream, so no override is needed there.
        if not PY310:

            def write(self, b) -> int:
                return super().write(flatten_buffer(b))
63
+
64
+
65
if has_lzma:

    class LZMAFile(lzma.LZMAFile):
        # On Python < 3.10, ``lzma.LZMAFile.write`` relies on ``len(b)``
        # returning the number of bytes in ``b``; flatten the buffer first
        # (with minimal copying) so that constraint holds.  Python 3.10
        # fixed this upstream, so no override is needed there.
        if not PY310:

            def write(self, b) -> int:
                return super().write(flatten_buffer(b))
llmeval-env/lib/python3.10/site-packages/pandas/compat/numpy/__init__.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ support numpy compatibility across versions """
2
+ import warnings
3
+
4
+ import numpy as np
5
+
6
+ from pandas.util.version import Version
7
+
8
+ # numpy versioning
9
+ _np_version = np.__version__
10
+ _nlv = Version(_np_version)
11
+ np_version_lt1p23 = _nlv < Version("1.23")
12
+ np_version_gte1p24 = _nlv >= Version("1.24")
13
+ np_version_gte1p24p3 = _nlv >= Version("1.24.3")
14
+ np_version_gte1p25 = _nlv >= Version("1.25")
15
+ np_version_gt2 = _nlv >= Version("2.0.0.dev0")
16
+ is_numpy_dev = _nlv.dev is not None
17
+ _min_numpy_ver = "1.22.4"
18
+
19
+
20
+ if _nlv < Version(_min_numpy_ver):
21
+ raise ImportError(
22
+ f"this version of pandas is incompatible with numpy < {_min_numpy_ver}\n"
23
+ f"your numpy version is {_np_version}.\n"
24
+ f"Please upgrade numpy to >= {_min_numpy_ver} to use this pandas version"
25
+ )
26
+
27
+
28
+ np_long: type
29
+ np_ulong: type
30
+
31
+ if np_version_gt2:
32
+ try:
33
+ with warnings.catch_warnings():
34
+ warnings.filterwarnings(
35
+ "ignore",
36
+ r".*In the future `np\.long` will be defined as.*",
37
+ FutureWarning,
38
+ )
39
+ np_long = np.long # type: ignore[attr-defined]
40
+ np_ulong = np.ulong # type: ignore[attr-defined]
41
+ except AttributeError:
42
+ np_long = np.int_
43
+ np_ulong = np.uint
44
+ else:
45
+ np_long = np.int_
46
+ np_ulong = np.uint
47
+
48
+
49
+ __all__ = [
50
+ "np",
51
+ "_np_version",
52
+ "is_numpy_dev",
53
+ ]
llmeval-env/lib/python3.10/site-packages/pandas/compat/numpy/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (1.28 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/compat/numpy/function.py ADDED
@@ -0,0 +1,418 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ For compatibility with numpy libraries, pandas functions or methods have to
3
+ accept '*args' and '**kwargs' parameters to accommodate numpy arguments that
4
+ are not actually used or respected in the pandas implementation.
5
+
6
+ To ensure that users do not abuse these parameters, validation is performed in
7
+ 'validators.py' to make sure that any extra parameters passed correspond ONLY
8
+ to those in the numpy signature. Part of that validation includes whether or
9
+ not the user attempted to pass in non-default values for these extraneous
10
+ parameters. As we want to discourage users from relying on these parameters
11
+ when calling the pandas implementation, we want them only to pass in the
12
+ default values for these parameters.
13
+
14
+ This module provides a set of commonly used default arguments for functions and
15
+ methods that are spread throughout the codebase. This module will make it
16
+ easier to adjust to future upstream changes in the analogous numpy signatures.
17
+ """
18
+ from __future__ import annotations
19
+
20
+ from typing import (
21
+ TYPE_CHECKING,
22
+ Any,
23
+ TypeVar,
24
+ cast,
25
+ overload,
26
+ )
27
+
28
+ import numpy as np
29
+ from numpy import ndarray
30
+
31
+ from pandas._libs.lib import (
32
+ is_bool,
33
+ is_integer,
34
+ )
35
+ from pandas.errors import UnsupportedFunctionCall
36
+ from pandas.util._validators import (
37
+ validate_args,
38
+ validate_args_and_kwargs,
39
+ validate_kwargs,
40
+ )
41
+
42
+ if TYPE_CHECKING:
43
+ from pandas._typing import (
44
+ Axis,
45
+ AxisInt,
46
+ )
47
+
48
+ AxisNoneT = TypeVar("AxisNoneT", Axis, None)
49
+
50
+
51
class CompatValidator:
    """
    Validate numpy-compat ``*args``/``**kwargs`` against a set of defaults.

    An instance is configured once with the defaults for a particular numpy
    signature and then called with the arguments actually received; any
    non-default extra argument triggers an error from the validators.
    """

    def __init__(
        self,
        defaults,
        fname=None,
        method: str | None = None,
        max_fname_arg_count=None,
    ) -> None:
        self.fname = fname
        self.method = method
        self.defaults = defaults
        self.max_fname_arg_count = max_fname_arg_count

    def __call__(
        self,
        args,
        kwargs,
        fname=None,
        max_fname_arg_count=None,
        method: str | None = None,
    ) -> None:
        # Nothing passed, nothing to validate.
        if not args and not kwargs:
            return None

        # Per-call overrides fall back to the instance configuration.
        if fname is None:
            fname = self.fname
        if max_fname_arg_count is None:
            max_fname_arg_count = self.max_fname_arg_count
        if method is None:
            method = self.method

        if method == "args":
            validate_args(fname, args, max_fname_arg_count, self.defaults)
        elif method == "kwargs":
            validate_kwargs(fname, kwargs, self.defaults)
        elif method == "both":
            validate_args_and_kwargs(
                fname, args, kwargs, max_fname_arg_count, self.defaults
            )
        else:
            raise ValueError(f"invalid validation method '{method}'")
93
+
94
+
95
# np.argmin / np.argmax share one extraneous parameter: ``out``.
ARGMINMAX_DEFAULTS = {"out": None}

validate_argmin = CompatValidator(
    ARGMINMAX_DEFAULTS, fname="argmin", method="both", max_fname_arg_count=1
)
validate_argmax = CompatValidator(
    ARGMINMAX_DEFAULTS, fname="argmax", method="both", max_fname_arg_count=1
)
102
+
103
+
104
def process_skipna(skipna: bool | ndarray | None, args) -> tuple[bool, Any]:
    """
    Normalize a ``skipna`` value that may actually be numpy's ``out``.

    When pandas reductions are invoked through numpy, the positional slot
    holding ``skipna`` can receive an ndarray or None (numpy's ``out``); in
    that case the value is pushed onto ``args`` and ``skipna`` reverts to
    its default of True.
    """
    if skipna is None or isinstance(skipna, ndarray):
        args = (skipna,) + args
        skipna = True

    return skipna, args
110
+
111
+
112
def validate_argmin_with_skipna(skipna: bool | ndarray | None, args, kwargs) -> bool:
    """
    If 'Series.argmin' is called via the 'numpy' library, the third parameter
    in its signature is 'out', which takes either an ndarray or 'None', so
    check if the 'skipna' parameter is either an instance of ndarray or is
    None, since 'skipna' itself should be a boolean
    """
    cleaned_skipna, cleaned_args = process_skipna(skipna, args)
    validate_argmin(cleaned_args, kwargs)
    return cleaned_skipna
122
+
123
+
124
def validate_argmax_with_skipna(skipna: bool | ndarray | None, args, kwargs) -> bool:
    """
    If 'Series.argmax' is called via the 'numpy' library, the third parameter
    in its signature is 'out', which takes either an ndarray or 'None', so
    check if the 'skipna' parameter is either an instance of ndarray or is
    None, since 'skipna' itself should be a boolean
    """
    cleaned_skipna, cleaned_args = process_skipna(skipna, args)
    validate_argmax(cleaned_args, kwargs)
    return cleaned_skipna
134
+
135
+
136
# np.argsort defaults.  NOTE: the original assigned "quicksort" to "kind"
# and then immediately overwrote it with None — the effective default
# validated here is None.
ARGSORT_DEFAULTS: dict[str, int | str | None] = {
    "axis": -1,
    "kind": None,
    "order": None,
    "stable": None,
}


validate_argsort = CompatValidator(
    ARGSORT_DEFAULTS, fname="argsort", max_fname_arg_count=0, method="both"
)

# two different signatures of argsort, this second validation for when the
# `kind` param is supported
ARGSORT_DEFAULTS_KIND: dict[str, int | None] = {
    "axis": -1,
    "order": None,
    "stable": None,
}
validate_argsort_kind = CompatValidator(
    ARGSORT_DEFAULTS_KIND, fname="argsort", max_fname_arg_count=0, method="both"
)
157
+
158
+
159
def validate_argsort_with_ascending(ascending: bool | int | None, args, kwargs) -> bool:
    """
    If 'Categorical.argsort' is called via the 'numpy' library, the first
    parameter in its signature is 'axis', which takes either an integer or
    'None', so check if the 'ascending' parameter has either integer type or is
    None, since 'ascending' itself should be a boolean
    """
    looks_like_axis = ascending is None or is_integer(ascending)
    if looks_like_axis:
        # Shift the numpy-style ``axis`` into args; restore the default.
        args = (ascending,) + args
        ascending = True

    validate_argsort_kind(args, kwargs, max_fname_arg_count=3)
    return cast(bool, ascending)
173
+
174
+
175
CLIP_DEFAULTS: dict[str, Any] = {"out": None}
validate_clip = CompatValidator(
    CLIP_DEFAULTS, fname="clip", method="both", max_fname_arg_count=3
)


@overload
def validate_clip_with_axis(axis: ndarray, args, kwargs) -> None:
    ...


@overload
def validate_clip_with_axis(axis: AxisNoneT, args, kwargs) -> AxisNoneT:
    ...


def validate_clip_with_axis(
    axis: ndarray | AxisNoneT, args, kwargs
) -> AxisNoneT | None:
    """
    If 'NDFrame.clip' is called via the numpy library, the third parameter in
    its signature is 'out', which can takes an ndarray, so check if the 'axis'
    parameter is an instance of ndarray, since 'axis' itself should either be
    an integer or None
    """
    if isinstance(axis, ndarray):
        # numpy passed its ``out`` buffer where pandas expects ``axis``;
        # push it into args for validation and clear axis.
        args = (axis,) + args
        axis = None  # type: ignore[assignment]

    validate_clip(args, kwargs)
    return axis  # type: ignore[return-value]
210
+
211
+
212
# np.cumsum/np.cumprod extraneous parameters: ``dtype`` and ``out``.
CUM_FUNC_DEFAULTS: dict[str, Any] = {"dtype": None, "out": None}
validate_cum_func = CompatValidator(
    CUM_FUNC_DEFAULTS, method="both", max_fname_arg_count=1
)
validate_cumsum = CompatValidator(
    CUM_FUNC_DEFAULTS, fname="cumsum", method="both", max_fname_arg_count=1
)


def validate_cum_func_with_skipna(skipna: bool, args, kwargs, name) -> bool:
    """
    If this function is called via the 'numpy' library, the third parameter in
    its signature is 'dtype', which takes either a 'numpy' dtype or 'None', so
    check if the 'skipna' parameter is a boolean or not
    """
    if not is_bool(skipna):
        # Not a bool at all: treat it as numpy's ``dtype`` argument.
        args = (skipna,) + args
        skipna = True
    elif isinstance(skipna, np.bool_):
        # Normalize numpy scalar bools to the builtin bool.
        skipna = bool(skipna)

    validate_cum_func(args, kwargs, fname=name)
    return skipna
237
+
238
+
239
# np.all / np.any extraneous parameters.
ALLANY_DEFAULTS: dict[str, bool | None] = {
    "dtype": None,
    "out": None,
    "keepdims": False,
    "axis": None,
}
validate_all = CompatValidator(
    ALLANY_DEFAULTS, fname="all", method="both", max_fname_arg_count=1
)
validate_any = CompatValidator(
    ALLANY_DEFAULTS, fname="any", method="both", max_fname_arg_count=1
)

LOGICAL_FUNC_DEFAULTS = {"out": None, "keepdims": False}
validate_logical_func = CompatValidator(LOGICAL_FUNC_DEFAULTS, method="kwargs")

MINMAX_DEFAULTS = {"axis": None, "dtype": None, "out": None, "keepdims": False}
validate_min = CompatValidator(
    MINMAX_DEFAULTS, fname="min", method="both", max_fname_arg_count=1
)
validate_max = CompatValidator(
    MINMAX_DEFAULTS, fname="max", method="both", max_fname_arg_count=1
)

RESHAPE_DEFAULTS: dict[str, str] = {"order": "C"}
validate_reshape = CompatValidator(
    RESHAPE_DEFAULTS, fname="reshape", method="both", max_fname_arg_count=1
)

REPEAT_DEFAULTS: dict[str, Any] = {"axis": None}
validate_repeat = CompatValidator(
    REPEAT_DEFAULTS, fname="repeat", method="both", max_fname_arg_count=1
)

ROUND_DEFAULTS: dict[str, Any] = {"out": None}
validate_round = CompatValidator(
    ROUND_DEFAULTS, fname="round", method="both", max_fname_arg_count=1
)

SORT_DEFAULTS: dict[str, int | str | None] = {
    "axis": -1,
    "kind": "quicksort",
    "order": None,
}
validate_sort = CompatValidator(SORT_DEFAULTS, fname="sort", method="kwargs")

# Base defaults for reductions.  NOTE: "keepdims" is added to
# STAT_FUNC_DEFAULTS only *after* the sum/prod/mean/median copies below are
# taken, so each copy manages its own "keepdims" entry — keep this ordering.
STAT_FUNC_DEFAULTS: dict[str, Any | None] = {"dtype": None, "out": None}

SUM_DEFAULTS = STAT_FUNC_DEFAULTS.copy()
SUM_DEFAULTS["axis"] = None
SUM_DEFAULTS["keepdims"] = False
SUM_DEFAULTS["initial"] = None

PROD_DEFAULTS = SUM_DEFAULTS.copy()

MEAN_DEFAULTS = SUM_DEFAULTS.copy()

MEDIAN_DEFAULTS = STAT_FUNC_DEFAULTS.copy()
MEDIAN_DEFAULTS["overwrite_input"] = False
MEDIAN_DEFAULTS["keepdims"] = False

STAT_FUNC_DEFAULTS["keepdims"] = False

validate_stat_func = CompatValidator(STAT_FUNC_DEFAULTS, method="kwargs")
validate_sum = CompatValidator(
    SUM_DEFAULTS, fname="sum", method="both", max_fname_arg_count=1
)
validate_prod = CompatValidator(
    PROD_DEFAULTS, fname="prod", method="both", max_fname_arg_count=1
)
validate_mean = CompatValidator(
    MEAN_DEFAULTS, fname="mean", method="both", max_fname_arg_count=1
)
validate_median = CompatValidator(
    MEDIAN_DEFAULTS, fname="median", method="both", max_fname_arg_count=1
)

STAT_DDOF_FUNC_DEFAULTS: dict[str, bool | None] = {
    "dtype": None,
    "out": None,
    "keepdims": False,
}
validate_stat_ddof_func = CompatValidator(STAT_DDOF_FUNC_DEFAULTS, method="kwargs")

TAKE_DEFAULTS: dict[str, str | None] = {"out": None, "mode": "raise"}
validate_take = CompatValidator(TAKE_DEFAULTS, fname="take", method="kwargs")
326
+
327
+
328
+ def validate_take_with_convert(convert: ndarray | bool | None, args, kwargs) -> bool:
329
+ """
330
+ If this function is called via the 'numpy' library, the third parameter in
331
+ its signature is 'axis', which takes either an ndarray or 'None', so check
332
+ if the 'convert' parameter is either an instance of ndarray or is None
333
+ """
334
+ if isinstance(convert, ndarray) or convert is None:
335
+ args = (convert,) + args
336
+ convert = True
337
+
338
+ validate_take(args, kwargs, max_fname_arg_count=3, method="both")
339
+ return convert
340
+
341
+
342
+ TRANSPOSE_DEFAULTS = {"axes": None}
343
+ validate_transpose = CompatValidator(
344
+ TRANSPOSE_DEFAULTS, fname="transpose", method="both", max_fname_arg_count=0
345
+ )
346
+
347
+
348
+ def validate_groupby_func(name: str, args, kwargs, allowed=None) -> None:
349
+ """
350
+ 'args' and 'kwargs' should be empty, except for allowed kwargs because all
351
+ of their necessary parameters are explicitly listed in the function
352
+ signature
353
+ """
354
+ if allowed is None:
355
+ allowed = []
356
+
357
+ kwargs = set(kwargs) - set(allowed)
358
+
359
+ if len(args) + len(kwargs) > 0:
360
+ raise UnsupportedFunctionCall(
361
+ "numpy operations are not valid with groupby. "
362
+ f"Use .groupby(...).{name}() instead"
363
+ )
364
+
365
+
366
+ RESAMPLER_NUMPY_OPS = ("min", "max", "sum", "prod", "mean", "std", "var")
367
+
368
+
369
+ def validate_resampler_func(method: str, args, kwargs) -> None:
370
+ """
371
+ 'args' and 'kwargs' should be empty because all of their necessary
372
+ parameters are explicitly listed in the function signature
373
+ """
374
+ if len(args) + len(kwargs) > 0:
375
+ if method in RESAMPLER_NUMPY_OPS:
376
+ raise UnsupportedFunctionCall(
377
+ "numpy operations are not valid with resample. "
378
+ f"Use .resample(...).{method}() instead"
379
+ )
380
+ raise TypeError("too many arguments passed in")
381
+
382
+
383
+ def validate_minmax_axis(axis: AxisInt | None, ndim: int = 1) -> None:
384
+ """
385
+ Ensure that the axis argument passed to min, max, argmin, or argmax is zero
386
+ or None, as otherwise it will be incorrectly ignored.
387
+
388
+ Parameters
389
+ ----------
390
+ axis : int or None
391
+ ndim : int, default 1
392
+
393
+ Raises
394
+ ------
395
+ ValueError
396
+ """
397
+ if axis is None:
398
+ return
399
+ if axis >= ndim or (axis < 0 and ndim + axis < 0):
400
+ raise ValueError(f"`axis` must be fewer than the number of dimensions ({ndim})")
401
+
402
+
403
+ _validation_funcs = {
404
+ "median": validate_median,
405
+ "mean": validate_mean,
406
+ "min": validate_min,
407
+ "max": validate_max,
408
+ "sum": validate_sum,
409
+ "prod": validate_prod,
410
+ }
411
+
412
+
413
+ def validate_func(fname, args, kwargs) -> None:
414
+ if fname not in _validation_funcs:
415
+ return validate_stat_func(args, kwargs, fname=fname)
416
+
417
+ validation_func = _validation_funcs[fname]
418
+ return validation_func(args, kwargs)
llmeval-env/lib/python3.10/site-packages/pandas/compat/pickle_compat.py ADDED
@@ -0,0 +1,262 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Support pre-0.12 series pickle compatibility.
3
+ """
4
+ from __future__ import annotations
5
+
6
+ import contextlib
7
+ import copy
8
+ import io
9
+ import pickle as pkl
10
+ from typing import TYPE_CHECKING
11
+
12
+ import numpy as np
13
+
14
+ from pandas._libs.arrays import NDArrayBacked
15
+ from pandas._libs.tslibs import BaseOffset
16
+
17
+ from pandas import Index
18
+ from pandas.core.arrays import (
19
+ DatetimeArray,
20
+ PeriodArray,
21
+ TimedeltaArray,
22
+ )
23
+ from pandas.core.internals import BlockManager
24
+
25
+ if TYPE_CHECKING:
26
+ from collections.abc import Generator
27
+
28
+
29
+ def load_reduce(self) -> None:
30
+ stack = self.stack
31
+ args = stack.pop()
32
+ func = stack[-1]
33
+
34
+ try:
35
+ stack[-1] = func(*args)
36
+ return
37
+ except TypeError as err:
38
+ # If we have a deprecated function,
39
+ # try to replace and try again.
40
+
41
+ msg = "_reconstruct: First argument must be a sub-type of ndarray"
42
+
43
+ if msg in str(err):
44
+ try:
45
+ cls = args[0]
46
+ stack[-1] = object.__new__(cls)
47
+ return
48
+ except TypeError:
49
+ pass
50
+ elif args and isinstance(args[0], type) and issubclass(args[0], BaseOffset):
51
+ # TypeError: object.__new__(Day) is not safe, use Day.__new__()
52
+ cls = args[0]
53
+ stack[-1] = cls.__new__(*args)
54
+ return
55
+ elif args and issubclass(args[0], PeriodArray):
56
+ cls = args[0]
57
+ stack[-1] = NDArrayBacked.__new__(*args)
58
+ return
59
+
60
+ raise
61
+
62
+
63
+ # If classes are moved, provide compat here.
64
+ _class_locations_map = {
65
+ ("pandas.core.sparse.array", "SparseArray"): ("pandas.core.arrays", "SparseArray"),
66
+ # 15477
67
+ ("pandas.core.base", "FrozenNDArray"): ("numpy", "ndarray"),
68
+ # Re-routing unpickle block logic to go through _unpickle_block instead
69
+ # for pandas <= 1.3.5
70
+ ("pandas.core.internals.blocks", "new_block"): (
71
+ "pandas._libs.internals",
72
+ "_unpickle_block",
73
+ ),
74
+ ("pandas.core.indexes.frozen", "FrozenNDArray"): ("numpy", "ndarray"),
75
+ ("pandas.core.base", "FrozenList"): ("pandas.core.indexes.frozen", "FrozenList"),
76
+ # 10890
77
+ ("pandas.core.series", "TimeSeries"): ("pandas.core.series", "Series"),
78
+ ("pandas.sparse.series", "SparseTimeSeries"): (
79
+ "pandas.core.sparse.series",
80
+ "SparseSeries",
81
+ ),
82
+ # 12588, extensions moving
83
+ ("pandas._sparse", "BlockIndex"): ("pandas._libs.sparse", "BlockIndex"),
84
+ ("pandas.tslib", "Timestamp"): ("pandas._libs.tslib", "Timestamp"),
85
+ # 18543 moving period
86
+ ("pandas._period", "Period"): ("pandas._libs.tslibs.period", "Period"),
87
+ ("pandas._libs.period", "Period"): ("pandas._libs.tslibs.period", "Period"),
88
+ # 18014 moved __nat_unpickle from _libs.tslib-->_libs.tslibs.nattype
89
+ ("pandas.tslib", "__nat_unpickle"): (
90
+ "pandas._libs.tslibs.nattype",
91
+ "__nat_unpickle",
92
+ ),
93
+ ("pandas._libs.tslib", "__nat_unpickle"): (
94
+ "pandas._libs.tslibs.nattype",
95
+ "__nat_unpickle",
96
+ ),
97
+ # 15998 top-level dirs moving
98
+ ("pandas.sparse.array", "SparseArray"): (
99
+ "pandas.core.arrays.sparse",
100
+ "SparseArray",
101
+ ),
102
+ ("pandas.indexes.base", "_new_Index"): ("pandas.core.indexes.base", "_new_Index"),
103
+ ("pandas.indexes.base", "Index"): ("pandas.core.indexes.base", "Index"),
104
+ ("pandas.indexes.numeric", "Int64Index"): (
105
+ "pandas.core.indexes.base",
106
+ "Index", # updated in 50775
107
+ ),
108
+ ("pandas.indexes.range", "RangeIndex"): ("pandas.core.indexes.range", "RangeIndex"),
109
+ ("pandas.indexes.multi", "MultiIndex"): ("pandas.core.indexes.multi", "MultiIndex"),
110
+ ("pandas.tseries.index", "_new_DatetimeIndex"): (
111
+ "pandas.core.indexes.datetimes",
112
+ "_new_DatetimeIndex",
113
+ ),
114
+ ("pandas.tseries.index", "DatetimeIndex"): (
115
+ "pandas.core.indexes.datetimes",
116
+ "DatetimeIndex",
117
+ ),
118
+ ("pandas.tseries.period", "PeriodIndex"): (
119
+ "pandas.core.indexes.period",
120
+ "PeriodIndex",
121
+ ),
122
+ # 19269, arrays moving
123
+ ("pandas.core.categorical", "Categorical"): ("pandas.core.arrays", "Categorical"),
124
+ # 19939, add timedeltaindex, float64index compat from 15998 move
125
+ ("pandas.tseries.tdi", "TimedeltaIndex"): (
126
+ "pandas.core.indexes.timedeltas",
127
+ "TimedeltaIndex",
128
+ ),
129
+ ("pandas.indexes.numeric", "Float64Index"): (
130
+ "pandas.core.indexes.base",
131
+ "Index", # updated in 50775
132
+ ),
133
+ # 50775, remove Int64Index, UInt64Index & Float64Index from codabase
134
+ ("pandas.core.indexes.numeric", "Int64Index"): (
135
+ "pandas.core.indexes.base",
136
+ "Index",
137
+ ),
138
+ ("pandas.core.indexes.numeric", "UInt64Index"): (
139
+ "pandas.core.indexes.base",
140
+ "Index",
141
+ ),
142
+ ("pandas.core.indexes.numeric", "Float64Index"): (
143
+ "pandas.core.indexes.base",
144
+ "Index",
145
+ ),
146
+ ("pandas.core.arrays.sparse.dtype", "SparseDtype"): (
147
+ "pandas.core.dtypes.dtypes",
148
+ "SparseDtype",
149
+ ),
150
+ }
151
+
152
+
153
+ # our Unpickler sub-class to override methods and some dispatcher
154
+ # functions for compat and uses a non-public class of the pickle module.
155
+
156
+
157
+ class Unpickler(pkl._Unpickler):
158
+ def find_class(self, module, name):
159
+ # override superclass
160
+ key = (module, name)
161
+ module, name = _class_locations_map.get(key, key)
162
+ return super().find_class(module, name)
163
+
164
+
165
+ Unpickler.dispatch = copy.copy(Unpickler.dispatch)
166
+ Unpickler.dispatch[pkl.REDUCE[0]] = load_reduce
167
+
168
+
169
+ def load_newobj(self) -> None:
170
+ args = self.stack.pop()
171
+ cls = self.stack[-1]
172
+
173
+ # compat
174
+ if issubclass(cls, Index):
175
+ obj = object.__new__(cls)
176
+ elif issubclass(cls, DatetimeArray) and not args:
177
+ arr = np.array([], dtype="M8[ns]")
178
+ obj = cls.__new__(cls, arr, arr.dtype)
179
+ elif issubclass(cls, TimedeltaArray) and not args:
180
+ arr = np.array([], dtype="m8[ns]")
181
+ obj = cls.__new__(cls, arr, arr.dtype)
182
+ elif cls is BlockManager and not args:
183
+ obj = cls.__new__(cls, (), [], False)
184
+ else:
185
+ obj = cls.__new__(cls, *args)
186
+
187
+ self.stack[-1] = obj
188
+
189
+
190
+ Unpickler.dispatch[pkl.NEWOBJ[0]] = load_newobj
191
+
192
+
193
+ def load_newobj_ex(self) -> None:
194
+ kwargs = self.stack.pop()
195
+ args = self.stack.pop()
196
+ cls = self.stack.pop()
197
+
198
+ # compat
199
+ if issubclass(cls, Index):
200
+ obj = object.__new__(cls)
201
+ else:
202
+ obj = cls.__new__(cls, *args, **kwargs)
203
+ self.append(obj)
204
+
205
+
206
+ try:
207
+ Unpickler.dispatch[pkl.NEWOBJ_EX[0]] = load_newobj_ex
208
+ except (AttributeError, KeyError):
209
+ pass
210
+
211
+
212
+ def load(fh, encoding: str | None = None, is_verbose: bool = False):
213
+ """
214
+ Load a pickle, with a provided encoding,
215
+
216
+ Parameters
217
+ ----------
218
+ fh : a filelike object
219
+ encoding : an optional encoding
220
+ is_verbose : show exception output
221
+ """
222
+ try:
223
+ fh.seek(0)
224
+ if encoding is not None:
225
+ up = Unpickler(fh, encoding=encoding)
226
+ else:
227
+ up = Unpickler(fh)
228
+ # "Unpickler" has no attribute "is_verbose" [attr-defined]
229
+ up.is_verbose = is_verbose # type: ignore[attr-defined]
230
+
231
+ return up.load()
232
+ except (ValueError, TypeError):
233
+ raise
234
+
235
+
236
+ def loads(
237
+ bytes_object: bytes,
238
+ *,
239
+ fix_imports: bool = True,
240
+ encoding: str = "ASCII",
241
+ errors: str = "strict",
242
+ ):
243
+ """
244
+ Analogous to pickle._loads.
245
+ """
246
+ fd = io.BytesIO(bytes_object)
247
+ return Unpickler(
248
+ fd, fix_imports=fix_imports, encoding=encoding, errors=errors
249
+ ).load()
250
+
251
+
252
+ @contextlib.contextmanager
253
+ def patch_pickle() -> Generator[None, None, None]:
254
+ """
255
+ Temporarily patch pickle to use our unpickler.
256
+ """
257
+ orig_loads = pkl.loads
258
+ try:
259
+ setattr(pkl, "loads", loads)
260
+ yield
261
+ finally:
262
+ setattr(pkl, "loads", orig_loads)
llmeval-env/lib/python3.10/site-packages/pandas/compat/pyarrow.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """ support pyarrow compatibility across versions """
2
+
3
+ from __future__ import annotations
4
+
5
+ from pandas.util.version import Version
6
+
7
+ try:
8
+ import pyarrow as pa
9
+
10
+ _palv = Version(Version(pa.__version__).base_version)
11
+ pa_version_under10p1 = _palv < Version("10.0.1")
12
+ pa_version_under11p0 = _palv < Version("11.0.0")
13
+ pa_version_under12p0 = _palv < Version("12.0.0")
14
+ pa_version_under13p0 = _palv < Version("13.0.0")
15
+ pa_version_under14p0 = _palv < Version("14.0.0")
16
+ pa_version_under14p1 = _palv < Version("14.0.1")
17
+ pa_version_under15p0 = _palv < Version("15.0.0")
18
+ pa_version_under16p0 = _palv < Version("16.0.0")
19
+ except ImportError:
20
+ pa_version_under10p1 = True
21
+ pa_version_under11p0 = True
22
+ pa_version_under12p0 = True
23
+ pa_version_under13p0 = True
24
+ pa_version_under14p0 = True
25
+ pa_version_under14p1 = True
26
+ pa_version_under15p0 = True
27
+ pa_version_under16p0 = True
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/__init__.py ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__init__.py ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (202 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_cat_accessor.cpython-310.pyc ADDED
Binary file (8.64 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_dt_accessor.cpython-310.pyc ADDED
Binary file (24.1 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_list_accessor.cpython-310.pyc ADDED
Binary file (3.82 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_sparse_accessor.cpython-310.pyc ADDED
Binary file (795 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_str_accessor.cpython-310.pyc ADDED
Binary file (1.51 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/__pycache__/test_struct_accessor.cpython-310.pyc ADDED
Binary file (4.05 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_cat_accessor.py ADDED
@@ -0,0 +1,258 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Categorical,
6
+ DataFrame,
7
+ Index,
8
+ Series,
9
+ Timestamp,
10
+ date_range,
11
+ period_range,
12
+ timedelta_range,
13
+ )
14
+ import pandas._testing as tm
15
+ from pandas.core.arrays.categorical import CategoricalAccessor
16
+ from pandas.core.indexes.accessors import Properties
17
+
18
+
19
+ class TestCatAccessor:
20
+ @pytest.mark.parametrize(
21
+ "method",
22
+ [
23
+ lambda x: x.cat.set_categories([1, 2, 3]),
24
+ lambda x: x.cat.reorder_categories([2, 3, 1], ordered=True),
25
+ lambda x: x.cat.rename_categories([1, 2, 3]),
26
+ lambda x: x.cat.remove_unused_categories(),
27
+ lambda x: x.cat.remove_categories([2]),
28
+ lambda x: x.cat.add_categories([4]),
29
+ lambda x: x.cat.as_ordered(),
30
+ lambda x: x.cat.as_unordered(),
31
+ ],
32
+ )
33
+ def test_getname_categorical_accessor(self, method):
34
+ # GH#17509
35
+ ser = Series([1, 2, 3], name="A").astype("category")
36
+ expected = "A"
37
+ result = method(ser).name
38
+ assert result == expected
39
+
40
+ def test_cat_accessor(self):
41
+ ser = Series(Categorical(["a", "b", np.nan, "a"]))
42
+ tm.assert_index_equal(ser.cat.categories, Index(["a", "b"]))
43
+ assert not ser.cat.ordered, False
44
+
45
+ exp = Categorical(["a", "b", np.nan, "a"], categories=["b", "a"])
46
+
47
+ res = ser.cat.set_categories(["b", "a"])
48
+ tm.assert_categorical_equal(res.values, exp)
49
+
50
+ ser[:] = "a"
51
+ ser = ser.cat.remove_unused_categories()
52
+ tm.assert_index_equal(ser.cat.categories, Index(["a"]))
53
+
54
+ def test_cat_accessor_api(self):
55
+ # GH#9322
56
+
57
+ assert Series.cat is CategoricalAccessor
58
+ ser = Series(list("aabbcde")).astype("category")
59
+ assert isinstance(ser.cat, CategoricalAccessor)
60
+
61
+ invalid = Series([1])
62
+ with pytest.raises(AttributeError, match="only use .cat accessor"):
63
+ invalid.cat
64
+ assert not hasattr(invalid, "cat")
65
+
66
+ def test_cat_accessor_no_new_attributes(self):
67
+ # https://github.com/pandas-dev/pandas/issues/10673
68
+ cat = Series(list("aabbcde")).astype("category")
69
+ with pytest.raises(AttributeError, match="You cannot add any new attribute"):
70
+ cat.cat.xlabel = "a"
71
+
72
+ def test_categorical_delegations(self):
73
+ # invalid accessor
74
+ msg = r"Can only use \.cat accessor with a 'category' dtype"
75
+ with pytest.raises(AttributeError, match=msg):
76
+ Series([1, 2, 3]).cat
77
+ with pytest.raises(AttributeError, match=msg):
78
+ Series([1, 2, 3]).cat()
79
+ with pytest.raises(AttributeError, match=msg):
80
+ Series(["a", "b", "c"]).cat
81
+ with pytest.raises(AttributeError, match=msg):
82
+ Series(np.arange(5.0)).cat
83
+ with pytest.raises(AttributeError, match=msg):
84
+ Series([Timestamp("20130101")]).cat
85
+
86
+ # Series should delegate calls to '.categories', '.codes', '.ordered'
87
+ # and the methods '.set_categories()' 'drop_unused_categories()' to the
88
+ # categorical
89
+ ser = Series(Categorical(["a", "b", "c", "a"], ordered=True))
90
+ exp_categories = Index(["a", "b", "c"])
91
+ tm.assert_index_equal(ser.cat.categories, exp_categories)
92
+ ser = ser.cat.rename_categories([1, 2, 3])
93
+ exp_categories = Index([1, 2, 3])
94
+ tm.assert_index_equal(ser.cat.categories, exp_categories)
95
+
96
+ exp_codes = Series([0, 1, 2, 0], dtype="int8")
97
+ tm.assert_series_equal(ser.cat.codes, exp_codes)
98
+
99
+ assert ser.cat.ordered
100
+ ser = ser.cat.as_unordered()
101
+ assert not ser.cat.ordered
102
+
103
+ ser = ser.cat.as_ordered()
104
+ assert ser.cat.ordered
105
+
106
+ # reorder
107
+ ser = Series(Categorical(["a", "b", "c", "a"], ordered=True))
108
+ exp_categories = Index(["c", "b", "a"])
109
+ exp_values = np.array(["a", "b", "c", "a"], dtype=np.object_)
110
+ ser = ser.cat.set_categories(["c", "b", "a"])
111
+ tm.assert_index_equal(ser.cat.categories, exp_categories)
112
+ tm.assert_numpy_array_equal(ser.values.__array__(), exp_values)
113
+ tm.assert_numpy_array_equal(ser.__array__(), exp_values)
114
+
115
+ # remove unused categories
116
+ ser = Series(Categorical(["a", "b", "b", "a"], categories=["a", "b", "c"]))
117
+ exp_categories = Index(["a", "b"])
118
+ exp_values = np.array(["a", "b", "b", "a"], dtype=np.object_)
119
+ ser = ser.cat.remove_unused_categories()
120
+ tm.assert_index_equal(ser.cat.categories, exp_categories)
121
+ tm.assert_numpy_array_equal(ser.values.__array__(), exp_values)
122
+ tm.assert_numpy_array_equal(ser.__array__(), exp_values)
123
+
124
+ # This method is likely to be confused, so test that it raises an error
125
+ # on wrong inputs:
126
+ msg = "'Series' object has no attribute 'set_categories'"
127
+ with pytest.raises(AttributeError, match=msg):
128
+ ser.set_categories([4, 3, 2, 1])
129
+
130
+ # right: ser.cat.set_categories([4,3,2,1])
131
+
132
+ # GH#18862 (let Series.cat.rename_categories take callables)
133
+ ser = Series(Categorical(["a", "b", "c", "a"], ordered=True))
134
+ result = ser.cat.rename_categories(lambda x: x.upper())
135
+ expected = Series(
136
+ Categorical(["A", "B", "C", "A"], categories=["A", "B", "C"], ordered=True)
137
+ )
138
+ tm.assert_series_equal(result, expected)
139
+
140
+ @pytest.mark.parametrize(
141
+ "idx",
142
+ [
143
+ date_range("1/1/2015", periods=5),
144
+ date_range("1/1/2015", periods=5, tz="MET"),
145
+ period_range("1/1/2015", freq="D", periods=5),
146
+ timedelta_range("1 days", "10 days"),
147
+ ],
148
+ )
149
+ def test_dt_accessor_api_for_categorical(self, idx):
150
+ # https://github.com/pandas-dev/pandas/issues/10661
151
+
152
+ ser = Series(idx)
153
+ cat = ser.astype("category")
154
+
155
+ # only testing field (like .day)
156
+ # and bool (is_month_start)
157
+ attr_names = type(ser._values)._datetimelike_ops
158
+
159
+ assert isinstance(cat.dt, Properties)
160
+
161
+ special_func_defs = [
162
+ ("strftime", ("%Y-%m-%d",), {}),
163
+ ("round", ("D",), {}),
164
+ ("floor", ("D",), {}),
165
+ ("ceil", ("D",), {}),
166
+ ("asfreq", ("D",), {}),
167
+ ("as_unit", ("s"), {}),
168
+ ]
169
+ if idx.dtype == "M8[ns]":
170
+ # exclude dt64tz since that is already localized and would raise
171
+ tup = ("tz_localize", ("UTC",), {})
172
+ special_func_defs.append(tup)
173
+ elif idx.dtype.kind == "M":
174
+ # exclude dt64 since that is not localized so would raise
175
+ tup = ("tz_convert", ("EST",), {})
176
+ special_func_defs.append(tup)
177
+
178
+ _special_func_names = [f[0] for f in special_func_defs]
179
+
180
+ _ignore_names = ["components", "tz_localize", "tz_convert"]
181
+
182
+ func_names = [
183
+ fname
184
+ for fname in dir(ser.dt)
185
+ if not (
186
+ fname.startswith("_")
187
+ or fname in attr_names
188
+ or fname in _special_func_names
189
+ or fname in _ignore_names
190
+ )
191
+ ]
192
+
193
+ func_defs = [(fname, (), {}) for fname in func_names]
194
+ func_defs.extend(
195
+ f_def for f_def in special_func_defs if f_def[0] in dir(ser.dt)
196
+ )
197
+
198
+ for func, args, kwargs in func_defs:
199
+ warn_cls = []
200
+ if func == "to_period" and getattr(idx, "tz", None) is not None:
201
+ # dropping TZ
202
+ warn_cls.append(UserWarning)
203
+ if func == "to_pydatetime":
204
+ # deprecated to return Index[object]
205
+ warn_cls.append(FutureWarning)
206
+ if warn_cls:
207
+ warn_cls = tuple(warn_cls)
208
+ else:
209
+ warn_cls = None
210
+ with tm.assert_produces_warning(warn_cls):
211
+ res = getattr(cat.dt, func)(*args, **kwargs)
212
+ exp = getattr(ser.dt, func)(*args, **kwargs)
213
+
214
+ tm.assert_equal(res, exp)
215
+
216
+ for attr in attr_names:
217
+ res = getattr(cat.dt, attr)
218
+ exp = getattr(ser.dt, attr)
219
+
220
+ tm.assert_equal(res, exp)
221
+
222
+ def test_dt_accessor_api_for_categorical_invalid(self):
223
+ invalid = Series([1, 2, 3]).astype("category")
224
+ msg = "Can only use .dt accessor with datetimelike"
225
+
226
+ with pytest.raises(AttributeError, match=msg):
227
+ invalid.dt
228
+ assert not hasattr(invalid, "str")
229
+
230
+ def test_set_categories_setitem(self):
231
+ # GH#43334
232
+
233
+ df = DataFrame({"Survived": [1, 0, 1], "Sex": [0, 1, 1]}, dtype="category")
234
+
235
+ df["Survived"] = df["Survived"].cat.rename_categories(["No", "Yes"])
236
+ df["Sex"] = df["Sex"].cat.rename_categories(["female", "male"])
237
+
238
+ # values should not be coerced to NaN
239
+ assert list(df["Sex"]) == ["female", "male", "male"]
240
+ assert list(df["Survived"]) == ["Yes", "No", "Yes"]
241
+
242
+ df["Sex"] = Categorical(df["Sex"], categories=["female", "male"], ordered=False)
243
+ df["Survived"] = Categorical(
244
+ df["Survived"], categories=["No", "Yes"], ordered=False
245
+ )
246
+
247
+ # values should not be coerced to NaN
248
+ assert list(df["Sex"]) == ["female", "male", "male"]
249
+ assert list(df["Survived"]) == ["Yes", "No", "Yes"]
250
+
251
+ def test_categorical_of_booleans_is_boolean(self):
252
+ # https://github.com/pandas-dev/pandas/issues/46313
253
+ df = DataFrame(
254
+ {"int_cat": [1, 2, 3], "bool_cat": [True, False, False]}, dtype="category"
255
+ )
256
+ value = df["bool_cat"].cat.categories.dtype
257
+ expected = np.dtype(np.bool_)
258
+ assert value is expected
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_dt_accessor.py ADDED
@@ -0,0 +1,843 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import calendar
2
+ from datetime import (
3
+ date,
4
+ datetime,
5
+ time,
6
+ )
7
+ import locale
8
+ import unicodedata
9
+
10
+ import numpy as np
11
+ import pytest
12
+ import pytz
13
+
14
+ from pandas._libs.tslibs.timezones import maybe_get_tz
15
+ from pandas.errors import SettingWithCopyError
16
+
17
+ from pandas.core.dtypes.common import (
18
+ is_integer_dtype,
19
+ is_list_like,
20
+ )
21
+
22
+ import pandas as pd
23
+ from pandas import (
24
+ DataFrame,
25
+ DatetimeIndex,
26
+ Index,
27
+ Period,
28
+ PeriodIndex,
29
+ Series,
30
+ TimedeltaIndex,
31
+ date_range,
32
+ period_range,
33
+ timedelta_range,
34
+ )
35
+ import pandas._testing as tm
36
+ from pandas.core.arrays import (
37
+ DatetimeArray,
38
+ PeriodArray,
39
+ TimedeltaArray,
40
+ )
41
+
42
+ ok_for_period = PeriodArray._datetimelike_ops
43
+ ok_for_period_methods = ["strftime", "to_timestamp", "asfreq"]
44
+ ok_for_dt = DatetimeArray._datetimelike_ops
45
+ ok_for_dt_methods = [
46
+ "to_period",
47
+ "to_pydatetime",
48
+ "tz_localize",
49
+ "tz_convert",
50
+ "normalize",
51
+ "strftime",
52
+ "round",
53
+ "floor",
54
+ "ceil",
55
+ "day_name",
56
+ "month_name",
57
+ "isocalendar",
58
+ "as_unit",
59
+ ]
60
+ ok_for_td = TimedeltaArray._datetimelike_ops
61
+ ok_for_td_methods = [
62
+ "components",
63
+ "to_pytimedelta",
64
+ "total_seconds",
65
+ "round",
66
+ "floor",
67
+ "ceil",
68
+ "as_unit",
69
+ ]
70
+
71
+
72
+ def get_dir(ser):
73
+ # check limited display api
74
+ results = [r for r in ser.dt.__dir__() if not r.startswith("_")]
75
+ return sorted(set(results))
76
+
77
+
78
+ class TestSeriesDatetimeValues:
79
+ def _compare(self, ser, name):
80
+ # GH 7207, 11128
81
+ # test .dt namespace accessor
82
+
83
+ def get_expected(ser, prop):
84
+ result = getattr(Index(ser._values), prop)
85
+ if isinstance(result, np.ndarray):
86
+ if is_integer_dtype(result):
87
+ result = result.astype("int64")
88
+ elif not is_list_like(result) or isinstance(result, DataFrame):
89
+ return result
90
+ return Series(result, index=ser.index, name=ser.name)
91
+
92
+ left = getattr(ser.dt, name)
93
+ right = get_expected(ser, name)
94
+ if not (is_list_like(left) and is_list_like(right)):
95
+ assert left == right
96
+ elif isinstance(left, DataFrame):
97
+ tm.assert_frame_equal(left, right)
98
+ else:
99
+ tm.assert_series_equal(left, right)
100
+
101
+ @pytest.mark.parametrize("freq", ["D", "s", "ms"])
102
+ def test_dt_namespace_accessor_datetime64(self, freq):
103
+ # GH#7207, GH#11128
104
+ # test .dt namespace accessor
105
+
106
+ # datetimeindex
107
+ dti = date_range("20130101", periods=5, freq=freq)
108
+ ser = Series(dti, name="xxx")
109
+
110
+ for prop in ok_for_dt:
111
+ # we test freq below
112
+ if prop != "freq":
113
+ self._compare(ser, prop)
114
+
115
+ for prop in ok_for_dt_methods:
116
+ getattr(ser.dt, prop)
117
+
118
+ msg = "The behavior of DatetimeProperties.to_pydatetime is deprecated"
119
+ with tm.assert_produces_warning(FutureWarning, match=msg):
120
+ result = ser.dt.to_pydatetime()
121
+ assert isinstance(result, np.ndarray)
122
+ assert result.dtype == object
123
+
124
+ result = ser.dt.tz_localize("US/Eastern")
125
+ exp_values = DatetimeIndex(ser.values).tz_localize("US/Eastern")
126
+ expected = Series(exp_values, index=ser.index, name="xxx")
127
+ tm.assert_series_equal(result, expected)
128
+
129
+ tz_result = result.dt.tz
130
+ assert str(tz_result) == "US/Eastern"
131
+ freq_result = ser.dt.freq
132
+ assert freq_result == DatetimeIndex(ser.values, freq="infer").freq
133
+
134
+ # let's localize, then convert
135
+ result = ser.dt.tz_localize("UTC").dt.tz_convert("US/Eastern")
136
+ exp_values = (
137
+ DatetimeIndex(ser.values).tz_localize("UTC").tz_convert("US/Eastern")
138
+ )
139
+ expected = Series(exp_values, index=ser.index, name="xxx")
140
+ tm.assert_series_equal(result, expected)
141
+
142
+ def test_dt_namespace_accessor_datetime64tz(self):
143
+ # GH#7207, GH#11128
144
+ # test .dt namespace accessor
145
+
146
+ # datetimeindex with tz
147
+ dti = date_range("20130101", periods=5, tz="US/Eastern")
148
+ ser = Series(dti, name="xxx")
149
+ for prop in ok_for_dt:
150
+ # we test freq below
151
+ if prop != "freq":
152
+ self._compare(ser, prop)
153
+
154
+ for prop in ok_for_dt_methods:
155
+ getattr(ser.dt, prop)
156
+
157
+ msg = "The behavior of DatetimeProperties.to_pydatetime is deprecated"
158
+ with tm.assert_produces_warning(FutureWarning, match=msg):
159
+ result = ser.dt.to_pydatetime()
160
+ assert isinstance(result, np.ndarray)
161
+ assert result.dtype == object
162
+
163
+ result = ser.dt.tz_convert("CET")
164
+ expected = Series(ser._values.tz_convert("CET"), index=ser.index, name="xxx")
165
+ tm.assert_series_equal(result, expected)
166
+
167
+ tz_result = result.dt.tz
168
+ assert str(tz_result) == "CET"
169
+ freq_result = ser.dt.freq
170
+ assert freq_result == DatetimeIndex(ser.values, freq="infer").freq
171
+
172
+ def test_dt_namespace_accessor_timedelta(self):
173
+ # GH#7207, GH#11128
174
+ # test .dt namespace accessor
175
+
176
+ # timedelta index
177
+ cases = [
178
+ Series(
179
+ timedelta_range("1 day", periods=5), index=list("abcde"), name="xxx"
180
+ ),
181
+ Series(timedelta_range("1 day 01:23:45", periods=5, freq="s"), name="xxx"),
182
+ Series(
183
+ timedelta_range("2 days 01:23:45.012345", periods=5, freq="ms"),
184
+ name="xxx",
185
+ ),
186
+ ]
187
+ for ser in cases:
188
+ for prop in ok_for_td:
189
+ # we test freq below
190
+ if prop != "freq":
191
+ self._compare(ser, prop)
192
+
193
+ for prop in ok_for_td_methods:
194
+ getattr(ser.dt, prop)
195
+
196
+ result = ser.dt.components
197
+ assert isinstance(result, DataFrame)
198
+ tm.assert_index_equal(result.index, ser.index)
199
+
200
+ result = ser.dt.to_pytimedelta()
201
+ assert isinstance(result, np.ndarray)
202
+ assert result.dtype == object
203
+
204
+ result = ser.dt.total_seconds()
205
+ assert isinstance(result, Series)
206
+ assert result.dtype == "float64"
207
+
208
+ freq_result = ser.dt.freq
209
+ assert freq_result == TimedeltaIndex(ser.values, freq="infer").freq
210
+
211
+ def test_dt_namespace_accessor_period(self):
212
+ # GH#7207, GH#11128
213
+ # test .dt namespace accessor
214
+
215
+ # periodindex
216
+ pi = period_range("20130101", periods=5, freq="D")
217
+ ser = Series(pi, name="xxx")
218
+
219
+ for prop in ok_for_period:
220
+ # we test freq below
221
+ if prop != "freq":
222
+ self._compare(ser, prop)
223
+
224
+ for prop in ok_for_period_methods:
225
+ getattr(ser.dt, prop)
226
+
227
+ freq_result = ser.dt.freq
228
+ assert freq_result == PeriodIndex(ser.values).freq
229
+
230
+ def test_dt_namespace_accessor_index_and_values(self):
231
+ # both
232
+ index = date_range("20130101", periods=3, freq="D")
233
+ dti = date_range("20140204", periods=3, freq="s")
234
+ ser = Series(dti, index=index, name="xxx")
235
+ exp = Series(
236
+ np.array([2014, 2014, 2014], dtype="int32"), index=index, name="xxx"
237
+ )
238
+ tm.assert_series_equal(ser.dt.year, exp)
239
+
240
+ exp = Series(np.array([2, 2, 2], dtype="int32"), index=index, name="xxx")
241
+ tm.assert_series_equal(ser.dt.month, exp)
242
+
243
+ exp = Series(np.array([0, 1, 2], dtype="int32"), index=index, name="xxx")
244
+ tm.assert_series_equal(ser.dt.second, exp)
245
+
246
+ exp = Series([ser.iloc[0]] * 3, index=index, name="xxx")
247
+ tm.assert_series_equal(ser.dt.normalize(), exp)
248
+
249
+ def test_dt_accessor_limited_display_api(self):
250
+ # tznaive
251
+ ser = Series(date_range("20130101", periods=5, freq="D"), name="xxx")
252
+ results = get_dir(ser)
253
+ tm.assert_almost_equal(results, sorted(set(ok_for_dt + ok_for_dt_methods)))
254
+
255
+ # tzaware
256
+ ser = Series(date_range("2015-01-01", "2016-01-01", freq="min"), name="xxx")
257
+ ser = ser.dt.tz_localize("UTC").dt.tz_convert("America/Chicago")
258
+ results = get_dir(ser)
259
+ tm.assert_almost_equal(results, sorted(set(ok_for_dt + ok_for_dt_methods)))
260
+
261
+ # Period
262
+ idx = period_range("20130101", periods=5, freq="D", name="xxx").astype(object)
263
+ with tm.assert_produces_warning(FutureWarning, match="Dtype inference"):
264
+ ser = Series(idx)
265
+ results = get_dir(ser)
266
+ tm.assert_almost_equal(
267
+ results, sorted(set(ok_for_period + ok_for_period_methods))
268
+ )
269
+
270
+ def test_dt_accessor_ambiguous_freq_conversions(self):
271
+ # GH#11295
272
+ # ambiguous time error on the conversions
273
+ ser = Series(date_range("2015-01-01", "2016-01-01", freq="min"), name="xxx")
274
+ ser = ser.dt.tz_localize("UTC").dt.tz_convert("America/Chicago")
275
+
276
+ exp_values = date_range(
277
+ "2015-01-01", "2016-01-01", freq="min", tz="UTC"
278
+ ).tz_convert("America/Chicago")
279
+ # freq not preserved by tz_localize above
280
+ exp_values = exp_values._with_freq(None)
281
+ expected = Series(exp_values, name="xxx")
282
+ tm.assert_series_equal(ser, expected)
283
+
284
+ def test_dt_accessor_not_writeable(self, using_copy_on_write, warn_copy_on_write):
285
+ # no setting allowed
286
+ ser = Series(date_range("20130101", periods=5, freq="D"), name="xxx")
287
+ with pytest.raises(ValueError, match="modifications"):
288
+ ser.dt.hour = 5
289
+
290
+ # trying to set a copy
291
+ msg = "modifications to a property of a datetimelike.+not supported"
292
+ with pd.option_context("chained_assignment", "raise"):
293
+ if using_copy_on_write:
294
+ with tm.raises_chained_assignment_error():
295
+ ser.dt.hour[0] = 5
296
+ elif warn_copy_on_write:
297
+ with tm.assert_produces_warning(
298
+ FutureWarning, match="ChainedAssignmentError"
299
+ ):
300
+ ser.dt.hour[0] = 5
301
+ else:
302
+ with pytest.raises(SettingWithCopyError, match=msg):
303
+ ser.dt.hour[0] = 5
304
+
305
+ @pytest.mark.parametrize(
306
+ "method, dates",
307
+ [
308
+ ["round", ["2012-01-02", "2012-01-02", "2012-01-01"]],
309
+ ["floor", ["2012-01-01", "2012-01-01", "2012-01-01"]],
310
+ ["ceil", ["2012-01-02", "2012-01-02", "2012-01-02"]],
311
+ ],
312
+ )
313
+ def test_dt_round(self, method, dates):
314
+ # round
315
+ ser = Series(
316
+ pd.to_datetime(
317
+ ["2012-01-01 13:00:00", "2012-01-01 12:01:00", "2012-01-01 08:00:00"]
318
+ ),
319
+ name="xxx",
320
+ )
321
+ result = getattr(ser.dt, method)("D")
322
+ expected = Series(pd.to_datetime(dates), name="xxx")
323
+ tm.assert_series_equal(result, expected)
324
+
325
+ def test_dt_round_tz(self):
326
+ ser = Series(
327
+ pd.to_datetime(
328
+ ["2012-01-01 13:00:00", "2012-01-01 12:01:00", "2012-01-01 08:00:00"]
329
+ ),
330
+ name="xxx",
331
+ )
332
+ result = ser.dt.tz_localize("UTC").dt.tz_convert("US/Eastern").dt.round("D")
333
+
334
+ exp_values = pd.to_datetime(
335
+ ["2012-01-01", "2012-01-01", "2012-01-01"]
336
+ ).tz_localize("US/Eastern")
337
+ expected = Series(exp_values, name="xxx")
338
+ tm.assert_series_equal(result, expected)
339
+
340
+ @pytest.mark.parametrize("method", ["ceil", "round", "floor"])
341
+ def test_dt_round_tz_ambiguous(self, method):
342
+ # GH 18946 round near "fall back" DST
343
+ df1 = DataFrame(
344
+ [
345
+ pd.to_datetime("2017-10-29 02:00:00+02:00", utc=True),
346
+ pd.to_datetime("2017-10-29 02:00:00+01:00", utc=True),
347
+ pd.to_datetime("2017-10-29 03:00:00+01:00", utc=True),
348
+ ],
349
+ columns=["date"],
350
+ )
351
+ df1["date"] = df1["date"].dt.tz_convert("Europe/Madrid")
352
+ # infer
353
+ result = getattr(df1.date.dt, method)("h", ambiguous="infer")
354
+ expected = df1["date"]
355
+ tm.assert_series_equal(result, expected)
356
+
357
+ # bool-array
358
+ result = getattr(df1.date.dt, method)("h", ambiguous=[True, False, False])
359
+ tm.assert_series_equal(result, expected)
360
+
361
+ # NaT
362
+ result = getattr(df1.date.dt, method)("h", ambiguous="NaT")
363
+ expected = df1["date"].copy()
364
+ expected.iloc[0:2] = pd.NaT
365
+ tm.assert_series_equal(result, expected)
366
+
367
+ # raise
368
+ with tm.external_error_raised(pytz.AmbiguousTimeError):
369
+ getattr(df1.date.dt, method)("h", ambiguous="raise")
370
+
371
+ @pytest.mark.parametrize(
372
+ "method, ts_str, freq",
373
+ [
374
+ ["ceil", "2018-03-11 01:59:00-0600", "5min"],
375
+ ["round", "2018-03-11 01:59:00-0600", "5min"],
376
+ ["floor", "2018-03-11 03:01:00-0500", "2h"],
377
+ ],
378
+ )
379
+ def test_dt_round_tz_nonexistent(self, method, ts_str, freq):
380
+ # GH 23324 round near "spring forward" DST
381
+ ser = Series([pd.Timestamp(ts_str, tz="America/Chicago")])
382
+ result = getattr(ser.dt, method)(freq, nonexistent="shift_forward")
383
+ expected = Series([pd.Timestamp("2018-03-11 03:00:00", tz="America/Chicago")])
384
+ tm.assert_series_equal(result, expected)
385
+
386
+ result = getattr(ser.dt, method)(freq, nonexistent="NaT")
387
+ expected = Series([pd.NaT]).dt.tz_localize(result.dt.tz)
388
+ tm.assert_series_equal(result, expected)
389
+
390
+ with pytest.raises(pytz.NonExistentTimeError, match="2018-03-11 02:00:00"):
391
+ getattr(ser.dt, method)(freq, nonexistent="raise")
392
+
393
+ @pytest.mark.parametrize("freq", ["ns", "us", "1000us"])
394
+ def test_dt_round_nonnano_higher_resolution_no_op(self, freq):
395
+ # GH 52761
396
+ ser = Series(
397
+ ["2020-05-31 08:00:00", "2000-12-31 04:00:05", "1800-03-14 07:30:20"],
398
+ dtype="datetime64[ms]",
399
+ )
400
+ expected = ser.copy()
401
+ result = ser.dt.round(freq)
402
+ tm.assert_series_equal(result, expected)
403
+
404
+ assert not np.shares_memory(ser.array._ndarray, result.array._ndarray)
405
+
406
+ def test_dt_namespace_accessor_categorical(self):
407
+ # GH 19468
408
+ dti = DatetimeIndex(["20171111", "20181212"]).repeat(2)
409
+ ser = Series(pd.Categorical(dti), name="foo")
410
+ result = ser.dt.year
411
+ expected = Series([2017, 2017, 2018, 2018], dtype="int32", name="foo")
412
+ tm.assert_series_equal(result, expected)
413
+
414
+ def test_dt_tz_localize_categorical(self, tz_aware_fixture):
415
+ # GH 27952
416
+ tz = tz_aware_fixture
417
+ datetimes = Series(
418
+ ["2019-01-01", "2019-01-01", "2019-01-02"], dtype="datetime64[ns]"
419
+ )
420
+ categorical = datetimes.astype("category")
421
+ result = categorical.dt.tz_localize(tz)
422
+ expected = datetimes.dt.tz_localize(tz)
423
+ tm.assert_series_equal(result, expected)
424
+
425
+ def test_dt_tz_convert_categorical(self, tz_aware_fixture):
426
+ # GH 27952
427
+ tz = tz_aware_fixture
428
+ datetimes = Series(
429
+ ["2019-01-01", "2019-01-01", "2019-01-02"], dtype="datetime64[ns, MET]"
430
+ )
431
+ categorical = datetimes.astype("category")
432
+ result = categorical.dt.tz_convert(tz)
433
+ expected = datetimes.dt.tz_convert(tz)
434
+ tm.assert_series_equal(result, expected)
435
+
436
+ @pytest.mark.parametrize("accessor", ["year", "month", "day"])
437
+ def test_dt_other_accessors_categorical(self, accessor):
438
+ # GH 27952
439
+ datetimes = Series(
440
+ ["2018-01-01", "2018-01-01", "2019-01-02"], dtype="datetime64[ns]"
441
+ )
442
+ categorical = datetimes.astype("category")
443
+ result = getattr(categorical.dt, accessor)
444
+ expected = getattr(datetimes.dt, accessor)
445
+ tm.assert_series_equal(result, expected)
446
+
447
+ def test_dt_accessor_no_new_attributes(self):
448
+ # https://github.com/pandas-dev/pandas/issues/10673
449
+ ser = Series(date_range("20130101", periods=5, freq="D"))
450
+ with pytest.raises(AttributeError, match="You cannot add any new attribute"):
451
+ ser.dt.xlabel = "a"
452
+
453
+ # error: Unsupported operand types for + ("List[None]" and "List[str]")
454
+ @pytest.mark.parametrize(
455
+ "time_locale", [None] + tm.get_locales() # type: ignore[operator]
456
+ )
457
+ def test_dt_accessor_datetime_name_accessors(self, time_locale):
458
+ # Test Monday -> Sunday and January -> December, in that sequence
459
+ if time_locale is None:
460
+ # If the time_locale is None, day-name and month_name should
461
+ # return the english attributes
462
+ expected_days = [
463
+ "Monday",
464
+ "Tuesday",
465
+ "Wednesday",
466
+ "Thursday",
467
+ "Friday",
468
+ "Saturday",
469
+ "Sunday",
470
+ ]
471
+ expected_months = [
472
+ "January",
473
+ "February",
474
+ "March",
475
+ "April",
476
+ "May",
477
+ "June",
478
+ "July",
479
+ "August",
480
+ "September",
481
+ "October",
482
+ "November",
483
+ "December",
484
+ ]
485
+ else:
486
+ with tm.set_locale(time_locale, locale.LC_TIME):
487
+ expected_days = calendar.day_name[:]
488
+ expected_months = calendar.month_name[1:]
489
+
490
+ ser = Series(date_range(freq="D", start=datetime(1998, 1, 1), periods=365))
491
+ english_days = [
492
+ "Monday",
493
+ "Tuesday",
494
+ "Wednesday",
495
+ "Thursday",
496
+ "Friday",
497
+ "Saturday",
498
+ "Sunday",
499
+ ]
500
+ for day, name, eng_name in zip(range(4, 11), expected_days, english_days):
501
+ name = name.capitalize()
502
+ assert ser.dt.day_name(locale=time_locale)[day] == name
503
+ assert ser.dt.day_name(locale=None)[day] == eng_name
504
+ ser = pd.concat([ser, Series([pd.NaT])])
505
+ assert np.isnan(ser.dt.day_name(locale=time_locale).iloc[-1])
506
+
507
+ ser = Series(date_range(freq="ME", start="2012", end="2013"))
508
+ result = ser.dt.month_name(locale=time_locale)
509
+ expected = Series([month.capitalize() for month in expected_months])
510
+
511
+ # work around https://github.com/pandas-dev/pandas/issues/22342
512
+ result = result.str.normalize("NFD")
513
+ expected = expected.str.normalize("NFD")
514
+
515
+ tm.assert_series_equal(result, expected)
516
+
517
+ for s_date, expected in zip(ser, expected_months):
518
+ result = s_date.month_name(locale=time_locale)
519
+ expected = expected.capitalize()
520
+
521
+ result = unicodedata.normalize("NFD", result)
522
+ expected = unicodedata.normalize("NFD", expected)
523
+
524
+ assert result == expected
525
+
526
+ ser = pd.concat([ser, Series([pd.NaT])])
527
+ assert np.isnan(ser.dt.month_name(locale=time_locale).iloc[-1])
528
+
529
+ def test_strftime(self):
530
+ # GH 10086
531
+ ser = Series(date_range("20130101", periods=5))
532
+ result = ser.dt.strftime("%Y/%m/%d")
533
+ expected = Series(
534
+ ["2013/01/01", "2013/01/02", "2013/01/03", "2013/01/04", "2013/01/05"]
535
+ )
536
+ tm.assert_series_equal(result, expected)
537
+
538
+ ser = Series(date_range("2015-02-03 11:22:33.4567", periods=5))
539
+ result = ser.dt.strftime("%Y/%m/%d %H-%M-%S")
540
+ expected = Series(
541
+ [
542
+ "2015/02/03 11-22-33",
543
+ "2015/02/04 11-22-33",
544
+ "2015/02/05 11-22-33",
545
+ "2015/02/06 11-22-33",
546
+ "2015/02/07 11-22-33",
547
+ ]
548
+ )
549
+ tm.assert_series_equal(result, expected)
550
+
551
+ ser = Series(period_range("20130101", periods=5))
552
+ result = ser.dt.strftime("%Y/%m/%d")
553
+ expected = Series(
554
+ ["2013/01/01", "2013/01/02", "2013/01/03", "2013/01/04", "2013/01/05"]
555
+ )
556
+ tm.assert_series_equal(result, expected)
557
+
558
+ ser = Series(period_range("2015-02-03 11:22:33.4567", periods=5, freq="s"))
559
+ result = ser.dt.strftime("%Y/%m/%d %H-%M-%S")
560
+ expected = Series(
561
+ [
562
+ "2015/02/03 11-22-33",
563
+ "2015/02/03 11-22-34",
564
+ "2015/02/03 11-22-35",
565
+ "2015/02/03 11-22-36",
566
+ "2015/02/03 11-22-37",
567
+ ]
568
+ )
569
+ tm.assert_series_equal(result, expected)
570
+
571
+ def test_strftime_dt64_days(self):
572
+ ser = Series(date_range("20130101", periods=5))
573
+ ser.iloc[0] = pd.NaT
574
+ result = ser.dt.strftime("%Y/%m/%d")
575
+ expected = Series(
576
+ [np.nan, "2013/01/02", "2013/01/03", "2013/01/04", "2013/01/05"]
577
+ )
578
+ tm.assert_series_equal(result, expected)
579
+
580
+ datetime_index = date_range("20150301", periods=5)
581
+ result = datetime_index.strftime("%Y/%m/%d")
582
+
583
+ expected = Index(
584
+ ["2015/03/01", "2015/03/02", "2015/03/03", "2015/03/04", "2015/03/05"],
585
+ dtype=np.object_,
586
+ )
587
+ # dtype may be S10 or U10 depending on python version
588
+ tm.assert_index_equal(result, expected)
589
+
590
+ def test_strftime_period_days(self, using_infer_string):
591
+ period_index = period_range("20150301", periods=5)
592
+ result = period_index.strftime("%Y/%m/%d")
593
+ expected = Index(
594
+ ["2015/03/01", "2015/03/02", "2015/03/03", "2015/03/04", "2015/03/05"],
595
+ dtype="=U10",
596
+ )
597
+ if using_infer_string:
598
+ expected = expected.astype("string[pyarrow_numpy]")
599
+ tm.assert_index_equal(result, expected)
600
+
601
+ def test_strftime_dt64_microsecond_resolution(self):
602
+ ser = Series([datetime(2013, 1, 1, 2, 32, 59), datetime(2013, 1, 2, 14, 32, 1)])
603
+ result = ser.dt.strftime("%Y-%m-%d %H:%M:%S")
604
+ expected = Series(["2013-01-01 02:32:59", "2013-01-02 14:32:01"])
605
+ tm.assert_series_equal(result, expected)
606
+
607
+ def test_strftime_period_hours(self):
608
+ ser = Series(period_range("20130101", periods=4, freq="h"))
609
+ result = ser.dt.strftime("%Y/%m/%d %H:%M:%S")
610
+ expected = Series(
611
+ [
612
+ "2013/01/01 00:00:00",
613
+ "2013/01/01 01:00:00",
614
+ "2013/01/01 02:00:00",
615
+ "2013/01/01 03:00:00",
616
+ ]
617
+ )
618
+ tm.assert_series_equal(result, expected)
619
+
620
+ def test_strftime_period_minutes(self):
621
+ ser = Series(period_range("20130101", periods=4, freq="ms"))
622
+ result = ser.dt.strftime("%Y/%m/%d %H:%M:%S.%l")
623
+ expected = Series(
624
+ [
625
+ "2013/01/01 00:00:00.000",
626
+ "2013/01/01 00:00:00.001",
627
+ "2013/01/01 00:00:00.002",
628
+ "2013/01/01 00:00:00.003",
629
+ ]
630
+ )
631
+ tm.assert_series_equal(result, expected)
632
+
633
+ @pytest.mark.parametrize(
634
+ "data",
635
+ [
636
+ DatetimeIndex(["2019-01-01", pd.NaT]),
637
+ PeriodIndex(["2019-01-01", pd.NaT], dtype="period[D]"),
638
+ ],
639
+ )
640
+ def test_strftime_nat(self, data):
641
+ # GH 29578
642
+ ser = Series(data)
643
+ result = ser.dt.strftime("%Y-%m-%d")
644
+ expected = Series(["2019-01-01", np.nan])
645
+ tm.assert_series_equal(result, expected)
646
+
647
+ @pytest.mark.parametrize(
648
+ "data", [DatetimeIndex([pd.NaT]), PeriodIndex([pd.NaT], dtype="period[D]")]
649
+ )
650
+ def test_strftime_all_nat(self, data):
651
+ # https://github.com/pandas-dev/pandas/issues/45858
652
+ ser = Series(data)
653
+ with tm.assert_produces_warning(None):
654
+ result = ser.dt.strftime("%Y-%m-%d")
655
+ expected = Series([np.nan], dtype=object)
656
+ tm.assert_series_equal(result, expected)
657
+
658
+ def test_valid_dt_with_missing_values(self):
659
+ # GH 8689
660
+ ser = Series(date_range("20130101", periods=5, freq="D"))
661
+ ser.iloc[2] = pd.NaT
662
+
663
+ for attr in ["microsecond", "nanosecond", "second", "minute", "hour", "day"]:
664
+ expected = getattr(ser.dt, attr).copy()
665
+ expected.iloc[2] = np.nan
666
+ result = getattr(ser.dt, attr)
667
+ tm.assert_series_equal(result, expected)
668
+
669
+ result = ser.dt.date
670
+ expected = Series(
671
+ [
672
+ date(2013, 1, 1),
673
+ date(2013, 1, 2),
674
+ pd.NaT,
675
+ date(2013, 1, 4),
676
+ date(2013, 1, 5),
677
+ ],
678
+ dtype="object",
679
+ )
680
+ tm.assert_series_equal(result, expected)
681
+
682
+ result = ser.dt.time
683
+ expected = Series([time(0), time(0), pd.NaT, time(0), time(0)], dtype="object")
684
+ tm.assert_series_equal(result, expected)
685
+
686
+ def test_dt_accessor_api(self):
687
+ # GH 9322
688
+ from pandas.core.indexes.accessors import (
689
+ CombinedDatetimelikeProperties,
690
+ DatetimeProperties,
691
+ )
692
+
693
+ assert Series.dt is CombinedDatetimelikeProperties
694
+
695
+ ser = Series(date_range("2000-01-01", periods=3))
696
+ assert isinstance(ser.dt, DatetimeProperties)
697
+
698
+ @pytest.mark.parametrize(
699
+ "ser",
700
+ [
701
+ Series(np.arange(5)),
702
+ Series(list("abcde")),
703
+ Series(np.random.default_rng(2).standard_normal(5)),
704
+ ],
705
+ )
706
+ def test_dt_accessor_invalid(self, ser):
707
+ # GH#9322 check that series with incorrect dtypes don't have attr
708
+ with pytest.raises(AttributeError, match="only use .dt accessor"):
709
+ ser.dt
710
+ assert not hasattr(ser, "dt")
711
+
712
+ def test_dt_accessor_updates_on_inplace(self):
713
+ ser = Series(date_range("2018-01-01", periods=10))
714
+ ser[2] = None
715
+ return_value = ser.fillna(pd.Timestamp("2018-01-01"), inplace=True)
716
+ assert return_value is None
717
+ result = ser.dt.date
718
+ assert result[0] == result[2]
719
+
720
+ def test_date_tz(self):
721
+ # GH11757
722
+ rng = DatetimeIndex(
723
+ ["2014-04-04 23:56", "2014-07-18 21:24", "2015-11-22 22:14"],
724
+ tz="US/Eastern",
725
+ )
726
+ ser = Series(rng)
727
+ expected = Series([date(2014, 4, 4), date(2014, 7, 18), date(2015, 11, 22)])
728
+ tm.assert_series_equal(ser.dt.date, expected)
729
+ tm.assert_series_equal(ser.apply(lambda x: x.date()), expected)
730
+
731
+ def test_dt_timetz_accessor(self, tz_naive_fixture):
732
+ # GH21358
733
+ tz = maybe_get_tz(tz_naive_fixture)
734
+
735
+ dtindex = DatetimeIndex(
736
+ ["2014-04-04 23:56", "2014-07-18 21:24", "2015-11-22 22:14"], tz=tz
737
+ )
738
+ ser = Series(dtindex)
739
+ expected = Series(
740
+ [time(23, 56, tzinfo=tz), time(21, 24, tzinfo=tz), time(22, 14, tzinfo=tz)]
741
+ )
742
+ result = ser.dt.timetz
743
+ tm.assert_series_equal(result, expected)
744
+
745
+ @pytest.mark.parametrize(
746
+ "input_series, expected_output",
747
+ [
748
+ [["2020-01-01"], [[2020, 1, 3]]],
749
+ [[pd.NaT], [[np.nan, np.nan, np.nan]]],
750
+ [["2019-12-31", "2019-12-29"], [[2020, 1, 2], [2019, 52, 7]]],
751
+ [["2010-01-01", pd.NaT], [[2009, 53, 5], [np.nan, np.nan, np.nan]]],
752
+ # see GH#36032
753
+ [["2016-01-08", "2016-01-04"], [[2016, 1, 5], [2016, 1, 1]]],
754
+ [["2016-01-07", "2016-01-01"], [[2016, 1, 4], [2015, 53, 5]]],
755
+ ],
756
+ )
757
+ def test_isocalendar(self, input_series, expected_output):
758
+ result = pd.to_datetime(Series(input_series)).dt.isocalendar()
759
+ expected_frame = DataFrame(
760
+ expected_output, columns=["year", "week", "day"], dtype="UInt32"
761
+ )
762
+ tm.assert_frame_equal(result, expected_frame)
763
+
764
+ def test_hour_index(self):
765
+ dt_series = Series(
766
+ date_range(start="2021-01-01", periods=5, freq="h"),
767
+ index=[2, 6, 7, 8, 11],
768
+ dtype="category",
769
+ )
770
+ result = dt_series.dt.hour
771
+ expected = Series(
772
+ [0, 1, 2, 3, 4],
773
+ dtype="int32",
774
+ index=[2, 6, 7, 8, 11],
775
+ )
776
+ tm.assert_series_equal(result, expected)
777
+
778
+
779
class TestSeriesPeriodValuesDtAccessor:
    @pytest.mark.parametrize(
        "input_vals",
        [
            [Period("2016-01", freq="M"), Period("2016-02", freq="M")],
            [Period("2016-01-01", freq="D"), Period("2016-01-02", freq="D")],
            [
                Period("2016-01-01 00:00:00", freq="h"),
                Period("2016-01-01 01:00:00", freq="h"),
            ],
            [
                Period("2016-01-01 00:00:00", freq="M"),
                Period("2016-01-01 00:01:00", freq="M"),
            ],
            [
                Period("2016-01-01 00:00:00", freq="s"),
                Period("2016-01-01 00:00:01", freq="s"),
            ],
        ],
    )
    def test_end_time_timevalues(self, input_vals):
        # GH#17157: .dt.end_time must adjust the time part of each Period,
        # matching a per-element .end_time
        arr = PeriodArray._from_sequence(np.asarray(input_vals))
        s = Series(arr)
        tm.assert_series_equal(s.dt.end_time, s.apply(lambda x: x.end_time))

    @pytest.mark.parametrize("input_vals", [("2001"), ("NaT")])
    def test_to_period(self, input_vals):
        # GH#21205: datetime64 -> Period conversion via .dt.to_period
        res = Series([input_vals], dtype="datetime64[ns]").dt.to_period("D")
        tm.assert_series_equal(res, Series([input_vals], dtype="Period[D]"))
816
+
817
+
818
def test_normalize_pre_epoch_dates():
    # GH: 36294 — normalize must also work for dates before the Unix epoch
    s = pd.to_datetime(Series(["1969-01-01 09:00:00", "2016-01-01 09:00:00"]))
    exp = pd.to_datetime(Series(["1969-01-01", "2016-01-01"]))
    tm.assert_series_equal(s.dt.normalize(), exp)
824
+
825
+
826
def test_day_attribute_non_nano_beyond_int32():
    # GH 52386: .dt.days on second-resolution data whose day counts
    # exceed the int32 range must not overflow
    data = np.array(
        [
            136457654736252,
            134736784364431,
            245345345545332,
            223432411,
            2343241,
            3634548734,
            23234,
        ],
        dtype="timedelta64[s]",
    )
    res = Series(data).dt.days
    exp = Series([1579371003, 1559453522, 2839645203, 2586, 27, 42066, 0])
    tm.assert_series_equal(res, exp)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_list_accessor.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+
3
+ import pytest
4
+
5
+ from pandas import (
6
+ ArrowDtype,
7
+ Series,
8
+ )
9
+ import pandas._testing as tm
10
+
11
+ pa = pytest.importorskip("pyarrow")
12
+
13
+ from pandas.compat import pa_version_under11p0
14
+
15
+
16
@pytest.mark.parametrize(
    "list_dtype",
    (
        pa.list_(pa.int64()),
        pa.list_(pa.int64(), list_size=3),
        pa.large_list(pa.int64()),
    ),
)
def test_list_getitem(list_dtype):
    # scalar indexing picks the element at that position from every row
    s = Series([[1, 2, 3], [4, None, 5], None], dtype=ArrowDtype(list_dtype))
    res = s.list[1]
    tm.assert_series_equal(res, Series([2, None, None], dtype="int64[pyarrow]"))
32
+
33
+
34
def test_list_getitem_slice():
    # slicing each row's list; only supported from pyarrow 11 onward
    s = Series(
        [[1, 2, 3], [4, None, 5], None], dtype=ArrowDtype(pa.list_(pa.int64()))
    )
    if pa_version_under11p0:
        with pytest.raises(
            NotImplementedError, match="List slice not supported by pyarrow "
        ):
            s.list[1:None:None]
    else:
        res = s.list[1:None:None]
        exp = Series(
            [[2, 3], [None, 5], None], dtype=ArrowDtype(pa.list_(pa.int64()))
        )
        tm.assert_series_equal(res, exp)
50
+
51
+
52
def test_list_len():
    # per-row list length; a missing row yields null
    s = Series([[1, 2, 3], [4, None], None], dtype=ArrowDtype(pa.list_(pa.int64())))
    res = s.list.len()
    tm.assert_series_equal(res, Series([3, 2, None], dtype=ArrowDtype(pa.int32())))
60
+
61
+
62
def test_list_flatten():
    # flatten concatenates the row lists, dropping missing rows entirely
    s = Series([[1, 2, 3], [4, None], None], dtype=ArrowDtype(pa.list_(pa.int64())))
    res = s.list.flatten()
    tm.assert_series_equal(
        res, Series([1, 2, 3, 4, None], dtype=ArrowDtype(pa.int64()))
    )
70
+
71
+
72
def test_list_getitem_slice_invalid():
    # a zero step is rejected (pyarrow >= 11) / unsupported (older pyarrow)
    s = Series(
        [[1, 2, 3], [4, None, 5], None], dtype=ArrowDtype(pa.list_(pa.int64()))
    )
    if pa_version_under11p0:
        with pytest.raises(
            NotImplementedError, match="List slice not supported by pyarrow "
        ):
            s.list[1:None:0]
    else:
        with pytest.raises(
            pa.lib.ArrowInvalid, match=re.escape("`step` must be >= 1")
        ):
            s.list[1:None:0]
85
+
86
+
87
def test_list_accessor_non_list_dtype():
    # .list is only defined for arrow list dtypes
    s = Series([1, 2, 4], dtype=ArrowDtype(pa.int64()))
    msg = re.escape(
        "Can only use the '.list' accessor with 'list[pyarrow]' dtype, "
        "not int64[pyarrow]."
    )
    with pytest.raises(AttributeError, match=msg):
        s.list[1:None:0]
100
+
101
+
102
@pytest.mark.parametrize(
    "list_dtype",
    (
        pa.list_(pa.int64()),
        pa.list_(pa.int64(), list_size=3),
        pa.large_list(pa.int64()),
    ),
)
def test_list_getitem_invalid_index(list_dtype):
    # out-of-bounds indices and non-int keys must raise
    s = Series([[1, 2, 3], [4, None, 5], None], dtype=ArrowDtype(list_dtype))
    with pytest.raises(pa.lib.ArrowInvalid, match="Index -1 is out of bounds"):
        s.list[-1]
    with pytest.raises(pa.lib.ArrowInvalid, match="Index 5 is out of bounds"):
        s.list[5]
    with pytest.raises(ValueError, match="key must be an int or slice, got str"):
        s.list["abc"]
121
+
122
+
123
def test_list_accessor_not_iterable():
    # the accessor object itself does not support iteration
    s = Series([[1, 2, 3], [4, None], None], dtype=ArrowDtype(pa.list_(pa.int64())))
    with pytest.raises(TypeError, match="'ListAccessor' object is not iterable"):
        iter(s.list)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_sparse_accessor.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ from pandas import Series
2
+
3
+
4
class TestSparseAccessor:
    def test_sparse_accessor_updates_on_inplace(self):
        # the .sparse accessor reflects in-place drops on the parent Series
        s = Series([1, 1, 2, 3], dtype="Sparse[int]")
        assert s.drop([0, 1], inplace=True) is None
        assert s.sparse.density == 1.0
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_str_accessor.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas import Series
4
+ import pandas._testing as tm
5
+
6
+
7
+ class TestStrAccessor:
8
+ def test_str_attribute(self):
9
+ # GH#9068
10
+ methods = ["strip", "rstrip", "lstrip"]
11
+ ser = Series([" jack", "jill ", " jesse ", "frank"])
12
+ for method in methods:
13
+ expected = Series([getattr(str, method)(x) for x in ser.values])
14
+ tm.assert_series_equal(getattr(Series.str, method)(ser.str), expected)
15
+
16
+ # str accessor only valid with string values
17
+ ser = Series(range(5))
18
+ with pytest.raises(AttributeError, match="only use .str accessor"):
19
+ ser.str.repeat(2)
20
+
21
+ def test_str_accessor_updates_on_inplace(self):
22
+ ser = Series(list("abc"))
23
+ return_value = ser.drop([0], inplace=True)
24
+ assert return_value is None
25
+ assert len(ser.str.lower()) == 2
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/accessors/test_struct_accessor.py ADDED
@@ -0,0 +1,196 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+
3
+ import pytest
4
+
5
+ from pandas.compat.pyarrow import (
6
+ pa_version_under11p0,
7
+ pa_version_under13p0,
8
+ )
9
+
10
+ from pandas import (
11
+ ArrowDtype,
12
+ DataFrame,
13
+ Index,
14
+ Series,
15
+ )
16
+ import pandas._testing as tm
17
+
18
+ pa = pytest.importorskip("pyarrow")
19
+ pc = pytest.importorskip("pyarrow.compute")
20
+
21
+
22
def test_struct_accessor_dtypes():
    # .struct.dtypes lists one ArrowDtype per struct field, keyed by field name
    nested = pa.struct(
        [
            ("int_col", pa.int64()),
            ("float_col", pa.float64()),
        ]
    )
    s = Series(
        [],
        dtype=ArrowDtype(
            pa.struct(
                [
                    ("int_col", pa.int64()),
                    ("string_col", pa.string()),
                    ("struct_col", nested),
                ]
            )
        ),
    )
    res = s.struct.dtypes
    exp = Series(
        [ArrowDtype(pa.int64()), ArrowDtype(pa.string()), ArrowDtype(nested)],
        index=Index(["int_col", "string_col", "struct_col"]),
    )
    tm.assert_series_equal(res, exp)
60
+
61
+
62
@pytest.mark.skipif(pa_version_under13p0, reason="pyarrow>=13.0.0 required")
def test_struct_accessor_field():
    # field() selects one struct member, by name or by position,
    # and names the resulting Series after the field
    index = Index([-100, 42, 123])
    s = Series(
        [
            {"rice": 1.0, "maize": -1, "wheat": "a"},
            {"rice": 2.0, "maize": 0, "wheat": "b"},
            {"rice": 3.0, "maize": 1, "wheat": "c"},
        ],
        dtype=ArrowDtype(
            pa.struct(
                [
                    ("rice", pa.float64()),
                    ("maize", pa.int64()),
                    ("wheat", pa.string()),
                ]
            )
        ),
        index=index,
    )

    # by name
    res = s.struct.field("maize")
    exp = Series([-1, 0, 1], dtype=ArrowDtype(pa.int64()), index=index, name="maize")
    tm.assert_series_equal(res, exp)

    # by position
    res = s.struct.field(2)
    exp = Series(
        ["a", "b", "c"], dtype=ArrowDtype(pa.string()), index=index, name="wheat"
    )
    tm.assert_series_equal(res, exp)
99
+
100
+
101
def test_struct_accessor_field_with_invalid_name_or_index():
    # only int/str (and pyarrow field expressions) are valid selectors
    s = Series([], dtype=ArrowDtype(pa.struct([("field", pa.int64())])))
    with pytest.raises(ValueError, match="name_or_index must be an int, str,"):
        s.struct.field(1.1)
106
+
107
+
108
@pytest.mark.skipif(pa_version_under11p0, reason="pyarrow>=11.0.0 required")
def test_struct_accessor_explode():
    # explode() expands each struct field into its own DataFrame column
    index = Index([-100, 42, 123])
    s = Series(
        [
            {"painted": 1, "snapping": {"sea": "green"}},
            {"painted": 2, "snapping": {"sea": "leatherback"}},
            {"painted": 3, "snapping": {"sea": "hawksbill"}},
        ],
        dtype=ArrowDtype(
            pa.struct(
                [
                    ("painted", pa.int64()),
                    ("snapping", pa.struct([("sea", pa.string())])),
                ]
            )
        ),
        index=index,
    )
    res = s.struct.explode()
    exp = DataFrame(
        {
            "painted": Series([1, 2, 3], index=index, dtype=ArrowDtype(pa.int64())),
            "snapping": Series(
                [{"sea": "green"}, {"sea": "leatherback"}, {"sea": "hawksbill"}],
                index=index,
                dtype=ArrowDtype(pa.struct([("sea", pa.string())])),
            ),
        },
    )
    tm.assert_frame_equal(res, exp)
139
+
140
+
141
@pytest.mark.parametrize(
    "invalid",
    [
        pytest.param(Series([1, 2, 3], dtype="int64"), id="int64"),
        pytest.param(
            Series(["a", "b", "c"], dtype="string[pyarrow]"), id="string-pyarrow"
        ),
    ],
)
def test_struct_accessor_api_for_invalid(invalid):
    # .struct is only defined for arrow struct dtypes
    msg = re.escape(
        "Can only use the '.struct' accessor with 'struct[pyarrow]' dtype, "
        f"not {invalid.dtype}."
    )
    with pytest.raises(AttributeError, match=msg):
        invalid.struct
159
+
160
+
161
@pytest.mark.parametrize(
    ["indices", "name"],
    [
        (0, "int_col"),
        ([1, 2], "str_col"),
        (pc.field("int_col"), "int_col"),
        ("int_col", "int_col"),
        (b"string_col", b"string_col"),
        ([b"string_col"], "string_col"),
    ],
)
@pytest.mark.skipif(pa_version_under13p0, reason="pyarrow>=13.0.0 required")
def test_struct_accessor_field_expanded(indices, name):
    # field() accepts every selector form pyarrow.compute.struct_field does
    arrow_type = pa.struct(
        [
            ("int_col", pa.int64()),
            (
                "struct_col",
                pa.struct(
                    [
                        ("int_col", pa.int64()),
                        ("float_col", pa.float64()),
                        ("str_col", pa.string()),
                    ]
                ),
            ),
            (b"string_col", pa.string()),
        ]
    )

    data = pa.array([], type=arrow_type)
    s = Series(data, dtype=ArrowDtype(arrow_type))
    exp = pc.struct_field(data, indices)
    res = s.struct.field(indices)
    tm.assert_equal(res.array._pa_array.combine_chunks(), exp)
    assert res.name == name
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/__init__.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ """
2
+ Test files dedicated to individual (stand-alone) Series methods
3
+
4
+ Ideally these files/tests should correspond 1-to-1 with tests.frame.methods
5
+
6
+ These may also present opportunities for sharing/de-duplicating test code.
7
+ """
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_argsort.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ Timestamp,
7
+ isna,
8
+ )
9
+ import pandas._testing as tm
10
+
11
+
12
+ class TestSeriesArgsort:
13
+ def test_argsort_axis(self):
14
+ # GH#54257
15
+ ser = Series(range(3))
16
+
17
+ msg = "No axis named 2 for object type Series"
18
+ with pytest.raises(ValueError, match=msg):
19
+ ser.argsort(axis=2)
20
+
21
+ def test_argsort_numpy(self, datetime_series):
22
+ ser = datetime_series
23
+
24
+ res = np.argsort(ser).values
25
+ expected = np.argsort(np.array(ser))
26
+ tm.assert_numpy_array_equal(res, expected)
27
+
28
+ # with missing values
29
+ ts = ser.copy()
30
+ ts[::2] = np.nan
31
+
32
+ msg = "The behavior of Series.argsort in the presence of NA values"
33
+ with tm.assert_produces_warning(
34
+ FutureWarning, match=msg, check_stacklevel=False
35
+ ):
36
+ result = np.argsort(ts)[1::2]
37
+ expected = np.argsort(np.array(ts.dropna()))
38
+
39
+ tm.assert_numpy_array_equal(result.values, expected)
40
+
41
+ def test_argsort(self, datetime_series):
42
+ argsorted = datetime_series.argsort()
43
+ assert issubclass(argsorted.dtype.type, np.integer)
44
+
45
+ def test_argsort_dt64(self, unit):
46
+ # GH#2967 (introduced bug in 0.11-dev I think)
47
+ ser = Series(
48
+ [Timestamp(f"201301{i:02d}") for i in range(1, 6)], dtype=f"M8[{unit}]"
49
+ )
50
+ assert ser.dtype == f"datetime64[{unit}]"
51
+ shifted = ser.shift(-1)
52
+ assert shifted.dtype == f"datetime64[{unit}]"
53
+ assert isna(shifted[4])
54
+
55
+ result = ser.argsort()
56
+ expected = Series(range(5), dtype=np.intp)
57
+ tm.assert_series_equal(result, expected)
58
+
59
+ msg = "The behavior of Series.argsort in the presence of NA values"
60
+ with tm.assert_produces_warning(FutureWarning, match=msg):
61
+ result = shifted.argsort()
62
+ expected = Series(list(range(4)) + [-1], dtype=np.intp)
63
+ tm.assert_series_equal(result, expected)
64
+
65
+ def test_argsort_stable(self):
66
+ ser = Series(np.random.default_rng(2).integers(0, 100, size=10000))
67
+ mindexer = ser.argsort(kind="mergesort")
68
+ qindexer = ser.argsort()
69
+
70
+ mexpected = np.argsort(ser.values, kind="mergesort")
71
+ qexpected = np.argsort(ser.values, kind="quicksort")
72
+
73
+ tm.assert_series_equal(mindexer.astype(np.intp), Series(mexpected))
74
+ tm.assert_series_equal(qindexer.astype(np.intp), Series(qexpected))
75
+ msg = (
76
+ r"ndarray Expected type <class 'numpy\.ndarray'>, "
77
+ r"found <class 'pandas\.core\.series\.Series'> instead"
78
+ )
79
+ with pytest.raises(AssertionError, match=msg):
80
+ tm.assert_numpy_array_equal(qindexer, mindexer)
81
+
82
+ def test_argsort_preserve_name(self, datetime_series):
83
+ result = datetime_series.argsort()
84
+ assert result.name == datetime_series.name
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_asof.py ADDED
@@ -0,0 +1,205 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas._libs.tslibs import IncompatibleFrequency
5
+
6
+ from pandas import (
7
+ DatetimeIndex,
8
+ PeriodIndex,
9
+ Series,
10
+ Timestamp,
11
+ date_range,
12
+ isna,
13
+ notna,
14
+ offsets,
15
+ period_range,
16
+ )
17
+ import pandas._testing as tm
18
+
19
+
20
+ class TestSeriesAsof:
21
+ def test_asof_nanosecond_index_access(self):
22
+ ts = Timestamp("20130101").as_unit("ns")._value
23
+ dti = DatetimeIndex([ts + 50 + i for i in range(100)])
24
+ ser = Series(np.random.default_rng(2).standard_normal(100), index=dti)
25
+
26
+ first_value = ser.asof(ser.index[0])
27
+
28
+ # GH#46903 previously incorrectly was "day"
29
+ assert dti.resolution == "nanosecond"
30
+
31
+ # this used to not work bc parsing was done by dateutil that didn't
32
+ # handle nanoseconds
33
+ assert first_value == ser["2013-01-01 00:00:00.000000050"]
34
+
35
+ expected_ts = np.datetime64("2013-01-01 00:00:00.000000050", "ns")
36
+ assert first_value == ser[Timestamp(expected_ts)]
37
+
38
+ def test_basic(self):
39
+ # array or list or dates
40
+ N = 50
41
+ rng = date_range("1/1/1990", periods=N, freq="53s")
42
+ ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)
43
+ ts.iloc[15:30] = np.nan
44
+ dates = date_range("1/1/1990", periods=N * 3, freq="25s")
45
+
46
+ result = ts.asof(dates)
47
+ assert notna(result).all()
48
+ lb = ts.index[14]
49
+ ub = ts.index[30]
50
+
51
+ result = ts.asof(list(dates))
52
+ assert notna(result).all()
53
+ lb = ts.index[14]
54
+ ub = ts.index[30]
55
+
56
+ mask = (result.index >= lb) & (result.index < ub)
57
+ rs = result[mask]
58
+ assert (rs == ts[lb]).all()
59
+
60
+ val = result[result.index[result.index >= ub][0]]
61
+ assert ts[ub] == val
62
+
63
+ def test_scalar(self):
64
+ N = 30
65
+ rng = date_range("1/1/1990", periods=N, freq="53s")
66
+ # Explicit cast to float avoid implicit cast when setting nan
67
+ ts = Series(np.arange(N), index=rng, dtype="float")
68
+ ts.iloc[5:10] = np.nan
69
+ ts.iloc[15:20] = np.nan
70
+
71
+ val1 = ts.asof(ts.index[7])
72
+ val2 = ts.asof(ts.index[19])
73
+
74
+ assert val1 == ts.iloc[4]
75
+ assert val2 == ts.iloc[14]
76
+
77
+ # accepts strings
78
+ val1 = ts.asof(str(ts.index[7]))
79
+ assert val1 == ts.iloc[4]
80
+
81
+ # in there
82
+ result = ts.asof(ts.index[3])
83
+ assert result == ts.iloc[3]
84
+
85
+ # no as of value
86
+ d = ts.index[0] - offsets.BDay()
87
+ assert np.isnan(ts.asof(d))
88
+
89
+ def test_with_nan(self):
90
+ # basic asof test
91
+ rng = date_range("1/1/2000", "1/2/2000", freq="4h")
92
+ s = Series(np.arange(len(rng)), index=rng)
93
+ r = s.resample("2h").mean()
94
+
95
+ result = r.asof(r.index)
96
+ expected = Series(
97
+ [0, 0, 1, 1, 2, 2, 3, 3, 4, 4, 5, 5, 6.0],
98
+ index=date_range("1/1/2000", "1/2/2000", freq="2h"),
99
+ )
100
+ tm.assert_series_equal(result, expected)
101
+
102
+ r.iloc[3:5] = np.nan
103
+ result = r.asof(r.index)
104
+ expected = Series(
105
+ [0, 0, 1, 1, 1, 1, 3, 3, 4, 4, 5, 5, 6.0],
106
+ index=date_range("1/1/2000", "1/2/2000", freq="2h"),
107
+ )
108
+ tm.assert_series_equal(result, expected)
109
+
110
+ r.iloc[-3:] = np.nan
111
+ result = r.asof(r.index)
112
+ expected = Series(
113
+ [0, 0, 1, 1, 1, 1, 3, 3, 4, 4, 4, 4, 4.0],
114
+ index=date_range("1/1/2000", "1/2/2000", freq="2h"),
115
+ )
116
+ tm.assert_series_equal(result, expected)
117
+
118
+ def test_periodindex(self):
119
+ # array or list or dates
120
+ N = 50
121
+ rng = period_range("1/1/1990", periods=N, freq="h")
122
+ ts = Series(np.random.default_rng(2).standard_normal(N), index=rng)
123
+ ts.iloc[15:30] = np.nan
124
+ dates = date_range("1/1/1990", periods=N * 3, freq="37min")
125
+
126
+ result = ts.asof(dates)
127
+ assert notna(result).all()
128
+ lb = ts.index[14]
129
+ ub = ts.index[30]
130
+
131
+ result = ts.asof(list(dates))
132
+ assert notna(result).all()
133
+ lb = ts.index[14]
134
+ ub = ts.index[30]
135
+
136
+ pix = PeriodIndex(result.index.values, freq="h")
137
+ mask = (pix >= lb) & (pix < ub)
138
+ rs = result[mask]
139
+ assert (rs == ts[lb]).all()
140
+
141
+ ts.iloc[5:10] = np.nan
142
+ ts.iloc[15:20] = np.nan
143
+
144
+ val1 = ts.asof(ts.index[7])
145
+ val2 = ts.asof(ts.index[19])
146
+
147
+ assert val1 == ts.iloc[4]
148
+ assert val2 == ts.iloc[14]
149
+
150
+ # accepts strings
151
+ val1 = ts.asof(str(ts.index[7]))
152
+ assert val1 == ts.iloc[4]
153
+
154
+ # in there
155
+ assert ts.asof(ts.index[3]) == ts.iloc[3]
156
+
157
+ # no as of value
158
+ d = ts.index[0].to_timestamp() - offsets.BDay()
159
+ assert isna(ts.asof(d))
160
+
161
+ # Mismatched freq
162
+ msg = "Input has different freq"
163
+ with pytest.raises(IncompatibleFrequency, match=msg):
164
+ ts.asof(rng.asfreq("D"))
165
+
166
+ def test_errors(self):
167
+ s = Series(
168
+ [1, 2, 3],
169
+ index=[Timestamp("20130101"), Timestamp("20130103"), Timestamp("20130102")],
170
+ )
171
+
172
+ # non-monotonic
173
+ assert not s.index.is_monotonic_increasing
174
+ with pytest.raises(ValueError, match="requires a sorted index"):
175
+ s.asof(s.index[0])
176
+
177
+ # subset with Series
178
+ N = 10
179
+ rng = date_range("1/1/1990", periods=N, freq="53s")
180
+ s = Series(np.random.default_rng(2).standard_normal(N), index=rng)
181
+ with pytest.raises(ValueError, match="not valid for Series"):
182
+ s.asof(s.index[0], subset="foo")
183
+
184
+ def test_all_nans(self):
185
+ # GH 15713
186
+ # series is all nans
187
+
188
+ # testing non-default indexes
189
+ N = 50
190
+ rng = date_range("1/1/1990", periods=N, freq="53s")
191
+
192
+ dates = date_range("1/1/1990", periods=N * 3, freq="25s")
193
+ result = Series(np.nan, index=rng).asof(dates)
194
+ expected = Series(np.nan, index=dates)
195
+ tm.assert_series_equal(result, expected)
196
+
197
+ # testing scalar input
198
+ date = date_range("1/1/1990", periods=N * 3, freq="25s")[0]
199
+ result = Series(np.nan, index=rng).asof(date)
200
+ assert isna(result)
201
+
202
+ # test name is propagated
203
+ result = Series(np.nan, index=[1, 2, 3, 4], name="test").asof([4, 5])
204
+ expected = Series(np.nan, index=[4, 5], name="test")
205
+ tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_astype.py ADDED
@@ -0,0 +1,683 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ datetime,
3
+ timedelta,
4
+ )
5
+ from importlib import reload
6
+ import string
7
+ import sys
8
+
9
+ import numpy as np
10
+ import pytest
11
+
12
+ from pandas._libs.tslibs import iNaT
13
+ import pandas.util._test_decorators as td
14
+
15
+ from pandas import (
16
+ NA,
17
+ Categorical,
18
+ CategoricalDtype,
19
+ DatetimeTZDtype,
20
+ Index,
21
+ Interval,
22
+ NaT,
23
+ Series,
24
+ Timedelta,
25
+ Timestamp,
26
+ cut,
27
+ date_range,
28
+ to_datetime,
29
+ )
30
+ import pandas._testing as tm
31
+
32
+
33
+ def rand_str(nchars: int) -> str:
34
+ """
35
+ Generate one random byte string.
36
+ """
37
+ RANDS_CHARS = np.array(
38
+ list(string.ascii_letters + string.digits), dtype=(np.str_, 1)
39
+ )
40
+ return "".join(np.random.default_rng(2).choice(RANDS_CHARS, nchars))
41
+
42
+
43
+ class TestAstypeAPI:
44
+ def test_astype_unitless_dt64_raises(self):
45
+ # GH#47844
46
+ ser = Series(["1970-01-01", "1970-01-01", "1970-01-01"], dtype="datetime64[ns]")
47
+ df = ser.to_frame()
48
+
49
+ msg = "Casting to unit-less dtype 'datetime64' is not supported"
50
+ with pytest.raises(TypeError, match=msg):
51
+ ser.astype(np.datetime64)
52
+ with pytest.raises(TypeError, match=msg):
53
+ df.astype(np.datetime64)
54
+ with pytest.raises(TypeError, match=msg):
55
+ ser.astype("datetime64")
56
+ with pytest.raises(TypeError, match=msg):
57
+ df.astype("datetime64")
58
+
59
+ def test_arg_for_errors_in_astype(self):
60
+ # see GH#14878
61
+ ser = Series([1, 2, 3])
62
+
63
+ msg = (
64
+ r"Expected value of kwarg 'errors' to be one of \['raise', "
65
+ r"'ignore'\]\. Supplied value is 'False'"
66
+ )
67
+ with pytest.raises(ValueError, match=msg):
68
+ ser.astype(np.float64, errors=False)
69
+
70
+ ser.astype(np.int8, errors="raise")
71
+
72
+ @pytest.mark.parametrize("dtype_class", [dict, Series])
73
+ def test_astype_dict_like(self, dtype_class):
74
+ # see GH#7271
75
+ ser = Series(range(0, 10, 2), name="abc")
76
+
77
+ dt1 = dtype_class({"abc": str})
78
+ result = ser.astype(dt1)
79
+ expected = Series(["0", "2", "4", "6", "8"], name="abc", dtype=object)
80
+ tm.assert_series_equal(result, expected)
81
+
82
+ dt2 = dtype_class({"abc": "float64"})
83
+ result = ser.astype(dt2)
84
+ expected = Series([0.0, 2.0, 4.0, 6.0, 8.0], dtype="float64", name="abc")
85
+ tm.assert_series_equal(result, expected)
86
+
87
+ dt3 = dtype_class({"abc": str, "def": str})
88
+ msg = (
89
+ "Only the Series name can be used for the key in Series dtype "
90
+ r"mappings\."
91
+ )
92
+ with pytest.raises(KeyError, match=msg):
93
+ ser.astype(dt3)
94
+
95
+ dt4 = dtype_class({0: str})
96
+ with pytest.raises(KeyError, match=msg):
97
+ ser.astype(dt4)
98
+
99
+ # GH#16717
100
+ # if dtypes provided is empty, it should error
101
+ if dtype_class is Series:
102
+ dt5 = dtype_class({}, dtype=object)
103
+ else:
104
+ dt5 = dtype_class({})
105
+
106
+ with pytest.raises(KeyError, match=msg):
107
+ ser.astype(dt5)
108
+
109
+
110
+ class TestAstype:
111
+ @pytest.mark.parametrize("tz", [None, "UTC", "US/Pacific"])
112
+ def test_astype_object_to_dt64_non_nano(self, tz):
113
+ # GH#55756, GH#54620
114
+ ts = Timestamp("2999-01-01")
115
+ dtype = "M8[us]"
116
+ if tz is not None:
117
+ dtype = f"M8[us, {tz}]"
118
+ vals = [ts, "2999-01-02 03:04:05.678910", 2500]
119
+ ser = Series(vals, dtype=object)
120
+ result = ser.astype(dtype)
121
+
122
+ # The 2500 is interpreted as microseconds, consistent with what
123
+ # we would get if we created DatetimeIndexes from vals[:2] and vals[2:]
124
+ # and concated the results.
125
+ pointwise = [
126
+ vals[0].tz_localize(tz),
127
+ Timestamp(vals[1], tz=tz),
128
+ to_datetime(vals[2], unit="us", utc=True).tz_convert(tz),
129
+ ]
130
+ exp_vals = [x.as_unit("us").asm8 for x in pointwise]
131
+ exp_arr = np.array(exp_vals, dtype="M8[us]")
132
+ expected = Series(exp_arr, dtype="M8[us]")
133
+ if tz is not None:
134
+ expected = expected.dt.tz_localize("UTC").dt.tz_convert(tz)
135
+ tm.assert_series_equal(result, expected)
136
+
137
+ def test_astype_mixed_object_to_dt64tz(self):
138
+ # pre-2.0 this raised ValueError bc of tz mismatch
139
+ # xref GH#32581
140
+ ts = Timestamp("2016-01-04 05:06:07", tz="US/Pacific")
141
+ ts2 = ts.tz_convert("Asia/Tokyo")
142
+
143
+ ser = Series([ts, ts2], dtype=object)
144
+ res = ser.astype("datetime64[ns, Europe/Brussels]")
145
+ expected = Series(
146
+ [ts.tz_convert("Europe/Brussels"), ts2.tz_convert("Europe/Brussels")],
147
+ dtype="datetime64[ns, Europe/Brussels]",
148
+ )
149
+ tm.assert_series_equal(res, expected)
150
+
151
+ @pytest.mark.parametrize("dtype", np.typecodes["All"])
152
+ def test_astype_empty_constructor_equality(self, dtype):
153
+ # see GH#15524
154
+
155
+ if dtype not in (
156
+ "S",
157
+ "V", # poor support (if any) currently
158
+ "M",
159
+ "m", # Generic timestamps raise a ValueError. Already tested.
160
+ ):
161
+ init_empty = Series([], dtype=dtype)
162
+ as_type_empty = Series([]).astype(dtype)
163
+ tm.assert_series_equal(init_empty, as_type_empty)
164
+
165
+ @pytest.mark.parametrize("dtype", [str, np.str_])
166
+ @pytest.mark.parametrize(
167
+ "series",
168
+ [
169
+ Series([string.digits * 10, rand_str(63), rand_str(64), rand_str(1000)]),
170
+ Series([string.digits * 10, rand_str(63), rand_str(64), np.nan, 1.0]),
171
+ ],
172
+ )
173
+ def test_astype_str_map(self, dtype, series, using_infer_string):
174
+ # see GH#4405
175
+ result = series.astype(dtype)
176
+ expected = series.map(str)
177
+ if using_infer_string:
178
+ expected = expected.astype(object)
179
+ tm.assert_series_equal(result, expected)
180
+
181
+ def test_astype_float_to_period(self):
182
+ result = Series([np.nan]).astype("period[D]")
183
+ expected = Series([NaT], dtype="period[D]")
184
+ tm.assert_series_equal(result, expected)
185
+
186
+ def test_astype_no_pandas_dtype(self):
187
+ # https://github.com/pandas-dev/pandas/pull/24866
188
+ ser = Series([1, 2], dtype="int64")
189
+ # Don't have NumpyEADtype in the public API, so we use `.array.dtype`,
190
+ # which is a NumpyEADtype.
191
+ result = ser.astype(ser.array.dtype)
192
+ tm.assert_series_equal(result, ser)
193
+
194
+ @pytest.mark.parametrize("dtype", [np.datetime64, np.timedelta64])
195
+ def test_astype_generic_timestamp_no_frequency(self, dtype, request):
196
+ # see GH#15524, GH#15987
197
+ data = [1]
198
+ ser = Series(data)
199
+
200
+ if np.dtype(dtype).name not in ["timedelta64", "datetime64"]:
201
+ mark = pytest.mark.xfail(reason="GH#33890 Is assigned ns unit")
202
+ request.applymarker(mark)
203
+
204
+ msg = (
205
+ rf"The '{dtype.__name__}' dtype has no unit\. "
206
+ rf"Please pass in '{dtype.__name__}\[ns\]' instead."
207
+ )
208
+ with pytest.raises(ValueError, match=msg):
209
+ ser.astype(dtype)
210
+
211
+ def test_astype_dt64_to_str(self):
212
+ # GH#10442 : testing astype(str) is correct for Series/DatetimeIndex
213
+ dti = date_range("2012-01-01", periods=3)
214
+ result = Series(dti).astype(str)
215
+ expected = Series(["2012-01-01", "2012-01-02", "2012-01-03"], dtype=object)
216
+ tm.assert_series_equal(result, expected)
217
+
218
+ def test_astype_dt64tz_to_str(self):
219
+ # GH#10442 : testing astype(str) is correct for Series/DatetimeIndex
220
+ dti_tz = date_range("2012-01-01", periods=3, tz="US/Eastern")
221
+ result = Series(dti_tz).astype(str)
222
+ expected = Series(
223
+ [
224
+ "2012-01-01 00:00:00-05:00",
225
+ "2012-01-02 00:00:00-05:00",
226
+ "2012-01-03 00:00:00-05:00",
227
+ ],
228
+ dtype=object,
229
+ )
230
+ tm.assert_series_equal(result, expected)
231
+
232
+ def test_astype_datetime(self, unit):
233
+ ser = Series(iNaT, dtype=f"M8[{unit}]", index=range(5))
234
+
235
+ ser = ser.astype("O")
236
+ assert ser.dtype == np.object_
237
+
238
+ ser = Series([datetime(2001, 1, 2, 0, 0)])
239
+
240
+ ser = ser.astype("O")
241
+ assert ser.dtype == np.object_
242
+
243
+ ser = Series(
244
+ [datetime(2001, 1, 2, 0, 0) for i in range(3)], dtype=f"M8[{unit}]"
245
+ )
246
+
247
+ ser[1] = np.nan
248
+ assert ser.dtype == f"M8[{unit}]"
249
+
250
+ ser = ser.astype("O")
251
+ assert ser.dtype == np.object_
252
+
253
+ def test_astype_datetime64tz(self):
254
+ ser = Series(date_range("20130101", periods=3, tz="US/Eastern"))
255
+
256
+ # astype
257
+ result = ser.astype(object)
258
+ expected = Series(ser.astype(object), dtype=object)
259
+ tm.assert_series_equal(result, expected)
260
+
261
+ result = Series(ser.values).dt.tz_localize("UTC").dt.tz_convert(ser.dt.tz)
262
+ tm.assert_series_equal(result, ser)
263
+
264
+ # astype - object, preserves on construction
265
+ result = Series(ser.astype(object))
266
+ expected = ser.astype(object)
267
+ tm.assert_series_equal(result, expected)
268
+
269
+ # astype - datetime64[ns, tz]
270
+ msg = "Cannot use .astype to convert from timezone-naive"
271
+ with pytest.raises(TypeError, match=msg):
272
+ # dt64->dt64tz astype deprecated
273
+ Series(ser.values).astype("datetime64[ns, US/Eastern]")
274
+
275
+ with pytest.raises(TypeError, match=msg):
276
+ # dt64->dt64tz astype deprecated
277
+ Series(ser.values).astype(ser.dtype)
278
+
279
+ result = ser.astype("datetime64[ns, CET]")
280
+ expected = Series(date_range("20130101 06:00:00", periods=3, tz="CET"))
281
+ tm.assert_series_equal(result, expected)
282
+
283
+ def test_astype_str_cast_dt64(self):
284
+ # see GH#9757
285
+ ts = Series([Timestamp("2010-01-04 00:00:00")])
286
+ res = ts.astype(str)
287
+
288
+ expected = Series(["2010-01-04"], dtype=object)
289
+ tm.assert_series_equal(res, expected)
290
+
291
+ ts = Series([Timestamp("2010-01-04 00:00:00", tz="US/Eastern")])
292
+ res = ts.astype(str)
293
+
294
+ expected = Series(["2010-01-04 00:00:00-05:00"], dtype=object)
295
+ tm.assert_series_equal(res, expected)
296
+
297
+ def test_astype_str_cast_td64(self):
298
+ # see GH#9757
299
+
300
+ td = Series([Timedelta(1, unit="d")])
301
+ ser = td.astype(str)
302
+
303
+ expected = Series(["1 days"], dtype=object)
304
+ tm.assert_series_equal(ser, expected)
305
+
306
+ def test_dt64_series_astype_object(self):
307
+ dt64ser = Series(date_range("20130101", periods=3))
308
+ result = dt64ser.astype(object)
309
+ assert isinstance(result.iloc[0], datetime)
310
+ assert result.dtype == np.object_
311
+
312
+ def test_td64_series_astype_object(self):
313
+ tdser = Series(["59 Days", "59 Days", "NaT"], dtype="timedelta64[ns]")
314
+ result = tdser.astype(object)
315
+ assert isinstance(result.iloc[0], timedelta)
316
+ assert result.dtype == np.object_
317
+
318
+ @pytest.mark.parametrize(
319
+ "data, dtype",
320
+ [
321
+ (["x", "y", "z"], "string[python]"),
322
+ pytest.param(
323
+ ["x", "y", "z"],
324
+ "string[pyarrow]",
325
+ marks=td.skip_if_no("pyarrow"),
326
+ ),
327
+ (["x", "y", "z"], "category"),
328
+ (3 * [Timestamp("2020-01-01", tz="UTC")], None),
329
+ (3 * [Interval(0, 1)], None),
330
+ ],
331
+ )
332
+ @pytest.mark.parametrize("errors", ["raise", "ignore"])
333
+ def test_astype_ignores_errors_for_extension_dtypes(self, data, dtype, errors):
334
+ # https://github.com/pandas-dev/pandas/issues/35471
335
+ ser = Series(data, dtype=dtype)
336
+ if errors == "ignore":
337
+ expected = ser
338
+ result = ser.astype(float, errors="ignore")
339
+ tm.assert_series_equal(result, expected)
340
+ else:
341
+ msg = "(Cannot cast)|(could not convert)"
342
+ with pytest.raises((ValueError, TypeError), match=msg):
343
+ ser.astype(float, errors=errors)
344
+
345
+ @pytest.mark.parametrize("dtype", [np.float16, np.float32, np.float64])
346
+ def test_astype_from_float_to_str(self, dtype):
347
+ # https://github.com/pandas-dev/pandas/issues/36451
348
+ ser = Series([0.1], dtype=dtype)
349
+ result = ser.astype(str)
350
+ expected = Series(["0.1"], dtype=object)
351
+ tm.assert_series_equal(result, expected)
352
+
353
+ @pytest.mark.parametrize(
354
+ "value, string_value",
355
+ [
356
+ (None, "None"),
357
+ (np.nan, "nan"),
358
+ (NA, "<NA>"),
359
+ ],
360
+ )
361
+ def test_astype_to_str_preserves_na(self, value, string_value):
362
+ # https://github.com/pandas-dev/pandas/issues/36904
363
+ ser = Series(["a", "b", value], dtype=object)
364
+ result = ser.astype(str)
365
+ expected = Series(["a", "b", string_value], dtype=object)
366
+ tm.assert_series_equal(result, expected)
367
+
368
+ @pytest.mark.parametrize("dtype", ["float32", "float64", "int64", "int32"])
369
+ def test_astype(self, dtype):
370
+ ser = Series(np.random.default_rng(2).standard_normal(5), name="foo")
371
+ as_typed = ser.astype(dtype)
372
+
373
+ assert as_typed.dtype == dtype
374
+ assert as_typed.name == ser.name
375
+
376
+ @pytest.mark.parametrize("value", [np.nan, np.inf])
377
+ @pytest.mark.parametrize("dtype", [np.int32, np.int64])
378
+ def test_astype_cast_nan_inf_int(self, dtype, value):
379
+ # gh-14265: check NaN and inf raise error when converting to int
380
+ msg = "Cannot convert non-finite values \\(NA or inf\\) to integer"
381
+ ser = Series([value])
382
+
383
+ with pytest.raises(ValueError, match=msg):
384
+ ser.astype(dtype)
385
+
386
+ @pytest.mark.parametrize("dtype", [int, np.int8, np.int64])
387
+ def test_astype_cast_object_int_fail(self, dtype):
388
+ arr = Series(["car", "house", "tree", "1"])
389
+ msg = r"invalid literal for int\(\) with base 10: 'car'"
390
+ with pytest.raises(ValueError, match=msg):
391
+ arr.astype(dtype)
392
+
393
+ def test_astype_float_to_uint_negatives_raise(
394
+ self, float_numpy_dtype, any_unsigned_int_numpy_dtype
395
+ ):
396
+ # GH#45151 We don't cast negative numbers to nonsense values
397
+ # TODO: same for EA float/uint dtypes, signed integers?
398
+ arr = np.arange(5).astype(float_numpy_dtype) - 3 # includes negatives
399
+ ser = Series(arr)
400
+
401
+ msg = "Cannot losslessly cast from .* to .*"
402
+ with pytest.raises(ValueError, match=msg):
403
+ ser.astype(any_unsigned_int_numpy_dtype)
404
+
405
+ with pytest.raises(ValueError, match=msg):
406
+ ser.to_frame().astype(any_unsigned_int_numpy_dtype)
407
+
408
+ with pytest.raises(ValueError, match=msg):
409
+ # We currently catch and re-raise in Index.astype
410
+ Index(ser).astype(any_unsigned_int_numpy_dtype)
411
+
412
+ with pytest.raises(ValueError, match=msg):
413
+ ser.array.astype(any_unsigned_int_numpy_dtype)
414
+
415
+ def test_astype_cast_object_int(self):
416
+ arr = Series(["1", "2", "3", "4"], dtype=object)
417
+ result = arr.astype(int)
418
+
419
+ tm.assert_series_equal(result, Series(np.arange(1, 5)))
420
+
421
+ def test_astype_unicode(self, using_infer_string):
422
+ # see GH#7758: A bit of magic is required to set
423
+ # default encoding to utf-8
424
+ digits = string.digits
425
+ test_series = [
426
+ Series([digits * 10, rand_str(63), rand_str(64), rand_str(1000)]),
427
+ Series(["データーサイエンス、お前はもう死んでいる"]),
428
+ ]
429
+
430
+ former_encoding = None
431
+
432
+ if sys.getdefaultencoding() == "utf-8":
433
+ # GH#45326 as of 2.0 Series.astype matches Index.astype by handling
434
+ # bytes with obj.decode() instead of str(obj)
435
+ item = "野菜食べないとやばい"
436
+ ser = Series([item.encode()])
437
+ result = ser.astype(np.str_)
438
+ expected = Series([item], dtype=object)
439
+ tm.assert_series_equal(result, expected)
440
+
441
+ for ser in test_series:
442
+ res = ser.astype(np.str_)
443
+ expec = ser.map(str)
444
+ if using_infer_string:
445
+ expec = expec.astype(object)
446
+ tm.assert_series_equal(res, expec)
447
+
448
+ # Restore the former encoding
449
+ if former_encoding is not None and former_encoding != "utf-8":
450
+ reload(sys)
451
+ sys.setdefaultencoding(former_encoding)
452
+
453
+ def test_astype_bytes(self):
454
+ # GH#39474
455
+ result = Series(["foo", "bar", "baz"]).astype(bytes)
456
+ assert result.dtypes == np.dtype("S3")
457
+
458
+ def test_astype_nan_to_bool(self):
459
+ # GH#43018
460
+ ser = Series(np.nan, dtype="object")
461
+ result = ser.astype("bool")
462
+ expected = Series(True, dtype="bool")
463
+ tm.assert_series_equal(result, expected)
464
+
465
+ @pytest.mark.parametrize(
466
+ "dtype",
467
+ tm.ALL_INT_EA_DTYPES + tm.FLOAT_EA_DTYPES,
468
+ )
469
+ def test_astype_ea_to_datetimetzdtype(self, dtype):
470
+ # GH37553
471
+ ser = Series([4, 0, 9], dtype=dtype)
472
+ result = ser.astype(DatetimeTZDtype(tz="US/Pacific"))
473
+
474
+ expected = Series(
475
+ {
476
+ 0: Timestamp("1969-12-31 16:00:00.000000004-08:00", tz="US/Pacific"),
477
+ 1: Timestamp("1969-12-31 16:00:00.000000000-08:00", tz="US/Pacific"),
478
+ 2: Timestamp("1969-12-31 16:00:00.000000009-08:00", tz="US/Pacific"),
479
+ }
480
+ )
481
+
482
+ tm.assert_series_equal(result, expected)
483
+
484
+ def test_astype_retain_attrs(self, any_numpy_dtype):
485
+ # GH#44414
486
+ ser = Series([0, 1, 2, 3])
487
+ ser.attrs["Location"] = "Michigan"
488
+
489
+ result = ser.astype(any_numpy_dtype).attrs
490
+ expected = ser.attrs
491
+
492
+ tm.assert_dict_equal(expected, result)
493
+
494
+
495
+ class TestAstypeString:
496
+ @pytest.mark.parametrize(
497
+ "data, dtype",
498
+ [
499
+ ([True, NA], "boolean"),
500
+ (["A", NA], "category"),
501
+ (["2020-10-10", "2020-10-10"], "datetime64[ns]"),
502
+ (["2020-10-10", "2020-10-10", NaT], "datetime64[ns]"),
503
+ (
504
+ ["2012-01-01 00:00:00-05:00", NaT],
505
+ "datetime64[ns, US/Eastern]",
506
+ ),
507
+ ([1, None], "UInt16"),
508
+ (["1/1/2021", "2/1/2021"], "period[M]"),
509
+ (["1/1/2021", "2/1/2021", NaT], "period[M]"),
510
+ (["1 Day", "59 Days", NaT], "timedelta64[ns]"),
511
+ # currently no way to parse IntervalArray from a list of strings
512
+ ],
513
+ )
514
+ def test_astype_string_to_extension_dtype_roundtrip(
515
+ self, data, dtype, request, nullable_string_dtype
516
+ ):
517
+ if dtype == "boolean":
518
+ mark = pytest.mark.xfail(
519
+ reason="TODO StringArray.astype() with missing values #GH40566"
520
+ )
521
+ request.applymarker(mark)
522
+ # GH-40351
523
+ ser = Series(data, dtype=dtype)
524
+
525
+ # Note: just passing .astype(dtype) fails for dtype="category"
526
+ # with bc ser.dtype.categories will be object dtype whereas
527
+ # result.dtype.categories will have string dtype
528
+ result = ser.astype(nullable_string_dtype).astype(ser.dtype)
529
+ tm.assert_series_equal(result, ser)
530
+
531
+
532
+ class TestAstypeCategorical:
533
+ def test_astype_categorical_to_other(self):
534
+ cat = Categorical([f"{i} - {i + 499}" for i in range(0, 10000, 500)])
535
+ ser = Series(np.random.default_rng(2).integers(0, 10000, 100)).sort_values()
536
+ ser = cut(ser, range(0, 10500, 500), right=False, labels=cat)
537
+
538
+ expected = ser
539
+ tm.assert_series_equal(ser.astype("category"), expected)
540
+ tm.assert_series_equal(ser.astype(CategoricalDtype()), expected)
541
+ msg = r"Cannot cast object|string dtype to float64"
542
+ with pytest.raises(ValueError, match=msg):
543
+ ser.astype("float64")
544
+
545
+ cat = Series(Categorical(["a", "b", "b", "a", "a", "c", "c", "c"]))
546
+ exp = Series(["a", "b", "b", "a", "a", "c", "c", "c"], dtype=object)
547
+ tm.assert_series_equal(cat.astype("str"), exp)
548
+ s2 = Series(Categorical(["1", "2", "3", "4"]))
549
+ exp2 = Series([1, 2, 3, 4]).astype("int")
550
+ tm.assert_series_equal(s2.astype("int"), exp2)
551
+
552
+ # object don't sort correctly, so just compare that we have the same
553
+ # values
554
+ def cmp(a, b):
555
+ tm.assert_almost_equal(np.sort(np.unique(a)), np.sort(np.unique(b)))
556
+
557
+ expected = Series(np.array(ser.values), name="value_group")
558
+ cmp(ser.astype("object"), expected)
559
+ cmp(ser.astype(np.object_), expected)
560
+
561
+ # array conversion
562
+ tm.assert_almost_equal(np.array(ser), np.array(ser.values))
563
+
564
+ tm.assert_series_equal(ser.astype("category"), ser)
565
+ tm.assert_series_equal(ser.astype(CategoricalDtype()), ser)
566
+
567
+ roundtrip_expected = ser.cat.set_categories(
568
+ ser.cat.categories.sort_values()
569
+ ).cat.remove_unused_categories()
570
+ result = ser.astype("object").astype("category")
571
+ tm.assert_series_equal(result, roundtrip_expected)
572
+ result = ser.astype("object").astype(CategoricalDtype())
573
+ tm.assert_series_equal(result, roundtrip_expected)
574
+
575
+ def test_astype_categorical_invalid_conversions(self):
576
+ # invalid conversion (these are NOT a dtype)
577
+ cat = Categorical([f"{i} - {i + 499}" for i in range(0, 10000, 500)])
578
+ ser = Series(np.random.default_rng(2).integers(0, 10000, 100)).sort_values()
579
+ ser = cut(ser, range(0, 10500, 500), right=False, labels=cat)
580
+
581
+ msg = (
582
+ "dtype '<class 'pandas.core.arrays.categorical.Categorical'>' "
583
+ "not understood"
584
+ )
585
+ with pytest.raises(TypeError, match=msg):
586
+ ser.astype(Categorical)
587
+ with pytest.raises(TypeError, match=msg):
588
+ ser.astype("object").astype(Categorical)
589
+
590
+ def test_astype_categoricaldtype(self):
591
+ ser = Series(["a", "b", "a"])
592
+ result = ser.astype(CategoricalDtype(["a", "b"], ordered=True))
593
+ expected = Series(Categorical(["a", "b", "a"], ordered=True))
594
+ tm.assert_series_equal(result, expected)
595
+
596
+ result = ser.astype(CategoricalDtype(["a", "b"], ordered=False))
597
+ expected = Series(Categorical(["a", "b", "a"], ordered=False))
598
+ tm.assert_series_equal(result, expected)
599
+
600
+ result = ser.astype(CategoricalDtype(["a", "b", "c"], ordered=False))
601
+ expected = Series(
602
+ Categorical(["a", "b", "a"], categories=["a", "b", "c"], ordered=False)
603
+ )
604
+ tm.assert_series_equal(result, expected)
605
+ tm.assert_index_equal(result.cat.categories, Index(["a", "b", "c"]))
606
+
607
+ @pytest.mark.parametrize("name", [None, "foo"])
608
+ @pytest.mark.parametrize("dtype_ordered", [True, False])
609
+ @pytest.mark.parametrize("series_ordered", [True, False])
610
+ def test_astype_categorical_to_categorical(
611
+ self, name, dtype_ordered, series_ordered
612
+ ):
613
+ # GH#10696, GH#18593
614
+ s_data = list("abcaacbab")
615
+ s_dtype = CategoricalDtype(list("bac"), ordered=series_ordered)
616
+ ser = Series(s_data, dtype=s_dtype, name=name)
617
+
618
+ # unspecified categories
619
+ dtype = CategoricalDtype(ordered=dtype_ordered)
620
+ result = ser.astype(dtype)
621
+ exp_dtype = CategoricalDtype(s_dtype.categories, dtype_ordered)
622
+ expected = Series(s_data, name=name, dtype=exp_dtype)
623
+ tm.assert_series_equal(result, expected)
624
+
625
+ # different categories
626
+ dtype = CategoricalDtype(list("adc"), dtype_ordered)
627
+ result = ser.astype(dtype)
628
+ expected = Series(s_data, name=name, dtype=dtype)
629
+ tm.assert_series_equal(result, expected)
630
+
631
+ if dtype_ordered is False:
632
+ # not specifying ordered, so only test once
633
+ expected = ser
634
+ result = ser.astype("category")
635
+ tm.assert_series_equal(result, expected)
636
+
637
+ def test_astype_bool_missing_to_categorical(self):
638
+ # GH-19182
639
+ ser = Series([True, False, np.nan])
640
+ assert ser.dtypes == np.object_
641
+
642
+ result = ser.astype(CategoricalDtype(categories=[True, False]))
643
+ expected = Series(Categorical([True, False, np.nan], categories=[True, False]))
644
+ tm.assert_series_equal(result, expected)
645
+
646
+ def test_astype_categories_raises(self):
647
+ # deprecated GH#17636, removed in GH#27141
648
+ ser = Series(["a", "b", "a"])
649
+ with pytest.raises(TypeError, match="got an unexpected"):
650
+ ser.astype("category", categories=["a", "b"], ordered=True)
651
+
652
+ @pytest.mark.parametrize("items", [["a", "b", "c", "a"], [1, 2, 3, 1]])
653
+ def test_astype_from_categorical(self, items):
654
+ ser = Series(items)
655
+ exp = Series(Categorical(items))
656
+ res = ser.astype("category")
657
+ tm.assert_series_equal(res, exp)
658
+
659
+ def test_astype_from_categorical_with_keywords(self):
660
+ # with keywords
661
+ lst = ["a", "b", "c", "a"]
662
+ ser = Series(lst)
663
+ exp = Series(Categorical(lst, ordered=True))
664
+ res = ser.astype(CategoricalDtype(None, ordered=True))
665
+ tm.assert_series_equal(res, exp)
666
+
667
+ exp = Series(Categorical(lst, categories=list("abcdef"), ordered=True))
668
+ res = ser.astype(CategoricalDtype(list("abcdef"), ordered=True))
669
+ tm.assert_series_equal(res, exp)
670
+
671
+ def test_astype_timedelta64_with_np_nan(self):
672
+ # GH45798
673
+ result = Series([Timedelta(1), np.nan], dtype="timedelta64[ns]")
674
+ expected = Series([Timedelta(1), NaT], dtype="timedelta64[ns]")
675
+ tm.assert_series_equal(result, expected)
676
+
677
+ @td.skip_if_no("pyarrow")
678
+ def test_astype_int_na_string(self):
679
+ # GH#57418
680
+ ser = Series([12, NA], dtype="Int64[pyarrow]")
681
+ result = ser.astype("string[pyarrow]")
682
+ expected = Series(["12", NA], dtype="string[pyarrow]")
683
+ tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_clip.py ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas as pd
7
+ from pandas import (
8
+ Series,
9
+ Timestamp,
10
+ isna,
11
+ notna,
12
+ )
13
+ import pandas._testing as tm
14
+
15
+
16
class TestSeriesClip:
    def test_clip(self, datetime_series):
        median = datetime_series.median()

        # Clipping at the median pins the corresponding extreme to it.
        assert datetime_series.clip(lower=median).min() == median
        assert datetime_series.clip(upper=median).max() == median

        clipped = datetime_series.clip(-0.5, 0.5)
        via_numpy = np.clip(datetime_series, -0.5, 0.5)
        tm.assert_series_equal(clipped, via_numpy)
        # np.clip dispatches back to pandas, so the result stays a Series.
        assert isinstance(via_numpy, Series)

    def test_clip_types_and_nulls(self):
        candidates = [
            Series([np.nan, 1.0, 2.0, 3.0]),
            Series([None, "a", "b", "c"]),
            Series(pd.to_datetime([np.nan, 1, 2, 3], unit="D")),
        ]

        for ser in candidates:
            thresh = ser[2]
            lower = ser.clip(lower=thresh)
            upper = ser.clip(upper=thresh)
            # The threshold bounds the non-null values...
            assert lower[notna(lower)].min() == thresh
            assert upper[notna(upper)].max() == thresh
            # ...and clipping neither adds nor removes nulls.
            assert list(isna(ser)) == list(isna(lower))
            assert list(isna(ser)) == list(isna(upper))

    def test_series_clipping_with_na_values(self, any_numeric_ea_dtype, nulls_fixture):
        # GH#40581: clip must handle NA values without failing.
        if nulls_fixture is pd.NaT:
            # constructor will raise, see
            # test_constructor_mismatched_null_nullable_dtype
            pytest.skip("See test_constructor_mismatched_null_nullable_dtype")

        ser = Series([nulls_fixture, 1.0, 3.0], dtype=any_numeric_ea_dtype)

        tm.assert_series_equal(
            ser.clip(upper=2.0),
            Series([nulls_fixture, 1.0, 2.0], dtype=any_numeric_ea_dtype),
        )
        tm.assert_series_equal(
            ser.clip(lower=2.0),
            Series([nulls_fixture, 2.0, 3.0], dtype=any_numeric_ea_dtype),
        )

    def test_clip_with_na_args(self):
        """Should process np.nan argument as None"""
        # GH#17276
        ser = Series([1, 2, 3])

        tm.assert_series_equal(ser.clip(np.nan), Series([1, 2, 3]))
        tm.assert_series_equal(ser.clip(upper=np.nan, lower=np.nan), Series([1, 2, 3]))

        # GH#19992
        msg = "Downcasting behavior in Series and DataFrame methods 'where'"
        # TODO: avoid this warning here? seems like we should never be upcasting
        # in the first place?
        with tm.assert_produces_warning(FutureWarning, match=msg):
            res = ser.clip(lower=[0, 4, np.nan])
        tm.assert_series_equal(res, Series([1, 4, 3]))
        with tm.assert_produces_warning(FutureWarning, match=msg):
            res = ser.clip(upper=[1, np.nan, 1])
        tm.assert_series_equal(res, Series([1, 2, 1]))

        # GH#40420: an all-NaN listlike bound is a no-op.
        ser = Series([1, 2, 3])
        tm.assert_series_equal(ser, ser.clip(0, [np.nan, np.nan, np.nan]))

    def test_clip_against_series(self):
        # GH#6966: elementwise bounds supplied as Series.
        ser = Series([1.0, 1.0, 4.0])
        lower = Series([1.0, 2.0, 3.0])
        upper = Series([1.5, 2.5, 3.5])

        tm.assert_series_equal(ser.clip(lower, upper), Series([1.0, 2.0, 3.5]))
        tm.assert_series_equal(ser.clip(1.5, upper), Series([1.5, 1.5, 3.5]))

    @pytest.mark.parametrize("inplace", [True, False])
    @pytest.mark.parametrize("upper", [[1, 2, 3], np.asarray([1, 2, 3])])
    def test_clip_against_list_like(self, inplace, upper):
        # GH#15390: list/ndarray bounds, with and without inplace.
        original = Series([5, 6, 7])
        result = original.clip(upper=upper, inplace=inplace)
        if inplace:
            # inplace=True returns None; the mutation lands on `original`.
            result = original
        tm.assert_series_equal(result, Series([1, 2, 3]), check_exact=True)

    def test_clip_with_datetimes(self):
        # GH#11838: clipping against naive and tz-aware Timestamps.
        bound = Timestamp("2015-12-01 09:30:30")
        ser = Series(
            [Timestamp("2015-12-01 09:30:00"), Timestamp("2015-12-01 09:31:00")]
        )
        expected = Series(
            [Timestamp("2015-12-01 09:30:00"), Timestamp("2015-12-01 09:30:30")]
        )
        tm.assert_series_equal(ser.clip(upper=bound), expected)

        bound = Timestamp("2015-12-01 09:30:30", tz="US/Eastern")
        ser = Series(
            [
                Timestamp("2015-12-01 09:30:00", tz="US/Eastern"),
                Timestamp("2015-12-01 09:31:00", tz="US/Eastern"),
            ]
        )
        expected = Series(
            [
                Timestamp("2015-12-01 09:30:00", tz="US/Eastern"),
                Timestamp("2015-12-01 09:30:30", tz="US/Eastern"),
            ]
        )
        tm.assert_series_equal(ser.clip(upper=bound), expected)

    @pytest.mark.parametrize("dtype", [object, "M8[us]"])
    def test_clip_with_timestamps_and_oob_datetimes(self, dtype):
        # GH-42794: datetimes outside the ns Timestamp range still clip.
        ser = Series([datetime(1, 1, 1), datetime(9999, 9, 9)], dtype=dtype)
        result = ser.clip(lower=Timestamp.min, upper=Timestamp.max)
        tm.assert_series_equal(
            result, Series([Timestamp.min, Timestamp.max], dtype=dtype)
        )
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_copy.py ADDED
@@ -0,0 +1,91 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Series,
6
+ Timestamp,
7
+ )
8
+ import pandas._testing as tm
9
+
10
+
11
class TestCopy:
    @pytest.mark.parametrize("deep", ["default", None, False, True])
    def test_copy(self, deep, using_copy_on_write, warn_copy_on_write):
        ser = Series(np.arange(10), dtype="float64")

        # "default" exercises the no-argument call (deep=True is the default).
        copied = ser.copy() if deep == "default" else ser.copy(deep=deep)

        if using_copy_on_write:
            # INFO(CoW) a shallow copy doesn't yet copy the data
            # but parent will not be modified (CoW)
            if deep is None or deep is False:
                assert np.may_share_memory(ser.values, copied.values)
            else:
                assert not np.may_share_memory(ser.values, copied.values)

        with tm.assert_cow_warning(warn_copy_on_write and deep is False):
            copied[::2] = np.nan

        if deep is not False or using_copy_on_write:
            # Did not modify original Series
            assert np.isnan(copied[0])
            assert not np.isnan(ser[0])
        else:
            # we DID modify the original Series
            assert np.isnan(copied[0])
            assert np.isnan(ser[0])

    @pytest.mark.filterwarnings("ignore:Setting a value on a view:FutureWarning")
    @pytest.mark.parametrize("deep", ["default", None, False, True])
    def test_copy_tzaware(self, deep, using_copy_on_write):
        # GH#11794: same copy semantics for tz-aware values.
        expected = Series([Timestamp("2012/01/01", tz="UTC")])
        expected2 = Series([Timestamp("1999/01/01", tz="UTC")])

        ser = Series([Timestamp("2012/01/01", tz="UTC")])
        copied = ser.copy() if deep == "default" else ser.copy(deep=deep)

        if using_copy_on_write:
            # INFO(CoW) a shallow copy doesn't yet copy the data
            # but parent will not be modified (CoW)
            if deep is None or deep is False:
                assert np.may_share_memory(ser.values, copied.values)
            else:
                assert not np.may_share_memory(ser.values, copied.values)

        copied[0] = Timestamp("1999/01/01", tz="UTC")

        # default deep is True
        if deep is not False or using_copy_on_write:
            # Did not modify original Series
            tm.assert_series_equal(copied, expected2)
            tm.assert_series_equal(ser, expected)
        else:
            # we DID modify the original Series
            tm.assert_series_equal(copied, expected2)
            tm.assert_series_equal(ser, expected2)

    def test_copy_name(self, datetime_series):
        # The name attribute must survive a copy.
        assert datetime_series.copy().name == datetime_series.name

    def test_copy_index_name_checking(self, datetime_series):
        # don't want to be able to modify the index stored elsewhere after
        # making a copy
        datetime_series.index.name = None
        assert datetime_series.index.name is None
        assert datetime_series is datetime_series

        copied = datetime_series.copy()
        copied.index.name = "foo"
        assert datetime_series.index.name is None
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_describe.py ADDED
@@ -0,0 +1,203 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.compat.numpy import np_version_gte1p25
5
+
6
+ from pandas.core.dtypes.common import (
7
+ is_complex_dtype,
8
+ is_extension_array_dtype,
9
+ )
10
+
11
+ from pandas import (
12
+ NA,
13
+ Period,
14
+ Series,
15
+ Timedelta,
16
+ Timestamp,
17
+ date_range,
18
+ )
19
+ import pandas._testing as tm
20
+
21
+
22
class TestSeriesDescribe:
    def test_describe_ints(self):
        ser = Series([0, 1, 2, 3, 4], name="int_data")
        expected = Series(
            [5, 2, ser.std(), 0, 1, 2, 3, 4],
            name="int_data",
            index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_describe_bools(self):
        # Booleans get categorical-style summary stats.
        ser = Series([True, True, False, False, False], name="bool_data")
        expected = Series(
            [5, 2, False, 3], name="bool_data", index=["count", "unique", "top", "freq"]
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_describe_strs(self):
        ser = Series(["a", "a", "b", "c", "d"], name="str_data")
        expected = Series(
            [5, 4, "a", 2], name="str_data", index=["count", "unique", "top", "freq"]
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_describe_timedelta64(self):
        days = [Timedelta(f"{i} days") for i in range(1, 6)]
        ser = Series(days, name="timedelta_data")
        # Evenly spaced values: quantiles land exactly on the elements.
        expected = Series(
            [5, ser[2], ser.std(), ser[0], ser[1], ser[2], ser[3], ser[4]],
            name="timedelta_data",
            index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_describe_period(self):
        ser = Series(
            [Period("2020-01", "M"), Period("2020-01", "M"), Period("2019-12", "M")],
            name="period_data",
        )
        expected = Series(
            [3, 2, ser[0], 2],
            name="period_data",
            index=["count", "unique", "top", "freq"],
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_describe_empty_object(self):
        # https://github.com/pandas-dev/pandas/issues/27183
        ser = Series([None, None], dtype=object)
        expected = Series(
            [0, 0, np.nan, np.nan],
            dtype=object,
            index=["count", "unique", "top", "freq"],
        )
        tm.assert_series_equal(ser.describe(), expected)

        result = ser[:0].describe()
        tm.assert_series_equal(result, expected)
        # ensure NaN, not None
        assert np.isnan(result.iloc[2])
        assert np.isnan(result.iloc[3])

    def test_describe_with_tz(self, tz_naive_fixture):
        # GH 21332
        tz = tz_naive_fixture
        name = str(tz_naive_fixture)
        start = Timestamp(2018, 1, 1)
        end = Timestamp(2018, 1, 5)
        ser = Series(date_range(start, end, tz=tz), name=name)
        expected = Series(
            [
                5,
                Timestamp(2018, 1, 3).tz_localize(tz),
                start.tz_localize(tz),
                ser[1],
                ser[2],
                ser[3],
                end.tz_localize(tz),
            ],
            name=name,
            index=["count", "mean", "min", "25%", "50%", "75%", "max"],
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_describe_with_tz_numeric(self):
        name = tz = "CET"
        ser = Series(
            date_range(Timestamp(2018, 1, 1), Timestamp(2018, 1, 5), tz=tz), name=name
        )
        expected = Series(
            [
                5,
                Timestamp("2018-01-03 00:00:00", tz=tz),
                Timestamp("2018-01-01 00:00:00", tz=tz),
                Timestamp("2018-01-02 00:00:00", tz=tz),
                Timestamp("2018-01-03 00:00:00", tz=tz),
                Timestamp("2018-01-04 00:00:00", tz=tz),
                Timestamp("2018-01-05 00:00:00", tz=tz),
            ],
            name=name,
            index=["count", "mean", "min", "25%", "50%", "75%", "max"],
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_datetime_is_numeric_includes_datetime(self):
        ser = Series(date_range("2012", periods=3))
        expected = Series(
            [
                3,
                Timestamp("2012-01-02"),
                Timestamp("2012-01-01"),
                Timestamp("2012-01-01T12:00:00"),
                Timestamp("2012-01-02"),
                Timestamp("2012-01-02T12:00:00"),
                Timestamp("2012-01-03"),
            ],
            index=["count", "mean", "min", "25%", "50%", "75%", "max"],
        )
        tm.assert_series_equal(ser.describe(), expected)

    @pytest.mark.filterwarnings("ignore:Casting complex values to real discards")
    def test_numeric_result_dtype(self, any_numeric_dtype):
        # GH#48340 - describe should always return float on non-complex numeric input
        if is_extension_array_dtype(any_numeric_dtype):
            dtype = "Float64"
        else:
            dtype = "complex128" if is_complex_dtype(any_numeric_dtype) else None

        ser = Series([0, 1], dtype=any_numeric_dtype)
        if dtype == "complex128" and np_version_gte1p25:
            # Newer numpy rejects complex input to percentile computations.
            with pytest.raises(
                TypeError, match=r"^a must be an array of real numbers$"
            ):
                ser.describe()
            return
        expected = Series(
            [2.0, 0.5, ser.std(), 0, 0.25, 0.5, 0.75, 1.0],
            index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
            dtype=dtype,
        )
        tm.assert_series_equal(ser.describe(), expected)

    def test_describe_one_element_ea(self):
        # GH#52515: std of a single element is NA, not an error/warning.
        ser = Series([0.0], dtype="Float64")
        with tm.assert_produces_warning(None):
            result = ser.describe()
        expected = Series(
            [1, 0, NA, 0, 0, 0, 0, 0],
            dtype="Float64",
            index=["count", "mean", "std", "min", "25%", "50%", "75%", "max"],
        )
        tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_drop.py ADDED
@@ -0,0 +1,99 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas import (
4
+ Index,
5
+ Series,
6
+ )
7
+ import pandas._testing as tm
8
+ from pandas.api.types import is_bool_dtype
9
+
10
+
11
@pytest.mark.parametrize(
    "data, index, drop_labels, axis, expected_data, expected_index",
    [
        # Unique Index
        ([1, 2], ["one", "two"], ["two"], 0, [1], ["one"]),
        ([1, 2], ["one", "two"], ["two"], "rows", [1], ["one"]),
        ([1, 1, 2], ["one", "two", "one"], ["two"], 0, [1, 2], ["one", "one"]),
        # GH 5248 Non-Unique Index
        ([1, 1, 2], ["one", "two", "one"], "two", 0, [1, 2], ["one", "one"]),
        ([1, 1, 2], ["one", "two", "one"], ["one"], 0, [1], ["two"]),
        ([1, 1, 2], ["one", "two", "one"], "one", 0, [1], ["two"]),
    ],
)
def test_drop_unique_and_non_unique_index(
    data, index, axis, drop_labels, expected_data, expected_index
):
    # Dropping removes *all* rows matching the label(s), unique index or not.
    result = Series(data=data, index=index).drop(drop_labels, axis=axis)
    tm.assert_series_equal(result, Series(data=expected_data, index=expected_index))
31
+
32
+
33
@pytest.mark.parametrize(
    "data, index, drop_labels, axis, error_type, error_desc",
    [
        # single string/tuple-like
        (range(3), list("abc"), "bc", 0, KeyError, "not found in axis"),
        # bad axis
        (range(3), list("abc"), ("a",), 0, KeyError, "not found in axis"),
        (range(3), list("abc"), "one", "columns", ValueError, "No axis named columns"),
    ],
)
def test_drop_exception_raised(data, index, drop_labels, axis, error_type, error_desc):
    # Missing labels and invalid axes must raise with a clear message.
    with pytest.raises(error_type, match=error_desc):
        Series(data, index=index).drop(drop_labels, axis=axis)
47
+
48
+
49
def test_drop_with_ignore_errors():
    # errors='ignore' silently skips labels that are absent.
    ser = Series(range(3), index=list("abc"))
    tm.assert_series_equal(ser.drop("bc", errors="ignore"), ser)
    tm.assert_series_equal(ser.drop(["a", "d"], errors="ignore"), ser.iloc[1:])

    # GH 8522: boolean index labels are looked up, not positionally dropped.
    ser = Series([2, 3], index=[True, False])
    assert is_bool_dtype(ser.index)
    assert ser.index.dtype == bool
    tm.assert_series_equal(ser.drop(True), Series([3], index=[False]))
65
+
66
+
67
@pytest.mark.parametrize("index", [[1, 2, 3], [1, 1, 3]])
@pytest.mark.parametrize("drop_labels", [[], [1], [3]])
def test_drop_empty_list(index, drop_labels):
    # GH 21494: dropping from an all-NaN object Series keeps remaining labels.
    kept = [label for label in index if label not in drop_labels]
    result = Series(index=index, dtype=object).drop(drop_labels)
    tm.assert_series_equal(result, Series(index=kept, dtype=object))
75
+
76
+
77
@pytest.mark.parametrize(
    "data, index, drop_labels",
    [
        (None, [1, 2, 3], [1, 4]),
        (None, [1, 2, 2], [1, 4]),
        ([2, 3], [0, 1], [False, True]),
    ],
)
def test_drop_non_empty_list(data, index, drop_labels):
    # GH 21494 and GH 16877: any missing label in the list is an error.
    dtype = object if data is None else None
    ser = Series(data=data, index=index, dtype=dtype)
    with pytest.raises(KeyError, match="not found in axis"):
        ser.drop(drop_labels)
91
+
92
+
93
+ def test_drop_index_ea_dtype(any_numeric_ea_dtype):
94
+ # GH#45860
95
+ df = Series(100, index=Index([1, 2, 2], dtype=any_numeric_ea_dtype))
96
+ idx = Index([df.index[1]])
97
+ result = df.drop(idx)
98
+ expected = Series(100, index=Index([1], dtype=any_numeric_ea_dtype))
99
+ tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_dtypes.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+
4
+ class TestSeriesDtypes:
5
+ def test_dtype(self, datetime_series):
6
+ assert datetime_series.dtype == np.dtype("float64")
7
+ assert datetime_series.dtypes == np.dtype("float64")
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_equals.py ADDED
@@ -0,0 +1,145 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import nullcontext
2
+ import copy
3
+
4
+ import numpy as np
5
+ import pytest
6
+
7
+ from pandas._libs.missing import is_matching_na
8
+ from pandas.compat.numpy import np_version_gte1p25
9
+
10
+ from pandas.core.dtypes.common import is_float
11
+
12
+ from pandas import (
13
+ Index,
14
+ MultiIndex,
15
+ Series,
16
+ )
17
+ import pandas._testing as tm
18
+
19
+
20
@pytest.mark.parametrize(
    "arr, idx",
    [
        ([1, 2, 3, 4], [0, 2, 1, 3]),
        ([1, np.nan, 3, np.nan], [0, 2, 1, 3]),
        (
            [1, np.nan, 3, np.nan],
            MultiIndex.from_tuples([(0, "a"), (1, "b"), (2, "c"), (3, "c")]),
        ),
    ],
)
def test_equals(arr, idx):
    # A copy is equal; mutating one element breaks equality.
    left = Series(arr, index=idx)
    right = left.copy()
    assert left.equals(right)

    left[1] = 9
    assert not left.equals(right)
38
+
39
+
40
@pytest.mark.parametrize(
    "val", [1, 1.1, 1 + 1j, True, "abc", [1, 2], (1, 2), {1, 2}, {"a": 1}, None]
)
def test_equals_list_array(val):
    # GH20676 Verify equals operator for list of Numpy arrays
    arr = np.array([1, 2])
    left = Series([arr, arr])
    right = left.copy()
    assert left.equals(right)

    left[1] = val

    # Older numpy warns on array-vs-str comparison; suppress only that case.
    if isinstance(val, str) and not np_version_gte1p25:
        ctx = tm.assert_produces_warning(FutureWarning, check_stacklevel=False)
    else:
        ctx = nullcontext()
    with ctx:
        assert not left.equals(right)
59
+
60
+
61
def test_equals_false_negative():
    # GH8437 Verify false negative behavior of equals function for dtype object
    base = Series([False, np.nan])
    copy_of_base = base.copy()

    template = Series(index=range(2), dtype=object)
    s3, s4, s5, s6 = (template.copy() for _ in range(4))
    s3[:-1] = s4[:-1] = s5[0] = s6[0] = False

    assert base.equals(base)
    assert base.equals(copy_of_base)
    assert base.equals(s3)
    assert base.equals(s4)
    assert base.equals(s5)
    assert s5.equals(s6)
78
+
79
+
80
def test_equals_matching_nas():
    # Matching-but-not-identical NA objects still compare equal.
    for na in (np.datetime64("NaT"), np.timedelta64("NaT")):
        left = Series([na], dtype=object)
        right = Series([na], dtype=object)
        assert left.equals(right)
        # Index() infers datetime/timedelta dtype from the NaT and warns.
        with tm.assert_produces_warning(FutureWarning, match="Dtype inference"):
            assert Index(left).equals(Index(right))
        assert left.array.equals(right.array)

    left = Series([np.float64("NaN")], dtype=object)
    right = Series([np.float64("NaN")], dtype=object)
    assert left.equals(right)
    assert Index(left, dtype=left.dtype).equals(Index(right, dtype=right.dtype))
    assert left.array.equals(right.array)
101
+
102
+
103
+ def test_equals_mismatched_nas(nulls_fixture, nulls_fixture2):
104
+ # GH#39650
105
+ left = nulls_fixture
106
+ right = nulls_fixture2
107
+ if hasattr(right, "copy"):
108
+ right = right.copy()
109
+ else:
110
+ right = copy.copy(right)
111
+
112
+ ser = Series([left], dtype=object)
113
+ ser2 = Series([right], dtype=object)
114
+
115
+ if is_matching_na(left, right):
116
+ assert ser.equals(ser2)
117
+ elif (left is None and is_float(right)) or (right is None and is_float(left)):
118
+ assert ser.equals(ser2)
119
+ else:
120
+ assert not ser.equals(ser2)
121
+
122
+
123
def test_equals_none_vs_nan():
    # GH#39650: None and np.nan are treated as equal in object-dtype equals.
    with_none = Series([1, None], dtype=object)
    with_nan = Series([1, np.nan], dtype=object)

    assert with_none.equals(with_nan)
    assert Index(with_none, dtype=with_none.dtype).equals(
        Index(with_nan, dtype=with_nan.dtype)
    )
    assert with_none.array.equals(with_nan.array)
131
+
132
+
133
def test_equals_None_vs_float():
    # GH#44190: an all-None Series must not compare equal to real floats.
    floats = Series([-np.inf, np.nan, -1.0, 0.0, 1.0, 10 / 3, np.inf], dtype=object)
    nones = Series([None] * len(floats))

    # These were once (incorrectly) considered equal; check both directions
    # across Series, DataFrame, and Index.
    assert not floats.equals(nones)
    assert not nones.equals(floats)
    assert not floats.to_frame().equals(nones.to_frame())
    assert not nones.to_frame().equals(floats.to_frame())
    assert not Index(floats, dtype="object").equals(Index(nones, dtype="object"))
    assert not Index(nones, dtype="object").equals(Index(floats, dtype="object"))
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_explode.py ADDED
@@ -0,0 +1,175 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+
7
+
8
def test_basic():
    # Lists/tuples explode elementwise; NaN and empty containers yield NaN.
    ser = pd.Series([[0, 1, 2], np.nan, [], (3, 4)], index=list("abcd"), name="foo")
    out = ser.explode()
    expected = pd.Series(
        [0, 1, 2, np.nan, np.nan, 3, 4], index=list("aaabcdd"), dtype=object, name="foo"
    )
    tm.assert_series_equal(out, expected)
15
+
16
+
17
def test_mixed_type():
    # Mix of list, scalar NAs, empty array, and nested Series all explode.
    ser = pd.Series(
        [[0, 1, 2], np.nan, None, np.array([]), pd.Series(["a", "b"])], name="foo"
    )
    out = ser.explode()
    expected = pd.Series(
        [0, 1, 2, np.nan, None, np.nan, "a", "b"],
        index=[0, 0, 0, 1, 2, 3, 4, 4],
        dtype=object,
        name="foo",
    )
    tm.assert_series_equal(out, expected)
29
+
30
+
31
def test_empty():
    # Exploding an empty Series is a no-op (returns an equal copy).
    ser = pd.Series(dtype=object)
    tm.assert_series_equal(ser.explode(), ser.copy())
36
+
37
+
38
def test_nested_lists():
    # Only one level is exploded; inner lists stay intact.
    ser = pd.Series([[[1, 2, 3]], [1, 2], 1])
    out = ser.explode()
    tm.assert_series_equal(out, pd.Series([[1, 2, 3], 1, 2, 1], index=[0, 1, 1, 2]))
43
+
44
+
45
def test_multi_index():
    # MultiIndex labels are repeated for each exploded element.
    ser = pd.Series(
        [[0, 1, 2], np.nan, [], (3, 4)],
        name="foo",
        index=pd.MultiIndex.from_product([list("ab"), range(2)], names=["foo", "bar"]),
    )
    out = ser.explode()
    expected_index = pd.MultiIndex.from_tuples(
        [("a", 0), ("a", 0), ("a", 0), ("a", 1), ("b", 0), ("b", 1), ("b", 1)],
        names=["foo", "bar"],
    )
    expected = pd.Series(
        [0, 1, 2, np.nan, np.nan, 3, 4], index=expected_index, dtype=object, name="foo"
    )
    tm.assert_series_equal(out, expected)
60
+
61
+
62
def test_large():
    # Exploding an already-exploded (all-scalar) Series is idempotent.
    exploded_once = pd.Series([range(256)]).explode()
    tm.assert_series_equal(exploded_once.explode(), exploded_once)
66
+
67
+
68
def test_invert_array():
    # Exploding a Series of ExtensionArrays round-trips the original column.
    df = pd.DataFrame({"a": pd.date_range("20190101", periods=3, tz="UTC")})
    as_arrays = df.apply(lambda row: row.array, axis=1)
    tm.assert_series_equal(as_arrays.explode(), df["a"].rename())
74
+
75
+
76
@pytest.mark.parametrize(
    "s", [pd.Series([1, 2, 3]), pd.Series(pd.date_range("2019", periods=3, tz="UTC"))]
)
def test_non_object_dtype(s):
    # Non-object dtypes have no nested values, so explode is the identity.
    tm.assert_series_equal(s.explode(), s)
82
+
83
+
84
def test_typical_usecase():
    # Split a delimited column, explode it, and join back on the index.
    df = pd.DataFrame(
        [{"var1": "a,b,c", "var2": 1}, {"var1": "d,e,f", "var2": 2}],
        columns=["var1", "var2"],
    )
    exploded = df.var1.str.split(",").explode()
    joined = df[["var2"]].join(exploded)
    expected = pd.DataFrame(
        {"var2": [1, 1, 1, 2, 2, 2], "var1": list("abcdef")},
        columns=["var2", "var1"],
        index=[0, 0, 0, 1, 1, 1],
    )
    tm.assert_frame_equal(joined, expected)
97
+
98
+
99
def test_nested_EA():
    # A Series whose elements are DatetimeIndex (EA-backed) explodes into a
    # single tz-aware datetime Series.
    ser = pd.Series(
        [
            pd.date_range("20170101", periods=3, tz="UTC"),
            pd.date_range("20170104", periods=3, tz="UTC"),
        ]
    )
    out = ser.explode()
    expected = pd.Series(
        pd.date_range("20170101", periods=6, tz="UTC"), index=[0, 0, 0, 1, 1, 1]
    )
    tm.assert_series_equal(out, expected)
112
+
113
+
114
def test_duplicate_index():
    # GH 28005: duplicated index labels are repeated per exploded element.
    ser = pd.Series([[1, 2], [3, 4]], index=[0, 0])
    out = ser.explode()
    tm.assert_series_equal(
        out, pd.Series([1, 2, 3, 4], index=[0, 0, 0, 0], dtype=object)
    )
120
+
121
+
122
def test_ignore_index():
    # GH 34932: ignore_index=True yields a fresh RangeIndex.
    ser = pd.Series([[1, 2], [3, 4]])
    out = ser.explode(ignore_index=True)
    tm.assert_series_equal(out, pd.Series([1, 2, 3, 4], index=[0, 1, 2, 3], dtype=object))
128
+
129
+
130
def test_explode_sets():
    # https://github.com/pandas-dev/pandas/issues/35614
    # Set iteration order is unstable, so sort before comparing.
    ser = pd.Series([{"a", "b", "c"}], index=[1])
    out = ser.explode().sort_values()
    tm.assert_series_equal(out, pd.Series(["a", "b", "c"], index=[1, 1, 1]))
136
+
137
+
138
def test_explode_scalars_can_ignore_index():
    # https://github.com/pandas-dev/pandas/issues/40487
    # ignore_index resets the index even when nothing is exploded.
    ser = pd.Series([1, 2, 3], index=["a", "b", "c"])
    out = ser.explode(ignore_index=True)
    tm.assert_series_equal(out, pd.Series([1, 2, 3]))
144
+
145
+
146
@pytest.mark.parametrize("ignore_index", [True, False])
def test_explode_pyarrow_list_type(ignore_index):
    # GH 53602: exploding an ArrowDtype list column yields the element dtype.
    pa = pytest.importorskip("pyarrow")

    data = [
        [None, None],
        [1],
        [],
        [2, 3],
        None,
    ]
    ser = pd.Series(data, dtype=pd.ArrowDtype(pa.list_(pa.int64())))
    out = ser.explode(ignore_index=ignore_index)
    expected = pd.Series(
        data=[None, None, 1, None, 2, 3, None],
        index=None if ignore_index else [0, 0, 1, 2, 3, 3, 4],
        dtype=pd.ArrowDtype(pa.int64()),
    )
    tm.assert_series_equal(out, expected)
166
+
167
+
168
@pytest.mark.parametrize("ignore_index", [True, False])
def test_explode_pyarrow_non_list_type(ignore_index):
    # Non-list arrow dtypes have nothing to explode; values pass through.
    pa = pytest.importorskip("pyarrow")
    ser = pd.Series([1, 2, 3], dtype=pd.ArrowDtype(pa.int64()))
    out = ser.explode(ignore_index=ignore_index)
    tm.assert_series_equal(
        out, pd.Series([1, 2, 3], dtype="int64[pyarrow]", index=[0, 1, 2])
    )
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_fillna.py ADDED
@@ -0,0 +1,1155 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ datetime,
3
+ timedelta,
4
+ timezone,
5
+ )
6
+
7
+ import numpy as np
8
+ import pytest
9
+ import pytz
10
+
11
+ from pandas import (
12
+ Categorical,
13
+ DataFrame,
14
+ DatetimeIndex,
15
+ NaT,
16
+ Period,
17
+ Series,
18
+ Timedelta,
19
+ Timestamp,
20
+ date_range,
21
+ isna,
22
+ timedelta_range,
23
+ )
24
+ import pandas._testing as tm
25
+ from pandas.core.arrays import period_array
26
+
27
+
28
+ @pytest.mark.filterwarnings(
29
+ "ignore:(Series|DataFrame).fillna with 'method' is deprecated:FutureWarning"
30
+ )
31
+ class TestSeriesFillNA:
32
+ def test_fillna_nat(self):
33
+ series = Series([0, 1, 2, NaT._value], dtype="M8[ns]")
34
+
35
+ filled = series.fillna(method="pad")
36
+ filled2 = series.fillna(value=series.values[2])
37
+
38
+ expected = series.copy()
39
+ expected.iloc[3] = expected.iloc[2]
40
+
41
+ tm.assert_series_equal(filled, expected)
42
+ tm.assert_series_equal(filled2, expected)
43
+
44
+ df = DataFrame({"A": series})
45
+ filled = df.fillna(method="pad")
46
+ filled2 = df.fillna(value=series.values[2])
47
+ expected = DataFrame({"A": expected})
48
+ tm.assert_frame_equal(filled, expected)
49
+ tm.assert_frame_equal(filled2, expected)
50
+
51
+ series = Series([NaT._value, 0, 1, 2], dtype="M8[ns]")
52
+
53
+ filled = series.fillna(method="bfill")
54
+ filled2 = series.fillna(value=series[1])
55
+
56
+ expected = series.copy()
57
+ expected[0] = expected[1]
58
+
59
+ tm.assert_series_equal(filled, expected)
60
+ tm.assert_series_equal(filled2, expected)
61
+
62
+ df = DataFrame({"A": series})
63
+ filled = df.fillna(method="bfill")
64
+ filled2 = df.fillna(value=series[1])
65
+ expected = DataFrame({"A": expected})
66
+ tm.assert_frame_equal(filled, expected)
67
+ tm.assert_frame_equal(filled2, expected)
68
+
69
+ def test_fillna_value_or_method(self, datetime_series):
70
+ msg = "Cannot specify both 'value' and 'method'"
71
+ with pytest.raises(ValueError, match=msg):
72
+ datetime_series.fillna(value=0, method="ffill")
73
+
74
+ def test_fillna(self):
75
+ ts = Series(
76
+ [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
77
+ )
78
+
79
+ tm.assert_series_equal(ts, ts.fillna(method="ffill"))
80
+
81
+ ts.iloc[2] = np.nan
82
+
83
+ exp = Series([0.0, 1.0, 1.0, 3.0, 4.0], index=ts.index)
84
+ tm.assert_series_equal(ts.fillna(method="ffill"), exp)
85
+
86
+ exp = Series([0.0, 1.0, 3.0, 3.0, 4.0], index=ts.index)
87
+ tm.assert_series_equal(ts.fillna(method="backfill"), exp)
88
+
89
+ exp = Series([0.0, 1.0, 5.0, 3.0, 4.0], index=ts.index)
90
+ tm.assert_series_equal(ts.fillna(value=5), exp)
91
+
92
+ msg = "Must specify a fill 'value' or 'method'"
93
+ with pytest.raises(ValueError, match=msg):
94
+ ts.fillna()
95
+
96
+ def test_fillna_nonscalar(self):
97
+ # GH#5703
98
+ s1 = Series([np.nan])
99
+ s2 = Series([1])
100
+ result = s1.fillna(s2)
101
+ expected = Series([1.0])
102
+ tm.assert_series_equal(result, expected)
103
+ result = s1.fillna({})
104
+ tm.assert_series_equal(result, s1)
105
+ result = s1.fillna(Series((), dtype=object))
106
+ tm.assert_series_equal(result, s1)
107
+ result = s2.fillna(s1)
108
+ tm.assert_series_equal(result, s2)
109
+ result = s1.fillna({0: 1})
110
+ tm.assert_series_equal(result, expected)
111
+ result = s1.fillna({1: 1})
112
+ tm.assert_series_equal(result, Series([np.nan]))
113
+ result = s1.fillna({0: 1, 1: 1})
114
+ tm.assert_series_equal(result, expected)
115
+ result = s1.fillna(Series({0: 1, 1: 1}))
116
+ tm.assert_series_equal(result, expected)
117
+ result = s1.fillna(Series({0: 1, 1: 1}, index=[4, 5]))
118
+ tm.assert_series_equal(result, s1)
119
+
120
+ def test_fillna_aligns(self):
121
+ s1 = Series([0, 1, 2], list("abc"))
122
+ s2 = Series([0, np.nan, 2], list("bac"))
123
+ result = s2.fillna(s1)
124
+ expected = Series([0, 0, 2.0], list("bac"))
125
+ tm.assert_series_equal(result, expected)
126
+
127
+ def test_fillna_limit(self):
128
+ ser = Series(np.nan, index=[0, 1, 2])
129
+ result = ser.fillna(999, limit=1)
130
+ expected = Series([999, np.nan, np.nan], index=[0, 1, 2])
131
+ tm.assert_series_equal(result, expected)
132
+
133
+ result = ser.fillna(999, limit=2)
134
+ expected = Series([999, 999, np.nan], index=[0, 1, 2])
135
+ tm.assert_series_equal(result, expected)
136
+
137
+ def test_fillna_dont_cast_strings(self):
138
+ # GH#9043
139
+ # make sure a string representation of int/float values can be filled
140
+ # correctly without raising errors or being converted
141
+ vals = ["0", "1.5", "-0.3"]
142
+ for val in vals:
143
+ ser = Series([0, 1, np.nan, np.nan, 4], dtype="float64")
144
+ result = ser.fillna(val)
145
+ expected = Series([0, 1, val, val, 4], dtype="object")
146
+ tm.assert_series_equal(result, expected)
147
+
148
+ def test_fillna_consistency(self):
149
+ # GH#16402
150
+ # fillna with a tz aware to a tz-naive, should result in object
151
+
152
+ ser = Series([Timestamp("20130101"), NaT])
153
+
154
+ result = ser.fillna(Timestamp("20130101", tz="US/Eastern"))
155
+ expected = Series(
156
+ [Timestamp("20130101"), Timestamp("2013-01-01", tz="US/Eastern")],
157
+ dtype="object",
158
+ )
159
+ tm.assert_series_equal(result, expected)
160
+
161
+ result = ser.where([True, False], Timestamp("20130101", tz="US/Eastern"))
162
+ tm.assert_series_equal(result, expected)
163
+
164
+ result = ser.where([True, False], Timestamp("20130101", tz="US/Eastern"))
165
+ tm.assert_series_equal(result, expected)
166
+
167
+ # with a non-datetime
168
+ result = ser.fillna("foo")
169
+ expected = Series([Timestamp("20130101"), "foo"])
170
+ tm.assert_series_equal(result, expected)
171
+
172
+ # assignment
173
+ ser2 = ser.copy()
174
+ with tm.assert_produces_warning(FutureWarning, match="incompatible dtype"):
175
+ ser2[1] = "foo"
176
+ tm.assert_series_equal(ser2, expected)
177
+
178
+ def test_fillna_downcast(self):
179
+ # GH#15277
180
+ # infer int64 from float64
181
+ ser = Series([1.0, np.nan])
182
+ msg = "The 'downcast' keyword in fillna is deprecated"
183
+ with tm.assert_produces_warning(FutureWarning, match=msg):
184
+ result = ser.fillna(0, downcast="infer")
185
+ expected = Series([1, 0])
186
+ tm.assert_series_equal(result, expected)
187
+
188
+ # infer int64 from float64 when fillna value is a dict
189
+ ser = Series([1.0, np.nan])
190
+ with tm.assert_produces_warning(FutureWarning, match=msg):
191
+ result = ser.fillna({1: 0}, downcast="infer")
192
+ expected = Series([1, 0])
193
+ tm.assert_series_equal(result, expected)
194
+
195
+ def test_fillna_downcast_infer_objects_to_numeric(self):
196
+ # GH#44241 if we have object-dtype, 'downcast="infer"' should
197
+ # _actually_ infer
198
+
199
+ arr = np.arange(5).astype(object)
200
+ arr[3] = np.nan
201
+
202
+ ser = Series(arr)
203
+
204
+ msg = "The 'downcast' keyword in fillna is deprecated"
205
+ with tm.assert_produces_warning(FutureWarning, match=msg):
206
+ res = ser.fillna(3, downcast="infer")
207
+ expected = Series(np.arange(5), dtype=np.int64)
208
+ tm.assert_series_equal(res, expected)
209
+
210
+ msg = "The 'downcast' keyword in ffill is deprecated"
211
+ with tm.assert_produces_warning(FutureWarning, match=msg):
212
+ res = ser.ffill(downcast="infer")
213
+ expected = Series([0, 1, 2, 2, 4], dtype=np.int64)
214
+ tm.assert_series_equal(res, expected)
215
+
216
+ msg = "The 'downcast' keyword in bfill is deprecated"
217
+ with tm.assert_produces_warning(FutureWarning, match=msg):
218
+ res = ser.bfill(downcast="infer")
219
+ expected = Series([0, 1, 2, 4, 4], dtype=np.int64)
220
+ tm.assert_series_equal(res, expected)
221
+
222
+ # with a non-round float present, we will downcast to float64
223
+ ser[2] = 2.5
224
+
225
+ expected = Series([0, 1, 2.5, 3, 4], dtype=np.float64)
226
+ msg = "The 'downcast' keyword in fillna is deprecated"
227
+ with tm.assert_produces_warning(FutureWarning, match=msg):
228
+ res = ser.fillna(3, downcast="infer")
229
+ tm.assert_series_equal(res, expected)
230
+
231
+ msg = "The 'downcast' keyword in ffill is deprecated"
232
+ with tm.assert_produces_warning(FutureWarning, match=msg):
233
+ res = ser.ffill(downcast="infer")
234
+ expected = Series([0, 1, 2.5, 2.5, 4], dtype=np.float64)
235
+ tm.assert_series_equal(res, expected)
236
+
237
+ msg = "The 'downcast' keyword in bfill is deprecated"
238
+ with tm.assert_produces_warning(FutureWarning, match=msg):
239
+ res = ser.bfill(downcast="infer")
240
+ expected = Series([0, 1, 2.5, 4, 4], dtype=np.float64)
241
+ tm.assert_series_equal(res, expected)
242
+
243
+ def test_timedelta_fillna(self, frame_or_series, unit):
244
+ # GH#3371
245
+ ser = Series(
246
+ [
247
+ Timestamp("20130101"),
248
+ Timestamp("20130101"),
249
+ Timestamp("20130102"),
250
+ Timestamp("20130103 9:01:01"),
251
+ ],
252
+ dtype=f"M8[{unit}]",
253
+ )
254
+ td = ser.diff()
255
+ obj = frame_or_series(td).copy()
256
+
257
+ # reg fillna
258
+ result = obj.fillna(Timedelta(seconds=0))
259
+ expected = Series(
260
+ [
261
+ timedelta(0),
262
+ timedelta(0),
263
+ timedelta(1),
264
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
265
+ ],
266
+ dtype=f"m8[{unit}]",
267
+ )
268
+ expected = frame_or_series(expected)
269
+ tm.assert_equal(result, expected)
270
+
271
+ # GH#45746 pre-1.? ints were interpreted as seconds. then that was
272
+ # deprecated and changed to raise. In 2.0 it casts to common dtype,
273
+ # consistent with every other dtype's behavior
274
+ res = obj.fillna(1)
275
+ expected = obj.astype(object).fillna(1)
276
+ tm.assert_equal(res, expected)
277
+
278
+ result = obj.fillna(Timedelta(seconds=1))
279
+ expected = Series(
280
+ [
281
+ timedelta(seconds=1),
282
+ timedelta(0),
283
+ timedelta(1),
284
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
285
+ ],
286
+ dtype=f"m8[{unit}]",
287
+ )
288
+ expected = frame_or_series(expected)
289
+ tm.assert_equal(result, expected)
290
+
291
+ result = obj.fillna(timedelta(days=1, seconds=1))
292
+ expected = Series(
293
+ [
294
+ timedelta(days=1, seconds=1),
295
+ timedelta(0),
296
+ timedelta(1),
297
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
298
+ ],
299
+ dtype=f"m8[{unit}]",
300
+ )
301
+ expected = frame_or_series(expected)
302
+ tm.assert_equal(result, expected)
303
+
304
+ result = obj.fillna(np.timedelta64(10**9))
305
+ expected = Series(
306
+ [
307
+ timedelta(seconds=1),
308
+ timedelta(0),
309
+ timedelta(1),
310
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
311
+ ],
312
+ dtype=f"m8[{unit}]",
313
+ )
314
+ expected = frame_or_series(expected)
315
+ tm.assert_equal(result, expected)
316
+
317
+ result = obj.fillna(NaT)
318
+ expected = Series(
319
+ [
320
+ NaT,
321
+ timedelta(0),
322
+ timedelta(1),
323
+ timedelta(days=1, seconds=9 * 3600 + 60 + 1),
324
+ ],
325
+ dtype=f"m8[{unit}]",
326
+ )
327
+ expected = frame_or_series(expected)
328
+ tm.assert_equal(result, expected)
329
+
330
+ # ffill
331
+ td[2] = np.nan
332
+ obj = frame_or_series(td).copy()
333
+ result = obj.ffill()
334
+ expected = td.fillna(Timedelta(seconds=0))
335
+ expected[0] = np.nan
336
+ expected = frame_or_series(expected)
337
+
338
+ tm.assert_equal(result, expected)
339
+
340
+ # bfill
341
+ td[2] = np.nan
342
+ obj = frame_or_series(td)
343
+ result = obj.bfill()
344
+ expected = td.fillna(Timedelta(seconds=0))
345
+ expected[2] = timedelta(days=1, seconds=9 * 3600 + 60 + 1)
346
+ expected = frame_or_series(expected)
347
+ tm.assert_equal(result, expected)
348
+
349
+ def test_datetime64_fillna(self):
350
+ ser = Series(
351
+ [
352
+ Timestamp("20130101"),
353
+ Timestamp("20130101"),
354
+ Timestamp("20130102"),
355
+ Timestamp("20130103 9:01:01"),
356
+ ]
357
+ )
358
+ ser[2] = np.nan
359
+
360
+ # ffill
361
+ result = ser.ffill()
362
+ expected = Series(
363
+ [
364
+ Timestamp("20130101"),
365
+ Timestamp("20130101"),
366
+ Timestamp("20130101"),
367
+ Timestamp("20130103 9:01:01"),
368
+ ]
369
+ )
370
+ tm.assert_series_equal(result, expected)
371
+
372
+ # bfill
373
+ result = ser.bfill()
374
+ expected = Series(
375
+ [
376
+ Timestamp("20130101"),
377
+ Timestamp("20130101"),
378
+ Timestamp("20130103 9:01:01"),
379
+ Timestamp("20130103 9:01:01"),
380
+ ]
381
+ )
382
+ tm.assert_series_equal(result, expected)
383
+
384
+ @pytest.mark.parametrize(
385
+ "scalar",
386
+ [
387
+ False,
388
+ pytest.param(
389
+ True,
390
+ marks=pytest.mark.xfail(
391
+ reason="GH#56410 scalar case not yet addressed"
392
+ ),
393
+ ),
394
+ ],
395
+ )
396
+ @pytest.mark.parametrize("tz", [None, "UTC"])
397
+ def test_datetime64_fillna_mismatched_reso_no_rounding(self, tz, scalar):
398
+ # GH#56410
399
+ dti = date_range("2016-01-01", periods=3, unit="s", tz=tz)
400
+ item = Timestamp("2016-02-03 04:05:06.789", tz=tz)
401
+ vec = date_range(item, periods=3, unit="ms")
402
+
403
+ exp_dtype = "M8[ms]" if tz is None else "M8[ms, UTC]"
404
+ expected = Series([item, dti[1], dti[2]], dtype=exp_dtype)
405
+
406
+ ser = Series(dti)
407
+ ser[0] = NaT
408
+ ser2 = ser.copy()
409
+
410
+ res = ser.fillna(item)
411
+ res2 = ser2.fillna(Series(vec))
412
+
413
+ if scalar:
414
+ tm.assert_series_equal(res, expected)
415
+ else:
416
+ tm.assert_series_equal(res2, expected)
417
+
418
+ @pytest.mark.parametrize(
419
+ "scalar",
420
+ [
421
+ False,
422
+ pytest.param(
423
+ True,
424
+ marks=pytest.mark.xfail(
425
+ reason="GH#56410 scalar case not yet addressed"
426
+ ),
427
+ ),
428
+ ],
429
+ )
430
+ def test_timedelta64_fillna_mismatched_reso_no_rounding(self, scalar):
431
+ # GH#56410
432
+ tdi = date_range("2016-01-01", periods=3, unit="s") - Timestamp("1970-01-01")
433
+ item = Timestamp("2016-02-03 04:05:06.789") - Timestamp("1970-01-01")
434
+ vec = timedelta_range(item, periods=3, unit="ms")
435
+
436
+ expected = Series([item, tdi[1], tdi[2]], dtype="m8[ms]")
437
+
438
+ ser = Series(tdi)
439
+ ser[0] = NaT
440
+ ser2 = ser.copy()
441
+
442
+ res = ser.fillna(item)
443
+ res2 = ser2.fillna(Series(vec))
444
+
445
+ if scalar:
446
+ tm.assert_series_equal(res, expected)
447
+ else:
448
+ tm.assert_series_equal(res2, expected)
449
+
450
+ def test_datetime64_fillna_backfill(self):
451
+ # GH#6587
452
+ # make sure that we are treating as integer when filling
453
+ ser = Series([NaT, NaT, "2013-08-05 15:30:00.000001"], dtype="M8[ns]")
454
+
455
+ expected = Series(
456
+ [
457
+ "2013-08-05 15:30:00.000001",
458
+ "2013-08-05 15:30:00.000001",
459
+ "2013-08-05 15:30:00.000001",
460
+ ],
461
+ dtype="M8[ns]",
462
+ )
463
+ result = ser.fillna(method="backfill")
464
+ tm.assert_series_equal(result, expected)
465
+
466
+ @pytest.mark.parametrize("tz", ["US/Eastern", "Asia/Tokyo"])
467
+ def test_datetime64_tz_fillna(self, tz, unit):
468
+ # DatetimeLikeBlock
469
+ ser = Series(
470
+ [
471
+ Timestamp("2011-01-01 10:00"),
472
+ NaT,
473
+ Timestamp("2011-01-03 10:00"),
474
+ NaT,
475
+ ],
476
+ dtype=f"M8[{unit}]",
477
+ )
478
+ null_loc = Series([False, True, False, True])
479
+
480
+ result = ser.fillna(Timestamp("2011-01-02 10:00"))
481
+ expected = Series(
482
+ [
483
+ Timestamp("2011-01-01 10:00"),
484
+ Timestamp("2011-01-02 10:00"),
485
+ Timestamp("2011-01-03 10:00"),
486
+ Timestamp("2011-01-02 10:00"),
487
+ ],
488
+ dtype=f"M8[{unit}]",
489
+ )
490
+ tm.assert_series_equal(expected, result)
491
+ # check s is not changed
492
+ tm.assert_series_equal(isna(ser), null_loc)
493
+
494
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz))
495
+ expected = Series(
496
+ [
497
+ Timestamp("2011-01-01 10:00"),
498
+ Timestamp("2011-01-02 10:00", tz=tz),
499
+ Timestamp("2011-01-03 10:00"),
500
+ Timestamp("2011-01-02 10:00", tz=tz),
501
+ ]
502
+ )
503
+ tm.assert_series_equal(expected, result)
504
+ tm.assert_series_equal(isna(ser), null_loc)
505
+
506
+ result = ser.fillna("AAA")
507
+ expected = Series(
508
+ [
509
+ Timestamp("2011-01-01 10:00"),
510
+ "AAA",
511
+ Timestamp("2011-01-03 10:00"),
512
+ "AAA",
513
+ ],
514
+ dtype=object,
515
+ )
516
+ tm.assert_series_equal(expected, result)
517
+ tm.assert_series_equal(isna(ser), null_loc)
518
+
519
+ result = ser.fillna(
520
+ {
521
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
522
+ 3: Timestamp("2011-01-04 10:00"),
523
+ }
524
+ )
525
+ expected = Series(
526
+ [
527
+ Timestamp("2011-01-01 10:00"),
528
+ Timestamp("2011-01-02 10:00", tz=tz),
529
+ Timestamp("2011-01-03 10:00"),
530
+ Timestamp("2011-01-04 10:00"),
531
+ ]
532
+ )
533
+ tm.assert_series_equal(expected, result)
534
+ tm.assert_series_equal(isna(ser), null_loc)
535
+
536
+ result = ser.fillna(
537
+ {1: Timestamp("2011-01-02 10:00"), 3: Timestamp("2011-01-04 10:00")}
538
+ )
539
+ expected = Series(
540
+ [
541
+ Timestamp("2011-01-01 10:00"),
542
+ Timestamp("2011-01-02 10:00"),
543
+ Timestamp("2011-01-03 10:00"),
544
+ Timestamp("2011-01-04 10:00"),
545
+ ],
546
+ dtype=f"M8[{unit}]",
547
+ )
548
+ tm.assert_series_equal(expected, result)
549
+ tm.assert_series_equal(isna(ser), null_loc)
550
+
551
+ # DatetimeTZBlock
552
+ idx = DatetimeIndex(
553
+ ["2011-01-01 10:00", NaT, "2011-01-03 10:00", NaT], tz=tz
554
+ ).as_unit(unit)
555
+ ser = Series(idx)
556
+ assert ser.dtype == f"datetime64[{unit}, {tz}]"
557
+ tm.assert_series_equal(isna(ser), null_loc)
558
+
559
+ result = ser.fillna(Timestamp("2011-01-02 10:00"))
560
+ expected = Series(
561
+ [
562
+ Timestamp("2011-01-01 10:00", tz=tz),
563
+ Timestamp("2011-01-02 10:00"),
564
+ Timestamp("2011-01-03 10:00", tz=tz),
565
+ Timestamp("2011-01-02 10:00"),
566
+ ]
567
+ )
568
+ tm.assert_series_equal(expected, result)
569
+ tm.assert_series_equal(isna(ser), null_loc)
570
+
571
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz))
572
+ idx = DatetimeIndex(
573
+ [
574
+ "2011-01-01 10:00",
575
+ "2011-01-02 10:00",
576
+ "2011-01-03 10:00",
577
+ "2011-01-02 10:00",
578
+ ],
579
+ tz=tz,
580
+ ).as_unit(unit)
581
+ expected = Series(idx)
582
+ tm.assert_series_equal(expected, result)
583
+ tm.assert_series_equal(isna(ser), null_loc)
584
+
585
+ result = ser.fillna(Timestamp("2011-01-02 10:00", tz=tz).to_pydatetime())
586
+ idx = DatetimeIndex(
587
+ [
588
+ "2011-01-01 10:00",
589
+ "2011-01-02 10:00",
590
+ "2011-01-03 10:00",
591
+ "2011-01-02 10:00",
592
+ ],
593
+ tz=tz,
594
+ ).as_unit(unit)
595
+ expected = Series(idx)
596
+ tm.assert_series_equal(expected, result)
597
+ tm.assert_series_equal(isna(ser), null_loc)
598
+
599
+ result = ser.fillna("AAA")
600
+ expected = Series(
601
+ [
602
+ Timestamp("2011-01-01 10:00", tz=tz),
603
+ "AAA",
604
+ Timestamp("2011-01-03 10:00", tz=tz),
605
+ "AAA",
606
+ ],
607
+ dtype=object,
608
+ )
609
+ tm.assert_series_equal(expected, result)
610
+ tm.assert_series_equal(isna(ser), null_loc)
611
+
612
+ result = ser.fillna(
613
+ {
614
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
615
+ 3: Timestamp("2011-01-04 10:00"),
616
+ }
617
+ )
618
+ expected = Series(
619
+ [
620
+ Timestamp("2011-01-01 10:00", tz=tz),
621
+ Timestamp("2011-01-02 10:00", tz=tz),
622
+ Timestamp("2011-01-03 10:00", tz=tz),
623
+ Timestamp("2011-01-04 10:00"),
624
+ ]
625
+ )
626
+ tm.assert_series_equal(expected, result)
627
+ tm.assert_series_equal(isna(ser), null_loc)
628
+
629
+ result = ser.fillna(
630
+ {
631
+ 1: Timestamp("2011-01-02 10:00", tz=tz),
632
+ 3: Timestamp("2011-01-04 10:00", tz=tz),
633
+ }
634
+ )
635
+ expected = Series(
636
+ [
637
+ Timestamp("2011-01-01 10:00", tz=tz),
638
+ Timestamp("2011-01-02 10:00", tz=tz),
639
+ Timestamp("2011-01-03 10:00", tz=tz),
640
+ Timestamp("2011-01-04 10:00", tz=tz),
641
+ ]
642
+ ).dt.as_unit(unit)
643
+ tm.assert_series_equal(expected, result)
644
+ tm.assert_series_equal(isna(ser), null_loc)
645
+
646
+ # filling with a naive/other zone, coerce to object
647
+ result = ser.fillna(Timestamp("20130101"))
648
+ expected = Series(
649
+ [
650
+ Timestamp("2011-01-01 10:00", tz=tz),
651
+ Timestamp("2013-01-01"),
652
+ Timestamp("2011-01-03 10:00", tz=tz),
653
+ Timestamp("2013-01-01"),
654
+ ]
655
+ )
656
+ tm.assert_series_equal(expected, result)
657
+ tm.assert_series_equal(isna(ser), null_loc)
658
+
659
+ # pre-2.0 fillna with mixed tzs would cast to object, in 2.0
660
+ # it retains dtype.
661
+ result = ser.fillna(Timestamp("20130101", tz="US/Pacific"))
662
+ expected = Series(
663
+ [
664
+ Timestamp("2011-01-01 10:00", tz=tz),
665
+ Timestamp("2013-01-01", tz="US/Pacific").tz_convert(tz),
666
+ Timestamp("2011-01-03 10:00", tz=tz),
667
+ Timestamp("2013-01-01", tz="US/Pacific").tz_convert(tz),
668
+ ]
669
+ ).dt.as_unit(unit)
670
+ tm.assert_series_equal(expected, result)
671
+ tm.assert_series_equal(isna(ser), null_loc)
672
+
673
+ def test_fillna_dt64tz_with_method(self):
674
+ # with timezone
675
+ # GH#15855
676
+ ser = Series([Timestamp("2012-11-11 00:00:00+01:00"), NaT])
677
+ exp = Series(
678
+ [
679
+ Timestamp("2012-11-11 00:00:00+01:00"),
680
+ Timestamp("2012-11-11 00:00:00+01:00"),
681
+ ]
682
+ )
683
+ tm.assert_series_equal(ser.fillna(method="pad"), exp)
684
+
685
+ ser = Series([NaT, Timestamp("2012-11-11 00:00:00+01:00")])
686
+ exp = Series(
687
+ [
688
+ Timestamp("2012-11-11 00:00:00+01:00"),
689
+ Timestamp("2012-11-11 00:00:00+01:00"),
690
+ ]
691
+ )
692
+ tm.assert_series_equal(ser.fillna(method="bfill"), exp)
693
+
694
+ def test_fillna_pytimedelta(self):
695
+ # GH#8209
696
+ ser = Series([np.nan, Timedelta("1 days")], index=["A", "B"])
697
+
698
+ result = ser.fillna(timedelta(1))
699
+ expected = Series(Timedelta("1 days"), index=["A", "B"])
700
+ tm.assert_series_equal(result, expected)
701
+
702
+ def test_fillna_period(self):
703
+ # GH#13737
704
+ ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])
705
+
706
+ res = ser.fillna(Period("2012-01", freq="M"))
707
+ exp = Series([Period("2011-01", freq="M"), Period("2012-01", freq="M")])
708
+ tm.assert_series_equal(res, exp)
709
+ assert res.dtype == "Period[M]"
710
+
711
+ def test_fillna_dt64_timestamp(self, frame_or_series):
712
+ ser = Series(
713
+ [
714
+ Timestamp("20130101"),
715
+ Timestamp("20130101"),
716
+ Timestamp("20130102"),
717
+ Timestamp("20130103 9:01:01"),
718
+ ]
719
+ )
720
+ ser[2] = np.nan
721
+ obj = frame_or_series(ser)
722
+
723
+ # reg fillna
724
+ result = obj.fillna(Timestamp("20130104"))
725
+ expected = Series(
726
+ [
727
+ Timestamp("20130101"),
728
+ Timestamp("20130101"),
729
+ Timestamp("20130104"),
730
+ Timestamp("20130103 9:01:01"),
731
+ ]
732
+ )
733
+ expected = frame_or_series(expected)
734
+ tm.assert_equal(result, expected)
735
+
736
+ result = obj.fillna(NaT)
737
+ expected = obj
738
+ tm.assert_equal(result, expected)
739
+
740
+ def test_fillna_dt64_non_nao(self):
741
+ # GH#27419
742
+ ser = Series([Timestamp("2010-01-01"), NaT, Timestamp("2000-01-01")])
743
+ val = np.datetime64("1975-04-05", "ms")
744
+
745
+ result = ser.fillna(val)
746
+ expected = Series(
747
+ [Timestamp("2010-01-01"), Timestamp("1975-04-05"), Timestamp("2000-01-01")]
748
+ )
749
+ tm.assert_series_equal(result, expected)
750
+
751
+ def test_fillna_numeric_inplace(self):
752
+ x = Series([np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"])
753
+ y = x.copy()
754
+
755
+ return_value = y.fillna(value=0, inplace=True)
756
+ assert return_value is None
757
+
758
+ expected = x.fillna(value=0)
759
+ tm.assert_series_equal(y, expected)
760
+
761
+ # ---------------------------------------------------------------
762
+ # CategoricalDtype
763
+
764
+ @pytest.mark.parametrize(
765
+ "fill_value, expected_output",
766
+ [
767
+ ("a", ["a", "a", "b", "a", "a"]),
768
+ ({1: "a", 3: "b", 4: "b"}, ["a", "a", "b", "b", "b"]),
769
+ ({1: "a"}, ["a", "a", "b", np.nan, np.nan]),
770
+ ({1: "a", 3: "b"}, ["a", "a", "b", "b", np.nan]),
771
+ (Series("a"), ["a", np.nan, "b", np.nan, np.nan]),
772
+ (Series("a", index=[1]), ["a", "a", "b", np.nan, np.nan]),
773
+ (Series({1: "a", 3: "b"}), ["a", "a", "b", "b", np.nan]),
774
+ (Series(["a", "b"], index=[3, 4]), ["a", np.nan, "b", "a", "b"]),
775
+ ],
776
+ )
777
+ def test_fillna_categorical(self, fill_value, expected_output):
778
+ # GH#17033
779
+ # Test fillna for a Categorical series
780
+ data = ["a", np.nan, "b", np.nan, np.nan]
781
+ ser = Series(Categorical(data, categories=["a", "b"]))
782
+ exp = Series(Categorical(expected_output, categories=["a", "b"]))
783
+ result = ser.fillna(fill_value)
784
+ tm.assert_series_equal(result, exp)
785
+
786
+ @pytest.mark.parametrize(
787
+ "fill_value, expected_output",
788
+ [
789
+ (Series(["a", "b", "c", "d", "e"]), ["a", "b", "b", "d", "e"]),
790
+ (Series(["b", "d", "a", "d", "a"]), ["a", "d", "b", "d", "a"]),
791
+ (
792
+ Series(
793
+ Categorical(
794
+ ["b", "d", "a", "d", "a"], categories=["b", "c", "d", "e", "a"]
795
+ )
796
+ ),
797
+ ["a", "d", "b", "d", "a"],
798
+ ),
799
+ ],
800
+ )
801
+ def test_fillna_categorical_with_new_categories(self, fill_value, expected_output):
802
+ # GH#26215
803
+ data = ["a", np.nan, "b", np.nan, np.nan]
804
+ ser = Series(Categorical(data, categories=["a", "b", "c", "d", "e"]))
805
+ exp = Series(Categorical(expected_output, categories=["a", "b", "c", "d", "e"]))
806
+ result = ser.fillna(fill_value)
807
+ tm.assert_series_equal(result, exp)
808
+
809
+ def test_fillna_categorical_raises(self):
810
+ data = ["a", np.nan, "b", np.nan, np.nan]
811
+ ser = Series(Categorical(data, categories=["a", "b"]))
812
+ cat = ser._values
813
+
814
+ msg = "Cannot setitem on a Categorical with a new category"
815
+ with pytest.raises(TypeError, match=msg):
816
+ ser.fillna("d")
817
+
818
+ msg2 = "Length of 'value' does not match."
819
+ with pytest.raises(ValueError, match=msg2):
820
+ cat.fillna(Series("d"))
821
+
822
+ with pytest.raises(TypeError, match=msg):
823
+ ser.fillna({1: "d", 3: "a"})
824
+
825
+ msg = '"value" parameter must be a scalar or dict, but you passed a "list"'
826
+ with pytest.raises(TypeError, match=msg):
827
+ ser.fillna(["a", "b"])
828
+
829
+ msg = '"value" parameter must be a scalar or dict, but you passed a "tuple"'
830
+ with pytest.raises(TypeError, match=msg):
831
+ ser.fillna(("a", "b"))
832
+
833
+ msg = (
834
+ '"value" parameter must be a scalar, dict '
835
+ 'or Series, but you passed a "DataFrame"'
836
+ )
837
+ with pytest.raises(TypeError, match=msg):
838
+ ser.fillna(DataFrame({1: ["a"], 3: ["b"]}))
839
+
840
+ @pytest.mark.parametrize("dtype", [float, "float32", "float64"])
841
+ @pytest.mark.parametrize("fill_type", tm.ALL_REAL_NUMPY_DTYPES)
842
+ @pytest.mark.parametrize("scalar", [True, False])
843
+ def test_fillna_float_casting(self, dtype, fill_type, scalar):
844
+ # GH-43424
845
+ ser = Series([np.nan, 1.2], dtype=dtype)
846
+ fill_values = Series([2, 2], dtype=fill_type)
847
+ if scalar:
848
+ fill_values = fill_values.dtype.type(2)
849
+
850
+ result = ser.fillna(fill_values)
851
+ expected = Series([2.0, 1.2], dtype=dtype)
852
+ tm.assert_series_equal(result, expected)
853
+
854
+ ser = Series([np.nan, 1.2], dtype=dtype)
855
+ mask = ser.isna().to_numpy()
856
+ ser[mask] = fill_values
857
+ tm.assert_series_equal(ser, expected)
858
+
859
+ ser = Series([np.nan, 1.2], dtype=dtype)
860
+ ser.mask(mask, fill_values, inplace=True)
861
+ tm.assert_series_equal(ser, expected)
862
+
863
+ ser = Series([np.nan, 1.2], dtype=dtype)
864
+ res = ser.where(~mask, fill_values)
865
+ tm.assert_series_equal(res, expected)
866
+
867
+ def test_fillna_f32_upcast_with_dict(self):
868
+ # GH-43424
869
+ ser = Series([np.nan, 1.2], dtype=np.float32)
870
+ result = ser.fillna({0: 1})
871
+ expected = Series([1.0, 1.2], dtype=np.float32)
872
+ tm.assert_series_equal(result, expected)
873
+
874
+ # ---------------------------------------------------------------
875
+ # Invalid Usages
876
+
877
+ def test_fillna_invalid_method(self, datetime_series):
878
+ try:
879
+ datetime_series.fillna(method="ffil")
880
+ except ValueError as inst:
881
+ assert "ffil" in str(inst)
882
+
883
+ def test_fillna_listlike_invalid(self):
884
+ ser = Series(np.random.default_rng(2).integers(-100, 100, 50))
885
+ msg = '"value" parameter must be a scalar or dict, but you passed a "list"'
886
+ with pytest.raises(TypeError, match=msg):
887
+ ser.fillna([1, 2])
888
+
889
+ msg = '"value" parameter must be a scalar or dict, but you passed a "tuple"'
890
+ with pytest.raises(TypeError, match=msg):
891
+ ser.fillna((1, 2))
892
+
893
+ def test_fillna_method_and_limit_invalid(self):
894
+ # related GH#9217, make sure limit is an int and greater than 0
895
+ ser = Series([1, 2, 3, None])
896
+ msg = "|".join(
897
+ [
898
+ r"Cannot specify both 'value' and 'method'\.",
899
+ "Limit must be greater than 0",
900
+ "Limit must be an integer",
901
+ ]
902
+ )
903
+ for limit in [-1, 0, 1.0, 2.0]:
904
+ for method in ["backfill", "bfill", "pad", "ffill", None]:
905
+ with pytest.raises(ValueError, match=msg):
906
+ ser.fillna(1, limit=limit, method=method)
907
+
908
+ def test_fillna_datetime64_with_timezone_tzinfo(self):
909
+ # https://github.com/pandas-dev/pandas/issues/38851
910
+ # different tzinfos representing UTC treated as equal
911
+ ser = Series(date_range("2020", periods=3, tz="UTC"))
912
+ expected = ser.copy()
913
+ ser[1] = NaT
914
+ result = ser.fillna(datetime(2020, 1, 2, tzinfo=timezone.utc))
915
+ tm.assert_series_equal(result, expected)
916
+
917
+ # pre-2.0 we cast to object with mixed tzs, in 2.0 we retain dtype
918
+ ts = Timestamp("2000-01-01", tz="US/Pacific")
919
+ ser2 = Series(ser._values.tz_convert("dateutil/US/Pacific"))
920
+ assert ser2.dtype.kind == "M"
921
+ result = ser2.fillna(ts)
922
+ expected = Series(
923
+ [ser2[0], ts.tz_convert(ser2.dtype.tz), ser2[2]],
924
+ dtype=ser2.dtype,
925
+ )
926
+ tm.assert_series_equal(result, expected)
927
+
928
    @pytest.mark.parametrize(
        "input, input_fillna, expected_data, expected_categories",
        [
            (["A", "B", None, "A"], "B", ["A", "B", "B", "A"], ["A", "B"]),
            (["A", "B", np.nan, "A"], "B", ["A", "B", "B", "A"], ["A", "B"]),
        ],
    )
    def test_fillna_categorical_accept_same_type(
        self, input, input_fillna, expected_data, expected_categories
    ):
        # GH32414: Categorical.fillna accepts a same-length Series backed by
        # the same categories; both None and np.nan count as missing.
        cat = Categorical(input)
        ser = Series(cat).fillna(input_fillna)
        # filling with the already-filled Series, then filling again with that
        # result, must be idempotent
        filled = cat.fillna(ser)
        result = cat.fillna(filled)
        expected = Categorical(expected_data, categories=expected_categories)
        tm.assert_categorical_equal(result, expected)
945
+
946
+
947
@pytest.mark.filterwarnings(
    "ignore:Series.fillna with 'method' is deprecated:FutureWarning"
)
class TestFillnaPad:
    """Tests for forward/backward filling via ffill/bfill and the deprecated
    fillna(method=...) / pad / backfill spellings.

    The class-level filterwarnings silences the FutureWarning emitted by
    fillna(method=...) so each test can focus on fill behavior itself.
    """

    def test_fillna_bug(self):
        # forward fill leaves the leading NaN; backfill leaves the trailing one
        ser = Series([np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"])
        filled = ser.fillna(method="ffill")
        expected = Series([np.nan, 1.0, 1.0, 3.0, 3.0], ser.index)
        tm.assert_series_equal(filled, expected)

        filled = ser.fillna(method="bfill")
        expected = Series([1.0, 1.0, 3.0, 3.0, np.nan], ser.index)
        tm.assert_series_equal(filled, expected)

    def test_ffill(self):
        # Series.ffill() is equivalent to fillna(method="ffill")
        ts = Series(
            [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
        )
        ts.iloc[2] = np.nan
        tm.assert_series_equal(ts.ffill(), ts.fillna(method="ffill"))

    def test_ffill_mixed_dtypes_without_missing_data(self):
        # GH#14956: ffill on an object Series with no missing data is a no-op
        series = Series([datetime(2015, 1, 1, tzinfo=pytz.utc), 1])
        result = series.ffill()
        tm.assert_series_equal(series, result)

    def test_bfill(self):
        # Series.bfill() is equivalent to fillna(method="bfill")
        ts = Series(
            [0.0, 1.0, 2.0, 3.0, 4.0], index=date_range("2020-01-01", periods=5)
        )
        ts.iloc[2] = np.nan
        tm.assert_series_equal(ts.bfill(), ts.fillna(method="bfill"))

    def test_pad_nan(self):
        # inplace pad returns None; the leading NaN has nothing to pad from
        x = Series(
            [np.nan, 1.0, np.nan, 3.0, np.nan], ["z", "a", "b", "c", "d"], dtype=float
        )

        return_value = x.fillna(method="pad", inplace=True)
        assert return_value is None

        expected = Series(
            [np.nan, 1.0, 1.0, 3.0, 3.0], ["z", "a", "b", "c", "d"], dtype=float
        )
        tm.assert_series_equal(x[1:], expected[1:])
        assert np.isnan(x.iloc[0]), np.isnan(expected.iloc[0])

    def test_series_fillna_limit(self):
        # limit caps how many consecutive NaNs are filled in each direction
        index = np.arange(10)
        s = Series(np.random.default_rng(2).standard_normal(10), index=index)

        result = s[:2].reindex(index)
        result = result.fillna(method="pad", limit=5)

        expected = s[:2].reindex(index).fillna(method="pad")
        expected[-3:] = np.nan
        tm.assert_series_equal(result, expected)

        result = s[-2:].reindex(index)
        result = result.fillna(method="bfill", limit=5)

        expected = s[-2:].reindex(index).fillna(method="backfill")
        expected[:3] = np.nan
        tm.assert_series_equal(result, expected)

    def test_series_pad_backfill_limit(self):
        # reindex(..., method=..., limit=...) matches reindex + limited fillna
        index = np.arange(10)
        s = Series(np.random.default_rng(2).standard_normal(10), index=index)

        result = s[:2].reindex(index, method="pad", limit=5)

        expected = s[:2].reindex(index).fillna(method="pad")
        expected[-3:] = np.nan
        tm.assert_series_equal(result, expected)

        result = s[-2:].reindex(index, method="backfill", limit=5)

        expected = s[-2:].reindex(index).fillna(method="backfill")
        expected[:3] = np.nan
        tm.assert_series_equal(result, expected)

    def test_fillna_int(self):
        # inplace ffill on an int Series returns None and is idempotent
        ser = Series(np.random.default_rng(2).integers(-100, 100, 50))
        return_value = ser.fillna(method="ffill", inplace=True)
        assert return_value is None
        tm.assert_series_equal(ser.fillna(method="ffill", inplace=False), ser)

    def test_datetime64tz_fillna_round_issue(self):
        # GH#14872: bfill must not round/truncate tz-aware microsecond values

        data = Series(
            [NaT, NaT, datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc)]
        )

        filled = data.bfill()

        expected = Series(
            [
                datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
                datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
                datetime(2016, 12, 12, 22, 24, 6, 100001, tzinfo=pytz.utc),
            ]
        )

        tm.assert_series_equal(filled, expected)

    def test_fillna_parr(self):
        # GH-24537: ffill works on a PeriodArray even near Timestamp.max,
        # where datetime64 arithmetic would overflow
        dti = date_range(
            Timestamp.max - Timedelta(nanoseconds=10), periods=5, freq="ns"
        )
        ser = Series(dti.to_period("ns"))
        ser[2] = NaT
        arr = period_array(
            [
                Timestamp("2262-04-11 23:47:16.854775797"),
                Timestamp("2262-04-11 23:47:16.854775798"),
                Timestamp("2262-04-11 23:47:16.854775798"),
                Timestamp("2262-04-11 23:47:16.854775800"),
                Timestamp("2262-04-11 23:47:16.854775801"),
            ],
            freq="ns",
        )
        expected = Series(arr)

        filled = ser.ffill()

        tm.assert_series_equal(filled, expected)

    @pytest.mark.parametrize("func", ["pad", "backfill"])
    def test_pad_backfill_deprecated(self, func):
        # GH#33396: Series.pad / Series.backfill themselves warn FutureWarning
        # (this warning is distinct from the one silenced at class level)
        ser = Series([1, 2, 3])
        with tm.assert_produces_warning(FutureWarning):
            getattr(ser, func)()
1083
+
1084
+
1085
@pytest.mark.parametrize(
    "data, expected_data, method, kwargs",
    (
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [np.nan, np.nan, 3.0, 3.0, 3.0, 3.0, 7.0, np.nan, np.nan],
            "ffill",
            {"limit_area": "inside"},
        ),
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [np.nan, np.nan, 3.0, 3.0, np.nan, np.nan, 7.0, np.nan, np.nan],
            "ffill",
            {"limit_area": "inside", "limit": 1},
        ),
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, 7.0],
            "ffill",
            {"limit_area": "outside"},
        ),
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [np.nan, np.nan, 3.0, np.nan, np.nan, np.nan, 7.0, 7.0, np.nan],
            "ffill",
            {"limit_area": "outside", "limit": 1},
        ),
        (
            # all-NaN input: nothing to propagate, output unchanged
            [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
            [np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan],
            "ffill",
            {"limit_area": "outside", "limit": 1},
        ),
        (
            # no NaNs at all: fill is a no-op
            range(5),
            range(5),
            "ffill",
            {"limit_area": "outside", "limit": 1},
        ),
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [np.nan, np.nan, 3.0, 7.0, 7.0, 7.0, 7.0, np.nan, np.nan],
            "bfill",
            {"limit_area": "inside"},
        ),
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [np.nan, np.nan, 3.0, np.nan, np.nan, 7.0, 7.0, np.nan, np.nan],
            "bfill",
            {"limit_area": "inside", "limit": 1},
        ),
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [3.0, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
            "bfill",
            {"limit_area": "outside"},
        ),
        (
            [np.nan, np.nan, 3, np.nan, np.nan, np.nan, 7, np.nan, np.nan],
            [np.nan, 3.0, 3.0, np.nan, np.nan, np.nan, 7.0, np.nan, np.nan],
            "bfill",
            {"limit_area": "outside", "limit": 1},
        ),
    ),
)
def test_ffill_bfill_limit_area(data, expected_data, method, kwargs):
    # GH#56492: limit_area restricts filling to NaNs "inside" (between valid
    # values) or "outside" (leading/trailing); limit still caps run length.
    s = Series(data)
    expected = Series(expected_data)
    result = getattr(s, method)(**kwargs)
    tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_get_numeric_data.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pandas import (
2
+ Index,
3
+ Series,
4
+ date_range,
5
+ )
6
+ import pandas._testing as tm
7
+
8
+
9
class TestGetNumericData:
    def test_get_numeric_data_preserve_dtype(
        self, using_copy_on_write, warn_copy_on_write
    ):
        """_get_numeric_data keeps numeric/bool columns and preserves dtype."""
        # all-numeric Series: everything is kept
        obj = Series([1, 2, 3])
        result = obj._get_numeric_data()
        tm.assert_series_equal(result, obj)

        # returned object is a shallow copy; under copy-on-write, mutating the
        # result must not write through to the original
        with tm.assert_cow_warning(warn_copy_on_write):
            result.iloc[0] = 0
        if using_copy_on_write:
            assert obj.iloc[0] == 1
        else:
            assert obj.iloc[0] == 0

        # mixed object dtype: nothing is numeric, so the result is empty
        obj = Series([1, "2", 3.0])
        result = obj._get_numeric_data()
        expected = Series([], dtype=object, index=Index([], dtype=object))
        tm.assert_series_equal(result, expected)

        # booleans count as numeric
        obj = Series([True, False, True])
        result = obj._get_numeric_data()
        tm.assert_series_equal(result, obj)

        # datetimes are not numeric -> empty result, dtype preserved
        obj = Series(date_range("20130101", periods=3))
        result = obj._get_numeric_data()
        expected = Series([], dtype="M8[ns]", index=Index([], dtype=object))
        tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_infer_objects.py ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas import (
4
+ Series,
5
+ interval_range,
6
+ )
7
+ import pandas._testing as tm
8
+
9
+
10
class TestInferObjects:
    """Tests for infer_objects on both Series and Index
    (via the index_or_series fixture)."""

    def test_copy(self, index_or_series):
        # GH#50096
        # case where we don't need to do inference because it is already
        # non-object: copy=False should return a view over the same buffer
        obj = index_or_series(np.array([1, 2, 3], dtype="int64"))

        result = obj.infer_objects(copy=False)
        assert tm.shares_memory(result, obj)

        # case where we try to do inference but can't do better than object;
        # still no copy should be made
        obj2 = index_or_series(np.array(["foo", 2], dtype=object))
        result2 = obj2.infer_objects(copy=False)
        assert tm.shares_memory(result2, obj2)

    def test_infer_objects_series(self, index_or_series):
        # GH#11221
        actual = index_or_series(np.array([1, 2, 3], dtype="O")).infer_objects()
        expected = index_or_series([1, 2, 3])
        tm.assert_equal(actual, expected)

        # a None forces a float result so the missing value can be NaN
        actual = index_or_series(np.array([1, 2, 3, None], dtype="O")).infer_objects()
        expected = index_or_series([1.0, 2.0, 3.0, np.nan])
        tm.assert_equal(actual, expected)

        # only soft conversions, unconvertible pass thru unchanged

        obj = index_or_series(np.array([1, 2, 3, None, "a"], dtype="O"))
        actual = obj.infer_objects()
        expected = index_or_series([1, 2, 3, None, "a"], dtype=object)

        assert actual.dtype == "object"
        tm.assert_equal(actual, expected)

    def test_infer_objects_interval(self, index_or_series):
        # GH#50090: round-trip through object dtype recovers IntervalDtype
        ii = interval_range(1, 10)
        obj = index_or_series(ii)

        result = obj.astype(object).infer_objects()
        tm.assert_equal(result, obj)

    def test_infer_objects_bytes(self):
        # GH#49650: bytes dtype is left alone by inference
        ser = Series([b"a"], dtype="bytes")
        expected = ser.copy()
        result = ser.infer_objects()
        tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_isin.py ADDED
@@ -0,0 +1,252 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ Series,
7
+ date_range,
8
+ )
9
+ import pandas._testing as tm
10
+ from pandas.core import algorithms
11
+ from pandas.core.arrays import PeriodArray
12
+
13
+
14
class TestSeriesIsIn:
    """Tests for Series.isin across dtypes, resolutions, and edge cases."""

    def test_isin(self):
        s = Series(["A", "B", "C", "a", "B", "B", "A", "C"])

        result = s.isin(["A", "C"])
        expected = Series([True, False, True, False, False, False, True, True])
        tm.assert_series_equal(result, expected)

        # GH#16012
        # This specific issue has to have a series over 1e6 in len, but the
        # comparison array (in_list) must be large enough so that numpy doesn't
        # do a manual masking trick that will avoid this issue altogether
        s = Series(list("abcdefghijk" * 10**5))
        # If numpy doesn't do the manual comparison/mask, these
        # unorderable mixed types are what cause the exception in numpy
        in_list = [-1, "a", "b", "G", "Y", "Z", "E", "K", "E", "S", "I", "R", "R"] * 6

        assert s.isin(in_list).sum() == 200000

    def test_isin_with_string_scalar(self):
        # GH#4763: a bare string is iterable but must be rejected explicitly
        s = Series(["A", "B", "C", "a", "B", "B", "A", "C"])
        msg = (
            r"only list-like objects are allowed to be passed to isin\(\), "
            r"you passed a `str`"
        )
        with pytest.raises(TypeError, match=msg):
            s.isin("a")

        s = Series(["aaa", "b", "c"])
        with pytest.raises(TypeError, match=msg):
            s.isin("aaa")

    def test_isin_datetimelike_mismatched_reso(self):
        # datetime values compare equal across unit resolutions (ns vs D/s)
        expected = Series([True, True, False, False, False])

        ser = Series(date_range("jan-01-2013", "jan-05-2013"))

        # fails on dtype conversion in the first place
        day_values = np.asarray(ser[0:2].values).astype("datetime64[D]")
        result = ser.isin(day_values)
        tm.assert_series_equal(result, expected)

        dta = ser[:2]._values.astype("M8[s]")
        result = ser.isin(dta)
        tm.assert_series_equal(result, expected)

    def test_isin_datetimelike_mismatched_reso_list(self):
        # same as above but passing a plain list of mismatched-reso values
        expected = Series([True, True, False, False, False])

        ser = Series(date_range("jan-01-2013", "jan-05-2013"))

        dta = ser[:2]._values.astype("M8[s]")
        result = ser.isin(list(dta))
        tm.assert_series_equal(result, expected)

    def test_isin_with_i8(self):
        # GH#5021: datetime64/timedelta64 membership must not be confused with
        # their underlying i8 integer representation

        expected = Series([True, True, False, False, False])
        expected2 = Series([False, True, False, False, False])

        # datetime64[ns]
        s = Series(date_range("jan-01-2013", "jan-05-2013"))

        result = s.isin(s[0:2])
        tm.assert_series_equal(result, expected)

        result = s.isin(s[0:2].values)
        tm.assert_series_equal(result, expected)

        result = s.isin([s[1]])
        tm.assert_series_equal(result, expected2)

        result = s.isin([np.datetime64(s[1])])
        tm.assert_series_equal(result, expected2)

        result = s.isin(set(s[0:2]))
        tm.assert_series_equal(result, expected)

        # timedelta64[ns]
        s = Series(pd.to_timedelta(range(5), unit="d"))
        result = s.isin(s[0:2])
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("empty", [[], Series(dtype=object), np.array([])])
    def test_isin_empty(self, empty):
        # see GH#16991: empty membership collections produce all-False
        s = Series(["a", "b"])
        expected = Series([False, False])

        result = s.isin(empty)
        tm.assert_series_equal(expected, result)

    def test_isin_read_only(self):
        # https://github.com/pandas-dev/pandas/issues/37174
        # values array marked read-only must still be accepted
        arr = np.array([1, 2, 3])
        arr.setflags(write=False)
        s = Series([1, 2, 3])
        result = s.isin(arr)
        expected = Series([True, True, True])
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("dtype", [object, None])
    def test_isin_dt64_values_vs_ints(self, dtype):
        # GH#36621 dont cast integers to datetimes for isin
        dti = date_range("2013-01-01", "2013-01-05")
        ser = Series(dti)

        comps = np.asarray([1356998400000000000], dtype=dtype)

        res = dti.isin(comps)
        expected = np.array([False] * len(dti), dtype=bool)
        tm.assert_numpy_array_equal(res, expected)

        res = ser.isin(comps)
        tm.assert_series_equal(res, Series(expected))

        res = pd.core.algorithms.isin(ser, comps)
        tm.assert_numpy_array_equal(res, expected)

    def test_isin_tzawareness_mismatch(self):
        # naive datetimes never match tz-aware ones
        dti = date_range("2013-01-01", "2013-01-05")
        ser = Series(dti)

        other = dti.tz_localize("UTC")

        res = dti.isin(other)
        expected = np.array([False] * len(dti), dtype=bool)
        tm.assert_numpy_array_equal(res, expected)

        res = ser.isin(other)
        tm.assert_series_equal(res, Series(expected))

        res = pd.core.algorithms.isin(ser, other)
        tm.assert_numpy_array_equal(res, expected)

    def test_isin_period_freq_mismatch(self):
        # Periods with different freqs never compare equal, even when their
        # underlying i8 ordinals coincide
        dti = date_range("2013-01-01", "2013-01-05")
        pi = dti.to_period("M")
        ser = Series(pi)

        # We construct another PeriodIndex with the same i8 values
        # but different dtype
        dtype = dti.to_period("Y").dtype
        other = PeriodArray._simple_new(pi.asi8, dtype=dtype)

        res = pi.isin(other)
        expected = np.array([False] * len(pi), dtype=bool)
        tm.assert_numpy_array_equal(res, expected)

        res = ser.isin(other)
        tm.assert_series_equal(res, Series(expected))

        res = pd.core.algorithms.isin(ser, other)
        tm.assert_numpy_array_equal(res, expected)

    @pytest.mark.parametrize("values", [[-9.0, 0.0], [-9, 0]])
    def test_isin_float_in_int_series(self, values):
        # GH#19356 GH#21804: int/float equality (e.g. -9 == -9.0) must hold
        ser = Series(values)
        result = ser.isin([-9, -0.5])
        expected = Series([True, False])
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("dtype", ["boolean", "Int64", "Float64"])
    @pytest.mark.parametrize(
        "data,values,expected",
        [
            ([0, 1, 0], [1], [False, True, False]),
            ([0, 1, 0], [1, pd.NA], [False, True, False]),
            ([0, pd.NA, 0], [1, 0], [True, False, True]),
            ([0, 1, pd.NA], [1, pd.NA], [False, True, True]),
            # NA only matches NA, not np.nan / NaT / None
            ([0, 1, pd.NA], [1, np.nan], [False, True, False]),
            ([0, pd.NA, pd.NA], [np.nan, pd.NaT, None], [False, False, False]),
        ],
    )
    def test_isin_masked_types(self, dtype, data, values, expected):
        # GH#42405: masked (nullable) dtypes return "boolean" dtype results
        ser = Series(data, dtype=dtype)

        result = ser.isin(values)
        expected = Series(expected, dtype="boolean")

        tm.assert_series_equal(result, expected)
199
+
200
+
201
def test_isin_large_series_mixed_dtypes_and_nan(monkeypatch):
    # https://github.com/pandas-dev/pandas/issues/37094
    # combination of object dtype for the values
    # and > _MINIMUM_COMP_ARR_LEN elements
    # Shrink the private threshold via monkeypatch so the large-array code
    # path is exercised without building a genuinely huge Series.
    min_isin_comp = 5
    ser = Series([1, 2, np.nan] * min_isin_comp)
    with monkeypatch.context() as m:
        m.setattr(algorithms, "_MINIMUM_COMP_ARR_LEN", min_isin_comp)
        result = ser.isin({"foo", "bar"})
    expected = Series([False] * 3 * min_isin_comp)
    tm.assert_series_equal(result, expected)
212
+
213
+
214
@pytest.mark.parametrize(
    "array,expected",
    [
        (
            [0, 1j, 1j, 1, 1 + 1j, 1 + 2j, 1 + 1j],
            Series([False, True, True, False, True, True, True], dtype=bool),
        )
    ],
)
def test_isin_complex_numbers(array, expected):
    # GH 17927: complex values must hash and compare correctly inside isin
    result = Series(array).isin([1j, 1 + 1j, 1 + 2j])
    tm.assert_series_equal(result, expected)
227
+
228
+
229
@pytest.mark.parametrize(
    "data,is_in",
    [([1, [2]], [1]), (["simple str", [{"values": 3}]], ["simple str"])],
)
def test_isin_filtering_with_mixed_object_types(data, is_in):
    # GH 20883: unhashable elements (lists/dicts) in an object Series must
    # not break isin; they simply compare as not-a-member.

    ser = Series(data)
    result = ser.isin(is_in)
    expected = Series([True, False])

    tm.assert_series_equal(result, expected)
241
+
242
+
243
@pytest.mark.parametrize("data", [[1, 2, 3], [1.0, 2.0, 3.0]])
@pytest.mark.parametrize("isin", [[1, 2], [1.0, 2.0]])
def test_isin_filtering_on_iterable(data, isin):
    # GH 50234: isin must accept a one-shot iterable (a generator here),
    # not only materialized list-likes.
    ser = Series(data)
    expected = Series([True, True, False])
    result = ser.isin(member for member in isin)
    tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_isna.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ We also test Series.notna in this file.
3
+ """
4
+ import numpy as np
5
+
6
+ from pandas import (
7
+ Period,
8
+ Series,
9
+ )
10
+ import pandas._testing as tm
11
+
12
+
13
class TestIsna:
    """Tests for Series.isna; Series.notna is checked as its complement."""

    def test_isna_period_dtype(self):
        # GH#13737: NaT inside a PeriodDtype Series is detected as missing
        ser = Series([Period("2011-01", freq="M"), Period("NaT", freq="M")])

        expected = Series([False, True])

        tm.assert_series_equal(ser.isna(), expected)
        tm.assert_series_equal(ser.notna(), ~expected)

    def test_isna(self):
        # floats: only NaN is missing; zero and negatives are not
        float_ser = Series([0, 5.4, 3, np.nan, -0.001])
        float_mask = Series([False, False, False, True, False])
        tm.assert_series_equal(float_ser.isna(), float_mask)
        tm.assert_series_equal(float_ser.notna(), ~float_mask)

        # strings: the empty string is NOT missing, np.nan is
        str_ser = Series(["hi", "", np.nan])
        str_mask = Series([False, False, True])
        tm.assert_series_equal(str_ser.isna(), str_mask)
        tm.assert_series_equal(str_ser.notna(), ~str_mask)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_reindex_like.py ADDED
@@ -0,0 +1,41 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+ import numpy as np
4
+
5
+ from pandas import Series
6
+ import pandas._testing as tm
7
+
8
+
9
+ def test_reindex_like(datetime_series):
10
+ other = datetime_series[::2]
11
+ tm.assert_series_equal(
12
+ datetime_series.reindex(other.index), datetime_series.reindex_like(other)
13
+ )
14
+
15
+ # GH#7179
16
+ day1 = datetime(2013, 3, 5)
17
+ day2 = datetime(2013, 5, 5)
18
+ day3 = datetime(2014, 3, 5)
19
+
20
+ series1 = Series([5, None, None], [day1, day2, day3])
21
+ series2 = Series([None, None], [day1, day3])
22
+
23
+ result = series1.reindex_like(series2, method="pad")
24
+ expected = Series([5, np.nan], index=[day1, day3])
25
+ tm.assert_series_equal(result, expected)
26
+
27
+
28
def test_reindex_like_nearest():
    # reindex_like supports method="nearest", with and without a tolerance
    # (scalar or per-label list)
    ser = Series(np.arange(10, dtype="int64"))

    target = [0.1, 0.9, 1.5, 2.0]
    template = ser.reindex(target, method="nearest")
    expected = Series(np.around(target).astype("int64"), target)

    for extra_kwargs in ({}, {"tolerance": 1}, {"tolerance": [1, 2, 3, 4]}):
        result = ser.reindex_like(template, method="nearest", **extra_kwargs)
        tm.assert_series_equal(expected, result)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_rename_axis.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas import (
4
+ Index,
5
+ MultiIndex,
6
+ Series,
7
+ )
8
+ import pandas._testing as tm
9
+
10
+
11
class TestSeriesRenameAxis:
    """Tests for Series.rename_axis (renaming index level names)."""

    def test_rename_axis_mapper(self):
        # GH 19978
        mi = MultiIndex.from_product([["a", "b", "c"], [1, 2]], names=["ll", "nn"])
        ser = Series(list(range(len(mi))), index=mi)

        # dict mapper renames only the matching level name
        result = ser.rename_axis(index={"ll": "foo"})
        assert result.index.names == ["foo", "nn"]

        # callable mapper is applied to every level name
        result = ser.rename_axis(index=str.upper, axis=0)
        assert result.index.names == ["LL", "NN"]

        # list mapper replaces all level names positionally
        result = ser.rename_axis(index=["foo", "goo"])
        assert result.index.names == ["foo", "goo"]

        # a Series has no columns axis, so `columns=` must raise
        with pytest.raises(TypeError, match="unexpected"):
            ser.rename_axis(columns="wrong")

    def test_rename_axis_inplace(self, datetime_series):
        # GH 15704: inplace=True mutates the Series and returns None
        expected = datetime_series.rename_axis("foo")
        result = datetime_series
        no_return = result.rename_axis("foo", inplace=True)

        assert no_return is None
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize("kwargs", [{"mapper": None}, {"index": None}, {}])
    def test_rename_axis_none(self, kwargs):
        # GH 25034: passing None explicitly clears the index name, whereas
        # passing no kwargs at all is a no-op
        index = Index(list("abc"), name="foo")
        ser = Series([1, 2, 3], index=index)

        result = ser.rename_axis(**kwargs)
        expected_index = index.rename(None) if kwargs else index
        expected = Series([1, 2, 3], index=expected_index)
        tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_replace.py ADDED
@@ -0,0 +1,813 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas._config import using_pyarrow_string_dtype
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+ from pandas.core.arrays import IntervalArray
11
+
12
+
13
+ class TestSeriesReplace:
14
+ def test_replace_explicit_none(self):
15
+ # GH#36984 if the user explicitly passes value=None, give it to them
16
+ ser = pd.Series([0, 0, ""], dtype=object)
17
+ result = ser.replace("", None)
18
+ expected = pd.Series([0, 0, None], dtype=object)
19
+ tm.assert_series_equal(result, expected)
20
+
21
+ # Cast column 2 to object to avoid implicit cast when setting entry to ""
22
+ df = pd.DataFrame(np.zeros((3, 3))).astype({2: object})
23
+ df.iloc[2, 2] = ""
24
+ result = df.replace("", None)
25
+ expected = pd.DataFrame(
26
+ {
27
+ 0: np.zeros(3),
28
+ 1: np.zeros(3),
29
+ 2: np.array([0.0, 0.0, None], dtype=object),
30
+ }
31
+ )
32
+ assert expected.iloc[2, 2] is None
33
+ tm.assert_frame_equal(result, expected)
34
+
35
+ # GH#19998 same thing with object dtype
36
+ ser = pd.Series([10, 20, 30, "a", "a", "b", "a"])
37
+ result = ser.replace("a", None)
38
+ expected = pd.Series([10, 20, 30, None, None, "b", None])
39
+ assert expected.iloc[-1] is None
40
+ tm.assert_series_equal(result, expected)
41
+
42
+ def test_replace_noop_doesnt_downcast(self):
43
+ # GH#44498
44
+ ser = pd.Series([None, None, pd.Timestamp("2021-12-16 17:31")], dtype=object)
45
+ res = ser.replace({np.nan: None}) # should be a no-op
46
+ tm.assert_series_equal(res, ser)
47
+ assert res.dtype == object
48
+
49
+ # same thing but different calling convention
50
+ res = ser.replace(np.nan, None)
51
+ tm.assert_series_equal(res, ser)
52
+ assert res.dtype == object
53
+
54
+ def test_replace(self):
55
+ N = 50
56
+ ser = pd.Series(np.random.default_rng(2).standard_normal(N))
57
+ ser[0:4] = np.nan
58
+ ser[6:10] = 0
59
+
60
+ # replace list with a single value
61
+ return_value = ser.replace([np.nan], -1, inplace=True)
62
+ assert return_value is None
63
+
64
+ exp = ser.fillna(-1)
65
+ tm.assert_series_equal(ser, exp)
66
+
67
+ rs = ser.replace(0.0, np.nan)
68
+ ser[ser == 0.0] = np.nan
69
+ tm.assert_series_equal(rs, ser)
70
+
71
+ ser = pd.Series(
72
+ np.fabs(np.random.default_rng(2).standard_normal(N)),
73
+ pd.date_range("2020-01-01", periods=N),
74
+ dtype=object,
75
+ )
76
+ ser[:5] = np.nan
77
+ ser[6:10] = "foo"
78
+ ser[20:30] = "bar"
79
+
80
+ # replace list with a single value
81
+ msg = "Downcasting behavior in `replace`"
82
+ with tm.assert_produces_warning(FutureWarning, match=msg):
83
+ rs = ser.replace([np.nan, "foo", "bar"], -1)
84
+
85
+ assert (rs[:5] == -1).all()
86
+ assert (rs[6:10] == -1).all()
87
+ assert (rs[20:30] == -1).all()
88
+ assert (pd.isna(ser[:5])).all()
89
+
90
+ # replace with different values
91
+ with tm.assert_produces_warning(FutureWarning, match=msg):
92
+ rs = ser.replace({np.nan: -1, "foo": -2, "bar": -3})
93
+
94
+ assert (rs[:5] == -1).all()
95
+ assert (rs[6:10] == -2).all()
96
+ assert (rs[20:30] == -3).all()
97
+ assert (pd.isna(ser[:5])).all()
98
+
99
+ # replace with different values with 2 lists
100
+ with tm.assert_produces_warning(FutureWarning, match=msg):
101
+ rs2 = ser.replace([np.nan, "foo", "bar"], [-1, -2, -3])
102
+ tm.assert_series_equal(rs, rs2)
103
+
104
+ # replace inplace
105
+ with tm.assert_produces_warning(FutureWarning, match=msg):
106
+ return_value = ser.replace([np.nan, "foo", "bar"], -1, inplace=True)
107
+ assert return_value is None
108
+
109
+ assert (ser[:5] == -1).all()
110
+ assert (ser[6:10] == -1).all()
111
+ assert (ser[20:30] == -1).all()
112
+
113
+ def test_replace_nan_with_inf(self):
114
+ ser = pd.Series([np.nan, 0, np.inf])
115
+ tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
116
+
117
+ ser = pd.Series([np.nan, 0, "foo", "bar", np.inf, None, pd.NaT])
118
+ tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
119
+ filled = ser.copy()
120
+ filled[4] = 0
121
+ tm.assert_series_equal(ser.replace(np.inf, 0), filled)
122
+
123
+ def test_replace_listlike_value_listlike_target(self, datetime_series):
124
+ ser = pd.Series(datetime_series.index)
125
+ tm.assert_series_equal(ser.replace(np.nan, 0), ser.fillna(0))
126
+
127
+ # malformed
128
+ msg = r"Replacement lists must match in length\. Expecting 3 got 2"
129
+ with pytest.raises(ValueError, match=msg):
130
+ ser.replace([1, 2, 3], [np.nan, 0])
131
+
132
+ # ser is dt64 so can't hold 1 or 2, so this replace is a no-op
133
+ result = ser.replace([1, 2], [np.nan, 0])
134
+ tm.assert_series_equal(result, ser)
135
+
136
+ ser = pd.Series([0, 1, 2, 3, 4])
137
+ result = ser.replace([0, 1, 2, 3, 4], [4, 3, 2, 1, 0])
138
+ tm.assert_series_equal(result, pd.Series([4, 3, 2, 1, 0]))
139
+
140
+ def test_replace_gh5319(self):
141
+ # API change from 0.12?
142
+ # GH 5319
143
+ ser = pd.Series([0, np.nan, 2, 3, 4])
144
+ expected = ser.ffill()
145
+ msg = (
146
+ "Series.replace without 'value' and with non-dict-like "
147
+ "'to_replace' is deprecated"
148
+ )
149
+ with tm.assert_produces_warning(FutureWarning, match=msg):
150
+ result = ser.replace([np.nan])
151
+ tm.assert_series_equal(result, expected)
152
+
153
+ ser = pd.Series([0, np.nan, 2, 3, 4])
154
+ expected = ser.ffill()
155
+ with tm.assert_produces_warning(FutureWarning, match=msg):
156
+ result = ser.replace(np.nan)
157
+ tm.assert_series_equal(result, expected)
158
+
159
+ def test_replace_datetime64(self):
160
+ # GH 5797
161
+ ser = pd.Series(pd.date_range("20130101", periods=5))
162
+ expected = ser.copy()
163
+ expected.loc[2] = pd.Timestamp("20120101")
164
+ result = ser.replace({pd.Timestamp("20130103"): pd.Timestamp("20120101")})
165
+ tm.assert_series_equal(result, expected)
166
+ result = ser.replace(pd.Timestamp("20130103"), pd.Timestamp("20120101"))
167
+ tm.assert_series_equal(result, expected)
168
+
169
+ def test_replace_nat_with_tz(self):
170
+ # GH 11792: Test with replacing NaT in a list with tz data
171
+ ts = pd.Timestamp("2015/01/01", tz="UTC")
172
+ s = pd.Series([pd.NaT, pd.Timestamp("2015/01/01", tz="UTC")])
173
+ result = s.replace([np.nan, pd.NaT], pd.Timestamp.min)
174
+ expected = pd.Series([pd.Timestamp.min, ts], dtype=object)
175
+ tm.assert_series_equal(expected, result)
176
+
177
+ def test_replace_timedelta_td64(self):
178
+ tdi = pd.timedelta_range(0, periods=5)
179
+ ser = pd.Series(tdi)
180
+
181
+ # Using a single dict argument means we go through replace_list
182
+ result = ser.replace({ser[1]: ser[3]})
183
+
184
+ expected = pd.Series([ser[0], ser[3], ser[2], ser[3], ser[4]])
185
+ tm.assert_series_equal(result, expected)
186
+
187
    def test_replace_with_single_list(self):
        """List to_replace with no value pads (deprecated); a failing inplace
        call must leave the Series unchanged."""
        ser = pd.Series([0, 1, 2, 3, 4])
        msg2 = (
            "Series.replace without 'value' and with non-dict-like "
            "'to_replace' is deprecated"
        )
        with tm.assert_produces_warning(FutureWarning, match=msg2):
            result = ser.replace([1, 2, 3])
        tm.assert_series_equal(result, pd.Series([0, 0, 0, 0, 4]))

        # same thing inplace
        s = ser.copy()
        with tm.assert_produces_warning(FutureWarning, match=msg2):
            return_value = s.replace([1, 2, 3], inplace=True)
        assert return_value is None
        tm.assert_series_equal(s, pd.Series([0, 0, 0, 0, 4]))

        # make sure things don't get corrupted when fillna call fails
        s = ser.copy()
        msg = (
            r"Invalid fill method\. Expecting pad \(ffill\) or backfill "
            r"\(bfill\)\. Got crash_cymbal"
        )
        msg3 = "The 'method' keyword in Series.replace is deprecated"
        with pytest.raises(ValueError, match=msg):
            with tm.assert_produces_warning(FutureWarning, match=msg3):
                return_value = s.replace([1, 2, 3], inplace=True, method="crash_cymbal")
        assert return_value is None
        tm.assert_series_equal(s, ser)
215
+
216
    def test_replace_mixed_types(self):
        """Replace with values of another kind upcasts only when needed (GH#44940)."""
        ser = pd.Series(np.arange(5), dtype="int64")

        def check_replace(to_rep, val, expected):
            # exercise both the out-of-place and the inplace path
            sc = ser.copy()
            result = ser.replace(to_rep, val)
            return_value = sc.replace(to_rep, val, inplace=True)
            assert return_value is None
            tm.assert_series_equal(expected, result)
            tm.assert_series_equal(expected, sc)

        # 3.0 can still be held in our int64 series, so we do not upcast GH#44940
        tr, v = [3], [3.0]
        check_replace(tr, v, ser)
        # Note this matches what we get with the scalars 3 and 3.0
        check_replace(tr[0], v[0], ser)

        # MUST upcast to float
        e = pd.Series([0, 1, 2, 3.5, 4])
        tr, v = [3], [3.5]
        check_replace(tr, v, e)

        # casts to object
        e = pd.Series([0, 1, 2, 3.5, "a"])
        tr, v = [3, 4], [3.5, "a"]
        check_replace(tr, v, e)

        # again casts to object
        e = pd.Series([0, 1, 2, 3.5, pd.Timestamp("20130101")])
        tr, v = [3, 4], [3.5, pd.Timestamp("20130101")]
        check_replace(tr, v, e)

        # casts to object
        e = pd.Series([0, 1, 2, 3.5, True], dtype="object")
        tr, v = [3, 4], [3.5, True]
        check_replace(tr, v, e)

        # test an object with dates + floats + integers + strings
        dr = pd.Series(pd.date_range("1/1/2001", "1/10/2001", freq="D"))
        result = dr.astype(object).replace([dr[0], dr[1], dr[2]], [1.0, 2, "a"])
        expected = pd.Series([1.0, 2, "a"] + dr[3:].tolist(), dtype=object)
        tm.assert_series_equal(result, expected)
258
+
259
+ def test_replace_bool_with_string_no_op(self):
260
+ s = pd.Series([True, False, True])
261
+ result = s.replace("fun", "in-the-sun")
262
+ tm.assert_series_equal(s, result)
263
+
264
+ def test_replace_bool_with_string(self):
265
+ # nonexistent elements
266
+ s = pd.Series([True, False, True])
267
+ result = s.replace(True, "2u")
268
+ expected = pd.Series(["2u", False, "2u"])
269
+ tm.assert_series_equal(expected, result)
270
+
271
+ def test_replace_bool_with_bool(self):
272
+ s = pd.Series([True, False, True])
273
+ result = s.replace(True, False)
274
+ expected = pd.Series([False] * len(s))
275
+ tm.assert_series_equal(expected, result)
276
+
277
+ def test_replace_with_dict_with_bool_keys(self):
278
+ s = pd.Series([True, False, True])
279
+ result = s.replace({"asdf": "asdb", True: "yes"})
280
+ expected = pd.Series(["yes", False, "yes"])
281
+ tm.assert_series_equal(result, expected)
282
+
283
    def test_replace_Int_with_na(self, any_int_ea_dtype):
        """Replacing with pd.NA keeps the nullable integer dtype (GH#38267)."""
        # GH 38267
        result = pd.Series([0, None], dtype=any_int_ea_dtype).replace(0, pd.NA)
        expected = pd.Series([pd.NA, pd.NA], dtype=any_int_ea_dtype)
        tm.assert_series_equal(result, expected)
        # also via the inplace path
        result = pd.Series([0, 1], dtype=any_int_ea_dtype).replace(0, pd.NA)
        result.replace(1, pd.NA, inplace=True)
        tm.assert_series_equal(result, expected)
291
+
292
    def test_replace2(self):
        """List/dict replace on an object Series (warns about downcasting),
        out-of-place and inplace."""
        N = 50
        ser = pd.Series(
            np.fabs(np.random.default_rng(2).standard_normal(N)),
            pd.date_range("2020-01-01", periods=N),
            dtype=object,
        )
        ser[:5] = np.nan
        ser[6:10] = "foo"
        ser[20:30] = "bar"

        # replace list with a single value
        msg = "Downcasting behavior in `replace`"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            rs = ser.replace([np.nan, "foo", "bar"], -1)

        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -1).all()
        assert (rs[20:30] == -1).all()
        # original is untouched by the out-of-place call
        assert (pd.isna(ser[:5])).all()

        # replace with different values
        with tm.assert_produces_warning(FutureWarning, match=msg):
            rs = ser.replace({np.nan: -1, "foo": -2, "bar": -3})

        assert (rs[:5] == -1).all()
        assert (rs[6:10] == -2).all()
        assert (rs[20:30] == -3).all()
        assert (pd.isna(ser[:5])).all()

        # replace with different values with 2 lists
        with tm.assert_produces_warning(FutureWarning, match=msg):
            rs2 = ser.replace([np.nan, "foo", "bar"], [-1, -2, -3])
        tm.assert_series_equal(rs, rs2)

        # replace inplace
        with tm.assert_produces_warning(FutureWarning, match=msg):
            return_value = ser.replace([np.nan, "foo", "bar"], -1, inplace=True)
        assert return_value is None
        assert (ser[:5] == -1).all()
        assert (ser[6:10] == -1).all()
        assert (ser[20:30] == -1).all()
334
+
335
+ @pytest.mark.parametrize("inplace", [True, False])
336
+ def test_replace_cascade(self, inplace):
337
+ # Test that replaced values are not replaced again
338
+ # GH #50778
339
+ ser = pd.Series([1, 2, 3])
340
+ expected = pd.Series([2, 3, 4])
341
+
342
+ res = ser.replace([1, 2, 3], [2, 3, 4], inplace=inplace)
343
+ if inplace:
344
+ tm.assert_series_equal(ser, expected)
345
+ else:
346
+ tm.assert_series_equal(res, expected)
347
+
348
    def test_replace_with_dictlike_and_string_dtype(self, nullable_string_dtype):
        """Dict replace preserves the nullable string dtype (GH#32621, GH#44940)."""
        ser = pd.Series(["one", "two", np.nan], dtype=nullable_string_dtype)
        expected = pd.Series(["1", "2", np.nan], dtype=nullable_string_dtype)
        result = ser.replace({"one": "1", "two": "2"})
        tm.assert_series_equal(expected, result)
354
+
355
+ def test_replace_with_empty_dictlike(self):
356
+ # GH 15289
357
+ s = pd.Series(list("abcd"))
358
+ tm.assert_series_equal(s, s.replace({}))
359
+
360
+ empty_series = pd.Series([])
361
+ tm.assert_series_equal(s, s.replace(empty_series))
362
+
363
+ def test_replace_string_with_number(self):
364
+ # GH 15743
365
+ s = pd.Series([1, 2, 3])
366
+ result = s.replace("2", np.nan)
367
+ expected = pd.Series([1, 2, 3])
368
+ tm.assert_series_equal(expected, result)
369
+
370
+ def test_replace_replacer_equals_replacement(self):
371
+ # GH 20656
372
+ # make sure all replacers are matching against original values
373
+ s = pd.Series(["a", "b"])
374
+ expected = pd.Series(["b", "a"])
375
+ result = s.replace({"a": "b", "b": "a"})
376
+ tm.assert_series_equal(expected, result)
377
+
378
+ def test_replace_unicode_with_number(self):
379
+ # GH 15743
380
+ s = pd.Series([1, 2, 3])
381
+ result = s.replace("2", np.nan)
382
+ expected = pd.Series([1, 2, 3])
383
+ tm.assert_series_equal(expected, result)
384
+
385
    def test_replace_mixed_types_with_string(self):
        """A list mixing int and str replacers on a mixed Series (downcast warns)."""
        # Testing mixed
        s = pd.Series([1, 2, 3, "4", 4, 5])
        msg = "Downcasting behavior in `replace`"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = s.replace([2, "4"], np.nan)
        expected = pd.Series([1, np.nan, 3, np.nan, 4, 5])
        tm.assert_series_equal(expected, result)
393
+
394
+ @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="can't fill 0 in string")
395
+ @pytest.mark.parametrize(
396
+ "categorical, numeric",
397
+ [
398
+ (pd.Categorical(["A"], categories=["A", "B"]), [1]),
399
+ (pd.Categorical(["A", "B"], categories=["A", "B"]), [1, 2]),
400
+ ],
401
+ )
402
+ def test_replace_categorical(self, categorical, numeric):
403
+ # GH 24971, GH#23305
404
+ ser = pd.Series(categorical)
405
+ msg = "Downcasting behavior in `replace`"
406
+ msg = "with CategoricalDtype is deprecated"
407
+ with tm.assert_produces_warning(FutureWarning, match=msg):
408
+ result = ser.replace({"A": 1, "B": 2})
409
+ expected = pd.Series(numeric).astype("category")
410
+ if 2 not in expected.cat.categories:
411
+ # i.e. categories should be [1, 2] even if there are no "B"s present
412
+ # GH#44940
413
+ expected = expected.cat.add_categories(2)
414
+ tm.assert_series_equal(expected, result)
415
+
416
    @pytest.mark.parametrize(
        "data, data_exp", [(["a", "b", "c"], ["b", "b", "c"]), (["a"], ["b"])]
    )
    def test_replace_categorical_inplace(self, data, data_exp):
        """inplace replace on categorical data; the call is deprecation-warned (GH#53358)."""
        # GH 53358
        result = pd.Series(data, dtype="category")
        msg = "with CategoricalDtype is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result.replace(to_replace="a", value="b", inplace=True)
        expected = pd.Series(data_exp, dtype="category")
        tm.assert_series_equal(result, expected)
427
+
428
    def test_replace_categorical_single(self):
        """Scalar replace on a categorical: out-of-place leaves the original
        untouched, inplace mutates it (GH#26988)."""
        # GH 26988
        dti = pd.date_range("2016-01-01", periods=3, tz="US/Pacific")
        s = pd.Series(dti)
        c = s.astype("category")

        # build the expected result: last element becomes "foo", old category dropped
        expected = c.copy()
        expected = expected.cat.add_categories("foo")
        expected[2] = "foo"
        expected = expected.cat.remove_unused_categories()
        assert c[2] != "foo"

        msg = "with CategoricalDtype is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = c.replace(c[2], "foo")
        tm.assert_series_equal(expected, result)
        assert c[2] != "foo"  # ensure non-inplace call does not alter original

        msg = "with CategoricalDtype is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            return_value = c.replace(c[2], "foo", inplace=True)
        assert return_value is None
        tm.assert_series_equal(expected, c)

        first_value = c[0]
        msg = "with CategoricalDtype is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            return_value = c.replace(c[1], c[0], inplace=True)
        assert return_value is None
        assert c[0] == c[1] == first_value  # test replacing with existing value
458
+
459
+ def test_replace_with_no_overflowerror(self):
460
+ # GH 25616
461
+ # casts to object without Exception from OverflowError
462
+ s = pd.Series([0, 1, 2, 3, 4])
463
+ result = s.replace([3], ["100000000000000000000"])
464
+ expected = pd.Series([0, 1, 2, "100000000000000000000", 4])
465
+ tm.assert_series_equal(result, expected)
466
+
467
+ s = pd.Series([0, "100000000000000000000", "100000000000000000001"])
468
+ result = s.replace(["100000000000000000000"], [1])
469
+ expected = pd.Series([0, 1, "100000000000000000001"])
470
+ tm.assert_series_equal(result, expected)
471
+
472
+ @pytest.mark.parametrize(
473
+ "ser, to_replace, exp",
474
+ [
475
+ ([1, 2, 3], {1: 2, 2: 3, 3: 4}, [2, 3, 4]),
476
+ (["1", "2", "3"], {"1": "2", "2": "3", "3": "4"}, ["2", "3", "4"]),
477
+ ],
478
+ )
479
+ def test_replace_commutative(self, ser, to_replace, exp):
480
+ # GH 16051
481
+ # DataFrame.replace() overwrites when values are non-numeric
482
+
483
+ series = pd.Series(ser)
484
+
485
+ expected = pd.Series(exp)
486
+ result = series.replace(to_replace)
487
+
488
+ tm.assert_series_equal(result, expected)
489
+
490
+ @pytest.mark.parametrize(
491
+ "ser, exp", [([1, 2, 3], [1, True, 3]), (["x", 2, 3], ["x", True, 3])]
492
+ )
493
+ def test_replace_no_cast(self, ser, exp):
494
+ # GH 9113
495
+ # BUG: replace int64 dtype with bool coerces to int64
496
+
497
+ series = pd.Series(ser)
498
+ result = series.replace(2, True)
499
+ expected = pd.Series(exp)
500
+
501
+ tm.assert_series_equal(result, expected)
502
+
503
    def test_replace_invalid_to_replace(self):
        """A callable to_replace raises TypeError (GH#18634)."""
        # GH 18634
        # API: replace() should raise an exception if invalid argument is given
        series = pd.Series(["a", "b", "c "])
        msg = (
            r"Expecting 'to_replace' to be either a scalar, array-like, "
            r"dict or None, got invalid type.*"
        )
        msg2 = (
            "Series.replace without 'value' and with non-dict-like "
            "'to_replace' is deprecated"
        )
        # the deprecation warning fires before the TypeError is raised
        with pytest.raises(TypeError, match=msg):
            with tm.assert_produces_warning(FutureWarning, match=msg2):
                series.replace(lambda x: x.strip())
518
+
519
+ @pytest.mark.parametrize("frame", [False, True])
520
+ def test_replace_nonbool_regex(self, frame):
521
+ obj = pd.Series(["a", "b", "c "])
522
+ if frame:
523
+ obj = obj.to_frame()
524
+
525
+ msg = "'to_replace' must be 'None' if 'regex' is not a bool"
526
+ with pytest.raises(ValueError, match=msg):
527
+ obj.replace(to_replace=["a"], regex="foo")
528
+
529
+ @pytest.mark.parametrize("frame", [False, True])
530
+ def test_replace_empty_copy(self, frame):
531
+ obj = pd.Series([], dtype=np.float64)
532
+ if frame:
533
+ obj = obj.to_frame()
534
+
535
+ res = obj.replace(4, 5, inplace=True)
536
+ assert res is None
537
+
538
+ res = obj.replace(4, 5, inplace=False)
539
+ tm.assert_equal(res, obj)
540
+ assert res is not obj
541
+
542
    def test_replace_only_one_dictlike_arg(self, fixed_now_ts):
        """Dict-like to_replace and dict-like value are mutually exclusive (GH#33340)."""
        # GH#33340

        ser = pd.Series([1, 2, "A", fixed_now_ts, True])
        to_replace = {0: 1, 2: "A"}
        value = "foo"
        msg = "Series.replace cannot use dict-like to_replace and non-None value"
        with pytest.raises(ValueError, match=msg):
            ser.replace(to_replace, value)

        to_replace = 1
        value = {0: "foo", 2: "bar"}
        msg = "Series.replace cannot use dict-value and non-None to_replace"
        with pytest.raises(ValueError, match=msg):
            ser.replace(to_replace, value)
557
+
558
    def test_replace_extension_other(self, frame_or_series):
        """Replacing a value of a foreign type on an EA dtype is a no-op (GH#34530)."""
        # https://github.com/pandas-dev/pandas/issues/34530
        obj = frame_or_series(pd.array([1, 2, 3], dtype="Int64"))
        result = obj.replace("", "")  # no exception
        # should not have changed dtype
        tm.assert_equal(obj, result)
564
+
565
    def _check_replace_with_method(self, ser: pd.Series):
        """Shared helper: replace(value, method="pad") pads from the previous
        element — for Series and DataFrame, out-of-place and inplace.
        The 'method' keyword is deprecated, so every call asserts the warning."""
        df = ser.to_frame()

        msg1 = "The 'method' keyword in Series.replace is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg1):
            res = ser.replace(ser[1], method="pad")
        # element 1 is padded from element 0; everything else is unchanged
        expected = pd.Series([ser[0], ser[0]] + list(ser[2:]), dtype=ser.dtype)
        tm.assert_series_equal(res, expected)

        msg2 = "The 'method' keyword in DataFrame.replace is deprecated"
        with tm.assert_produces_warning(FutureWarning, match=msg2):
            res_df = df.replace(ser[1], method="pad")
        tm.assert_frame_equal(res_df, expected.to_frame())

        ser2 = ser.copy()
        with tm.assert_produces_warning(FutureWarning, match=msg1):
            res2 = ser2.replace(ser[1], method="pad", inplace=True)
        assert res2 is None
        tm.assert_series_equal(ser2, expected)

        with tm.assert_produces_warning(FutureWarning, match=msg2):
            res_df2 = df.replace(ser[1], method="pad", inplace=True)
        assert res_df2 is None
        tm.assert_frame_equal(df, expected.to_frame())
589
+
590
    def test_replace_ea_dtype_with_method(self, any_numeric_ea_dtype):
        """replace(..., method="pad") works for nullable numeric dtypes."""
        arr = pd.array([1, 2, pd.NA, 4], dtype=any_numeric_ea_dtype)
        ser = pd.Series(arr)

        self._check_replace_with_method(ser)
595
+
596
    @pytest.mark.parametrize("as_categorical", [True, False])
    def test_replace_interval_with_method(self, as_categorical):
        """replace(..., method="pad") works for IntervalDtype (which can't hold NA)."""
        # in particular interval that can't hold NA

        idx = pd.IntervalIndex.from_breaks(range(4))
        ser = pd.Series(idx)
        if as_categorical:
            ser = ser.astype("category")

        self._check_replace_with_method(ser)
606
+
607
    @pytest.mark.parametrize("as_period", [True, False])
    @pytest.mark.parametrize("as_categorical", [True, False])
    def test_replace_datetimelike_with_method(self, as_period, as_categorical):
        """replace(..., method="pad") works for tz-aware datetimes and periods,
        including a NaT entry, optionally wrapped in a categorical."""
        idx = pd.date_range("2016-01-01", periods=5, tz="US/Pacific")
        if as_period:
            idx = idx.tz_localize(None).to_period("D")

        ser = pd.Series(idx)
        ser.iloc[-2] = pd.NaT
        if as_categorical:
            ser = ser.astype("category")

        self._check_replace_with_method(ser)
620
+
621
+ def test_replace_with_compiled_regex(self):
622
+ # https://github.com/pandas-dev/pandas/issues/35680
623
+ s = pd.Series(["a", "b", "c"])
624
+ regex = re.compile("^a$")
625
+ result = s.replace({regex: "z"}, regex=True)
626
+ expected = pd.Series(["z", "b", "c"])
627
+ tm.assert_series_equal(result, expected)
628
+
629
+ def test_pandas_replace_na(self):
630
+ # GH#43344
631
+ ser = pd.Series(["AA", "BB", "CC", "DD", "EE", "", pd.NA], dtype="string")
632
+ regex_mapping = {
633
+ "AA": "CC",
634
+ "BB": "CC",
635
+ "EE": "CC",
636
+ "CC": "CC-REPL",
637
+ }
638
+ result = ser.replace(regex_mapping, regex=True)
639
+ exp = pd.Series(["CC", "CC", "CC-REPL", "DD", "CC", "", pd.NA], dtype="string")
640
+ tm.assert_series_equal(result, exp)
641
+
642
    @pytest.mark.parametrize(
        "dtype, input_data, to_replace, expected_data",
        [
            ("bool", [True, False], {True: False}, [False, False]),
            ("int64", [1, 2], {1: 10, 2: 20}, [10, 20]),
            ("Int64", [1, 2], {1: 10, 2: 20}, [10, 20]),
            ("float64", [1.1, 2.2], {1.1: 10.1, 2.2: 20.5}, [10.1, 20.5]),
            ("Float64", [1.1, 2.2], {1.1: 10.1, 2.2: 20.5}, [10.1, 20.5]),
            ("string", ["one", "two"], {"one": "1", "two": "2"}, ["1", "2"]),
            (
                pd.IntervalDtype("int64"),
                IntervalArray([pd.Interval(1, 2), pd.Interval(2, 3)]),
                {pd.Interval(1, 2): pd.Interval(10, 20)},
                IntervalArray([pd.Interval(10, 20), pd.Interval(2, 3)]),
            ),
            (
                pd.IntervalDtype("float64"),
                IntervalArray([pd.Interval(1.0, 2.7), pd.Interval(2.8, 3.1)]),
                {pd.Interval(1.0, 2.7): pd.Interval(10.6, 20.8)},
                IntervalArray([pd.Interval(10.6, 20.8), pd.Interval(2.8, 3.1)]),
            ),
            (
                pd.PeriodDtype("M"),
                [pd.Period("2020-05", freq="M")],
                {pd.Period("2020-05", freq="M"): pd.Period("2020-06", freq="M")},
                [pd.Period("2020-06", freq="M")],
            ),
        ],
    )
    def test_replace_dtype(self, dtype, input_data, to_replace, expected_data):
        """Dict replace preserves the dtype across numpy, nullable and EA dtypes (GH#33484)."""
        # GH#33484
        ser = pd.Series(input_data, dtype=dtype)
        result = ser.replace(to_replace)
        expected = pd.Series(expected_data, dtype=dtype)
        tm.assert_series_equal(result, expected)
677
+
678
+ def test_replace_string_dtype(self):
679
+ # GH#40732, GH#44940
680
+ ser = pd.Series(["one", "two", np.nan], dtype="string")
681
+ res = ser.replace({"one": "1", "two": "2"})
682
+ expected = pd.Series(["1", "2", np.nan], dtype="string")
683
+ tm.assert_series_equal(res, expected)
684
+
685
+ # GH#31644
686
+ ser2 = pd.Series(["A", np.nan], dtype="string")
687
+ res2 = ser2.replace("A", "B")
688
+ expected2 = pd.Series(["B", np.nan], dtype="string")
689
+ tm.assert_series_equal(res2, expected2)
690
+
691
+ ser3 = pd.Series(["A", "B"], dtype="string")
692
+ res3 = ser3.replace("A", pd.NA)
693
+ expected3 = pd.Series([pd.NA, "B"], dtype="string")
694
+ tm.assert_series_equal(res3, expected3)
695
+
696
+ def test_replace_string_dtype_list_to_replace(self):
697
+ # GH#41215, GH#44940
698
+ ser = pd.Series(["abc", "def"], dtype="string")
699
+ res = ser.replace(["abc", "any other string"], "xyz")
700
+ expected = pd.Series(["xyz", "def"], dtype="string")
701
+ tm.assert_series_equal(res, expected)
702
+
703
+ def test_replace_string_dtype_regex(self):
704
+ # GH#31644
705
+ ser = pd.Series(["A", "B"], dtype="string")
706
+ res = ser.replace(r".", "C", regex=True)
707
+ expected = pd.Series(["C", "C"], dtype="string")
708
+ tm.assert_series_equal(res, expected)
709
+
710
+ def test_replace_nullable_numeric(self):
711
+ # GH#40732, GH#44940
712
+
713
+ floats = pd.Series([1.0, 2.0, 3.999, 4.4], dtype=pd.Float64Dtype())
714
+ assert floats.replace({1.0: 9}).dtype == floats.dtype
715
+ assert floats.replace(1.0, 9).dtype == floats.dtype
716
+ assert floats.replace({1.0: 9.0}).dtype == floats.dtype
717
+ assert floats.replace(1.0, 9.0).dtype == floats.dtype
718
+
719
+ res = floats.replace(to_replace=[1.0, 2.0], value=[9.0, 10.0])
720
+ assert res.dtype == floats.dtype
721
+
722
+ ints = pd.Series([1, 2, 3, 4], dtype=pd.Int64Dtype())
723
+ assert ints.replace({1: 9}).dtype == ints.dtype
724
+ assert ints.replace(1, 9).dtype == ints.dtype
725
+ assert ints.replace({1: 9.0}).dtype == ints.dtype
726
+ assert ints.replace(1, 9.0).dtype == ints.dtype
727
+
728
+ # nullable (for now) raises instead of casting
729
+ with pytest.raises(TypeError, match="Invalid value"):
730
+ ints.replace({1: 9.5})
731
+ with pytest.raises(TypeError, match="Invalid value"):
732
+ ints.replace(1, 9.5)
733
+
734
    @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="can't fill 1 in string")
    @pytest.mark.parametrize("regex", [False, True])
    def test_replace_regex_dtype_series(self, regex):
        """The regex flag must not change the (downcast-warned) result dtype (GH#48644)."""
        # GH-48644
        series = pd.Series(["0"])
        expected = pd.Series([1])
        msg = "Downcasting behavior in `replace`"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = series.replace(to_replace="0", value=1, regex=regex)
        tm.assert_series_equal(result, expected)
744
+
745
    def test_replace_different_int_types(self, any_int_numpy_dtype):
        """Dict replace built from another int-dtyped Series behaves like a
        literal mapping dict (GH#45311)."""
        # GH#45311
        labs = pd.Series([1, 1, 1, 0, 0, 2, 2, 2], dtype=any_int_numpy_dtype)

        maps = pd.Series([0, 2, 1], dtype=any_int_numpy_dtype)
        # maps values -> positional index, i.e. {0: 0, 2: 1, 1: 2}
        map_dict = dict(zip(maps.values, maps.index))

        result = labs.replace(map_dict)
        expected = labs.replace({0: 0, 2: 1, 1: 2})
        tm.assert_series_equal(result, expected)
755
+
756
+ @pytest.mark.parametrize("val", [2, np.nan, 2.0])
757
+ def test_replace_value_none_dtype_numeric(self, val):
758
+ # GH#48231
759
+ ser = pd.Series([1, val])
760
+ result = ser.replace(val, None)
761
+ expected = pd.Series([1, None], dtype=object)
762
+ tm.assert_series_equal(result, expected)
763
+
764
    def test_replace_change_dtype_series(self, using_infer_string):
        """Replacing inside an object column can change the inferred dtype (GH#25797)."""
        # GH#25797
        df = pd.DataFrame.from_dict({"Test": ["0.5", True, "0.6"]})
        # only the infer-string mode warns about downcasting here
        warn = FutureWarning if using_infer_string else None
        with tm.assert_produces_warning(warn, match="Downcasting"):
            df["Test"] = df["Test"].replace([True], [np.nan])
        expected = pd.DataFrame.from_dict({"Test": ["0.5", np.nan, "0.6"]})
        tm.assert_frame_equal(df, expected)

        # replacing None with np.nan gives the same frame
        df = pd.DataFrame.from_dict({"Test": ["0.5", None, "0.6"]})
        df["Test"] = df["Test"].replace([None], [np.nan])
        tm.assert_frame_equal(df, expected)

        # ... as does plain fillna
        df = pd.DataFrame.from_dict({"Test": ["0.5", None, "0.6"]})
        df["Test"] = df["Test"].fillna(np.nan)
        tm.assert_frame_equal(df, expected)
780
+
781
+ @pytest.mark.parametrize("dtype", ["object", "Int64"])
782
+ def test_replace_na_in_obj_column(self, dtype):
783
+ # GH#47480
784
+ ser = pd.Series([0, 1, pd.NA], dtype=dtype)
785
+ expected = pd.Series([0, 2, pd.NA], dtype=dtype)
786
+ result = ser.replace(to_replace=1, value=2)
787
+ tm.assert_series_equal(result, expected)
788
+
789
+ ser.replace(to_replace=1, value=2, inplace=True)
790
+ tm.assert_series_equal(ser, expected)
791
+
792
+ @pytest.mark.parametrize("val", [0, 0.5])
793
+ def test_replace_numeric_column_with_na(self, val):
794
+ # GH#50758
795
+ ser = pd.Series([val, 1])
796
+ expected = pd.Series([val, pd.NA])
797
+ result = ser.replace(to_replace=1, value=pd.NA)
798
+ tm.assert_series_equal(result, expected)
799
+
800
+ ser.replace(to_replace=1, value=pd.NA, inplace=True)
801
+ tm.assert_series_equal(ser, expected)
802
+
803
+ def test_replace_ea_float_with_bool(self):
804
+ # GH#55398
805
+ ser = pd.Series([0.0], dtype="Float64")
806
+ expected = ser.copy()
807
+ result = ser.replace(False, 1.0)
808
+ tm.assert_series_equal(result, expected)
809
+
810
+ ser = pd.Series([False], dtype="boolean")
811
+ expected = ser.copy()
812
+ result = ser.replace(0.0, True)
813
+ tm.assert_series_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_set_name.py ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+ from pandas import Series
4
+
5
+
6
class TestSetName:
    """Tests for Series._set_name and the ``name`` attribute setter."""

    def test_set_name(self):
        # _set_name returns a new object and leaves the original untouched
        ser = Series([1, 2, 3])
        renamed = ser._set_name("foo")
        assert renamed.name == "foo"
        assert ser.name is None
        assert renamed is not ser

    def test_set_name_attribute(self):
        ser = Series([1, 2, 3])
        ser2 = Series([1, 2, 3], name="bar")
        # any hashable is a legal name
        for candidate in [7, 7.0, "name", datetime(2001, 1, 1), (1,), "\u05D0"]:
            ser.name = candidate
            assert ser.name == candidate
            ser2.name = candidate
            assert ser2.name == candidate
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_size.py ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas import Series
4
+
5
+
6
@pytest.mark.parametrize(
    "data, index, expected",
    [
        ([1, 2, 3], None, 3),
        ({"a": 1, "b": 2, "c": 3}, None, 3),
        ([1, 2, 3], ["x", "y", "z"], 3),
        ([1, 2, 3, 4, 5], ["x", "y", "z", "w", "n"], 5),
        ([1, 2, 3, 4], ["x", "y", "z", "w"], 4),
    ],
)
def test_series(data, index, expected):
    """Series.size equals the number of elements and is a plain int (GH#52897).

    Fix: removed two exact-duplicate parameter cases from the original list
    (``([1, 2, 3], None, 3)`` and ``([1, 2, 3], ["x", "y", "z"], 3)`` each
    appeared twice); they re-ran identical scenarios without adding coverage.
    """
    ser = Series(data, index=index)
    assert ser.size == expected
    assert isinstance(ser.size, int)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_to_frame.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas import (
4
+ DataFrame,
5
+ Index,
6
+ Series,
7
+ )
8
+ import pandas._testing as tm
9
+
10
+
11
+ class TestToFrame:
12
+ def test_to_frame_respects_name_none(self):
13
+ # GH#44212 if we explicitly pass name=None, then that should be respected,
14
+ # not changed to 0
15
+ # GH-45448 this is first deprecated & enforced in 2.0
16
+ ser = Series(range(3))
17
+ result = ser.to_frame(None)
18
+
19
+ exp_index = Index([None], dtype=object)
20
+ tm.assert_index_equal(result.columns, exp_index)
21
+
22
+ result = ser.rename("foo").to_frame(None)
23
+ exp_index = Index([None], dtype=object)
24
+ tm.assert_index_equal(result.columns, exp_index)
25
+
26
+ def test_to_frame(self, datetime_series):
27
+ datetime_series.name = None
28
+ rs = datetime_series.to_frame()
29
+ xp = DataFrame(datetime_series.values, index=datetime_series.index)
30
+ tm.assert_frame_equal(rs, xp)
31
+
32
+ datetime_series.name = "testname"
33
+ rs = datetime_series.to_frame()
34
+ xp = DataFrame(
35
+ {"testname": datetime_series.values}, index=datetime_series.index
36
+ )
37
+ tm.assert_frame_equal(rs, xp)
38
+
39
+ rs = datetime_series.to_frame(name="testdifferent")
40
+ xp = DataFrame(
41
+ {"testdifferent": datetime_series.values}, index=datetime_series.index
42
+ )
43
+ tm.assert_frame_equal(rs, xp)
44
+
45
+ @pytest.mark.filterwarnings(
46
+ "ignore:Passing a BlockManager|Passing a SingleBlockManager:DeprecationWarning"
47
+ )
48
+ def test_to_frame_expanddim(self):
49
+ # GH#9762
50
+
51
+ class SubclassedSeries(Series):
52
+ @property
53
+ def _constructor_expanddim(self):
54
+ return SubclassedFrame
55
+
56
+ class SubclassedFrame(DataFrame):
57
+ pass
58
+
59
+ ser = SubclassedSeries([1, 2, 3], name="X")
60
+ result = ser.to_frame()
61
+ assert isinstance(result, SubclassedFrame)
62
+ expected = SubclassedFrame({"X": [1, 2, 3]})
63
+ tm.assert_frame_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_unstack.py ADDED
@@ -0,0 +1,169 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ from pandas import (
6
+ DataFrame,
7
+ Index,
8
+ MultiIndex,
9
+ Series,
10
+ date_range,
11
+ )
12
+ import pandas._testing as tm
13
+
14
+
15
def test_unstack_preserves_object():
    """unstack keeps object dtype instead of re-inferring, for either level."""
    mi = MultiIndex.from_product([["bar", "foo"], ["one", "two"]])
    ser = Series(np.arange(4.0), index=mi, dtype=object)

    assert (ser.unstack().dtypes == object).all()
    assert (ser.unstack(level=0).dtypes == object).all()
25
+
26
+
27
def test_unstack():
    """Basic unstack behavior: level selection, 3-level indexes, and
    MultiIndexes containing NaN codes (GH#5873)."""
    index = MultiIndex(
        levels=[["bar", "foo"], ["one", "three", "two"]],
        codes=[[1, 1, 0, 0], [0, 1, 0, 2]],
    )

    s = Series(np.arange(4.0), index=index)
    unstacked = s.unstack()

    expected = DataFrame(
        [[2.0, np.nan, 3.0], [0.0, 1.0, np.nan]],
        index=["bar", "foo"],
        columns=["one", "three", "two"],
    )

    tm.assert_frame_equal(unstacked, expected)

    # unstacking level 0 instead of -1 transposes the result
    unstacked = s.unstack(level=0)
    tm.assert_frame_equal(unstacked, expected.T)

    # three-level index, unstack the outermost level
    index = MultiIndex(
        levels=[["bar"], ["one", "two", "three"], [0, 1]],
        codes=[[0, 0, 0, 0, 0, 0], [0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]],
    )
    s = Series(np.random.default_rng(2).standard_normal(6), index=index)
    exp_index = MultiIndex(
        levels=[["one", "two", "three"], [0, 1]],
        codes=[[0, 1, 2, 0, 1, 2], [0, 1, 0, 1, 0, 1]],
    )
    expected = DataFrame({"bar": s.values}, index=exp_index).sort_index(level=0)
    unstacked = s.unstack(0).sort_index()
    tm.assert_frame_equal(unstacked, expected)

    # GH5873: NaN in an index level becomes a NaN column label
    idx = MultiIndex.from_arrays([[101, 102], [3.5, np.nan]])
    ts = Series([1, 2], index=idx)
    left = ts.unstack()
    right = DataFrame(
        [[np.nan, 1], [2, np.nan]], index=[101, 102], columns=[np.nan, 3.5]
    )
    tm.assert_frame_equal(left, right)

    # NaN inside a remaining (non-unstacked) level survives in the row index
    idx = MultiIndex.from_arrays(
        [
            ["cat", "cat", "cat", "dog", "dog"],
            ["a", "a", "b", "a", "b"],
            [1, 2, 1, 1, np.nan],
        ]
    )
    ts = Series([1.0, 1.1, 1.2, 1.3, 1.4], index=idx)
    right = DataFrame(
        [[1.0, 1.3], [1.1, np.nan], [np.nan, 1.4], [1.2, np.nan]],
        columns=["cat", "dog"],
    )
    tpls = [("a", 1), ("a", 2), ("b", np.nan), ("b", 1)]
    right.index = MultiIndex.from_tuples(tpls)
    tm.assert_frame_equal(ts.unstack(level=0), right)
84
+
85
+
86
def test_unstack_tuplename_in_multiindex():
    """A tuple-valued level name can be passed to unstack (GH#19966)."""
    idx = MultiIndex.from_product(
        [["a", "b", "c"], [1, 2, 3]], names=[("A", "a"), ("B", "b")]
    )
    result = Series(1, index=idx).unstack(("A", "a"))

    exp_columns = MultiIndex.from_tuples(
        [("a",), ("b",), ("c",)], names=[("A", "a")]
    )
    expected = DataFrame(
        [[1, 1, 1], [1, 1, 1], [1, 1, 1]],
        columns=exp_columns,
        index=Index([1, 2, 3], name=("B", "b")),
    )
    tm.assert_frame_equal(result, expected)
100
+
101
+
102
@pytest.mark.parametrize(
    "unstack_idx, expected_values, expected_index, expected_columns",
    [
        (
            ("A", "a"),
            [[1, 1], [1, 1], [1, 1], [1, 1]],
            MultiIndex.from_tuples([(1, 3), (1, 4), (2, 3), (2, 4)], names=["B", "C"]),
            MultiIndex.from_tuples([("a",), ("b",)], names=[("A", "a")]),
        ),
        (
            (("A", "a"), "B"),
            [[1, 1, 1, 1], [1, 1, 1, 1]],
            Index([3, 4], name="C"),
            MultiIndex.from_tuples(
                [("a", 1), ("a", 2), ("b", 1), ("b", 2)], names=[("A", "a"), "B"]
            ),
        ),
    ],
)
def test_unstack_mixed_type_name_in_multiindex(
    unstack_idx, expected_values, expected_index, expected_columns
):
    """Unstacking by one or several level names works when names mix tuples
    and plain strings (GH#19966)."""
    # GH 19966
    idx = MultiIndex.from_product(
        [["a", "b"], [1, 2], [3, 4]], names=[("A", "a"), "B", "C"]
    )
    ser = Series(1, index=idx)
    result = ser.unstack(unstack_idx)

    expected = DataFrame(
        expected_values, columns=expected_columns, index=expected_index
    )
    tm.assert_frame_equal(result, expected)
135
+
136
+
137
+ def test_unstack_multi_index_categorical_values():
138
+ df = DataFrame(
139
+ np.random.default_rng(2).standard_normal((10, 4)),
140
+ columns=Index(list("ABCD"), dtype=object),
141
+ index=date_range("2000-01-01", periods=10, freq="B"),
142
+ )
143
+ mi = df.stack(future_stack=True).index.rename(["major", "minor"])
144
+ ser = Series(["foo"] * len(mi), index=mi, name="category", dtype="category")
145
+
146
+ result = ser.unstack()
147
+
148
+ dti = ser.index.levels[0]
149
+ c = pd.Categorical(["foo"] * len(dti))
150
+ expected = DataFrame(
151
+ {"A": c.copy(), "B": c.copy(), "C": c.copy(), "D": c.copy()},
152
+ columns=Index(list("ABCD"), name="minor"),
153
+ index=dti.rename("major"),
154
+ )
155
+ tm.assert_frame_equal(result, expected)
156
+
157
+
158
def test_unstack_mixed_level_names():
    """unstack by name works when level names mix str and int (GH#48763)."""
    idx = MultiIndex.from_arrays(
        [["a", "a"], [1, 2], ["red", "blue"]], names=("x", 0, "y")
    )
    result = Series([1, 2], index=idx).unstack("x")

    exp_index = MultiIndex.from_tuples([(1, "red"), (2, "blue")], names=[0, "y"])
    expected = DataFrame(
        [[1], [2]], columns=Index(["a"], name="x"), index=exp_index
    )
    tm.assert_frame_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_values.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ IntervalIndex,
6
+ Series,
7
+ period_range,
8
+ )
9
+ import pandas._testing as tm
10
+
11
+
12
+ class TestValues:
13
+ @pytest.mark.parametrize(
14
+ "data",
15
+ [
16
+ period_range("2000", periods=4),
17
+ IntervalIndex.from_breaks([1, 2, 3, 4]),
18
+ ],
19
+ )
20
+ def test_values_object_extension_dtypes(self, data):
21
+ # https://github.com/pandas-dev/pandas/issues/23995
22
+ result = Series(data).values
23
+ expected = np.array(data.astype(object))
24
+ tm.assert_numpy_array_equal(result, expected)
25
+
26
+ def test_values(self, datetime_series):
27
+ tm.assert_almost_equal(
28
+ datetime_series.values, list(datetime_series), check_dtype=False
29
+ )
llmeval-env/lib/python3.10/site-packages/pandas/tests/series/methods/test_view.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ Index,
6
+ Series,
7
+ array,
8
+ date_range,
9
+ )
10
+ import pandas._testing as tm
11
+
12
# Series.view is deprecated; suppress its FutureWarning for every test in
# this module so the deprecation does not drown out real failures.
pytestmark = pytest.mark.filterwarnings(
    "ignore:Series.view is deprecated and will be removed in a future version.:FutureWarning"  # noqa: E501
)
15
+
16
+
17
class TestView:
    """Tests for ``Series.view`` (deprecated) between i8 and datetime-like dtypes."""

    def test_view_i8_to_datetimelike(self):
        # Viewing an i8 Series as a datetime-like dtype reconstructs the
        # original values; freq metadata is not carried over.
        dti = date_range("2000", periods=4, tz="US/Central")
        ser = Series(dti.asi8)

        result = ser.view(dti.dtype)
        tm.assert_datetime_array_equal(result._values, dti._data._with_freq(None))

        pi = dti.tz_localize(None).to_period("D")
        ser = Series(pi.asi8)
        result = ser.view(pi.dtype)
        tm.assert_period_array_equal(result._values, pi._data)

    def test_view_tz(self):
        # GH#24024: viewing a tz-aware Series as i8 exposes the UTC epoch
        # nanoseconds, not wall-clock values.
        ser = Series(date_range("2000", periods=4, tz="US/Central"))
        result = ser.view("i8")
        expected = Series(
            [
                946706400000000000,
                946792800000000000,
                946879200000000000,
                946965600000000000,
            ]
        )
        tm.assert_series_equal(result, expected)

    @pytest.mark.parametrize(
        "first", ["m8[ns]", "M8[ns]", "M8[ns, US/Central]", "period[D]"]
    )
    @pytest.mark.parametrize(
        "second", ["m8[ns]", "M8[ns]", "M8[ns, US/Central]", "period[D]"]
    )
    @pytest.mark.parametrize("box", [Series, Index, array])
    def test_view_between_datetimelike(self, first, second, box):
        # Any datetime-like dtype can be viewed as any other; the underlying
        # i8 data is shared unchanged through every view.
        dti = date_range("2016-01-01", periods=3)

        orig = box(dti)
        obj = orig.view(first)
        assert obj.dtype == first
        tm.assert_numpy_array_equal(np.asarray(obj.view("i8")), dti.asi8)

        res = obj.view(second)
        assert res.dtype == second
        # Fix: the original asserted on ``obj`` here, duplicating the check
        # above and never verifying the second view. Check ``res`` instead.
        tm.assert_numpy_array_equal(np.asarray(res.view("i8")), dti.asi8)