applied-ai-018 commited on
Commit
3d14805
·
verified ·
1 Parent(s): ece90fb

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. ckpts/universal/global_step80/zero/26.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt +3 -0
  2. ckpts/universal/global_step80/zero/5.attention.dense.weight/exp_avg_sq.pt +3 -0
  3. ckpts/universal/global_step80/zero/5.attention.dense.weight/fp32.pt +3 -0
  4. ckpts/universal/global_step80/zero/9.attention.dense.weight/exp_avg.pt +3 -0
  5. ckpts/universal/global_step80/zero/9.attention.dense.weight/exp_avg_sq.pt +3 -0
  6. venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/__init__.cpython-310.pyc +0 -0
  7. venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_common.cpython-310.pyc +0 -0
  8. venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_concat.cpython-310.pyc +0 -0
  9. venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_dtypes.cpython-310.pyc +0 -0
  10. venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_generic.cpython-310.pyc +0 -0
  11. venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_inference.cpython-310.pyc +0 -0
  12. venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_missing.cpython-310.pyc +0 -0
  13. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__init__.py +0 -0
  14. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/__init__.cpython-310.pyc +0 -0
  15. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_can_hold_element.cpython-310.pyc +0 -0
  16. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_construct_from_scalar.cpython-310.pyc +0 -0
  17. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_construct_ndarray.cpython-310.pyc +0 -0
  18. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_construct_object_arr.cpython-310.pyc +0 -0
  19. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_dict_compat.cpython-310.pyc +0 -0
  20. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_downcast.cpython-310.pyc +0 -0
  21. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_find_common_type.cpython-310.pyc +0 -0
  22. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_infer_datetimelike.cpython-310.pyc +0 -0
  23. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_infer_dtype.cpython-310.pyc +0 -0
  24. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_maybe_box_native.cpython-310.pyc +0 -0
  25. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_promote.cpython-310.pyc +0 -0
  26. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_can_hold_element.py +79 -0
  27. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_construct_from_scalar.py +55 -0
  28. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_construct_ndarray.py +36 -0
  29. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_construct_object_arr.py +20 -0
  30. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_dict_compat.py +14 -0
  31. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_downcast.py +97 -0
  32. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_find_common_type.py +175 -0
  33. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_infer_datetimelike.py +28 -0
  34. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_infer_dtype.py +216 -0
  35. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_maybe_box_native.py +40 -0
  36. venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_promote.py +530 -0
  37. venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_common.py +801 -0
  38. venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_concat.py +51 -0
  39. venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_dtypes.py +1234 -0
  40. venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_generic.py +130 -0
  41. venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_inference.py +2047 -0
  42. venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_missing.py +923 -0
  43. venv/lib/python3.10/site-packages/pandas/tests/scalar/__init__.py +0 -0
  44. venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__init__.py +0 -0
  45. venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/__init__.cpython-310.pyc +0 -0
  46. venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_arithmetic.cpython-310.pyc +0 -0
  47. venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_asfreq.cpython-310.pyc +0 -0
  48. venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_period.cpython-310.pyc +0 -0
  49. venv/lib/python3.10/site-packages/pandas/tests/scalar/period/test_arithmetic.py +486 -0
  50. venv/lib/python3.10/site-packages/pandas/tests/scalar/period/test_asfreq.py +828 -0
ckpts/universal/global_step80/zero/26.mlp.dense_h_to_4h_swiglu.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:11bbc8a84b97727084c901f3e966cc4ce09242b295e464b0659e0f35aadedd50
3
+ size 33555612
ckpts/universal/global_step80/zero/5.attention.dense.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:817d3994058e0589f2e491033dac6750c6ec177ff2e04c8d64276c761b85325a
3
+ size 16778411
ckpts/universal/global_step80/zero/5.attention.dense.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8176931b85d45564d59578d63581fb9818a3f8f999c9510bf61109cdb2f9b1f9
3
+ size 16778317
ckpts/universal/global_step80/zero/9.attention.dense.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b089b79fa88bcbf7975e0c3c4817b386a1bc7eaa7811f246f7c27876b8b8caf4
3
+ size 16778396
ckpts/universal/global_step80/zero/9.attention.dense.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7850b3c4471d518ccd5ef9c9dab61a543715698272e3b6ec398edf835a29c95a
3
+ size 16778411
venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (187 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_common.cpython-310.pyc ADDED
Binary file (23.9 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_concat.cpython-310.pyc ADDED
Binary file (2.09 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_dtypes.cpython-310.pyc ADDED
Binary file (40.1 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_generic.cpython-310.pyc ADDED
Binary file (4.64 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_inference.cpython-310.pyc ADDED
Binary file (58.5 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/__pycache__/test_missing.cpython-310.pyc ADDED
Binary file (23.3 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (192 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_can_hold_element.cpython-310.pyc ADDED
Binary file (1.82 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_construct_from_scalar.cpython-310.pyc ADDED
Binary file (1.86 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_construct_ndarray.cpython-310.pyc ADDED
Binary file (1.61 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_construct_object_arr.cpython-310.pyc ADDED
Binary file (978 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_dict_compat.cpython-310.pyc ADDED
Binary file (681 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_downcast.cpython-310.pyc ADDED
Binary file (2.57 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_find_common_type.cpython-310.pyc ADDED
Binary file (4.12 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_infer_datetimelike.cpython-310.pyc ADDED
Binary file (897 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_infer_dtype.cpython-310.pyc ADDED
Binary file (5.69 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_maybe_box_native.cpython-310.pyc ADDED
Binary file (1.09 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/__pycache__/test_promote.cpython-310.pyc ADDED
Binary file (10.6 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_can_hold_element.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas.core.dtypes.cast import can_hold_element
4
+
5
+
6
+ def test_can_hold_element_range(any_int_numpy_dtype):
7
+ # GH#44261
8
+ dtype = np.dtype(any_int_numpy_dtype)
9
+ arr = np.array([], dtype=dtype)
10
+
11
+ rng = range(2, 127)
12
+ assert can_hold_element(arr, rng)
13
+
14
+ # negatives -> can't be held by uint dtypes
15
+ rng = range(-2, 127)
16
+ if dtype.kind == "i":
17
+ assert can_hold_element(arr, rng)
18
+ else:
19
+ assert not can_hold_element(arr, rng)
20
+
21
+ rng = range(2, 255)
22
+ if dtype == "int8":
23
+ assert not can_hold_element(arr, rng)
24
+ else:
25
+ assert can_hold_element(arr, rng)
26
+
27
+ rng = range(-255, 65537)
28
+ if dtype.kind == "u":
29
+ assert not can_hold_element(arr, rng)
30
+ elif dtype.itemsize < 4:
31
+ assert not can_hold_element(arr, rng)
32
+ else:
33
+ assert can_hold_element(arr, rng)
34
+
35
+ # empty
36
+ rng = range(-(10**10), -(10**10))
37
+ assert len(rng) == 0
38
+ # assert can_hold_element(arr, rng)
39
+
40
+ rng = range(10**10, 10**10)
41
+ assert len(rng) == 0
42
+ assert can_hold_element(arr, rng)
43
+
44
+
45
+ def test_can_hold_element_int_values_float_ndarray():
46
+ arr = np.array([], dtype=np.int64)
47
+
48
+ element = np.array([1.0, 2.0])
49
+ assert can_hold_element(arr, element)
50
+
51
+ assert not can_hold_element(arr, element + 0.5)
52
+
53
+ # integer but not losslessly castable to int64
54
+ element = np.array([3, 2**65], dtype=np.float64)
55
+ assert not can_hold_element(arr, element)
56
+
57
+
58
+ def test_can_hold_element_int8_int():
59
+ arr = np.array([], dtype=np.int8)
60
+
61
+ element = 2
62
+ assert can_hold_element(arr, element)
63
+ assert can_hold_element(arr, np.int8(element))
64
+ assert can_hold_element(arr, np.uint8(element))
65
+ assert can_hold_element(arr, np.int16(element))
66
+ assert can_hold_element(arr, np.uint16(element))
67
+ assert can_hold_element(arr, np.int32(element))
68
+ assert can_hold_element(arr, np.uint32(element))
69
+ assert can_hold_element(arr, np.int64(element))
70
+ assert can_hold_element(arr, np.uint64(element))
71
+
72
+ element = 2**9
73
+ assert not can_hold_element(arr, element)
74
+ assert not can_hold_element(arr, np.int16(element))
75
+ assert not can_hold_element(arr, np.uint16(element))
76
+ assert not can_hold_element(arr, np.int32(element))
77
+ assert not can_hold_element(arr, np.uint32(element))
78
+ assert not can_hold_element(arr, np.int64(element))
79
+ assert not can_hold_element(arr, np.uint64(element))
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_construct_from_scalar.py ADDED
@@ -0,0 +1,55 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.core.dtypes.cast import construct_1d_arraylike_from_scalar
5
+ from pandas.core.dtypes.dtypes import CategoricalDtype
6
+
7
+ from pandas import (
8
+ Categorical,
9
+ Timedelta,
10
+ )
11
+ import pandas._testing as tm
12
+
13
+
14
+ def test_cast_1d_array_like_from_scalar_categorical():
15
+ # see gh-19565
16
+ #
17
+ # Categorical result from scalar did not maintain
18
+ # categories and ordering of the passed dtype.
19
+ cats = ["a", "b", "c"]
20
+ cat_type = CategoricalDtype(categories=cats, ordered=False)
21
+ expected = Categorical(["a", "a"], categories=cats)
22
+
23
+ result = construct_1d_arraylike_from_scalar("a", len(expected), cat_type)
24
+ tm.assert_categorical_equal(result, expected)
25
+
26
+
27
+ def test_cast_1d_array_like_from_timestamp(fixed_now_ts):
28
+ # check we dont lose nanoseconds
29
+ ts = fixed_now_ts + Timedelta(1)
30
+ res = construct_1d_arraylike_from_scalar(ts, 2, np.dtype("M8[ns]"))
31
+ assert res[0] == ts
32
+
33
+
34
+ def test_cast_1d_array_like_from_timedelta():
35
+ # check we dont lose nanoseconds
36
+ td = Timedelta(1)
37
+ res = construct_1d_arraylike_from_scalar(td, 2, np.dtype("m8[ns]"))
38
+ assert res[0] == td
39
+
40
+
41
+ def test_cast_1d_array_like_mismatched_datetimelike():
42
+ td = np.timedelta64("NaT", "ns")
43
+ dt = np.datetime64("NaT", "ns")
44
+
45
+ with pytest.raises(TypeError, match="Cannot cast"):
46
+ construct_1d_arraylike_from_scalar(td, 2, dt.dtype)
47
+
48
+ with pytest.raises(TypeError, match="Cannot cast"):
49
+ construct_1d_arraylike_from_scalar(np.timedelta64(4, "ns"), 2, dt.dtype)
50
+
51
+ with pytest.raises(TypeError, match="Cannot cast"):
52
+ construct_1d_arraylike_from_scalar(dt, 2, td.dtype)
53
+
54
+ with pytest.raises(TypeError, match="Cannot cast"):
55
+ construct_1d_arraylike_from_scalar(np.datetime64(4, "ns"), 2, td.dtype)
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_construct_ndarray.py ADDED
@@ -0,0 +1,36 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ import pandas as pd
5
+ import pandas._testing as tm
6
+ from pandas.core.construction import sanitize_array
7
+
8
+
9
+ @pytest.mark.parametrize(
10
+ "values, dtype, expected",
11
+ [
12
+ ([1, 2, 3], None, np.array([1, 2, 3], dtype=np.int64)),
13
+ (np.array([1, 2, 3]), None, np.array([1, 2, 3])),
14
+ (["1", "2", None], None, np.array(["1", "2", None])),
15
+ (["1", "2", None], np.dtype("str"), np.array(["1", "2", None])),
16
+ ([1, 2, None], np.dtype("str"), np.array(["1", "2", None])),
17
+ ],
18
+ )
19
+ def test_construct_1d_ndarray_preserving_na(
20
+ values, dtype, expected, using_infer_string
21
+ ):
22
+ result = sanitize_array(values, index=None, dtype=dtype)
23
+ if using_infer_string and expected.dtype == object and dtype is None:
24
+ tm.assert_extension_array_equal(result, pd.array(expected))
25
+ else:
26
+ tm.assert_numpy_array_equal(result, expected)
27
+
28
+
29
+ @pytest.mark.parametrize("dtype", ["m8[ns]", "M8[ns]"])
30
+ def test_construct_1d_ndarray_preserving_na_datetimelike(dtype):
31
+ arr = np.arange(5, dtype=np.int64).view(dtype)
32
+ expected = np.array(list(arr), dtype=object)
33
+ assert all(isinstance(x, type(arr[0])) for x in expected)
34
+
35
+ result = sanitize_array(arr, index=None, dtype=np.dtype(object))
36
+ tm.assert_numpy_array_equal(result, expected)
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_construct_object_arr.py ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas.core.dtypes.cast import construct_1d_object_array_from_listlike
4
+
5
+
6
+ @pytest.mark.parametrize("datum1", [1, 2.0, "3", (4, 5), [6, 7], None])
7
+ @pytest.mark.parametrize("datum2", [8, 9.0, "10", (11, 12), [13, 14], None])
8
+ def test_cast_1d_array(datum1, datum2):
9
+ data = [datum1, datum2]
10
+ result = construct_1d_object_array_from_listlike(data)
11
+
12
+ # Direct comparison fails: https://github.com/numpy/numpy/issues/10218
13
+ assert result.dtype == "object"
14
+ assert list(result) == data
15
+
16
+
17
+ @pytest.mark.parametrize("val", [1, 2.0, None])
18
+ def test_cast_1d_array_invalid_scalar(val):
19
+ with pytest.raises(TypeError, match="has no len()"):
20
+ construct_1d_object_array_from_listlike(val)
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_dict_compat.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+
3
+ from pandas.core.dtypes.cast import dict_compat
4
+
5
+ from pandas import Timestamp
6
+
7
+
8
+ def test_dict_compat():
9
+ data_datetime64 = {np.datetime64("1990-03-15"): 1, np.datetime64("2015-03-15"): 2}
10
+ data_unchanged = {1: 2, 3: 4, 5: 6}
11
+ expected = {Timestamp("1990-3-15"): 1, Timestamp("2015-03-15"): 2}
12
+ assert dict_compat(data_datetime64) == expected
13
+ assert dict_compat(expected) == expected
14
+ assert dict_compat(data_unchanged) == data_unchanged
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_downcast.py ADDED
@@ -0,0 +1,97 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import decimal
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.core.dtypes.cast import maybe_downcast_to_dtype
7
+
8
+ from pandas import (
9
+ Series,
10
+ Timedelta,
11
+ )
12
+ import pandas._testing as tm
13
+
14
+
15
+ @pytest.mark.parametrize(
16
+ "arr,dtype,expected",
17
+ [
18
+ (
19
+ np.array([8.5, 8.6, 8.7, 8.8, 8.9999999999995]),
20
+ "infer",
21
+ np.array([8.5, 8.6, 8.7, 8.8, 8.9999999999995]),
22
+ ),
23
+ (
24
+ np.array([8.0, 8.0, 8.0, 8.0, 8.9999999999995]),
25
+ "infer",
26
+ np.array([8, 8, 8, 8, 9], dtype=np.int64),
27
+ ),
28
+ (
29
+ np.array([8.0, 8.0, 8.0, 8.0, 9.0000000000005]),
30
+ "infer",
31
+ np.array([8, 8, 8, 8, 9], dtype=np.int64),
32
+ ),
33
+ (
34
+ # This is a judgement call, but we do _not_ downcast Decimal
35
+ # objects
36
+ np.array([decimal.Decimal(0.0)]),
37
+ "int64",
38
+ np.array([decimal.Decimal(0.0)]),
39
+ ),
40
+ (
41
+ # GH#45837
42
+ np.array([Timedelta(days=1), Timedelta(days=2)], dtype=object),
43
+ "infer",
44
+ np.array([1, 2], dtype="m8[D]").astype("m8[ns]"),
45
+ ),
46
+ # TODO: similar for dt64, dt64tz, Period, Interval?
47
+ ],
48
+ )
49
+ def test_downcast(arr, expected, dtype):
50
+ result = maybe_downcast_to_dtype(arr, dtype)
51
+ tm.assert_numpy_array_equal(result, expected)
52
+
53
+
54
+ def test_downcast_booleans():
55
+ # see gh-16875: coercing of booleans.
56
+ ser = Series([True, True, False])
57
+ result = maybe_downcast_to_dtype(ser, np.dtype(np.float64))
58
+
59
+ expected = ser.values
60
+ tm.assert_numpy_array_equal(result, expected)
61
+
62
+
63
+ def test_downcast_conversion_no_nan(any_real_numpy_dtype):
64
+ dtype = any_real_numpy_dtype
65
+ expected = np.array([1, 2])
66
+ arr = np.array([1.0, 2.0], dtype=dtype)
67
+
68
+ result = maybe_downcast_to_dtype(arr, "infer")
69
+ tm.assert_almost_equal(result, expected, check_dtype=False)
70
+
71
+
72
+ def test_downcast_conversion_nan(float_numpy_dtype):
73
+ dtype = float_numpy_dtype
74
+ data = [1.0, 2.0, np.nan]
75
+
76
+ expected = np.array(data, dtype=dtype)
77
+ arr = np.array(data, dtype=dtype)
78
+
79
+ result = maybe_downcast_to_dtype(arr, "infer")
80
+ tm.assert_almost_equal(result, expected)
81
+
82
+
83
+ def test_downcast_conversion_empty(any_real_numpy_dtype):
84
+ dtype = any_real_numpy_dtype
85
+ arr = np.array([], dtype=dtype)
86
+ result = maybe_downcast_to_dtype(arr, np.dtype("int64"))
87
+ tm.assert_numpy_array_equal(result, np.array([], dtype=np.int64))
88
+
89
+
90
+ @pytest.mark.parametrize("klass", [np.datetime64, np.timedelta64])
91
+ def test_datetime_likes_nan(klass):
92
+ dtype = klass.__name__ + "[ns]"
93
+ arr = np.array([1, 2, np.nan])
94
+
95
+ exp = np.array([1, 2, klass("NaT")], dtype)
96
+ res = maybe_downcast_to_dtype(arr, dtype)
97
+ tm.assert_numpy_array_equal(res, exp)
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_find_common_type.py ADDED
@@ -0,0 +1,175 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas.core.dtypes.cast import find_common_type
5
+ from pandas.core.dtypes.common import pandas_dtype
6
+ from pandas.core.dtypes.dtypes import (
7
+ CategoricalDtype,
8
+ DatetimeTZDtype,
9
+ IntervalDtype,
10
+ PeriodDtype,
11
+ )
12
+
13
+ from pandas import (
14
+ Categorical,
15
+ Index,
16
+ )
17
+
18
+
19
+ @pytest.mark.parametrize(
20
+ "source_dtypes,expected_common_dtype",
21
+ [
22
+ ((np.int64,), np.int64),
23
+ ((np.uint64,), np.uint64),
24
+ ((np.float32,), np.float32),
25
+ ((object,), object),
26
+ # Into ints.
27
+ ((np.int16, np.int64), np.int64),
28
+ ((np.int32, np.uint32), np.int64),
29
+ ((np.uint16, np.uint64), np.uint64),
30
+ # Into floats.
31
+ ((np.float16, np.float32), np.float32),
32
+ ((np.float16, np.int16), np.float32),
33
+ ((np.float32, np.int16), np.float32),
34
+ ((np.uint64, np.int64), np.float64),
35
+ ((np.int16, np.float64), np.float64),
36
+ ((np.float16, np.int64), np.float64),
37
+ # Into others.
38
+ ((np.complex128, np.int32), np.complex128),
39
+ ((object, np.float32), object),
40
+ ((object, np.int16), object),
41
+ # Bool with int.
42
+ ((np.dtype("bool"), np.int64), object),
43
+ ((np.dtype("bool"), np.int32), object),
44
+ ((np.dtype("bool"), np.int16), object),
45
+ ((np.dtype("bool"), np.int8), object),
46
+ ((np.dtype("bool"), np.uint64), object),
47
+ ((np.dtype("bool"), np.uint32), object),
48
+ ((np.dtype("bool"), np.uint16), object),
49
+ ((np.dtype("bool"), np.uint8), object),
50
+ # Bool with float.
51
+ ((np.dtype("bool"), np.float64), object),
52
+ ((np.dtype("bool"), np.float32), object),
53
+ (
54
+ (np.dtype("datetime64[ns]"), np.dtype("datetime64[ns]")),
55
+ np.dtype("datetime64[ns]"),
56
+ ),
57
+ (
58
+ (np.dtype("timedelta64[ns]"), np.dtype("timedelta64[ns]")),
59
+ np.dtype("timedelta64[ns]"),
60
+ ),
61
+ (
62
+ (np.dtype("datetime64[ns]"), np.dtype("datetime64[ms]")),
63
+ np.dtype("datetime64[ns]"),
64
+ ),
65
+ (
66
+ (np.dtype("timedelta64[ms]"), np.dtype("timedelta64[ns]")),
67
+ np.dtype("timedelta64[ns]"),
68
+ ),
69
+ ((np.dtype("datetime64[ns]"), np.dtype("timedelta64[ns]")), object),
70
+ ((np.dtype("datetime64[ns]"), np.int64), object),
71
+ ],
72
+ )
73
+ def test_numpy_dtypes(source_dtypes, expected_common_dtype):
74
+ source_dtypes = [pandas_dtype(x) for x in source_dtypes]
75
+ assert find_common_type(source_dtypes) == expected_common_dtype
76
+
77
+
78
+ def test_raises_empty_input():
79
+ with pytest.raises(ValueError, match="no types given"):
80
+ find_common_type([])
81
+
82
+
83
+ @pytest.mark.parametrize(
84
+ "dtypes,exp_type",
85
+ [
86
+ ([CategoricalDtype()], "category"),
87
+ ([object, CategoricalDtype()], object),
88
+ ([CategoricalDtype(), CategoricalDtype()], "category"),
89
+ ],
90
+ )
91
+ def test_categorical_dtype(dtypes, exp_type):
92
+ assert find_common_type(dtypes) == exp_type
93
+
94
+
95
+ def test_datetimetz_dtype_match():
96
+ dtype = DatetimeTZDtype(unit="ns", tz="US/Eastern")
97
+ assert find_common_type([dtype, dtype]) == "datetime64[ns, US/Eastern]"
98
+
99
+
100
+ @pytest.mark.parametrize(
101
+ "dtype2",
102
+ [
103
+ DatetimeTZDtype(unit="ns", tz="Asia/Tokyo"),
104
+ np.dtype("datetime64[ns]"),
105
+ object,
106
+ np.int64,
107
+ ],
108
+ )
109
+ def test_datetimetz_dtype_mismatch(dtype2):
110
+ dtype = DatetimeTZDtype(unit="ns", tz="US/Eastern")
111
+ assert find_common_type([dtype, dtype2]) == object
112
+ assert find_common_type([dtype2, dtype]) == object
113
+
114
+
115
+ def test_period_dtype_match():
116
+ dtype = PeriodDtype(freq="D")
117
+ assert find_common_type([dtype, dtype]) == "period[D]"
118
+
119
+
120
+ @pytest.mark.parametrize(
121
+ "dtype2",
122
+ [
123
+ DatetimeTZDtype(unit="ns", tz="Asia/Tokyo"),
124
+ PeriodDtype(freq="2D"),
125
+ PeriodDtype(freq="h"),
126
+ np.dtype("datetime64[ns]"),
127
+ object,
128
+ np.int64,
129
+ ],
130
+ )
131
+ def test_period_dtype_mismatch(dtype2):
132
+ dtype = PeriodDtype(freq="D")
133
+ assert find_common_type([dtype, dtype2]) == object
134
+ assert find_common_type([dtype2, dtype]) == object
135
+
136
+
137
+ interval_dtypes = [
138
+ IntervalDtype(np.int64, "right"),
139
+ IntervalDtype(np.float64, "right"),
140
+ IntervalDtype(np.uint64, "right"),
141
+ IntervalDtype(DatetimeTZDtype(unit="ns", tz="US/Eastern"), "right"),
142
+ IntervalDtype("M8[ns]", "right"),
143
+ IntervalDtype("m8[ns]", "right"),
144
+ ]
145
+
146
+
147
+ @pytest.mark.parametrize("left", interval_dtypes)
148
+ @pytest.mark.parametrize("right", interval_dtypes)
149
+ def test_interval_dtype(left, right):
150
+ result = find_common_type([left, right])
151
+
152
+ if left is right:
153
+ assert result is left
154
+
155
+ elif left.subtype.kind in ["i", "u", "f"]:
156
+ # i.e. numeric
157
+ if right.subtype.kind in ["i", "u", "f"]:
158
+ # both numeric -> common numeric subtype
159
+ expected = IntervalDtype(np.float64, "right")
160
+ assert result == expected
161
+ else:
162
+ assert result == object
163
+
164
+ else:
165
+ assert result == object
166
+
167
+
168
+ @pytest.mark.parametrize("dtype", interval_dtypes)
169
+ def test_interval_dtype_with_categorical(dtype):
170
+ obj = Index([], dtype=dtype)
171
+
172
+ cat = Categorical([], categories=obj)
173
+
174
+ result = find_common_type([dtype, cat.dtype])
175
+ assert result == dtype
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_infer_datetimelike.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import numpy as np
2
+ import pytest
3
+
4
+ from pandas import (
5
+ DataFrame,
6
+ NaT,
7
+ Series,
8
+ Timestamp,
9
+ )
10
+
11
+
12
+ @pytest.mark.parametrize(
13
+ "data,exp_size",
14
+ [
15
+ # see gh-16362.
16
+ ([[NaT, "a", "b", 0], [NaT, "b", "c", 1]], 8),
17
+ ([[NaT, "a", 0], [NaT, "b", 1]], 6),
18
+ ],
19
+ )
20
+ def test_maybe_infer_to_datetimelike_df_construct(data, exp_size):
21
+ result = DataFrame(np.array(data))
22
+ assert result.size == exp_size
23
+
24
+
25
+ def test_maybe_infer_to_datetimelike_ser_construct():
26
+ # see gh-19671.
27
+ result = Series(["M1701", Timestamp("20130101")])
28
+ assert result.dtype.kind == "O"
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_infer_dtype.py ADDED
@@ -0,0 +1,216 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import (
2
+ date,
3
+ datetime,
4
+ timedelta,
5
+ )
6
+
7
+ import numpy as np
8
+ import pytest
9
+
10
+ from pandas.core.dtypes.cast import (
11
+ infer_dtype_from,
12
+ infer_dtype_from_array,
13
+ infer_dtype_from_scalar,
14
+ )
15
+ from pandas.core.dtypes.common import is_dtype_equal
16
+
17
+ from pandas import (
18
+ Categorical,
19
+ Interval,
20
+ Period,
21
+ Series,
22
+ Timedelta,
23
+ Timestamp,
24
+ date_range,
25
+ )
26
+
27
+
28
+ def test_infer_dtype_from_int_scalar(any_int_numpy_dtype):
29
+ # Test that infer_dtype_from_scalar is
30
+ # returning correct dtype for int and float.
31
+ data = np.dtype(any_int_numpy_dtype).type(12)
32
+ dtype, val = infer_dtype_from_scalar(data)
33
+ assert dtype == type(data)
34
+
35
+
36
+ def test_infer_dtype_from_float_scalar(float_numpy_dtype):
37
+ float_numpy_dtype = np.dtype(float_numpy_dtype).type
38
+ data = float_numpy_dtype(12)
39
+
40
+ dtype, val = infer_dtype_from_scalar(data)
41
+ assert dtype == float_numpy_dtype
42
+
43
+
44
+ @pytest.mark.parametrize(
45
+ "data,exp_dtype", [(12, np.int64), (np.float64(12), np.float64)]
46
+ )
47
+ def test_infer_dtype_from_python_scalar(data, exp_dtype):
48
+ dtype, val = infer_dtype_from_scalar(data)
49
+ assert dtype == exp_dtype
50
+
51
+
52
+ @pytest.mark.parametrize("bool_val", [True, False])
53
+ def test_infer_dtype_from_boolean(bool_val):
54
+ dtype, val = infer_dtype_from_scalar(bool_val)
55
+ assert dtype == np.bool_
56
+
57
+
58
+ def test_infer_dtype_from_complex(complex_dtype):
59
+ data = np.dtype(complex_dtype).type(1)
60
+ dtype, val = infer_dtype_from_scalar(data)
61
+ assert dtype == np.complex128
62
+
63
+
64
+ def test_infer_dtype_from_datetime():
65
+ dt64 = np.datetime64(1, "ns")
66
+ dtype, val = infer_dtype_from_scalar(dt64)
67
+ assert dtype == "M8[ns]"
68
+
69
+ ts = Timestamp(1)
70
+ dtype, val = infer_dtype_from_scalar(ts)
71
+ assert dtype == "M8[ns]"
72
+
73
+ dt = datetime(2000, 1, 1, 0, 0)
74
+ dtype, val = infer_dtype_from_scalar(dt)
75
+ assert dtype == "M8[us]"
76
+
77
+
78
+ def test_infer_dtype_from_timedelta():
79
+ td64 = np.timedelta64(1, "ns")
80
+ dtype, val = infer_dtype_from_scalar(td64)
81
+ assert dtype == "m8[ns]"
82
+
83
+ pytd = timedelta(1)
84
+ dtype, val = infer_dtype_from_scalar(pytd)
85
+ assert dtype == "m8[us]"
86
+
87
+ td = Timedelta(1)
88
+ dtype, val = infer_dtype_from_scalar(td)
89
+ assert dtype == "m8[ns]"
90
+
91
+
92
+ @pytest.mark.parametrize("freq", ["M", "D"])
93
+ def test_infer_dtype_from_period(freq):
94
+ p = Period("2011-01-01", freq=freq)
95
+ dtype, val = infer_dtype_from_scalar(p)
96
+
97
+ exp_dtype = f"period[{freq}]"
98
+
99
+ assert dtype == exp_dtype
100
+ assert val == p
101
+
102
+
103
+ def test_infer_dtype_misc():
104
+ dt = date(2000, 1, 1)
105
+ dtype, val = infer_dtype_from_scalar(dt)
106
+ assert dtype == np.object_
107
+
108
+ ts = Timestamp(1, tz="US/Eastern")
109
+ dtype, val = infer_dtype_from_scalar(ts)
110
+ assert dtype == "datetime64[ns, US/Eastern]"
111
+
112
+
113
+ @pytest.mark.parametrize("tz", ["UTC", "US/Eastern", "Asia/Tokyo"])
114
+ def test_infer_from_scalar_tz(tz):
115
+ dt = Timestamp(1, tz=tz)
116
+ dtype, val = infer_dtype_from_scalar(dt)
117
+
118
+ exp_dtype = f"datetime64[ns, {tz}]"
119
+
120
+ assert dtype == exp_dtype
121
+ assert val == dt
122
+
123
+
124
+ @pytest.mark.parametrize(
125
+ "left, right, subtype",
126
+ [
127
+ (0, 1, "int64"),
128
+ (0.0, 1.0, "float64"),
129
+ (Timestamp(0), Timestamp(1), "datetime64[ns]"),
130
+ (Timestamp(0, tz="UTC"), Timestamp(1, tz="UTC"), "datetime64[ns, UTC]"),
131
+ (Timedelta(0), Timedelta(1), "timedelta64[ns]"),
132
+ ],
133
+ )
134
+ def test_infer_from_interval(left, right, subtype, closed):
135
+ # GH 30337
136
+ interval = Interval(left, right, closed)
137
+ result_dtype, result_value = infer_dtype_from_scalar(interval)
138
+ expected_dtype = f"interval[{subtype}, {closed}]"
139
+ assert result_dtype == expected_dtype
140
+ assert result_value == interval
141
+
142
+
143
+ def test_infer_dtype_from_scalar_errors():
144
+ msg = "invalid ndarray passed to infer_dtype_from_scalar"
145
+
146
+ with pytest.raises(ValueError, match=msg):
147
+ infer_dtype_from_scalar(np.array([1]))
148
+
149
+
150
+ @pytest.mark.parametrize(
151
+ "value, expected",
152
+ [
153
+ ("foo", np.object_),
154
+ (b"foo", np.object_),
155
+ (1, np.int64),
156
+ (1.5, np.float64),
157
+ (np.datetime64("2016-01-01"), np.dtype("M8[s]")),
158
+ (Timestamp("20160101"), np.dtype("M8[s]")),
159
+ (Timestamp("20160101", tz="UTC"), "datetime64[s, UTC]"),
160
+ ],
161
+ )
162
+ def test_infer_dtype_from_scalar(value, expected, using_infer_string):
163
+ dtype, _ = infer_dtype_from_scalar(value)
164
+ if using_infer_string and value == "foo":
165
+ expected = "string"
166
+ assert is_dtype_equal(dtype, expected)
167
+
168
+ with pytest.raises(TypeError, match="must be list-like"):
169
+ infer_dtype_from_array(value)
170
+
171
+
172
+ @pytest.mark.parametrize(
173
+ "arr, expected",
174
+ [
175
+ ([1], np.dtype(int)),
176
+ (np.array([1], dtype=np.int64), np.int64),
177
+ ([np.nan, 1, ""], np.object_),
178
+ (np.array([[1.0, 2.0]]), np.float64),
179
+ (Categorical(list("aabc")), "category"),
180
+ (Categorical([1, 2, 3]), "category"),
181
+ (date_range("20160101", periods=3), np.dtype("=M8[ns]")),
182
+ (
183
+ date_range("20160101", periods=3, tz="US/Eastern"),
184
+ "datetime64[ns, US/Eastern]",
185
+ ),
186
+ (Series([1.0, 2, 3]), np.float64),
187
+ (Series(list("abc")), np.object_),
188
+ (
189
+ Series(date_range("20160101", periods=3, tz="US/Eastern")),
190
+ "datetime64[ns, US/Eastern]",
191
+ ),
192
+ ],
193
+ )
194
+ def test_infer_dtype_from_array(arr, expected, using_infer_string):
195
+ dtype, _ = infer_dtype_from_array(arr)
196
+ if (
197
+ using_infer_string
198
+ and isinstance(arr, Series)
199
+ and arr.tolist() == ["a", "b", "c"]
200
+ ):
201
+ expected = "string"
202
+ assert is_dtype_equal(dtype, expected)
203
+
204
+
205
+ @pytest.mark.parametrize("cls", [np.datetime64, np.timedelta64])
206
+ def test_infer_dtype_from_scalar_zerodim_datetimelike(cls):
207
+ # ndarray.item() can incorrectly return int instead of td64/dt64
208
+ val = cls(1234, "ns")
209
+ arr = np.array(val)
210
+
211
+ dtype, res = infer_dtype_from_scalar(arr)
212
+ assert dtype.type is cls
213
+ assert isinstance(res, cls)
214
+
215
+ dtype, res = infer_dtype_from(arr)
216
+ assert dtype.type is cls
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_maybe_box_native.py ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import datetime
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.core.dtypes.cast import maybe_box_native
7
+
8
+ from pandas import (
9
+ Interval,
10
+ Period,
11
+ Timedelta,
12
+ Timestamp,
13
+ )
14
+
15
+
16
@pytest.mark.parametrize(
    "obj,expected_dtype",
    [
        (b"\x00\x10", bytes),
        (int(4), int),
        (np.uint(4), int),
        (np.int32(-4), int),
        (np.uint8(4), int),
        (float(454.98), float),
        (np.float16(0.4), float),
        (np.float64(1.4), float),
        (np.bool_(False), bool),
        (datetime(2005, 2, 25), datetime),
        (np.datetime64("2005-02-25"), Timestamp),
        (Timestamp("2005-02-25"), Timestamp),
        (np.timedelta64(1, "D"), Timedelta),
        (Timedelta(1, "D"), Timedelta),
        (Interval(0, 1), Interval),
        (Period("4Q2005"), Period),
    ],
)
def test_maybe_box_native(obj, expected_dtype):
    # maybe_box_native unboxes numpy scalars to Python builtins and wraps
    # numpy datetime/timedelta in the pandas scalar types, while leaving
    # pandas scalars (Timestamp/Timedelta/Interval/Period) untouched.
    assert type(maybe_box_native(obj)) is expected_dtype
venv/lib/python3.10/site-packages/pandas/tests/dtypes/cast/test_promote.py ADDED
@@ -0,0 +1,530 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ These test the method maybe_promote from core/dtypes/cast.py
3
+ """
4
+
5
+ import datetime
6
+ from decimal import Decimal
7
+
8
+ import numpy as np
9
+ import pytest
10
+
11
+ from pandas._libs.tslibs import NaT
12
+
13
+ from pandas.core.dtypes.cast import maybe_promote
14
+ from pandas.core.dtypes.common import is_scalar
15
+ from pandas.core.dtypes.dtypes import DatetimeTZDtype
16
+ from pandas.core.dtypes.missing import isna
17
+
18
+ import pandas as pd
19
+
20
+
21
def _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar=None):
    """
    Shared driver that exercises maybe_promote with a scalar fill value.

    Parameters
    ----------
    dtype : dtype
        Dtype passed as the first argument to maybe_promote.
    fill_value : scalar
        Scalar passed as the second argument to maybe_promote.
    expected_dtype : dtype
        Dtype maybe_promote is expected to return (by design the same
        whether fill_value is handed over as a scalar or inside an array).
    exp_val_for_scalar : scalar
        Expected (possibly upcast) fill value returned by maybe_promote.
    """
    assert is_scalar(fill_value)

    # maybe_promote should hand the fill value back, upcast to the promoted
    # dtype where necessary.
    res_dtype, res_fill = maybe_promote(dtype, fill_value)

    assert res_dtype == expected_dtype
    _assert_match(res_fill, exp_val_for_scalar)
50
+
51
+
52
+ def _assert_match(result_fill_value, expected_fill_value):
53
+ # GH#23982/25425 require the same type in addition to equality/NA-ness
54
+ res_type = type(result_fill_value)
55
+ ex_type = type(expected_fill_value)
56
+
57
+ if hasattr(result_fill_value, "dtype"):
58
+ # Compare types in a way that is robust to platform-specific
59
+ # idiosyncrasies where e.g. sometimes we get "ulonglong" as an alias
60
+ # for "uint64" or "intc" as an alias for "int32"
61
+ assert result_fill_value.dtype.kind == expected_fill_value.dtype.kind
62
+ assert result_fill_value.dtype.itemsize == expected_fill_value.dtype.itemsize
63
+ else:
64
+ # On some builds, type comparison fails, e.g. np.int32 != np.int32
65
+ assert res_type == ex_type or res_type.__name__ == ex_type.__name__
66
+
67
+ match_value = result_fill_value == expected_fill_value
68
+ if match_value is pd.NA:
69
+ match_value = False
70
+
71
+ # Note: type check above ensures that we have the _same_ NA value
72
+ # for missing values, None == None (which is checked
73
+ # through match_value above), but np.nan != np.nan and pd.NaT != pd.NaT
74
+ match_missing = isna(result_fill_value) and isna(expected_fill_value)
75
+
76
+ assert match_value or match_missing
77
+
78
+
79
# boundary helpers so the parametrization below stays readable
_I8, _I16, _I32, _I64 = (np.iinfo(t) for t in ("int8", "int16", "int32", "int64"))
_U8, _U16, _U32, _U64 = (np.iinfo(t) for t in ("uint8", "uint16", "uint32", "uint64"))


@pytest.mark.parametrize(
    "dtype, fill_value, expected_dtype",
    [
        # size 8
        ("int8", 1, "int8"),
        ("int8", _I8.max + 1, "int16"),
        ("int8", _I16.max + 1, "int32"),
        ("int8", _I32.max + 1, "int64"),
        ("int8", _I64.max + 1, "object"),
        ("int8", -1, "int8"),
        ("int8", _I8.min - 1, "int16"),
        ("int8", _I16.min - 1, "int32"),
        ("int8", _I32.min - 1, "int64"),
        ("int8", _I64.min - 1, "object"),
        # keep signed-ness as long as possible
        ("uint8", 1, "uint8"),
        ("uint8", _I8.max + 1, "uint8"),
        ("uint8", _U8.max + 1, "uint16"),
        ("uint8", _I16.max + 1, "uint16"),
        ("uint8", _U16.max + 1, "uint32"),
        ("uint8", _I32.max + 1, "uint32"),
        ("uint8", _U32.max + 1, "uint64"),
        ("uint8", _I64.max + 1, "uint64"),
        ("uint8", _U64.max + 1, "object"),
        # max of uint8 cannot be contained in int8
        ("uint8", -1, "int16"),
        ("uint8", _I8.min - 1, "int16"),
        ("uint8", _I16.min - 1, "int32"),
        ("uint8", _I32.min - 1, "int64"),
        ("uint8", _I64.min - 1, "object"),
        # size 16
        ("int16", 1, "int16"),
        ("int16", _I8.max + 1, "int16"),
        ("int16", _I16.max + 1, "int32"),
        ("int16", _I32.max + 1, "int64"),
        ("int16", _I64.max + 1, "object"),
        ("int16", -1, "int16"),
        ("int16", _I8.min - 1, "int16"),
        ("int16", _I16.min - 1, "int32"),
        ("int16", _I32.min - 1, "int64"),
        ("int16", _I64.min - 1, "object"),
        ("uint16", 1, "uint16"),
        ("uint16", _I8.max + 1, "uint16"),
        ("uint16", _U8.max + 1, "uint16"),
        ("uint16", _I16.max + 1, "uint16"),
        ("uint16", _U16.max + 1, "uint32"),
        ("uint16", _I32.max + 1, "uint32"),
        ("uint16", _U32.max + 1, "uint64"),
        ("uint16", _I64.max + 1, "uint64"),
        ("uint16", _U64.max + 1, "object"),
        ("uint16", -1, "int32"),
        ("uint16", _I8.min - 1, "int32"),
        ("uint16", _I16.min - 1, "int32"),
        ("uint16", _I32.min - 1, "int64"),
        ("uint16", _I64.min - 1, "object"),
        # size 32
        ("int32", 1, "int32"),
        ("int32", _I8.max + 1, "int32"),
        ("int32", _I16.max + 1, "int32"),
        ("int32", _I32.max + 1, "int64"),
        ("int32", _I64.max + 1, "object"),
        ("int32", -1, "int32"),
        ("int32", _I8.min - 1, "int32"),
        ("int32", _I16.min - 1, "int32"),
        ("int32", _I32.min - 1, "int64"),
        ("int32", _I64.min - 1, "object"),
        ("uint32", 1, "uint32"),
        ("uint32", _I8.max + 1, "uint32"),
        ("uint32", _U8.max + 1, "uint32"),
        ("uint32", _I16.max + 1, "uint32"),
        ("uint32", _U16.max + 1, "uint32"),
        ("uint32", _I32.max + 1, "uint32"),
        ("uint32", _U32.max + 1, "uint64"),
        ("uint32", _I64.max + 1, "uint64"),
        ("uint32", _U64.max + 1, "object"),
        ("uint32", -1, "int64"),
        ("uint32", _I8.min - 1, "int64"),
        ("uint32", _I16.min - 1, "int64"),
        ("uint32", _I32.min - 1, "int64"),
        ("uint32", _I64.min - 1, "object"),
        # size 64
        ("int64", 1, "int64"),
        ("int64", _I8.max + 1, "int64"),
        ("int64", _I16.max + 1, "int64"),
        ("int64", _I32.max + 1, "int64"),
        ("int64", _I64.max + 1, "object"),
        ("int64", -1, "int64"),
        ("int64", _I8.min - 1, "int64"),
        ("int64", _I16.min - 1, "int64"),
        ("int64", _I32.min - 1, "int64"),
        ("int64", _I64.min - 1, "object"),
        ("uint64", 1, "uint64"),
        ("uint64", _I8.max + 1, "uint64"),
        ("uint64", _U8.max + 1, "uint64"),
        ("uint64", _I16.max + 1, "uint64"),
        ("uint64", _U16.max + 1, "uint64"),
        ("uint64", _I32.max + 1, "uint64"),
        ("uint64", _U32.max + 1, "uint64"),
        ("uint64", _I64.max + 1, "uint64"),
        ("uint64", _U64.max + 1, "object"),
        ("uint64", -1, "object"),
        ("uint64", _I8.min - 1, "object"),
        ("uint64", _I16.min - 1, "object"),
        ("uint64", _I32.min - 1, "object"),
        ("uint64", _I64.min - 1, "object"),
    ],
)
def test_maybe_promote_int_with_int(dtype, fill_value, expected_dtype):
    dtype = np.dtype(dtype)
    expected_dtype = np.dtype(expected_dtype)

    # the promoted fill value must be a scalar of expected_dtype, not a
    # generic Python int
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
194
+
195
+
196
def test_maybe_promote_int_with_float(any_int_numpy_dtype, float_numpy_dtype):
    dtype = np.dtype(any_int_numpy_dtype)
    fill_dtype = np.dtype(float_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # Filling an integer array with a float always upcasts to float64; the
    # fill value may start out as a different float width.
    _check_promote(dtype, fill_value, np.float64, np.float64(fill_value))
209
+
210
+
211
def test_maybe_promote_float_with_int(float_numpy_dtype, any_int_numpy_dtype):
    dtype = np.dtype(float_numpy_dtype)
    fill_dtype = np.dtype(any_int_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # Filling a float array with an int keeps the float dtype, because
    # np.finfo('float32').max > np.iinfo('uint64').max; the promoted fill
    # value is a scalar of that dtype rather than a generic float.
    exp_val_for_scalar = np.array([fill_value], dtype=dtype)[0]
    _check_promote(dtype, fill_value, dtype, exp_val_for_scalar)
225
+
226
+
227
# largest float32, as a Python float, used to force overflow to float64
_F32MAX = float(np.finfo("float32").max)


@pytest.mark.parametrize(
    "dtype, fill_value, expected_dtype",
    [
        # float filled with float
        ("float32", 1, "float32"),
        ("float32", _F32MAX * 1.1, "float64"),
        ("float64", 1, "float64"),
        ("float64", _F32MAX * 1.1, "float64"),
        # complex filled with float
        ("complex64", 1, "complex64"),
        ("complex64", _F32MAX * 1.1, "complex128"),
        ("complex128", 1, "complex128"),
        ("complex128", _F32MAX * 1.1, "complex128"),
        # float filled with complex
        ("float32", 1 + 1j, "complex64"),
        ("float32", _F32MAX * (1.1 + 1j), "complex128"),
        ("float64", 1 + 1j, "complex128"),
        ("float64", _F32MAX * (1.1 + 1j), "complex128"),
        # complex filled with complex
        ("complex64", 1 + 1j, "complex64"),
        ("complex64", _F32MAX * (1.1 + 1j), "complex128"),
        ("complex128", 1 + 1j, "complex128"),
        ("complex128", _F32MAX * (1.1 + 1j), "complex128"),
    ],
)
def test_maybe_promote_float_with_float(dtype, fill_value, expected_dtype):
    dtype = np.dtype(dtype)
    expected_dtype = np.dtype(expected_dtype)

    # the promoted fill value is a scalar of expected_dtype, not a plain float
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
260
+
261
+
262
def test_maybe_promote_bool_with_any(any_numpy_dtype):
    dtype = np.dtype(bool)
    fill_dtype = np.dtype(any_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # filling bool with anything other than bool upcasts to object
    if fill_dtype == bool:
        expected_dtype = fill_dtype
    else:
        expected_dtype = np.dtype(object)

    _check_promote(dtype, fill_value, expected_dtype, fill_value)
274
+
275
+
276
def test_maybe_promote_any_with_bool(any_numpy_dtype):
    dtype = np.dtype(any_numpy_dtype)
    fill_value = True

    # filling anything other than bool with a bool upcasts to object
    expected_dtype = dtype if dtype == bool else np.dtype(object)
    # the promoted fill value is a scalar of expected_dtype, not a plain bool
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
286
+
287
+
288
def test_maybe_promote_bytes_with_any(bytes_dtype, any_numpy_dtype):
    dtype = np.dtype(bytes_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # bytes dtypes are never used internally; always promote to object
    _check_promote(dtype, fill_value, np.dtype(np.object_), fill_value)
300
+
301
+
302
def test_maybe_promote_any_with_bytes(any_numpy_dtype):
    dtype = np.dtype(any_numpy_dtype)
    fill_value = b"abc"

    # bytes dtypes are never used internally; always promote to object
    expected_dtype = np.dtype(np.object_)
    # the promoted fill value corresponds to expected_dtype, not raw bytes
    exp_val_for_scalar = np.array([fill_value], dtype=expected_dtype)[0]

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
314
+
315
+
316
def test_maybe_promote_datetime64_with_any(datetime64_dtype, any_numpy_dtype):
    dtype = np.dtype(datetime64_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    if fill_dtype.kind == "M":
        # datetime fill values are normalized via Timestamp.to_datetime64
        expected_dtype = dtype
        exp_val_for_scalar = pd.Timestamp(fill_value).to_datetime64()
    else:
        # filling datetime with anything non-datetime upcasts to object
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
333
+
334
+
335
@pytest.mark.parametrize(
    "fill_value",
    [
        pd.Timestamp("now"),
        np.datetime64("now"),
        datetime.datetime.now(),
        datetime.date.today(),
    ],
    ids=["pd.Timestamp", "np.datetime64", "datetime.datetime", "datetime.date"],
)
def test_maybe_promote_any_with_datetime64(any_numpy_dtype, fill_value):
    dtype = np.dtype(any_numpy_dtype)

    is_date = type(fill_value) is datetime.date
    if dtype.kind == "M" and not is_date:
        # datetime fill values are normalized via Timestamp.to_datetime64
        expected_dtype = dtype
        exp_val_for_scalar = pd.Timestamp(fill_value).to_datetime64()
    else:
        # Non-datetime dtypes upcast to object.  datetime.date does too:
        # casting date to dt64 was deprecated and is enforced since 2.0.
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
363
+
364
+
365
@pytest.mark.parametrize(
    "fill_value",
    [
        pd.Timestamp(2023, 1, 1),
        np.datetime64("2023-01-01"),
        datetime.datetime(2023, 1, 1),
        datetime.date(2023, 1, 1),
    ],
    ids=["pd.Timestamp", "np.datetime64", "datetime.datetime", "datetime.date"],
)
def test_maybe_promote_any_numpy_dtype_with_datetimetz(
    any_numpy_dtype, tz_aware_fixture, fill_value
):
    dtype = np.dtype(any_numpy_dtype)
    fill_dtype = DatetimeTZDtype(tz=tz_aware_fixture)

    # localize the fill value into the tz-aware dtype
    fill_value = pd.Series([fill_value], dtype=fill_dtype)[0]

    # filling any numpy dtype with a tz-aware datetime upcasts to object
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
388
+
389
+
390
def test_maybe_promote_timedelta64_with_any(timedelta64_dtype, any_numpy_dtype):
    dtype = np.dtype(timedelta64_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    if fill_dtype.kind == "m":
        # timedelta fill values are normalized via Timedelta.to_timedelta64
        expected_dtype = dtype
        exp_val_for_scalar = pd.Timedelta(fill_value).to_timedelta64()
    else:
        # filling timedelta with anything non-timedelta upcasts to object
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
407
+
408
+
409
@pytest.mark.parametrize(
    "fill_value",
    [pd.Timedelta(days=1), np.timedelta64(24, "h"), datetime.timedelta(1)],
    ids=["pd.Timedelta", "np.timedelta64", "datetime.timedelta"],
)
def test_maybe_promote_any_with_timedelta64(any_numpy_dtype, fill_value):
    dtype = np.dtype(any_numpy_dtype)

    if dtype.kind == "m":
        # timedelta fill values are normalized via Timedelta.to_timedelta64
        expected_dtype = dtype
        exp_val_for_scalar = pd.Timedelta(fill_value).to_timedelta64()
    else:
        # filling any non-timedelta dtype with a timedelta upcasts to object
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
427
+
428
+
429
def test_maybe_promote_string_with_any(string_dtype, any_numpy_dtype):
    dtype = np.dtype(string_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # filling a string dtype with anything upcasts to object
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
441
+
442
+
443
def test_maybe_promote_any_with_string(any_numpy_dtype):
    dtype = np.dtype(any_numpy_dtype)
    fill_value = "abc"

    # filling any dtype with a string upcasts to object
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
454
+
455
+
456
def test_maybe_promote_object_with_any(object_dtype, any_numpy_dtype):
    dtype = np.dtype(object_dtype)
    fill_dtype = np.dtype(any_numpy_dtype)

    # a "1" scalar of the fill dtype
    fill_value = np.array([1], dtype=fill_dtype)[0]

    # filling object with anything stays object
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
468
+
469
+
470
def test_maybe_promote_any_with_object(any_numpy_dtype):
    dtype = np.dtype(any_numpy_dtype)

    # A scalar value (i.e. passing dtypes.common.is_scalar) that cannot be
    # cast to int/float etc., forcing promotion to object.
    fill_value = pd.DateOffset(1)

    # filling anything with an arbitrary object upcasts to object
    _check_promote(dtype, fill_value, np.dtype(object), fill_value)
482
+
483
+
484
def test_maybe_promote_any_numpy_dtype_with_na(any_numpy_dtype, nulls_fixture):
    fill_value = nulls_fixture
    dtype = np.dtype(any_numpy_dtype)

    if isinstance(fill_value, Decimal):
        # Subject to change, but ATM (when Decimal(NAN) is being added to
        # nulls_fixture) this is the existing behavior in maybe_promote,
        # hinges on is_valid_na_for_dtype.
        if dtype.kind in "iufc":
            expected_dtype = np.dtype(np.float64) if dtype.kind in "iu" else dtype
            exp_val_for_scalar = np.nan
        else:
            expected_dtype = np.dtype(object)
            exp_val_for_scalar = fill_value
    elif dtype.kind in "iu" and fill_value is not NaT:
        # integer + other missing value (np.nan / None) casts to float
        expected_dtype = np.float64
        exp_val_for_scalar = np.nan
    elif dtype == object and fill_value is NaT:
        # inserting NaT into object does not cast the value,
        # but *does* cast None to np.nan
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = fill_value
    elif dtype.kind in "mM":
        # datetime / timedelta cast all missing values to dtyped-NaT
        expected_dtype = dtype
        exp_val_for_scalar = dtype.type("NaT", "ns")
    elif fill_value is NaT:
        # NaT upcasts everything that's not datetime/timedelta to object
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = NaT
    elif dtype.kind in "fc":
        # float / complex + missing value (!= NaT) stays the same
        expected_dtype = dtype
        exp_val_for_scalar = np.nan
    else:
        # every remaining case casts to object; pd.NA is preserved, all
        # other missing values become np.nan
        expected_dtype = np.dtype(object)
        exp_val_for_scalar = pd.NA if fill_value is pd.NA else np.nan

    _check_promote(dtype, fill_value, expected_dtype, exp_val_for_scalar)
venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_common.py ADDED
@@ -0,0 +1,801 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from __future__ import annotations
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ import pandas.util._test_decorators as td
7
+
8
+ from pandas.core.dtypes.astype import astype_array
9
+ import pandas.core.dtypes.common as com
10
+ from pandas.core.dtypes.dtypes import (
11
+ CategoricalDtype,
12
+ CategoricalDtypeType,
13
+ DatetimeTZDtype,
14
+ ExtensionDtype,
15
+ IntervalDtype,
16
+ PeriodDtype,
17
+ )
18
+ from pandas.core.dtypes.missing import isna
19
+
20
+ import pandas as pd
21
+ import pandas._testing as tm
22
+ from pandas.api.types import pandas_dtype
23
+ from pandas.arrays import SparseArray
24
+
25
+
26
+ # EA & Actual Dtypes
27
def to_ea_dtypes(dtypes):
    """Convert a list of string dtype names to their pandas EA dtype classes."""
    return [getattr(pd, f"{name}Dtype") for name in dtypes]
30
+
31
+
32
def to_numpy_dtypes(dtypes):
    """Convert a list of string dtype names to numpy dtype classes,
    silently skipping non-string entries."""
    return [getattr(np, name) for name in dtypes if isinstance(name, str)]
35
+
36
+
37
class TestNumpyEADtype:
    # Passing an invalid dtype, either as a string or an object, must raise
    # TypeError (GH15520).
    @pytest.mark.parametrize("box", [pd.Timestamp, "pd.Timestamp", list])
    def test_invalid_dtype_error(self, box):
        with pytest.raises(TypeError, match="not understood"):
            com.pandas_dtype(box)

    @pytest.mark.parametrize(
        "dtype",
        [
            object,
            "float64",
            np.object_,
            np.dtype("object"),
            "O",
            np.float64,
            float,
            np.dtype("float64"),
            "object_",
        ],
    )
    def test_pandas_dtype_valid(self, dtype):
        assert com.pandas_dtype(dtype) == dtype

    @pytest.mark.parametrize(
        "dtype", ["M8[ns]", "m8[ns]", "object", "float64", "int64"]
    )
    def test_numpy_dtype(self, dtype):
        assert com.pandas_dtype(dtype) == np.dtype(dtype)

    def test_numpy_string_dtype(self):
        # freq-like strings must not be parsed as period dtypes
        for code in ("U", "S"):
            assert com.pandas_dtype(code) == np.dtype(code)

    @pytest.mark.parametrize(
        "dtype",
        [
            "datetime64[ns, US/Eastern]",
            "datetime64[ns, Asia/Tokyo]",
            "datetime64[ns, UTC]",
            # GH#33885 check that the M8 alias is understood
            "M8[ns, US/Eastern]",
            "M8[ns, Asia/Tokyo]",
            "M8[ns, UTC]",
        ],
    )
    def test_datetimetz_dtype(self, dtype):
        result = com.pandas_dtype(dtype)
        assert result == DatetimeTZDtype.construct_from_string(dtype)
        assert result == dtype

    def test_categorical_dtype(self):
        assert com.pandas_dtype("category") == CategoricalDtype()

    @pytest.mark.parametrize(
        "dtype",
        [
            "period[D]",
            "period[3M]",
            "period[us]",
            "Period[D]",
            "Period[3M]",
            "Period[us]",
        ],
    )
    def test_period_dtype(self, dtype):
        # equal by value but not the identical cached object
        result = com.pandas_dtype(dtype)
        assert result is not PeriodDtype(dtype)
        assert result == PeriodDtype(dtype)
        assert result == dtype
107
+
108
+
109
+ dtypes = {
110
+ "datetime_tz": com.pandas_dtype("datetime64[ns, US/Eastern]"),
111
+ "datetime": com.pandas_dtype("datetime64[ns]"),
112
+ "timedelta": com.pandas_dtype("timedelta64[ns]"),
113
+ "period": PeriodDtype("D"),
114
+ "integer": np.dtype(np.int64),
115
+ "float": np.dtype(np.float64),
116
+ "object": np.dtype(object),
117
+ "category": com.pandas_dtype("category"),
118
+ "string": pd.StringDtype(),
119
+ }
120
+
121
+
122
@pytest.mark.parametrize("name1,dtype1", list(dtypes.items()), ids=lambda x: str(x))
@pytest.mark.parametrize("name2,dtype2", list(dtypes.items()), ids=lambda x: str(x))
def test_dtype_equal(name1, dtype1, name2, dtype2):
    # each dtype compares equal to itself but to no other entry in the table
    assert com.is_dtype_equal(dtype1, dtype1)
    if name1 != name2:
        assert not com.is_dtype_equal(dtype1, dtype2)
129
+
130
+
131
@pytest.mark.parametrize("name,dtype", list(dtypes.items()), ids=lambda x: str(x))
def test_pyarrow_string_import_error(name, dtype):
    # GH-44276: comparing against "string[pyarrow]" must not raise even when
    # pyarrow is unavailable
    assert not com.is_dtype_equal(dtype, "string[pyarrow]")
135
+
136
+
137
@pytest.mark.parametrize(
    "dtype1,dtype2",
    [
        (np.int8, np.int64),
        (np.int16, np.int64),
        (np.int32, np.int64),
        (np.float32, np.float64),
        (PeriodDtype("D"), PeriodDtype("2D")),  # PeriodType
        (
            com.pandas_dtype("datetime64[ns, US/Eastern]"),
            com.pandas_dtype("datetime64[ns, CET]"),
        ),  # Datetime
        (None, None),  # gh-15941: no exception should be raised.
    ],
)
def test_dtype_equal_strict(dtype1, dtype2):
    # same-family dtypes that differ in size / freq / tz are never equal
    assert not com.is_dtype_equal(dtype1, dtype2)
154
+
155
+
156
def get_is_dtype_funcs():
    """
    Collect every function in pandas.core.dtypes.common whose name starts
    with 'is_' and ends with 'dtype'.
    """
    names = [n for n in dir(com) if n.startswith("is_") and n.endswith("dtype")]
    # this fastpath helper requires an np.dtype object, so exclude it
    names.remove("is_string_or_object_np_dtype")
    return [getattr(com, n) for n in names]
165
+
166
+
167
@pytest.mark.filterwarnings(
    "ignore:is_categorical_dtype is deprecated:DeprecationWarning"
)
@pytest.mark.parametrize("func", get_is_dtype_funcs(), ids=lambda x: x.__name__)
def test_get_dtype_error_catch(func):
    # see gh-15941: passing None must never raise, only return False
    deprecated_checkers = (
        com.is_int64_dtype,
        com.is_interval_dtype,
        com.is_datetime64tz_dtype,
        com.is_categorical_dtype,
        com.is_period_dtype,
    )
    warn = DeprecationWarning if func in deprecated_checkers else None
    msg = f"{func.__name__} is deprecated"

    with tm.assert_produces_warning(warn, match=msg):
        assert not func(None)
189
+
190
+
191
def test_is_object():
    # the object type itself and object-dtype arrays qualify
    assert com.is_object_dtype(object)
    assert com.is_object_dtype(np.array([], dtype=object))

    # non-object dtypes and plain Python lists do not
    for not_object in (int, np.array([], dtype=int), [1, 2, 3]):
        assert not com.is_object_dtype(not_object)
198
+
199
+
200
@pytest.mark.parametrize(
    "check_scipy", [False, pytest.param(True, marks=td.skip_if_no("scipy"))]
)
def test_is_sparse(check_scipy):
    # is_sparse is deprecated but must still detect pandas SparseArray and
    # reject numpy arrays and scipy sparse matrices
    msg = "is_sparse is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert com.is_sparse(SparseArray([1, 2, 3]))

        assert not com.is_sparse(np.array([1, 2, 3]))

        if check_scipy:
            import scipy.sparse

            assert not com.is_sparse(scipy.sparse.bsr_matrix([1, 2, 3]))
214
+
215
+
216
def test_is_scipy_sparse():
    # only scipy sparse matrices qualify, not pandas SparseArray
    sp_sparse = pytest.importorskip("scipy.sparse")

    assert com.is_scipy_sparse(sp_sparse.bsr_matrix([1, 2, 3]))
    assert not com.is_scipy_sparse(SparseArray([1, 2, 3]))
222
+
223
+
224
def test_is_datetime64_dtype():
    """is_datetime64_dtype: True for np.datetime64 type/arrays only."""
    negatives = (object, [1, 2, 3], np.array([], dtype=int))
    for candidate in negatives:
        assert not com.is_datetime64_dtype(candidate)

    positives = (np.datetime64, np.array([], dtype=np.datetime64))
    for candidate in positives:
        assert com.is_datetime64_dtype(candidate)
231
+
232
+
233
def test_is_datetime64tz_dtype():
    # deprecated predicate: every call warns; only tz-aware datetime
    # data matches
    msg = "is_datetime64tz_dtype is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert not com.is_datetime64tz_dtype(object)
        assert not com.is_datetime64tz_dtype([1, 2, 3])
        assert not com.is_datetime64tz_dtype(pd.DatetimeIndex([1, 2, 3]))
        assert com.is_datetime64tz_dtype(pd.DatetimeIndex(["2000"], tz="US/Eastern"))


def test_custom_ea_kind_M_not_datetime64tz():
    # GH 34986
    # a third-party ExtensionDtype reusing kind "M" must not be mistaken
    # for a tz-aware datetime dtype
    class NotTZDtype(ExtensionDtype):
        @property
        def kind(self) -> str:
            return "M"

    not_tz_dtype = NotTZDtype()
    msg = "is_datetime64tz_dtype is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert not com.is_datetime64tz_dtype(not_tz_dtype)
        assert not com.needs_i8_conversion(not_tz_dtype)
254
+
255
+
256
def test_is_timedelta64_dtype():
    """is_timedelta64_dtype is a dtype check, not a value parser."""
    non_matches = (
        object,
        None,
        [1, 2, 3],
        np.array([], dtype=np.datetime64),
        # strings that merely *look* like timedeltas do not count
        "0 days",
        "0 days 00:00:00",
        ["0 days 00:00:00"],
        "NO DATE",
    )
    for candidate in non_matches:
        assert not com.is_timedelta64_dtype(candidate)

    matches = (
        np.timedelta64,
        pd.Series([], dtype="timedelta64[ns]"),
        pd.to_timedelta(["0 days", "1 days"]),
    )
    for candidate in matches:
        assert com.is_timedelta64_dtype(candidate)
269
+
270
+
271
def test_is_period_dtype():
    # deprecated predicate: warns; matches PeriodDtype/PeriodIndex but
    # not Period scalars
    msg = "is_period_dtype is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert not com.is_period_dtype(object)
        assert not com.is_period_dtype([1, 2, 3])
        assert not com.is_period_dtype(pd.Period("2017-01-01"))

        assert com.is_period_dtype(PeriodDtype(freq="D"))
        assert com.is_period_dtype(pd.PeriodIndex([], freq="Y"))


def test_is_interval_dtype():
    # deprecated predicate: warns; matches IntervalDtype/IntervalIndex but
    # not Interval scalars
    msg = "is_interval_dtype is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert not com.is_interval_dtype(object)
        assert not com.is_interval_dtype([1, 2, 3])

        assert com.is_interval_dtype(IntervalDtype())

        interval = pd.Interval(1, 2, closed="right")
        assert not com.is_interval_dtype(interval)
        assert com.is_interval_dtype(pd.IntervalIndex([interval]))


def test_is_categorical_dtype():
    # deprecated predicate: warns; matches categorical dtype and containers
    msg = "is_categorical_dtype is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert not com.is_categorical_dtype(object)
        assert not com.is_categorical_dtype([1, 2, 3])

        assert com.is_categorical_dtype(CategoricalDtype())
        assert com.is_categorical_dtype(pd.Categorical([1, 2, 3]))
        assert com.is_categorical_dtype(pd.CategoricalIndex([1, 2, 3]))
304
+
305
+
306
@pytest.mark.parametrize(
    "dtype, expected",
    [
        (int, False),
        (pd.Series([1, 2]), False),
        (str, True),
        (object, True),
        (np.array(["a", "b"]), True),
        (pd.StringDtype(), True),
        (pd.Index([], dtype="O"), True),
    ],
)
def test_is_string_dtype(dtype, expected):
    # GH#54661
    # object dtype counts as "string" at the dtype level
    result = com.is_string_dtype(dtype)
    assert result is expected


@pytest.mark.parametrize(
    "data",
    [[(0, 1), (1, 1)], pd.Categorical([1, 2, 3]), np.array([1, 2], dtype=object)],
)
def test_is_string_dtype_arraylike_with_object_elements_not_strings(data):
    # GH 15585: object-dtype data whose elements are not strings is not
    # considered a string dtype
    assert not com.is_string_dtype(pd.Series(data))


def test_is_string_dtype_nullable(nullable_string_dtype):
    # the nullable StringDtype-backed arrays count as string dtype
    assert com.is_string_dtype(pd.array(["a", "b"], dtype=nullable_string_dtype))
336
+
337
+
338
# extra cases appended to the parametrization below; kept for symmetry with
# the signed/unsigned variants further down
integer_dtypes: list = []


@pytest.mark.parametrize(
    "dtype",
    integer_dtypes
    + [pd.Series([1, 2])]
    + tm.ALL_INT_NUMPY_DTYPES
    + to_numpy_dtypes(tm.ALL_INT_NUMPY_DTYPES)
    + tm.ALL_INT_EA_DTYPES
    + to_ea_dtypes(tm.ALL_INT_EA_DTYPES),
)
def test_is_integer_dtype(dtype):
    # every numpy and nullable-extension integer dtype (and an int Series)
    # matches
    assert com.is_integer_dtype(dtype)


@pytest.mark.parametrize(
    "dtype",
    [
        str,
        float,
        np.datetime64,
        np.timedelta64,
        pd.Index([1, 2.0]),
        np.array(["a", "b"]),
        np.array([], dtype=np.timedelta64),
    ],
)
def test_is_not_integer_dtype(dtype):
    # non-integer dtypes, including datetimelike ones, must not match
    assert not com.is_integer_dtype(dtype)
368
+
369
+
370
signed_integer_dtypes: list = []


@pytest.mark.parametrize(
    "dtype",
    signed_integer_dtypes
    + [pd.Series([1, 2])]
    + tm.SIGNED_INT_NUMPY_DTYPES
    + to_numpy_dtypes(tm.SIGNED_INT_NUMPY_DTYPES)
    + tm.SIGNED_INT_EA_DTYPES
    + to_ea_dtypes(tm.SIGNED_INT_EA_DTYPES),
)
def test_is_signed_integer_dtype(dtype):
    # Every parametrized dtype is a *signed* integer dtype, so assert the
    # specific predicate (mirroring test_is_unsigned_integer_dtype below).
    # Asserting via the generic is_integer_dtype was weaker than the test's
    # name implies and would not catch a signedness regression.
    assert com.is_signed_integer_dtype(dtype)
384
+
385
+
386
@pytest.mark.parametrize(
    "dtype",
    [
        str,
        float,
        np.datetime64,
        np.timedelta64,
        pd.Index([1, 2.0]),
        np.array(["a", "b"]),
        np.array([], dtype=np.timedelta64),
    ]
    + tm.UNSIGNED_INT_NUMPY_DTYPES
    + to_numpy_dtypes(tm.UNSIGNED_INT_NUMPY_DTYPES)
    + tm.UNSIGNED_INT_EA_DTYPES
    + to_ea_dtypes(tm.UNSIGNED_INT_EA_DTYPES),
)
def test_is_not_signed_integer_dtype(dtype):
    # unsigned integer dtypes and non-integer dtypes are not signed ints
    assert not com.is_signed_integer_dtype(dtype)


unsigned_integer_dtypes: list = []


@pytest.mark.parametrize(
    "dtype",
    unsigned_integer_dtypes
    + [pd.Series([1, 2], dtype=np.uint32)]
    + tm.UNSIGNED_INT_NUMPY_DTYPES
    + to_numpy_dtypes(tm.UNSIGNED_INT_NUMPY_DTYPES)
    + tm.UNSIGNED_INT_EA_DTYPES
    + to_ea_dtypes(tm.UNSIGNED_INT_EA_DTYPES),
)
def test_is_unsigned_integer_dtype(dtype):
    # all numpy and nullable-extension unsigned integer dtypes match
    assert com.is_unsigned_integer_dtype(dtype)


@pytest.mark.parametrize(
    "dtype",
    [
        str,
        float,
        np.datetime64,
        np.timedelta64,
        pd.Index([1, 2.0]),
        np.array(["a", "b"]),
        np.array([], dtype=np.timedelta64),
    ]
    + tm.SIGNED_INT_NUMPY_DTYPES
    + to_numpy_dtypes(tm.SIGNED_INT_NUMPY_DTYPES)
    + tm.SIGNED_INT_EA_DTYPES
    + to_ea_dtypes(tm.SIGNED_INT_EA_DTYPES),
)
def test_is_not_unsigned_integer_dtype(dtype):
    # signed integer dtypes and non-integer dtypes are not unsigned ints
    assert not com.is_unsigned_integer_dtype(dtype)
440
+
441
+
442
@pytest.mark.parametrize(
    "dtype", [np.int64, np.array([1, 2], dtype=np.int64), "Int64", pd.Int64Dtype]
)
def test_is_int64_dtype(dtype):
    # deprecated predicate: warns; matches 64-bit integer dtypes only
    msg = "is_int64_dtype is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert com.is_int64_dtype(dtype)


def test_type_comparison_with_numeric_ea_dtype(any_numeric_ea_dtype):
    # GH#43038: an EA dtype compares equal to its own string alias
    assert pandas_dtype(any_numeric_ea_dtype) == any_numeric_ea_dtype


def test_type_comparison_with_real_numpy_dtype(any_real_numpy_dtype):
    # GH#43038
    assert pandas_dtype(any_real_numpy_dtype) == any_real_numpy_dtype


def test_type_comparison_with_signed_int_ea_dtype_and_signed_int_numpy_dtype(
    any_signed_int_ea_dtype, any_signed_int_numpy_dtype
):
    # GH#43038: nullable EA dtypes never compare equal to numpy dtypes
    assert not pandas_dtype(any_signed_int_ea_dtype) == any_signed_int_numpy_dtype


@pytest.mark.parametrize(
    "dtype",
    [
        str,
        float,
        np.int32,
        np.uint64,
        pd.Index([1, 2.0]),
        np.array(["a", "b"]),
        np.array([1, 2], dtype=np.uint32),
        "int8",
        "Int8",
        pd.Int8Dtype,
    ],
)
def test_is_not_int64_dtype(dtype):
    # deprecated predicate: warns; rejects everything that is not int64
    msg = "is_int64_dtype is deprecated"
    with tm.assert_produces_warning(DeprecationWarning, match=msg):
        assert not com.is_int64_dtype(dtype)
487
+
488
+
489
def test_is_datetime64_any_dtype():
    """True for any datetime64 flavor, tz-naive or tz-aware."""
    non_datetimes = (int, str, np.array([1, 2]), np.array(["a", "b"]))
    for candidate in non_datetimes:
        assert not com.is_datetime64_any_dtype(candidate)

    naive_index = pd.DatetimeIndex([1, 2, 3], dtype="datetime64[ns]")
    datetimes = (
        np.datetime64,
        np.array([], dtype=np.datetime64),
        DatetimeTZDtype("ns", "US/Eastern"),
        naive_index,
    )
    for candidate in datetimes:
        assert com.is_datetime64_any_dtype(candidate)
501
+
502
+
503
def test_is_datetime64_ns_dtype():
    """Only nanosecond-resolution datetime64 dtypes qualify."""
    negatives = (
        int,
        str,
        np.datetime64,  # generic type carries no unit
        np.array([1, 2]),
        np.array(["a", "b"]),
        np.array([], dtype=np.datetime64),  # generic-unit array
        np.array([], dtype="datetime64[ps]"),  # wrong unit (ps, not ns)
        DatetimeTZDtype("us", "US/Eastern"),  # non-nano tz-aware dtype
    )
    for candidate in negatives:
        assert not com.is_datetime64_ns_dtype(candidate)

    positives = (
        DatetimeTZDtype("ns", "US/Eastern"),
        pd.DatetimeIndex([1, 2, 3], dtype=np.dtype("datetime64[ns]")),
    )
    for candidate in positives:
        assert com.is_datetime64_ns_dtype(candidate)
521
+
522
+
523
def test_is_timedelta64_ns_dtype():
    """Only the nanosecond unit of timedelta64 qualifies."""
    # wrong unit, and the generic-unit array default, are rejected
    assert not com.is_timedelta64_ns_dtype(np.dtype("m8[ps]"))
    assert not com.is_timedelta64_ns_dtype(np.array([1, 2], dtype=np.timedelta64))

    for positive in (np.dtype("m8[ns]"), np.array([1, 2], dtype="m8[ns]")):
        assert com.is_timedelta64_ns_dtype(positive)
529
+
530
+
531
def test_is_numeric_v_string_like():
    """True only when exactly one side is numeric and the other string-like."""
    numeric_arr = np.array([1])
    string_arr = np.array(["foo"])

    # same kind on both sides -> False
    assert not com.is_numeric_v_string_like(numeric_arr, 1)
    assert not com.is_numeric_v_string_like(numeric_arr, np.array([2]))
    assert not com.is_numeric_v_string_like(string_arr, np.array(["foo"]))

    # mixed numeric/string pairings -> True, regardless of order
    assert com.is_numeric_v_string_like(numeric_arr, "foo")
    assert com.is_numeric_v_string_like(np.array([1, 2]), string_arr)
    assert com.is_numeric_v_string_like(string_arr, np.array([1, 2]))
539
+
540
+
541
def test_needs_i8_conversion():
    # only datetimelike *dtypes* — not the arrays/Series holding them —
    # report needing conversion to the int64 representation
    assert not com.needs_i8_conversion(str)
    assert not com.needs_i8_conversion(np.int64)
    assert not com.needs_i8_conversion(pd.Series([1, 2]))
    assert not com.needs_i8_conversion(np.array(["a", "b"]))

    assert not com.needs_i8_conversion(np.datetime64)
    assert com.needs_i8_conversion(np.dtype(np.datetime64))
    assert not com.needs_i8_conversion(pd.Series([], dtype="timedelta64[ns]"))
    assert com.needs_i8_conversion(pd.Series([], dtype="timedelta64[ns]").dtype)
    assert not com.needs_i8_conversion(pd.DatetimeIndex(["2000"], tz="US/Eastern"))
    assert com.needs_i8_conversion(pd.DatetimeIndex(["2000"], tz="US/Eastern").dtype)


def test_is_numeric_dtype():
    # strings and datetimelike dtypes are rejected ...
    assert not com.is_numeric_dtype(str)
    assert not com.is_numeric_dtype(np.datetime64)
    assert not com.is_numeric_dtype(np.timedelta64)
    assert not com.is_numeric_dtype(np.array(["a", "b"]))
    assert not com.is_numeric_dtype(np.array([], dtype=np.timedelta64))

    # ... numeric scalar types and numeric containers are accepted
    assert com.is_numeric_dtype(int)
    assert com.is_numeric_dtype(float)
    assert com.is_numeric_dtype(np.uint64)
    assert com.is_numeric_dtype(pd.Series([1, 2]))
    assert com.is_numeric_dtype(pd.Index([1, 2.0]))

    # an ExtensionDtype can opt into being numeric via _is_numeric
    class MyNumericDType(ExtensionDtype):
        @property
        def type(self):
            return str

        @property
        def name(self):
            raise NotImplementedError

        @classmethod
        def construct_array_type(cls):
            raise NotImplementedError

        def _is_numeric(self) -> bool:
            return True

    assert com.is_numeric_dtype(MyNumericDType())
585
+
586
+
587
def test_is_any_real_numeric_dtype():
    # bool and complex are numeric but not *real* numeric
    assert not com.is_any_real_numeric_dtype(str)
    assert not com.is_any_real_numeric_dtype(bool)
    assert not com.is_any_real_numeric_dtype(complex)
    assert not com.is_any_real_numeric_dtype(object)
    assert not com.is_any_real_numeric_dtype(np.datetime64)
    assert not com.is_any_real_numeric_dtype(np.array(["a", "b", complex(1, 2)]))
    assert not com.is_any_real_numeric_dtype(pd.DataFrame([complex(1, 2), True]))

    assert com.is_any_real_numeric_dtype(int)
    assert com.is_any_real_numeric_dtype(float)
    assert com.is_any_real_numeric_dtype(np.array([1, 2.5]))
599
+
600
+
601
def test_is_float_dtype():
    """Floating dtypes only; ints and strings are rejected."""
    for candidate in (str, int, pd.Series([1, 2]), np.array(["a", "b"])):
        assert not com.is_float_dtype(candidate)

    for candidate in (float, pd.Index([1, 2.0])):
        assert com.is_float_dtype(candidate)
609
+
610
+
611
def test_is_bool_dtype():
    # non-boolean dtypes, including the Int64 EA alias, must not match
    assert not com.is_bool_dtype(int)
    assert not com.is_bool_dtype(str)
    assert not com.is_bool_dtype(pd.Series([1, 2]))
    assert not com.is_bool_dtype(pd.Series(["a", "b"], dtype="category"))
    assert not com.is_bool_dtype(np.array(["a", "b"]))
    assert not com.is_bool_dtype(pd.Index(["a", "b"]))
    assert not com.is_bool_dtype("Int64")

    # numpy bools, all-boolean categoricals, and boolean indexes match
    assert com.is_bool_dtype(bool)
    assert com.is_bool_dtype(np.bool_)
    assert com.is_bool_dtype(pd.Series([True, False], dtype="category"))
    assert com.is_bool_dtype(np.array([True, False]))
    assert com.is_bool_dtype(pd.Index([True, False]))

    # the nullable BooleanDtype matches by instance, array, or alias
    assert com.is_bool_dtype(pd.BooleanDtype())
    assert com.is_bool_dtype(pd.array([True, False, None], dtype="boolean"))
    assert com.is_bool_dtype("boolean")


def test_is_bool_dtype_numpy_error():
    # GH39010: a string numpy cannot parse as a dtype must return False,
    # not raise
    assert not com.is_bool_dtype("0 - Name")
634
+
635
+
636
@pytest.mark.parametrize(
    "check_scipy", [False, pytest.param(True, marks=td.skip_if_no("scipy"))]
)
def test_is_extension_array_dtype(check_scipy):
    # plain python/numpy containers and tz-naive DatetimeIndex do not match
    assert not com.is_extension_array_dtype([1, 2, 3])
    assert not com.is_extension_array_dtype(np.array([1, 2, 3]))
    assert not com.is_extension_array_dtype(pd.DatetimeIndex([1, 2, 3]))

    # extension-backed containers do match
    cat = pd.Categorical([1, 2, 3])
    assert com.is_extension_array_dtype(cat)
    assert com.is_extension_array_dtype(pd.Series(cat))
    assert com.is_extension_array_dtype(SparseArray([1, 2, 3]))
    assert com.is_extension_array_dtype(pd.DatetimeIndex(["2000"], tz="US/Eastern"))

    dtype = DatetimeTZDtype("ns", tz="US/Eastern")
    s = pd.Series([], dtype=dtype)
    assert com.is_extension_array_dtype(s)

    if check_scipy:
        import scipy.sparse

        # scipy sparse matrices are not pandas extension arrays
        assert not com.is_extension_array_dtype(scipy.sparse.bsr_matrix([1, 2, 3]))
658
+
659
+
660
def test_is_complex_dtype():
    """Complex dtypes only; real numerics and strings are rejected."""
    for candidate in (int, str, pd.Series([1, 2]), np.array(["a", "b"])):
        assert not com.is_complex_dtype(candidate)

    for candidate in (np.complex128, complex, np.array([1 + 1j, 5])):
        assert com.is_complex_dtype(candidate)
669
+
670
+
671
@pytest.mark.parametrize(
    "input_param,result",
    [
        (int, np.dtype(int)),
        ("int32", np.dtype("int32")),
        (float, np.dtype(float)),
        ("float64", np.dtype("float64")),
        (np.dtype("float64"), np.dtype("float64")),
        (str, np.dtype(str)),
        (pd.Series([1, 2], dtype=np.dtype("int16")), np.dtype("int16")),
        (pd.Series(["a", "b"], dtype=object), np.dtype(object)),
        (pd.Index([1, 2]), np.dtype("int64")),
        (pd.Index(["a", "b"], dtype=object), np.dtype(object)),
        ("category", "category"),
        (pd.Categorical(["a", "b"]).dtype, CategoricalDtype(["a", "b"])),
        (pd.Categorical(["a", "b"]), CategoricalDtype(["a", "b"])),
        (pd.CategoricalIndex(["a", "b"]).dtype, CategoricalDtype(["a", "b"])),
        (pd.CategoricalIndex(["a", "b"]), CategoricalDtype(["a", "b"])),
        (CategoricalDtype(), CategoricalDtype()),
        (pd.DatetimeIndex([1, 2]), np.dtype("=M8[ns]")),
        (pd.DatetimeIndex([1, 2]).dtype, np.dtype("=M8[ns]")),
        ("<M8[ns]", np.dtype("<M8[ns]")),
        ("datetime64[ns, Europe/London]", DatetimeTZDtype("ns", "Europe/London")),
        (PeriodDtype(freq="D"), PeriodDtype(freq="D")),
        ("period[D]", PeriodDtype(freq="D")),
        (IntervalDtype(), IntervalDtype()),
    ],
)
def test_get_dtype(input_param, result):
    # _get_dtype normalizes scalar types, strings, arrays, and pandas
    # objects to a concrete dtype object
    assert com._get_dtype(input_param) == result


@pytest.mark.parametrize(
    "input_param,expected_error_message",
    [
        (None, "Cannot deduce dtype from null object"),
        (1, "data type not understood"),
        (1.2, "data type not understood"),
        # numpy dev changed from double-quotes to single quotes
        ("random string", "data type [\"']random string[\"'] not understood"),
        (pd.DataFrame([1, 2]), "data type not understood"),
    ],
)
def test_get_dtype_fails(input_param, expected_error_message):
    # python objects
    # 2020-02-02 npdev changed error message
    expected_error_message += f"|Cannot interpret '{input_param}' as a data type"
    with pytest.raises(TypeError, match=expected_error_message):
        com._get_dtype(input_param)


@pytest.mark.parametrize(
    "input_param,result",
    [
        (int, np.dtype(int).type),
        ("int32", np.int32),
        (float, np.dtype(float).type),
        ("float64", np.float64),
        (np.dtype("float64"), np.float64),
        (str, np.dtype(str).type),
        (pd.Series([1, 2], dtype=np.dtype("int16")), np.int16),
        (pd.Series(["a", "b"], dtype=object), np.object_),
        (pd.Index([1, 2], dtype="int64"), np.int64),
        (pd.Index(["a", "b"], dtype=object), np.object_),
        ("category", CategoricalDtypeType),
        (pd.Categorical(["a", "b"]).dtype, CategoricalDtypeType),
        (pd.Categorical(["a", "b"]), CategoricalDtypeType),
        (pd.CategoricalIndex(["a", "b"]).dtype, CategoricalDtypeType),
        (pd.CategoricalIndex(["a", "b"]), CategoricalDtypeType),
        (pd.DatetimeIndex([1, 2]), np.datetime64),
        (pd.DatetimeIndex([1, 2]).dtype, np.datetime64),
        ("<M8[ns]", np.datetime64),
        (pd.DatetimeIndex(["2000"], tz="Europe/London"), pd.Timestamp),
        (pd.DatetimeIndex(["2000"], tz="Europe/London").dtype, pd.Timestamp),
        ("datetime64[ns, Europe/London]", pd.Timestamp),
        (PeriodDtype(freq="D"), pd.Period),
        ("period[D]", pd.Period),
        (IntervalDtype(), pd.Interval),
        (None, type(None)),
        (1, type(None)),
        (1.2, type(None)),
        (pd.DataFrame([1, 2]), type(None)),  # composite dtype
    ],
)
def test__is_dtype_type(input_param, result):
    # _is_dtype_type resolves the input to its scalar *type* and applies the
    # given condition to it; un-resolvable inputs map to NoneType
    assert com._is_dtype_type(input_param, lambda tipo: tipo == result)
757
+
758
+
759
def test_astype_nansafe_copy_false(any_int_numpy_dtype):
    # GH#34457 use astype, not view
    arr = np.array([1, 2, 3], dtype=any_int_numpy_dtype)

    dtype = np.dtype("float64")
    result = astype_array(arr, dtype, copy=False)

    expected = np.array([1.0, 2.0, 3.0], dtype=dtype)
    tm.assert_numpy_array_equal(result, expected)


@pytest.mark.parametrize("from_type", [np.datetime64, np.timedelta64])
def test_astype_object_preserves_datetime_na(from_type):
    # casting NaT to object must keep it recognizable as missing
    arr = np.array([from_type("NaT", "ns")])
    result = astype_array(arr, dtype=np.dtype("object"))

    assert isna(result)[0]
776
+
777
+
778
def test_validate_allhashable():
    """validate_all_hashable: None on success, TypeError (with an optional
    custom name) for unhashable elements."""
    # all-hashable arguments pass silently and return None
    assert com.validate_all_hashable(1, "a") is None

    # a list is unhashable -> generic message
    with pytest.raises(TypeError, match="All elements must be hashable"):
        com.validate_all_hashable([])

    # error_name customises the message prefix
    with pytest.raises(TypeError, match="list must be a hashable type"):
        com.validate_all_hashable([], error_name="list")
786
+
787
+
788
def test_pandas_dtype_numpy_warning():
    # GH#51523: numpy deprecates converting abstract scalar types to a dtype
    with tm.assert_produces_warning(
        DeprecationWarning,
        check_stacklevel=False,
        match="Converting `np.integer` or `np.signedinteger` to a dtype is deprecated",
    ):
        pandas_dtype(np.integer)


def test_pandas_dtype_ea_not_instance():
    # GH 31356 GH 54592
    # passing the EA *class* warns but still resolves to a default instance
    with tm.assert_produces_warning(UserWarning):
        assert pandas_dtype(CategoricalDtype) == CategoricalDtype()
venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_concat.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ import pandas.core.dtypes.concat as _concat
4
+
5
+ import pandas as pd
6
+ from pandas import Series
7
+ import pandas._testing as tm
8
+
9
+
10
def test_concat_mismatched_categoricals_with_empty():
    # concat_compat behavior on series._values should match pd.concat on series
    ser1 = Series(["a", "b", "c"], dtype="category")
    ser2 = Series([], dtype="category")

    # both the low-level and the high-level path emit the same deprecation
    msg = "The behavior of array concatenation with empty entries is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=msg):
        result = _concat.concat_compat([ser1._values, ser2._values])
    with tm.assert_produces_warning(FutureWarning, match=msg):
        expected = pd.concat([ser1, ser2])._values
    tm.assert_categorical_equal(result, expected)
21
+
22
+
23
@pytest.mark.parametrize("copy", [True, False])
def test_concat_single_dataframe_tz_aware(copy):
    # https://github.com/pandas-dev/pandas/issues/25257
    # concatenating a single tz-aware frame must round-trip unchanged
    stamp = pd.Timestamp("2020-04-08 09:00:00.709949+0000", tz="UTC")
    df = pd.DataFrame({"timestamp": [stamp]})
    expected = df.copy()

    result = pd.concat([df], copy=copy)

    tm.assert_frame_equal(result, expected)
32
+
33
+
34
def test_concat_periodarray_2d():
    # build a 6x6 2D PeriodArray and exercise axis-aware concatenation
    pi = pd.period_range("2016-01-01", periods=36, freq="D")
    arr = pi._data.reshape(6, 6)

    # splitting along an axis and re-concatenating round-trips exactly
    result = _concat.concat_compat([arr[:2], arr[2:]], axis=0)
    tm.assert_period_array_equal(result, arr)

    result = _concat.concat_compat([arr[:, :2], arr[:, 2:]], axis=1)
    tm.assert_period_array_equal(result, arr)

    # mismatched dimensions along the concatenation axis must raise
    msg = (
        "all the input array dimensions.* for the concatenation axis must match exactly"
    )
    with pytest.raises(ValueError, match=msg):
        _concat.concat_compat([arr[:, :2], arr[:, 2:]], axis=0)

    with pytest.raises(ValueError, match=msg):
        _concat.concat_compat([arr[:2], arr[2:]], axis=1)
venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_dtypes.py ADDED
@@ -0,0 +1,1234 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+ import weakref
3
+
4
+ import numpy as np
5
+ import pytest
6
+ import pytz
7
+
8
+ from pandas._libs.tslibs.dtypes import NpyDatetimeUnit
9
+
10
+ from pandas.core.dtypes.base import _registry as registry
11
+ from pandas.core.dtypes.common import (
12
+ is_bool_dtype,
13
+ is_categorical_dtype,
14
+ is_datetime64_any_dtype,
15
+ is_datetime64_dtype,
16
+ is_datetime64_ns_dtype,
17
+ is_datetime64tz_dtype,
18
+ is_dtype_equal,
19
+ is_interval_dtype,
20
+ is_period_dtype,
21
+ is_string_dtype,
22
+ )
23
+ from pandas.core.dtypes.dtypes import (
24
+ CategoricalDtype,
25
+ DatetimeTZDtype,
26
+ IntervalDtype,
27
+ PeriodDtype,
28
+ )
29
+
30
+ import pandas as pd
31
+ from pandas import (
32
+ Categorical,
33
+ CategoricalIndex,
34
+ DatetimeIndex,
35
+ IntervalIndex,
36
+ Series,
37
+ SparseDtype,
38
+ date_range,
39
+ )
40
+ import pandas._testing as tm
41
+ from pandas.core.arrays.sparse import SparseArray
42
+
43
+
44
class Base:
    """Shared dtype-contract tests; subclasses provide a ``dtype`` fixture."""

    def test_hash(self, dtype):
        # every pandas extension dtype must be hashable
        hash(dtype)

    def test_equality_invalid(self, dtype):
        # comparing against unrelated objects is False, never an error
        assert not dtype == "foo"
        assert not is_dtype_equal(dtype, np.int64)

    def test_numpy_informed(self, dtype):
        # npdev 2020-02-02 changed from "data type not understood" to
        # "Cannot interpret 'foo' as a data type"
        msg = "|".join(
            ["data type not understood", "Cannot interpret '.*' as a data type"]
        )
        with pytest.raises(TypeError, match=msg):
            np.dtype(dtype)

        assert not dtype == np.str_
        assert not np.str_ == dtype

    def test_pickle(self, dtype):
        # make sure our cache is NOT pickled

        # clear the cache
        type(dtype).reset_cache()
        assert not len(dtype._cache_dtypes)

        # force back to the cache
        result = tm.round_trip_pickle(dtype)
        if not isinstance(dtype, PeriodDtype):
            # Because PeriodDtype has a cython class as a base class,
            # it has different pickle semantics, and its cache is re-populated
            # on un-pickling.
            assert not len(dtype._cache_dtypes)
        assert result == dtype
79
+
80
+
81
class TestCategoricalDtype(Base):
    @pytest.fixture
    def dtype(self):
        """
        Class level fixture of dtype for TestCategoricalDtype
        """
        return CategoricalDtype()

    def test_hash_vs_equality(self, dtype):
        # equal dtypes must hash equal
        dtype2 = CategoricalDtype()
        assert dtype == dtype2
        assert dtype2 == dtype
        assert hash(dtype) == hash(dtype2)

    def test_equality(self, dtype):
        # equality against the "category" string alias, both directions
        assert dtype == "category"
        assert is_dtype_equal(dtype, "category")
        assert "category" == dtype
        assert is_dtype_equal("category", dtype)

        assert dtype == CategoricalDtype()
        assert is_dtype_equal(dtype, CategoricalDtype())
        assert CategoricalDtype() == dtype
        assert is_dtype_equal(CategoricalDtype(), dtype)

        # unrelated strings are unequal, not errors
        assert dtype != "foo"
        assert not is_dtype_equal(dtype, "foo")
        assert "foo" != dtype
        assert not is_dtype_equal("foo", dtype)

    def test_construction_from_string(self, dtype):
        result = CategoricalDtype.construct_from_string("category")
        assert is_dtype_equal(dtype, result)
        msg = "Cannot construct a 'CategoricalDtype' from 'foo'"
        with pytest.raises(TypeError, match=msg):
            CategoricalDtype.construct_from_string("foo")

    def test_constructor_invalid(self):
        # a bare string is not a valid categories argument
        msg = "Parameter 'categories' must be list-like"
        with pytest.raises(TypeError, match=msg):
            CategoricalDtype("category")

    # class-level fixtures reused by the parametrizations below
    dtype1 = CategoricalDtype(["a", "b"], ordered=True)
    dtype2 = CategoricalDtype(["x", "y"], ordered=False)
    c = Categorical([0, 1], dtype=dtype1)

    @pytest.mark.parametrize(
        "values, categories, ordered, dtype, expected",
        [
            [None, None, None, None, CategoricalDtype()],
            [None, ["a", "b"], True, None, dtype1],
            [c, None, None, dtype2, dtype2],
            [c, ["x", "y"], False, None, dtype2],
        ],
    )
    def test_from_values_or_dtype(self, values, categories, ordered, dtype, expected):
        result = CategoricalDtype._from_values_or_dtype(
            values, categories, ordered, dtype
        )
        assert result == expected

    @pytest.mark.parametrize(
        "values, categories, ordered, dtype",
        [
            [None, ["a", "b"], True, dtype2],
            [None, ["a", "b"], None, dtype2],
            [None, None, True, dtype2],
        ],
    )
    def test_from_values_or_dtype_raises(self, values, categories, ordered, dtype):
        # dtype is mutually exclusive with categories/ordered
        msg = "Cannot specify `categories` or `ordered` together with `dtype`."
        with pytest.raises(ValueError, match=msg):
            CategoricalDtype._from_values_or_dtype(values, categories, ordered, dtype)

    def test_from_values_or_dtype_invalid_dtype(self):
        msg = "Cannot not construct CategoricalDtype from <class 'object'>"
        with pytest.raises(ValueError, match=msg):
            CategoricalDtype._from_values_or_dtype(None, None, None, object)

    def test_is_dtype(self, dtype):
        assert CategoricalDtype.is_dtype(dtype)
        assert CategoricalDtype.is_dtype("category")
        assert CategoricalDtype.is_dtype(CategoricalDtype())
        assert not CategoricalDtype.is_dtype("foo")
        assert not CategoricalDtype.is_dtype(np.float64)

    def test_basic(self, dtype):
        # is_categorical_dtype is deprecated: every call inside warns
        msg = "is_categorical_dtype is deprecated"
        with tm.assert_produces_warning(DeprecationWarning, match=msg):
            assert is_categorical_dtype(dtype)

            factor = Categorical(["a", "b", "b", "a", "a", "c", "c", "c"])

            s = Series(factor, name="A")

            # dtypes
            assert is_categorical_dtype(s.dtype)
            assert is_categorical_dtype(s)
            assert not is_categorical_dtype(np.dtype("float64"))

    def test_tuple_categories(self):
        # tuples are valid (hashable) categories
        categories = [(1, "a"), (2, "b"), (3, "c")]
        result = CategoricalDtype(categories)
        assert all(result.categories == categories)

    @pytest.mark.parametrize(
        "categories, expected",
        [
            ([True, False], True),
            ([True, False, None], True),
            ([True, False, "a", "b'"], False),
            ([0, 1], False),
        ],
    )
    def test_is_boolean(self, categories, expected):
        # a categorical is boolean only when all its categories are bools
        cat = Categorical(categories)
        assert cat.dtype._is_boolean is expected
        assert is_bool_dtype(cat) is expected
        assert is_bool_dtype(cat.dtype) is expected

    def test_dtype_specific_categorical_dtype(self):
        # the categories retain their original (datetime64[ns]) dtype
        expected = "datetime64[ns]"
        dti = DatetimeIndex([], dtype=expected)
        result = str(Categorical(dti).categories.dtype)
        assert result == expected

    def test_not_string(self):
        # though CategoricalDtype has object kind, it cannot be string
        assert not is_string_dtype(CategoricalDtype())

    def test_repr_range_categories(self):
        rng = pd.Index(range(3))
        dtype = CategoricalDtype(categories=rng, ordered=False)
        result = repr(dtype)

        expected = (
            "CategoricalDtype(categories=range(0, 3), ordered=False, "
            "categories_dtype=int64)"
        )
        assert result == expected

    def test_update_dtype(self):
        # GH 27338
        result = CategoricalDtype(["a"]).update_dtype(Categorical(["b"], ordered=True))
        expected = CategoricalDtype(["b"], ordered=True)
        assert result == expected

    def test_repr(self):
        cat = Categorical(pd.Index([1, 2, 3], dtype="int32"))
        result = cat.dtype.__repr__()
        expected = (
            "CategoricalDtype(categories=[1, 2, 3], ordered=False, "
            "categories_dtype=int32)"
        )
        assert result == expected
236
+
237
+
238
+ class TestDatetimeTZDtype(Base):
239
+ @pytest.fixture
240
+ def dtype(self):
241
+ """
242
+ Class level fixture of dtype for TestDatetimeTZDtype
243
+ """
244
+ return DatetimeTZDtype("ns", "US/Eastern")
245
+
246
+ def test_alias_to_unit_raises(self):
247
+ # 23990
248
+ with pytest.raises(ValueError, match="Passing a dtype alias"):
249
+ DatetimeTZDtype("datetime64[ns, US/Central]")
250
+
251
+ def test_alias_to_unit_bad_alias_raises(self):
252
+ # 23990
253
+ with pytest.raises(TypeError, match=""):
254
+ DatetimeTZDtype("this is a bad string")
255
+
256
+ with pytest.raises(TypeError, match=""):
257
+ DatetimeTZDtype("datetime64[ns, US/NotATZ]")
258
+
259
+ def test_hash_vs_equality(self, dtype):
260
+ # make sure that we satisfy is semantics
261
+ dtype2 = DatetimeTZDtype("ns", "US/Eastern")
262
+ dtype3 = DatetimeTZDtype(dtype2)
263
+ assert dtype == dtype2
264
+ assert dtype2 == dtype
265
+ assert dtype3 == dtype
266
+ assert hash(dtype) == hash(dtype2)
267
+ assert hash(dtype) == hash(dtype3)
268
+
269
+ dtype4 = DatetimeTZDtype("ns", "US/Central")
270
+ assert dtype2 != dtype4
271
+ assert hash(dtype2) != hash(dtype4)
272
+
273
+ def test_construction_non_nanosecond(self):
274
+ res = DatetimeTZDtype("ms", "US/Eastern")
275
+ assert res.unit == "ms"
276
+ assert res._creso == NpyDatetimeUnit.NPY_FR_ms.value
277
+ assert res.str == "|M8[ms]"
278
+ assert str(res) == "datetime64[ms, US/Eastern]"
279
+ assert res.base == np.dtype("M8[ms]")
280
+
281
+ def test_day_not_supported(self):
282
+ msg = "DatetimeTZDtype only supports s, ms, us, ns units"
283
+ with pytest.raises(ValueError, match=msg):
284
+ DatetimeTZDtype("D", "US/Eastern")
285
+
286
+ def test_subclass(self):
287
+ a = DatetimeTZDtype.construct_from_string("datetime64[ns, US/Eastern]")
288
+ b = DatetimeTZDtype.construct_from_string("datetime64[ns, CET]")
289
+
290
+ assert issubclass(type(a), type(a))
291
+ assert issubclass(type(a), type(b))
292
+
293
+ def test_compat(self, dtype):
294
+ msg = "is_datetime64tz_dtype is deprecated"
295
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
296
+ assert is_datetime64tz_dtype(dtype)
297
+ assert is_datetime64tz_dtype("datetime64[ns, US/Eastern]")
298
+ assert is_datetime64_any_dtype(dtype)
299
+ assert is_datetime64_any_dtype("datetime64[ns, US/Eastern]")
300
+ assert is_datetime64_ns_dtype(dtype)
301
+ assert is_datetime64_ns_dtype("datetime64[ns, US/Eastern]")
302
+ assert not is_datetime64_dtype(dtype)
303
+ assert not is_datetime64_dtype("datetime64[ns, US/Eastern]")
304
+
305
+ def test_construction_from_string(self, dtype):
306
+ result = DatetimeTZDtype.construct_from_string("datetime64[ns, US/Eastern]")
307
+ assert is_dtype_equal(dtype, result)
308
+
309
+ @pytest.mark.parametrize(
310
+ "string",
311
+ [
312
+ "foo",
313
+ "datetime64[ns, notatz]",
314
+ # non-nano unit
315
+ "datetime64[ps, UTC]",
316
+ # dateutil str that returns None from gettz
317
+ "datetime64[ns, dateutil/invalid]",
318
+ ],
319
+ )
320
+ def test_construct_from_string_invalid_raises(self, string):
321
+ msg = f"Cannot construct a 'DatetimeTZDtype' from '{string}'"
322
+ with pytest.raises(TypeError, match=re.escape(msg)):
323
+ DatetimeTZDtype.construct_from_string(string)
324
+
325
+ def test_construct_from_string_wrong_type_raises(self):
326
+ msg = "'construct_from_string' expects a string, got <class 'list'>"
327
+ with pytest.raises(TypeError, match=msg):
328
+ DatetimeTZDtype.construct_from_string(["datetime64[ns, notatz]"])
329
+
330
+ def test_is_dtype(self, dtype):
331
+ assert not DatetimeTZDtype.is_dtype(None)
332
+ assert DatetimeTZDtype.is_dtype(dtype)
333
+ assert DatetimeTZDtype.is_dtype("datetime64[ns, US/Eastern]")
334
+ assert DatetimeTZDtype.is_dtype("M8[ns, US/Eastern]")
335
+ assert not DatetimeTZDtype.is_dtype("foo")
336
+ assert DatetimeTZDtype.is_dtype(DatetimeTZDtype("ns", "US/Pacific"))
337
+ assert not DatetimeTZDtype.is_dtype(np.float64)
338
+
339
+ def test_equality(self, dtype):
340
+ assert is_dtype_equal(dtype, "datetime64[ns, US/Eastern]")
341
+ assert is_dtype_equal(dtype, "M8[ns, US/Eastern]")
342
+ assert is_dtype_equal(dtype, DatetimeTZDtype("ns", "US/Eastern"))
343
+ assert not is_dtype_equal(dtype, "foo")
344
+ assert not is_dtype_equal(dtype, DatetimeTZDtype("ns", "CET"))
345
+ assert not is_dtype_equal(
346
+ DatetimeTZDtype("ns", "US/Eastern"), DatetimeTZDtype("ns", "US/Pacific")
347
+ )
348
+
349
+ # numpy compat
350
+ assert is_dtype_equal(np.dtype("M8[ns]"), "datetime64[ns]")
351
+
352
+ assert dtype == "M8[ns, US/Eastern]"
353
+
354
+ def test_basic(self, dtype):
355
+ msg = "is_datetime64tz_dtype is deprecated"
356
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
357
+ assert is_datetime64tz_dtype(dtype)
358
+
359
+ dr = date_range("20130101", periods=3, tz="US/Eastern")
360
+ s = Series(dr, name="A")
361
+
362
+ # dtypes
363
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
364
+ assert is_datetime64tz_dtype(s.dtype)
365
+ assert is_datetime64tz_dtype(s)
366
+ assert not is_datetime64tz_dtype(np.dtype("float64"))
367
+ assert not is_datetime64tz_dtype(1.0)
368
+
369
+ def test_dst(self):
370
+ dr1 = date_range("2013-01-01", periods=3, tz="US/Eastern")
371
+ s1 = Series(dr1, name="A")
372
+ assert isinstance(s1.dtype, DatetimeTZDtype)
373
+
374
+ dr2 = date_range("2013-08-01", periods=3, tz="US/Eastern")
375
+ s2 = Series(dr2, name="A")
376
+ assert isinstance(s2.dtype, DatetimeTZDtype)
377
+ assert s1.dtype == s2.dtype
378
+
379
+ @pytest.mark.parametrize("tz", ["UTC", "US/Eastern"])
380
+ @pytest.mark.parametrize("constructor", ["M8", "datetime64"])
381
+ def test_parser(self, tz, constructor):
382
+ # pr #11245
383
+ dtz_str = f"{constructor}[ns, {tz}]"
384
+ result = DatetimeTZDtype.construct_from_string(dtz_str)
385
+ expected = DatetimeTZDtype("ns", tz)
386
+ assert result == expected
387
+
388
+ def test_empty(self):
389
+ with pytest.raises(TypeError, match="A 'tz' is required."):
390
+ DatetimeTZDtype()
391
+
392
+ def test_tz_standardize(self):
393
+ # GH 24713
394
+ tz = pytz.timezone("US/Eastern")
395
+ dr = date_range("2013-01-01", periods=3, tz="US/Eastern")
396
+ dtype = DatetimeTZDtype("ns", dr.tz)
397
+ assert dtype.tz == tz
398
+ dtype = DatetimeTZDtype("ns", dr[0].tz)
399
+ assert dtype.tz == tz
400
+
401
+
402
+ class TestPeriodDtype(Base):
403
+ @pytest.fixture
404
+ def dtype(self):
405
+ """
406
+ Class level fixture of dtype for TestPeriodDtype
407
+ """
408
+ return PeriodDtype("D")
409
+
410
+ def test_hash_vs_equality(self, dtype):
411
+ # make sure that we satisfy is semantics
412
+ dtype2 = PeriodDtype("D")
413
+ dtype3 = PeriodDtype(dtype2)
414
+ assert dtype == dtype2
415
+ assert dtype2 == dtype
416
+ assert dtype3 == dtype
417
+ assert dtype is not dtype2
418
+ assert dtype2 is not dtype
419
+ assert dtype3 is not dtype
420
+ assert hash(dtype) == hash(dtype2)
421
+ assert hash(dtype) == hash(dtype3)
422
+
423
+ def test_construction(self):
424
+ with pytest.raises(ValueError, match="Invalid frequency: xx"):
425
+ PeriodDtype("xx")
426
+
427
+ for s in ["period[D]", "Period[D]", "D"]:
428
+ dt = PeriodDtype(s)
429
+ assert dt.freq == pd.tseries.offsets.Day()
430
+
431
+ for s in ["period[3D]", "Period[3D]", "3D"]:
432
+ dt = PeriodDtype(s)
433
+ assert dt.freq == pd.tseries.offsets.Day(3)
434
+
435
+ for s in [
436
+ "period[26h]",
437
+ "Period[26h]",
438
+ "26h",
439
+ "period[1D2h]",
440
+ "Period[1D2h]",
441
+ "1D2h",
442
+ ]:
443
+ dt = PeriodDtype(s)
444
+ assert dt.freq == pd.tseries.offsets.Hour(26)
445
+
446
+ def test_cannot_use_custom_businessday(self):
447
+ # GH#52534
448
+ msg = "C is not supported as period frequency"
449
+ msg1 = "<CustomBusinessDay> is not supported as period frequency"
450
+ msg2 = r"PeriodDtype\[B\] is deprecated"
451
+ with pytest.raises(ValueError, match=msg):
452
+ PeriodDtype("C")
453
+ with pytest.raises(ValueError, match=msg1):
454
+ with tm.assert_produces_warning(FutureWarning, match=msg2):
455
+ PeriodDtype(pd.offsets.CustomBusinessDay())
456
+
457
+ def test_subclass(self):
458
+ a = PeriodDtype("period[D]")
459
+ b = PeriodDtype("period[3D]")
460
+
461
+ assert issubclass(type(a), type(a))
462
+ assert issubclass(type(a), type(b))
463
+
464
+ def test_identity(self):
465
+ assert PeriodDtype("period[D]") == PeriodDtype("period[D]")
466
+ assert PeriodDtype("period[D]") is not PeriodDtype("period[D]")
467
+
468
+ assert PeriodDtype("period[3D]") == PeriodDtype("period[3D]")
469
+ assert PeriodDtype("period[3D]") is not PeriodDtype("period[3D]")
470
+
471
+ assert PeriodDtype("period[1s1us]") == PeriodDtype("period[1000001us]")
472
+ assert PeriodDtype("period[1s1us]") is not PeriodDtype("period[1000001us]")
473
+
474
+ def test_compat(self, dtype):
475
+ assert not is_datetime64_ns_dtype(dtype)
476
+ assert not is_datetime64_ns_dtype("period[D]")
477
+ assert not is_datetime64_dtype(dtype)
478
+ assert not is_datetime64_dtype("period[D]")
479
+
480
+ def test_construction_from_string(self, dtype):
481
+ result = PeriodDtype("period[D]")
482
+ assert is_dtype_equal(dtype, result)
483
+ result = PeriodDtype.construct_from_string("period[D]")
484
+ assert is_dtype_equal(dtype, result)
485
+
486
+ with pytest.raises(TypeError, match="list"):
487
+ PeriodDtype.construct_from_string([1, 2, 3])
488
+
489
+ @pytest.mark.parametrize(
490
+ "string",
491
+ [
492
+ "foo",
493
+ "period[foo]",
494
+ "foo[D]",
495
+ "datetime64[ns]",
496
+ "datetime64[ns, US/Eastern]",
497
+ ],
498
+ )
499
+ def test_construct_dtype_from_string_invalid_raises(self, string):
500
+ msg = f"Cannot construct a 'PeriodDtype' from '{string}'"
501
+ with pytest.raises(TypeError, match=re.escape(msg)):
502
+ PeriodDtype.construct_from_string(string)
503
+
504
+ def test_is_dtype(self, dtype):
505
+ assert PeriodDtype.is_dtype(dtype)
506
+ assert PeriodDtype.is_dtype("period[D]")
507
+ assert PeriodDtype.is_dtype("period[3D]")
508
+ assert PeriodDtype.is_dtype(PeriodDtype("3D"))
509
+ assert PeriodDtype.is_dtype("period[us]")
510
+ assert PeriodDtype.is_dtype("period[s]")
511
+ assert PeriodDtype.is_dtype(PeriodDtype("us"))
512
+ assert PeriodDtype.is_dtype(PeriodDtype("s"))
513
+
514
+ assert not PeriodDtype.is_dtype("D")
515
+ assert not PeriodDtype.is_dtype("3D")
516
+ assert not PeriodDtype.is_dtype("U")
517
+ assert not PeriodDtype.is_dtype("s")
518
+ assert not PeriodDtype.is_dtype("foo")
519
+ assert not PeriodDtype.is_dtype(np.object_)
520
+ assert not PeriodDtype.is_dtype(np.int64)
521
+ assert not PeriodDtype.is_dtype(np.float64)
522
+
523
+ def test_equality(self, dtype):
524
+ assert is_dtype_equal(dtype, "period[D]")
525
+ assert is_dtype_equal(dtype, PeriodDtype("D"))
526
+ assert is_dtype_equal(dtype, PeriodDtype("D"))
527
+ assert is_dtype_equal(PeriodDtype("D"), PeriodDtype("D"))
528
+
529
+ assert not is_dtype_equal(dtype, "D")
530
+ assert not is_dtype_equal(PeriodDtype("D"), PeriodDtype("2D"))
531
+
532
+ def test_basic(self, dtype):
533
+ msg = "is_period_dtype is deprecated"
534
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
535
+ assert is_period_dtype(dtype)
536
+
537
+ pidx = pd.period_range("2013-01-01 09:00", periods=5, freq="h")
538
+
539
+ assert is_period_dtype(pidx.dtype)
540
+ assert is_period_dtype(pidx)
541
+
542
+ s = Series(pidx, name="A")
543
+
544
+ assert is_period_dtype(s.dtype)
545
+ assert is_period_dtype(s)
546
+
547
+ assert not is_period_dtype(np.dtype("float64"))
548
+ assert not is_period_dtype(1.0)
549
+
550
+ def test_freq_argument_required(self):
551
+ # GH#27388
552
+ msg = "missing 1 required positional argument: 'freq'"
553
+ with pytest.raises(TypeError, match=msg):
554
+ PeriodDtype()
555
+
556
+ msg = "PeriodDtype argument should be string or BaseOffset, got NoneType"
557
+ with pytest.raises(TypeError, match=msg):
558
+ # GH#51790
559
+ PeriodDtype(None)
560
+
561
+ def test_not_string(self):
562
+ # though PeriodDtype has object kind, it cannot be string
563
+ assert not is_string_dtype(PeriodDtype("D"))
564
+
565
+ def test_perioddtype_caching_dateoffset_normalize(self):
566
+ # GH 24121
567
+ per_d = PeriodDtype(pd.offsets.YearEnd(normalize=True))
568
+ assert per_d.freq.normalize
569
+
570
+ per_d2 = PeriodDtype(pd.offsets.YearEnd(normalize=False))
571
+ assert not per_d2.freq.normalize
572
+
573
+ def test_dont_keep_ref_after_del(self):
574
+ # GH 54184
575
+ dtype = PeriodDtype("D")
576
+ ref = weakref.ref(dtype)
577
+ del dtype
578
+ assert ref() is None
579
+
580
+
581
+ class TestIntervalDtype(Base):
582
+ @pytest.fixture
583
+ def dtype(self):
584
+ """
585
+ Class level fixture of dtype for TestIntervalDtype
586
+ """
587
+ return IntervalDtype("int64", "right")
588
+
589
+ def test_hash_vs_equality(self, dtype):
590
+ # make sure that we satisfy is semantics
591
+ dtype2 = IntervalDtype("int64", "right")
592
+ dtype3 = IntervalDtype(dtype2)
593
+ assert dtype == dtype2
594
+ assert dtype2 == dtype
595
+ assert dtype3 == dtype
596
+ assert dtype is not dtype2
597
+ assert dtype2 is not dtype3
598
+ assert dtype3 is not dtype
599
+ assert hash(dtype) == hash(dtype2)
600
+ assert hash(dtype) == hash(dtype3)
601
+
602
+ dtype1 = IntervalDtype("interval")
603
+ dtype2 = IntervalDtype(dtype1)
604
+ dtype3 = IntervalDtype("interval")
605
+ assert dtype2 == dtype1
606
+ assert dtype2 == dtype2
607
+ assert dtype2 == dtype3
608
+ assert dtype2 is not dtype1
609
+ assert dtype2 is dtype2
610
+ assert dtype2 is not dtype3
611
+ assert hash(dtype2) == hash(dtype1)
612
+ assert hash(dtype2) == hash(dtype2)
613
+ assert hash(dtype2) == hash(dtype3)
614
+
615
+ @pytest.mark.parametrize(
616
+ "subtype", ["interval[int64]", "Interval[int64]", "int64", np.dtype("int64")]
617
+ )
618
+ def test_construction(self, subtype):
619
+ i = IntervalDtype(subtype, closed="right")
620
+ assert i.subtype == np.dtype("int64")
621
+ msg = "is_interval_dtype is deprecated"
622
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
623
+ assert is_interval_dtype(i)
624
+
625
+ @pytest.mark.parametrize(
626
+ "subtype", ["interval[int64]", "Interval[int64]", "int64", np.dtype("int64")]
627
+ )
628
+ def test_construction_allows_closed_none(self, subtype):
629
+ # GH#38394
630
+ dtype = IntervalDtype(subtype)
631
+
632
+ assert dtype.closed is None
633
+
634
+ def test_closed_mismatch(self):
635
+ msg = "'closed' keyword does not match value specified in dtype string"
636
+ with pytest.raises(ValueError, match=msg):
637
+ IntervalDtype("interval[int64, left]", "right")
638
+
639
+ @pytest.mark.parametrize("subtype", [None, "interval", "Interval"])
640
+ def test_construction_generic(self, subtype):
641
+ # generic
642
+ i = IntervalDtype(subtype)
643
+ assert i.subtype is None
644
+ msg = "is_interval_dtype is deprecated"
645
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
646
+ assert is_interval_dtype(i)
647
+
648
+ @pytest.mark.parametrize(
649
+ "subtype",
650
+ [
651
+ CategoricalDtype(list("abc"), False),
652
+ CategoricalDtype(list("wxyz"), True),
653
+ object,
654
+ str,
655
+ "<U10",
656
+ "interval[category]",
657
+ "interval[object]",
658
+ ],
659
+ )
660
+ def test_construction_not_supported(self, subtype):
661
+ # GH 19016
662
+ msg = (
663
+ "category, object, and string subtypes are not supported "
664
+ "for IntervalDtype"
665
+ )
666
+ with pytest.raises(TypeError, match=msg):
667
+ IntervalDtype(subtype)
668
+
669
+ @pytest.mark.parametrize("subtype", ["xx", "IntervalA", "Interval[foo]"])
670
+ def test_construction_errors(self, subtype):
671
+ msg = "could not construct IntervalDtype"
672
+ with pytest.raises(TypeError, match=msg):
673
+ IntervalDtype(subtype)
674
+
675
+ def test_closed_must_match(self):
676
+ # GH#37933
677
+ dtype = IntervalDtype(np.float64, "left")
678
+
679
+ msg = "dtype.closed and 'closed' do not match"
680
+ with pytest.raises(ValueError, match=msg):
681
+ IntervalDtype(dtype, closed="both")
682
+
683
+ def test_closed_invalid(self):
684
+ with pytest.raises(ValueError, match="closed must be one of"):
685
+ IntervalDtype(np.float64, "foo")
686
+
687
+ def test_construction_from_string(self, dtype):
688
+ result = IntervalDtype("interval[int64, right]")
689
+ assert is_dtype_equal(dtype, result)
690
+ result = IntervalDtype.construct_from_string("interval[int64, right]")
691
+ assert is_dtype_equal(dtype, result)
692
+
693
+ @pytest.mark.parametrize("string", [0, 3.14, ("a", "b"), None])
694
+ def test_construction_from_string_errors(self, string):
695
+ # these are invalid entirely
696
+ msg = f"'construct_from_string' expects a string, got {type(string)}"
697
+
698
+ with pytest.raises(TypeError, match=re.escape(msg)):
699
+ IntervalDtype.construct_from_string(string)
700
+
701
+ @pytest.mark.parametrize("string", ["foo", "foo[int64]", "IntervalA"])
702
+ def test_construction_from_string_error_subtype(self, string):
703
+ # this is an invalid subtype
704
+ msg = (
705
+ "Incorrectly formatted string passed to constructor. "
706
+ r"Valid formats include Interval or Interval\[dtype\] "
707
+ "where dtype is numeric, datetime, or timedelta"
708
+ )
709
+
710
+ with pytest.raises(TypeError, match=msg):
711
+ IntervalDtype.construct_from_string(string)
712
+
713
+ def test_subclass(self):
714
+ a = IntervalDtype("interval[int64, right]")
715
+ b = IntervalDtype("interval[int64, right]")
716
+
717
+ assert issubclass(type(a), type(a))
718
+ assert issubclass(type(a), type(b))
719
+
720
+ def test_is_dtype(self, dtype):
721
+ assert IntervalDtype.is_dtype(dtype)
722
+ assert IntervalDtype.is_dtype("interval")
723
+ assert IntervalDtype.is_dtype(IntervalDtype("float64"))
724
+ assert IntervalDtype.is_dtype(IntervalDtype("int64"))
725
+ assert IntervalDtype.is_dtype(IntervalDtype(np.int64))
726
+ assert IntervalDtype.is_dtype(IntervalDtype("float64", "left"))
727
+ assert IntervalDtype.is_dtype(IntervalDtype("int64", "right"))
728
+ assert IntervalDtype.is_dtype(IntervalDtype(np.int64, "both"))
729
+
730
+ assert not IntervalDtype.is_dtype("D")
731
+ assert not IntervalDtype.is_dtype("3D")
732
+ assert not IntervalDtype.is_dtype("us")
733
+ assert not IntervalDtype.is_dtype("S")
734
+ assert not IntervalDtype.is_dtype("foo")
735
+ assert not IntervalDtype.is_dtype("IntervalA")
736
+ assert not IntervalDtype.is_dtype(np.object_)
737
+ assert not IntervalDtype.is_dtype(np.int64)
738
+ assert not IntervalDtype.is_dtype(np.float64)
739
+
740
+ def test_equality(self, dtype):
741
+ assert is_dtype_equal(dtype, "interval[int64, right]")
742
+ assert is_dtype_equal(dtype, IntervalDtype("int64", "right"))
743
+ assert is_dtype_equal(
744
+ IntervalDtype("int64", "right"), IntervalDtype("int64", "right")
745
+ )
746
+
747
+ assert not is_dtype_equal(dtype, "interval[int64]")
748
+ assert not is_dtype_equal(dtype, IntervalDtype("int64"))
749
+ assert not is_dtype_equal(
750
+ IntervalDtype("int64", "right"), IntervalDtype("int64")
751
+ )
752
+
753
+ assert not is_dtype_equal(dtype, "int64")
754
+ assert not is_dtype_equal(
755
+ IntervalDtype("int64", "neither"), IntervalDtype("float64", "right")
756
+ )
757
+ assert not is_dtype_equal(
758
+ IntervalDtype("int64", "both"), IntervalDtype("int64", "left")
759
+ )
760
+
761
+ # invalid subtype comparisons do not raise when directly compared
762
+ dtype1 = IntervalDtype("float64", "left")
763
+ dtype2 = IntervalDtype("datetime64[ns, US/Eastern]", "left")
764
+ assert dtype1 != dtype2
765
+ assert dtype2 != dtype1
766
+
767
+ @pytest.mark.parametrize(
768
+ "subtype",
769
+ [
770
+ None,
771
+ "interval",
772
+ "Interval",
773
+ "int64",
774
+ "uint64",
775
+ "float64",
776
+ "complex128",
777
+ "datetime64",
778
+ "timedelta64",
779
+ PeriodDtype("Q"),
780
+ ],
781
+ )
782
+ def test_equality_generic(self, subtype):
783
+ # GH 18980
784
+ closed = "right" if subtype is not None else None
785
+ dtype = IntervalDtype(subtype, closed=closed)
786
+ assert is_dtype_equal(dtype, "interval")
787
+ assert is_dtype_equal(dtype, IntervalDtype())
788
+
789
+ @pytest.mark.parametrize(
790
+ "subtype",
791
+ [
792
+ "int64",
793
+ "uint64",
794
+ "float64",
795
+ "complex128",
796
+ "datetime64",
797
+ "timedelta64",
798
+ PeriodDtype("Q"),
799
+ ],
800
+ )
801
+ def test_name_repr(self, subtype):
802
+ # GH 18980
803
+ closed = "right" if subtype is not None else None
804
+ dtype = IntervalDtype(subtype, closed=closed)
805
+ expected = f"interval[{subtype}, {closed}]"
806
+ assert str(dtype) == expected
807
+ assert dtype.name == "interval"
808
+
809
+ @pytest.mark.parametrize("subtype", [None, "interval", "Interval"])
810
+ def test_name_repr_generic(self, subtype):
811
+ # GH 18980
812
+ dtype = IntervalDtype(subtype)
813
+ assert str(dtype) == "interval"
814
+ assert dtype.name == "interval"
815
+
816
+ def test_basic(self, dtype):
817
+ msg = "is_interval_dtype is deprecated"
818
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
819
+ assert is_interval_dtype(dtype)
820
+
821
+ ii = IntervalIndex.from_breaks(range(3))
822
+
823
+ assert is_interval_dtype(ii.dtype)
824
+ assert is_interval_dtype(ii)
825
+
826
+ s = Series(ii, name="A")
827
+
828
+ assert is_interval_dtype(s.dtype)
829
+ assert is_interval_dtype(s)
830
+
831
+ def test_basic_dtype(self):
832
+ msg = "is_interval_dtype is deprecated"
833
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
834
+ assert is_interval_dtype("interval[int64, both]")
835
+ assert is_interval_dtype(IntervalIndex.from_tuples([(0, 1)]))
836
+ assert is_interval_dtype(IntervalIndex.from_breaks(np.arange(4)))
837
+ assert is_interval_dtype(
838
+ IntervalIndex.from_breaks(date_range("20130101", periods=3))
839
+ )
840
+ assert not is_interval_dtype("U")
841
+ assert not is_interval_dtype("S")
842
+ assert not is_interval_dtype("foo")
843
+ assert not is_interval_dtype(np.object_)
844
+ assert not is_interval_dtype(np.int64)
845
+ assert not is_interval_dtype(np.float64)
846
+
847
+ def test_caching(self):
848
+ # GH 54184: Caching not shown to improve performance
849
+ IntervalDtype.reset_cache()
850
+ dtype = IntervalDtype("int64", "right")
851
+ assert len(IntervalDtype._cache_dtypes) == 0
852
+
853
+ IntervalDtype("interval")
854
+ assert len(IntervalDtype._cache_dtypes) == 0
855
+
856
+ IntervalDtype.reset_cache()
857
+ tm.round_trip_pickle(dtype)
858
+ assert len(IntervalDtype._cache_dtypes) == 0
859
+
860
+ def test_not_string(self):
861
+ # GH30568: though IntervalDtype has object kind, it cannot be string
862
+ assert not is_string_dtype(IntervalDtype())
863
+
864
+ def test_unpickling_without_closed(self):
865
+ # GH#38394
866
+ dtype = IntervalDtype("interval")
867
+
868
+ assert dtype._closed is None
869
+
870
+ tm.round_trip_pickle(dtype)
871
+
872
+ def test_dont_keep_ref_after_del(self):
873
+ # GH 54184
874
+ dtype = IntervalDtype("int64", "right")
875
+ ref = weakref.ref(dtype)
876
+ del dtype
877
+ assert ref() is None
878
+
879
+
880
+ class TestCategoricalDtypeParametrized:
881
+ @pytest.mark.parametrize(
882
+ "categories",
883
+ [
884
+ list("abcd"),
885
+ np.arange(1000),
886
+ ["a", "b", 10, 2, 1.3, True],
887
+ [True, False],
888
+ date_range("2017", periods=4),
889
+ ],
890
+ )
891
+ def test_basic(self, categories, ordered):
892
+ c1 = CategoricalDtype(categories, ordered=ordered)
893
+ tm.assert_index_equal(c1.categories, pd.Index(categories))
894
+ assert c1.ordered is ordered
895
+
896
+ def test_order_matters(self):
897
+ categories = ["a", "b"]
898
+ c1 = CategoricalDtype(categories, ordered=True)
899
+ c2 = CategoricalDtype(categories, ordered=False)
900
+ c3 = CategoricalDtype(categories, ordered=None)
901
+ assert c1 is not c2
902
+ assert c1 is not c3
903
+
904
+ @pytest.mark.parametrize("ordered", [False, None])
905
+ def test_unordered_same(self, ordered):
906
+ c1 = CategoricalDtype(["a", "b"], ordered=ordered)
907
+ c2 = CategoricalDtype(["b", "a"], ordered=ordered)
908
+ assert hash(c1) == hash(c2)
909
+
910
+ def test_categories(self):
911
+ result = CategoricalDtype(["a", "b", "c"])
912
+ tm.assert_index_equal(result.categories, pd.Index(["a", "b", "c"]))
913
+ assert result.ordered is False
914
+
915
+ def test_equal_but_different(self):
916
+ c1 = CategoricalDtype([1, 2, 3])
917
+ c2 = CategoricalDtype([1.0, 2.0, 3.0])
918
+ assert c1 is not c2
919
+ assert c1 != c2
920
+
921
+ def test_equal_but_different_mixed_dtypes(self):
922
+ c1 = CategoricalDtype([1, 2, "3"])
923
+ c2 = CategoricalDtype(["3", 1, 2])
924
+ assert c1 is not c2
925
+ assert c1 == c2
926
+
927
+ def test_equal_empty_ordered(self):
928
+ c1 = CategoricalDtype([], ordered=True)
929
+ c2 = CategoricalDtype([], ordered=True)
930
+ assert c1 is not c2
931
+ assert c1 == c2
932
+
933
+ def test_equal_empty_unordered(self):
934
+ c1 = CategoricalDtype([])
935
+ c2 = CategoricalDtype([])
936
+ assert c1 is not c2
937
+ assert c1 == c2
938
+
939
+ @pytest.mark.parametrize("v1, v2", [([1, 2, 3], [1, 2, 3]), ([1, 2, 3], [3, 2, 1])])
940
+ def test_order_hashes_different(self, v1, v2):
941
+ c1 = CategoricalDtype(v1, ordered=False)
942
+ c2 = CategoricalDtype(v2, ordered=True)
943
+ c3 = CategoricalDtype(v1, ordered=None)
944
+ assert c1 is not c2
945
+ assert c1 is not c3
946
+
947
+ def test_nan_invalid(self):
948
+ msg = "Categorical categories cannot be null"
949
+ with pytest.raises(ValueError, match=msg):
950
+ CategoricalDtype([1, 2, np.nan])
951
+
952
+ def test_non_unique_invalid(self):
953
+ msg = "Categorical categories must be unique"
954
+ with pytest.raises(ValueError, match=msg):
955
+ CategoricalDtype([1, 2, 1])
956
+
957
+ def test_same_categories_different_order(self):
958
+ c1 = CategoricalDtype(["a", "b"], ordered=True)
959
+ c2 = CategoricalDtype(["b", "a"], ordered=True)
960
+ assert c1 is not c2
961
+
962
+ @pytest.mark.parametrize("ordered1", [True, False, None])
963
+ @pytest.mark.parametrize("ordered2", [True, False, None])
964
+ def test_categorical_equality(self, ordered1, ordered2):
965
+ # same categories, same order
966
+ # any combination of None/False are equal
967
+ # True/True is the only combination with True that are equal
968
+ c1 = CategoricalDtype(list("abc"), ordered1)
969
+ c2 = CategoricalDtype(list("abc"), ordered2)
970
+ result = c1 == c2
971
+ expected = bool(ordered1) is bool(ordered2)
972
+ assert result is expected
973
+
974
+ # same categories, different order
975
+ # any combination of None/False are equal (order doesn't matter)
976
+ # any combination with True are not equal (different order of cats)
977
+ c1 = CategoricalDtype(list("abc"), ordered1)
978
+ c2 = CategoricalDtype(list("cab"), ordered2)
979
+ result = c1 == c2
980
+ expected = (bool(ordered1) is False) and (bool(ordered2) is False)
981
+ assert result is expected
982
+
983
+ # different categories
984
+ c2 = CategoricalDtype([1, 2, 3], ordered2)
985
+ assert c1 != c2
986
+
987
+ # none categories
988
+ c1 = CategoricalDtype(list("abc"), ordered1)
989
+ c2 = CategoricalDtype(None, ordered2)
990
+ c3 = CategoricalDtype(None, ordered1)
991
+ assert c1 != c2
992
+ assert c2 != c1
993
+ assert c2 == c3
994
+
995
+ def test_categorical_dtype_equality_requires_categories(self):
996
+ # CategoricalDtype with categories=None is *not* equal to
997
+ # any fully-initialized CategoricalDtype
998
+ first = CategoricalDtype(["a", "b"])
999
+ second = CategoricalDtype()
1000
+ third = CategoricalDtype(ordered=True)
1001
+
1002
+ assert second == second
1003
+ assert third == third
1004
+
1005
+ assert first != second
1006
+ assert second != first
1007
+ assert first != third
1008
+ assert third != first
1009
+ assert second == third
1010
+ assert third == second
1011
+
1012
+ @pytest.mark.parametrize("categories", [list("abc"), None])
1013
+ @pytest.mark.parametrize("other", ["category", "not a category"])
1014
+ def test_categorical_equality_strings(self, categories, ordered, other):
1015
+ c1 = CategoricalDtype(categories, ordered)
1016
+ result = c1 == other
1017
+ expected = other == "category"
1018
+ assert result is expected
1019
+
1020
+ def test_invalid_raises(self):
1021
+ with pytest.raises(TypeError, match="ordered"):
1022
+ CategoricalDtype(["a", "b"], ordered="foo")
1023
+
1024
+ with pytest.raises(TypeError, match="'categories' must be list-like"):
1025
+ CategoricalDtype("category")
1026
+
1027
+ def test_mixed(self):
1028
+ a = CategoricalDtype(["a", "b", 1, 2])
1029
+ b = CategoricalDtype(["a", "b", "1", "2"])
1030
+ assert hash(a) != hash(b)
1031
+
1032
+ def test_from_categorical_dtype_identity(self):
1033
+ c1 = Categorical([1, 2], categories=[1, 2, 3], ordered=True)
1034
+ # Identity test for no changes
1035
+ c2 = CategoricalDtype._from_categorical_dtype(c1)
1036
+ assert c2 is c1
1037
+
1038
+ def test_from_categorical_dtype_categories(self):
1039
+ c1 = Categorical([1, 2], categories=[1, 2, 3], ordered=True)
1040
+ # override categories
1041
+ result = CategoricalDtype._from_categorical_dtype(c1, categories=[2, 3])
1042
+ assert result == CategoricalDtype([2, 3], ordered=True)
1043
+
1044
+ def test_from_categorical_dtype_ordered(self):
1045
+ c1 = Categorical([1, 2], categories=[1, 2, 3], ordered=True)
1046
+ # override ordered
1047
+ result = CategoricalDtype._from_categorical_dtype(c1, ordered=False)
1048
+ assert result == CategoricalDtype([1, 2, 3], ordered=False)
1049
+
1050
+ def test_from_categorical_dtype_both(self):
1051
+ c1 = Categorical([1, 2], categories=[1, 2, 3], ordered=True)
1052
+ # override ordered
1053
+ result = CategoricalDtype._from_categorical_dtype(
1054
+ c1, categories=[1, 2], ordered=False
1055
+ )
1056
+ assert result == CategoricalDtype([1, 2], ordered=False)
1057
+
1058
+ def test_str_vs_repr(self, ordered, using_infer_string):
1059
+ c1 = CategoricalDtype(["a", "b"], ordered=ordered)
1060
+ assert str(c1) == "category"
1061
+ # Py2 will have unicode prefixes
1062
+ dtype = "string" if using_infer_string else "object"
1063
+ pat = (
1064
+ r"CategoricalDtype\(categories=\[.*\], ordered={ordered}, "
1065
+ rf"categories_dtype={dtype}\)"
1066
+ )
1067
+ assert re.match(pat.format(ordered=ordered), repr(c1))
1068
+
1069
+ def test_categorical_categories(self):
1070
+ # GH17884
1071
+ c1 = CategoricalDtype(Categorical(["a", "b"]))
1072
+ tm.assert_index_equal(c1.categories, pd.Index(["a", "b"]))
1073
+ c1 = CategoricalDtype(CategoricalIndex(["a", "b"]))
1074
+ tm.assert_index_equal(c1.categories, pd.Index(["a", "b"]))
1075
+
1076
+ @pytest.mark.parametrize(
1077
+ "new_categories", [list("abc"), list("cba"), list("wxyz"), None]
1078
+ )
1079
+ @pytest.mark.parametrize("new_ordered", [True, False, None])
1080
+ def test_update_dtype(self, ordered, new_categories, new_ordered):
1081
+ original_categories = list("abc")
1082
+ dtype = CategoricalDtype(original_categories, ordered)
1083
+ new_dtype = CategoricalDtype(new_categories, new_ordered)
1084
+
1085
+ result = dtype.update_dtype(new_dtype)
1086
+ expected_categories = pd.Index(new_categories or original_categories)
1087
+ expected_ordered = new_ordered if new_ordered is not None else dtype.ordered
1088
+
1089
+ tm.assert_index_equal(result.categories, expected_categories)
1090
+ assert result.ordered is expected_ordered
1091
+
1092
+ def test_update_dtype_string(self, ordered):
1093
+ dtype = CategoricalDtype(list("abc"), ordered)
1094
+ expected_categories = dtype.categories
1095
+ expected_ordered = dtype.ordered
1096
+ result = dtype.update_dtype("category")
1097
+ tm.assert_index_equal(result.categories, expected_categories)
1098
+ assert result.ordered is expected_ordered
1099
+
1100
+ @pytest.mark.parametrize("bad_dtype", ["foo", object, np.int64, PeriodDtype("Q")])
1101
+ def test_update_dtype_errors(self, bad_dtype):
1102
+ dtype = CategoricalDtype(list("abc"), False)
1103
+ msg = "a CategoricalDtype must be passed to perform an update, "
1104
+ with pytest.raises(ValueError, match=msg):
1105
+ dtype.update_dtype(bad_dtype)
1106
+
1107
+
1108
+ @pytest.mark.parametrize(
1109
+ "dtype", [CategoricalDtype, IntervalDtype, DatetimeTZDtype, PeriodDtype]
1110
+ )
1111
+ def test_registry(dtype):
1112
+ assert dtype in registry.dtypes
1113
+
1114
+
1115
+ @pytest.mark.parametrize(
1116
+ "dtype, expected",
1117
+ [
1118
+ ("int64", None),
1119
+ ("interval", IntervalDtype()),
1120
+ ("interval[int64, neither]", IntervalDtype()),
1121
+ ("interval[datetime64[ns], left]", IntervalDtype("datetime64[ns]", "left")),
1122
+ ("period[D]", PeriodDtype("D")),
1123
+ ("category", CategoricalDtype()),
1124
+ ("datetime64[ns, US/Eastern]", DatetimeTZDtype("ns", "US/Eastern")),
1125
+ ],
1126
+ )
1127
+ def test_registry_find(dtype, expected):
1128
+ assert registry.find(dtype) == expected
1129
+
1130
+
1131
+ @pytest.mark.parametrize(
1132
+ "dtype, expected",
1133
+ [
1134
+ (str, False),
1135
+ (int, False),
1136
+ (bool, True),
1137
+ (np.bool_, True),
1138
+ (np.array(["a", "b"]), False),
1139
+ (Series([1, 2]), False),
1140
+ (np.array([True, False]), True),
1141
+ (Series([True, False]), True),
1142
+ (SparseArray([True, False]), True),
1143
+ (SparseDtype(bool), True),
1144
+ ],
1145
+ )
1146
+ def test_is_bool_dtype(dtype, expected):
1147
+ result = is_bool_dtype(dtype)
1148
+ assert result is expected
1149
+
1150
+
1151
+ def test_is_bool_dtype_sparse():
1152
+ result = is_bool_dtype(Series(SparseArray([True, False])))
1153
+ assert result is True
1154
+
1155
+
1156
+ @pytest.mark.parametrize(
1157
+ "check",
1158
+ [
1159
+ is_categorical_dtype,
1160
+ is_datetime64tz_dtype,
1161
+ is_period_dtype,
1162
+ is_datetime64_ns_dtype,
1163
+ is_datetime64_dtype,
1164
+ is_interval_dtype,
1165
+ is_datetime64_any_dtype,
1166
+ is_string_dtype,
1167
+ is_bool_dtype,
1168
+ ],
1169
+ )
1170
+ def test_is_dtype_no_warning(check):
1171
+ data = pd.DataFrame({"A": [1, 2]})
1172
+
1173
+ warn = None
1174
+ msg = f"{check.__name__} is deprecated"
1175
+ if (
1176
+ check is is_categorical_dtype
1177
+ or check is is_interval_dtype
1178
+ or check is is_datetime64tz_dtype
1179
+ or check is is_period_dtype
1180
+ ):
1181
+ warn = DeprecationWarning
1182
+
1183
+ with tm.assert_produces_warning(warn, match=msg):
1184
+ check(data)
1185
+
1186
+ with tm.assert_produces_warning(warn, match=msg):
1187
+ check(data["A"])
1188
+
1189
+
1190
+ def test_period_dtype_compare_to_string():
1191
+ # https://github.com/pandas-dev/pandas/issues/37265
1192
+ dtype = PeriodDtype(freq="M")
1193
+ assert (dtype == "period[M]") is True
1194
+ assert (dtype != "period[M]") is False
1195
+
1196
+
1197
+ def test_compare_complex_dtypes():
1198
+ # GH 28050
1199
+ df = pd.DataFrame(np.arange(5).astype(np.complex128))
1200
+ msg = "'<' not supported between instances of 'complex' and 'complex'"
1201
+
1202
+ with pytest.raises(TypeError, match=msg):
1203
+ df < df.astype(object)
1204
+
1205
+ with pytest.raises(TypeError, match=msg):
1206
+ df.lt(df.astype(object))
1207
+
1208
+
1209
+ def test_cast_string_to_complex():
1210
+ # GH 4895
1211
+ expected = pd.DataFrame(["1.0+5j", "1.5-3j"], dtype=complex)
1212
+ result = pd.DataFrame(["1.0+5j", "1.5-3j"]).astype(complex)
1213
+ tm.assert_frame_equal(result, expected)
1214
+
1215
+
1216
+ def test_categorical_complex():
1217
+ result = Categorical([1, 2 + 2j])
1218
+ expected = Categorical([1.0 + 0.0j, 2.0 + 2.0j])
1219
+ tm.assert_categorical_equal(result, expected)
1220
+ result = Categorical([1, 2, 2 + 2j])
1221
+ expected = Categorical([1.0 + 0.0j, 2.0 + 0.0j, 2.0 + 2.0j])
1222
+ tm.assert_categorical_equal(result, expected)
1223
+
1224
+
1225
+ def test_multi_column_dtype_assignment():
1226
+ # GH #27583
1227
+ df = pd.DataFrame({"a": [0.0], "b": 0.0})
1228
+ expected = pd.DataFrame({"a": [0], "b": 0})
1229
+
1230
+ df[["a", "b"]] = 0
1231
+ tm.assert_frame_equal(df, expected)
1232
+
1233
+ df["b"] = 0
1234
+ tm.assert_frame_equal(df, expected)
venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_generic.py ADDED
@@ -0,0 +1,130 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import re
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas.core.dtypes import generic as gt
7
+
8
+ import pandas as pd
9
+ import pandas._testing as tm
10
+
11
+
12
+ class TestABCClasses:
13
+ tuples = [[1, 2, 2], ["red", "blue", "red"]]
14
+ multi_index = pd.MultiIndex.from_arrays(tuples, names=("number", "color"))
15
+ datetime_index = pd.to_datetime(["2000/1/1", "2010/1/1"])
16
+ timedelta_index = pd.to_timedelta(np.arange(5), unit="s")
17
+ period_index = pd.period_range("2000/1/1", "2010/1/1/", freq="M")
18
+ categorical = pd.Categorical([1, 2, 3], categories=[2, 3, 1])
19
+ categorical_df = pd.DataFrame({"values": [1, 2, 3]}, index=categorical)
20
+ df = pd.DataFrame({"names": ["a", "b", "c"]}, index=multi_index)
21
+ sparse_array = pd.arrays.SparseArray(np.random.default_rng(2).standard_normal(10))
22
+
23
+ datetime_array = pd.core.arrays.DatetimeArray._from_sequence(datetime_index)
24
+ timedelta_array = pd.core.arrays.TimedeltaArray._from_sequence(timedelta_index)
25
+
26
+ abc_pairs = [
27
+ ("ABCMultiIndex", multi_index),
28
+ ("ABCDatetimeIndex", datetime_index),
29
+ ("ABCRangeIndex", pd.RangeIndex(3)),
30
+ ("ABCTimedeltaIndex", timedelta_index),
31
+ ("ABCIntervalIndex", pd.interval_range(start=0, end=3)),
32
+ (
33
+ "ABCPeriodArray",
34
+ pd.arrays.PeriodArray([2000, 2001, 2002], dtype="period[D]"),
35
+ ),
36
+ ("ABCNumpyExtensionArray", pd.arrays.NumpyExtensionArray(np.array([0, 1, 2]))),
37
+ ("ABCPeriodIndex", period_index),
38
+ ("ABCCategoricalIndex", categorical_df.index),
39
+ ("ABCSeries", pd.Series([1, 2, 3])),
40
+ ("ABCDataFrame", df),
41
+ ("ABCCategorical", categorical),
42
+ ("ABCDatetimeArray", datetime_array),
43
+ ("ABCTimedeltaArray", timedelta_array),
44
+ ]
45
+
46
+ @pytest.mark.parametrize("abctype1, inst", abc_pairs)
47
+ @pytest.mark.parametrize("abctype2, _", abc_pairs)
48
+ def test_abc_pairs_instance_check(self, abctype1, abctype2, inst, _):
49
+ # GH 38588, 46719
50
+ if abctype1 == abctype2:
51
+ assert isinstance(inst, getattr(gt, abctype2))
52
+ assert not isinstance(type(inst), getattr(gt, abctype2))
53
+ else:
54
+ assert not isinstance(inst, getattr(gt, abctype2))
55
+
56
+ @pytest.mark.parametrize("abctype1, inst", abc_pairs)
57
+ @pytest.mark.parametrize("abctype2, _", abc_pairs)
58
+ def test_abc_pairs_subclass_check(self, abctype1, abctype2, inst, _):
59
+ # GH 38588, 46719
60
+ if abctype1 == abctype2:
61
+ assert issubclass(type(inst), getattr(gt, abctype2))
62
+
63
+ with pytest.raises(
64
+ TypeError, match=re.escape("issubclass() arg 1 must be a class")
65
+ ):
66
+ issubclass(inst, getattr(gt, abctype2))
67
+ else:
68
+ assert not issubclass(type(inst), getattr(gt, abctype2))
69
+
70
+ abc_subclasses = {
71
+ "ABCIndex": [
72
+ abctype
73
+ for abctype, _ in abc_pairs
74
+ if "Index" in abctype and abctype != "ABCIndex"
75
+ ],
76
+ "ABCNDFrame": ["ABCSeries", "ABCDataFrame"],
77
+ "ABCExtensionArray": [
78
+ "ABCCategorical",
79
+ "ABCDatetimeArray",
80
+ "ABCPeriodArray",
81
+ "ABCTimedeltaArray",
82
+ ],
83
+ }
84
+
85
+ @pytest.mark.parametrize("parent, subs", abc_subclasses.items())
86
+ @pytest.mark.parametrize("abctype, inst", abc_pairs)
87
+ def test_abc_hierarchy(self, parent, subs, abctype, inst):
88
+ # GH 38588
89
+ if abctype in subs:
90
+ assert isinstance(inst, getattr(gt, parent))
91
+ else:
92
+ assert not isinstance(inst, getattr(gt, parent))
93
+
94
+ @pytest.mark.parametrize("abctype", [e for e in gt.__dict__ if e.startswith("ABC")])
95
+ def test_abc_coverage(self, abctype):
96
+ # GH 38588
97
+ assert (
98
+ abctype in (e for e, _ in self.abc_pairs) or abctype in self.abc_subclasses
99
+ )
100
+
101
+
102
+ def test_setattr_warnings():
103
+ # GH7175 - GOTCHA: You can't use dot notation to add a column...
104
+ d = {
105
+ "one": pd.Series([1.0, 2.0, 3.0], index=["a", "b", "c"]),
106
+ "two": pd.Series([1.0, 2.0, 3.0, 4.0], index=["a", "b", "c", "d"]),
107
+ }
108
+ df = pd.DataFrame(d)
109
+
110
+ with tm.assert_produces_warning(None):
111
+ # successfully add new column
112
+ # this should not raise a warning
113
+ df["three"] = df.two + 1
114
+ assert df.three.sum() > df.two.sum()
115
+
116
+ with tm.assert_produces_warning(None):
117
+ # successfully modify column in place
118
+ # this should not raise a warning
119
+ df.one += 1
120
+ assert df.one.iloc[0] == 2
121
+
122
+ with tm.assert_produces_warning(None):
123
+ # successfully add an attribute to a series
124
+ # this should not raise a warning
125
+ df.two.not_an_index = [1, 2]
126
+
127
+ with tm.assert_produces_warning(UserWarning):
128
+ # warn when setting column to nonexistent name
129
+ df.four = df.two + 2
130
+ assert df.four.sum() > df.two.sum()
venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_inference.py ADDED
@@ -0,0 +1,2047 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ These the test the public routines exposed in types/common.py
3
+ related to inference and not otherwise tested in types/test_common.py
4
+
5
+ """
6
+ import collections
7
+ from collections import namedtuple
8
+ from collections.abc import Iterator
9
+ from datetime import (
10
+ date,
11
+ datetime,
12
+ time,
13
+ timedelta,
14
+ )
15
+ from decimal import Decimal
16
+ from fractions import Fraction
17
+ from io import StringIO
18
+ import itertools
19
+ from numbers import Number
20
+ import re
21
+ import sys
22
+ from typing import (
23
+ Generic,
24
+ TypeVar,
25
+ )
26
+
27
+ import numpy as np
28
+ import pytest
29
+ import pytz
30
+
31
+ from pandas._libs import (
32
+ lib,
33
+ missing as libmissing,
34
+ ops as libops,
35
+ )
36
+ from pandas.compat.numpy import np_version_gt2
37
+
38
+ from pandas.core.dtypes import inference
39
+ from pandas.core.dtypes.cast import find_result_type
40
+ from pandas.core.dtypes.common import (
41
+ ensure_int32,
42
+ is_bool,
43
+ is_complex,
44
+ is_datetime64_any_dtype,
45
+ is_datetime64_dtype,
46
+ is_datetime64_ns_dtype,
47
+ is_datetime64tz_dtype,
48
+ is_float,
49
+ is_integer,
50
+ is_number,
51
+ is_scalar,
52
+ is_scipy_sparse,
53
+ is_timedelta64_dtype,
54
+ is_timedelta64_ns_dtype,
55
+ )
56
+
57
+ import pandas as pd
58
+ from pandas import (
59
+ Categorical,
60
+ DataFrame,
61
+ DateOffset,
62
+ DatetimeIndex,
63
+ Index,
64
+ Interval,
65
+ Period,
66
+ PeriodIndex,
67
+ Series,
68
+ Timedelta,
69
+ TimedeltaIndex,
70
+ Timestamp,
71
+ )
72
+ import pandas._testing as tm
73
+ from pandas.core.arrays import (
74
+ BooleanArray,
75
+ FloatingArray,
76
+ IntegerArray,
77
+ )
78
+
79
+
80
+ @pytest.fixture(params=[True, False], ids=str)
81
+ def coerce(request):
82
+ return request.param
83
+
84
+
85
+ class MockNumpyLikeArray:
86
+ """
87
+ A class which is numpy-like (e.g. Pint's Quantity) but not actually numpy
88
+
89
+ The key is that it is not actually a numpy array so
90
+ ``util.is_array(mock_numpy_like_array_instance)`` returns ``False``. Other
91
+ important properties are that the class defines a :meth:`__iter__` method
92
+ (so that ``isinstance(abc.Iterable)`` returns ``True``) and has a
93
+ :meth:`ndim` property, as pandas special-cases 0-dimensional arrays in some
94
+ cases.
95
+
96
+ We expect pandas to behave with respect to such duck arrays exactly as
97
+ with real numpy arrays. In particular, a 0-dimensional duck array is *NOT*
98
+ a scalar (`is_scalar(np.array(1)) == False`), but it is not list-like either.
99
+ """
100
+
101
+ def __init__(self, values) -> None:
102
+ self._values = values
103
+
104
+ def __iter__(self) -> Iterator:
105
+ iter_values = iter(self._values)
106
+
107
+ def it_outer():
108
+ yield from iter_values
109
+
110
+ return it_outer()
111
+
112
+ def __len__(self) -> int:
113
+ return len(self._values)
114
+
115
+ def __array__(self, dtype=None, copy=None):
116
+ return np.asarray(self._values, dtype=dtype)
117
+
118
+ @property
119
+ def ndim(self):
120
+ return self._values.ndim
121
+
122
+ @property
123
+ def dtype(self):
124
+ return self._values.dtype
125
+
126
+ @property
127
+ def size(self):
128
+ return self._values.size
129
+
130
+ @property
131
+ def shape(self):
132
+ return self._values.shape
133
+
134
+
135
+ # collect all objects to be tested for list-like-ness; use tuples of objects,
136
+ # whether they are list-like or not (special casing for sets), and their ID
137
+ ll_params = [
138
+ ([1], True, "list"),
139
+ ([], True, "list-empty"),
140
+ ((1,), True, "tuple"),
141
+ ((), True, "tuple-empty"),
142
+ ({"a": 1}, True, "dict"),
143
+ ({}, True, "dict-empty"),
144
+ ({"a", 1}, "set", "set"),
145
+ (set(), "set", "set-empty"),
146
+ (frozenset({"a", 1}), "set", "frozenset"),
147
+ (frozenset(), "set", "frozenset-empty"),
148
+ (iter([1, 2]), True, "iterator"),
149
+ (iter([]), True, "iterator-empty"),
150
+ ((x for x in [1, 2]), True, "generator"),
151
+ ((_ for _ in []), True, "generator-empty"),
152
+ (Series([1]), True, "Series"),
153
+ (Series([], dtype=object), True, "Series-empty"),
154
+ # Series.str will still raise a TypeError if iterated
155
+ (Series(["a"]).str, True, "StringMethods"),
156
+ (Series([], dtype="O").str, True, "StringMethods-empty"),
157
+ (Index([1]), True, "Index"),
158
+ (Index([]), True, "Index-empty"),
159
+ (DataFrame([[1]]), True, "DataFrame"),
160
+ (DataFrame(), True, "DataFrame-empty"),
161
+ (np.ndarray((2,) * 1), True, "ndarray-1d"),
162
+ (np.array([]), True, "ndarray-1d-empty"),
163
+ (np.ndarray((2,) * 2), True, "ndarray-2d"),
164
+ (np.array([[]]), True, "ndarray-2d-empty"),
165
+ (np.ndarray((2,) * 3), True, "ndarray-3d"),
166
+ (np.array([[[]]]), True, "ndarray-3d-empty"),
167
+ (np.ndarray((2,) * 4), True, "ndarray-4d"),
168
+ (np.array([[[[]]]]), True, "ndarray-4d-empty"),
169
+ (np.array(2), False, "ndarray-0d"),
170
+ (MockNumpyLikeArray(np.ndarray((2,) * 1)), True, "duck-ndarray-1d"),
171
+ (MockNumpyLikeArray(np.array([])), True, "duck-ndarray-1d-empty"),
172
+ (MockNumpyLikeArray(np.ndarray((2,) * 2)), True, "duck-ndarray-2d"),
173
+ (MockNumpyLikeArray(np.array([[]])), True, "duck-ndarray-2d-empty"),
174
+ (MockNumpyLikeArray(np.ndarray((2,) * 3)), True, "duck-ndarray-3d"),
175
+ (MockNumpyLikeArray(np.array([[[]]])), True, "duck-ndarray-3d-empty"),
176
+ (MockNumpyLikeArray(np.ndarray((2,) * 4)), True, "duck-ndarray-4d"),
177
+ (MockNumpyLikeArray(np.array([[[[]]]])), True, "duck-ndarray-4d-empty"),
178
+ (MockNumpyLikeArray(np.array(2)), False, "duck-ndarray-0d"),
179
+ (1, False, "int"),
180
+ (b"123", False, "bytes"),
181
+ (b"", False, "bytes-empty"),
182
+ ("123", False, "string"),
183
+ ("", False, "string-empty"),
184
+ (str, False, "string-type"),
185
+ (object(), False, "object"),
186
+ (np.nan, False, "NaN"),
187
+ (None, False, "None"),
188
+ ]
189
+ objs, expected, ids = zip(*ll_params)
190
+
191
+
192
+ @pytest.fixture(params=zip(objs, expected), ids=ids)
193
+ def maybe_list_like(request):
194
+ return request.param
195
+
196
+
197
+ def test_is_list_like(maybe_list_like):
198
+ obj, expected = maybe_list_like
199
+ expected = True if expected == "set" else expected
200
+ assert inference.is_list_like(obj) == expected
201
+
202
+
203
+ def test_is_list_like_disallow_sets(maybe_list_like):
204
+ obj, expected = maybe_list_like
205
+ expected = False if expected == "set" else expected
206
+ assert inference.is_list_like(obj, allow_sets=False) == expected
207
+
208
+
209
+ def test_is_list_like_recursion():
210
+ # GH 33721
211
+ # interpreter would crash with SIGABRT
212
+ def list_like():
213
+ inference.is_list_like([])
214
+ list_like()
215
+
216
+ rec_limit = sys.getrecursionlimit()
217
+ try:
218
+ # Limit to avoid stack overflow on Windows CI
219
+ sys.setrecursionlimit(100)
220
+ with tm.external_error_raised(RecursionError):
221
+ list_like()
222
+ finally:
223
+ sys.setrecursionlimit(rec_limit)
224
+
225
+
226
+ def test_is_list_like_iter_is_none():
227
+ # GH 43373
228
+ # is_list_like was yielding false positives with __iter__ == None
229
+ class NotListLike:
230
+ def __getitem__(self, item):
231
+ return self
232
+
233
+ __iter__ = None
234
+
235
+ assert not inference.is_list_like(NotListLike())
236
+
237
+
238
+ def test_is_list_like_generic():
239
+ # GH 49649
240
+ # is_list_like was yielding false positives for Generic classes in python 3.11
241
+ T = TypeVar("T")
242
+
243
+ class MyDataFrame(DataFrame, Generic[T]):
244
+ ...
245
+
246
+ tstc = MyDataFrame[int]
247
+ tst = MyDataFrame[int]({"x": [1, 2, 3]})
248
+
249
+ assert not inference.is_list_like(tstc)
250
+ assert isinstance(tst, DataFrame)
251
+ assert inference.is_list_like(tst)
252
+
253
+
254
+ def test_is_sequence():
255
+ is_seq = inference.is_sequence
256
+ assert is_seq((1, 2))
257
+ assert is_seq([1, 2])
258
+ assert not is_seq("abcd")
259
+ assert not is_seq(np.int64)
260
+
261
+ class A:
262
+ def __getitem__(self, item):
263
+ return 1
264
+
265
+ assert not is_seq(A())
266
+
267
+
268
+ def test_is_array_like():
269
+ assert inference.is_array_like(Series([], dtype=object))
270
+ assert inference.is_array_like(Series([1, 2]))
271
+ assert inference.is_array_like(np.array(["a", "b"]))
272
+ assert inference.is_array_like(Index(["2016-01-01"]))
273
+ assert inference.is_array_like(np.array([2, 3]))
274
+ assert inference.is_array_like(MockNumpyLikeArray(np.array([2, 3])))
275
+
276
+ class DtypeList(list):
277
+ dtype = "special"
278
+
279
+ assert inference.is_array_like(DtypeList())
280
+
281
+ assert not inference.is_array_like([1, 2, 3])
282
+ assert not inference.is_array_like(())
283
+ assert not inference.is_array_like("foo")
284
+ assert not inference.is_array_like(123)
285
+
286
+
287
+ @pytest.mark.parametrize(
288
+ "inner",
289
+ [
290
+ [],
291
+ [1],
292
+ (1,),
293
+ (1, 2),
294
+ {"a": 1},
295
+ {1, "a"},
296
+ Series([1]),
297
+ Series([], dtype=object),
298
+ Series(["a"]).str,
299
+ (x for x in range(5)),
300
+ ],
301
+ )
302
+ @pytest.mark.parametrize("outer", [list, Series, np.array, tuple])
303
+ def test_is_nested_list_like_passes(inner, outer):
304
+ result = outer([inner for _ in range(5)])
305
+ assert inference.is_list_like(result)
306
+
307
+
308
+ @pytest.mark.parametrize(
309
+ "obj",
310
+ [
311
+ "abc",
312
+ [],
313
+ [1],
314
+ (1,),
315
+ ["a"],
316
+ "a",
317
+ {"a"},
318
+ [1, 2, 3],
319
+ Series([1]),
320
+ DataFrame({"A": [1]}),
321
+ ([1, 2] for _ in range(5)),
322
+ ],
323
+ )
324
+ def test_is_nested_list_like_fails(obj):
325
+ assert not inference.is_nested_list_like(obj)
326
+
327
+
328
+ @pytest.mark.parametrize("ll", [{}, {"A": 1}, Series([1]), collections.defaultdict()])
329
+ def test_is_dict_like_passes(ll):
330
+ assert inference.is_dict_like(ll)
331
+
332
+
333
+ @pytest.mark.parametrize(
334
+ "ll",
335
+ [
336
+ "1",
337
+ 1,
338
+ [1, 2],
339
+ (1, 2),
340
+ range(2),
341
+ Index([1]),
342
+ dict,
343
+ collections.defaultdict,
344
+ Series,
345
+ ],
346
+ )
347
+ def test_is_dict_like_fails(ll):
348
+ assert not inference.is_dict_like(ll)
349
+
350
+
351
+ @pytest.mark.parametrize("has_keys", [True, False])
352
+ @pytest.mark.parametrize("has_getitem", [True, False])
353
+ @pytest.mark.parametrize("has_contains", [True, False])
354
+ def test_is_dict_like_duck_type(has_keys, has_getitem, has_contains):
355
+ class DictLike:
356
+ def __init__(self, d) -> None:
357
+ self.d = d
358
+
359
+ if has_keys:
360
+
361
+ def keys(self):
362
+ return self.d.keys()
363
+
364
+ if has_getitem:
365
+
366
+ def __getitem__(self, key):
367
+ return self.d.__getitem__(key)
368
+
369
+ if has_contains:
370
+
371
+ def __contains__(self, key) -> bool:
372
+ return self.d.__contains__(key)
373
+
374
+ d = DictLike({1: 2})
375
+ result = inference.is_dict_like(d)
376
+ expected = has_keys and has_getitem and has_contains
377
+
378
+ assert result is expected
379
+
380
+
381
+ def test_is_file_like():
382
+ class MockFile:
383
+ pass
384
+
385
+ is_file = inference.is_file_like
386
+
387
+ data = StringIO("data")
388
+ assert is_file(data)
389
+
390
+ # No read / write attributes
391
+ # No iterator attributes
392
+ m = MockFile()
393
+ assert not is_file(m)
394
+
395
+ MockFile.write = lambda self: 0
396
+
397
+ # Write attribute but not an iterator
398
+ m = MockFile()
399
+ assert not is_file(m)
400
+
401
+ # gh-16530: Valid iterator just means we have the
402
+ # __iter__ attribute for our purposes.
403
+ MockFile.__iter__ = lambda self: self
404
+
405
+ # Valid write-only file
406
+ m = MockFile()
407
+ assert is_file(m)
408
+
409
+ del MockFile.write
410
+ MockFile.read = lambda self: 0
411
+
412
+ # Valid read-only file
413
+ m = MockFile()
414
+ assert is_file(m)
415
+
416
+ # Iterator but no read / write attributes
417
+ data = [1, 2, 3]
418
+ assert not is_file(data)
419
+
420
+
421
+ test_tuple = collections.namedtuple("test_tuple", ["a", "b", "c"])
422
+
423
+
424
+ @pytest.mark.parametrize("ll", [test_tuple(1, 2, 3)])
425
+ def test_is_names_tuple_passes(ll):
426
+ assert inference.is_named_tuple(ll)
427
+
428
+
429
+ @pytest.mark.parametrize("ll", [(1, 2, 3), "a", Series({"pi": 3.14})])
430
+ def test_is_names_tuple_fails(ll):
431
+ assert not inference.is_named_tuple(ll)
432
+
433
+
434
+ def test_is_hashable():
435
+ # all new-style classes are hashable by default
436
+ class HashableClass:
437
+ pass
438
+
439
+ class UnhashableClass1:
440
+ __hash__ = None
441
+
442
+ class UnhashableClass2:
443
+ def __hash__(self):
444
+ raise TypeError("Not hashable")
445
+
446
+ hashable = (1, 3.14, np.float64(3.14), "a", (), (1,), HashableClass())
447
+ not_hashable = ([], UnhashableClass1())
448
+ abc_hashable_not_really_hashable = (([],), UnhashableClass2())
449
+
450
+ for i in hashable:
451
+ assert inference.is_hashable(i)
452
+ for i in not_hashable:
453
+ assert not inference.is_hashable(i)
454
+ for i in abc_hashable_not_really_hashable:
455
+ assert not inference.is_hashable(i)
456
+
457
+ # numpy.array is no longer collections.abc.Hashable as of
458
+ # https://github.com/numpy/numpy/pull/5326, just test
459
+ # is_hashable()
460
+ assert not inference.is_hashable(np.array([]))
461
+
462
+
463
+ @pytest.mark.parametrize("ll", [re.compile("ad")])
464
+ def test_is_re_passes(ll):
465
+ assert inference.is_re(ll)
466
+
467
+
468
+ @pytest.mark.parametrize("ll", ["x", 2, 3, object()])
469
+ def test_is_re_fails(ll):
470
+ assert not inference.is_re(ll)
471
+
472
+
473
+ @pytest.mark.parametrize(
474
+ "ll", [r"a", "x", r"asdf", re.compile("adsf"), r"\u2233\s*", re.compile(r"")]
475
+ )
476
+ def test_is_recompilable_passes(ll):
477
+ assert inference.is_re_compilable(ll)
478
+
479
+
480
+ @pytest.mark.parametrize("ll", [1, [], object()])
481
+ def test_is_recompilable_fails(ll):
482
+ assert not inference.is_re_compilable(ll)
483
+
484
+
485
+ class TestInference:
486
+ @pytest.mark.parametrize(
487
+ "arr",
488
+ [
489
+ np.array(list("abc"), dtype="S1"),
490
+ np.array(list("abc"), dtype="S1").astype(object),
491
+ [b"a", np.nan, b"c"],
492
+ ],
493
+ )
494
+ def test_infer_dtype_bytes(self, arr):
495
+ result = lib.infer_dtype(arr, skipna=True)
496
+ assert result == "bytes"
497
+
498
+ @pytest.mark.parametrize(
499
+ "value, expected",
500
+ [
501
+ (float("inf"), True),
502
+ (np.inf, True),
503
+ (-np.inf, False),
504
+ (1, False),
505
+ ("a", False),
506
+ ],
507
+ )
508
+ def test_isposinf_scalar(self, value, expected):
509
+ # GH 11352
510
+ result = libmissing.isposinf_scalar(value)
511
+ assert result is expected
512
+
513
+ @pytest.mark.parametrize(
514
+ "value, expected",
515
+ [
516
+ (float("-inf"), True),
517
+ (-np.inf, True),
518
+ (np.inf, False),
519
+ (1, False),
520
+ ("a", False),
521
+ ],
522
+ )
523
+ def test_isneginf_scalar(self, value, expected):
524
+ result = libmissing.isneginf_scalar(value)
525
+ assert result is expected
526
+
527
+ @pytest.mark.parametrize(
528
+ "convert_to_masked_nullable, exp",
529
+ [
530
+ (
531
+ True,
532
+ BooleanArray(
533
+ np.array([True, False], dtype="bool"), np.array([False, True])
534
+ ),
535
+ ),
536
+ (False, np.array([True, np.nan], dtype="object")),
537
+ ],
538
+ )
539
+ def test_maybe_convert_nullable_boolean(self, convert_to_masked_nullable, exp):
540
+ # GH 40687
541
+ arr = np.array([True, np.nan], dtype=object)
542
+ result = libops.maybe_convert_bool(
543
+ arr, set(), convert_to_masked_nullable=convert_to_masked_nullable
544
+ )
545
+ if convert_to_masked_nullable:
546
+ tm.assert_extension_array_equal(BooleanArray(*result), exp)
547
+ else:
548
+ result = result[0]
549
+ tm.assert_numpy_array_equal(result, exp)
550
+
551
+ @pytest.mark.parametrize("convert_to_masked_nullable", [True, False])
552
+ @pytest.mark.parametrize("coerce_numeric", [True, False])
553
+ @pytest.mark.parametrize(
554
+ "infinity", ["inf", "inF", "iNf", "Inf", "iNF", "InF", "INf", "INF"]
555
+ )
556
+ @pytest.mark.parametrize("prefix", ["", "-", "+"])
557
+ def test_maybe_convert_numeric_infinities(
558
+ self, coerce_numeric, infinity, prefix, convert_to_masked_nullable
559
+ ):
560
+ # see gh-13274
561
+ result, _ = lib.maybe_convert_numeric(
562
+ np.array([prefix + infinity], dtype=object),
563
+ na_values={"", "NULL", "nan"},
564
+ coerce_numeric=coerce_numeric,
565
+ convert_to_masked_nullable=convert_to_masked_nullable,
566
+ )
567
+ expected = np.array([np.inf if prefix in ["", "+"] else -np.inf])
568
+ tm.assert_numpy_array_equal(result, expected)
569
+
570
+ @pytest.mark.parametrize("convert_to_masked_nullable", [True, False])
571
+ def test_maybe_convert_numeric_infinities_raises(self, convert_to_masked_nullable):
572
+ msg = "Unable to parse string"
573
+ with pytest.raises(ValueError, match=msg):
574
+ lib.maybe_convert_numeric(
575
+ np.array(["foo_inf"], dtype=object),
576
+ na_values={"", "NULL", "nan"},
577
+ coerce_numeric=False,
578
+ convert_to_masked_nullable=convert_to_masked_nullable,
579
+ )
580
+
581
+ @pytest.mark.parametrize("convert_to_masked_nullable", [True, False])
582
+ def test_maybe_convert_numeric_post_floatify_nan(
583
+ self, coerce, convert_to_masked_nullable
584
+ ):
585
+ # see gh-13314
586
+ data = np.array(["1.200", "-999.000", "4.500"], dtype=object)
587
+ expected = np.array([1.2, np.nan, 4.5], dtype=np.float64)
588
+ nan_values = {-999, -999.0}
589
+
590
+ out = lib.maybe_convert_numeric(
591
+ data,
592
+ nan_values,
593
+ coerce,
594
+ convert_to_masked_nullable=convert_to_masked_nullable,
595
+ )
596
+ if convert_to_masked_nullable:
597
+ expected = FloatingArray(expected, np.isnan(expected))
598
+ tm.assert_extension_array_equal(expected, FloatingArray(*out))
599
+ else:
600
+ out = out[0]
601
+ tm.assert_numpy_array_equal(out, expected)
602
+
603
+ def test_convert_infs(self):
604
+ arr = np.array(["inf", "inf", "inf"], dtype="O")
605
+ result, _ = lib.maybe_convert_numeric(arr, set(), False)
606
+ assert result.dtype == np.float64
607
+
608
+ arr = np.array(["-inf", "-inf", "-inf"], dtype="O")
609
+ result, _ = lib.maybe_convert_numeric(arr, set(), False)
610
+ assert result.dtype == np.float64
611
+
612
+ def test_scientific_no_exponent(self):
613
+ # See PR 12215
614
+ arr = np.array(["42E", "2E", "99e", "6e"], dtype="O")
615
+ result, _ = lib.maybe_convert_numeric(arr, set(), False, True)
616
+ assert np.all(np.isnan(result))
617
+
618
+ def test_convert_non_hashable(self):
619
+ # GH13324
620
+ # make sure that we are handing non-hashables
621
+ arr = np.array([[10.0, 2], 1.0, "apple"], dtype=object)
622
+ result, _ = lib.maybe_convert_numeric(arr, set(), False, True)
623
+ tm.assert_numpy_array_equal(result, np.array([np.nan, 1.0, np.nan]))
624
+
625
+ def test_convert_numeric_uint64(self):
626
+ arr = np.array([2**63], dtype=object)
627
+ exp = np.array([2**63], dtype=np.uint64)
628
+ tm.assert_numpy_array_equal(lib.maybe_convert_numeric(arr, set())[0], exp)
629
+
630
+ arr = np.array([str(2**63)], dtype=object)
631
+ exp = np.array([2**63], dtype=np.uint64)
632
+ tm.assert_numpy_array_equal(lib.maybe_convert_numeric(arr, set())[0], exp)
633
+
634
+ arr = np.array([np.uint64(2**63)], dtype=object)
635
+ exp = np.array([2**63], dtype=np.uint64)
636
+ tm.assert_numpy_array_equal(lib.maybe_convert_numeric(arr, set())[0], exp)
637
+
638
+ @pytest.mark.parametrize(
639
+ "arr",
640
+ [
641
+ np.array([2**63, np.nan], dtype=object),
642
+ np.array([str(2**63), np.nan], dtype=object),
643
+ np.array([np.nan, 2**63], dtype=object),
644
+ np.array([np.nan, str(2**63)], dtype=object),
645
+ ],
646
+ )
647
+ def test_convert_numeric_uint64_nan(self, coerce, arr):
648
+ expected = arr.astype(float) if coerce else arr.copy()
649
+ result, _ = lib.maybe_convert_numeric(arr, set(), coerce_numeric=coerce)
650
+ tm.assert_almost_equal(result, expected)
651
+
652
+ @pytest.mark.parametrize("convert_to_masked_nullable", [True, False])
653
+ def test_convert_numeric_uint64_nan_values(
654
+ self, coerce, convert_to_masked_nullable
655
+ ):
656
+ arr = np.array([2**63, 2**63 + 1], dtype=object)
657
+ na_values = {2**63}
658
+
659
+ expected = (
660
+ np.array([np.nan, 2**63 + 1], dtype=float) if coerce else arr.copy()
661
+ )
662
+ result = lib.maybe_convert_numeric(
663
+ arr,
664
+ na_values,
665
+ coerce_numeric=coerce,
666
+ convert_to_masked_nullable=convert_to_masked_nullable,
667
+ )
668
+ if convert_to_masked_nullable and coerce:
669
+ expected = IntegerArray(
670
+ np.array([0, 2**63 + 1], dtype="u8"),
671
+ np.array([True, False], dtype="bool"),
672
+ )
673
+ result = IntegerArray(*result)
674
+ else:
675
+ result = result[0] # discard mask
676
+ tm.assert_almost_equal(result, expected)
677
+
678
+ @pytest.mark.parametrize(
679
+ "case",
680
+ [
681
+ np.array([2**63, -1], dtype=object),
682
+ np.array([str(2**63), -1], dtype=object),
683
+ np.array([str(2**63), str(-1)], dtype=object),
684
+ np.array([-1, 2**63], dtype=object),
685
+ np.array([-1, str(2**63)], dtype=object),
686
+ np.array([str(-1), str(2**63)], dtype=object),
687
+ ],
688
+ )
689
+ @pytest.mark.parametrize("convert_to_masked_nullable", [True, False])
690
+ def test_convert_numeric_int64_uint64(
691
+ self, case, coerce, convert_to_masked_nullable
692
+ ):
693
+ expected = case.astype(float) if coerce else case.copy()
694
+ result, _ = lib.maybe_convert_numeric(
695
+ case,
696
+ set(),
697
+ coerce_numeric=coerce,
698
+ convert_to_masked_nullable=convert_to_masked_nullable,
699
+ )
700
+
701
+ tm.assert_almost_equal(result, expected)
702
+
703
+ @pytest.mark.parametrize("convert_to_masked_nullable", [True, False])
704
+ def test_convert_numeric_string_uint64(self, convert_to_masked_nullable):
705
+ # GH32394
706
+ result = lib.maybe_convert_numeric(
707
+ np.array(["uint64"], dtype=object),
708
+ set(),
709
+ coerce_numeric=True,
710
+ convert_to_masked_nullable=convert_to_masked_nullable,
711
+ )
712
+ if convert_to_masked_nullable:
713
+ result = FloatingArray(*result)
714
+ else:
715
+ result = result[0]
716
+ assert np.isnan(result)
717
+
718
+ @pytest.mark.parametrize("value", [-(2**63) - 1, 2**64])
719
+ def test_convert_int_overflow(self, value):
720
+ # see gh-18584
721
+ arr = np.array([value], dtype=object)
722
+ result = lib.maybe_convert_objects(arr)
723
+ tm.assert_numpy_array_equal(arr, result)
724
+
725
+ @pytest.mark.parametrize("val", [None, np.nan, float("nan")])
726
+ @pytest.mark.parametrize("dtype", ["M8[ns]", "m8[ns]"])
727
+ def test_maybe_convert_objects_nat_inference(self, val, dtype):
728
+ dtype = np.dtype(dtype)
729
+ vals = np.array([pd.NaT, val], dtype=object)
730
+ result = lib.maybe_convert_objects(
731
+ vals,
732
+ convert_non_numeric=True,
733
+ dtype_if_all_nat=dtype,
734
+ )
735
+ assert result.dtype == dtype
736
+ assert np.isnat(result).all()
737
+
738
+ result = lib.maybe_convert_objects(
739
+ vals[::-1],
740
+ convert_non_numeric=True,
741
+ dtype_if_all_nat=dtype,
742
+ )
743
+ assert result.dtype == dtype
744
+ assert np.isnat(result).all()
745
+
746
+ @pytest.mark.parametrize(
747
+ "value, expected_dtype",
748
+ [
749
+ # see gh-4471
750
+ ([2**63], np.uint64),
751
+ # NumPy bug: can't compare uint64 to int64, as that
752
+ # results in both casting to float64, so we should
753
+ # make sure that this function is robust against it
754
+ ([np.uint64(2**63)], np.uint64),
755
+ ([2, -1], np.int64),
756
+ ([2**63, -1], object),
757
+ # GH#47294
758
+ ([np.uint8(1)], np.uint8),
759
+ ([np.uint16(1)], np.uint16),
760
+ ([np.uint32(1)], np.uint32),
761
+ ([np.uint64(1)], np.uint64),
762
+ ([np.uint8(2), np.uint16(1)], np.uint16),
763
+ ([np.uint32(2), np.uint16(1)], np.uint32),
764
+ ([np.uint32(2), -1], object),
765
+ ([np.uint32(2), 1], np.uint64),
766
+ ([np.uint32(2), np.int32(1)], object),
767
+ ],
768
+ )
769
+ def test_maybe_convert_objects_uint(self, value, expected_dtype):
770
+ arr = np.array(value, dtype=object)
771
+ exp = np.array(value, dtype=expected_dtype)
772
+ tm.assert_numpy_array_equal(lib.maybe_convert_objects(arr), exp)
773
+
774
+ def test_maybe_convert_objects_datetime(self):
775
+ # GH27438
776
+ arr = np.array(
777
+ [np.datetime64("2000-01-01"), np.timedelta64(1, "s")], dtype=object
778
+ )
779
+ exp = arr.copy()
780
+ out = lib.maybe_convert_objects(arr, convert_non_numeric=True)
781
+ tm.assert_numpy_array_equal(out, exp)
782
+
783
+ arr = np.array([pd.NaT, np.timedelta64(1, "s")], dtype=object)
784
+ exp = np.array([np.timedelta64("NaT"), np.timedelta64(1, "s")], dtype="m8[ns]")
785
+ out = lib.maybe_convert_objects(arr, convert_non_numeric=True)
786
+ tm.assert_numpy_array_equal(out, exp)
787
+
788
+ # with convert_non_numeric=True, the nan is a valid NA value for td64
789
+ arr = np.array([np.timedelta64(1, "s"), np.nan], dtype=object)
790
+ exp = exp[::-1]
791
+ out = lib.maybe_convert_objects(arr, convert_non_numeric=True)
792
+ tm.assert_numpy_array_equal(out, exp)
793
+
794
+ def test_maybe_convert_objects_dtype_if_all_nat(self):
795
+ arr = np.array([pd.NaT, pd.NaT], dtype=object)
796
+ out = lib.maybe_convert_objects(arr, convert_non_numeric=True)
797
+ # no dtype_if_all_nat passed -> we dont guess
798
+ tm.assert_numpy_array_equal(out, arr)
799
+
800
+ out = lib.maybe_convert_objects(
801
+ arr,
802
+ convert_non_numeric=True,
803
+ dtype_if_all_nat=np.dtype("timedelta64[ns]"),
804
+ )
805
+ exp = np.array(["NaT", "NaT"], dtype="timedelta64[ns]")
806
+ tm.assert_numpy_array_equal(out, exp)
807
+
808
+ out = lib.maybe_convert_objects(
809
+ arr,
810
+ convert_non_numeric=True,
811
+ dtype_if_all_nat=np.dtype("datetime64[ns]"),
812
+ )
813
+ exp = np.array(["NaT", "NaT"], dtype="datetime64[ns]")
814
+ tm.assert_numpy_array_equal(out, exp)
815
+
816
+ def test_maybe_convert_objects_dtype_if_all_nat_invalid(self):
817
+ # we accept datetime64[ns], timedelta64[ns], and EADtype
818
+ arr = np.array([pd.NaT, pd.NaT], dtype=object)
819
+
820
+ with pytest.raises(ValueError, match="int64"):
821
+ lib.maybe_convert_objects(
822
+ arr,
823
+ convert_non_numeric=True,
824
+ dtype_if_all_nat=np.dtype("int64"),
825
+ )
826
+
827
+ @pytest.mark.parametrize("dtype", ["datetime64[ns]", "timedelta64[ns]"])
828
+ def test_maybe_convert_objects_datetime_overflow_safe(self, dtype):
829
+ stamp = datetime(2363, 10, 4) # Enterprise-D launch date
830
+ if dtype == "timedelta64[ns]":
831
+ stamp = stamp - datetime(1970, 1, 1)
832
+ arr = np.array([stamp], dtype=object)
833
+
834
+ out = lib.maybe_convert_objects(arr, convert_non_numeric=True)
835
+ # no OutOfBoundsDatetime/OutOfBoundsTimedeltas
836
+ tm.assert_numpy_array_equal(out, arr)
837
+
838
+ def test_maybe_convert_objects_mixed_datetimes(self):
839
+ ts = Timestamp("now")
840
+ vals = [ts, ts.to_pydatetime(), ts.to_datetime64(), pd.NaT, np.nan, None]
841
+
842
+ for data in itertools.permutations(vals):
843
+ data = np.array(list(data), dtype=object)
844
+ expected = DatetimeIndex(data)._data._ndarray
845
+ result = lib.maybe_convert_objects(data, convert_non_numeric=True)
846
+ tm.assert_numpy_array_equal(result, expected)
847
+
848
+ def test_maybe_convert_objects_timedelta64_nat(self):
849
+ obj = np.timedelta64("NaT", "ns")
850
+ arr = np.array([obj], dtype=object)
851
+ assert arr[0] is obj
852
+
853
+ result = lib.maybe_convert_objects(arr, convert_non_numeric=True)
854
+
855
+ expected = np.array([obj], dtype="m8[ns]")
856
+ tm.assert_numpy_array_equal(result, expected)
857
+
858
+ @pytest.mark.parametrize(
859
+ "exp",
860
+ [
861
+ IntegerArray(np.array([2, 0], dtype="i8"), np.array([False, True])),
862
+ IntegerArray(np.array([2, 0], dtype="int64"), np.array([False, True])),
863
+ ],
864
+ )
865
+ def test_maybe_convert_objects_nullable_integer(self, exp):
866
+ # GH27335
867
+ arr = np.array([2, np.nan], dtype=object)
868
+ result = lib.maybe_convert_objects(arr, convert_to_nullable_dtype=True)
869
+
870
+ tm.assert_extension_array_equal(result, exp)
871
+
872
+ @pytest.mark.parametrize(
873
+ "dtype, val", [("int64", 1), ("uint64", np.iinfo(np.int64).max + 1)]
874
+ )
875
+ def test_maybe_convert_objects_nullable_none(self, dtype, val):
876
+ # GH#50043
877
+ arr = np.array([val, None, 3], dtype="object")
878
+ result = lib.maybe_convert_objects(arr, convert_to_nullable_dtype=True)
879
+ expected = IntegerArray(
880
+ np.array([val, 0, 3], dtype=dtype), np.array([False, True, False])
881
+ )
882
+ tm.assert_extension_array_equal(result, expected)
883
+
884
+ @pytest.mark.parametrize(
885
+ "convert_to_masked_nullable, exp",
886
+ [
887
+ (True, IntegerArray(np.array([2, 0], dtype="i8"), np.array([False, True]))),
888
+ (False, np.array([2, np.nan], dtype="float64")),
889
+ ],
890
+ )
891
+ def test_maybe_convert_numeric_nullable_integer(
892
+ self, convert_to_masked_nullable, exp
893
+ ):
894
+ # GH 40687
895
+ arr = np.array([2, np.nan], dtype=object)
896
+ result = lib.maybe_convert_numeric(
897
+ arr, set(), convert_to_masked_nullable=convert_to_masked_nullable
898
+ )
899
+ if convert_to_masked_nullable:
900
+ result = IntegerArray(*result)
901
+ tm.assert_extension_array_equal(result, exp)
902
+ else:
903
+ result = result[0]
904
+ tm.assert_numpy_array_equal(result, exp)
905
+
906
+ @pytest.mark.parametrize(
907
+ "convert_to_masked_nullable, exp",
908
+ [
909
+ (
910
+ True,
911
+ FloatingArray(
912
+ np.array([2.0, 0.0], dtype="float64"), np.array([False, True])
913
+ ),
914
+ ),
915
+ (False, np.array([2.0, np.nan], dtype="float64")),
916
+ ],
917
+ )
918
+ def test_maybe_convert_numeric_floating_array(
919
+ self, convert_to_masked_nullable, exp
920
+ ):
921
+ # GH 40687
922
+ arr = np.array([2.0, np.nan], dtype=object)
923
+ result = lib.maybe_convert_numeric(
924
+ arr, set(), convert_to_masked_nullable=convert_to_masked_nullable
925
+ )
926
+ if convert_to_masked_nullable:
927
+ tm.assert_extension_array_equal(FloatingArray(*result), exp)
928
+ else:
929
+ result = result[0]
930
+ tm.assert_numpy_array_equal(result, exp)
931
+
932
+ def test_maybe_convert_objects_bool_nan(self):
933
+ # GH32146
934
+ ind = Index([True, False, np.nan], dtype=object)
935
+ exp = np.array([True, False, np.nan], dtype=object)
936
+ out = lib.maybe_convert_objects(ind.values, safe=1)
937
+ tm.assert_numpy_array_equal(out, exp)
938
+
939
+ def test_maybe_convert_objects_nullable_boolean(self):
940
+ # GH50047
941
+ arr = np.array([True, False], dtype=object)
942
+ exp = np.array([True, False])
943
+ out = lib.maybe_convert_objects(arr, convert_to_nullable_dtype=True)
944
+ tm.assert_numpy_array_equal(out, exp)
945
+
946
+ arr = np.array([True, False, pd.NaT], dtype=object)
947
+ exp = np.array([True, False, pd.NaT], dtype=object)
948
+ out = lib.maybe_convert_objects(arr, convert_to_nullable_dtype=True)
949
+ tm.assert_numpy_array_equal(out, exp)
950
+
951
+ @pytest.mark.parametrize("val", [None, np.nan])
952
+ def test_maybe_convert_objects_nullable_boolean_na(self, val):
953
+ # GH50047
954
+ arr = np.array([True, False, val], dtype=object)
955
+ exp = BooleanArray(
956
+ np.array([True, False, False]), np.array([False, False, True])
957
+ )
958
+ out = lib.maybe_convert_objects(arr, convert_to_nullable_dtype=True)
959
+ tm.assert_extension_array_equal(out, exp)
960
+
961
+ @pytest.mark.parametrize(
962
+ "data0",
963
+ [
964
+ True,
965
+ 1,
966
+ 1.0,
967
+ 1.0 + 1.0j,
968
+ np.int8(1),
969
+ np.int16(1),
970
+ np.int32(1),
971
+ np.int64(1),
972
+ np.float16(1),
973
+ np.float32(1),
974
+ np.float64(1),
975
+ np.complex64(1),
976
+ np.complex128(1),
977
+ ],
978
+ )
979
+ @pytest.mark.parametrize(
980
+ "data1",
981
+ [
982
+ True,
983
+ 1,
984
+ 1.0,
985
+ 1.0 + 1.0j,
986
+ np.int8(1),
987
+ np.int16(1),
988
+ np.int32(1),
989
+ np.int64(1),
990
+ np.float16(1),
991
+ np.float32(1),
992
+ np.float64(1),
993
+ np.complex64(1),
994
+ np.complex128(1),
995
+ ],
996
+ )
997
+ def test_maybe_convert_objects_itemsize(self, data0, data1):
998
+ # GH 40908
999
+ data = [data0, data1]
1000
+ arr = np.array(data, dtype="object")
1001
+
1002
+ common_kind = np.result_type(type(data0), type(data1)).kind
1003
+ kind0 = "python" if not hasattr(data0, "dtype") else data0.dtype.kind
1004
+ kind1 = "python" if not hasattr(data1, "dtype") else data1.dtype.kind
1005
+ if kind0 != "python" and kind1 != "python":
1006
+ kind = common_kind
1007
+ itemsize = max(data0.dtype.itemsize, data1.dtype.itemsize)
1008
+ elif is_bool(data0) or is_bool(data1):
1009
+ kind = "bool" if (is_bool(data0) and is_bool(data1)) else "object"
1010
+ itemsize = ""
1011
+ elif is_complex(data0) or is_complex(data1):
1012
+ kind = common_kind
1013
+ itemsize = 16
1014
+ else:
1015
+ kind = common_kind
1016
+ itemsize = 8
1017
+
1018
+ expected = np.array(data, dtype=f"{kind}{itemsize}")
1019
+ result = lib.maybe_convert_objects(arr)
1020
+ tm.assert_numpy_array_equal(result, expected)
1021
+
1022
+ def test_mixed_dtypes_remain_object_array(self):
1023
+ # GH14956
1024
+ arr = np.array([datetime(2015, 1, 1, tzinfo=pytz.utc), 1], dtype=object)
1025
+ result = lib.maybe_convert_objects(arr, convert_non_numeric=True)
1026
+ tm.assert_numpy_array_equal(result, arr)
1027
+
1028
+ @pytest.mark.parametrize(
1029
+ "idx",
1030
+ [
1031
+ pd.IntervalIndex.from_breaks(range(5), closed="both"),
1032
+ pd.period_range("2016-01-01", periods=3, freq="D"),
1033
+ ],
1034
+ )
1035
+ def test_maybe_convert_objects_ea(self, idx):
1036
+ result = lib.maybe_convert_objects(
1037
+ np.array(idx, dtype=object),
1038
+ convert_non_numeric=True,
1039
+ )
1040
+ tm.assert_extension_array_equal(result, idx._data)
1041
+
1042
+
1043
+ class TestTypeInference:
1044
+ # Dummy class used for testing with Python objects
1045
+ class Dummy:
1046
+ pass
1047
+
1048
+ def test_inferred_dtype_fixture(self, any_skipna_inferred_dtype):
1049
+ # see pandas/conftest.py
1050
+ inferred_dtype, values = any_skipna_inferred_dtype
1051
+
1052
+ # make sure the inferred dtype of the fixture is as requested
1053
+ assert inferred_dtype == lib.infer_dtype(values, skipna=True)
1054
+
1055
+ @pytest.mark.parametrize("skipna", [True, False])
1056
+ def test_length_zero(self, skipna):
1057
+ result = lib.infer_dtype(np.array([], dtype="i4"), skipna=skipna)
1058
+ assert result == "integer"
1059
+
1060
+ result = lib.infer_dtype([], skipna=skipna)
1061
+ assert result == "empty"
1062
+
1063
+ # GH 18004
1064
+ arr = np.array([np.array([], dtype=object), np.array([], dtype=object)])
1065
+ result = lib.infer_dtype(arr, skipna=skipna)
1066
+ assert result == "empty"
1067
+
1068
+ def test_integers(self):
1069
+ arr = np.array([1, 2, 3, np.int64(4), np.int32(5)], dtype="O")
1070
+ result = lib.infer_dtype(arr, skipna=True)
1071
+ assert result == "integer"
1072
+
1073
+ arr = np.array([1, 2, 3, np.int64(4), np.int32(5), "foo"], dtype="O")
1074
+ result = lib.infer_dtype(arr, skipna=True)
1075
+ assert result == "mixed-integer"
1076
+
1077
+ arr = np.array([1, 2, 3, 4, 5], dtype="i4")
1078
+ result = lib.infer_dtype(arr, skipna=True)
1079
+ assert result == "integer"
1080
+
1081
+ @pytest.mark.parametrize(
1082
+ "arr, skipna",
1083
+ [
1084
+ (np.array([1, 2, np.nan, np.nan, 3], dtype="O"), False),
1085
+ (np.array([1, 2, np.nan, np.nan, 3], dtype="O"), True),
1086
+ (np.array([1, 2, 3, np.int64(4), np.int32(5), np.nan], dtype="O"), False),
1087
+ (np.array([1, 2, 3, np.int64(4), np.int32(5), np.nan], dtype="O"), True),
1088
+ ],
1089
+ )
1090
+ def test_integer_na(self, arr, skipna):
1091
+ # GH 27392
1092
+ result = lib.infer_dtype(arr, skipna=skipna)
1093
+ expected = "integer" if skipna else "integer-na"
1094
+ assert result == expected
1095
+
1096
+ def test_infer_dtype_skipna_default(self):
1097
+ # infer_dtype `skipna` default deprecated in GH#24050,
1098
+ # changed to True in GH#29876
1099
+ arr = np.array([1, 2, 3, np.nan], dtype=object)
1100
+
1101
+ result = lib.infer_dtype(arr)
1102
+ assert result == "integer"
1103
+
1104
+ def test_bools(self):
1105
+ arr = np.array([True, False, True, True, True], dtype="O")
1106
+ result = lib.infer_dtype(arr, skipna=True)
1107
+ assert result == "boolean"
1108
+
1109
+ arr = np.array([np.bool_(True), np.bool_(False)], dtype="O")
1110
+ result = lib.infer_dtype(arr, skipna=True)
1111
+ assert result == "boolean"
1112
+
1113
+ arr = np.array([True, False, True, "foo"], dtype="O")
1114
+ result = lib.infer_dtype(arr, skipna=True)
1115
+ assert result == "mixed"
1116
+
1117
+ arr = np.array([True, False, True], dtype=bool)
1118
+ result = lib.infer_dtype(arr, skipna=True)
1119
+ assert result == "boolean"
1120
+
1121
+ arr = np.array([True, np.nan, False], dtype="O")
1122
+ result = lib.infer_dtype(arr, skipna=True)
1123
+ assert result == "boolean"
1124
+
1125
+ result = lib.infer_dtype(arr, skipna=False)
1126
+ assert result == "mixed"
1127
+
1128
+ def test_floats(self):
1129
+ arr = np.array([1.0, 2.0, 3.0, np.float64(4), np.float32(5)], dtype="O")
1130
+ result = lib.infer_dtype(arr, skipna=True)
1131
+ assert result == "floating"
1132
+
1133
+ arr = np.array([1, 2, 3, np.float64(4), np.float32(5), "foo"], dtype="O")
1134
+ result = lib.infer_dtype(arr, skipna=True)
1135
+ assert result == "mixed-integer"
1136
+
1137
+ arr = np.array([1, 2, 3, 4, 5], dtype="f4")
1138
+ result = lib.infer_dtype(arr, skipna=True)
1139
+ assert result == "floating"
1140
+
1141
+ arr = np.array([1, 2, 3, 4, 5], dtype="f8")
1142
+ result = lib.infer_dtype(arr, skipna=True)
1143
+ assert result == "floating"
1144
+
1145
+ def test_decimals(self):
1146
+ # GH15690
1147
+ arr = np.array([Decimal(1), Decimal(2), Decimal(3)])
1148
+ result = lib.infer_dtype(arr, skipna=True)
1149
+ assert result == "decimal"
1150
+
1151
+ arr = np.array([1.0, 2.0, Decimal(3)])
1152
+ result = lib.infer_dtype(arr, skipna=True)
1153
+ assert result == "mixed"
1154
+
1155
+ result = lib.infer_dtype(arr[::-1], skipna=True)
1156
+ assert result == "mixed"
1157
+
1158
+ arr = np.array([Decimal(1), Decimal("NaN"), Decimal(3)])
1159
+ result = lib.infer_dtype(arr, skipna=True)
1160
+ assert result == "decimal"
1161
+
1162
+ arr = np.array([Decimal(1), np.nan, Decimal(3)], dtype="O")
1163
+ result = lib.infer_dtype(arr, skipna=True)
1164
+ assert result == "decimal"
1165
+
1166
+ # complex is compatible with nan, so skipna has no effect
1167
+ @pytest.mark.parametrize("skipna", [True, False])
1168
+ def test_complex(self, skipna):
1169
+ # gets cast to complex on array construction
1170
+ arr = np.array([1.0, 2.0, 1 + 1j])
1171
+ result = lib.infer_dtype(arr, skipna=skipna)
1172
+ assert result == "complex"
1173
+
1174
+ arr = np.array([1.0, 2.0, 1 + 1j], dtype="O")
1175
+ result = lib.infer_dtype(arr, skipna=skipna)
1176
+ assert result == "mixed"
1177
+
1178
+ result = lib.infer_dtype(arr[::-1], skipna=skipna)
1179
+ assert result == "mixed"
1180
+
1181
+ # gets cast to complex on array construction
1182
+ arr = np.array([1, np.nan, 1 + 1j])
1183
+ result = lib.infer_dtype(arr, skipna=skipna)
1184
+ assert result == "complex"
1185
+
1186
+ arr = np.array([1.0, np.nan, 1 + 1j], dtype="O")
1187
+ result = lib.infer_dtype(arr, skipna=skipna)
1188
+ assert result == "mixed"
1189
+
1190
+ # complex with nans stays complex
1191
+ arr = np.array([1 + 1j, np.nan, 3 + 3j], dtype="O")
1192
+ result = lib.infer_dtype(arr, skipna=skipna)
1193
+ assert result == "complex"
1194
+
1195
+ # test smaller complex dtype; will pass through _try_infer_map fastpath
1196
+ arr = np.array([1 + 1j, np.nan, 3 + 3j], dtype=np.complex64)
1197
+ result = lib.infer_dtype(arr, skipna=skipna)
1198
+ assert result == "complex"
1199
+
1200
+ def test_string(self):
1201
+ pass
1202
+
1203
+ def test_unicode(self):
1204
+ arr = ["a", np.nan, "c"]
1205
+ result = lib.infer_dtype(arr, skipna=False)
1206
+ # This currently returns "mixed", but it's not clear that's optimal.
1207
+ # This could also return "string" or "mixed-string"
1208
+ assert result == "mixed"
1209
+
1210
+ # even though we use skipna, we are only skipping those NAs that are
1211
+ # considered matching by is_string_array
1212
+ arr = ["a", np.nan, "c"]
1213
+ result = lib.infer_dtype(arr, skipna=True)
1214
+ assert result == "string"
1215
+
1216
+ arr = ["a", pd.NA, "c"]
1217
+ result = lib.infer_dtype(arr, skipna=True)
1218
+ assert result == "string"
1219
+
1220
+ arr = ["a", pd.NaT, "c"]
1221
+ result = lib.infer_dtype(arr, skipna=True)
1222
+ assert result == "mixed"
1223
+
1224
+ arr = ["a", "c"]
1225
+ result = lib.infer_dtype(arr, skipna=False)
1226
+ assert result == "string"
1227
+
1228
+ @pytest.mark.parametrize(
1229
+ "dtype, missing, skipna, expected",
1230
+ [
1231
+ (float, np.nan, False, "floating"),
1232
+ (float, np.nan, True, "floating"),
1233
+ (object, np.nan, False, "floating"),
1234
+ (object, np.nan, True, "empty"),
1235
+ (object, None, False, "mixed"),
1236
+ (object, None, True, "empty"),
1237
+ ],
1238
+ )
1239
+ @pytest.mark.parametrize("box", [Series, np.array])
1240
+ def test_object_empty(self, box, missing, dtype, skipna, expected):
1241
+ # GH 23421
1242
+ arr = box([missing, missing], dtype=dtype)
1243
+
1244
+ result = lib.infer_dtype(arr, skipna=skipna)
1245
+ assert result == expected
1246
+
1247
+ def test_datetime(self):
1248
+ dates = [datetime(2012, 1, x) for x in range(1, 20)]
1249
+ index = Index(dates)
1250
+ assert index.inferred_type == "datetime64"
1251
+
1252
+ def test_infer_dtype_datetime64(self):
1253
+ arr = np.array(
1254
+ [np.datetime64("2011-01-01"), np.datetime64("2011-01-01")], dtype=object
1255
+ )
1256
+ assert lib.infer_dtype(arr, skipna=True) == "datetime64"
1257
+
1258
+ @pytest.mark.parametrize("na_value", [pd.NaT, np.nan])
1259
+ def test_infer_dtype_datetime64_with_na(self, na_value):
1260
+ # starts with nan
1261
+ arr = np.array([na_value, np.datetime64("2011-01-02")])
1262
+ assert lib.infer_dtype(arr, skipna=True) == "datetime64"
1263
+
1264
+ arr = np.array([na_value, np.datetime64("2011-01-02"), na_value])
1265
+ assert lib.infer_dtype(arr, skipna=True) == "datetime64"
1266
+
1267
+ @pytest.mark.parametrize(
1268
+ "arr",
1269
+ [
1270
+ np.array(
1271
+ [np.timedelta64("nat"), np.datetime64("2011-01-02")], dtype=object
1272
+ ),
1273
+ np.array(
1274
+ [np.datetime64("2011-01-02"), np.timedelta64("nat")], dtype=object
1275
+ ),
1276
+ np.array([np.datetime64("2011-01-01"), Timestamp("2011-01-02")]),
1277
+ np.array([Timestamp("2011-01-02"), np.datetime64("2011-01-01")]),
1278
+ np.array([np.nan, Timestamp("2011-01-02"), 1.1]),
1279
+ np.array([np.nan, "2011-01-01", Timestamp("2011-01-02")], dtype=object),
1280
+ np.array([np.datetime64("nat"), np.timedelta64(1, "D")], dtype=object),
1281
+ np.array([np.timedelta64(1, "D"), np.datetime64("nat")], dtype=object),
1282
+ ],
1283
+ )
1284
+ def test_infer_datetimelike_dtype_mixed(self, arr):
1285
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1286
+
1287
+ def test_infer_dtype_mixed_integer(self):
1288
+ arr = np.array([np.nan, Timestamp("2011-01-02"), 1])
1289
+ assert lib.infer_dtype(arr, skipna=True) == "mixed-integer"
1290
+
1291
+ @pytest.mark.parametrize(
1292
+ "arr",
1293
+ [
1294
+ np.array([Timestamp("2011-01-01"), Timestamp("2011-01-02")]),
1295
+ np.array([datetime(2011, 1, 1), datetime(2012, 2, 1)]),
1296
+ np.array([datetime(2011, 1, 1), Timestamp("2011-01-02")]),
1297
+ ],
1298
+ )
1299
+ def test_infer_dtype_datetime(self, arr):
1300
+ assert lib.infer_dtype(arr, skipna=True) == "datetime"
1301
+
1302
+ @pytest.mark.parametrize("na_value", [pd.NaT, np.nan])
1303
+ @pytest.mark.parametrize(
1304
+ "time_stamp", [Timestamp("2011-01-01"), datetime(2011, 1, 1)]
1305
+ )
1306
+ def test_infer_dtype_datetime_with_na(self, na_value, time_stamp):
1307
+ # starts with nan
1308
+ arr = np.array([na_value, time_stamp])
1309
+ assert lib.infer_dtype(arr, skipna=True) == "datetime"
1310
+
1311
+ arr = np.array([na_value, time_stamp, na_value])
1312
+ assert lib.infer_dtype(arr, skipna=True) == "datetime"
1313
+
1314
+ @pytest.mark.parametrize(
1315
+ "arr",
1316
+ [
1317
+ np.array([Timedelta("1 days"), Timedelta("2 days")]),
1318
+ np.array([np.timedelta64(1, "D"), np.timedelta64(2, "D")], dtype=object),
1319
+ np.array([timedelta(1), timedelta(2)]),
1320
+ ],
1321
+ )
1322
+ def test_infer_dtype_timedelta(self, arr):
1323
+ assert lib.infer_dtype(arr, skipna=True) == "timedelta"
1324
+
1325
+ @pytest.mark.parametrize("na_value", [pd.NaT, np.nan])
1326
+ @pytest.mark.parametrize(
1327
+ "delta", [Timedelta("1 days"), np.timedelta64(1, "D"), timedelta(1)]
1328
+ )
1329
+ def test_infer_dtype_timedelta_with_na(self, na_value, delta):
1330
+ # starts with nan
1331
+ arr = np.array([na_value, delta])
1332
+ assert lib.infer_dtype(arr, skipna=True) == "timedelta"
1333
+
1334
+ arr = np.array([na_value, delta, na_value])
1335
+ assert lib.infer_dtype(arr, skipna=True) == "timedelta"
1336
+
1337
+ def test_infer_dtype_period(self):
1338
+ # GH 13664
1339
+ arr = np.array([Period("2011-01", freq="D"), Period("2011-02", freq="D")])
1340
+ assert lib.infer_dtype(arr, skipna=True) == "period"
1341
+
1342
+ # non-homogeneous freqs -> mixed
1343
+ arr = np.array([Period("2011-01", freq="D"), Period("2011-02", freq="M")])
1344
+ assert lib.infer_dtype(arr, skipna=True) == "mixed"
1345
+
1346
+ @pytest.mark.parametrize("klass", [pd.array, Series, Index])
1347
+ @pytest.mark.parametrize("skipna", [True, False])
1348
+ def test_infer_dtype_period_array(self, klass, skipna):
1349
+ # https://github.com/pandas-dev/pandas/issues/23553
1350
+ values = klass(
1351
+ [
1352
+ Period("2011-01-01", freq="D"),
1353
+ Period("2011-01-02", freq="D"),
1354
+ pd.NaT,
1355
+ ]
1356
+ )
1357
+ assert lib.infer_dtype(values, skipna=skipna) == "period"
1358
+
1359
+ # periods but mixed freq
1360
+ values = klass(
1361
+ [
1362
+ Period("2011-01-01", freq="D"),
1363
+ Period("2011-01-02", freq="M"),
1364
+ pd.NaT,
1365
+ ]
1366
+ )
1367
+ # with pd.array this becomes NumpyExtensionArray which ends up
1368
+ # as "unknown-array"
1369
+ exp = "unknown-array" if klass is pd.array else "mixed"
1370
+ assert lib.infer_dtype(values, skipna=skipna) == exp
1371
+
1372
+ def test_infer_dtype_period_mixed(self):
1373
+ arr = np.array(
1374
+ [Period("2011-01", freq="M"), np.datetime64("nat")], dtype=object
1375
+ )
1376
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1377
+
1378
+ arr = np.array(
1379
+ [np.datetime64("nat"), Period("2011-01", freq="M")], dtype=object
1380
+ )
1381
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1382
+
1383
+ @pytest.mark.parametrize("na_value", [pd.NaT, np.nan])
1384
+ def test_infer_dtype_period_with_na(self, na_value):
1385
+ # starts with nan
1386
+ arr = np.array([na_value, Period("2011-01", freq="D")])
1387
+ assert lib.infer_dtype(arr, skipna=True) == "period"
1388
+
1389
+ arr = np.array([na_value, Period("2011-01", freq="D"), na_value])
1390
+ assert lib.infer_dtype(arr, skipna=True) == "period"
1391
+
1392
+ def test_infer_dtype_all_nan_nat_like(self):
1393
+ arr = np.array([np.nan, np.nan])
1394
+ assert lib.infer_dtype(arr, skipna=True) == "floating"
1395
+
1396
+ # nan and None mix are result in mixed
1397
+ arr = np.array([np.nan, np.nan, None])
1398
+ assert lib.infer_dtype(arr, skipna=True) == "empty"
1399
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1400
+
1401
+ arr = np.array([None, np.nan, np.nan])
1402
+ assert lib.infer_dtype(arr, skipna=True) == "empty"
1403
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1404
+
1405
+ # pd.NaT
1406
+ arr = np.array([pd.NaT])
1407
+ assert lib.infer_dtype(arr, skipna=False) == "datetime"
1408
+
1409
+ arr = np.array([pd.NaT, np.nan])
1410
+ assert lib.infer_dtype(arr, skipna=False) == "datetime"
1411
+
1412
+ arr = np.array([np.nan, pd.NaT])
1413
+ assert lib.infer_dtype(arr, skipna=False) == "datetime"
1414
+
1415
+ arr = np.array([np.nan, pd.NaT, np.nan])
1416
+ assert lib.infer_dtype(arr, skipna=False) == "datetime"
1417
+
1418
+ arr = np.array([None, pd.NaT, None])
1419
+ assert lib.infer_dtype(arr, skipna=False) == "datetime"
1420
+
1421
+ # np.datetime64(nat)
1422
+ arr = np.array([np.datetime64("nat")])
1423
+ assert lib.infer_dtype(arr, skipna=False) == "datetime64"
1424
+
1425
+ for n in [np.nan, pd.NaT, None]:
1426
+ arr = np.array([n, np.datetime64("nat"), n])
1427
+ assert lib.infer_dtype(arr, skipna=False) == "datetime64"
1428
+
1429
+ arr = np.array([pd.NaT, n, np.datetime64("nat"), n])
1430
+ assert lib.infer_dtype(arr, skipna=False) == "datetime64"
1431
+
1432
+ arr = np.array([np.timedelta64("nat")], dtype=object)
1433
+ assert lib.infer_dtype(arr, skipna=False) == "timedelta"
1434
+
1435
+ for n in [np.nan, pd.NaT, None]:
1436
+ arr = np.array([n, np.timedelta64("nat"), n])
1437
+ assert lib.infer_dtype(arr, skipna=False) == "timedelta"
1438
+
1439
+ arr = np.array([pd.NaT, n, np.timedelta64("nat"), n])
1440
+ assert lib.infer_dtype(arr, skipna=False) == "timedelta"
1441
+
1442
+ # datetime / timedelta mixed
1443
+ arr = np.array([pd.NaT, np.datetime64("nat"), np.timedelta64("nat"), np.nan])
1444
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1445
+
1446
+ arr = np.array([np.timedelta64("nat"), np.datetime64("nat")], dtype=object)
1447
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1448
+
1449
+ def test_is_datetimelike_array_all_nan_nat_like(self):
1450
+ arr = np.array([np.nan, pd.NaT, np.datetime64("nat")])
1451
+ assert lib.is_datetime_array(arr)
1452
+ assert lib.is_datetime64_array(arr)
1453
+ assert not lib.is_timedelta_or_timedelta64_array(arr)
1454
+
1455
+ arr = np.array([np.nan, pd.NaT, np.timedelta64("nat")])
1456
+ assert not lib.is_datetime_array(arr)
1457
+ assert not lib.is_datetime64_array(arr)
1458
+ assert lib.is_timedelta_or_timedelta64_array(arr)
1459
+
1460
+ arr = np.array([np.nan, pd.NaT, np.datetime64("nat"), np.timedelta64("nat")])
1461
+ assert not lib.is_datetime_array(arr)
1462
+ assert not lib.is_datetime64_array(arr)
1463
+ assert not lib.is_timedelta_or_timedelta64_array(arr)
1464
+
1465
+ arr = np.array([np.nan, pd.NaT])
1466
+ assert lib.is_datetime_array(arr)
1467
+ assert lib.is_datetime64_array(arr)
1468
+ assert lib.is_timedelta_or_timedelta64_array(arr)
1469
+
1470
+ arr = np.array([np.nan, np.nan], dtype=object)
1471
+ assert not lib.is_datetime_array(arr)
1472
+ assert not lib.is_datetime64_array(arr)
1473
+ assert not lib.is_timedelta_or_timedelta64_array(arr)
1474
+
1475
+ assert lib.is_datetime_with_singletz_array(
1476
+ np.array(
1477
+ [
1478
+ Timestamp("20130101", tz="US/Eastern"),
1479
+ Timestamp("20130102", tz="US/Eastern"),
1480
+ ],
1481
+ dtype=object,
1482
+ )
1483
+ )
1484
+ assert not lib.is_datetime_with_singletz_array(
1485
+ np.array(
1486
+ [
1487
+ Timestamp("20130101", tz="US/Eastern"),
1488
+ Timestamp("20130102", tz="CET"),
1489
+ ],
1490
+ dtype=object,
1491
+ )
1492
+ )
1493
+
1494
+ @pytest.mark.parametrize(
1495
+ "func",
1496
+ [
1497
+ "is_datetime_array",
1498
+ "is_datetime64_array",
1499
+ "is_bool_array",
1500
+ "is_timedelta_or_timedelta64_array",
1501
+ "is_date_array",
1502
+ "is_time_array",
1503
+ "is_interval_array",
1504
+ ],
1505
+ )
1506
+ def test_other_dtypes_for_array(self, func):
1507
+ func = getattr(lib, func)
1508
+ arr = np.array(["foo", "bar"])
1509
+ assert not func(arr)
1510
+ assert not func(arr.reshape(2, 1))
1511
+
1512
+ arr = np.array([1, 2])
1513
+ assert not func(arr)
1514
+ assert not func(arr.reshape(2, 1))
1515
+
1516
+ def test_date(self):
1517
+ dates = [date(2012, 1, day) for day in range(1, 20)]
1518
+ index = Index(dates)
1519
+ assert index.inferred_type == "date"
1520
+
1521
+ dates = [date(2012, 1, day) for day in range(1, 20)] + [np.nan]
1522
+ result = lib.infer_dtype(dates, skipna=False)
1523
+ assert result == "mixed"
1524
+
1525
+ result = lib.infer_dtype(dates, skipna=True)
1526
+ assert result == "date"
1527
+
1528
+ @pytest.mark.parametrize(
1529
+ "values",
1530
+ [
1531
+ [date(2020, 1, 1), Timestamp("2020-01-01")],
1532
+ [Timestamp("2020-01-01"), date(2020, 1, 1)],
1533
+ [date(2020, 1, 1), pd.NaT],
1534
+ [pd.NaT, date(2020, 1, 1)],
1535
+ ],
1536
+ )
1537
+ @pytest.mark.parametrize("skipna", [True, False])
1538
+ def test_infer_dtype_date_order_invariant(self, values, skipna):
1539
+ # https://github.com/pandas-dev/pandas/issues/33741
1540
+ result = lib.infer_dtype(values, skipna=skipna)
1541
+ assert result == "date"
1542
+
1543
+ def test_is_numeric_array(self):
1544
+ assert lib.is_float_array(np.array([1, 2.0]))
1545
+ assert lib.is_float_array(np.array([1, 2.0, np.nan]))
1546
+ assert not lib.is_float_array(np.array([1, 2]))
1547
+
1548
+ assert lib.is_integer_array(np.array([1, 2]))
1549
+ assert not lib.is_integer_array(np.array([1, 2.0]))
1550
+
1551
+ def test_is_string_array(self):
1552
+ # We should only be accepting pd.NA, np.nan,
1553
+ # other floating point nans e.g. float('nan')]
1554
+ # when skipna is True.
1555
+ assert lib.is_string_array(np.array(["foo", "bar"]))
1556
+ assert not lib.is_string_array(
1557
+ np.array(["foo", "bar", pd.NA], dtype=object), skipna=False
1558
+ )
1559
+ assert lib.is_string_array(
1560
+ np.array(["foo", "bar", pd.NA], dtype=object), skipna=True
1561
+ )
1562
+ # we allow NaN/None in the StringArray constructor, so its allowed here
1563
+ assert lib.is_string_array(
1564
+ np.array(["foo", "bar", None], dtype=object), skipna=True
1565
+ )
1566
+ assert lib.is_string_array(
1567
+ np.array(["foo", "bar", np.nan], dtype=object), skipna=True
1568
+ )
1569
+ # But not e.g. datetimelike or Decimal NAs
1570
+ assert not lib.is_string_array(
1571
+ np.array(["foo", "bar", pd.NaT], dtype=object), skipna=True
1572
+ )
1573
+ assert not lib.is_string_array(
1574
+ np.array(["foo", "bar", np.datetime64("NaT")], dtype=object), skipna=True
1575
+ )
1576
+ assert not lib.is_string_array(
1577
+ np.array(["foo", "bar", Decimal("NaN")], dtype=object), skipna=True
1578
+ )
1579
+
1580
+ assert not lib.is_string_array(
1581
+ np.array(["foo", "bar", None], dtype=object), skipna=False
1582
+ )
1583
+ assert not lib.is_string_array(
1584
+ np.array(["foo", "bar", np.nan], dtype=object), skipna=False
1585
+ )
1586
+ assert not lib.is_string_array(np.array([1, 2]))
1587
+
1588
+ def test_to_object_array_tuples(self):
1589
+ r = (5, 6)
1590
+ values = [r]
1591
+ lib.to_object_array_tuples(values)
1592
+
1593
+ # make sure record array works
1594
+ record = namedtuple("record", "x y")
1595
+ r = record(5, 6)
1596
+ values = [r]
1597
+ lib.to_object_array_tuples(values)
1598
+
1599
+ def test_object(self):
1600
+ # GH 7431
1601
+ # cannot infer more than this as only a single element
1602
+ arr = np.array([None], dtype="O")
1603
+ result = lib.infer_dtype(arr, skipna=False)
1604
+ assert result == "mixed"
1605
+ result = lib.infer_dtype(arr, skipna=True)
1606
+ assert result == "empty"
1607
+
1608
+ def test_to_object_array_width(self):
1609
+ # see gh-13320
1610
+ rows = [[1, 2, 3], [4, 5, 6]]
1611
+
1612
+ expected = np.array(rows, dtype=object)
1613
+ out = lib.to_object_array(rows)
1614
+ tm.assert_numpy_array_equal(out, expected)
1615
+
1616
+ expected = np.array(rows, dtype=object)
1617
+ out = lib.to_object_array(rows, min_width=1)
1618
+ tm.assert_numpy_array_equal(out, expected)
1619
+
1620
+ expected = np.array(
1621
+ [[1, 2, 3, None, None], [4, 5, 6, None, None]], dtype=object
1622
+ )
1623
+ out = lib.to_object_array(rows, min_width=5)
1624
+ tm.assert_numpy_array_equal(out, expected)
1625
+
1626
+ def test_is_period(self):
1627
+ # GH#55264
1628
+ msg = "is_period is deprecated and will be removed in a future version"
1629
+ with tm.assert_produces_warning(FutureWarning, match=msg):
1630
+ assert lib.is_period(Period("2011-01", freq="M"))
1631
+ assert not lib.is_period(PeriodIndex(["2011-01"], freq="M"))
1632
+ assert not lib.is_period(Timestamp("2011-01"))
1633
+ assert not lib.is_period(1)
1634
+ assert not lib.is_period(np.nan)
1635
+
1636
+ def test_is_interval(self):
1637
+ # GH#55264
1638
+ msg = "is_interval is deprecated and will be removed in a future version"
1639
+ item = Interval(1, 2)
1640
+ with tm.assert_produces_warning(FutureWarning, match=msg):
1641
+ assert lib.is_interval(item)
1642
+ assert not lib.is_interval(pd.IntervalIndex([item]))
1643
+ assert not lib.is_interval(pd.IntervalIndex([item])._engine)
1644
+
1645
+ def test_categorical(self):
1646
+ # GH 8974
1647
+ arr = Categorical(list("abc"))
1648
+ result = lib.infer_dtype(arr, skipna=True)
1649
+ assert result == "categorical"
1650
+
1651
+ result = lib.infer_dtype(Series(arr), skipna=True)
1652
+ assert result == "categorical"
1653
+
1654
+ arr = Categorical(list("abc"), categories=["cegfab"], ordered=True)
1655
+ result = lib.infer_dtype(arr, skipna=True)
1656
+ assert result == "categorical"
1657
+
1658
+ result = lib.infer_dtype(Series(arr), skipna=True)
1659
+ assert result == "categorical"
1660
+
1661
+ @pytest.mark.parametrize("asobject", [True, False])
1662
+ def test_interval(self, asobject):
1663
+ idx = pd.IntervalIndex.from_breaks(range(5), closed="both")
1664
+ if asobject:
1665
+ idx = idx.astype(object)
1666
+
1667
+ inferred = lib.infer_dtype(idx, skipna=False)
1668
+ assert inferred == "interval"
1669
+
1670
+ inferred = lib.infer_dtype(idx._data, skipna=False)
1671
+ assert inferred == "interval"
1672
+
1673
+ inferred = lib.infer_dtype(Series(idx, dtype=idx.dtype), skipna=False)
1674
+ assert inferred == "interval"
1675
+
1676
+ @pytest.mark.parametrize("value", [Timestamp(0), Timedelta(0), 0, 0.0])
1677
+ def test_interval_mismatched_closed(self, value):
1678
+ first = Interval(value, value, closed="left")
1679
+ second = Interval(value, value, closed="right")
1680
+
1681
+ # if closed match, we should infer "interval"
1682
+ arr = np.array([first, first], dtype=object)
1683
+ assert lib.infer_dtype(arr, skipna=False) == "interval"
1684
+
1685
+ # if closed dont match, we should _not_ get "interval"
1686
+ arr2 = np.array([first, second], dtype=object)
1687
+ assert lib.infer_dtype(arr2, skipna=False) == "mixed"
1688
+
1689
+ def test_interval_mismatched_subtype(self):
1690
+ first = Interval(0, 1, closed="left")
1691
+ second = Interval(Timestamp(0), Timestamp(1), closed="left")
1692
+ third = Interval(Timedelta(0), Timedelta(1), closed="left")
1693
+
1694
+ arr = np.array([first, second])
1695
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1696
+
1697
+ arr = np.array([second, third])
1698
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1699
+
1700
+ arr = np.array([first, third])
1701
+ assert lib.infer_dtype(arr, skipna=False) == "mixed"
1702
+
1703
+ # float vs int subdtype are compatible
1704
+ flt_interval = Interval(1.5, 2.5, closed="left")
1705
+ arr = np.array([first, flt_interval], dtype=object)
1706
+ assert lib.infer_dtype(arr, skipna=False) == "interval"
1707
+
1708
+ @pytest.mark.parametrize("klass", [pd.array, Series])
1709
+ @pytest.mark.parametrize("skipna", [True, False])
1710
+ @pytest.mark.parametrize("data", [["a", "b", "c"], ["a", "b", pd.NA]])
1711
+ def test_string_dtype(self, data, skipna, klass, nullable_string_dtype):
1712
+ # StringArray
1713
+ val = klass(data, dtype=nullable_string_dtype)
1714
+ inferred = lib.infer_dtype(val, skipna=skipna)
1715
+ assert inferred == "string"
1716
+
1717
+ @pytest.mark.parametrize("klass", [pd.array, Series])
1718
+ @pytest.mark.parametrize("skipna", [True, False])
1719
+ @pytest.mark.parametrize("data", [[True, False, True], [True, False, pd.NA]])
1720
+ def test_boolean_dtype(self, data, skipna, klass):
1721
+ # BooleanArray
1722
+ val = klass(data, dtype="boolean")
1723
+ inferred = lib.infer_dtype(val, skipna=skipna)
1724
+ assert inferred == "boolean"
1725
+
1726
+
1727
+ class TestNumberScalar:
1728
+ def test_is_number(self):
1729
+ assert is_number(True)
1730
+ assert is_number(1)
1731
+ assert is_number(1.1)
1732
+ assert is_number(1 + 3j)
1733
+ assert is_number(np.int64(1))
1734
+ assert is_number(np.float64(1.1))
1735
+ assert is_number(np.complex128(1 + 3j))
1736
+ assert is_number(np.nan)
1737
+
1738
+ assert not is_number(None)
1739
+ assert not is_number("x")
1740
+ assert not is_number(datetime(2011, 1, 1))
1741
+ assert not is_number(np.datetime64("2011-01-01"))
1742
+ assert not is_number(Timestamp("2011-01-01"))
1743
+ assert not is_number(Timestamp("2011-01-01", tz="US/Eastern"))
1744
+ assert not is_number(timedelta(1000))
1745
+ assert not is_number(Timedelta("1 days"))
1746
+
1747
+ # questionable
1748
+ assert not is_number(np.bool_(False))
1749
+ assert is_number(np.timedelta64(1, "D"))
1750
+
1751
+ def test_is_bool(self):
1752
+ assert is_bool(True)
1753
+ assert is_bool(False)
1754
+ assert is_bool(np.bool_(False))
1755
+
1756
+ assert not is_bool(1)
1757
+ assert not is_bool(1.1)
1758
+ assert not is_bool(1 + 3j)
1759
+ assert not is_bool(np.int64(1))
1760
+ assert not is_bool(np.float64(1.1))
1761
+ assert not is_bool(np.complex128(1 + 3j))
1762
+ assert not is_bool(np.nan)
1763
+ assert not is_bool(None)
1764
+ assert not is_bool("x")
1765
+ assert not is_bool(datetime(2011, 1, 1))
1766
+ assert not is_bool(np.datetime64("2011-01-01"))
1767
+ assert not is_bool(Timestamp("2011-01-01"))
1768
+ assert not is_bool(Timestamp("2011-01-01", tz="US/Eastern"))
1769
+ assert not is_bool(timedelta(1000))
1770
+ assert not is_bool(np.timedelta64(1, "D"))
1771
+ assert not is_bool(Timedelta("1 days"))
1772
+
1773
+ def test_is_integer(self):
1774
+ assert is_integer(1)
1775
+ assert is_integer(np.int64(1))
1776
+
1777
+ assert not is_integer(True)
1778
+ assert not is_integer(1.1)
1779
+ assert not is_integer(1 + 3j)
1780
+ assert not is_integer(False)
1781
+ assert not is_integer(np.bool_(False))
1782
+ assert not is_integer(np.float64(1.1))
1783
+ assert not is_integer(np.complex128(1 + 3j))
1784
+ assert not is_integer(np.nan)
1785
+ assert not is_integer(None)
1786
+ assert not is_integer("x")
1787
+ assert not is_integer(datetime(2011, 1, 1))
1788
+ assert not is_integer(np.datetime64("2011-01-01"))
1789
+ assert not is_integer(Timestamp("2011-01-01"))
1790
+ assert not is_integer(Timestamp("2011-01-01", tz="US/Eastern"))
1791
+ assert not is_integer(timedelta(1000))
1792
+ assert not is_integer(Timedelta("1 days"))
1793
+ assert not is_integer(np.timedelta64(1, "D"))
1794
+
1795
+ def test_is_float(self):
1796
+ assert is_float(1.1)
1797
+ assert is_float(np.float64(1.1))
1798
+ assert is_float(np.nan)
1799
+
1800
+ assert not is_float(True)
1801
+ assert not is_float(1)
1802
+ assert not is_float(1 + 3j)
1803
+ assert not is_float(False)
1804
+ assert not is_float(np.bool_(False))
1805
+ assert not is_float(np.int64(1))
1806
+ assert not is_float(np.complex128(1 + 3j))
1807
+ assert not is_float(None)
1808
+ assert not is_float("x")
1809
+ assert not is_float(datetime(2011, 1, 1))
1810
+ assert not is_float(np.datetime64("2011-01-01"))
1811
+ assert not is_float(Timestamp("2011-01-01"))
1812
+ assert not is_float(Timestamp("2011-01-01", tz="US/Eastern"))
1813
+ assert not is_float(timedelta(1000))
1814
+ assert not is_float(np.timedelta64(1, "D"))
1815
+ assert not is_float(Timedelta("1 days"))
1816
+
1817
+ def test_is_datetime_dtypes(self):
1818
+ ts = pd.date_range("20130101", periods=3)
1819
+ tsa = pd.date_range("20130101", periods=3, tz="US/Eastern")
1820
+
1821
+ msg = "is_datetime64tz_dtype is deprecated"
1822
+
1823
+ assert is_datetime64_dtype("datetime64")
1824
+ assert is_datetime64_dtype("datetime64[ns]")
1825
+ assert is_datetime64_dtype(ts)
1826
+ assert not is_datetime64_dtype(tsa)
1827
+
1828
+ assert not is_datetime64_ns_dtype("datetime64")
1829
+ assert is_datetime64_ns_dtype("datetime64[ns]")
1830
+ assert is_datetime64_ns_dtype(ts)
1831
+ assert is_datetime64_ns_dtype(tsa)
1832
+
1833
+ assert is_datetime64_any_dtype("datetime64")
1834
+ assert is_datetime64_any_dtype("datetime64[ns]")
1835
+ assert is_datetime64_any_dtype(ts)
1836
+ assert is_datetime64_any_dtype(tsa)
1837
+
1838
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
1839
+ assert not is_datetime64tz_dtype("datetime64")
1840
+ assert not is_datetime64tz_dtype("datetime64[ns]")
1841
+ assert not is_datetime64tz_dtype(ts)
1842
+ assert is_datetime64tz_dtype(tsa)
1843
+
1844
+ @pytest.mark.parametrize("tz", ["US/Eastern", "UTC"])
1845
+ def test_is_datetime_dtypes_with_tz(self, tz):
1846
+ dtype = f"datetime64[ns, {tz}]"
1847
+ assert not is_datetime64_dtype(dtype)
1848
+
1849
+ msg = "is_datetime64tz_dtype is deprecated"
1850
+ with tm.assert_produces_warning(DeprecationWarning, match=msg):
1851
+ assert is_datetime64tz_dtype(dtype)
1852
+ assert is_datetime64_ns_dtype(dtype)
1853
+ assert is_datetime64_any_dtype(dtype)
1854
+
1855
+ def test_is_timedelta(self):
1856
+ assert is_timedelta64_dtype("timedelta64")
1857
+ assert is_timedelta64_dtype("timedelta64[ns]")
1858
+ assert not is_timedelta64_ns_dtype("timedelta64")
1859
+ assert is_timedelta64_ns_dtype("timedelta64[ns]")
1860
+
1861
+ tdi = TimedeltaIndex([1e14, 2e14], dtype="timedelta64[ns]")
1862
+ assert is_timedelta64_dtype(tdi)
1863
+ assert is_timedelta64_ns_dtype(tdi)
1864
+ assert is_timedelta64_ns_dtype(tdi.astype("timedelta64[ns]"))
1865
+
1866
+ assert not is_timedelta64_ns_dtype(Index([], dtype=np.float64))
1867
+ assert not is_timedelta64_ns_dtype(Index([], dtype=np.int64))
1868
+
1869
+
1870
+ class TestIsScalar:
1871
+ def test_is_scalar_builtin_scalars(self):
1872
+ assert is_scalar(None)
1873
+ assert is_scalar(True)
1874
+ assert is_scalar(False)
1875
+ assert is_scalar(Fraction())
1876
+ assert is_scalar(0.0)
1877
+ assert is_scalar(1)
1878
+ assert is_scalar(complex(2))
1879
+ assert is_scalar(float("NaN"))
1880
+ assert is_scalar(np.nan)
1881
+ assert is_scalar("foobar")
1882
+ assert is_scalar(b"foobar")
1883
+ assert is_scalar(datetime(2014, 1, 1))
1884
+ assert is_scalar(date(2014, 1, 1))
1885
+ assert is_scalar(time(12, 0))
1886
+ assert is_scalar(timedelta(hours=1))
1887
+ assert is_scalar(pd.NaT)
1888
+ assert is_scalar(pd.NA)
1889
+
1890
+ def test_is_scalar_builtin_nonscalars(self):
1891
+ assert not is_scalar({})
1892
+ assert not is_scalar([])
1893
+ assert not is_scalar([1])
1894
+ assert not is_scalar(())
1895
+ assert not is_scalar((1,))
1896
+ assert not is_scalar(slice(None))
1897
+ assert not is_scalar(Ellipsis)
1898
+
1899
+ def test_is_scalar_numpy_array_scalars(self):
1900
+ assert is_scalar(np.int64(1))
1901
+ assert is_scalar(np.float64(1.0))
1902
+ assert is_scalar(np.int32(1))
1903
+ assert is_scalar(np.complex64(2))
1904
+ assert is_scalar(np.object_("foobar"))
1905
+ assert is_scalar(np.str_("foobar"))
1906
+ assert is_scalar(np.bytes_(b"foobar"))
1907
+ assert is_scalar(np.datetime64("2014-01-01"))
1908
+ assert is_scalar(np.timedelta64(1, "h"))
1909
+
1910
+ @pytest.mark.parametrize(
1911
+ "zerodim",
1912
+ [
1913
+ np.array(1),
1914
+ np.array("foobar"),
1915
+ np.array(np.datetime64("2014-01-01")),
1916
+ np.array(np.timedelta64(1, "h")),
1917
+ np.array(np.datetime64("NaT")),
1918
+ ],
1919
+ )
1920
+ def test_is_scalar_numpy_zerodim_arrays(self, zerodim):
1921
+ assert not is_scalar(zerodim)
1922
+ assert is_scalar(lib.item_from_zerodim(zerodim))
1923
+
1924
+ @pytest.mark.parametrize("arr", [np.array([]), np.array([[]])])
1925
+ def test_is_scalar_numpy_arrays(self, arr):
1926
+ assert not is_scalar(arr)
1927
+ assert not is_scalar(MockNumpyLikeArray(arr))
1928
+
1929
+ def test_is_scalar_pandas_scalars(self):
1930
+ assert is_scalar(Timestamp("2014-01-01"))
1931
+ assert is_scalar(Timedelta(hours=1))
1932
+ assert is_scalar(Period("2014-01-01"))
1933
+ assert is_scalar(Interval(left=0, right=1))
1934
+ assert is_scalar(DateOffset(days=1))
1935
+ assert is_scalar(pd.offsets.Minute(3))
1936
+
1937
+ def test_is_scalar_pandas_containers(self):
1938
+ assert not is_scalar(Series(dtype=object))
1939
+ assert not is_scalar(Series([1]))
1940
+ assert not is_scalar(DataFrame())
1941
+ assert not is_scalar(DataFrame([[1]]))
1942
+ assert not is_scalar(Index([]))
1943
+ assert not is_scalar(Index([1]))
1944
+ assert not is_scalar(Categorical([]))
1945
+ assert not is_scalar(DatetimeIndex([])._data)
1946
+ assert not is_scalar(TimedeltaIndex([])._data)
1947
+ assert not is_scalar(DatetimeIndex([])._data.to_period("D"))
1948
+ assert not is_scalar(pd.array([1, 2, 3]))
1949
+
1950
+ def test_is_scalar_number(self):
1951
+ # Number() is not recognied by PyNumber_Check, so by extension
1952
+ # is not recognized by is_scalar, but instances of non-abstract
1953
+ # subclasses are.
1954
+
1955
+ class Numeric(Number):
1956
+ def __init__(self, value) -> None:
1957
+ self.value = value
1958
+
1959
+ def __int__(self) -> int:
1960
+ return self.value
1961
+
1962
+ num = Numeric(1)
1963
+ assert is_scalar(num)
1964
+
1965
+
1966
+ @pytest.mark.parametrize("unit", ["ms", "us", "ns"])
1967
+ def test_datetimeindex_from_empty_datetime64_array(unit):
1968
+ idx = DatetimeIndex(np.array([], dtype=f"datetime64[{unit}]"))
1969
+ assert len(idx) == 0
1970
+
1971
+
1972
+ def test_nan_to_nat_conversions():
1973
+ df = DataFrame(
1974
+ {"A": np.asarray(range(10), dtype="float64"), "B": Timestamp("20010101")}
1975
+ )
1976
+ df.iloc[3:6, :] = np.nan
1977
+ result = df.loc[4, "B"]
1978
+ assert result is pd.NaT
1979
+
1980
+ s = df["B"].copy()
1981
+ s[8:9] = np.nan
1982
+ assert s[8] is pd.NaT
1983
+
1984
+
1985
+ @pytest.mark.filterwarnings("ignore::PendingDeprecationWarning")
1986
+ def test_is_scipy_sparse(spmatrix):
1987
+ pytest.importorskip("scipy")
1988
+ assert is_scipy_sparse(spmatrix([[0, 1]]))
1989
+ assert not is_scipy_sparse(np.array([1]))
1990
+
1991
+
1992
+ def test_ensure_int32():
1993
+ values = np.arange(10, dtype=np.int32)
1994
+ result = ensure_int32(values)
1995
+ assert result.dtype == np.int32
1996
+
1997
+ values = np.arange(10, dtype=np.int64)
1998
+ result = ensure_int32(values)
1999
+ assert result.dtype == np.int32
2000
+
2001
+
2002
+ @pytest.mark.parametrize(
2003
+ "right,result",
2004
+ [
2005
+ (0, np.uint8),
2006
+ (-1, np.int16),
2007
+ (300, np.uint16),
2008
+ # For floats, we just upcast directly to float64 instead of trying to
2009
+ # find a smaller floating dtype
2010
+ (300.0, np.uint16), # for integer floats, we convert them to ints
2011
+ (300.1, np.float64),
2012
+ (np.int16(300), np.int16 if np_version_gt2 else np.uint16),
2013
+ ],
2014
+ )
2015
+ def test_find_result_type_uint_int(right, result):
2016
+ left_dtype = np.dtype("uint8")
2017
+ assert find_result_type(left_dtype, right) == result
2018
+
2019
+
2020
+ @pytest.mark.parametrize(
2021
+ "right,result",
2022
+ [
2023
+ (0, np.int8),
2024
+ (-1, np.int8),
2025
+ (300, np.int16),
2026
+ # For floats, we just upcast directly to float64 instead of trying to
2027
+ # find a smaller floating dtype
2028
+ (300.0, np.int16), # for integer floats, we convert them to ints
2029
+ (300.1, np.float64),
2030
+ (np.int16(300), np.int16),
2031
+ ],
2032
+ )
2033
+ def test_find_result_type_int_int(right, result):
2034
+ left_dtype = np.dtype("int8")
2035
+ assert find_result_type(left_dtype, right) == result
2036
+
2037
+
2038
+ @pytest.mark.parametrize(
2039
+ "right,result",
2040
+ [
2041
+ (300.0, np.float64),
2042
+ (np.float32(300), np.float32),
2043
+ ],
2044
+ )
2045
+ def test_find_result_type_floats(right, result):
2046
+ left_dtype = np.dtype("float16")
2047
+ assert find_result_type(left_dtype, right) == result
venv/lib/python3.10/site-packages/pandas/tests/dtypes/test_missing.py ADDED
@@ -0,0 +1,923 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import nullcontext
2
+ from datetime import datetime
3
+ from decimal import Decimal
4
+
5
+ import numpy as np
6
+ import pytest
7
+
8
+ from pandas._config import config as cf
9
+
10
+ from pandas._libs import missing as libmissing
11
+ from pandas._libs.tslibs import iNaT
12
+ from pandas.compat.numpy import np_version_gte1p25
13
+
14
+ from pandas.core.dtypes.common import (
15
+ is_float,
16
+ is_scalar,
17
+ pandas_dtype,
18
+ )
19
+ from pandas.core.dtypes.dtypes import (
20
+ CategoricalDtype,
21
+ DatetimeTZDtype,
22
+ IntervalDtype,
23
+ PeriodDtype,
24
+ )
25
+ from pandas.core.dtypes.missing import (
26
+ array_equivalent,
27
+ is_valid_na_for_dtype,
28
+ isna,
29
+ isnull,
30
+ na_value_for_dtype,
31
+ notna,
32
+ notnull,
33
+ )
34
+
35
+ import pandas as pd
36
+ from pandas import (
37
+ DatetimeIndex,
38
+ Index,
39
+ NaT,
40
+ Series,
41
+ TimedeltaIndex,
42
+ date_range,
43
+ period_range,
44
+ )
45
+ import pandas._testing as tm
46
+
47
+ fix_now = pd.Timestamp("2021-01-01")
48
+ fix_utcnow = pd.Timestamp("2021-01-01", tz="UTC")
49
+
50
+
51
+ @pytest.mark.parametrize("notna_f", [notna, notnull])
52
+ def test_notna_notnull(notna_f):
53
+ assert notna_f(1.0)
54
+ assert not notna_f(None)
55
+ assert not notna_f(np.nan)
56
+
57
+ msg = "use_inf_as_na option is deprecated"
58
+ with tm.assert_produces_warning(FutureWarning, match=msg):
59
+ with cf.option_context("mode.use_inf_as_na", False):
60
+ assert notna_f(np.inf)
61
+ assert notna_f(-np.inf)
62
+
63
+ arr = np.array([1.5, np.inf, 3.5, -np.inf])
64
+ result = notna_f(arr)
65
+ assert result.all()
66
+
67
+ with tm.assert_produces_warning(FutureWarning, match=msg):
68
+ with cf.option_context("mode.use_inf_as_na", True):
69
+ assert not notna_f(np.inf)
70
+ assert not notna_f(-np.inf)
71
+
72
+ arr = np.array([1.5, np.inf, 3.5, -np.inf])
73
+ result = notna_f(arr)
74
+ assert result.sum() == 2
75
+
76
+
77
+ @pytest.mark.parametrize("null_func", [notna, notnull, isna, isnull])
78
+ @pytest.mark.parametrize(
79
+ "ser",
80
+ [
81
+ Series(
82
+ [str(i) for i in range(5)],
83
+ index=Index([str(i) for i in range(5)], dtype=object),
84
+ dtype=object,
85
+ ),
86
+ Series(range(5), date_range("2020-01-01", periods=5)),
87
+ Series(range(5), period_range("2020-01-01", periods=5)),
88
+ ],
89
+ )
90
+ def test_null_check_is_series(null_func, ser):
91
+ msg = "use_inf_as_na option is deprecated"
92
+ with tm.assert_produces_warning(FutureWarning, match=msg):
93
+ with cf.option_context("mode.use_inf_as_na", False):
94
+ assert isinstance(null_func(ser), Series)
95
+
96
+
97
+ class TestIsNA:
98
+ def test_0d_array(self):
99
+ assert isna(np.array(np.nan))
100
+ assert not isna(np.array(0.0))
101
+ assert not isna(np.array(0))
102
+ # test object dtype
103
+ assert isna(np.array(np.nan, dtype=object))
104
+ assert not isna(np.array(0.0, dtype=object))
105
+ assert not isna(np.array(0, dtype=object))
106
+
107
+ @pytest.mark.parametrize("shape", [(4, 0), (4,)])
108
+ def test_empty_object(self, shape):
109
+ arr = np.empty(shape=shape, dtype=object)
110
+ result = isna(arr)
111
+ expected = np.ones(shape=shape, dtype=bool)
112
+ tm.assert_numpy_array_equal(result, expected)
113
+
114
+ @pytest.mark.parametrize("isna_f", [isna, isnull])
115
+ def test_isna_isnull(self, isna_f):
116
+ assert not isna_f(1.0)
117
+ assert isna_f(None)
118
+ assert isna_f(np.nan)
119
+ assert float("nan")
120
+ assert not isna_f(np.inf)
121
+ assert not isna_f(-np.inf)
122
+
123
+ # type
124
+ assert not isna_f(type(Series(dtype=object)))
125
+ assert not isna_f(type(Series(dtype=np.float64)))
126
+ assert not isna_f(type(pd.DataFrame()))
127
+
128
+ @pytest.mark.parametrize("isna_f", [isna, isnull])
129
+ @pytest.mark.parametrize(
130
+ "data",
131
+ [
132
+ np.arange(4, dtype=float),
133
+ [0.0, 1.0, 0.0, 1.0],
134
+ Series(list("abcd"), dtype=object),
135
+ date_range("2020-01-01", periods=4),
136
+ ],
137
+ )
138
+ @pytest.mark.parametrize(
139
+ "index",
140
+ [
141
+ date_range("2020-01-01", periods=4),
142
+ range(4),
143
+ period_range("2020-01-01", periods=4),
144
+ ],
145
+ )
146
+ def test_isna_isnull_frame(self, isna_f, data, index):
147
+ # frame
148
+ df = pd.DataFrame(data, index=index)
149
+ result = isna_f(df)
150
+ expected = df.apply(isna_f)
151
+ tm.assert_frame_equal(result, expected)
152
+
153
+ def test_isna_lists(self):
154
+ result = isna([[False]])
155
+ exp = np.array([[False]])
156
+ tm.assert_numpy_array_equal(result, exp)
157
+
158
+ result = isna([[1], [2]])
159
+ exp = np.array([[False], [False]])
160
+ tm.assert_numpy_array_equal(result, exp)
161
+
162
+ # list of strings / unicode
163
+ result = isna(["foo", "bar"])
164
+ exp = np.array([False, False])
165
+ tm.assert_numpy_array_equal(result, exp)
166
+
167
+ result = isna(["foo", "bar"])
168
+ exp = np.array([False, False])
169
+ tm.assert_numpy_array_equal(result, exp)
170
+
171
+ # GH20675
172
+ result = isna([np.nan, "world"])
173
+ exp = np.array([True, False])
174
+ tm.assert_numpy_array_equal(result, exp)
175
+
176
+ def test_isna_nat(self):
177
+ result = isna([NaT])
178
+ exp = np.array([True])
179
+ tm.assert_numpy_array_equal(result, exp)
180
+
181
+ result = isna(np.array([NaT], dtype=object))
182
+ exp = np.array([True])
183
+ tm.assert_numpy_array_equal(result, exp)
184
+
185
+ def test_isna_numpy_nat(self):
186
+ arr = np.array(
187
+ [
188
+ NaT,
189
+ np.datetime64("NaT"),
190
+ np.timedelta64("NaT"),
191
+ np.datetime64("NaT", "s"),
192
+ ]
193
+ )
194
+ result = isna(arr)
195
+ expected = np.array([True] * 4)
196
+ tm.assert_numpy_array_equal(result, expected)
197
+
198
+ def test_isna_datetime(self):
199
+ assert not isna(datetime.now())
200
+ assert notna(datetime.now())
201
+
202
+ idx = date_range("1/1/1990", periods=20)
203
+ exp = np.ones(len(idx), dtype=bool)
204
+ tm.assert_numpy_array_equal(notna(idx), exp)
205
+
206
+ idx = np.asarray(idx)
207
+ idx[0] = iNaT
208
+ idx = DatetimeIndex(idx)
209
+ mask = isna(idx)
210
+ assert mask[0]
211
+ exp = np.array([True] + [False] * (len(idx) - 1), dtype=bool)
212
+ tm.assert_numpy_array_equal(mask, exp)
213
+
214
+ # GH 9129
215
+ pidx = idx.to_period(freq="M")
216
+ mask = isna(pidx)
217
+ assert mask[0]
218
+ exp = np.array([True] + [False] * (len(idx) - 1), dtype=bool)
219
+ tm.assert_numpy_array_equal(mask, exp)
220
+
221
+ mask = isna(pidx[1:])
222
+ exp = np.zeros(len(mask), dtype=bool)
223
+ tm.assert_numpy_array_equal(mask, exp)
224
+
225
+ def test_isna_old_datetimelike(self):
226
+ # isna_old should work for dt64tz, td64, and period, not just tznaive
227
+ dti = date_range("2016-01-01", periods=3)
228
+ dta = dti._data
229
+ dta[-1] = NaT
230
+ expected = np.array([False, False, True], dtype=bool)
231
+
232
+ objs = [dta, dta.tz_localize("US/Eastern"), dta - dta, dta.to_period("D")]
233
+
234
+ for obj in objs:
235
+ msg = "use_inf_as_na option is deprecated"
236
+ with tm.assert_produces_warning(FutureWarning, match=msg):
237
+ with cf.option_context("mode.use_inf_as_na", True):
238
+ result = isna(obj)
239
+
240
+ tm.assert_numpy_array_equal(result, expected)
241
+
242
+ @pytest.mark.parametrize(
243
+ "value, expected",
244
+ [
245
+ (np.complex128(np.nan), True),
246
+ (np.float64(1), False),
247
+ (np.array([1, 1 + 0j, np.nan, 3]), np.array([False, False, True, False])),
248
+ (
249
+ np.array([1, 1 + 0j, np.nan, 3], dtype=object),
250
+ np.array([False, False, True, False]),
251
+ ),
252
+ (
253
+ np.array([1, 1 + 0j, np.nan, 3]).astype(object),
254
+ np.array([False, False, True, False]),
255
+ ),
256
+ ],
257
+ )
258
+ def test_complex(self, value, expected):
259
+ result = isna(value)
260
+ if is_scalar(result):
261
+ assert result is expected
262
+ else:
263
+ tm.assert_numpy_array_equal(result, expected)
264
+
265
+ def test_datetime_other_units(self):
266
+ idx = DatetimeIndex(["2011-01-01", "NaT", "2011-01-02"])
267
+ exp = np.array([False, True, False])
268
+ tm.assert_numpy_array_equal(isna(idx), exp)
269
+ tm.assert_numpy_array_equal(notna(idx), ~exp)
270
+ tm.assert_numpy_array_equal(isna(idx.values), exp)
271
+ tm.assert_numpy_array_equal(notna(idx.values), ~exp)
272
+
273
+ @pytest.mark.parametrize(
274
+ "dtype",
275
+ [
276
+ "datetime64[D]",
277
+ "datetime64[h]",
278
+ "datetime64[m]",
279
+ "datetime64[s]",
280
+ "datetime64[ms]",
281
+ "datetime64[us]",
282
+ "datetime64[ns]",
283
+ ],
284
+ )
285
+ def test_datetime_other_units_astype(self, dtype):
286
+ idx = DatetimeIndex(["2011-01-01", "NaT", "2011-01-02"])
287
+ values = idx.values.astype(dtype)
288
+
289
+ exp = np.array([False, True, False])
290
+ tm.assert_numpy_array_equal(isna(values), exp)
291
+ tm.assert_numpy_array_equal(notna(values), ~exp)
292
+
293
+ exp = Series([False, True, False])
294
+ s = Series(values)
295
+ tm.assert_series_equal(isna(s), exp)
296
+ tm.assert_series_equal(notna(s), ~exp)
297
+ s = Series(values, dtype=object)
298
+ tm.assert_series_equal(isna(s), exp)
299
+ tm.assert_series_equal(notna(s), ~exp)
300
+
301
+ def test_timedelta_other_units(self):
302
+ idx = TimedeltaIndex(["1 days", "NaT", "2 days"])
303
+ exp = np.array([False, True, False])
304
+ tm.assert_numpy_array_equal(isna(idx), exp)
305
+ tm.assert_numpy_array_equal(notna(idx), ~exp)
306
+ tm.assert_numpy_array_equal(isna(idx.values), exp)
307
+ tm.assert_numpy_array_equal(notna(idx.values), ~exp)
308
+
309
+ @pytest.mark.parametrize(
310
+ "dtype",
311
+ [
312
+ "timedelta64[D]",
313
+ "timedelta64[h]",
314
+ "timedelta64[m]",
315
+ "timedelta64[s]",
316
+ "timedelta64[ms]",
317
+ "timedelta64[us]",
318
+ "timedelta64[ns]",
319
+ ],
320
+ )
321
+ def test_timedelta_other_units_dtype(self, dtype):
322
+ idx = TimedeltaIndex(["1 days", "NaT", "2 days"])
323
+ values = idx.values.astype(dtype)
324
+
325
+ exp = np.array([False, True, False])
326
+ tm.assert_numpy_array_equal(isna(values), exp)
327
+ tm.assert_numpy_array_equal(notna(values), ~exp)
328
+
329
+ exp = Series([False, True, False])
330
+ s = Series(values)
331
+ tm.assert_series_equal(isna(s), exp)
332
+ tm.assert_series_equal(notna(s), ~exp)
333
+ s = Series(values, dtype=object)
334
+ tm.assert_series_equal(isna(s), exp)
335
+ tm.assert_series_equal(notna(s), ~exp)
336
+
337
+ def test_period(self):
338
+ idx = pd.PeriodIndex(["2011-01", "NaT", "2012-01"], freq="M")
339
+ exp = np.array([False, True, False])
340
+ tm.assert_numpy_array_equal(isna(idx), exp)
341
+ tm.assert_numpy_array_equal(notna(idx), ~exp)
342
+
343
+ exp = Series([False, True, False])
344
+ s = Series(idx)
345
+ tm.assert_series_equal(isna(s), exp)
346
+ tm.assert_series_equal(notna(s), ~exp)
347
+ s = Series(idx, dtype=object)
348
+ tm.assert_series_equal(isna(s), exp)
349
+ tm.assert_series_equal(notna(s), ~exp)
350
+
351
+ def test_decimal(self):
352
+ # scalars GH#23530
353
+ a = Decimal(1.0)
354
+ assert isna(a) is False
355
+ assert notna(a) is True
356
+
357
+ b = Decimal("NaN")
358
+ assert isna(b) is True
359
+ assert notna(b) is False
360
+
361
+ # array
362
+ arr = np.array([a, b])
363
+ expected = np.array([False, True])
364
+ result = isna(arr)
365
+ tm.assert_numpy_array_equal(result, expected)
366
+
367
+ result = notna(arr)
368
+ tm.assert_numpy_array_equal(result, ~expected)
369
+
370
+ # series
371
+ ser = Series(arr)
372
+ expected = Series(expected)
373
+ result = isna(ser)
374
+ tm.assert_series_equal(result, expected)
375
+
376
+ result = notna(ser)
377
+ tm.assert_series_equal(result, ~expected)
378
+
379
+ # index
380
+ idx = Index(arr)
381
+ expected = np.array([False, True])
382
+ result = isna(idx)
383
+ tm.assert_numpy_array_equal(result, expected)
384
+
385
+ result = notna(idx)
386
+ tm.assert_numpy_array_equal(result, ~expected)
387
+
388
+
389
+ @pytest.mark.parametrize("dtype_equal", [True, False])
390
+ def test_array_equivalent(dtype_equal):
391
+ assert array_equivalent(
392
+ np.array([np.nan, np.nan]), np.array([np.nan, np.nan]), dtype_equal=dtype_equal
393
+ )
394
+ assert array_equivalent(
395
+ np.array([np.nan, 1, np.nan]),
396
+ np.array([np.nan, 1, np.nan]),
397
+ dtype_equal=dtype_equal,
398
+ )
399
+ assert array_equivalent(
400
+ np.array([np.nan, None], dtype="object"),
401
+ np.array([np.nan, None], dtype="object"),
402
+ dtype_equal=dtype_equal,
403
+ )
404
+ # Check the handling of nested arrays in array_equivalent_object
405
+ assert array_equivalent(
406
+ np.array([np.array([np.nan, None], dtype="object"), None], dtype="object"),
407
+ np.array([np.array([np.nan, None], dtype="object"), None], dtype="object"),
408
+ dtype_equal=dtype_equal,
409
+ )
410
+ assert array_equivalent(
411
+ np.array([np.nan, 1 + 1j], dtype="complex"),
412
+ np.array([np.nan, 1 + 1j], dtype="complex"),
413
+ dtype_equal=dtype_equal,
414
+ )
415
+ assert not array_equivalent(
416
+ np.array([np.nan, 1 + 1j], dtype="complex"),
417
+ np.array([np.nan, 1 + 2j], dtype="complex"),
418
+ dtype_equal=dtype_equal,
419
+ )
420
+ assert not array_equivalent(
421
+ np.array([np.nan, 1, np.nan]),
422
+ np.array([np.nan, 2, np.nan]),
423
+ dtype_equal=dtype_equal,
424
+ )
425
+ assert not array_equivalent(
426
+ np.array(["a", "b", "c", "d"]), np.array(["e", "e"]), dtype_equal=dtype_equal
427
+ )
428
+ assert array_equivalent(
429
+ Index([0, np.nan]), Index([0, np.nan]), dtype_equal=dtype_equal
430
+ )
431
+ assert not array_equivalent(
432
+ Index([0, np.nan]), Index([1, np.nan]), dtype_equal=dtype_equal
433
+ )
434
+
435
+
436
+ @pytest.mark.parametrize("dtype_equal", [True, False])
437
+ def test_array_equivalent_tdi(dtype_equal):
438
+ assert array_equivalent(
439
+ TimedeltaIndex([0, np.nan]),
440
+ TimedeltaIndex([0, np.nan]),
441
+ dtype_equal=dtype_equal,
442
+ )
443
+ assert not array_equivalent(
444
+ TimedeltaIndex([0, np.nan]),
445
+ TimedeltaIndex([1, np.nan]),
446
+ dtype_equal=dtype_equal,
447
+ )
448
+
449
+
450
+ @pytest.mark.parametrize("dtype_equal", [True, False])
451
+ def test_array_equivalent_dti(dtype_equal):
452
+ assert array_equivalent(
453
+ DatetimeIndex([0, np.nan]), DatetimeIndex([0, np.nan]), dtype_equal=dtype_equal
454
+ )
455
+ assert not array_equivalent(
456
+ DatetimeIndex([0, np.nan]), DatetimeIndex([1, np.nan]), dtype_equal=dtype_equal
457
+ )
458
+
459
+ dti1 = DatetimeIndex([0, np.nan], tz="US/Eastern")
460
+ dti2 = DatetimeIndex([0, np.nan], tz="CET")
461
+ dti3 = DatetimeIndex([1, np.nan], tz="US/Eastern")
462
+
463
+ assert array_equivalent(
464
+ dti1,
465
+ dti1,
466
+ dtype_equal=dtype_equal,
467
+ )
468
+ assert not array_equivalent(
469
+ dti1,
470
+ dti3,
471
+ dtype_equal=dtype_equal,
472
+ )
473
+ # The rest are not dtype_equal
474
+ assert not array_equivalent(DatetimeIndex([0, np.nan]), dti1)
475
+ assert array_equivalent(
476
+ dti2,
477
+ dti1,
478
+ )
479
+
480
+ assert not array_equivalent(DatetimeIndex([0, np.nan]), TimedeltaIndex([0, np.nan]))
481
+
482
+
483
+ @pytest.mark.parametrize(
484
+ "val", [1, 1.1, 1 + 1j, True, "abc", [1, 2], (1, 2), {1, 2}, {"a": 1}, None]
485
+ )
486
+ def test_array_equivalent_series(val):
487
+ arr = np.array([1, 2])
488
+ msg = "elementwise comparison failed"
489
+ cm = (
490
+ # stacklevel is chosen to make sense when called from .equals
491
+ tm.assert_produces_warning(FutureWarning, match=msg, check_stacklevel=False)
492
+ if isinstance(val, str) and not np_version_gte1p25
493
+ else nullcontext()
494
+ )
495
+ with cm:
496
+ assert not array_equivalent(Series([arr, arr]), Series([arr, val]))
497
+
498
+
499
+ def test_array_equivalent_array_mismatched_shape():
500
+ # to trigger the motivating bug, the first N elements of the arrays need
501
+ # to match
502
+ first = np.array([1, 2, 3])
503
+ second = np.array([1, 2])
504
+
505
+ left = Series([first, "a"], dtype=object)
506
+ right = Series([second, "a"], dtype=object)
507
+ assert not array_equivalent(left, right)
508
+
509
+
510
+ def test_array_equivalent_array_mismatched_dtype():
511
+ # same shape, different dtype can still be equivalent
512
+ first = np.array([1, 2], dtype=np.float64)
513
+ second = np.array([1, 2])
514
+
515
+ left = Series([first, "a"], dtype=object)
516
+ right = Series([second, "a"], dtype=object)
517
+ assert array_equivalent(left, right)
518
+
519
+
520
+ def test_array_equivalent_different_dtype_but_equal():
521
+ # Unclear if this is exposed anywhere in the public-facing API
522
+ assert array_equivalent(np.array([1, 2]), np.array([1.0, 2.0]))
523
+
524
+
525
+ @pytest.mark.parametrize(
526
+ "lvalue, rvalue",
527
+ [
528
+ # There are 3 variants for each of lvalue and rvalue. We include all
529
+ # three for the tz-naive `now` and exclude the datetim64 variant
530
+ # for utcnow because it drops tzinfo.
531
+ (fix_now, fix_utcnow),
532
+ (fix_now.to_datetime64(), fix_utcnow),
533
+ (fix_now.to_pydatetime(), fix_utcnow),
534
+ (fix_now, fix_utcnow),
535
+ (fix_now.to_datetime64(), fix_utcnow.to_pydatetime()),
536
+ (fix_now.to_pydatetime(), fix_utcnow.to_pydatetime()),
537
+ ],
538
+ )
539
+ def test_array_equivalent_tzawareness(lvalue, rvalue):
540
+ # we shouldn't raise if comparing tzaware and tznaive datetimes
541
+ left = np.array([lvalue], dtype=object)
542
+ right = np.array([rvalue], dtype=object)
543
+
544
+ assert not array_equivalent(left, right, strict_nan=True)
545
+ assert not array_equivalent(left, right, strict_nan=False)
546
+
547
+
548
+ def test_array_equivalent_compat():
549
+ # see gh-13388
550
+ m = np.array([(1, 2), (3, 4)], dtype=[("a", int), ("b", float)])
551
+ n = np.array([(1, 2), (3, 4)], dtype=[("a", int), ("b", float)])
552
+ assert array_equivalent(m, n, strict_nan=True)
553
+ assert array_equivalent(m, n, strict_nan=False)
554
+
555
+ m = np.array([(1, 2), (3, 4)], dtype=[("a", int), ("b", float)])
556
+ n = np.array([(1, 2), (4, 3)], dtype=[("a", int), ("b", float)])
557
+ assert not array_equivalent(m, n, strict_nan=True)
558
+ assert not array_equivalent(m, n, strict_nan=False)
559
+
560
+ m = np.array([(1, 2), (3, 4)], dtype=[("a", int), ("b", float)])
561
+ n = np.array([(1, 2), (3, 4)], dtype=[("b", int), ("a", float)])
562
+ assert not array_equivalent(m, n, strict_nan=True)
563
+ assert not array_equivalent(m, n, strict_nan=False)
564
+
565
+
566
+ @pytest.mark.parametrize("dtype", ["O", "S", "U"])
567
+ def test_array_equivalent_str(dtype):
568
+ assert array_equivalent(
569
+ np.array(["A", "B"], dtype=dtype), np.array(["A", "B"], dtype=dtype)
570
+ )
571
+ assert not array_equivalent(
572
+ np.array(["A", "B"], dtype=dtype), np.array(["A", "X"], dtype=dtype)
573
+ )
574
+
575
+
576
+ @pytest.mark.parametrize("strict_nan", [True, False])
577
+ def test_array_equivalent_nested(strict_nan):
578
+ # reached in groupby aggregations, make sure we use np.any when checking
579
+ # if the comparison is truthy
580
+ left = np.array([np.array([50, 70, 90]), np.array([20, 30])], dtype=object)
581
+ right = np.array([np.array([50, 70, 90]), np.array([20, 30])], dtype=object)
582
+
583
+ assert array_equivalent(left, right, strict_nan=strict_nan)
584
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
585
+
586
+ left = np.empty(2, dtype=object)
587
+ left[:] = [np.array([50, 70, 90]), np.array([20, 30, 40])]
588
+ right = np.empty(2, dtype=object)
589
+ right[:] = [np.array([50, 70, 90]), np.array([20, 30, 40])]
590
+ assert array_equivalent(left, right, strict_nan=strict_nan)
591
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
592
+
593
+ left = np.array([np.array([50, 50, 50]), np.array([40, 40])], dtype=object)
594
+ right = np.array([50, 40])
595
+ assert not array_equivalent(left, right, strict_nan=strict_nan)
596
+
597
+
598
+ @pytest.mark.filterwarnings("ignore:elementwise comparison failed:DeprecationWarning")
599
+ @pytest.mark.parametrize("strict_nan", [True, False])
600
+ def test_array_equivalent_nested2(strict_nan):
601
+ # more than one level of nesting
602
+ left = np.array(
603
+ [
604
+ np.array([np.array([50, 70]), np.array([90])], dtype=object),
605
+ np.array([np.array([20, 30])], dtype=object),
606
+ ],
607
+ dtype=object,
608
+ )
609
+ right = np.array(
610
+ [
611
+ np.array([np.array([50, 70]), np.array([90])], dtype=object),
612
+ np.array([np.array([20, 30])], dtype=object),
613
+ ],
614
+ dtype=object,
615
+ )
616
+ assert array_equivalent(left, right, strict_nan=strict_nan)
617
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
618
+
619
+ left = np.array([np.array([np.array([50, 50, 50])], dtype=object)], dtype=object)
620
+ right = np.array([50])
621
+ assert not array_equivalent(left, right, strict_nan=strict_nan)
622
+
623
+
624
+ @pytest.mark.parametrize("strict_nan", [True, False])
625
+ def test_array_equivalent_nested_list(strict_nan):
626
+ left = np.array([[50, 70, 90], [20, 30]], dtype=object)
627
+ right = np.array([[50, 70, 90], [20, 30]], dtype=object)
628
+
629
+ assert array_equivalent(left, right, strict_nan=strict_nan)
630
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
631
+
632
+ left = np.array([[50, 50, 50], [40, 40]], dtype=object)
633
+ right = np.array([50, 40])
634
+ assert not array_equivalent(left, right, strict_nan=strict_nan)
635
+
636
+
637
+ @pytest.mark.filterwarnings("ignore:elementwise comparison failed:DeprecationWarning")
638
+ @pytest.mark.xfail(reason="failing")
639
+ @pytest.mark.parametrize("strict_nan", [True, False])
640
+ def test_array_equivalent_nested_mixed_list(strict_nan):
641
+ # mixed arrays / lists in left and right
642
+ # https://github.com/pandas-dev/pandas/issues/50360
643
+ left = np.array([np.array([1, 2, 3]), np.array([4, 5])], dtype=object)
644
+ right = np.array([[1, 2, 3], [4, 5]], dtype=object)
645
+
646
+ assert array_equivalent(left, right, strict_nan=strict_nan)
647
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
648
+
649
+ # multiple levels of nesting
650
+ left = np.array(
651
+ [
652
+ np.array([np.array([1, 2, 3]), np.array([4, 5])], dtype=object),
653
+ np.array([np.array([6]), np.array([7, 8]), np.array([9])], dtype=object),
654
+ ],
655
+ dtype=object,
656
+ )
657
+ right = np.array([[[1, 2, 3], [4, 5]], [[6], [7, 8], [9]]], dtype=object)
658
+ assert array_equivalent(left, right, strict_nan=strict_nan)
659
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
660
+
661
+ # same-length lists
662
+ subarr = np.empty(2, dtype=object)
663
+ subarr[:] = [
664
+ np.array([None, "b"], dtype=object),
665
+ np.array(["c", "d"], dtype=object),
666
+ ]
667
+ left = np.array([subarr, None], dtype=object)
668
+ right = np.array([[[None, "b"], ["c", "d"]], None], dtype=object)
669
+ assert array_equivalent(left, right, strict_nan=strict_nan)
670
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
671
+
672
+
673
+ @pytest.mark.xfail(reason="failing")
674
+ @pytest.mark.parametrize("strict_nan", [True, False])
675
+ def test_array_equivalent_nested_dicts(strict_nan):
676
+ left = np.array([{"f1": 1, "f2": np.array(["a", "b"], dtype=object)}], dtype=object)
677
+ right = np.array(
678
+ [{"f1": 1, "f2": np.array(["a", "b"], dtype=object)}], dtype=object
679
+ )
680
+ assert array_equivalent(left, right, strict_nan=strict_nan)
681
+ assert not array_equivalent(left, right[::-1], strict_nan=strict_nan)
682
+
683
+ right2 = np.array([{"f1": 1, "f2": ["a", "b"]}], dtype=object)
684
+ assert array_equivalent(left, right2, strict_nan=strict_nan)
685
+ assert not array_equivalent(left, right2[::-1], strict_nan=strict_nan)
686
+
687
+
688
+ def test_array_equivalent_index_with_tuples():
689
+ # GH#48446
690
+ idx1 = Index(np.array([(pd.NA, 4), (1, 1)], dtype="object"))
691
+ idx2 = Index(np.array([(1, 1), (pd.NA, 4)], dtype="object"))
692
+ assert not array_equivalent(idx1, idx2)
693
+ assert not idx1.equals(idx2)
694
+ assert not array_equivalent(idx2, idx1)
695
+ assert not idx2.equals(idx1)
696
+
697
+ idx1 = Index(np.array([(4, pd.NA), (1, 1)], dtype="object"))
698
+ idx2 = Index(np.array([(1, 1), (4, pd.NA)], dtype="object"))
699
+ assert not array_equivalent(idx1, idx2)
700
+ assert not idx1.equals(idx2)
701
+ assert not array_equivalent(idx2, idx1)
702
+ assert not idx2.equals(idx1)
703
+
704
+
705
+ @pytest.mark.parametrize(
706
+ "dtype, na_value",
707
+ [
708
+ # Datetime-like
709
+ (np.dtype("M8[ns]"), np.datetime64("NaT", "ns")),
710
+ (np.dtype("m8[ns]"), np.timedelta64("NaT", "ns")),
711
+ (DatetimeTZDtype.construct_from_string("datetime64[ns, US/Eastern]"), NaT),
712
+ (PeriodDtype("M"), NaT),
713
+ # Integer
714
+ ("u1", 0),
715
+ ("u2", 0),
716
+ ("u4", 0),
717
+ ("u8", 0),
718
+ ("i1", 0),
719
+ ("i2", 0),
720
+ ("i4", 0),
721
+ ("i8", 0),
722
+ # Bool
723
+ ("bool", False),
724
+ # Float
725
+ ("f2", np.nan),
726
+ ("f4", np.nan),
727
+ ("f8", np.nan),
728
+ # Object
729
+ ("O", np.nan),
730
+ # Interval
731
+ (IntervalDtype(), np.nan),
732
+ ],
733
+ )
734
+ def test_na_value_for_dtype(dtype, na_value):
735
+ result = na_value_for_dtype(pandas_dtype(dtype))
736
+ # identify check doesn't work for datetime64/timedelta64("NaT") bc they
737
+ # are not singletons
738
+ assert result is na_value or (
739
+ isna(result) and isna(na_value) and type(result) is type(na_value)
740
+ )
741
+
742
+
743
+ class TestNAObj:
744
+ def _check_behavior(self, arr, expected):
745
+ result = libmissing.isnaobj(arr)
746
+ tm.assert_numpy_array_equal(result, expected)
747
+ result = libmissing.isnaobj(arr, inf_as_na=True)
748
+ tm.assert_numpy_array_equal(result, expected)
749
+
750
+ arr = np.atleast_2d(arr)
751
+ expected = np.atleast_2d(expected)
752
+
753
+ result = libmissing.isnaobj(arr)
754
+ tm.assert_numpy_array_equal(result, expected)
755
+ result = libmissing.isnaobj(arr, inf_as_na=True)
756
+ tm.assert_numpy_array_equal(result, expected)
757
+
758
+ # Test fortran order
759
+ arr = arr.copy(order="F")
760
+ result = libmissing.isnaobj(arr)
761
+ tm.assert_numpy_array_equal(result, expected)
762
+ result = libmissing.isnaobj(arr, inf_as_na=True)
763
+ tm.assert_numpy_array_equal(result, expected)
764
+
765
+ def test_basic(self):
766
+ arr = np.array([1, None, "foo", -5.1, NaT, np.nan])
767
+ expected = np.array([False, True, False, False, True, True])
768
+
769
+ self._check_behavior(arr, expected)
770
+
771
+ def test_non_obj_dtype(self):
772
+ arr = np.array([1, 3, np.nan, 5], dtype=float)
773
+ expected = np.array([False, False, True, False])
774
+
775
+ self._check_behavior(arr, expected)
776
+
777
+ def test_empty_arr(self):
778
+ arr = np.array([])
779
+ expected = np.array([], dtype=bool)
780
+
781
+ self._check_behavior(arr, expected)
782
+
783
+ def test_empty_str_inp(self):
784
+ arr = np.array([""]) # empty but not na
785
+ expected = np.array([False])
786
+
787
+ self._check_behavior(arr, expected)
788
+
789
+ def test_empty_like(self):
790
+ # see gh-13717: no segfaults!
791
+ arr = np.empty_like([None])
792
+ expected = np.array([True])
793
+
794
+ self._check_behavior(arr, expected)
795
+
796
+
797
+ m8_units = ["as", "ps", "ns", "us", "ms", "s", "m", "h", "D", "W", "M", "Y"]
798
+
799
+ na_vals = (
800
+ [
801
+ None,
802
+ NaT,
803
+ float("NaN"),
804
+ complex("NaN"),
805
+ np.nan,
806
+ np.float64("NaN"),
807
+ np.float32("NaN"),
808
+ np.complex64(np.nan),
809
+ np.complex128(np.nan),
810
+ np.datetime64("NaT"),
811
+ np.timedelta64("NaT"),
812
+ ]
813
+ + [np.datetime64("NaT", unit) for unit in m8_units]
814
+ + [np.timedelta64("NaT", unit) for unit in m8_units]
815
+ )
816
+
817
+ inf_vals = [
818
+ float("inf"),
819
+ float("-inf"),
820
+ complex("inf"),
821
+ complex("-inf"),
822
+ np.inf,
823
+ -np.inf,
824
+ ]
825
+
826
+ int_na_vals = [
827
+ # Values that match iNaT, which we treat as null in specific cases
828
+ np.int64(NaT._value),
829
+ int(NaT._value),
830
+ ]
831
+
832
+ sometimes_na_vals = [Decimal("NaN")]
833
+
834
+ never_na_vals = [
835
+ # float/complex values that when viewed as int64 match iNaT
836
+ -0.0,
837
+ np.float64("-0.0"),
838
+ -0j,
839
+ np.complex64(-0j),
840
+ ]
841
+
842
+
843
+ class TestLibMissing:
844
+ @pytest.mark.parametrize("func", [libmissing.checknull, isna])
845
+ @pytest.mark.parametrize(
846
+ "value", na_vals + sometimes_na_vals # type: ignore[operator]
847
+ )
848
+ def test_checknull_na_vals(self, func, value):
849
+ assert func(value)
850
+
851
+ @pytest.mark.parametrize("func", [libmissing.checknull, isna])
852
+ @pytest.mark.parametrize("value", inf_vals)
853
+ def test_checknull_inf_vals(self, func, value):
854
+ assert not func(value)
855
+
856
+ @pytest.mark.parametrize("func", [libmissing.checknull, isna])
857
+ @pytest.mark.parametrize("value", int_na_vals)
858
+ def test_checknull_intna_vals(self, func, value):
859
+ assert not func(value)
860
+
861
+ @pytest.mark.parametrize("func", [libmissing.checknull, isna])
862
+ @pytest.mark.parametrize("value", never_na_vals)
863
+ def test_checknull_never_na_vals(self, func, value):
864
+ assert not func(value)
865
+
866
+ @pytest.mark.parametrize(
867
+ "value", na_vals + sometimes_na_vals # type: ignore[operator]
868
+ )
869
+ def test_checknull_old_na_vals(self, value):
870
+ assert libmissing.checknull(value, inf_as_na=True)
871
+
872
+ @pytest.mark.parametrize("value", inf_vals)
873
+ def test_checknull_old_inf_vals(self, value):
874
+ assert libmissing.checknull(value, inf_as_na=True)
875
+
876
+ @pytest.mark.parametrize("value", int_na_vals)
877
+ def test_checknull_old_intna_vals(self, value):
878
+ assert not libmissing.checknull(value, inf_as_na=True)
879
+
880
+ @pytest.mark.parametrize("value", int_na_vals)
881
+ def test_checknull_old_never_na_vals(self, value):
882
+ assert not libmissing.checknull(value, inf_as_na=True)
883
+
884
+ def test_is_matching_na(self, nulls_fixture, nulls_fixture2):
885
+ left = nulls_fixture
886
+ right = nulls_fixture2
887
+
888
+ assert libmissing.is_matching_na(left, left)
889
+
890
+ if left is right:
891
+ assert libmissing.is_matching_na(left, right)
892
+ elif is_float(left) and is_float(right):
893
+ # np.nan vs float("NaN") we consider as matching
894
+ assert libmissing.is_matching_na(left, right)
895
+ elif type(left) is type(right):
896
+ # e.g. both Decimal("NaN")
897
+ assert libmissing.is_matching_na(left, right)
898
+ else:
899
+ assert not libmissing.is_matching_na(left, right)
900
+
901
+ def test_is_matching_na_nan_matches_none(self):
902
+ assert not libmissing.is_matching_na(None, np.nan)
903
+ assert not libmissing.is_matching_na(np.nan, None)
904
+
905
+ assert libmissing.is_matching_na(None, np.nan, nan_matches_none=True)
906
+ assert libmissing.is_matching_na(np.nan, None, nan_matches_none=True)
907
+
908
+
909
+ class TestIsValidNAForDtype:
910
+ def test_is_valid_na_for_dtype_interval(self):
911
+ dtype = IntervalDtype("int64", "left")
912
+ assert not is_valid_na_for_dtype(NaT, dtype)
913
+
914
+ dtype = IntervalDtype("datetime64[ns]", "both")
915
+ assert not is_valid_na_for_dtype(NaT, dtype)
916
+
917
+ def test_is_valid_na_for_dtype_categorical(self):
918
+ dtype = CategoricalDtype(categories=[0, 1, 2])
919
+ assert is_valid_na_for_dtype(np.nan, dtype)
920
+
921
+ assert not is_valid_na_for_dtype(NaT, dtype)
922
+ assert not is_valid_na_for_dtype(np.datetime64("NaT", "ns"), dtype)
923
+ assert not is_valid_na_for_dtype(np.timedelta64("NaT", "ns"), dtype)
venv/lib/python3.10/site-packages/pandas/tests/scalar/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (194 Bytes). View file
 
venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_arithmetic.cpython-310.pyc ADDED
Binary file (13.1 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_asfreq.cpython-310.pyc ADDED
Binary file (22.1 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/scalar/period/__pycache__/test_period.cpython-310.pyc ADDED
Binary file (33.7 kB). View file
 
venv/lib/python3.10/site-packages/pandas/tests/scalar/period/test_arithmetic.py ADDED
@@ -0,0 +1,486 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from datetime import timedelta
2
+
3
+ import numpy as np
4
+ import pytest
5
+
6
+ from pandas._libs.tslibs.period import IncompatibleFrequency
7
+
8
+ from pandas import (
9
+ NaT,
10
+ Period,
11
+ Timedelta,
12
+ Timestamp,
13
+ offsets,
14
+ )
15
+
16
+
17
+ class TestPeriodArithmetic:
18
+ def test_add_overflow_raises(self):
19
+ # GH#55503
20
+ per = Timestamp.max.to_period("ns")
21
+
22
+ msg = "|".join(
23
+ [
24
+ "Python int too large to convert to C long",
25
+ # windows, 32bit linux builds
26
+ "int too big to convert",
27
+ ]
28
+ )
29
+ with pytest.raises(OverflowError, match=msg):
30
+ per + 1
31
+
32
+ msg = "value too large"
33
+ with pytest.raises(OverflowError, match=msg):
34
+ per + Timedelta(1)
35
+ with pytest.raises(OverflowError, match=msg):
36
+ per + offsets.Nano(1)
37
+
38
+ def test_period_add_integer(self):
39
+ per1 = Period(freq="D", year=2008, month=1, day=1)
40
+ per2 = Period(freq="D", year=2008, month=1, day=2)
41
+ assert per1 + 1 == per2
42
+ assert 1 + per1 == per2
43
+
44
+ def test_period_add_invalid(self):
45
+ # GH#4731
46
+ per1 = Period(freq="D", year=2008, month=1, day=1)
47
+ per2 = Period(freq="D", year=2008, month=1, day=2)
48
+
49
+ msg = "|".join(
50
+ [
51
+ r"unsupported operand type\(s\)",
52
+ "can only concatenate str",
53
+ "must be str, not Period",
54
+ ]
55
+ )
56
+ with pytest.raises(TypeError, match=msg):
57
+ per1 + "str"
58
+ with pytest.raises(TypeError, match=msg):
59
+ "str" + per1
60
+ with pytest.raises(TypeError, match=msg):
61
+ per1 + per2
62
+
63
+ def test_period_sub_period_annual(self):
64
+ left, right = Period("2011", freq="Y"), Period("2007", freq="Y")
65
+ result = left - right
66
+ assert result == 4 * right.freq
67
+
68
+ msg = r"Input has different freq=M from Period\(freq=Y-DEC\)"
69
+ with pytest.raises(IncompatibleFrequency, match=msg):
70
+ left - Period("2007-01", freq="M")
71
+
72
+ def test_period_sub_period(self):
73
+ per1 = Period("2011-01-01", freq="D")
74
+ per2 = Period("2011-01-15", freq="D")
75
+
76
+ off = per1.freq
77
+ assert per1 - per2 == -14 * off
78
+ assert per2 - per1 == 14 * off
79
+
80
+ msg = r"Input has different freq=M from Period\(freq=D\)"
81
+ with pytest.raises(IncompatibleFrequency, match=msg):
82
+ per1 - Period("2011-02", freq="M")
83
+
84
+ @pytest.mark.parametrize("n", [1, 2, 3, 4])
85
+ def test_sub_n_gt_1_ticks(self, tick_classes, n):
86
+ # GH#23878
87
+ p1 = Period("19910905", freq=tick_classes(n))
88
+ p2 = Period("19920406", freq=tick_classes(n))
89
+
90
+ expected = Period(str(p2), freq=p2.freq.base) - Period(
91
+ str(p1), freq=p1.freq.base
92
+ )
93
+
94
+ assert (p2 - p1) == expected
95
+
96
+ @pytest.mark.parametrize("normalize", [True, False])
97
+ @pytest.mark.parametrize("n", [1, 2, 3, 4])
98
+ @pytest.mark.parametrize(
99
+ "offset, kwd_name",
100
+ [
101
+ (offsets.YearEnd, "month"),
102
+ (offsets.QuarterEnd, "startingMonth"),
103
+ (offsets.MonthEnd, None),
104
+ (offsets.Week, "weekday"),
105
+ ],
106
+ )
107
+ def test_sub_n_gt_1_offsets(self, offset, kwd_name, n, normalize):
108
+ # GH#23878
109
+ kwds = {kwd_name: 3} if kwd_name is not None else {}
110
+ p1_d = "19910905"
111
+ p2_d = "19920406"
112
+ p1 = Period(p1_d, freq=offset(n, normalize, **kwds))
113
+ p2 = Period(p2_d, freq=offset(n, normalize, **kwds))
114
+
115
+ expected = Period(p2_d, freq=p2.freq.base) - Period(p1_d, freq=p1.freq.base)
116
+
117
+ assert (p2 - p1) == expected
118
+
119
+ def test_period_add_offset(self):
120
+ # freq is DateOffset
121
+ for freq in ["Y", "2Y", "3Y"]:
122
+ per = Period("2011", freq=freq)
123
+ exp = Period("2013", freq=freq)
124
+ assert per + offsets.YearEnd(2) == exp
125
+ assert offsets.YearEnd(2) + per == exp
126
+
127
+ for off in [
128
+ offsets.YearBegin(2),
129
+ offsets.MonthBegin(1),
130
+ offsets.Minute(),
131
+ np.timedelta64(365, "D"),
132
+ timedelta(365),
133
+ ]:
134
+ msg = "Input has different freq|Input cannot be converted to Period"
135
+ with pytest.raises(IncompatibleFrequency, match=msg):
136
+ per + off
137
+ with pytest.raises(IncompatibleFrequency, match=msg):
138
+ off + per
139
+
140
+ for freq in ["M", "2M", "3M"]:
141
+ per = Period("2011-03", freq=freq)
142
+ exp = Period("2011-05", freq=freq)
143
+ assert per + offsets.MonthEnd(2) == exp
144
+ assert offsets.MonthEnd(2) + per == exp
145
+
146
+ exp = Period("2012-03", freq=freq)
147
+ assert per + offsets.MonthEnd(12) == exp
148
+ assert offsets.MonthEnd(12) + per == exp
149
+
150
+ msg = "|".join(
151
+ [
152
+ "Input has different freq",
153
+ "Input cannot be converted to Period",
154
+ ]
155
+ )
156
+
157
+ for off in [
158
+ offsets.YearBegin(2),
159
+ offsets.MonthBegin(1),
160
+ offsets.Minute(),
161
+ np.timedelta64(365, "D"),
162
+ timedelta(365),
163
+ ]:
164
+ with pytest.raises(IncompatibleFrequency, match=msg):
165
+ per + off
166
+ with pytest.raises(IncompatibleFrequency, match=msg):
167
+ off + per
168
+
169
+ # freq is Tick
170
+ for freq in ["D", "2D", "3D"]:
171
+ per = Period("2011-04-01", freq=freq)
172
+
173
+ exp = Period("2011-04-06", freq=freq)
174
+ assert per + offsets.Day(5) == exp
175
+ assert offsets.Day(5) + per == exp
176
+
177
+ exp = Period("2011-04-02", freq=freq)
178
+ assert per + offsets.Hour(24) == exp
179
+ assert offsets.Hour(24) + per == exp
180
+
181
+ exp = Period("2011-04-03", freq=freq)
182
+ assert per + np.timedelta64(2, "D") == exp
183
+ assert np.timedelta64(2, "D") + per == exp
184
+
185
+ exp = Period("2011-04-02", freq=freq)
186
+ assert per + np.timedelta64(3600 * 24, "s") == exp
187
+ assert np.timedelta64(3600 * 24, "s") + per == exp
188
+
189
+ exp = Period("2011-03-30", freq=freq)
190
+ assert per + timedelta(-2) == exp
191
+ assert timedelta(-2) + per == exp
192
+
193
+ exp = Period("2011-04-03", freq=freq)
194
+ assert per + timedelta(hours=48) == exp
195
+ assert timedelta(hours=48) + per == exp
196
+
197
+ msg = "|".join(
198
+ [
199
+ "Input has different freq",
200
+ "Input cannot be converted to Period",
201
+ ]
202
+ )
203
+
204
+ for off in [
205
+ offsets.YearBegin(2),
206
+ offsets.MonthBegin(1),
207
+ offsets.Minute(),
208
+ np.timedelta64(4, "h"),
209
+ timedelta(hours=23),
210
+ ]:
211
+ with pytest.raises(IncompatibleFrequency, match=msg):
212
+ per + off
213
+ with pytest.raises(IncompatibleFrequency, match=msg):
214
+ off + per
215
+
216
+ for freq in ["h", "2h", "3h"]:
217
+ per = Period("2011-04-01 09:00", freq=freq)
218
+
219
+ exp = Period("2011-04-03 09:00", freq=freq)
220
+ assert per + offsets.Day(2) == exp
221
+ assert offsets.Day(2) + per == exp
222
+
223
+ exp = Period("2011-04-01 12:00", freq=freq)
224
+ assert per + offsets.Hour(3) == exp
225
+ assert offsets.Hour(3) + per == exp
226
+
227
+ msg = "cannot use operands with types"
228
+ exp = Period("2011-04-01 12:00", freq=freq)
229
+ assert per + np.timedelta64(3, "h") == exp
230
+ assert np.timedelta64(3, "h") + per == exp
231
+
232
+ exp = Period("2011-04-01 10:00", freq=freq)
233
+ assert per + np.timedelta64(3600, "s") == exp
234
+ assert np.timedelta64(3600, "s") + per == exp
235
+
236
+ exp = Period("2011-04-01 11:00", freq=freq)
237
+ assert per + timedelta(minutes=120) == exp
238
+ assert timedelta(minutes=120) + per == exp
239
+
240
+ exp = Period("2011-04-05 12:00", freq=freq)
241
+ assert per + timedelta(days=4, minutes=180) == exp
242
+ assert timedelta(days=4, minutes=180) + per == exp
243
+
244
+ msg = "|".join(
245
+ [
246
+ "Input has different freq",
247
+ "Input cannot be converted to Period",
248
+ ]
249
+ )
250
+
251
+ for off in [
252
+ offsets.YearBegin(2),
253
+ offsets.MonthBegin(1),
254
+ offsets.Minute(),
255
+ np.timedelta64(3200, "s"),
256
+ timedelta(hours=23, minutes=30),
257
+ ]:
258
+ with pytest.raises(IncompatibleFrequency, match=msg):
259
+ per + off
260
+ with pytest.raises(IncompatibleFrequency, match=msg):
261
+ off + per
262
+
263
    def test_period_sub_offset(self):
        # Subtracting offsets/timedeltas from a Period: compatible offsets
        # shift the Period; incompatible ones raise IncompatibleFrequency.
        # Mirrors test_period_add_offset for the subtraction direction.

        # freq is DateOffset
        # Either error message may be produced depending on the offset type.
        msg = "|".join(
            [
                "Input has different freq",
                "Input cannot be converted to Period",
            ]
        )

        # Annual periods (multiples included): only YearEnd-style offsets work.
        for freq in ["Y", "2Y", "3Y"]:
            per = Period("2011", freq=freq)
            assert per - offsets.YearEnd(2) == Period("2009", freq=freq)

            for off in [
                offsets.YearBegin(2),
                offsets.MonthBegin(1),
                offsets.Minute(),
                np.timedelta64(365, "D"),
                timedelta(365),
            ]:
                with pytest.raises(IncompatibleFrequency, match=msg):
                    per - off

        # Monthly periods: MonthEnd offsets are compatible.
        for freq in ["M", "2M", "3M"]:
            per = Period("2011-03", freq=freq)
            assert per - offsets.MonthEnd(2) == Period("2011-01", freq=freq)
            assert per - offsets.MonthEnd(12) == Period("2010-03", freq=freq)

            for off in [
                offsets.YearBegin(2),
                offsets.MonthBegin(1),
                offsets.Minute(),
                np.timedelta64(365, "D"),
                timedelta(365),
            ]:
                with pytest.raises(IncompatibleFrequency, match=msg):
                    per - off

        # freq is Tick
        # Daily periods: any Tick/timedelta representing a whole number of
        # days is accepted; sub-daily remainders raise.
        for freq in ["D", "2D", "3D"]:
            per = Period("2011-04-01", freq=freq)
            assert per - offsets.Day(5) == Period("2011-03-27", freq=freq)
            assert per - offsets.Hour(24) == Period("2011-03-31", freq=freq)
            assert per - np.timedelta64(2, "D") == Period("2011-03-30", freq=freq)
            assert per - np.timedelta64(3600 * 24, "s") == Period(
                "2011-03-31", freq=freq
            )
            # Subtracting a negative timedelta moves the Period forward.
            assert per - timedelta(-2) == Period("2011-04-03", freq=freq)
            assert per - timedelta(hours=48) == Period("2011-03-30", freq=freq)

            for off in [
                offsets.YearBegin(2),
                offsets.MonthBegin(1),
                offsets.Minute(),
                np.timedelta64(4, "h"),
                timedelta(hours=23),
            ]:
                with pytest.raises(IncompatibleFrequency, match=msg):
                    per - off

        # Hourly periods: whole-hour Ticks/timedeltas are accepted.
        for freq in ["h", "2h", "3h"]:
            per = Period("2011-04-01 09:00", freq=freq)
            assert per - offsets.Day(2) == Period("2011-03-30 09:00", freq=freq)
            assert per - offsets.Hour(3) == Period("2011-04-01 06:00", freq=freq)
            assert per - np.timedelta64(3, "h") == Period("2011-04-01 06:00", freq=freq)
            assert per - np.timedelta64(3600, "s") == Period(
                "2011-04-01 08:00", freq=freq
            )
            assert per - timedelta(minutes=120) == Period("2011-04-01 07:00", freq=freq)
            assert per - timedelta(days=4, minutes=180) == Period(
                "2011-03-28 06:00", freq=freq
            )

            for off in [
                offsets.YearBegin(2),
                offsets.MonthBegin(1),
                offsets.Minute(),
                np.timedelta64(3200, "s"),
                timedelta(hours=23, minutes=30),
            ]:
                with pytest.raises(IncompatibleFrequency, match=msg):
                    per - off
346
+ @pytest.mark.parametrize("freq", ["M", "2M", "3M"])
347
+ def test_period_addsub_nat(self, freq):
348
+ # GH#13071
349
+ per = Period("2011-01", freq=freq)
350
+
351
+ # For subtraction, NaT is treated as another Period object
352
+ assert NaT - per is NaT
353
+ assert per - NaT is NaT
354
+
355
+ # For addition, NaT is treated as offset-like
356
+ assert NaT + per is NaT
357
+ assert per + NaT is NaT
358
+
359
+ @pytest.mark.parametrize("unit", ["ns", "us", "ms", "s", "m"])
360
+ def test_period_add_sub_td64_nat(self, unit):
361
+ # GH#47196
362
+ per = Period("2022-06-01", "D")
363
+ nat = np.timedelta64("NaT", unit)
364
+
365
+ assert per + nat is NaT
366
+ assert nat + per is NaT
367
+ assert per - nat is NaT
368
+
369
+ with pytest.raises(TypeError, match="unsupported operand"):
370
+ nat - per
371
+
372
+ def test_period_ops_offset(self):
373
+ per = Period("2011-04-01", freq="D")
374
+ result = per + offsets.Day()
375
+ exp = Period("2011-04-02", freq="D")
376
+ assert result == exp
377
+
378
+ result = per - offsets.Day(2)
379
+ exp = Period("2011-03-30", freq="D")
380
+ assert result == exp
381
+
382
+ msg = r"Input cannot be converted to Period\(freq=D\)"
383
+ with pytest.raises(IncompatibleFrequency, match=msg):
384
+ per + offsets.Hour(2)
385
+
386
+ with pytest.raises(IncompatibleFrequency, match=msg):
387
+ per - offsets.Hour(2)
388
+
389
+ def test_period_add_timestamp_raises(self):
390
+ # GH#17983
391
+ ts = Timestamp("2017")
392
+ per = Period("2017", freq="M")
393
+
394
+ msg = r"unsupported operand type\(s\) for \+: 'Timestamp' and 'Period'"
395
+ with pytest.raises(TypeError, match=msg):
396
+ ts + per
397
+
398
+ msg = r"unsupported operand type\(s\) for \+: 'Period' and 'Timestamp'"
399
+ with pytest.raises(TypeError, match=msg):
400
+ per + ts
401
+
402
+
403
class TestPeriodComparisons:
    """Comparison semantics of scalar Period objects.

    Covers ordering within one freq, equality across objects, mismatched
    freqs (ordering raises, equality is just False), invalid operand types,
    NaT behavior, and zero-dimensional numpy array operands.
    """

    def test_period_comparison_same_freq(self):
        # Ordering comparisons are well-defined when freqs match.
        jan = Period("2000-01", "M")
        feb = Period("2000-02", "M")

        assert not jan == feb
        assert jan != feb
        assert jan < feb
        assert jan <= feb
        assert not jan > feb
        assert not jan >= feb

    def test_period_comparison_same_period_different_object(self):
        # Separate Period objects for the same period
        # (equality is by value, not identity).
        left = Period("2000-01", "M")
        right = Period("2000-01", "M")

        assert left == right
        assert left >= right
        assert left <= right
        assert not left < right
        assert not left > right

    def test_period_comparison_mismatched_freq(self):
        # ==/!= never raise across freqs; ordering comparisons do.
        jan = Period("2000-01", "M")
        day = Period("2012-01-01", "D")

        assert not jan == day
        assert jan != day
        msg = r"Input has different freq=D from Period\(freq=M\)"
        with pytest.raises(IncompatibleFrequency, match=msg):
            jan < day
        with pytest.raises(IncompatibleFrequency, match=msg):
            jan <= day
        with pytest.raises(IncompatibleFrequency, match=msg):
            jan > day
        with pytest.raises(IncompatibleFrequency, match=msg):
            jan >= day

    def test_period_comparison_invalid_type(self):
        # Comparing against a plain int: equality is False, ordering raises
        # the standard Python TypeError in either operand order.
        jan = Period("2000-01", "M")

        assert not jan == 1
        assert jan != 1

        int_or_per = "'(Period|int)'"
        msg = f"not supported between instances of {int_or_per} and {int_or_per}"
        for left, right in [(jan, 1), (1, jan)]:
            with pytest.raises(TypeError, match=msg):
                left > right
            with pytest.raises(TypeError, match=msg):
                left >= right
            with pytest.raises(TypeError, match=msg):
                left < right
            with pytest.raises(TypeError, match=msg):
                left <= right

    def test_period_comparison_nat(self):
        # NaT compares False against Period for everything except !=.
        per = Period("2011-01-01", freq="D")

        ts = Timestamp("2011-01-01")
        # confirm Period('NaT') work identical with Timestamp('NaT')
        for left, right in [
            (NaT, per),
            (per, NaT),
            (NaT, ts),
            (ts, NaT),
        ]:
            assert not left < right
            assert not left > right
            assert not left == right
            assert left != right
            assert not left <= right
            assert not left >= right

    @pytest.mark.parametrize(
        "zerodim_arr, expected",
        ((np.array(0), False), (np.array(Period("2000-01", "M")), True)),
    )
    def test_period_comparison_numpy_zerodim_arr(self, zerodim_arr, expected):
        # 0-d object arrays are unboxed to their scalar before comparing.
        per = Period("2000-01", "M")

        assert (per == zerodim_arr) is expected
        assert (zerodim_arr == per) is expected
venv/lib/python3.10/site-packages/pandas/tests/scalar/period/test_asfreq.py ADDED
@@ -0,0 +1,828 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pytest
2
+
3
+ from pandas._libs.tslibs.period import INVALID_FREQ_ERR_MSG
4
+ from pandas.errors import OutOfBoundsDatetime
5
+
6
+ from pandas import (
7
+ Period,
8
+ Timestamp,
9
+ offsets,
10
+ )
11
+ import pandas._testing as tm
12
+
13
# FutureWarning text emitted whenever a Period is constructed or converted
# with the deprecated business-day ("B") frequency; reused as the `match`
# pattern throughout this module's tests.
bday_msg = "Period with BDay freq is deprecated"
+
15
+
16
+ class TestFreqConversion:
17
+ """Test frequency conversion of date objects"""
18
+
19
+ @pytest.mark.filterwarnings("ignore:Period with BDay:FutureWarning")
20
+ @pytest.mark.parametrize("freq", ["Y", "Q", "M", "W", "B", "D"])
21
+ def test_asfreq_near_zero(self, freq):
22
+ # GH#19643, GH#19650
23
+ per = Period("0001-01-01", freq=freq)
24
+ tup1 = (per.year, per.hour, per.day)
25
+
26
+ prev = per - 1
27
+ assert prev.ordinal == per.ordinal - 1
28
+ tup2 = (prev.year, prev.month, prev.day)
29
+ assert tup2 < tup1
30
+
31
+ def test_asfreq_near_zero_weekly(self):
32
+ # GH#19834
33
+ per1 = Period("0001-01-01", "D") + 6
34
+ per2 = Period("0001-01-01", "D") - 6
35
+ week1 = per1.asfreq("W")
36
+ week2 = per2.asfreq("W")
37
+ assert week1 != week2
38
+ assert week1.asfreq("D", "E") >= per1
39
+ assert week2.asfreq("D", "S") <= per2
40
+
41
    def test_to_timestamp_out_of_bounds(self):
        # GH#19643, used to incorrectly give Timestamp in 1754
        # Year 1 is far outside the ns-resolution Timestamp range, so the
        # conversion must raise instead of silently wrapping around.
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            per = Period("0001-01-01", freq="B")
        msg = "Out of bounds nanosecond timestamp"
        with pytest.raises(OutOfBoundsDatetime, match=msg):
            # The B-freq deprecation warning fires again on conversion.
            with tm.assert_produces_warning(FutureWarning, match=bday_msg):
                per.to_timestamp()
50
+ def test_asfreq_corner(self):
51
+ val = Period(freq="Y", year=2007)
52
+ result1 = val.asfreq("5min")
53
+ result2 = val.asfreq("min")
54
+ expected = Period("2007-12-31 23:59", freq="min")
55
+ assert result1.ordinal == expected.ordinal
56
+ assert result1.freqstr == "5min"
57
+ assert result2.ordinal == expected.ordinal
58
+ assert result2.freqstr == "min"
59
+
60
    def test_conv_annual(self):
        # frequency conversion tests: from Annual Frequency
        # Each `asfreq(target, how)` must land on the first ("s") or last
        # ("E") sub-period of the year; fiscal-year anchors (Y-JAN etc.)
        # shift the covered calendar range accordingly.

        ival_A = Period(freq="Y", year=2007)

        # Fiscal years ending in January/June/November.
        ival_AJAN = Period(freq="Y-JAN", year=2007)
        ival_AJUN = Period(freq="Y-JUN", year=2007)
        ival_ANOV = Period(freq="Y-NOV", year=2007)

        # Expected first/last sub-period of calendar year 2007 per target freq.
        ival_A_to_Q_start = Period(freq="Q", year=2007, quarter=1)
        ival_A_to_Q_end = Period(freq="Q", year=2007, quarter=4)
        ival_A_to_M_start = Period(freq="M", year=2007, month=1)
        ival_A_to_M_end = Period(freq="M", year=2007, month=12)
        ival_A_to_W_start = Period(freq="W", year=2007, month=1, day=1)
        ival_A_to_W_end = Period(freq="W", year=2007, month=12, day=31)
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_A_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_A_to_B_end = Period(freq="B", year=2007, month=12, day=31)
        ival_A_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_A_to_D_end = Period(freq="D", year=2007, month=12, day=31)
        ival_A_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_A_to_H_end = Period(freq="h", year=2007, month=12, day=31, hour=23)
        ival_A_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_A_to_T_end = Period(
            freq="Min", year=2007, month=12, day=31, hour=23, minute=59
        )
        ival_A_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_A_to_S_end = Period(
            freq="s", year=2007, month=12, day=31, hour=23, minute=59, second=59
        )

        # Fiscal year 2007 anchored on JAN spans 2006-02-01..2007-01-31, etc.
        ival_AJAN_to_D_end = Period(freq="D", year=2007, month=1, day=31)
        ival_AJAN_to_D_start = Period(freq="D", year=2006, month=2, day=1)
        ival_AJUN_to_D_end = Period(freq="D", year=2007, month=6, day=30)
        ival_AJUN_to_D_start = Period(freq="D", year=2006, month=7, day=1)
        ival_ANOV_to_D_end = Period(freq="D", year=2007, month=11, day=30)
        ival_ANOV_to_D_start = Period(freq="D", year=2006, month=12, day=1)

        # `how` is case-insensitive: "s"/"e"/"E"/"S" all accepted.
        assert ival_A.asfreq("Q", "s") == ival_A_to_Q_start
        assert ival_A.asfreq("Q", "e") == ival_A_to_Q_end
        assert ival_A.asfreq("M", "s") == ival_A_to_M_start
        assert ival_A.asfreq("M", "E") == ival_A_to_M_end
        assert ival_A.asfreq("W", "s") == ival_A_to_W_start
        assert ival_A.asfreq("W", "E") == ival_A_to_W_end
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_A.asfreq("B", "s") == ival_A_to_B_start
            assert ival_A.asfreq("B", "E") == ival_A_to_B_end
        assert ival_A.asfreq("D", "s") == ival_A_to_D_start
        assert ival_A.asfreq("D", "E") == ival_A_to_D_end
        # Legacy uppercase aliases still work but warn (GH deprecations).
        msg = "'H' is deprecated and will be removed in a future version."
        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert ival_A.asfreq("H", "s") == ival_A_to_H_start
            assert ival_A.asfreq("H", "E") == ival_A_to_H_end
        assert ival_A.asfreq("min", "s") == ival_A_to_T_start
        assert ival_A.asfreq("min", "E") == ival_A_to_T_end
        msg = "'T' is deprecated and will be removed in a future version."
        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert ival_A.asfreq("T", "s") == ival_A_to_T_start
            assert ival_A.asfreq("T", "E") == ival_A_to_T_end
        msg = "'S' is deprecated and will be removed in a future version."
        with tm.assert_produces_warning(FutureWarning, match=msg):
            assert ival_A.asfreq("S", "S") == ival_A_to_S_start
            assert ival_A.asfreq("S", "E") == ival_A_to_S_end

        assert ival_AJAN.asfreq("D", "s") == ival_AJAN_to_D_start
        assert ival_AJAN.asfreq("D", "E") == ival_AJAN_to_D_end

        assert ival_AJUN.asfreq("D", "s") == ival_AJUN_to_D_start
        assert ival_AJUN.asfreq("D", "E") == ival_AJUN_to_D_end

        assert ival_ANOV.asfreq("D", "s") == ival_ANOV_to_D_start
        assert ival_ANOV.asfreq("D", "E") == ival_ANOV_to_D_end

        # Identity conversion is a no-op.
        assert ival_A.asfreq("Y") == ival_A
139
    def test_conv_quarterly(self):
        # frequency conversion tests: from Quarterly Frequency
        # Q1-2007 converted to finer freqs must span Jan 1 .. Mar 31 2007;
        # fiscal anchors (Q-JAN, Q-JUN) shift the covered range.

        ival_Q = Period(freq="Q", year=2007, quarter=1)
        ival_Q_end_of_year = Period(freq="Q", year=2007, quarter=4)

        ival_QEJAN = Period(freq="Q-JAN", year=2007, quarter=1)
        ival_QEJUN = Period(freq="Q-JUN", year=2007, quarter=1)

        ival_Q_to_A = Period(freq="Y", year=2007)
        ival_Q_to_M_start = Period(freq="M", year=2007, month=1)
        ival_Q_to_M_end = Period(freq="M", year=2007, month=3)
        ival_Q_to_W_start = Period(freq="W", year=2007, month=1, day=1)
        ival_Q_to_W_end = Period(freq="W", year=2007, month=3, day=31)
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_Q_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_Q_to_B_end = Period(freq="B", year=2007, month=3, day=30)
        ival_Q_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_Q_to_D_end = Period(freq="D", year=2007, month=3, day=31)
        ival_Q_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_Q_to_H_end = Period(freq="h", year=2007, month=3, day=31, hour=23)
        ival_Q_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_Q_to_T_end = Period(
            freq="Min", year=2007, month=3, day=31, hour=23, minute=59
        )
        ival_Q_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_Q_to_S_end = Period(
            freq="s", year=2007, month=3, day=31, hour=23, minute=59, second=59
        )

        # Q1 of fiscal-2007 anchored on JAN covers Feb-Apr 2006, etc.
        ival_QEJAN_to_D_start = Period(freq="D", year=2006, month=2, day=1)
        ival_QEJAN_to_D_end = Period(freq="D", year=2006, month=4, day=30)

        ival_QEJUN_to_D_start = Period(freq="D", year=2006, month=7, day=1)
        ival_QEJUN_to_D_end = Period(freq="D", year=2006, month=9, day=30)

        # Coarsening: any quarter of 2007 maps to annual 2007.
        assert ival_Q.asfreq("Y") == ival_Q_to_A
        assert ival_Q_end_of_year.asfreq("Y") == ival_Q_to_A

        assert ival_Q.asfreq("M", "s") == ival_Q_to_M_start
        assert ival_Q.asfreq("M", "E") == ival_Q_to_M_end
        assert ival_Q.asfreq("W", "s") == ival_Q_to_W_start
        assert ival_Q.asfreq("W", "E") == ival_Q_to_W_end
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_Q.asfreq("B", "s") == ival_Q_to_B_start
            assert ival_Q.asfreq("B", "E") == ival_Q_to_B_end
        assert ival_Q.asfreq("D", "s") == ival_Q_to_D_start
        assert ival_Q.asfreq("D", "E") == ival_Q_to_D_end
        assert ival_Q.asfreq("h", "s") == ival_Q_to_H_start
        assert ival_Q.asfreq("h", "E") == ival_Q_to_H_end
        assert ival_Q.asfreq("Min", "s") == ival_Q_to_T_start
        assert ival_Q.asfreq("Min", "E") == ival_Q_to_T_end
        assert ival_Q.asfreq("s", "s") == ival_Q_to_S_start
        assert ival_Q.asfreq("s", "E") == ival_Q_to_S_end

        assert ival_QEJAN.asfreq("D", "s") == ival_QEJAN_to_D_start
        assert ival_QEJAN.asfreq("D", "E") == ival_QEJAN_to_D_end
        assert ival_QEJUN.asfreq("D", "s") == ival_QEJUN_to_D_start
        assert ival_QEJUN.asfreq("D", "E") == ival_QEJUN_to_D_end

        # Identity conversion is a no-op.
        assert ival_Q.asfreq("Q") == ival_Q
205
    def test_conv_monthly(self):
        # frequency conversion tests: from Monthly Frequency
        # Jan-2007 converted to finer freqs must span Jan 1 .. Jan 31 2007.

        ival_M = Period(freq="M", year=2007, month=1)
        ival_M_end_of_year = Period(freq="M", year=2007, month=12)
        ival_M_end_of_quarter = Period(freq="M", year=2007, month=3)
        ival_M_to_A = Period(freq="Y", year=2007)
        ival_M_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_M_to_W_start = Period(freq="W", year=2007, month=1, day=1)
        ival_M_to_W_end = Period(freq="W", year=2007, month=1, day=31)
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_M_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_M_to_B_end = Period(freq="B", year=2007, month=1, day=31)
        ival_M_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_M_to_D_end = Period(freq="D", year=2007, month=1, day=31)
        ival_M_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_M_to_H_end = Period(freq="h", year=2007, month=1, day=31, hour=23)
        ival_M_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_M_to_T_end = Period(
            freq="Min", year=2007, month=1, day=31, hour=23, minute=59
        )
        ival_M_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_M_to_S_end = Period(
            freq="s", year=2007, month=1, day=31, hour=23, minute=59, second=59
        )

        # Coarsening: any month of 2007 maps to annual 2007 / its quarter.
        assert ival_M.asfreq("Y") == ival_M_to_A
        assert ival_M_end_of_year.asfreq("Y") == ival_M_to_A
        assert ival_M.asfreq("Q") == ival_M_to_Q
        assert ival_M_end_of_quarter.asfreq("Q") == ival_M_to_Q

        assert ival_M.asfreq("W", "s") == ival_M_to_W_start
        assert ival_M.asfreq("W", "E") == ival_M_to_W_end
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_M.asfreq("B", "s") == ival_M_to_B_start
            assert ival_M.asfreq("B", "E") == ival_M_to_B_end
        assert ival_M.asfreq("D", "s") == ival_M_to_D_start
        assert ival_M.asfreq("D", "E") == ival_M_to_D_end
        assert ival_M.asfreq("h", "s") == ival_M_to_H_start
        assert ival_M.asfreq("h", "E") == ival_M_to_H_end
        assert ival_M.asfreq("Min", "s") == ival_M_to_T_start
        assert ival_M.asfreq("Min", "E") == ival_M_to_T_end
        assert ival_M.asfreq("s", "s") == ival_M_to_S_start
        assert ival_M.asfreq("s", "E") == ival_M_to_S_end

        # Identity conversion is a no-op.
        assert ival_M.asfreq("M") == ival_M
256
    def test_conv_weekly(self):
        # frequency conversion tests: from Weekly Frequency
        # Default W is W-SUN; anchored variants (W-SAT .. W-MON) shift the
        # 7-day window the period covers when converted to daily.
        ival_W = Period(freq="W", year=2007, month=1, day=1)

        ival_WSUN = Period(freq="W", year=2007, month=1, day=7)
        ival_WSAT = Period(freq="W-SAT", year=2007, month=1, day=6)
        ival_WFRI = Period(freq="W-FRI", year=2007, month=1, day=5)
        ival_WTHU = Period(freq="W-THU", year=2007, month=1, day=4)
        ival_WWED = Period(freq="W-WED", year=2007, month=1, day=3)
        ival_WTUE = Period(freq="W-TUE", year=2007, month=1, day=2)
        ival_WMON = Period(freq="W-MON", year=2007, month=1, day=1)

        # Expected daily bounds for each anchored week containing the
        # corresponding early-January date.
        ival_WSUN_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_WSUN_to_D_end = Period(freq="D", year=2007, month=1, day=7)
        ival_WSAT_to_D_start = Period(freq="D", year=2006, month=12, day=31)
        ival_WSAT_to_D_end = Period(freq="D", year=2007, month=1, day=6)
        ival_WFRI_to_D_start = Period(freq="D", year=2006, month=12, day=30)
        ival_WFRI_to_D_end = Period(freq="D", year=2007, month=1, day=5)
        ival_WTHU_to_D_start = Period(freq="D", year=2006, month=12, day=29)
        ival_WTHU_to_D_end = Period(freq="D", year=2007, month=1, day=4)
        ival_WWED_to_D_start = Period(freq="D", year=2006, month=12, day=28)
        ival_WWED_to_D_end = Period(freq="D", year=2007, month=1, day=3)
        ival_WTUE_to_D_start = Period(freq="D", year=2006, month=12, day=27)
        ival_WTUE_to_D_end = Period(freq="D", year=2007, month=1, day=2)
        ival_WMON_to_D_start = Period(freq="D", year=2006, month=12, day=26)
        ival_WMON_to_D_end = Period(freq="D", year=2007, month=1, day=1)

        ival_W_end_of_year = Period(freq="W", year=2007, month=12, day=31)
        ival_W_end_of_quarter = Period(freq="W", year=2007, month=3, day=31)
        ival_W_end_of_month = Period(freq="W", year=2007, month=1, day=31)
        ival_W_to_A = Period(freq="Y", year=2007)
        ival_W_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_W_to_M = Period(freq="M", year=2007, month=1)

        # Weekly periods are coarsened by their END date: the week holding
        # Dec 31 belongs to the next year unless Dec 31 is the week's last
        # day (Sunday for default W).  Same logic for quarter/month ends.
        if Period(freq="D", year=2007, month=12, day=31).weekday == 6:
            ival_W_to_A_end_of_year = Period(freq="Y", year=2007)
        else:
            ival_W_to_A_end_of_year = Period(freq="Y", year=2008)

        if Period(freq="D", year=2007, month=3, day=31).weekday == 6:
            ival_W_to_Q_end_of_quarter = Period(freq="Q", year=2007, quarter=1)
        else:
            ival_W_to_Q_end_of_quarter = Period(freq="Q", year=2007, quarter=2)

        if Period(freq="D", year=2007, month=1, day=31).weekday == 6:
            ival_W_to_M_end_of_month = Period(freq="M", year=2007, month=1)
        else:
            ival_W_to_M_end_of_month = Period(freq="M", year=2007, month=2)

        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_W_to_B_start = Period(freq="B", year=2007, month=1, day=1)
            ival_W_to_B_end = Period(freq="B", year=2007, month=1, day=5)
        ival_W_to_D_start = Period(freq="D", year=2007, month=1, day=1)
        ival_W_to_D_end = Period(freq="D", year=2007, month=1, day=7)
        ival_W_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_W_to_H_end = Period(freq="h", year=2007, month=1, day=7, hour=23)
        ival_W_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_W_to_T_end = Period(
            freq="Min", year=2007, month=1, day=7, hour=23, minute=59
        )
        ival_W_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_W_to_S_end = Period(
            freq="s", year=2007, month=1, day=7, hour=23, minute=59, second=59
        )

        assert ival_W.asfreq("Y") == ival_W_to_A
        assert ival_W_end_of_year.asfreq("Y") == ival_W_to_A_end_of_year

        assert ival_W.asfreq("Q") == ival_W_to_Q
        assert ival_W_end_of_quarter.asfreq("Q") == ival_W_to_Q_end_of_quarter

        assert ival_W.asfreq("M") == ival_W_to_M
        assert ival_W_end_of_month.asfreq("M") == ival_W_to_M_end_of_month

        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_W.asfreq("B", "s") == ival_W_to_B_start
            assert ival_W.asfreq("B", "E") == ival_W_to_B_end

        assert ival_W.asfreq("D", "s") == ival_W_to_D_start
        assert ival_W.asfreq("D", "E") == ival_W_to_D_end

        assert ival_WSUN.asfreq("D", "s") == ival_WSUN_to_D_start
        assert ival_WSUN.asfreq("D", "E") == ival_WSUN_to_D_end
        assert ival_WSAT.asfreq("D", "s") == ival_WSAT_to_D_start
        assert ival_WSAT.asfreq("D", "E") == ival_WSAT_to_D_end
        assert ival_WFRI.asfreq("D", "s") == ival_WFRI_to_D_start
        assert ival_WFRI.asfreq("D", "E") == ival_WFRI_to_D_end
        assert ival_WTHU.asfreq("D", "s") == ival_WTHU_to_D_start
        assert ival_WTHU.asfreq("D", "E") == ival_WTHU_to_D_end
        assert ival_WWED.asfreq("D", "s") == ival_WWED_to_D_start
        assert ival_WWED.asfreq("D", "E") == ival_WWED_to_D_end
        assert ival_WTUE.asfreq("D", "s") == ival_WTUE_to_D_start
        assert ival_WTUE.asfreq("D", "E") == ival_WTUE_to_D_end
        assert ival_WMON.asfreq("D", "s") == ival_WMON_to_D_start
        assert ival_WMON.asfreq("D", "E") == ival_WMON_to_D_end

        assert ival_W.asfreq("h", "s") == ival_W_to_H_start
        assert ival_W.asfreq("h", "E") == ival_W_to_H_end
        assert ival_W.asfreq("Min", "s") == ival_W_to_T_start
        assert ival_W.asfreq("Min", "E") == ival_W_to_T_end
        assert ival_W.asfreq("s", "s") == ival_W_to_S_start
        assert ival_W.asfreq("s", "E") == ival_W_to_S_end

        # Identity conversion is a no-op.
        assert ival_W.asfreq("W") == ival_W

        # "WK" was removed as a weekly alias and must be rejected.
        msg = INVALID_FREQ_ERR_MSG
        with pytest.raises(ValueError, match=msg):
            ival_W.asfreq("WK")
369
+ def test_conv_weekly_legacy(self):
370
+ # frequency conversion tests: from Weekly Frequency
371
+ msg = INVALID_FREQ_ERR_MSG
372
+ with pytest.raises(ValueError, match=msg):
373
+ Period(freq="WK", year=2007, month=1, day=1)
374
+
375
+ with pytest.raises(ValueError, match=msg):
376
+ Period(freq="WK-SAT", year=2007, month=1, day=6)
377
+ with pytest.raises(ValueError, match=msg):
378
+ Period(freq="WK-FRI", year=2007, month=1, day=5)
379
+ with pytest.raises(ValueError, match=msg):
380
+ Period(freq="WK-THU", year=2007, month=1, day=4)
381
+ with pytest.raises(ValueError, match=msg):
382
+ Period(freq="WK-WED", year=2007, month=1, day=3)
383
+ with pytest.raises(ValueError, match=msg):
384
+ Period(freq="WK-TUE", year=2007, month=1, day=2)
385
+ with pytest.raises(ValueError, match=msg):
386
+ Period(freq="WK-MON", year=2007, month=1, day=1)
387
+
388
    def test_conv_business(self):
        # frequency conversion tests: from Business Frequency
        # B-freq construction is deprecated, so every B Period is created
        # (and converted) inside an assert_produces_warning block.

        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_B = Period(freq="B", year=2007, month=1, day=1)
            ival_B_end_of_year = Period(freq="B", year=2007, month=12, day=31)
            ival_B_end_of_quarter = Period(freq="B", year=2007, month=3, day=30)
            ival_B_end_of_month = Period(freq="B", year=2007, month=1, day=31)
            ival_B_end_of_week = Period(freq="B", year=2007, month=1, day=5)

        ival_B_to_A = Period(freq="Y", year=2007)
        ival_B_to_Q = Period(freq="Q", year=2007, quarter=1)
        ival_B_to_M = Period(freq="M", year=2007, month=1)
        ival_B_to_W = Period(freq="W", year=2007, month=1, day=7)
        ival_B_to_D = Period(freq="D", year=2007, month=1, day=1)
        ival_B_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_B_to_H_end = Period(freq="h", year=2007, month=1, day=1, hour=23)
        ival_B_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_B_to_T_end = Period(
            freq="Min", year=2007, month=1, day=1, hour=23, minute=59
        )
        ival_B_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_B_to_S_end = Period(
            freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
        )

        # Coarsening: any business day of a span maps to that span.
        assert ival_B.asfreq("Y") == ival_B_to_A
        assert ival_B_end_of_year.asfreq("Y") == ival_B_to_A
        assert ival_B.asfreq("Q") == ival_B_to_Q
        assert ival_B_end_of_quarter.asfreq("Q") == ival_B_to_Q
        assert ival_B.asfreq("M") == ival_B_to_M
        assert ival_B_end_of_month.asfreq("M") == ival_B_to_M
        assert ival_B.asfreq("W") == ival_B_to_W
        assert ival_B_end_of_week.asfreq("W") == ival_B_to_W

        assert ival_B.asfreq("D") == ival_B_to_D

        assert ival_B.asfreq("h", "s") == ival_B_to_H_start
        assert ival_B.asfreq("h", "E") == ival_B_to_H_end
        assert ival_B.asfreq("Min", "s") == ival_B_to_T_start
        assert ival_B.asfreq("Min", "E") == ival_B_to_T_end
        assert ival_B.asfreq("s", "s") == ival_B_to_S_start
        assert ival_B.asfreq("s", "E") == ival_B_to_S_end

        # Identity conversion still warns because the target freq is B.
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_B.asfreq("B") == ival_B
439
    def test_conv_daily(self):
        # frequency conversion tests: from Daily Frequency
        # (original comment said "Business Frequency" — copy/paste slip)

        ival_D = Period(freq="D", year=2007, month=1, day=1)
        ival_D_end_of_year = Period(freq="D", year=2007, month=12, day=31)
        ival_D_end_of_quarter = Period(freq="D", year=2007, month=3, day=31)
        ival_D_end_of_month = Period(freq="D", year=2007, month=1, day=31)
        ival_D_end_of_week = Period(freq="D", year=2007, month=1, day=7)

        ival_D_friday = Period(freq="D", year=2007, month=1, day=5)
        ival_D_saturday = Period(freq="D", year=2007, month=1, day=6)
        ival_D_sunday = Period(freq="D", year=2007, month=1, day=7)

        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            ival_B_friday = Period(freq="B", year=2007, month=1, day=5)
            ival_B_monday = Period(freq="B", year=2007, month=1, day=8)

        ival_D_to_A = Period(freq="Y", year=2007)

        # 2007-03-31 falls in different fiscal years depending on anchor.
        ival_Deoq_to_AJAN = Period(freq="Y-JAN", year=2008)
        ival_Deoq_to_AJUN = Period(freq="Y-JUN", year=2007)
        ival_Deoq_to_ADEC = Period(freq="Y-DEC", year=2007)

        # Fiscal-quarter labels for 2007-01-01 under different anchors.
        ival_D_to_QEJAN = Period(freq="Q-JAN", year=2007, quarter=4)
        ival_D_to_QEJUN = Period(freq="Q-JUN", year=2007, quarter=3)
        ival_D_to_QEDEC = Period(freq="Q-DEC", year=2007, quarter=1)

        ival_D_to_M = Period(freq="M", year=2007, month=1)
        ival_D_to_W = Period(freq="W", year=2007, month=1, day=7)

        ival_D_to_H_start = Period(freq="h", year=2007, month=1, day=1, hour=0)
        ival_D_to_H_end = Period(freq="h", year=2007, month=1, day=1, hour=23)
        ival_D_to_T_start = Period(
            freq="Min", year=2007, month=1, day=1, hour=0, minute=0
        )
        ival_D_to_T_end = Period(
            freq="Min", year=2007, month=1, day=1, hour=23, minute=59
        )
        ival_D_to_S_start = Period(
            freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
        )
        ival_D_to_S_end = Period(
            freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
        )

        assert ival_D.asfreq("Y") == ival_D_to_A

        assert ival_D_end_of_quarter.asfreq("Y-JAN") == ival_Deoq_to_AJAN
        assert ival_D_end_of_quarter.asfreq("Y-JUN") == ival_Deoq_to_AJUN
        assert ival_D_end_of_quarter.asfreq("Y-DEC") == ival_Deoq_to_ADEC

        assert ival_D_end_of_year.asfreq("Y") == ival_D_to_A
        assert ival_D_end_of_quarter.asfreq("Q") == ival_D_to_QEDEC
        assert ival_D.asfreq("Q-JAN") == ival_D_to_QEJAN
        assert ival_D.asfreq("Q-JUN") == ival_D_to_QEJUN
        assert ival_D.asfreq("Q-DEC") == ival_D_to_QEDEC
        assert ival_D.asfreq("M") == ival_D_to_M
        assert ival_D_end_of_month.asfreq("M") == ival_D_to_M
        assert ival_D.asfreq("W") == ival_D_to_W
        assert ival_D_end_of_week.asfreq("W") == ival_D_to_W

        # Weekend days convert to the previous business day for how="s"
        # and the following business day for how="E".
        with tm.assert_produces_warning(FutureWarning, match=bday_msg):
            assert ival_D_friday.asfreq("B") == ival_B_friday
            assert ival_D_saturday.asfreq("B", "s") == ival_B_friday
            assert ival_D_saturday.asfreq("B", "E") == ival_B_monday
            assert ival_D_sunday.asfreq("B", "s") == ival_B_friday
            assert ival_D_sunday.asfreq("B", "E") == ival_B_monday

        assert ival_D.asfreq("h", "s") == ival_D_to_H_start
        assert ival_D.asfreq("h", "E") == ival_D_to_H_end
        assert ival_D.asfreq("Min", "s") == ival_D_to_T_start
        assert ival_D.asfreq("Min", "E") == ival_D_to_T_end
        assert ival_D.asfreq("s", "s") == ival_D_to_S_start
        assert ival_D.asfreq("s", "E") == ival_D_to_S_end

        # Identity conversion is a no-op.
        assert ival_D.asfreq("D") == ival_D
516
+ def test_conv_hourly(self):
517
+ # frequency conversion tests: from Hourly Frequency"
518
+
519
+ ival_H = Period(freq="h", year=2007, month=1, day=1, hour=0)
520
+ ival_H_end_of_year = Period(freq="h", year=2007, month=12, day=31, hour=23)
521
+ ival_H_end_of_quarter = Period(freq="h", year=2007, month=3, day=31, hour=23)
522
+ ival_H_end_of_month = Period(freq="h", year=2007, month=1, day=31, hour=23)
523
+ ival_H_end_of_week = Period(freq="h", year=2007, month=1, day=7, hour=23)
524
+ ival_H_end_of_day = Period(freq="h", year=2007, month=1, day=1, hour=23)
525
+ ival_H_end_of_bus = Period(freq="h", year=2007, month=1, day=1, hour=23)
526
+
527
+ ival_H_to_A = Period(freq="Y", year=2007)
528
+ ival_H_to_Q = Period(freq="Q", year=2007, quarter=1)
529
+ ival_H_to_M = Period(freq="M", year=2007, month=1)
530
+ ival_H_to_W = Period(freq="W", year=2007, month=1, day=7)
531
+ ival_H_to_D = Period(freq="D", year=2007, month=1, day=1)
532
+ with tm.assert_produces_warning(FutureWarning, match=bday_msg):
533
+ ival_H_to_B = Period(freq="B", year=2007, month=1, day=1)
534
+
535
+ ival_H_to_T_start = Period(
536
+ freq="Min", year=2007, month=1, day=1, hour=0, minute=0
537
+ )
538
+ ival_H_to_T_end = Period(
539
+ freq="Min", year=2007, month=1, day=1, hour=0, minute=59
540
+ )
541
+ ival_H_to_S_start = Period(
542
+ freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
543
+ )
544
+ ival_H_to_S_end = Period(
545
+ freq="s", year=2007, month=1, day=1, hour=0, minute=59, second=59
546
+ )
547
+
548
+ assert ival_H.asfreq("Y") == ival_H_to_A
549
+ assert ival_H_end_of_year.asfreq("Y") == ival_H_to_A
550
+ assert ival_H.asfreq("Q") == ival_H_to_Q
551
+ assert ival_H_end_of_quarter.asfreq("Q") == ival_H_to_Q
552
+ assert ival_H.asfreq("M") == ival_H_to_M
553
+ assert ival_H_end_of_month.asfreq("M") == ival_H_to_M
554
+ assert ival_H.asfreq("W") == ival_H_to_W
555
+ assert ival_H_end_of_week.asfreq("W") == ival_H_to_W
556
+ assert ival_H.asfreq("D") == ival_H_to_D
557
+ assert ival_H_end_of_day.asfreq("D") == ival_H_to_D
558
+ with tm.assert_produces_warning(FutureWarning, match=bday_msg):
559
+ assert ival_H.asfreq("B") == ival_H_to_B
560
+ assert ival_H_end_of_bus.asfreq("B") == ival_H_to_B
561
+
562
+ assert ival_H.asfreq("Min", "s") == ival_H_to_T_start
563
+ assert ival_H.asfreq("Min", "E") == ival_H_to_T_end
564
+ assert ival_H.asfreq("s", "s") == ival_H_to_S_start
565
+ assert ival_H.asfreq("s", "E") == ival_H_to_S_end
566
+
567
+ assert ival_H.asfreq("h") == ival_H
568
+
569
+ def test_conv_minutely(self):
570
+ # frequency conversion tests: from Minutely Frequency"
571
+
572
+ ival_T = Period(freq="Min", year=2007, month=1, day=1, hour=0, minute=0)
573
+ ival_T_end_of_year = Period(
574
+ freq="Min", year=2007, month=12, day=31, hour=23, minute=59
575
+ )
576
+ ival_T_end_of_quarter = Period(
577
+ freq="Min", year=2007, month=3, day=31, hour=23, minute=59
578
+ )
579
+ ival_T_end_of_month = Period(
580
+ freq="Min", year=2007, month=1, day=31, hour=23, minute=59
581
+ )
582
+ ival_T_end_of_week = Period(
583
+ freq="Min", year=2007, month=1, day=7, hour=23, minute=59
584
+ )
585
+ ival_T_end_of_day = Period(
586
+ freq="Min", year=2007, month=1, day=1, hour=23, minute=59
587
+ )
588
+ ival_T_end_of_bus = Period(
589
+ freq="Min", year=2007, month=1, day=1, hour=23, minute=59
590
+ )
591
+ ival_T_end_of_hour = Period(
592
+ freq="Min", year=2007, month=1, day=1, hour=0, minute=59
593
+ )
594
+
595
+ ival_T_to_A = Period(freq="Y", year=2007)
596
+ ival_T_to_Q = Period(freq="Q", year=2007, quarter=1)
597
+ ival_T_to_M = Period(freq="M", year=2007, month=1)
598
+ ival_T_to_W = Period(freq="W", year=2007, month=1, day=7)
599
+ ival_T_to_D = Period(freq="D", year=2007, month=1, day=1)
600
+ with tm.assert_produces_warning(FutureWarning, match=bday_msg):
601
+ ival_T_to_B = Period(freq="B", year=2007, month=1, day=1)
602
+ ival_T_to_H = Period(freq="h", year=2007, month=1, day=1, hour=0)
603
+
604
+ ival_T_to_S_start = Period(
605
+ freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0
606
+ )
607
+ ival_T_to_S_end = Period(
608
+ freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=59
609
+ )
610
+
611
+ assert ival_T.asfreq("Y") == ival_T_to_A
612
+ assert ival_T_end_of_year.asfreq("Y") == ival_T_to_A
613
+ assert ival_T.asfreq("Q") == ival_T_to_Q
614
+ assert ival_T_end_of_quarter.asfreq("Q") == ival_T_to_Q
615
+ assert ival_T.asfreq("M") == ival_T_to_M
616
+ assert ival_T_end_of_month.asfreq("M") == ival_T_to_M
617
+ assert ival_T.asfreq("W") == ival_T_to_W
618
+ assert ival_T_end_of_week.asfreq("W") == ival_T_to_W
619
+ assert ival_T.asfreq("D") == ival_T_to_D
620
+ assert ival_T_end_of_day.asfreq("D") == ival_T_to_D
621
+ with tm.assert_produces_warning(FutureWarning, match=bday_msg):
622
+ assert ival_T.asfreq("B") == ival_T_to_B
623
+ assert ival_T_end_of_bus.asfreq("B") == ival_T_to_B
624
+ assert ival_T.asfreq("h") == ival_T_to_H
625
+ assert ival_T_end_of_hour.asfreq("h") == ival_T_to_H
626
+
627
+ assert ival_T.asfreq("s", "s") == ival_T_to_S_start
628
+ assert ival_T.asfreq("s", "E") == ival_T_to_S_end
629
+
630
+ assert ival_T.asfreq("Min") == ival_T
631
+
632
+ def test_conv_secondly(self):
633
+ # frequency conversion tests: from Secondly Frequency"
634
+
635
+ ival_S = Period(freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=0)
636
+ ival_S_end_of_year = Period(
637
+ freq="s", year=2007, month=12, day=31, hour=23, minute=59, second=59
638
+ )
639
+ ival_S_end_of_quarter = Period(
640
+ freq="s", year=2007, month=3, day=31, hour=23, minute=59, second=59
641
+ )
642
+ ival_S_end_of_month = Period(
643
+ freq="s", year=2007, month=1, day=31, hour=23, minute=59, second=59
644
+ )
645
+ ival_S_end_of_week = Period(
646
+ freq="s", year=2007, month=1, day=7, hour=23, minute=59, second=59
647
+ )
648
+ ival_S_end_of_day = Period(
649
+ freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
650
+ )
651
+ ival_S_end_of_bus = Period(
652
+ freq="s", year=2007, month=1, day=1, hour=23, minute=59, second=59
653
+ )
654
+ ival_S_end_of_hour = Period(
655
+ freq="s", year=2007, month=1, day=1, hour=0, minute=59, second=59
656
+ )
657
+ ival_S_end_of_minute = Period(
658
+ freq="s", year=2007, month=1, day=1, hour=0, minute=0, second=59
659
+ )
660
+
661
+ ival_S_to_A = Period(freq="Y", year=2007)
662
+ ival_S_to_Q = Period(freq="Q", year=2007, quarter=1)
663
+ ival_S_to_M = Period(freq="M", year=2007, month=1)
664
+ ival_S_to_W = Period(freq="W", year=2007, month=1, day=7)
665
+ ival_S_to_D = Period(freq="D", year=2007, month=1, day=1)
666
+ with tm.assert_produces_warning(FutureWarning, match=bday_msg):
667
+ ival_S_to_B = Period(freq="B", year=2007, month=1, day=1)
668
+ ival_S_to_H = Period(freq="h", year=2007, month=1, day=1, hour=0)
669
+ ival_S_to_T = Period(freq="Min", year=2007, month=1, day=1, hour=0, minute=0)
670
+
671
+ assert ival_S.asfreq("Y") == ival_S_to_A
672
+ assert ival_S_end_of_year.asfreq("Y") == ival_S_to_A
673
+ assert ival_S.asfreq("Q") == ival_S_to_Q
674
+ assert ival_S_end_of_quarter.asfreq("Q") == ival_S_to_Q
675
+ assert ival_S.asfreq("M") == ival_S_to_M
676
+ assert ival_S_end_of_month.asfreq("M") == ival_S_to_M
677
+ assert ival_S.asfreq("W") == ival_S_to_W
678
+ assert ival_S_end_of_week.asfreq("W") == ival_S_to_W
679
+ assert ival_S.asfreq("D") == ival_S_to_D
680
+ assert ival_S_end_of_day.asfreq("D") == ival_S_to_D
681
+ with tm.assert_produces_warning(FutureWarning, match=bday_msg):
682
+ assert ival_S.asfreq("B") == ival_S_to_B
683
+ assert ival_S_end_of_bus.asfreq("B") == ival_S_to_B
684
+ assert ival_S.asfreq("h") == ival_S_to_H
685
+ assert ival_S_end_of_hour.asfreq("h") == ival_S_to_H
686
+ assert ival_S.asfreq("Min") == ival_S_to_T
687
+ assert ival_S_end_of_minute.asfreq("Min") == ival_S_to_T
688
+
689
+ assert ival_S.asfreq("s") == ival_S
690
+
691
+ def test_conv_microsecond(self):
692
+ # GH#31475 Avoid floating point errors dropping the start_time to
693
+ # before the beginning of the Period
694
+ per = Period("2020-01-30 15:57:27.576166", freq="us")
695
+ assert per.ordinal == 1580399847576166
696
+
697
+ start = per.start_time
698
+ expected = Timestamp("2020-01-30 15:57:27.576166")
699
+ assert start == expected
700
+ assert start._value == per.ordinal * 1000
701
+
702
+ per2 = Period("2300-01-01", "us")
703
+ msg = "2300-01-01"
704
+ with pytest.raises(OutOfBoundsDatetime, match=msg):
705
+ per2.start_time
706
+ with pytest.raises(OutOfBoundsDatetime, match=msg):
707
+ per2.end_time
708
+
709
+ def test_asfreq_mult(self):
710
+ # normal freq to mult freq
711
+ p = Period(freq="Y", year=2007)
712
+ # ordinal will not change
713
+ for freq in ["3Y", offsets.YearEnd(3)]:
714
+ result = p.asfreq(freq)
715
+ expected = Period("2007", freq="3Y")
716
+
717
+ assert result == expected
718
+ assert result.ordinal == expected.ordinal
719
+ assert result.freq == expected.freq
720
+ # ordinal will not change
721
+ for freq in ["3Y", offsets.YearEnd(3)]:
722
+ result = p.asfreq(freq, how="S")
723
+ expected = Period("2007", freq="3Y")
724
+
725
+ assert result == expected
726
+ assert result.ordinal == expected.ordinal
727
+ assert result.freq == expected.freq
728
+
729
+ # mult freq to normal freq
730
+ p = Period(freq="3Y", year=2007)
731
+ # ordinal will change because how=E is the default
732
+ for freq in ["Y", offsets.YearEnd()]:
733
+ result = p.asfreq(freq)
734
+ expected = Period("2009", freq="Y")
735
+
736
+ assert result == expected
737
+ assert result.ordinal == expected.ordinal
738
+ assert result.freq == expected.freq
739
+ # ordinal will not change
740
+ for freq in ["Y", offsets.YearEnd()]:
741
+ result = p.asfreq(freq, how="s")
742
+ expected = Period("2007", freq="Y")
743
+
744
+ assert result == expected
745
+ assert result.ordinal == expected.ordinal
746
+ assert result.freq == expected.freq
747
+
748
+ p = Period(freq="Y", year=2007)
749
+ for freq in ["2M", offsets.MonthEnd(2)]:
750
+ result = p.asfreq(freq)
751
+ expected = Period("2007-12", freq="2M")
752
+
753
+ assert result == expected
754
+ assert result.ordinal == expected.ordinal
755
+ assert result.freq == expected.freq
756
+ for freq in ["2M", offsets.MonthEnd(2)]:
757
+ result = p.asfreq(freq, how="s")
758
+ expected = Period("2007-01", freq="2M")
759
+
760
+ assert result == expected
761
+ assert result.ordinal == expected.ordinal
762
+ assert result.freq == expected.freq
763
+
764
+ p = Period(freq="3Y", year=2007)
765
+ for freq in ["2M", offsets.MonthEnd(2)]:
766
+ result = p.asfreq(freq)
767
+ expected = Period("2009-12", freq="2M")
768
+
769
+ assert result == expected
770
+ assert result.ordinal == expected.ordinal
771
+ assert result.freq == expected.freq
772
+ for freq in ["2M", offsets.MonthEnd(2)]:
773
+ result = p.asfreq(freq, how="s")
774
+ expected = Period("2007-01", freq="2M")
775
+
776
+ assert result == expected
777
+ assert result.ordinal == expected.ordinal
778
+ assert result.freq == expected.freq
779
+
780
+ def test_asfreq_combined(self):
781
+ # normal freq to combined freq
782
+ p = Period("2007", freq="h")
783
+
784
+ # ordinal will not change
785
+ expected = Period("2007", freq="25h")
786
+ for freq, how in zip(["1D1h", "1h1D"], ["E", "S"]):
787
+ result = p.asfreq(freq, how=how)
788
+ assert result == expected
789
+ assert result.ordinal == expected.ordinal
790
+ assert result.freq == expected.freq
791
+
792
+ # combined freq to normal freq
793
+ p1 = Period(freq="1D1h", year=2007)
794
+ p2 = Period(freq="1h1D", year=2007)
795
+
796
+ # ordinal will change because how=E is the default
797
+ result1 = p1.asfreq("h")
798
+ result2 = p2.asfreq("h")
799
+ expected = Period("2007-01-02", freq="h")
800
+ assert result1 == expected
801
+ assert result1.ordinal == expected.ordinal
802
+ assert result1.freq == expected.freq
803
+ assert result2 == expected
804
+ assert result2.ordinal == expected.ordinal
805
+ assert result2.freq == expected.freq
806
+
807
+ # ordinal will not change
808
+ result1 = p1.asfreq("h", how="S")
809
+ result2 = p2.asfreq("h", how="S")
810
+ expected = Period("2007-01-01", freq="h")
811
+ assert result1 == expected
812
+ assert result1.ordinal == expected.ordinal
813
+ assert result1.freq == expected.freq
814
+ assert result2 == expected
815
+ assert result2.ordinal == expected.ordinal
816
+ assert result2.freq == expected.freq
817
+
818
+ def test_asfreq_MS(self):
819
+ initial = Period("2013")
820
+
821
+ assert initial.asfreq(freq="M", how="S") == Period("2013-01", "M")
822
+
823
+ msg = "MS is not supported as period frequency"
824
+ with pytest.raises(ValueError, match=msg):
825
+ initial.asfreq(freq="MS", how="S")
826
+
827
+ with pytest.raises(ValueError, match=msg):
828
+ Period("2013-01", "MS")