Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes. See raw diff.
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_category.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_formats.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_indexing.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_reindex.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_append.py +62 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_fillna.py +54 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_indexing.py +420 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_map.py +144 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_setops.py +18 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_astype.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_constructors.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_equals.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_formats.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_indexing.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_interval.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_interval_range.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_interval_tree.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_join.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_pickle.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_setops.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_constructors.py +535 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_equals.py +36 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_formats.py +119 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_indexing.py +671 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_interval.py +918 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_interval_range.py +369 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_interval_tree.py +209 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_join.py +44 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_analytics.py +263 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_compat.py +122 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_drop.py +190 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_duplicates.py +363 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_formats.py +249 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_get_level_values.py +124 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_isin.py +103 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_monotonic.py +188 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_reshape.py +224 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_asfreq.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_astype.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_factorize.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_fillna.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_insert.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_is_full.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_repeat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_shift.cpython-310.pyc +0 -0
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (205 Bytes).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_category.cpython-310.pyc
ADDED
Binary file (11.5 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_formats.cpython-310.pyc
ADDED
Binary file (5.39 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_indexing.cpython-310.pyc
ADDED
Binary file (13 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/__pycache__/test_reindex.cpython-310.pyc
ADDED
Binary file (3.2 kB).
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_append.py
ADDED
@@ -0,0 +1,62 @@
import pytest

from pandas import (
    CategoricalIndex,
    Index,
)
import pandas._testing as tm


class TestAppend:
    @pytest.fixture
    def ci(self):
        categories = list("cab")
        return CategoricalIndex(list("aabbca"), categories=categories, ordered=False)

    def test_append(self, ci):
        # append cats with the same categories
        result = ci[:3].append(ci[3:])
        tm.assert_index_equal(result, ci, exact=True)

        foos = [ci[:1], ci[1:3], ci[3:]]
        result = foos[0].append(foos[1:])
        tm.assert_index_equal(result, ci, exact=True)

    def test_append_empty(self, ci):
        # empty
        result = ci.append([])
        tm.assert_index_equal(result, ci, exact=True)

    def test_append_mismatched_categories(self, ci):
        # appending with different categories or reordered is not ok
        msg = "all inputs must be Index"
        with pytest.raises(TypeError, match=msg):
            ci.append(ci.values.set_categories(list("abcd")))
        with pytest.raises(TypeError, match=msg):
            ci.append(ci.values.reorder_categories(list("abc")))

    def test_append_category_objects(self, ci):
        # with objects
        result = ci.append(Index(["c", "a"]))
        expected = CategoricalIndex(list("aabbcaca"), categories=ci.categories)
        tm.assert_index_equal(result, expected, exact=True)

    def test_append_non_categories(self, ci):
        # invalid objects -> cast to object via concat_compat
        result = ci.append(Index(["a", "d"]))
        expected = Index(["a", "a", "b", "b", "c", "a", "a", "d"])
        tm.assert_index_equal(result, expected, exact=True)

    def test_append_object(self, ci):
        # GH#14298 - if base object is not categorical -> coerce to object
        result = Index(["c", "a"]).append(ci)
        expected = Index(list("caaabbca"))
        tm.assert_index_equal(result, expected, exact=True)

    def test_append_to_another(self):
        # hits Index._concat
        fst = Index(["a", "b"])
        snd = CategoricalIndex(["d", "e"])
        result = fst.append(snd)
        expected = Index(["a", "b", "d", "e"])
        tm.assert_index_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_fillna.py
ADDED
@@ -0,0 +1,54 @@
import numpy as np
import pytest

from pandas import CategoricalIndex
import pandas._testing as tm


class TestFillNA:
    def test_fillna_categorical(self):
        # GH#11343
        idx = CategoricalIndex([1.0, np.nan, 3.0, 1.0], name="x")
        # fill by value in categories
        exp = CategoricalIndex([1.0, 1.0, 3.0, 1.0], name="x")
        tm.assert_index_equal(idx.fillna(1.0), exp)

        cat = idx._data

        # fill by value not in categories raises TypeError on EA, casts on CI
        msg = "Cannot setitem on a Categorical with a new category"
        with pytest.raises(TypeError, match=msg):
            cat.fillna(2.0)

        result = idx.fillna(2.0)
        expected = idx.astype(object).fillna(2.0)
        tm.assert_index_equal(result, expected)

    def test_fillna_copies_with_no_nas(self):
        # Nothing to fill, should still get a copy for the Categorical method,
        # but OK to get a view on CategoricalIndex method
        ci = CategoricalIndex([0, 1, 1])
        result = ci.fillna(0)
        assert result is not ci
        assert tm.shares_memory(result, ci)

        # But at the EA level we always get a copy.
        cat = ci._data
        result = cat.fillna(0)
        assert result._ndarray is not cat._ndarray
        assert result._ndarray.base is None
        assert not tm.shares_memory(result, cat)

    def test_fillna_validates_with_no_nas(self):
        # We validate the fill value even if fillna is a no-op
        ci = CategoricalIndex([2, 3, 3])
        cat = ci._data

        msg = "Cannot setitem on a Categorical with a new category"
        res = ci.fillna(False)
        # nothing to fill, so we dont cast
        tm.assert_index_equal(res, ci)

        # Same check directly on the Categorical
        with pytest.raises(TypeError, match=msg):
            cat.fillna(False)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_indexing.py
ADDED
@@ -0,0 +1,420 @@
import numpy as np
import pytest

from pandas.errors import InvalidIndexError

import pandas as pd
from pandas import (
    CategoricalIndex,
    Index,
    IntervalIndex,
    Timestamp,
)
import pandas._testing as tm


class TestTake:
    def test_take_fill_value(self):
        # GH 12631

        # numeric category
        idx = CategoricalIndex([1, 2, 3], name="xxx")
        result = idx.take(np.array([1, 0, -1]))
        expected = CategoricalIndex([2, 1, 3], name="xxx")
        tm.assert_index_equal(result, expected)
        tm.assert_categorical_equal(result.values, expected.values)

        # fill_value
        result = idx.take(np.array([1, 0, -1]), fill_value=True)
        expected = CategoricalIndex([2, 1, np.nan], categories=[1, 2, 3], name="xxx")
        tm.assert_index_equal(result, expected)
        tm.assert_categorical_equal(result.values, expected.values)

        # allow_fill=False
        result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True)
        expected = CategoricalIndex([2, 1, 3], name="xxx")
        tm.assert_index_equal(result, expected)
        tm.assert_categorical_equal(result.values, expected.values)

        # object category
        idx = CategoricalIndex(
            list("CBA"), categories=list("ABC"), ordered=True, name="xxx"
        )
        result = idx.take(np.array([1, 0, -1]))
        expected = CategoricalIndex(
            list("BCA"), categories=list("ABC"), ordered=True, name="xxx"
        )
        tm.assert_index_equal(result, expected)
        tm.assert_categorical_equal(result.values, expected.values)

        # fill_value
        result = idx.take(np.array([1, 0, -1]), fill_value=True)
        expected = CategoricalIndex(
            ["B", "C", np.nan], categories=list("ABC"), ordered=True, name="xxx"
        )
        tm.assert_index_equal(result, expected)
        tm.assert_categorical_equal(result.values, expected.values)

        # allow_fill=False
        result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True)
        expected = CategoricalIndex(
            list("BCA"), categories=list("ABC"), ordered=True, name="xxx"
        )
        tm.assert_index_equal(result, expected)
        tm.assert_categorical_equal(result.values, expected.values)

        msg = (
            "When allow_fill=True and fill_value is not None, "
            "all indices must be >= -1"
        )
        with pytest.raises(ValueError, match=msg):
            idx.take(np.array([1, 0, -2]), fill_value=True)
        with pytest.raises(ValueError, match=msg):
            idx.take(np.array([1, 0, -5]), fill_value=True)

        msg = "index -5 is out of bounds for (axis 0 with )?size 3"
        with pytest.raises(IndexError, match=msg):
            idx.take(np.array([1, -5]))

    def test_take_fill_value_datetime(self):
        # datetime category
        idx = pd.DatetimeIndex(["2011-01-01", "2011-02-01", "2011-03-01"], name="xxx")
        idx = CategoricalIndex(idx)
        result = idx.take(np.array([1, 0, -1]))
        expected = pd.DatetimeIndex(
            ["2011-02-01", "2011-01-01", "2011-03-01"], name="xxx"
        )
        expected = CategoricalIndex(expected)
        tm.assert_index_equal(result, expected)

        # fill_value
        result = idx.take(np.array([1, 0, -1]), fill_value=True)
        expected = pd.DatetimeIndex(["2011-02-01", "2011-01-01", "NaT"], name="xxx")
        exp_cats = pd.DatetimeIndex(["2011-01-01", "2011-02-01", "2011-03-01"])
        expected = CategoricalIndex(expected, categories=exp_cats)
        tm.assert_index_equal(result, expected)

        # allow_fill=False
        result = idx.take(np.array([1, 0, -1]), allow_fill=False, fill_value=True)
        expected = pd.DatetimeIndex(
            ["2011-02-01", "2011-01-01", "2011-03-01"], name="xxx"
        )
        expected = CategoricalIndex(expected)
        tm.assert_index_equal(result, expected)

        msg = (
            "When allow_fill=True and fill_value is not None, "
            "all indices must be >= -1"
        )
        with pytest.raises(ValueError, match=msg):
            idx.take(np.array([1, 0, -2]), fill_value=True)
        with pytest.raises(ValueError, match=msg):
            idx.take(np.array([1, 0, -5]), fill_value=True)

        msg = "index -5 is out of bounds for (axis 0 with )?size 3"
        with pytest.raises(IndexError, match=msg):
            idx.take(np.array([1, -5]))

    def test_take_invalid_kwargs(self):
        idx = CategoricalIndex([1, 2, 3], name="foo")
        indices = [1, 0, -1]

        msg = r"take\(\) got an unexpected keyword argument 'foo'"
        with pytest.raises(TypeError, match=msg):
            idx.take(indices, foo=2)

        msg = "the 'out' parameter is not supported"
        with pytest.raises(ValueError, match=msg):
            idx.take(indices, out=indices)

        msg = "the 'mode' parameter is not supported"
        with pytest.raises(ValueError, match=msg):
            idx.take(indices, mode="clip")


class TestGetLoc:
    def test_get_loc(self):
        # GH 12531
        cidx1 = CategoricalIndex(list("abcde"), categories=list("edabc"))
        idx1 = Index(list("abcde"))
        assert cidx1.get_loc("a") == idx1.get_loc("a")
        assert cidx1.get_loc("e") == idx1.get_loc("e")

        for i in [cidx1, idx1]:
            with pytest.raises(KeyError, match="'NOT-EXIST'"):
                i.get_loc("NOT-EXIST")

        # non-unique
        cidx2 = CategoricalIndex(list("aacded"), categories=list("edabc"))
        idx2 = Index(list("aacded"))

        # results in bool array
        res = cidx2.get_loc("d")
        tm.assert_numpy_array_equal(res, idx2.get_loc("d"))
        tm.assert_numpy_array_equal(
            res, np.array([False, False, False, True, False, True])
        )
        # unique element results in scalar
        res = cidx2.get_loc("e")
        assert res == idx2.get_loc("e")
        assert res == 4

        for i in [cidx2, idx2]:
            with pytest.raises(KeyError, match="'NOT-EXIST'"):
                i.get_loc("NOT-EXIST")

        # non-unique, sliceable
        cidx3 = CategoricalIndex(list("aabbb"), categories=list("abc"))
        idx3 = Index(list("aabbb"))

        # results in slice
        res = cidx3.get_loc("a")
        assert res == idx3.get_loc("a")
        assert res == slice(0, 2, None)

        res = cidx3.get_loc("b")
        assert res == idx3.get_loc("b")
        assert res == slice(2, 5, None)

        for i in [cidx3, idx3]:
            with pytest.raises(KeyError, match="'c'"):
                i.get_loc("c")

    def test_get_loc_unique(self):
        cidx = CategoricalIndex(list("abc"))
        result = cidx.get_loc("b")
        assert result == 1

    def test_get_loc_monotonic_nonunique(self):
        cidx = CategoricalIndex(list("abbc"))
        result = cidx.get_loc("b")
        expected = slice(1, 3, None)
        assert result == expected

    def test_get_loc_nonmonotonic_nonunique(self):
        cidx = CategoricalIndex(list("abcb"))
        result = cidx.get_loc("b")
        expected = np.array([False, True, False, True], dtype=bool)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_loc_nan(self):
        # GH#41933
        ci = CategoricalIndex(["A", "B", np.nan])
        res = ci.get_loc(np.nan)

        assert res == 2


class TestGetIndexer:
    def test_get_indexer_base(self):
        # Determined by cat ordering.
        idx = CategoricalIndex(list("cab"), categories=list("cab"))
        expected = np.arange(len(idx), dtype=np.intp)

        actual = idx.get_indexer(idx)
        tm.assert_numpy_array_equal(expected, actual)

        with pytest.raises(ValueError, match="Invalid fill method"):
            idx.get_indexer(idx, method="invalid")

    def test_get_indexer_requires_unique(self):
        ci = CategoricalIndex(list("aabbca"), categories=list("cab"), ordered=False)
        oidx = Index(np.array(ci))

        msg = "Reindexing only valid with uniquely valued Index objects"

        for n in [1, 2, 5, len(ci)]:
            finder = oidx[np.random.default_rng(2).integers(0, len(ci), size=n)]

            with pytest.raises(InvalidIndexError, match=msg):
                ci.get_indexer(finder)

        # see gh-17323
        #
        # Even when indexer is equal to the
        # members in the index, we should
        # respect duplicates instead of taking
        # the fast-track path.
        for finder in [list("aabbca"), list("aababca")]:
            with pytest.raises(InvalidIndexError, match=msg):
                ci.get_indexer(finder)

    def test_get_indexer_non_unique(self):
        idx1 = CategoricalIndex(list("aabcde"), categories=list("edabc"))
        idx2 = CategoricalIndex(list("abf"))

        for indexer in [idx2, list("abf"), Index(list("abf"))]:
            msg = "Reindexing only valid with uniquely valued Index objects"
            with pytest.raises(InvalidIndexError, match=msg):
                idx1.get_indexer(indexer)

            r1, _ = idx1.get_indexer_non_unique(indexer)
            expected = np.array([0, 1, 2, -1], dtype=np.intp)
            tm.assert_almost_equal(r1, expected)

    def test_get_indexer_method(self):
        idx1 = CategoricalIndex(list("aabcde"), categories=list("edabc"))
        idx2 = CategoricalIndex(list("abf"))

        msg = "method pad not yet implemented for CategoricalIndex"
        with pytest.raises(NotImplementedError, match=msg):
            idx2.get_indexer(idx1, method="pad")
        msg = "method backfill not yet implemented for CategoricalIndex"
        with pytest.raises(NotImplementedError, match=msg):
            idx2.get_indexer(idx1, method="backfill")

        msg = "method nearest not yet implemented for CategoricalIndex"
        with pytest.raises(NotImplementedError, match=msg):
            idx2.get_indexer(idx1, method="nearest")

    def test_get_indexer_array(self):
        arr = np.array(
            [Timestamp("1999-12-31 00:00:00"), Timestamp("2000-12-31 00:00:00")],
            dtype=object,
        )
        cats = [Timestamp("1999-12-31 00:00:00"), Timestamp("2000-12-31 00:00:00")]
        ci = CategoricalIndex(cats, categories=cats, ordered=False, dtype="category")
        result = ci.get_indexer(arr)
        expected = np.array([0, 1], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_same_categories_same_order(self):
        ci = CategoricalIndex(["a", "b"], categories=["a", "b"])

        result = ci.get_indexer(CategoricalIndex(["b", "b"], categories=["a", "b"]))
        expected = np.array([1, 1], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_same_categories_different_order(self):
        # https://github.com/pandas-dev/pandas/issues/19551
        ci = CategoricalIndex(["a", "b"], categories=["a", "b"])

        result = ci.get_indexer(CategoricalIndex(["b", "b"], categories=["b", "a"]))
        expected = np.array([1, 1], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_nans_in_index_and_target(self):
        # GH 45361
        ci = CategoricalIndex([1, 2, np.nan, 3])
        other1 = [2, 3, 4, np.nan]
        res1 = ci.get_indexer(other1)
        expected1 = np.array([1, 3, -1, 2], dtype=np.intp)
        tm.assert_numpy_array_equal(res1, expected1)
        other2 = [1, 4, 2, 3]
        res2 = ci.get_indexer(other2)
        expected2 = np.array([0, -1, 1, 3], dtype=np.intp)
        tm.assert_numpy_array_equal(res2, expected2)


class TestWhere:
    def test_where(self, listlike_box):
        klass = listlike_box

        i = CategoricalIndex(list("aabbca"), categories=list("cab"), ordered=False)
        cond = [True] * len(i)
        expected = i
        result = i.where(klass(cond))
        tm.assert_index_equal(result, expected)

        cond = [False] + [True] * (len(i) - 1)
        expected = CategoricalIndex([np.nan] + i[1:].tolist(), categories=i.categories)
        result = i.where(klass(cond))
        tm.assert_index_equal(result, expected)

    def test_where_non_categories(self):
        ci = CategoricalIndex(["a", "b", "c", "d"])
        mask = np.array([True, False, True, False])

        result = ci.where(mask, 2)
        expected = Index(["a", 2, "c", 2], dtype=object)
        tm.assert_index_equal(result, expected)

        msg = "Cannot setitem on a Categorical with a new category"
        with pytest.raises(TypeError, match=msg):
            # Test the Categorical method directly
            ci._data._where(mask, 2)


class TestContains:
    def test_contains(self):
        ci = CategoricalIndex(list("aabbca"), categories=list("cabdef"), ordered=False)

        assert "a" in ci
        assert "z" not in ci
        assert "e" not in ci
        assert np.nan not in ci

        # assert codes NOT in index
        assert 0 not in ci
        assert 1 not in ci

    def test_contains_nan(self):
        ci = CategoricalIndex(list("aabbca") + [np.nan], categories=list("cabdef"))
        assert np.nan in ci

    @pytest.mark.parametrize("unwrap", [True, False])
    def test_contains_na_dtype(self, unwrap):
        dti = pd.date_range("2016-01-01", periods=100).insert(0, pd.NaT)
        pi = dti.to_period("D")
        tdi = dti - dti[-1]
        ci = CategoricalIndex(dti)

        obj = ci
        if unwrap:
            obj = ci._data

        assert np.nan in obj
        assert None in obj
        assert pd.NaT in obj
        assert np.datetime64("NaT") in obj
        assert np.timedelta64("NaT") not in obj

        obj2 = CategoricalIndex(tdi)
        if unwrap:
            obj2 = obj2._data

        assert np.nan in obj2
        assert None in obj2
        assert pd.NaT in obj2
        assert np.datetime64("NaT") not in obj2
        assert np.timedelta64("NaT") in obj2

        obj3 = CategoricalIndex(pi)
        if unwrap:
            obj3 = obj3._data

        assert np.nan in obj3
        assert None in obj3
        assert pd.NaT in obj3
        assert np.datetime64("NaT") not in obj3
        assert np.timedelta64("NaT") not in obj3

    @pytest.mark.parametrize(
        "item, expected",
        [
            (pd.Interval(0, 1), True),
            (1.5, True),
            (pd.Interval(0.5, 1.5), False),
            ("a", False),
            (Timestamp(1), False),
            (pd.Timedelta(1), False),
        ],
        ids=str,
    )
    def test_contains_interval(self, item, expected):
        # GH 23705
        ci = CategoricalIndex(IntervalIndex.from_breaks(range(3)))
        result = item in ci
        assert result is expected

    def test_contains_list(self):
        # GH#21729
        idx = CategoricalIndex([1, 2, 3])

        assert "a" not in idx

        with pytest.raises(TypeError, match="unhashable type"):
            ["a"] in idx

        with pytest.raises(TypeError, match="unhashable type"):
            ["a", "b"] in idx
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_map.py
ADDED
@@ -0,0 +1,144 @@
import numpy as np
import pytest

import pandas as pd
from pandas import (
    CategoricalIndex,
    Index,
    Series,
)
import pandas._testing as tm


@pytest.mark.parametrize(
    "data, categories",
    [
        (list("abcbca"), list("cab")),
        (pd.interval_range(0, 3).repeat(3), pd.interval_range(0, 3)),
    ],
    ids=["string", "interval"],
)
def test_map_str(data, categories, ordered):
    # GH 31202 - override base class since we want to maintain categorical/ordered
    index = CategoricalIndex(data, categories=categories, ordered=ordered)
    result = index.map(str)
    expected = CategoricalIndex(
        map(str, data), categories=map(str, categories), ordered=ordered
    )
    tm.assert_index_equal(result, expected)


def test_map():
    ci = CategoricalIndex(list("ABABC"), categories=list("CBA"), ordered=True)
    result = ci.map(lambda x: x.lower())
    exp = CategoricalIndex(list("ababc"), categories=list("cba"), ordered=True)
    tm.assert_index_equal(result, exp)

    ci = CategoricalIndex(
        list("ABABC"), categories=list("BAC"), ordered=False, name="XXX"
    )
    result = ci.map(lambda x: x.lower())
    exp = CategoricalIndex(
        list("ababc"), categories=list("bac"), ordered=False, name="XXX"
    )
    tm.assert_index_equal(result, exp)

    # GH 12766: Return an index not an array
    tm.assert_index_equal(
        ci.map(lambda x: 1), Index(np.array([1] * 5, dtype=np.int64), name="XXX")
    )

    # change categories dtype
    ci = CategoricalIndex(list("ABABC"), categories=list("BAC"), ordered=False)

    def f(x):
        return {"A": 10, "B": 20, "C": 30}.get(x)

    result = ci.map(f)
    exp = CategoricalIndex([10, 20, 10, 20, 30], categories=[20, 10, 30], ordered=False)
    tm.assert_index_equal(result, exp)

    result = ci.map(Series([10, 20, 30], index=["A", "B", "C"]))
    tm.assert_index_equal(result, exp)

    result = ci.map({"A": 10, "B": 20, "C": 30})
    tm.assert_index_equal(result, exp)


def test_map_with_categorical_series():
    # GH 12756
    a = Index([1, 2, 3, 4])
    b = Series(["even", "odd", "even", "odd"], dtype="category")
    c = Series(["even", "odd", "even", "odd"])

    exp = CategoricalIndex(["odd", "even", "odd", np.nan])
    tm.assert_index_equal(a.map(b), exp)
    exp = Index(["odd", "even", "odd", np.nan])
    tm.assert_index_equal(a.map(c), exp)


@pytest.mark.parametrize(
    ("data", "f", "expected"),
    (
        ([1, 1, np.nan], pd.isna, CategoricalIndex([False, False, np.nan])),
        ([1, 2, np.nan], pd.isna, Index([False, False, np.nan])),
        ([1, 1, np.nan], {1: False}, CategoricalIndex([False, False, np.nan])),
        ([1, 2, np.nan], {1: False, 2: False}, Index([False, False, np.nan])),
        (
            [1, 1, np.nan],
            Series([False, False]),
            CategoricalIndex([False, False, np.nan]),
        ),
        (
            [1, 2, np.nan],
            Series([False, False, False]),
            Index([False, False, np.nan]),
        ),
    ),
)
def test_map_with_nan_ignore(data, f, expected):  # GH 24241
    values = CategoricalIndex(data)
    result = values.map(f, na_action="ignore")
    tm.assert_index_equal(result, expected)


@pytest.mark.parametrize(
    ("data", "f", "expected"),
    (
        ([1, 1, np.nan], pd.isna, Index([False, False, True])),
        ([1, 2, np.nan], pd.isna, Index([False, False, True])),
        ([1, 1, np.nan], {1: False}, CategoricalIndex([False, False, np.nan])),
        ([1, 2, np.nan], {1: False, 2: False}, Index([False, False, np.nan])),
        (
            [1, 1, np.nan],
            Series([False, False]),
            CategoricalIndex([False, False, np.nan]),
        ),
        (
            [1, 2, np.nan],
            Series([False, False, False]),
            Index([False, False, np.nan]),
        ),
    ),
)
def test_map_with_nan_none(data, f, expected):  # GH 24241
    values = CategoricalIndex(data)
    result = values.map(f, na_action=None)
    tm.assert_index_equal(result, expected)


def test_map_with_dict_or_series():
    orig_values = ["a", "B", 1, "a"]
    new_values = ["one", 2, 3.0, "one"]
    cur_index = CategoricalIndex(orig_values, name="XXX")
    expected = CategoricalIndex(new_values, name="XXX", categories=[3.0, 2, "one"])

    mapper = Series(new_values[:-1], index=orig_values[:-1])
    result = cur_index.map(mapper)
    # Order of categories in result can be different
    tm.assert_index_equal(result, expected)

    mapper = dict(zip(orig_values[:-1], new_values[:-1]))
    result = cur_index.map(mapper)
    # Order of categories in result can be different
    tm.assert_index_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/categorical/test_setops.py
ADDED
@@ -0,0 +1,18 @@
import numpy as np
import pytest

from pandas import (
    CategoricalIndex,
    Index,
)
import pandas._testing as tm


@pytest.mark.parametrize("na_value", [None, np.nan])
def test_difference_with_na(na_value):
    # GH 57318
    ci = CategoricalIndex(["a", "b", "c", None])
    other = Index(["c", na_value])
    result = ci.difference(other)
    expected = CategoricalIndex(["a", "b"], categories=["a", "b", "c"])
    tm.assert_index_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (202 Bytes).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_astype.cpython-310.pyc
ADDED
Binary file (8.49 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_constructors.cpython-310.pyc
ADDED
Binary file (17.9 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_equals.cpython-310.pyc
ADDED
Binary file (1.18 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_formats.cpython-310.pyc
ADDED
Binary file (3.52 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_indexing.cpython-310.pyc
ADDED
Binary file (20.7 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_interval.cpython-310.pyc
ADDED
Binary file (24 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_interval_range.cpython-310.pyc
ADDED
Binary file (9.17 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_interval_tree.cpython-310.pyc
ADDED
Binary file (7.34 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_join.cpython-310.pyc
ADDED
Binary file (1.4 kB).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_pickle.cpython-310.pyc
ADDED
Binary file (858 Bytes).

llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/__pycache__/test_setops.cpython-310.pyc
ADDED
Binary file (5.28 kB).
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_constructors.py
ADDED
@@ -0,0 +1,535 @@
from functools import partial

import numpy as np
import pytest

import pandas.util._test_decorators as td

from pandas.core.dtypes.common import is_unsigned_integer_dtype
from pandas.core.dtypes.dtypes import IntervalDtype

from pandas import (
    Categorical,
    CategoricalDtype,
    CategoricalIndex,
    Index,
    Interval,
    IntervalIndex,
    date_range,
    notna,
    period_range,
    timedelta_range,
)
import pandas._testing as tm
from pandas.core.arrays import IntervalArray
import pandas.core.common as com


@pytest.fixture(params=[None, "foo"])
def name(request):
    return request.param


class ConstructorTests:
    """
    Common tests for all variations of IntervalIndex construction. Input data
    to be supplied in breaks format, then converted by the subclass method
    get_kwargs_from_breaks to the expected format.
    """

    @pytest.fixture(
        params=[
            ([3, 14, 15, 92, 653], np.int64),
            (np.arange(10, dtype="int64"), np.int64),
            (Index(np.arange(-10, 11, dtype=np.int64)), np.int64),
            (Index(np.arange(10, 31, dtype=np.uint64)), np.uint64),
            (Index(np.arange(20, 30, 0.5), dtype=np.float64), np.float64),
            (date_range("20180101", periods=10), "<M8[ns]"),
            (
                date_range("20180101", periods=10, tz="US/Eastern"),
                "datetime64[ns, US/Eastern]",
            ),
            (timedelta_range("1 day", periods=10), "<m8[ns]"),
        ]
    )
    def breaks_and_expected_subtype(self, request):
        return request.param

    def test_constructor(self, constructor, breaks_and_expected_subtype, closed, name):
        breaks, expected_subtype = breaks_and_expected_subtype

        result_kwargs = self.get_kwargs_from_breaks(breaks, closed)

        result = constructor(closed=closed, name=name, **result_kwargs)

        assert result.closed == closed
        assert result.name == name
        assert result.dtype.subtype == expected_subtype
        tm.assert_index_equal(result.left, Index(breaks[:-1], dtype=expected_subtype))
        tm.assert_index_equal(result.right, Index(breaks[1:], dtype=expected_subtype))

    @pytest.mark.parametrize(
        "breaks, subtype",
        [
            (Index([0, 1, 2, 3, 4], dtype=np.int64), "float64"),
            (Index([0, 1, 2, 3, 4], dtype=np.int64), "datetime64[ns]"),
            (Index([0, 1, 2, 3, 4], dtype=np.int64), "timedelta64[ns]"),
            (Index([0, 1, 2, 3, 4], dtype=np.float64), "int64"),
            (date_range("2017-01-01", periods=5), "int64"),
            (timedelta_range("1 day", periods=5), "int64"),
        ],
    )
    def test_constructor_dtype(self, constructor, breaks, subtype):
        # GH 19262: conversion via dtype parameter
        expected_kwargs = self.get_kwargs_from_breaks(breaks.astype(subtype))
        expected = constructor(**expected_kwargs)

        result_kwargs = self.get_kwargs_from_breaks(breaks)
        iv_dtype = IntervalDtype(subtype, "right")
        for dtype in (iv_dtype, str(iv_dtype)):
            result = constructor(dtype=dtype, **result_kwargs)
            tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "breaks",
        [
            Index([0, 1, 2, 3, 4], dtype=np.int64),
            Index([0, 1, 2, 3, 4], dtype=np.uint64),
            Index([0, 1, 2, 3, 4], dtype=np.float64),
            date_range("2017-01-01", periods=5),
            timedelta_range("1 day", periods=5),
        ],
    )
    def test_constructor_pass_closed(self, constructor, breaks):
        # not passing closed to IntervalDtype, but to IntervalArray constructor
        iv_dtype = IntervalDtype(breaks.dtype)

        result_kwargs = self.get_kwargs_from_breaks(breaks)

        for dtype in (iv_dtype, str(iv_dtype)):
            with tm.assert_produces_warning(None):
                result = constructor(dtype=dtype, closed="left", **result_kwargs)
            assert result.dtype.closed == "left"

    @pytest.mark.parametrize("breaks", [[np.nan] * 2, [np.nan] * 4, [np.nan] * 50])
    def test_constructor_nan(self, constructor, breaks, closed):
        # GH 18421
        result_kwargs = self.get_kwargs_from_breaks(breaks)
        result = constructor(closed=closed, **result_kwargs)

        expected_subtype = np.float64
        expected_values = np.array(breaks[:-1], dtype=object)

        assert result.closed == closed
        assert result.dtype.subtype == expected_subtype
        tm.assert_numpy_array_equal(np.array(result), expected_values)

    @pytest.mark.parametrize(
        "breaks",
        [
            [],
            np.array([], dtype="int64"),
            np.array([], dtype="uint64"),
            np.array([], dtype="float64"),
            np.array([], dtype="datetime64[ns]"),
            np.array([], dtype="timedelta64[ns]"),
        ],
    )
    def test_constructor_empty(self, constructor, breaks, closed):
        # GH 18421
        result_kwargs = self.get_kwargs_from_breaks(breaks)
        result = constructor(closed=closed, **result_kwargs)

        expected_values = np.array([], dtype=object)
        expected_subtype = getattr(breaks, "dtype", np.int64)

        assert result.empty
        assert result.closed == closed
        assert result.dtype.subtype == expected_subtype
        tm.assert_numpy_array_equal(np.array(result), expected_values)

    @pytest.mark.parametrize(
        "breaks",
        [
            tuple("0123456789"),
            list("abcdefghij"),
            np.array(list("abcdefghij"), dtype=object),
            np.array(list("abcdefghij"), dtype="<U1"),
        ],
    )
    def test_constructor_string(self, constructor, breaks):
        # GH 19016
        msg = (
            "category, object, and string subtypes are not supported "
            "for IntervalIndex"
        )
        with pytest.raises(TypeError, match=msg):
            constructor(**self.get_kwargs_from_breaks(breaks))

    @pytest.mark.parametrize("cat_constructor", [Categorical, CategoricalIndex])
    def test_constructor_categorical_valid(self, constructor, cat_constructor):
        # GH 21243/21253

        breaks = np.arange(10, dtype="int64")
        expected = IntervalIndex.from_breaks(breaks)

        cat_breaks = cat_constructor(breaks)
        result_kwargs = self.get_kwargs_from_breaks(cat_breaks)
        result = constructor(**result_kwargs)
        tm.assert_index_equal(result, expected)

    def test_generic_errors(self, constructor):
        # filler input data to be used when supplying invalid kwargs
        filler = self.get_kwargs_from_breaks(range(10))

        # invalid closed
        msg = "closed must be one of 'right', 'left', 'both', 'neither'"
        with pytest.raises(ValueError, match=msg):
            constructor(closed="invalid", **filler)

        # unsupported dtype
        msg = "dtype must be an IntervalDtype, got int64"
        with pytest.raises(TypeError, match=msg):
            constructor(dtype="int64", **filler)

        # invalid dtype
        msg = "data type [\"']invalid[\"'] not understood"
        with pytest.raises(TypeError, match=msg):
            constructor(dtype="invalid", **filler)

        # no point in nesting periods in an IntervalIndex
        periods = period_range("2000-01-01", periods=10)
        periods_kwargs = self.get_kwargs_from_breaks(periods)
        msg = "Period dtypes are not supported, use a PeriodIndex instead"
        with pytest.raises(ValueError, match=msg):
            constructor(**periods_kwargs)

        # decreasing values
        decreasing_kwargs = self.get_kwargs_from_breaks(range(10, -1, -1))
        msg = "left side of interval must be <= right side"
        with pytest.raises(ValueError, match=msg):
            constructor(**decreasing_kwargs)


class TestFromArrays(ConstructorTests):
    """Tests specific to IntervalIndex.from_arrays"""

    @pytest.fixture
    def constructor(self):
        return IntervalIndex.from_arrays

    def get_kwargs_from_breaks(self, breaks, closed="right"):
        """
        converts intervals in breaks format to a dictionary of kwargs to
        specific to the format expected by IntervalIndex.from_arrays
        """
        return {"left": breaks[:-1], "right": breaks[1:]}

    def test_constructor_errors(self):
        # GH 19016: categorical data
        data = Categorical(list("01234abcde"), ordered=True)
        msg = (
            "category, object, and string subtypes are not supported "
            "for IntervalIndex"
        )
        with pytest.raises(TypeError, match=msg):
            IntervalIndex.from_arrays(data[:-1], data[1:])

        # unequal length
        left = [0, 1, 2]
        right = [2, 3]
        msg = "left and right must have the same length"
        with pytest.raises(ValueError, match=msg):
            IntervalIndex.from_arrays(left, right)

    @pytest.mark.parametrize(
        "left_subtype, right_subtype", [(np.int64, np.float64), (np.float64, np.int64)]
    )
    def test_mixed_float_int(self, left_subtype, right_subtype):
        """mixed int/float left/right results in float for both sides"""
        left = np.arange(9, dtype=left_subtype)
        right = np.arange(1, 10, dtype=right_subtype)
        result = IntervalIndex.from_arrays(left, right)

        expected_left = Index(left, dtype=np.float64)
        expected_right = Index(right, dtype=np.float64)
        expected_subtype = np.float64

        tm.assert_index_equal(result.left, expected_left)
        tm.assert_index_equal(result.right, expected_right)
        assert result.dtype.subtype == expected_subtype

    @pytest.mark.parametrize("interval_cls", [IntervalArray, IntervalIndex])
    def test_from_arrays_mismatched_datetimelike_resos(self, interval_cls):
        # GH#55714
        left = date_range("2016-01-01", periods=3, unit="s")
        right = date_range("2017-01-01", periods=3, unit="ms")
        result = interval_cls.from_arrays(left, right)
        expected = interval_cls.from_arrays(left.as_unit("ms"), right)
        tm.assert_equal(result, expected)

        # td64
        left2 = left - left[0]
        right2 = right - left[0]
        result2 = interval_cls.from_arrays(left2, right2)
        expected2 = interval_cls.from_arrays(left2.as_unit("ms"), right2)
        tm.assert_equal(result2, expected2)

        # dt64tz
        left3 = left.tz_localize("UTC")
        right3 = right.tz_localize("UTC")
        result3 = interval_cls.from_arrays(left3, right3)
        expected3 = interval_cls.from_arrays(left3.as_unit("ms"), right3)
        tm.assert_equal(result3, expected3)


class TestFromBreaks(ConstructorTests):
    """Tests specific to IntervalIndex.from_breaks"""

    @pytest.fixture
    def constructor(self):
        return IntervalIndex.from_breaks

    def get_kwargs_from_breaks(self, breaks, closed="right"):
        """
        converts intervals in breaks format to a dictionary of kwargs to
        specific to the format expected by IntervalIndex.from_breaks
        """
        return {"breaks": breaks}

    def test_constructor_errors(self):
        # GH 19016: categorical data
        data = Categorical(list("01234abcde"), ordered=True)
        msg = (
            "category, object, and string subtypes are not supported "
            "for IntervalIndex"
        )
        with pytest.raises(TypeError, match=msg):
            IntervalIndex.from_breaks(data)

    def test_length_one(self):
        """breaks of length one produce an empty IntervalIndex"""
        breaks = [0]
        result = IntervalIndex.from_breaks(breaks)
        expected = IntervalIndex.from_breaks([])
        tm.assert_index_equal(result, expected)

    def test_left_right_dont_share_data(self):
        # GH#36310
        breaks = np.arange(5)
        result = IntervalIndex.from_breaks(breaks)._data
        assert result._left.base is None or result._left.base is not result._right.base


class TestFromTuples(ConstructorTests):
    """Tests specific to IntervalIndex.from_tuples"""

    @pytest.fixture
    def constructor(self):
        return IntervalIndex.from_tuples

    def get_kwargs_from_breaks(self, breaks, closed="right"):
        """
        converts intervals in breaks format to a dictionary of kwargs to
        specific to the format expected by IntervalIndex.from_tuples
        """
        if is_unsigned_integer_dtype(breaks):
            pytest.skip(f"{breaks.dtype} not relevant IntervalIndex.from_tuples tests")

        if len(breaks) == 0:
            return {"data": breaks}

        tuples = list(zip(breaks[:-1], breaks[1:]))
        if isinstance(breaks, (list, tuple)):
            return {"data": tuples}
        elif isinstance(getattr(breaks, "dtype", None), CategoricalDtype):
            return {"data": breaks._constructor(tuples)}
        return {"data": com.asarray_tuplesafe(tuples)}

    def test_constructor_errors(self):
        # non-tuple
        tuples = [(0, 1), 2, (3, 4)]
        msg = "IntervalIndex.from_tuples received an invalid item, 2"
        with pytest.raises(TypeError, match=msg.format(t=tuples)):
            IntervalIndex.from_tuples(tuples)

        # too few/many items
        tuples = [(0, 1), (2,), (3, 4)]
        msg = "IntervalIndex.from_tuples requires tuples of length 2, got {t}"
        with pytest.raises(ValueError, match=msg.format(t=tuples)):
            IntervalIndex.from_tuples(tuples)

        tuples = [(0, 1), (2, 3, 4), (5, 6)]
        with pytest.raises(ValueError, match=msg.format(t=tuples)):
            IntervalIndex.from_tuples(tuples)

    def test_na_tuples(self):
        # tuple (NA, NA) evaluates the same as NA as an element
        na_tuple = [(0, 1), (np.nan, np.nan), (2, 3)]
        idx_na_tuple = IntervalIndex.from_tuples(na_tuple)
        idx_na_element = IntervalIndex.from_tuples([(0, 1), np.nan, (2, 3)])
        tm.assert_index_equal(idx_na_tuple, idx_na_element)


class TestClassConstructors(ConstructorTests):
    """Tests specific to the IntervalIndex/Index constructors"""

    @pytest.fixture(
        params=[IntervalIndex, partial(Index, dtype="interval")],
        ids=["IntervalIndex", "Index"],
    )
    def klass(self, request):
        # We use a separate fixture here to include Index.__new__ with dtype kwarg
        return request.param

    @pytest.fixture
    def constructor(self):
        return IntervalIndex

    def get_kwargs_from_breaks(self, breaks, closed="right"):
        """
        converts intervals in breaks format to a dictionary of kwargs to
        specific to the format expected by the IntervalIndex/Index constructors
        """
        if is_unsigned_integer_dtype(breaks):
            pytest.skip(f"{breaks.dtype} not relevant for class constructor tests")

        if len(breaks) == 0:
            return {"data": breaks}

        ivs = [
            Interval(left, right, closed) if notna(left) else left
            for left, right in zip(breaks[:-1], breaks[1:])
        ]

        if isinstance(breaks, list):
            return {"data": ivs}
        elif isinstance(getattr(breaks, "dtype", None), CategoricalDtype):
            return {"data": breaks._constructor(ivs)}
        return {"data": np.array(ivs, dtype=object)}

    def test_generic_errors(self, constructor):
        """
        override the base class implementation since errors are handled
        differently; checks unnecessary since caught at the Interval level
        """

    def test_constructor_string(self):
        # GH23013
        # When forming the interval from breaks,
        # the interval of strings is already forbidden.
        pass

    def test_constructor_errors(self, klass):
        # mismatched closed within intervals with no constructor override
        ivs = [Interval(0, 1, closed="right"), Interval(2, 3, closed="left")]
        msg = "intervals must all be closed on the same side"
        with pytest.raises(ValueError, match=msg):
            klass(ivs)

        # scalar
        msg = (
            r"(IntervalIndex|Index)\(...\) must be called with a collection of "
            "some kind, 5 was passed"
        )
        with pytest.raises(TypeError, match=msg):
            klass(5)

        # not an interval; dtype depends on 32bit/windows builds
        msg = "type <class 'numpy.int(32|64)'> with value 0 is not an interval"
        with pytest.raises(TypeError, match=msg):
            klass([0, 1])

    @pytest.mark.parametrize(
        "data, closed",
        [
            ([], "both"),
            ([np.nan, np.nan], "neither"),
            (
                [Interval(0, 3, closed="neither"), Interval(2, 5, closed="neither")],
                "left",
            ),
            (
                [Interval(0, 3, closed="left"), Interval(2, 5, closed="right")],
                "neither",
            ),
            (IntervalIndex.from_breaks(range(5), closed="both"), "right"),
        ],
    )
    def test_override_inferred_closed(self, constructor, data, closed):
        # GH 19370
        if isinstance(data, IntervalIndex):
            tuples = data.to_tuples()
        else:
            tuples = [(iv.left, iv.right) if notna(iv) else iv for iv in data]
        expected = IntervalIndex.from_tuples(tuples, closed=closed)
        result = constructor(data, closed=closed)
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "values_constructor", [list, np.array, IntervalIndex, IntervalArray]
    )
    def test_index_object_dtype(self, values_constructor):
        # Index(intervals, dtype=object) is an Index (not an IntervalIndex)
        intervals = [Interval(0, 1), Interval(1, 2), Interval(2, 3)]
        values = values_constructor(intervals)
        result = Index(values, dtype=object)

        assert type(result) is Index
        tm.assert_numpy_array_equal(result.values, np.array(values))

    def test_index_mixed_closed(self):
        # GH27172
        intervals = [
            Interval(0, 1, closed="left"),
            Interval(1, 2, closed="right"),
            Interval(2, 3, closed="neither"),
            Interval(3, 4, closed="both"),
        ]
        result = Index(intervals)
        expected = Index(intervals, dtype=object)
        tm.assert_index_equal(result, expected)


@pytest.mark.parametrize("timezone", ["UTC", "US/Pacific", "GMT"])
def test_interval_index_subtype(timezone, inclusive_endpoints_fixture):
|
496 |
+
# GH#46999
|
497 |
+
dates = date_range("2022", periods=3, tz=timezone)
|
498 |
+
dtype = f"interval[datetime64[ns, {timezone}], {inclusive_endpoints_fixture}]"
|
499 |
+
result = IntervalIndex.from_arrays(
|
500 |
+
["2022-01-01", "2022-01-02"],
|
501 |
+
["2022-01-02", "2022-01-03"],
|
502 |
+
closed=inclusive_endpoints_fixture,
|
503 |
+
dtype=dtype,
|
504 |
+
)
|
505 |
+
expected = IntervalIndex.from_arrays(
|
506 |
+
dates[:-1], dates[1:], closed=inclusive_endpoints_fixture
|
507 |
+
)
|
508 |
+
tm.assert_index_equal(result, expected)
|
509 |
+
|
510 |
+
|
511 |
+
def test_dtype_closed_mismatch():
|
512 |
+
# GH#38394 closed specified in both dtype and IntervalIndex constructor
|
513 |
+
|
514 |
+
dtype = IntervalDtype(np.int64, "left")
|
515 |
+
|
516 |
+
msg = "closed keyword does not match dtype.closed"
|
517 |
+
with pytest.raises(ValueError, match=msg):
|
518 |
+
IntervalIndex([], dtype=dtype, closed="neither")
|
519 |
+
|
520 |
+
with pytest.raises(ValueError, match=msg):
|
521 |
+
IntervalArray([], dtype=dtype, closed="neither")
|
522 |
+
|
523 |
+
|
524 |
+
@pytest.mark.parametrize(
|
525 |
+
"dtype",
|
526 |
+
["Float64", pytest.param("float64[pyarrow]", marks=td.skip_if_no("pyarrow"))],
|
527 |
+
)
|
528 |
+
def test_ea_dtype(dtype):
|
529 |
+
# GH#56765
|
530 |
+
bins = [(0.0, 0.4), (0.4, 0.6)]
|
531 |
+
interval_dtype = IntervalDtype(subtype=dtype, closed="left")
|
532 |
+
result = IntervalIndex.from_tuples(bins, closed="left", dtype=interval_dtype)
|
533 |
+
assert result.dtype == interval_dtype
|
534 |
+
expected = IntervalIndex.from_tuples(bins, closed="left").astype(interval_dtype)
|
535 |
+
tm.assert_index_equal(result, expected)
|
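As a quick illustration of the from_tuples behavior exercised by the constructor tests above, here is a minimal interactive sketch (the values are chosen for illustration only and are not part of the diff):

import numpy as np
from pandas import IntervalIndex

# a (nan, nan) tuple and a bare nan produce the same missing interval
left = IntervalIndex.from_tuples([(0, 1), (np.nan, np.nan), (2, 3)])
right = IntervalIndex.from_tuples([(0, 1), np.nan, (2, 3)])
assert left.equals(right)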
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_equals.py
ADDED
@@ -0,0 +1,36 @@
import numpy as np

from pandas import (
    IntervalIndex,
    date_range,
)


class TestEquals:
    def test_equals(self, closed):
        expected = IntervalIndex.from_breaks(np.arange(5), closed=closed)
        assert expected.equals(expected)
        assert expected.equals(expected.copy())

        assert not expected.equals(expected.astype(object))
        assert not expected.equals(np.array(expected))
        assert not expected.equals(list(expected))

        assert not expected.equals([1, 2])
        assert not expected.equals(np.array([1, 2]))
        assert not expected.equals(date_range("20130101", periods=2))

        expected_name1 = IntervalIndex.from_breaks(
            np.arange(5), closed=closed, name="foo"
        )
        expected_name2 = IntervalIndex.from_breaks(
            np.arange(5), closed=closed, name="bar"
        )
        assert expected.equals(expected_name1)
        assert expected_name1.equals(expected_name2)

        for other_closed in {"left", "right", "both", "neither"} - {closed}:
            expected_other_closed = IntervalIndex.from_breaks(
                np.arange(5), closed=other_closed
            )
            assert not expected.equals(expected_other_closed)
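A minimal sketch of the equality semantics asserted in the file above, using only the public pandas API (example values are illustrative):

import numpy as np
from pandas import IntervalIndex

a = IntervalIndex.from_breaks(np.arange(5), closed="right")
assert a.equals(a.copy())  # names are ignored, values and dtype must match
assert not a.equals(IntervalIndex.from_breaks(np.arange(5), closed="left"))  # closed differs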
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_formats.py
ADDED
@@ -0,0 +1,119 @@
import numpy as np
import pytest

from pandas._config import using_pyarrow_string_dtype

from pandas import (
    DataFrame,
    DatetimeIndex,
    Index,
    Interval,
    IntervalIndex,
    Series,
    Timedelta,
    Timestamp,
)
import pandas._testing as tm


class TestIntervalIndexRendering:
    # TODO: this is a test for DataFrame/Series, not IntervalIndex
    @pytest.mark.parametrize(
        "constructor,expected",
        [
            (
                Series,
                (
                    "(0.0, 1.0] a\n"
                    "NaN b\n"
                    "(2.0, 3.0] c\n"
                    "dtype: object"
                ),
            ),
            (DataFrame, (" 0\n(0.0, 1.0] a\nNaN b\n(2.0, 3.0] c")),
        ],
    )
    def test_repr_missing(self, constructor, expected, using_infer_string, request):
        # GH 25984
        if using_infer_string and constructor is Series:
            request.applymarker(pytest.mark.xfail(reason="repr different"))
        index = IntervalIndex.from_tuples([(0, 1), np.nan, (2, 3)])
        obj = constructor(list("abc"), index=index)
        result = repr(obj)
        assert result == expected

    @pytest.mark.xfail(using_pyarrow_string_dtype(), reason="repr different")
    def test_repr_floats(self):
        # GH 32553

        markers = Series(
            ["foo", "bar"],
            index=IntervalIndex(
                [
                    Interval(left, right)
                    for left, right in zip(
                        Index([329.973, 345.137], dtype="float64"),
                        Index([345.137, 360.191], dtype="float64"),
                    )
                ]
            ),
        )
        result = str(markers)
        expected = "(329.973, 345.137] foo\n(345.137, 360.191] bar\ndtype: object"
        assert result == expected

    @pytest.mark.parametrize(
        "tuples, closed, expected_data",
        [
            ([(0, 1), (1, 2), (2, 3)], "left", ["[0, 1)", "[1, 2)", "[2, 3)"]),
            (
                [(0.5, 1.0), np.nan, (2.0, 3.0)],
                "right",
                ["(0.5, 1.0]", "NaN", "(2.0, 3.0]"],
            ),
            (
                [
                    (Timestamp("20180101"), Timestamp("20180102")),
                    np.nan,
                    ((Timestamp("20180102"), Timestamp("20180103"))),
                ],
                "both",
                [
                    "[2018-01-01 00:00:00, 2018-01-02 00:00:00]",
                    "NaN",
                    "[2018-01-02 00:00:00, 2018-01-03 00:00:00]",
                ],
            ),
            (
                [
                    (Timedelta("0 days"), Timedelta("1 days")),
                    (Timedelta("1 days"), Timedelta("2 days")),
                    np.nan,
                ],
                "neither",
                [
                    "(0 days 00:00:00, 1 days 00:00:00)",
                    "(1 days 00:00:00, 2 days 00:00:00)",
                    "NaN",
                ],
            ),
        ],
    )
    def test_get_values_for_csv(self, tuples, closed, expected_data):
        # GH 28210
        index = IntervalIndex.from_tuples(tuples, closed=closed)
        result = index._get_values_for_csv(na_rep="NaN")
        expected = np.array(expected_data)
        tm.assert_numpy_array_equal(result, expected)

    def test_timestamp_with_timezone(self, unit):
        # GH 55035
        left = DatetimeIndex(["2020-01-01"], dtype=f"M8[{unit}, UTC]")
        right = DatetimeIndex(["2020-01-02"], dtype=f"M8[{unit}, UTC]")
        index = IntervalIndex.from_arrays(left, right)
        result = repr(index)
        expected = (
            "IntervalIndex([(2020-01-01 00:00:00+00:00, 2020-01-02 00:00:00+00:00]], "
            f"dtype='interval[datetime64[{unit}, UTC], right]')"
        )
        assert result == expected
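For orientation, a short sketch of the kind of rendering the tests above check; the exact spacing and dtype text depend on the pandas version, so treat the comment as indicative only:

from pandas import IntervalIndex

idx = IntervalIndex.from_tuples([(0, 1), (1, 2)], closed="left")
print(repr(idx))  # e.g. IntervalIndex([[0, 1), [1, 2)], dtype='interval[int64, left]')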
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_indexing.py
ADDED
@@ -0,0 +1,671 @@
import re

import numpy as np
import pytest

from pandas.errors import InvalidIndexError

from pandas import (
    NA,
    CategoricalIndex,
    DatetimeIndex,
    Index,
    Interval,
    IntervalIndex,
    MultiIndex,
    NaT,
    Timedelta,
    Timestamp,
    array,
    date_range,
    interval_range,
    isna,
    period_range,
    timedelta_range,
)
import pandas._testing as tm


class TestGetItem:
    def test_getitem(self, closed):
        idx = IntervalIndex.from_arrays((0, 1, np.nan), (1, 2, np.nan), closed=closed)
        assert idx[0] == Interval(0.0, 1.0, closed=closed)
        assert idx[1] == Interval(1.0, 2.0, closed=closed)
        assert isna(idx[2])

        result = idx[0:1]
        expected = IntervalIndex.from_arrays((0.0,), (1.0,), closed=closed)
        tm.assert_index_equal(result, expected)

        result = idx[0:2]
        expected = IntervalIndex.from_arrays((0.0, 1), (1.0, 2.0), closed=closed)
        tm.assert_index_equal(result, expected)

        result = idx[1:3]
        expected = IntervalIndex.from_arrays(
            (1.0, np.nan), (2.0, np.nan), closed=closed
        )
        tm.assert_index_equal(result, expected)

    def test_getitem_2d_deprecated(self):
        # GH#30588 multi-dim indexing is deprecated, but raising is also acceptable
        idx = IntervalIndex.from_breaks(range(11), closed="right")
        with pytest.raises(ValueError, match="multi-dimensional indexing not allowed"):
            idx[:, None]
        with pytest.raises(ValueError, match="multi-dimensional indexing not allowed"):
            # GH#44051
            idx[True]
        with pytest.raises(ValueError, match="multi-dimensional indexing not allowed"):
            # GH#44051
            idx[False]


class TestWhere:
    def test_where(self, listlike_box):
        klass = listlike_box

        idx = IntervalIndex.from_breaks(range(11), closed="right")
        cond = [True] * len(idx)
        expected = idx
        result = expected.where(klass(cond))
        tm.assert_index_equal(result, expected)

        cond = [False] + [True] * len(idx[1:])
        expected = IntervalIndex([np.nan] + idx[1:].tolist())
        result = idx.where(klass(cond))
        tm.assert_index_equal(result, expected)


class TestTake:
    def test_take(self, closed):
        index = IntervalIndex.from_breaks(range(11), closed=closed)

        result = index.take(range(10))
        tm.assert_index_equal(result, index)

        result = index.take([0, 0, 1])
        expected = IntervalIndex.from_arrays([0, 0, 1], [1, 1, 2], closed=closed)
        tm.assert_index_equal(result, expected)


class TestGetLoc:
    @pytest.mark.parametrize("side", ["right", "left", "both", "neither"])
    def test_get_loc_interval(self, closed, side):
        idx = IntervalIndex.from_tuples([(0, 1), (2, 3)], closed=closed)

        for bound in [[0, 1], [1, 2], [2, 3], [3, 4], [0, 2], [2.5, 3], [-1, 4]]:
            # if get_loc is supplied an interval, it should only search
            # for exact matches, not overlaps or covers, else KeyError.
            msg = re.escape(f"Interval({bound[0]}, {bound[1]}, closed='{side}')")
            if closed == side:
                if bound == [0, 1]:
                    assert idx.get_loc(Interval(0, 1, closed=side)) == 0
                elif bound == [2, 3]:
                    assert idx.get_loc(Interval(2, 3, closed=side)) == 1
                else:
                    with pytest.raises(KeyError, match=msg):
                        idx.get_loc(Interval(*bound, closed=side))
            else:
                with pytest.raises(KeyError, match=msg):
                    idx.get_loc(Interval(*bound, closed=side))

    @pytest.mark.parametrize("scalar", [-0.5, 0, 0.5, 1, 1.5, 2, 2.5, 3, 3.5])
    def test_get_loc_scalar(self, closed, scalar):
        # correct = {side: {query: answer}}.
        # If query is not in the dict, that query should raise a KeyError
        correct = {
            "right": {0.5: 0, 1: 0, 2.5: 1, 3: 1},
            "left": {0: 0, 0.5: 0, 2: 1, 2.5: 1},
            "both": {0: 0, 0.5: 0, 1: 0, 2: 1, 2.5: 1, 3: 1},
            "neither": {0.5: 0, 2.5: 1},
        }

        idx = IntervalIndex.from_tuples([(0, 1), (2, 3)], closed=closed)

        # if get_loc is supplied a scalar, it should return the index of
        # the interval which contains the scalar, or KeyError.
        if scalar in correct[closed].keys():
            assert idx.get_loc(scalar) == correct[closed][scalar]
        else:
            with pytest.raises(KeyError, match=str(scalar)):
                idx.get_loc(scalar)

    @pytest.mark.parametrize("scalar", [-1, 0, 0.5, 3, 4.5, 5, 6])
    def test_get_loc_length_one_scalar(self, scalar, closed):
        # GH 20921
        index = IntervalIndex.from_tuples([(0, 5)], closed=closed)
        if scalar in index[0]:
            result = index.get_loc(scalar)
            assert result == 0
        else:
            with pytest.raises(KeyError, match=str(scalar)):
                index.get_loc(scalar)

    @pytest.mark.parametrize("other_closed", ["left", "right", "both", "neither"])
    @pytest.mark.parametrize("left, right", [(0, 5), (-1, 4), (-1, 6), (6, 7)])
    def test_get_loc_length_one_interval(self, left, right, closed, other_closed):
        # GH 20921
        index = IntervalIndex.from_tuples([(0, 5)], closed=closed)
        interval = Interval(left, right, closed=other_closed)
        if interval == index[0]:
            result = index.get_loc(interval)
            assert result == 0
        else:
            with pytest.raises(
                KeyError,
                match=re.escape(f"Interval({left}, {right}, closed='{other_closed}')"),
            ):
                index.get_loc(interval)

    # Make consistent with test_interval_new.py (see #16316, #16386)
    @pytest.mark.parametrize(
        "breaks",
        [
            date_range("20180101", periods=4),
            date_range("20180101", periods=4, tz="US/Eastern"),
            timedelta_range("0 days", periods=4),
        ],
        ids=lambda x: str(x.dtype),
    )
    def test_get_loc_datetimelike_nonoverlapping(self, breaks):
        # GH 20636
        # nonoverlapping = IntervalIndex method and no i8 conversion
        index = IntervalIndex.from_breaks(breaks)

        value = index[0].mid
        result = index.get_loc(value)
        expected = 0
        assert result == expected

        interval = Interval(index[0].left, index[0].right)
        result = index.get_loc(interval)
        expected = 0
        assert result == expected

    @pytest.mark.parametrize(
        "arrays",
        [
            (date_range("20180101", periods=4), date_range("20180103", periods=4)),
            (
                date_range("20180101", periods=4, tz="US/Eastern"),
                date_range("20180103", periods=4, tz="US/Eastern"),
            ),
            (
                timedelta_range("0 days", periods=4),
                timedelta_range("2 days", periods=4),
            ),
        ],
        ids=lambda x: str(x[0].dtype),
    )
    def test_get_loc_datetimelike_overlapping(self, arrays):
        # GH 20636
        index = IntervalIndex.from_arrays(*arrays)

        value = index[0].mid + Timedelta("12 hours")
        result = index.get_loc(value)
        expected = slice(0, 2, None)
        assert result == expected

        interval = Interval(index[0].left, index[0].right)
        result = index.get_loc(interval)
        expected = 0
        assert result == expected

    @pytest.mark.parametrize(
        "values",
        [
            date_range("2018-01-04", periods=4, freq="-1D"),
            date_range("2018-01-04", periods=4, freq="-1D", tz="US/Eastern"),
            timedelta_range("3 days", periods=4, freq="-1D"),
            np.arange(3.0, -1.0, -1.0),
            np.arange(3, -1, -1),
        ],
        ids=lambda x: str(x.dtype),
    )
    def test_get_loc_decreasing(self, values):
        # GH 25860
        index = IntervalIndex.from_arrays(values[1:], values[:-1])
        result = index.get_loc(index[0])
        expected = 0
        assert result == expected

    @pytest.mark.parametrize("key", [[5], (2, 3)])
    def test_get_loc_non_scalar_errors(self, key):
        # GH 31117
        idx = IntervalIndex.from_tuples([(1, 3), (2, 4), (3, 5), (7, 10), (3, 10)])

        msg = str(key)
        with pytest.raises(InvalidIndexError, match=msg):
            idx.get_loc(key)

    def test_get_indexer_with_nans(self):
        # GH#41831
        index = IntervalIndex([np.nan, Interval(1, 2), np.nan])

        expected = np.array([True, False, True])
        for key in [None, np.nan, NA]:
            assert key in index
            result = index.get_loc(key)
            tm.assert_numpy_array_equal(result, expected)

        for key in [NaT, np.timedelta64("NaT", "ns"), np.datetime64("NaT", "ns")]:
            with pytest.raises(KeyError, match=str(key)):
                index.get_loc(key)


class TestGetIndexer:
    @pytest.mark.parametrize(
        "query, expected",
        [
            ([Interval(2, 4, closed="right")], [1]),
            ([Interval(2, 4, closed="left")], [-1]),
            ([Interval(2, 4, closed="both")], [-1]),
            ([Interval(2, 4, closed="neither")], [-1]),
            ([Interval(1, 4, closed="right")], [-1]),
            ([Interval(0, 4, closed="right")], [-1]),
            ([Interval(0.5, 1.5, closed="right")], [-1]),
            ([Interval(2, 4, closed="right"), Interval(0, 1, closed="right")], [1, -1]),
            ([Interval(2, 4, closed="right"), Interval(2, 4, closed="right")], [1, 1]),
            ([Interval(5, 7, closed="right"), Interval(2, 4, closed="right")], [2, 1]),
            ([Interval(2, 4, closed="right"), Interval(2, 4, closed="left")], [1, -1]),
        ],
    )
    def test_get_indexer_with_interval(self, query, expected):
        tuples = [(0, 2), (2, 4), (5, 7)]
        index = IntervalIndex.from_tuples(tuples, closed="right")

        result = index.get_indexer(query)
        expected = np.array(expected, dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize(
        "query, expected",
        [
            ([-0.5], [-1]),
            ([0], [-1]),
            ([0.5], [0]),
            ([1], [0]),
            ([1.5], [1]),
            ([2], [1]),
            ([2.5], [-1]),
            ([3], [-1]),
            ([3.5], [2]),
            ([4], [2]),
            ([4.5], [-1]),
            ([1, 2], [0, 1]),
            ([1, 2, 3], [0, 1, -1]),
            ([1, 2, 3, 4], [0, 1, -1, 2]),
            ([1, 2, 3, 4, 2], [0, 1, -1, 2, 1]),
        ],
    )
    def test_get_indexer_with_int_and_float(self, query, expected):
        tuples = [(0, 1), (1, 2), (3, 4)]
        index = IntervalIndex.from_tuples(tuples, closed="right")

        result = index.get_indexer(query)
        expected = np.array(expected, dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize("item", [[3], np.arange(0.5, 5, 0.5)])
    def test_get_indexer_length_one(self, item, closed):
        # GH 17284
        index = IntervalIndex.from_tuples([(0, 5)], closed=closed)
        result = index.get_indexer(item)
        expected = np.array([0] * len(item), dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize("size", [1, 5])
    def test_get_indexer_length_one_interval(self, size, closed):
        # GH 17284
        index = IntervalIndex.from_tuples([(0, 5)], closed=closed)
        result = index.get_indexer([Interval(0, 5, closed)] * size)
        expected = np.array([0] * size, dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize(
        "target",
        [
            IntervalIndex.from_tuples([(7, 8), (1, 2), (3, 4), (0, 1)]),
            IntervalIndex.from_tuples([(0, 1), (1, 2), (3, 4), np.nan]),
            IntervalIndex.from_tuples([(0, 1), (1, 2), (3, 4)], closed="both"),
            [-1, 0, 0.5, 1, 2, 2.5, np.nan],
            ["foo", "foo", "bar", "baz"],
        ],
    )
    def test_get_indexer_categorical(self, target, ordered):
        # GH 30063: categorical and non-categorical results should be consistent
        index = IntervalIndex.from_tuples([(0, 1), (1, 2), (3, 4)])
        categorical_target = CategoricalIndex(target, ordered=ordered)

        result = index.get_indexer(categorical_target)
        expected = index.get_indexer(target)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_categorical_with_nans(self):
        # GH#41934 nans in both index and in target
        ii = IntervalIndex.from_breaks(range(5))
        ii2 = ii.append(IntervalIndex([np.nan]))
        ci2 = CategoricalIndex(ii2)

        result = ii2.get_indexer(ci2)
        expected = np.arange(5, dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

        # not-all-matches
        result = ii2[1:].get_indexer(ci2[::-1])
        expected = np.array([3, 2, 1, 0, -1], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

        # non-unique target, non-unique nans
        result = ii2.get_indexer(ci2.append(ci2))
        expected = np.array([0, 1, 2, 3, 4, 0, 1, 2, 3, 4], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_datetime(self):
        ii = IntervalIndex.from_breaks(date_range("2018-01-01", periods=4))
        # TODO: with mismatched resolution get_indexer currently raises;
        # this should probably coerce?
        target = DatetimeIndex(["2018-01-02"], dtype="M8[ns]")
        result = ii.get_indexer(target)
        expected = np.array([0], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

        result = ii.get_indexer(target.astype(str))
        tm.assert_numpy_array_equal(result, expected)

        # https://github.com/pandas-dev/pandas/issues/47772
        result = ii.get_indexer(target.asi8)
        expected = np.array([-1], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize(
        "tuples, closed",
        [
            ([(0, 2), (1, 3), (3, 4)], "neither"),
            ([(0, 5), (1, 4), (6, 7)], "left"),
            ([(0, 1), (0, 1), (1, 2)], "right"),
            ([(0, 1), (2, 3), (3, 4)], "both"),
        ],
    )
    def test_get_indexer_errors(self, tuples, closed):
        # IntervalIndex needs non-overlapping for uniqueness when querying
        index = IntervalIndex.from_tuples(tuples, closed=closed)

        msg = (
            "cannot handle overlapping indices; use "
            "IntervalIndex.get_indexer_non_unique"
        )
        with pytest.raises(InvalidIndexError, match=msg):
            index.get_indexer([0, 2])

    @pytest.mark.parametrize(
        "query, expected",
        [
            ([-0.5], ([-1], [0])),
            ([0], ([0], [])),
            ([0.5], ([0], [])),
            ([1], ([0, 1], [])),
            ([1.5], ([0, 1], [])),
            ([2], ([0, 1, 2], [])),
            ([2.5], ([1, 2], [])),
            ([3], ([2], [])),
            ([3.5], ([2], [])),
            ([4], ([-1], [0])),
            ([4.5], ([-1], [0])),
            ([1, 2], ([0, 1, 0, 1, 2], [])),
            ([1, 2, 3], ([0, 1, 0, 1, 2, 2], [])),
            ([1, 2, 3, 4], ([0, 1, 0, 1, 2, 2, -1], [3])),
            ([1, 2, 3, 4, 2], ([0, 1, 0, 1, 2, 2, -1, 0, 1, 2], [3])),
        ],
    )
    def test_get_indexer_non_unique_with_int_and_float(self, query, expected):
        tuples = [(0, 2.5), (1, 3), (2, 4)]
        index = IntervalIndex.from_tuples(tuples, closed="left")

        result_indexer, result_missing = index.get_indexer_non_unique(query)
        expected_indexer = np.array(expected[0], dtype="intp")
        expected_missing = np.array(expected[1], dtype="intp")

        tm.assert_numpy_array_equal(result_indexer, expected_indexer)
        tm.assert_numpy_array_equal(result_missing, expected_missing)

    # TODO we may also want to test get_indexer for the case when
    # the intervals are duplicated, decreasing, non-monotonic, etc..

    def test_get_indexer_non_monotonic(self):
        # GH 16410
        idx1 = IntervalIndex.from_tuples([(2, 3), (4, 5), (0, 1)])
        idx2 = IntervalIndex.from_tuples([(0, 1), (2, 3), (6, 7), (8, 9)])
        result = idx1.get_indexer(idx2)
        expected = np.array([2, 0, -1, -1], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

        result = idx1.get_indexer(idx1[1:])
        expected = np.array([1, 2], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_with_nans(self):
        # GH#41831
        index = IntervalIndex([np.nan, np.nan])
        other = IntervalIndex([np.nan])

        assert not index._index_as_unique

        result = index.get_indexer_for(other)
        expected = np.array([0, 1], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_index_non_unique_non_monotonic(self):
        # GH#44084 (root cause)
        index = IntervalIndex.from_tuples(
            [(0.0, 1.0), (1.0, 2.0), (0.0, 1.0), (1.0, 2.0)]
        )

        result, _ = index.get_indexer_non_unique([Interval(1.0, 2.0)])
        expected = np.array([1, 3], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_multiindex_with_intervals(self):
        # GH#44084 (MultiIndex case as reported)
        interval_index = IntervalIndex.from_tuples(
            [(2.0, 3.0), (0.0, 1.0), (1.0, 2.0)], name="interval"
        )
        foo_index = Index([1, 2, 3], name="foo")

        multi_index = MultiIndex.from_product([foo_index, interval_index])

        result = multi_index.get_level_values("interval").get_indexer_for(
            [Interval(0.0, 1.0)]
        )
        expected = np.array([1, 4, 7], dtype=np.intp)
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize("box", [IntervalIndex, array, list])
    def test_get_indexer_interval_index(self, box):
        # GH#30178
        rng = period_range("2022-07-01", freq="D", periods=3)
        idx = box(interval_range(Timestamp("2022-07-01"), freq="3D", periods=3))

        actual = rng.get_indexer(idx)
        expected = np.array([-1, -1, -1], dtype=np.intp)
        tm.assert_numpy_array_equal(actual, expected)

    def test_get_indexer_read_only(self):
        idx = interval_range(start=0, end=5)
        arr = np.array([1, 2])
        arr.flags.writeable = False
        result = idx.get_indexer(arr)
        expected = np.array([0, 1])
        tm.assert_numpy_array_equal(result, expected, check_dtype=False)

        result = idx.get_indexer_non_unique(arr)[0]
        tm.assert_numpy_array_equal(result, expected, check_dtype=False)


class TestSliceLocs:
    def test_slice_locs_with_interval(self):
        # increasing monotonically
        index = IntervalIndex.from_tuples([(0, 2), (1, 3), (2, 4)])

        assert index.slice_locs(start=Interval(0, 2), end=Interval(2, 4)) == (0, 3)
        assert index.slice_locs(start=Interval(0, 2)) == (0, 3)
        assert index.slice_locs(end=Interval(2, 4)) == (0, 3)
        assert index.slice_locs(end=Interval(0, 2)) == (0, 1)
        assert index.slice_locs(start=Interval(2, 4), end=Interval(0, 2)) == (2, 1)

        # decreasing monotonically
        index = IntervalIndex.from_tuples([(2, 4), (1, 3), (0, 2)])

        assert index.slice_locs(start=Interval(0, 2), end=Interval(2, 4)) == (2, 1)
        assert index.slice_locs(start=Interval(0, 2)) == (2, 3)
        assert index.slice_locs(end=Interval(2, 4)) == (0, 1)
        assert index.slice_locs(end=Interval(0, 2)) == (0, 3)
        assert index.slice_locs(start=Interval(2, 4), end=Interval(0, 2)) == (0, 3)

        # sorted duplicates
        index = IntervalIndex.from_tuples([(0, 2), (0, 2), (2, 4)])

        assert index.slice_locs(start=Interval(0, 2), end=Interval(2, 4)) == (0, 3)
        assert index.slice_locs(start=Interval(0, 2)) == (0, 3)
        assert index.slice_locs(end=Interval(2, 4)) == (0, 3)
        assert index.slice_locs(end=Interval(0, 2)) == (0, 2)
        assert index.slice_locs(start=Interval(2, 4), end=Interval(0, 2)) == (2, 2)

        # unsorted duplicates
        index = IntervalIndex.from_tuples([(0, 2), (2, 4), (0, 2)])

        with pytest.raises(
            KeyError,
            match=re.escape(
                '"Cannot get left slice bound for non-unique label: '
                "Interval(0, 2, closed='right')\""
            ),
        ):
            index.slice_locs(start=Interval(0, 2), end=Interval(2, 4))

        with pytest.raises(
            KeyError,
            match=re.escape(
                '"Cannot get left slice bound for non-unique label: '
                "Interval(0, 2, closed='right')\""
            ),
        ):
            index.slice_locs(start=Interval(0, 2))

        assert index.slice_locs(end=Interval(2, 4)) == (0, 2)

        with pytest.raises(
            KeyError,
            match=re.escape(
                '"Cannot get right slice bound for non-unique label: '
                "Interval(0, 2, closed='right')\""
            ),
        ):
            index.slice_locs(end=Interval(0, 2))

        with pytest.raises(
            KeyError,
            match=re.escape(
                '"Cannot get right slice bound for non-unique label: '
                "Interval(0, 2, closed='right')\""
            ),
        ):
            index.slice_locs(start=Interval(2, 4), end=Interval(0, 2))

        # another unsorted duplicates
        index = IntervalIndex.from_tuples([(0, 2), (0, 2), (2, 4), (1, 3)])

        assert index.slice_locs(start=Interval(0, 2), end=Interval(2, 4)) == (0, 3)
        assert index.slice_locs(start=Interval(0, 2)) == (0, 4)
        assert index.slice_locs(end=Interval(2, 4)) == (0, 3)
        assert index.slice_locs(end=Interval(0, 2)) == (0, 2)
        assert index.slice_locs(start=Interval(2, 4), end=Interval(0, 2)) == (2, 2)

    def test_slice_locs_with_ints_and_floats_succeeds(self):
        # increasing non-overlapping
        index = IntervalIndex.from_tuples([(0, 1), (1, 2), (3, 4)])

        assert index.slice_locs(0, 1) == (0, 1)
        assert index.slice_locs(0, 2) == (0, 2)
        assert index.slice_locs(0, 3) == (0, 2)
        assert index.slice_locs(3, 1) == (2, 1)
        assert index.slice_locs(3, 4) == (2, 3)
        assert index.slice_locs(0, 4) == (0, 3)

        # decreasing non-overlapping
        index = IntervalIndex.from_tuples([(3, 4), (1, 2), (0, 1)])
        assert index.slice_locs(0, 1) == (3, 3)
        assert index.slice_locs(0, 2) == (3, 2)
        assert index.slice_locs(0, 3) == (3, 1)
        assert index.slice_locs(3, 1) == (1, 3)
        assert index.slice_locs(3, 4) == (1, 1)
        assert index.slice_locs(0, 4) == (3, 1)

    @pytest.mark.parametrize("query", [[0, 1], [0, 2], [0, 3], [0, 4]])
    @pytest.mark.parametrize(
        "tuples",
        [
            [(0, 2), (1, 3), (2, 4)],
            [(2, 4), (1, 3), (0, 2)],
            [(0, 2), (0, 2), (2, 4)],
            [(0, 2), (2, 4), (0, 2)],
            [(0, 2), (0, 2), (2, 4), (1, 3)],
        ],
    )
    def test_slice_locs_with_ints_and_floats_errors(self, tuples, query):
        start, stop = query
        index = IntervalIndex.from_tuples(tuples)
        with pytest.raises(
            KeyError,
            match=(
                "'can only get slices from an IntervalIndex if bounds are "
                "non-overlapping and all monotonic increasing or decreasing'"
            ),
        ):
            index.slice_locs(start, stop)


class TestPutmask:
    @pytest.mark.parametrize("tz", ["US/Pacific", None])
    def test_putmask_dt64(self, tz):
        # GH#37968
        dti = date_range("2016-01-01", periods=9, tz=tz)
        idx = IntervalIndex.from_breaks(dti)
        mask = np.zeros(idx.shape, dtype=bool)
        mask[0:3] = True

        result = idx.putmask(mask, idx[-1])
        expected = IntervalIndex([idx[-1]] * 3 + list(idx[3:]))
        tm.assert_index_equal(result, expected)

    def test_putmask_td64(self):
        # GH#37968
        dti = date_range("2016-01-01", periods=9)
        tdi = dti - dti[0]
        idx = IntervalIndex.from_breaks(tdi)
        mask = np.zeros(idx.shape, dtype=bool)
        mask[0:3] = True

        result = idx.putmask(mask, idx[-1])
        expected = IntervalIndex([idx[-1]] * 3 + list(idx[3:]))
        tm.assert_index_equal(result, expected)


class TestContains:
    # .__contains__, not .contains

    def test_contains_dunder(self):
        index = IntervalIndex.from_arrays([0, 1], [1, 2], closed="right")

        # __contains__ requires perfect matches to intervals.
        assert 0 not in index
        assert 1 not in index
        assert 2 not in index

        assert Interval(0, 1, closed="right") in index
        assert Interval(0, 2, closed="right") not in index
        assert Interval(0, 0.5, closed="right") not in index
        assert Interval(3, 5, closed="right") not in index
        assert Interval(-1, 0, closed="left") not in index
        assert Interval(0, 1, closed="left") not in index
        assert Interval(0, 1, closed="both") not in index
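A small usage sketch of the lookup behavior tested above (illustrative values only): a scalar maps to the interval that contains it, an Interval key must match exactly, and get_indexer marks misses with -1.

from pandas import Interval, IntervalIndex

idx = IntervalIndex.from_tuples([(0, 1), (2, 3)], closed="right")
assert idx.get_loc(0.5) == 0                               # scalar contained in (0, 1]
assert idx.get_loc(Interval(2, 3, closed="right")) == 1    # exact interval match
assert list(idx.get_indexer([0.5, 1.5])) == [0, -1]        # 1.5 falls in no interval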
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_interval.py
ADDED
@@ -0,0 +1,918 @@
from itertools import permutations
import re

import numpy as np
import pytest

import pandas as pd
from pandas import (
    Index,
    Interval,
    IntervalIndex,
    Timedelta,
    Timestamp,
    date_range,
    interval_range,
    isna,
    notna,
    timedelta_range,
)
import pandas._testing as tm
import pandas.core.common as com


@pytest.fixture(params=[None, "foo"])
def name(request):
    return request.param


class TestIntervalIndex:
    index = IntervalIndex.from_arrays([0, 1], [1, 2])

    def create_index(self, closed="right"):
        return IntervalIndex.from_breaks(range(11), closed=closed)

    def create_index_with_nan(self, closed="right"):
        mask = [True, False] + [True] * 8
        return IntervalIndex.from_arrays(
            np.where(mask, np.arange(10), np.nan),
            np.where(mask, np.arange(1, 11), np.nan),
            closed=closed,
        )

    def test_properties(self, closed):
        index = self.create_index(closed=closed)
        assert len(index) == 10
        assert index.size == 10
        assert index.shape == (10,)

        tm.assert_index_equal(index.left, Index(np.arange(10, dtype=np.int64)))
        tm.assert_index_equal(index.right, Index(np.arange(1, 11, dtype=np.int64)))
        tm.assert_index_equal(index.mid, Index(np.arange(0.5, 10.5, dtype=np.float64)))

        assert index.closed == closed

        ivs = [
            Interval(left, right, closed)
            for left, right in zip(range(10), range(1, 11))
        ]
        expected = np.array(ivs, dtype=object)
        tm.assert_numpy_array_equal(np.asarray(index), expected)

        # with nans
        index = self.create_index_with_nan(closed=closed)
        assert len(index) == 10
        assert index.size == 10
        assert index.shape == (10,)

        expected_left = Index([0, np.nan, 2, 3, 4, 5, 6, 7, 8, 9])
        expected_right = expected_left + 1
        expected_mid = expected_left + 0.5
        tm.assert_index_equal(index.left, expected_left)
        tm.assert_index_equal(index.right, expected_right)
        tm.assert_index_equal(index.mid, expected_mid)

        assert index.closed == closed

        ivs = [
            Interval(left, right, closed) if notna(left) else np.nan
            for left, right in zip(expected_left, expected_right)
        ]
        expected = np.array(ivs, dtype=object)
        tm.assert_numpy_array_equal(np.asarray(index), expected)

    @pytest.mark.parametrize(
        "breaks",
        [
            [1, 1, 2, 5, 15, 53, 217, 1014, 5335, 31240, 201608],
            [-np.inf, -100, -10, 0.5, 1, 1.5, 3.8, 101, 202, np.inf],
            date_range("2017-01-01", "2017-01-04"),
            pytest.param(
                date_range("2017-01-01", "2017-01-04", unit="s"),
                marks=pytest.mark.xfail(reason="mismatched result unit"),
            ),
            pd.to_timedelta(["1ns", "2ms", "3s", "4min", "5h", "6D"]),
        ],
    )
    def test_length(self, closed, breaks):
        # GH 18789
        index = IntervalIndex.from_breaks(breaks, closed=closed)
        result = index.length
        expected = Index(iv.length for iv in index)
        tm.assert_index_equal(result, expected)

        # with NA
        index = index.insert(1, np.nan)
        result = index.length
        expected = Index(iv.length if notna(iv) else iv for iv in index)
        tm.assert_index_equal(result, expected)

    def test_with_nans(self, closed):
        index = self.create_index(closed=closed)
        assert index.hasnans is False

        result = index.isna()
        expected = np.zeros(len(index), dtype=bool)
        tm.assert_numpy_array_equal(result, expected)

        result = index.notna()
        expected = np.ones(len(index), dtype=bool)
        tm.assert_numpy_array_equal(result, expected)

        index = self.create_index_with_nan(closed=closed)
        assert index.hasnans is True

        result = index.isna()
        expected = np.array([False, True] + [False] * (len(index) - 2))
        tm.assert_numpy_array_equal(result, expected)

        result = index.notna()
        expected = np.array([True, False] + [True] * (len(index) - 2))
        tm.assert_numpy_array_equal(result, expected)

    def test_copy(self, closed):
        expected = self.create_index(closed=closed)

        result = expected.copy()
        assert result.equals(expected)

        result = expected.copy(deep=True)
        assert result.equals(expected)
        assert result.left is not expected.left

    def test_ensure_copied_data(self, closed):
        # exercise the copy flag in the constructor

        # not copying
        index = self.create_index(closed=closed)
        result = IntervalIndex(index, copy=False)
        tm.assert_numpy_array_equal(
            index.left.values, result.left.values, check_same="same"
        )
        tm.assert_numpy_array_equal(
            index.right.values, result.right.values, check_same="same"
        )

        # by-definition make a copy
        result = IntervalIndex(np.array(index), copy=False)
        tm.assert_numpy_array_equal(
            index.left.values, result.left.values, check_same="copy"
        )
        tm.assert_numpy_array_equal(
            index.right.values, result.right.values, check_same="copy"
        )

    def test_delete(self, closed):
        breaks = np.arange(1, 11, dtype=np.int64)
        expected = IntervalIndex.from_breaks(breaks, closed=closed)
        result = self.create_index(closed=closed).delete(0)
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "data",
        [
            interval_range(0, periods=10, closed="neither"),
            interval_range(1.7, periods=8, freq=2.5, closed="both"),
            interval_range(Timestamp("20170101"), periods=12, closed="left"),
            interval_range(Timedelta("1 day"), periods=6, closed="right"),
        ],
    )
    def test_insert(self, data):
        item = data[0]
        idx_item = IntervalIndex([item])

        # start
        expected = idx_item.append(data)
        result = data.insert(0, item)
        tm.assert_index_equal(result, expected)

        # end
        expected = data.append(idx_item)
        result = data.insert(len(data), item)
        tm.assert_index_equal(result, expected)

        # mid
        expected = data[:3].append(idx_item).append(data[3:])
        result = data.insert(3, item)
        tm.assert_index_equal(result, expected)

        # invalid type
        res = data.insert(1, "foo")
        expected = data.astype(object).insert(1, "foo")
        tm.assert_index_equal(res, expected)

        msg = "can only insert Interval objects and NA into an IntervalArray"
        with pytest.raises(TypeError, match=msg):
            data._data.insert(1, "foo")

        # invalid closed
        msg = "'value.closed' is 'left', expected 'right'."
        for closed in {"left", "right", "both", "neither"} - {item.closed}:
            msg = f"'value.closed' is '{closed}', expected '{item.closed}'."
            bad_item = Interval(item.left, item.right, closed=closed)
            res = data.insert(1, bad_item)
            expected = data.astype(object).insert(1, bad_item)
            tm.assert_index_equal(res, expected)
            with pytest.raises(ValueError, match=msg):
                data._data.insert(1, bad_item)

        # GH 18295 (test missing)
        na_idx = IntervalIndex([np.nan], closed=data.closed)
        for na in [np.nan, None, pd.NA]:
            expected = data[:1].append(na_idx).append(data[1:])
            result = data.insert(1, na)
            tm.assert_index_equal(result, expected)

        if data.left.dtype.kind not in ["m", "M"]:
            # trying to insert pd.NaT into a numeric-dtyped Index should cast
            expected = data.astype(object).insert(1, pd.NaT)

            msg = "can only insert Interval objects and NA into an IntervalArray"
            with pytest.raises(TypeError, match=msg):
                data._data.insert(1, pd.NaT)

        result = data.insert(1, pd.NaT)
        tm.assert_index_equal(result, expected)

    def test_is_unique_interval(self, closed):
        """
        Interval specific tests for is_unique in addition to base class tests
        """
        # unique overlapping - distinct endpoints
        idx = IntervalIndex.from_tuples([(0, 1), (0.5, 1.5)], closed=closed)
        assert idx.is_unique is True

        # unique overlapping - shared endpoints
        idx = IntervalIndex.from_tuples([(1, 2), (1, 3), (2, 3)], closed=closed)
        assert idx.is_unique is True

        # unique nested
        idx = IntervalIndex.from_tuples([(-1, 1), (-2, 2)], closed=closed)
        assert idx.is_unique is True

        # unique NaN
        idx = IntervalIndex.from_tuples([(np.nan, np.nan)], closed=closed)
        assert idx.is_unique is True

        # non-unique NaN
        idx = IntervalIndex.from_tuples(
            [(np.nan, np.nan), (np.nan, np.nan)], closed=closed
        )
        assert idx.is_unique is False

    def test_monotonic(self, closed):
        # increasing non-overlapping
        idx = IntervalIndex.from_tuples([(0, 1), (2, 3), (4, 5)], closed=closed)
        assert idx.is_monotonic_increasing is True
        assert idx._is_strictly_monotonic_increasing is True
        assert idx.is_monotonic_decreasing is False
        assert idx._is_strictly_monotonic_decreasing is False

        # decreasing non-overlapping
        idx = IntervalIndex.from_tuples([(4, 5), (2, 3), (1, 2)], closed=closed)
        assert idx.is_monotonic_increasing is False
        assert idx._is_strictly_monotonic_increasing is False
        assert idx.is_monotonic_decreasing is True
        assert idx._is_strictly_monotonic_decreasing is True

        # unordered non-overlapping
        idx = IntervalIndex.from_tuples([(0, 1), (4, 5), (2, 3)], closed=closed)
        assert idx.is_monotonic_increasing is False
        assert idx._is_strictly_monotonic_increasing is False
        assert idx.is_monotonic_decreasing is False
        assert idx._is_strictly_monotonic_decreasing is False

        # increasing overlapping
        idx = IntervalIndex.from_tuples([(0, 2), (0.5, 2.5), (1, 3)], closed=closed)
        assert idx.is_monotonic_increasing is True
        assert idx._is_strictly_monotonic_increasing is True
        assert idx.is_monotonic_decreasing is False
        assert idx._is_strictly_monotonic_decreasing is False

        # decreasing overlapping
        idx = IntervalIndex.from_tuples([(1, 3), (0.5, 2.5), (0, 2)], closed=closed)
        assert idx.is_monotonic_increasing is False
        assert idx._is_strictly_monotonic_increasing is False
        assert idx.is_monotonic_decreasing is True
        assert idx._is_strictly_monotonic_decreasing is True

        # unordered overlapping
        idx = IntervalIndex.from_tuples([(0.5, 2.5), (0, 2), (1, 3)], closed=closed)
        assert idx.is_monotonic_increasing is False
        assert idx._is_strictly_monotonic_increasing is False
        assert idx.is_monotonic_decreasing is False
        assert idx._is_strictly_monotonic_decreasing is False

        # increasing overlapping shared endpoints
        idx = IntervalIndex.from_tuples([(1, 2), (1, 3), (2, 3)], closed=closed)
        assert idx.is_monotonic_increasing is True
        assert idx._is_strictly_monotonic_increasing is True
        assert idx.is_monotonic_decreasing is False
        assert idx._is_strictly_monotonic_decreasing is False

        # decreasing overlapping shared endpoints
        idx = IntervalIndex.from_tuples([(2, 3), (1, 3), (1, 2)], closed=closed)
        assert idx.is_monotonic_increasing is False
        assert idx._is_strictly_monotonic_increasing is False
        assert idx.is_monotonic_decreasing is True
        assert idx._is_strictly_monotonic_decreasing is True

        # stationary
        idx = IntervalIndex.from_tuples([(0, 1), (0, 1)], closed=closed)
        assert idx.is_monotonic_increasing is True
        assert idx._is_strictly_monotonic_increasing is False
        assert idx.is_monotonic_decreasing is True
        assert idx._is_strictly_monotonic_decreasing is False

        # empty
        idx = IntervalIndex([], closed=closed)
        assert idx.is_monotonic_increasing is True
        assert idx._is_strictly_monotonic_increasing is True
        assert idx.is_monotonic_decreasing is True
        assert idx._is_strictly_monotonic_decreasing is True

    def test_is_monotonic_with_nans(self):
        # GH#41831
        index = IntervalIndex([np.nan, np.nan])

        assert not index.is_monotonic_increasing
        assert not index._is_strictly_monotonic_increasing
        assert not index.is_monotonic_increasing
        assert not index._is_strictly_monotonic_decreasing
        assert not index.is_monotonic_decreasing

    @pytest.mark.parametrize(
        "breaks",
        [
            date_range("20180101", periods=4),
            date_range("20180101", periods=4, tz="US/Eastern"),
            timedelta_range("0 days", periods=4),
        ],
        ids=lambda x: str(x.dtype),
    )
    def test_maybe_convert_i8(self, breaks):
        # GH 20636
        index = IntervalIndex.from_breaks(breaks)

        # intervalindex
        result = index._maybe_convert_i8(index)
        expected = IntervalIndex.from_breaks(breaks.asi8)
        tm.assert_index_equal(result, expected)

        # interval
        interval = Interval(breaks[0], breaks[1])
        result = index._maybe_convert_i8(interval)
        expected = Interval(breaks[0]._value, breaks[1]._value)
        assert result == expected

        # datetimelike index
        result = index._maybe_convert_i8(breaks)
        expected = Index(breaks.asi8)
        tm.assert_index_equal(result, expected)

        # datetimelike scalar
        result = index._maybe_convert_i8(breaks[0])
        expected = breaks[0]._value
        assert result == expected

        # list-like of datetimelike scalars
        result = index._maybe_convert_i8(list(breaks))
        expected = Index(breaks.asi8)
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "breaks",
        [date_range("2018-01-01", periods=5), timedelta_range("0 days", periods=5)],
    )
    def test_maybe_convert_i8_nat(self, breaks):
        # GH 20636
        index = IntervalIndex.from_breaks(breaks)

        to_convert = breaks._constructor([pd.NaT] * 3).as_unit("ns")
        expected = Index([np.nan] * 3, dtype=np.float64)
        result = index._maybe_convert_i8(to_convert)
|
394 |
+
tm.assert_index_equal(result, expected)
|
395 |
+
|
396 |
+
to_convert = to_convert.insert(0, breaks[0])
|
397 |
+
expected = expected.insert(0, float(breaks[0]._value))
|
398 |
+
result = index._maybe_convert_i8(to_convert)
|
399 |
+
tm.assert_index_equal(result, expected)
|
400 |
+
|
401 |
+
@pytest.mark.parametrize(
|
402 |
+
"make_key",
|
403 |
+
[lambda breaks: breaks, list],
|
404 |
+
ids=["lambda", "list"],
|
405 |
+
)
|
406 |
+
def test_maybe_convert_i8_numeric(self, make_key, any_real_numpy_dtype):
|
407 |
+
# GH 20636
|
408 |
+
breaks = np.arange(5, dtype=any_real_numpy_dtype)
|
409 |
+
index = IntervalIndex.from_breaks(breaks)
|
410 |
+
key = make_key(breaks)
|
411 |
+
|
412 |
+
result = index._maybe_convert_i8(key)
|
413 |
+
kind = breaks.dtype.kind
|
414 |
+
expected_dtype = {"i": np.int64, "u": np.uint64, "f": np.float64}[kind]
|
415 |
+
expected = Index(key, dtype=expected_dtype)
|
416 |
+
tm.assert_index_equal(result, expected)
|
417 |
+
|
418 |
+
@pytest.mark.parametrize(
|
419 |
+
"make_key",
|
420 |
+
[
|
421 |
+
IntervalIndex.from_breaks,
|
422 |
+
lambda breaks: Interval(breaks[0], breaks[1]),
|
423 |
+
lambda breaks: breaks[0],
|
424 |
+
],
|
425 |
+
ids=["IntervalIndex", "Interval", "scalar"],
|
426 |
+
)
|
427 |
+
def test_maybe_convert_i8_numeric_identical(self, make_key, any_real_numpy_dtype):
|
428 |
+
# GH 20636
|
429 |
+
breaks = np.arange(5, dtype=any_real_numpy_dtype)
|
430 |
+
index = IntervalIndex.from_breaks(breaks)
|
431 |
+
key = make_key(breaks)
|
432 |
+
|
433 |
+
# test if _maybe_convert_i8 won't change key if an Interval or IntervalIndex
|
434 |
+
result = index._maybe_convert_i8(key)
|
435 |
+
assert result is key
|
436 |
+
|
437 |
+
@pytest.mark.parametrize(
|
438 |
+
"breaks1, breaks2",
|
439 |
+
permutations(
|
440 |
+
[
|
441 |
+
date_range("20180101", periods=4),
|
442 |
+
date_range("20180101", periods=4, tz="US/Eastern"),
|
443 |
+
timedelta_range("0 days", periods=4),
|
444 |
+
],
|
445 |
+
2,
|
446 |
+
),
|
447 |
+
ids=lambda x: str(x.dtype),
|
448 |
+
)
|
449 |
+
@pytest.mark.parametrize(
|
450 |
+
"make_key",
|
451 |
+
[
|
452 |
+
IntervalIndex.from_breaks,
|
453 |
+
lambda breaks: Interval(breaks[0], breaks[1]),
|
454 |
+
lambda breaks: breaks,
|
455 |
+
lambda breaks: breaks[0],
|
456 |
+
list,
|
457 |
+
],
|
458 |
+
ids=["IntervalIndex", "Interval", "Index", "scalar", "list"],
|
459 |
+
)
|
460 |
+
def test_maybe_convert_i8_errors(self, breaks1, breaks2, make_key):
|
461 |
+
# GH 20636
|
462 |
+
index = IntervalIndex.from_breaks(breaks1)
|
463 |
+
key = make_key(breaks2)
|
464 |
+
|
465 |
+
msg = (
|
466 |
+
f"Cannot index an IntervalIndex of subtype {breaks1.dtype} with "
|
467 |
+
f"values of dtype {breaks2.dtype}"
|
468 |
+
)
|
469 |
+
msg = re.escape(msg)
|
470 |
+
with pytest.raises(ValueError, match=msg):
|
471 |
+
index._maybe_convert_i8(key)
|
472 |
+
|
473 |
+
def test_contains_method(self):
|
474 |
+
# can select values that are IN the range of a value
|
475 |
+
i = IntervalIndex.from_arrays([0, 1], [1, 2])
|
476 |
+
|
477 |
+
expected = np.array([False, False], dtype="bool")
|
478 |
+
actual = i.contains(0)
|
479 |
+
tm.assert_numpy_array_equal(actual, expected)
|
480 |
+
actual = i.contains(3)
|
481 |
+
tm.assert_numpy_array_equal(actual, expected)
|
482 |
+
|
483 |
+
expected = np.array([True, False], dtype="bool")
|
484 |
+
actual = i.contains(0.5)
|
485 |
+
tm.assert_numpy_array_equal(actual, expected)
|
486 |
+
actual = i.contains(1)
|
487 |
+
tm.assert_numpy_array_equal(actual, expected)
|
488 |
+
|
489 |
+
# __contains__ not implemented for "interval in interval", follow
|
490 |
+
# that for the contains method for now
|
491 |
+
with pytest.raises(
|
492 |
+
NotImplementedError, match="contains not implemented for two"
|
493 |
+
):
|
494 |
+
i.contains(Interval(0, 1))
|
495 |
+
|
496 |
+
def test_dropna(self, closed):
|
497 |
+
expected = IntervalIndex.from_tuples([(0.0, 1.0), (1.0, 2.0)], closed=closed)
|
498 |
+
|
499 |
+
ii = IntervalIndex.from_tuples([(0, 1), (1, 2), np.nan], closed=closed)
|
500 |
+
result = ii.dropna()
|
501 |
+
tm.assert_index_equal(result, expected)
|
502 |
+
|
503 |
+
ii = IntervalIndex.from_arrays([0, 1, np.nan], [1, 2, np.nan], closed=closed)
|
504 |
+
result = ii.dropna()
|
505 |
+
tm.assert_index_equal(result, expected)
|
506 |
+
|
507 |
+
def test_non_contiguous(self, closed):
|
508 |
+
index = IntervalIndex.from_tuples([(0, 1), (2, 3)], closed=closed)
|
509 |
+
target = [0.5, 1.5, 2.5]
|
510 |
+
actual = index.get_indexer(target)
|
511 |
+
expected = np.array([0, -1, 1], dtype="intp")
|
512 |
+
tm.assert_numpy_array_equal(actual, expected)
|
513 |
+
|
514 |
+
assert 1.5 not in index
|
515 |
+
|
516 |
+
def test_isin(self, closed):
|
517 |
+
index = self.create_index(closed=closed)
|
518 |
+
|
519 |
+
expected = np.array([True] + [False] * (len(index) - 1))
|
520 |
+
result = index.isin(index[:1])
|
521 |
+
tm.assert_numpy_array_equal(result, expected)
|
522 |
+
|
523 |
+
result = index.isin([index[0]])
|
524 |
+
tm.assert_numpy_array_equal(result, expected)
|
525 |
+
|
526 |
+
other = IntervalIndex.from_breaks(np.arange(-2, 10), closed=closed)
|
527 |
+
expected = np.array([True] * (len(index) - 1) + [False])
|
528 |
+
result = index.isin(other)
|
529 |
+
tm.assert_numpy_array_equal(result, expected)
|
530 |
+
|
531 |
+
result = index.isin(other.tolist())
|
532 |
+
tm.assert_numpy_array_equal(result, expected)
|
533 |
+
|
534 |
+
for other_closed in ["right", "left", "both", "neither"]:
|
535 |
+
other = self.create_index(closed=other_closed)
|
536 |
+
expected = np.repeat(closed == other_closed, len(index))
|
537 |
+
result = index.isin(other)
|
538 |
+
tm.assert_numpy_array_equal(result, expected)
|
539 |
+
|
540 |
+
result = index.isin(other.tolist())
|
541 |
+
tm.assert_numpy_array_equal(result, expected)
|
542 |
+
|
543 |
+
def test_comparison(self):
|
544 |
+
actual = Interval(0, 1) < self.index
|
545 |
+
expected = np.array([False, True])
|
546 |
+
tm.assert_numpy_array_equal(actual, expected)
|
547 |
+
|
548 |
+
actual = Interval(0.5, 1.5) < self.index
|
549 |
+
expected = np.array([False, True])
|
550 |
+
tm.assert_numpy_array_equal(actual, expected)
|
551 |
+
actual = self.index > Interval(0.5, 1.5)
|
552 |
+
tm.assert_numpy_array_equal(actual, expected)
|
553 |
+
|
554 |
+
actual = self.index == self.index
|
555 |
+
expected = np.array([True, True])
|
556 |
+
tm.assert_numpy_array_equal(actual, expected)
|
557 |
+
actual = self.index <= self.index
|
558 |
+
tm.assert_numpy_array_equal(actual, expected)
|
559 |
+
actual = self.index >= self.index
|
560 |
+
tm.assert_numpy_array_equal(actual, expected)
|
561 |
+
|
562 |
+
actual = self.index < self.index
|
563 |
+
expected = np.array([False, False])
|
564 |
+
tm.assert_numpy_array_equal(actual, expected)
|
565 |
+
actual = self.index > self.index
|
566 |
+
tm.assert_numpy_array_equal(actual, expected)
|
567 |
+
|
568 |
+
actual = self.index == IntervalIndex.from_breaks([0, 1, 2], "left")
|
569 |
+
tm.assert_numpy_array_equal(actual, expected)
|
570 |
+
|
571 |
+
actual = self.index == self.index.values
|
572 |
+
tm.assert_numpy_array_equal(actual, np.array([True, True]))
|
573 |
+
actual = self.index.values == self.index
|
574 |
+
tm.assert_numpy_array_equal(actual, np.array([True, True]))
|
575 |
+
actual = self.index <= self.index.values
|
576 |
+
tm.assert_numpy_array_equal(actual, np.array([True, True]))
|
577 |
+
actual = self.index != self.index.values
|
578 |
+
tm.assert_numpy_array_equal(actual, np.array([False, False]))
|
579 |
+
actual = self.index > self.index.values
|
580 |
+
tm.assert_numpy_array_equal(actual, np.array([False, False]))
|
581 |
+
actual = self.index.values > self.index
|
582 |
+
tm.assert_numpy_array_equal(actual, np.array([False, False]))
|
583 |
+
|
584 |
+
# invalid comparisons
|
585 |
+
actual = self.index == 0
|
586 |
+
tm.assert_numpy_array_equal(actual, np.array([False, False]))
|
587 |
+
actual = self.index == self.index.left
|
588 |
+
tm.assert_numpy_array_equal(actual, np.array([False, False]))
|
589 |
+
|
590 |
+
msg = "|".join(
|
591 |
+
[
|
592 |
+
"not supported between instances of 'int' and '.*.Interval'",
|
593 |
+
r"Invalid comparison between dtype=interval\[int64, right\] and ",
|
594 |
+
]
|
595 |
+
)
|
596 |
+
with pytest.raises(TypeError, match=msg):
|
597 |
+
self.index > 0
|
598 |
+
with pytest.raises(TypeError, match=msg):
|
599 |
+
self.index <= 0
|
600 |
+
with pytest.raises(TypeError, match=msg):
|
601 |
+
self.index > np.arange(2)
|
602 |
+
|
603 |
+
msg = "Lengths must match to compare"
|
604 |
+
with pytest.raises(ValueError, match=msg):
|
605 |
+
self.index > np.arange(3)
|
606 |
+
|
607 |
+
def test_missing_values(self, closed):
|
608 |
+
idx = Index(
|
609 |
+
[np.nan, Interval(0, 1, closed=closed), Interval(1, 2, closed=closed)]
|
610 |
+
)
|
611 |
+
idx2 = IntervalIndex.from_arrays([np.nan, 0, 1], [np.nan, 1, 2], closed=closed)
|
612 |
+
assert idx.equals(idx2)
|
613 |
+
|
614 |
+
msg = (
|
615 |
+
"missing values must be missing in the same location both left "
|
616 |
+
"and right sides"
|
617 |
+
)
|
618 |
+
with pytest.raises(ValueError, match=msg):
|
619 |
+
IntervalIndex.from_arrays(
|
620 |
+
[np.nan, 0, 1], np.array([0, 1, 2]), closed=closed
|
621 |
+
)
|
622 |
+
|
623 |
+
tm.assert_numpy_array_equal(isna(idx), np.array([True, False, False]))
|
624 |
+
|
625 |
+
def test_sort_values(self, closed):
|
626 |
+
index = self.create_index(closed=closed)
|
627 |
+
|
628 |
+
result = index.sort_values()
|
629 |
+
tm.assert_index_equal(result, index)
|
630 |
+
|
631 |
+
result = index.sort_values(ascending=False)
|
632 |
+
tm.assert_index_equal(result, index[::-1])
|
633 |
+
|
634 |
+
# with nan
|
635 |
+
index = IntervalIndex([Interval(1, 2), np.nan, Interval(0, 1)])
|
636 |
+
|
637 |
+
result = index.sort_values()
|
638 |
+
expected = IntervalIndex([Interval(0, 1), Interval(1, 2), np.nan])
|
639 |
+
tm.assert_index_equal(result, expected)
|
640 |
+
|
641 |
+
result = index.sort_values(ascending=False, na_position="first")
|
642 |
+
expected = IntervalIndex([np.nan, Interval(1, 2), Interval(0, 1)])
|
643 |
+
tm.assert_index_equal(result, expected)
|
644 |
+
|
645 |
+
@pytest.mark.parametrize("tz", [None, "US/Eastern"])
|
646 |
+
def test_datetime(self, tz):
|
647 |
+
start = Timestamp("2000-01-01", tz=tz)
|
648 |
+
dates = date_range(start=start, periods=10)
|
649 |
+
index = IntervalIndex.from_breaks(dates)
|
650 |
+
|
651 |
+
# test mid
|
652 |
+
start = Timestamp("2000-01-01T12:00", tz=tz)
|
653 |
+
expected = date_range(start=start, periods=9)
|
654 |
+
tm.assert_index_equal(index.mid, expected)
|
655 |
+
|
656 |
+
# __contains__ doesn't check individual points
|
657 |
+
assert Timestamp("2000-01-01", tz=tz) not in index
|
658 |
+
assert Timestamp("2000-01-01T12", tz=tz) not in index
|
659 |
+
assert Timestamp("2000-01-02", tz=tz) not in index
|
660 |
+
iv_true = Interval(
|
661 |
+
Timestamp("2000-01-02", tz=tz), Timestamp("2000-01-03", tz=tz)
|
662 |
+
)
|
663 |
+
iv_false = Interval(
|
664 |
+
Timestamp("1999-12-31", tz=tz), Timestamp("2000-01-01", tz=tz)
|
665 |
+
)
|
666 |
+
assert iv_true in index
|
667 |
+
assert iv_false not in index
|
668 |
+
|
669 |
+
# .contains does check individual points
|
670 |
+
assert not index.contains(Timestamp("2000-01-01", tz=tz)).any()
|
671 |
+
assert index.contains(Timestamp("2000-01-01T12", tz=tz)).any()
|
672 |
+
assert index.contains(Timestamp("2000-01-02", tz=tz)).any()
|
673 |
+
|
674 |
+
# test get_indexer
|
675 |
+
start = Timestamp("1999-12-31T12:00", tz=tz)
|
676 |
+
target = date_range(start=start, periods=7, freq="12h")
|
677 |
+
actual = index.get_indexer(target)
|
678 |
+
expected = np.array([-1, -1, 0, 0, 1, 1, 2], dtype="intp")
|
679 |
+
tm.assert_numpy_array_equal(actual, expected)
|
680 |
+
|
681 |
+
start = Timestamp("2000-01-08T18:00", tz=tz)
|
682 |
+
target = date_range(start=start, periods=7, freq="6h")
|
683 |
+
actual = index.get_indexer(target)
|
684 |
+
expected = np.array([7, 7, 8, 8, 8, 8, -1], dtype="intp")
|
685 |
+
tm.assert_numpy_array_equal(actual, expected)
|
686 |
+
|
687 |
+
def test_append(self, closed):
|
688 |
+
index1 = IntervalIndex.from_arrays([0, 1], [1, 2], closed=closed)
|
689 |
+
index2 = IntervalIndex.from_arrays([1, 2], [2, 3], closed=closed)
|
690 |
+
|
691 |
+
result = index1.append(index2)
|
692 |
+
expected = IntervalIndex.from_arrays([0, 1, 1, 2], [1, 2, 2, 3], closed=closed)
|
693 |
+
tm.assert_index_equal(result, expected)
|
694 |
+
|
695 |
+
result = index1.append([index1, index2])
|
696 |
+
expected = IntervalIndex.from_arrays(
|
697 |
+
[0, 1, 0, 1, 1, 2], [1, 2, 1, 2, 2, 3], closed=closed
|
698 |
+
)
|
699 |
+
tm.assert_index_equal(result, expected)
|
700 |
+
|
701 |
+
for other_closed in {"left", "right", "both", "neither"} - {closed}:
|
702 |
+
index_other_closed = IntervalIndex.from_arrays(
|
703 |
+
[0, 1], [1, 2], closed=other_closed
|
704 |
+
)
|
705 |
+
result = index1.append(index_other_closed)
|
706 |
+
expected = index1.astype(object).append(index_other_closed.astype(object))
|
707 |
+
tm.assert_index_equal(result, expected)
|
708 |
+
|
709 |
+
def test_is_non_overlapping_monotonic(self, closed):
|
710 |
+
# Should be True in all cases
|
711 |
+
tpls = [(0, 1), (2, 3), (4, 5), (6, 7)]
|
712 |
+
idx = IntervalIndex.from_tuples(tpls, closed=closed)
|
713 |
+
assert idx.is_non_overlapping_monotonic is True
|
714 |
+
|
715 |
+
idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed)
|
716 |
+
assert idx.is_non_overlapping_monotonic is True
|
717 |
+
|
718 |
+
# Should be False in all cases (overlapping)
|
719 |
+
tpls = [(0, 2), (1, 3), (4, 5), (6, 7)]
|
720 |
+
idx = IntervalIndex.from_tuples(tpls, closed=closed)
|
721 |
+
assert idx.is_non_overlapping_monotonic is False
|
722 |
+
|
723 |
+
idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed)
|
724 |
+
assert idx.is_non_overlapping_monotonic is False
|
725 |
+
|
726 |
+
# Should be False in all cases (non-monotonic)
|
727 |
+
tpls = [(0, 1), (2, 3), (6, 7), (4, 5)]
|
728 |
+
idx = IntervalIndex.from_tuples(tpls, closed=closed)
|
729 |
+
assert idx.is_non_overlapping_monotonic is False
|
730 |
+
|
731 |
+
idx = IntervalIndex.from_tuples(tpls[::-1], closed=closed)
|
732 |
+
assert idx.is_non_overlapping_monotonic is False
|
733 |
+
|
734 |
+
# Should be False for closed='both', otherwise True (GH16560)
|
735 |
+
if closed == "both":
|
736 |
+
idx = IntervalIndex.from_breaks(range(4), closed=closed)
|
737 |
+
assert idx.is_non_overlapping_monotonic is False
|
738 |
+
else:
|
739 |
+
idx = IntervalIndex.from_breaks(range(4), closed=closed)
|
740 |
+
assert idx.is_non_overlapping_monotonic is True
|
741 |
+
|
742 |
+
@pytest.mark.parametrize(
|
743 |
+
"start, shift, na_value",
|
744 |
+
[
|
745 |
+
(0, 1, np.nan),
|
746 |
+
(Timestamp("2018-01-01"), Timedelta("1 day"), pd.NaT),
|
747 |
+
(Timedelta("0 days"), Timedelta("1 day"), pd.NaT),
|
748 |
+
],
|
749 |
+
)
|
750 |
+
def test_is_overlapping(self, start, shift, na_value, closed):
|
751 |
+
# GH 23309
|
752 |
+
# see test_interval_tree.py for extensive tests; interface tests here
|
753 |
+
|
754 |
+
# non-overlapping
|
755 |
+
tuples = [(start + n * shift, start + (n + 1) * shift) for n in (0, 2, 4)]
|
756 |
+
index = IntervalIndex.from_tuples(tuples, closed=closed)
|
757 |
+
assert index.is_overlapping is False
|
758 |
+
|
759 |
+
# non-overlapping with NA
|
760 |
+
tuples = [(na_value, na_value)] + tuples + [(na_value, na_value)]
|
761 |
+
index = IntervalIndex.from_tuples(tuples, closed=closed)
|
762 |
+
assert index.is_overlapping is False
|
763 |
+
|
764 |
+
# overlapping
|
765 |
+
tuples = [(start + n * shift, start + (n + 2) * shift) for n in range(3)]
|
766 |
+
index = IntervalIndex.from_tuples(tuples, closed=closed)
|
767 |
+
assert index.is_overlapping is True
|
768 |
+
|
769 |
+
# overlapping with NA
|
770 |
+
tuples = [(na_value, na_value)] + tuples + [(na_value, na_value)]
|
771 |
+
index = IntervalIndex.from_tuples(tuples, closed=closed)
|
772 |
+
assert index.is_overlapping is True
|
773 |
+
|
774 |
+
# common endpoints
|
775 |
+
tuples = [(start + n * shift, start + (n + 1) * shift) for n in range(3)]
|
776 |
+
index = IntervalIndex.from_tuples(tuples, closed=closed)
|
777 |
+
result = index.is_overlapping
|
778 |
+
expected = closed == "both"
|
779 |
+
assert result is expected
|
780 |
+
|
781 |
+
# common endpoints with NA
|
782 |
+
tuples = [(na_value, na_value)] + tuples + [(na_value, na_value)]
|
783 |
+
index = IntervalIndex.from_tuples(tuples, closed=closed)
|
784 |
+
result = index.is_overlapping
|
785 |
+
assert result is expected
|
786 |
+
|
787 |
+
# intervals with duplicate left values
|
788 |
+
a = [10, 15, 20, 25, 30, 35, 40, 45, 45, 50, 55, 60, 65, 70, 75, 80, 85]
|
789 |
+
b = [15, 20, 25, 30, 35, 40, 45, 45, 50, 55, 60, 65, 70, 75, 80, 85, 90]
|
790 |
+
index = IntervalIndex.from_arrays(a, b, closed="right")
|
791 |
+
result = index.is_overlapping
|
792 |
+
assert result is False
|
793 |
+
|
794 |
+
@pytest.mark.parametrize(
|
795 |
+
"tuples",
|
796 |
+
[
|
797 |
+
list(zip(range(10), range(1, 11))),
|
798 |
+
list(
|
799 |
+
zip(
|
800 |
+
date_range("20170101", periods=10),
|
801 |
+
date_range("20170101", periods=10),
|
802 |
+
)
|
803 |
+
),
|
804 |
+
list(
|
805 |
+
zip(
|
806 |
+
timedelta_range("0 days", periods=10),
|
807 |
+
timedelta_range("1 day", periods=10),
|
808 |
+
)
|
809 |
+
),
|
810 |
+
],
|
811 |
+
)
|
812 |
+
def test_to_tuples(self, tuples):
|
813 |
+
# GH 18756
|
814 |
+
idx = IntervalIndex.from_tuples(tuples)
|
815 |
+
result = idx.to_tuples()
|
816 |
+
expected = Index(com.asarray_tuplesafe(tuples))
|
817 |
+
tm.assert_index_equal(result, expected)
|
818 |
+
|
819 |
+
@pytest.mark.parametrize(
|
820 |
+
"tuples",
|
821 |
+
[
|
822 |
+
list(zip(range(10), range(1, 11))) + [np.nan],
|
823 |
+
list(
|
824 |
+
zip(
|
825 |
+
date_range("20170101", periods=10),
|
826 |
+
date_range("20170101", periods=10),
|
827 |
+
)
|
828 |
+
)
|
829 |
+
+ [np.nan],
|
830 |
+
list(
|
831 |
+
zip(
|
832 |
+
timedelta_range("0 days", periods=10),
|
833 |
+
timedelta_range("1 day", periods=10),
|
834 |
+
)
|
835 |
+
)
|
836 |
+
+ [np.nan],
|
837 |
+
],
|
838 |
+
)
|
839 |
+
@pytest.mark.parametrize("na_tuple", [True, False])
|
840 |
+
def test_to_tuples_na(self, tuples, na_tuple):
|
841 |
+
# GH 18756
|
842 |
+
idx = IntervalIndex.from_tuples(tuples)
|
843 |
+
result = idx.to_tuples(na_tuple=na_tuple)
|
844 |
+
|
845 |
+
# check the non-NA portion
|
846 |
+
expected_notna = Index(com.asarray_tuplesafe(tuples[:-1]))
|
847 |
+
result_notna = result[:-1]
|
848 |
+
tm.assert_index_equal(result_notna, expected_notna)
|
849 |
+
|
850 |
+
# check the NA portion
|
851 |
+
result_na = result[-1]
|
852 |
+
if na_tuple:
|
853 |
+
assert isinstance(result_na, tuple)
|
854 |
+
assert len(result_na) == 2
|
855 |
+
assert all(isna(x) for x in result_na)
|
856 |
+
else:
|
857 |
+
assert isna(result_na)
|
858 |
+
|
859 |
+
def test_nbytes(self):
|
860 |
+
# GH 19209
|
861 |
+
left = np.arange(0, 4, dtype="i8")
|
862 |
+
right = np.arange(1, 5, dtype="i8")
|
863 |
+
|
864 |
+
result = IntervalIndex.from_arrays(left, right).nbytes
|
865 |
+
expected = 64 # 4 * 8 * 2
|
866 |
+
assert result == expected
|
867 |
+
|
868 |
+
@pytest.mark.parametrize("new_closed", ["left", "right", "both", "neither"])
|
869 |
+
def test_set_closed(self, name, closed, new_closed):
|
870 |
+
# GH 21670
|
871 |
+
index = interval_range(0, 5, closed=closed, name=name)
|
872 |
+
result = index.set_closed(new_closed)
|
873 |
+
expected = interval_range(0, 5, closed=new_closed, name=name)
|
874 |
+
tm.assert_index_equal(result, expected)
|
875 |
+
|
876 |
+
@pytest.mark.parametrize("bad_closed", ["foo", 10, "LEFT", True, False])
|
877 |
+
def test_set_closed_errors(self, bad_closed):
|
878 |
+
# GH 21670
|
879 |
+
index = interval_range(0, 5)
|
880 |
+
msg = f"invalid option for 'closed': {bad_closed}"
|
881 |
+
with pytest.raises(ValueError, match=msg):
|
882 |
+
index.set_closed(bad_closed)
|
883 |
+
|
884 |
+
def test_is_all_dates(self):
|
885 |
+
# GH 23576
|
886 |
+
year_2017 = Interval(
|
887 |
+
Timestamp("2017-01-01 00:00:00"), Timestamp("2018-01-01 00:00:00")
|
888 |
+
)
|
889 |
+
year_2017_index = IntervalIndex([year_2017])
|
890 |
+
assert not year_2017_index._is_all_dates
|
891 |
+
|
892 |
+
|
893 |
+
def test_dir():
|
894 |
+
# GH#27571 dir(interval_index) should not raise
|
895 |
+
index = IntervalIndex.from_arrays([0, 1], [1, 2])
|
896 |
+
result = dir(index)
|
897 |
+
assert "str" not in result
|
898 |
+
|
899 |
+
|
900 |
+
def test_searchsorted_different_argument_classes(listlike_box):
|
901 |
+
# https://github.com/pandas-dev/pandas/issues/32762
|
902 |
+
values = IntervalIndex([Interval(0, 1), Interval(1, 2)])
|
903 |
+
result = values.searchsorted(listlike_box(values))
|
904 |
+
expected = np.array([0, 1], dtype=result.dtype)
|
905 |
+
tm.assert_numpy_array_equal(result, expected)
|
906 |
+
|
907 |
+
result = values._data.searchsorted(listlike_box(values))
|
908 |
+
tm.assert_numpy_array_equal(result, expected)
|
909 |
+
|
910 |
+
|
911 |
+
@pytest.mark.parametrize(
|
912 |
+
"arg", [[1, 2], ["a", "b"], [Timestamp("2020-01-01", tz="Europe/London")] * 2]
|
913 |
+
)
|
914 |
+
def test_searchsorted_invalid_argument(arg):
|
915 |
+
values = IntervalIndex([Interval(0, 1), Interval(1, 2)])
|
916 |
+
msg = "'<' not supported between instances of 'pandas._libs.interval.Interval' and "
|
917 |
+
with pytest.raises(TypeError, match=msg):
|
918 |
+
values.searchsorted(arg)
|
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_interval_range.py
ADDED
@@ -0,0 +1,369 @@
from datetime import timedelta

import numpy as np
import pytest

from pandas.core.dtypes.common import is_integer

from pandas import (
    DateOffset,
    Interval,
    IntervalIndex,
    Timedelta,
    Timestamp,
    date_range,
    interval_range,
    timedelta_range,
)
import pandas._testing as tm

from pandas.tseries.offsets import Day


@pytest.fixture(params=[None, "foo"])
def name(request):
    return request.param


class TestIntervalRange:
    @pytest.mark.parametrize("freq, periods", [(1, 100), (2.5, 40), (5, 20), (25, 4)])
    def test_constructor_numeric(self, closed, name, freq, periods):
        start, end = 0, 100
        breaks = np.arange(101, step=freq)
        expected = IntervalIndex.from_breaks(breaks, name=name, closed=closed)

        # defined from start/end/freq
        result = interval_range(
            start=start, end=end, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # defined from start/periods/freq
        result = interval_range(
            start=start, periods=periods, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # defined from end/periods/freq
        result = interval_range(
            end=end, periods=periods, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # GH 20976: linspace behavior defined from start/end/periods
        result = interval_range(
            start=start, end=end, periods=periods, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize("tz", [None, "US/Eastern"])
    @pytest.mark.parametrize(
        "freq, periods", [("D", 364), ("2D", 182), ("22D18h", 16), ("ME", 11)]
    )
    def test_constructor_timestamp(self, closed, name, freq, periods, tz):
        start, end = Timestamp("20180101", tz=tz), Timestamp("20181231", tz=tz)
        breaks = date_range(start=start, end=end, freq=freq)
        expected = IntervalIndex.from_breaks(breaks, name=name, closed=closed)

        # defined from start/end/freq
        result = interval_range(
            start=start, end=end, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # defined from start/periods/freq
        result = interval_range(
            start=start, periods=periods, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # defined from end/periods/freq
        result = interval_range(
            end=end, periods=periods, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # GH 20976: linspace behavior defined from start/end/periods
        if not breaks.freq.n == 1 and tz is None:
            result = interval_range(
                start=start, end=end, periods=periods, name=name, closed=closed
            )
            tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "freq, periods", [("D", 100), ("2D12h", 40), ("5D", 20), ("25D", 4)]
    )
    def test_constructor_timedelta(self, closed, name, freq, periods):
        start, end = Timedelta("0 days"), Timedelta("100 days")
        breaks = timedelta_range(start=start, end=end, freq=freq)
        expected = IntervalIndex.from_breaks(breaks, name=name, closed=closed)

        # defined from start/end/freq
        result = interval_range(
            start=start, end=end, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # defined from start/periods/freq
        result = interval_range(
            start=start, periods=periods, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # defined from end/periods/freq
        result = interval_range(
            end=end, periods=periods, freq=freq, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

        # GH 20976: linspace behavior defined from start/end/periods
        result = interval_range(
            start=start, end=end, periods=periods, name=name, closed=closed
        )
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "start, end, freq, expected_endpoint",
        [
            (0, 10, 3, 9),
            (0, 10, 1.5, 9),
            (0.5, 10, 3, 9.5),
            (Timedelta("0D"), Timedelta("10D"), "2D4h", Timedelta("8D16h")),
            (
                Timestamp("2018-01-01"),
                Timestamp("2018-02-09"),
                "MS",
                Timestamp("2018-02-01"),
            ),
            (
                Timestamp("2018-01-01", tz="US/Eastern"),
                Timestamp("2018-01-20", tz="US/Eastern"),
                "5D12h",
                Timestamp("2018-01-17 12:00:00", tz="US/Eastern"),
            ),
        ],
    )
    def test_early_truncation(self, start, end, freq, expected_endpoint):
        # index truncates early if freq causes end to be skipped
        result = interval_range(start=start, end=end, freq=freq)
        result_endpoint = result.right[-1]
        assert result_endpoint == expected_endpoint

    @pytest.mark.parametrize(
        "start, end, freq",
        [(0.5, None, None), (None, 4.5, None), (0.5, None, 1.5), (None, 6.5, 1.5)],
    )
    def test_no_invalid_float_truncation(self, start, end, freq):
        # GH 21161
        if freq is None:
            breaks = [0.5, 1.5, 2.5, 3.5, 4.5]
        else:
            breaks = [0.5, 2.0, 3.5, 5.0, 6.5]
        expected = IntervalIndex.from_breaks(breaks)

        result = interval_range(start=start, end=end, periods=4, freq=freq)
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize(
        "start, mid, end",
        [
            (
                Timestamp("2018-03-10", tz="US/Eastern"),
                Timestamp("2018-03-10 23:30:00", tz="US/Eastern"),
                Timestamp("2018-03-12", tz="US/Eastern"),
            ),
            (
                Timestamp("2018-11-03", tz="US/Eastern"),
                Timestamp("2018-11-04 00:30:00", tz="US/Eastern"),
                Timestamp("2018-11-05", tz="US/Eastern"),
            ),
        ],
    )
    def test_linspace_dst_transition(self, start, mid, end):
        # GH 20976: linspace behavior defined from start/end/periods
        # accounts for the hour gained/lost during DST transition
        start = start.as_unit("ns")
        mid = mid.as_unit("ns")
        end = end.as_unit("ns")
        result = interval_range(start=start, end=end, periods=2)
        expected = IntervalIndex.from_breaks([start, mid, end])
        tm.assert_index_equal(result, expected)

    @pytest.mark.parametrize("freq", [2, 2.0])
    @pytest.mark.parametrize("end", [10, 10.0])
    @pytest.mark.parametrize("start", [0, 0.0])
    def test_float_subtype(self, start, end, freq):
        # Has float subtype if any of start/end/freq are float, even if all
        # resulting endpoints can safely be upcast to integers

        # defined from start/end/freq
        index = interval_range(start=start, end=end, freq=freq)
        result = index.dtype.subtype
        expected = "int64" if is_integer(start + end + freq) else "float64"
        assert result == expected

        # defined from start/periods/freq
        index = interval_range(start=start, periods=5, freq=freq)
        result = index.dtype.subtype
        expected = "int64" if is_integer(start + freq) else "float64"
        assert result == expected

        # defined from end/periods/freq
        index = interval_range(end=end, periods=5, freq=freq)
        result = index.dtype.subtype
        expected = "int64" if is_integer(end + freq) else "float64"
        assert result == expected

        # GH 20976: linspace behavior defined from start/end/periods
        index = interval_range(start=start, end=end, periods=5)
        result = index.dtype.subtype
        expected = "int64" if is_integer(start + end) else "float64"
        assert result == expected

    def test_interval_range_fractional_period(self):
        # float value for periods
        expected = interval_range(start=0, periods=10)
        msg = "Non-integer 'periods' in pd.date_range, .* pd.interval_range"
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = interval_range(start=0, periods=10.5)
        tm.assert_index_equal(result, expected)

    def test_constructor_coverage(self):
        # equivalent timestamp-like start/end
        start, end = Timestamp("2017-01-01"), Timestamp("2017-01-15")
        expected = interval_range(start=start, end=end)

        result = interval_range(start=start.to_pydatetime(), end=end.to_pydatetime())
        tm.assert_index_equal(result, expected)

        result = interval_range(start=start.asm8, end=end.asm8)
        tm.assert_index_equal(result, expected)

        # equivalent freq with timestamp
        equiv_freq = [
            "D",
            Day(),
            Timedelta(days=1),
            timedelta(days=1),
            DateOffset(days=1),
        ]
        for freq in equiv_freq:
            result = interval_range(start=start, end=end, freq=freq)
            tm.assert_index_equal(result, expected)

        # equivalent timedelta-like start/end
        start, end = Timedelta(days=1), Timedelta(days=10)
        expected = interval_range(start=start, end=end)

        result = interval_range(start=start.to_pytimedelta(), end=end.to_pytimedelta())
        tm.assert_index_equal(result, expected)

        result = interval_range(start=start.asm8, end=end.asm8)
        tm.assert_index_equal(result, expected)

        # equivalent freq with timedelta
        equiv_freq = ["D", Day(), Timedelta(days=1), timedelta(days=1)]
        for freq in equiv_freq:
            result = interval_range(start=start, end=end, freq=freq)
            tm.assert_index_equal(result, expected)

    def test_errors(self):
        # not enough params
        msg = (
            "Of the four parameters: start, end, periods, and freq, "
            "exactly three must be specified"
        )

        with pytest.raises(ValueError, match=msg):
            interval_range(start=0)

        with pytest.raises(ValueError, match=msg):
            interval_range(end=5)

        with pytest.raises(ValueError, match=msg):
            interval_range(periods=2)

        with pytest.raises(ValueError, match=msg):
            interval_range()

        # too many params
        with pytest.raises(ValueError, match=msg):
            interval_range(start=0, end=5, periods=6, freq=1.5)

        # mixed units
        msg = "start, end, freq need to be type compatible"
        with pytest.raises(TypeError, match=msg):
            interval_range(start=0, end=Timestamp("20130101"), freq=2)

        with pytest.raises(TypeError, match=msg):
            interval_range(start=0, end=Timedelta("1 day"), freq=2)

        with pytest.raises(TypeError, match=msg):
            interval_range(start=0, end=10, freq="D")

        with pytest.raises(TypeError, match=msg):
            interval_range(start=Timestamp("20130101"), end=10, freq="D")

        with pytest.raises(TypeError, match=msg):
            interval_range(
                start=Timestamp("20130101"), end=Timedelta("1 day"), freq="D"
            )

        with pytest.raises(TypeError, match=msg):
            interval_range(
                start=Timestamp("20130101"), end=Timestamp("20130110"), freq=2
            )

        with pytest.raises(TypeError, match=msg):
            interval_range(start=Timedelta("1 day"), end=10, freq="D")

        with pytest.raises(TypeError, match=msg):
            interval_range(
                start=Timedelta("1 day"), end=Timestamp("20130110"), freq="D"
            )

        with pytest.raises(TypeError, match=msg):
            interval_range(start=Timedelta("1 day"), end=Timedelta("10 days"), freq=2)

        # invalid periods
        msg = "periods must be a number, got foo"
        with pytest.raises(TypeError, match=msg):
            interval_range(start=0, periods="foo")

        # invalid start
        msg = "start must be numeric or datetime-like, got foo"
        with pytest.raises(ValueError, match=msg):
            interval_range(start="foo", periods=10)

        # invalid end
        msg = r"end must be numeric or datetime-like, got \(0, 1\]"
        with pytest.raises(ValueError, match=msg):
            interval_range(end=Interval(0, 1), periods=10)

        # invalid freq for datetime-like
        msg = "freq must be numeric or convertible to DateOffset, got foo"
        with pytest.raises(ValueError, match=msg):
            interval_range(start=0, end=10, freq="foo")

        with pytest.raises(ValueError, match=msg):
            interval_range(start=Timestamp("20130101"), periods=10, freq="foo")

        with pytest.raises(ValueError, match=msg):
            interval_range(end=Timedelta("1 day"), periods=10, freq="foo")

        # mixed tz
        start = Timestamp("2017-01-01", tz="US/Eastern")
        end = Timestamp("2017-01-07", tz="US/Pacific")
        msg = "Start and end cannot both be tz-aware with different timezones"
        with pytest.raises(TypeError, match=msg):
            interval_range(start=start, end=end)

    def test_float_freq(self):
        # GH 54477
        result = interval_range(0, 1, freq=0.1)
        expected = IntervalIndex.from_breaks([0 + 0.1 * n for n in range(11)])
        tm.assert_index_equal(result, expected)

        result = interval_range(0, 1, freq=0.6)
        expected = IntervalIndex.from_breaks([0, 0.6])
        tm.assert_index_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_interval_tree.py
ADDED
@@ -0,0 +1,209 @@
from itertools import permutations

import numpy as np
import pytest

from pandas._libs.interval import IntervalTree
from pandas.compat import IS64

import pandas._testing as tm


def skipif_32bit(param):
    """
    Skip parameters in a parametrize on 32bit systems. Specifically used
    here to skip leaf_size parameters related to GH 23440.
    """
    marks = pytest.mark.skipif(not IS64, reason="GH 23440: int type mismatch on 32bit")
    return pytest.param(param, marks=marks)


@pytest.fixture(params=["int64", "float64", "uint64"])
def dtype(request):
    return request.param


@pytest.fixture(params=[skipif_32bit(1), skipif_32bit(2), 10])
def leaf_size(request):
    """
    Fixture to specify IntervalTree leaf_size parameter; to be used with the
    tree fixture.
    """
    return request.param


@pytest.fixture(
    params=[
        np.arange(5, dtype="int64"),
        np.arange(5, dtype="uint64"),
        np.arange(5, dtype="float64"),
        np.array([0, 1, 2, 3, 4, np.nan], dtype="float64"),
    ]
)
def tree(request, leaf_size):
    left = request.param
    return IntervalTree(left, left + 2, leaf_size=leaf_size)


class TestIntervalTree:
    def test_get_indexer(self, tree):
        result = tree.get_indexer(np.array([1.0, 5.5, 6.5]))
        expected = np.array([0, 4, -1], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

        with pytest.raises(
            KeyError, match="'indexer does not intersect a unique set of intervals'"
        ):
            tree.get_indexer(np.array([3.0]))

    @pytest.mark.parametrize(
        "dtype, target_value, target_dtype",
        [("int64", 2**63 + 1, "uint64"), ("uint64", -1, "int64")],
    )
    def test_get_indexer_overflow(self, dtype, target_value, target_dtype):
        left, right = np.array([0, 1], dtype=dtype), np.array([1, 2], dtype=dtype)
        tree = IntervalTree(left, right)

        result = tree.get_indexer(np.array([target_value], dtype=target_dtype))
        expected = np.array([-1], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    def test_get_indexer_non_unique(self, tree):
        indexer, missing = tree.get_indexer_non_unique(np.array([1.0, 2.0, 6.5]))

        result = indexer[:1]
        expected = np.array([0], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

        result = np.sort(indexer[1:3])
        expected = np.array([0, 1], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

        result = np.sort(indexer[3:])
        expected = np.array([-1], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

        result = missing
        expected = np.array([2], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize(
        "dtype, target_value, target_dtype",
        [("int64", 2**63 + 1, "uint64"), ("uint64", -1, "int64")],
    )
    def test_get_indexer_non_unique_overflow(self, dtype, target_value, target_dtype):
        left, right = np.array([0, 2], dtype=dtype), np.array([1, 3], dtype=dtype)
        tree = IntervalTree(left, right)
        target = np.array([target_value], dtype=target_dtype)

        result_indexer, result_missing = tree.get_indexer_non_unique(target)
        expected_indexer = np.array([-1], dtype="intp")
        tm.assert_numpy_array_equal(result_indexer, expected_indexer)

        expected_missing = np.array([0], dtype="intp")
        tm.assert_numpy_array_equal(result_missing, expected_missing)

    def test_duplicates(self, dtype):
        left = np.array([0, 0, 0], dtype=dtype)
        tree = IntervalTree(left, left + 1)

        with pytest.raises(
            KeyError, match="'indexer does not intersect a unique set of intervals'"
        ):
            tree.get_indexer(np.array([0.5]))

        indexer, missing = tree.get_indexer_non_unique(np.array([0.5]))
        result = np.sort(indexer)
        expected = np.array([0, 1, 2], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

        result = missing
        expected = np.array([], dtype="intp")
        tm.assert_numpy_array_equal(result, expected)

    @pytest.mark.parametrize(
        "leaf_size", [skipif_32bit(1), skipif_32bit(10), skipif_32bit(100), 10000]
    )
    def test_get_indexer_closed(self, closed, leaf_size):
        x = np.arange(1000, dtype="float64")
        found = x.astype("intp")
        not_found = (-1 * np.ones(1000)).astype("intp")

        tree = IntervalTree(x, x + 0.5, closed=closed, leaf_size=leaf_size)
        tm.assert_numpy_array_equal(found, tree.get_indexer(x + 0.25))

        expected = found if tree.closed_left else not_found
        tm.assert_numpy_array_equal(expected, tree.get_indexer(x + 0.0))

        expected = found if tree.closed_right else not_found
        tm.assert_numpy_array_equal(expected, tree.get_indexer(x + 0.5))

    @pytest.mark.parametrize(
        "left, right, expected",
        [
            (np.array([0, 1, 4], dtype="int64"), np.array([2, 3, 5]), True),
            (np.array([0, 1, 2], dtype="int64"), np.array([5, 4, 3]), True),
            (np.array([0, 1, np.nan]), np.array([5, 4, np.nan]), True),
            (np.array([0, 2, 4], dtype="int64"), np.array([1, 3, 5]), False),
            (np.array([0, 2, np.nan]), np.array([1, 3, np.nan]), False),
        ],
    )
    @pytest.mark.parametrize("order", (list(x) for x in permutations(range(3))))
    def test_is_overlapping(self, closed, order, left, right, expected):
        # GH 23309
        tree = IntervalTree(left[order], right[order], closed=closed)
        result = tree.is_overlapping
        assert result is expected

    @pytest.mark.parametrize("order", (list(x) for x in permutations(range(3))))
    def test_is_overlapping_endpoints(self, closed, order):
        """shared endpoints are marked as overlapping"""
        # GH 23309
        left, right = np.arange(3, dtype="int64"), np.arange(1, 4)
        tree = IntervalTree(left[order], right[order], closed=closed)
        result = tree.is_overlapping
        expected = closed == "both"
        assert result is expected

    @pytest.mark.parametrize(
        "left, right",
        [
            (np.array([], dtype="int64"), np.array([], dtype="int64")),
            (np.array([0], dtype="int64"), np.array([1], dtype="int64")),
            (np.array([np.nan]), np.array([np.nan])),
            (np.array([np.nan] * 3), np.array([np.nan] * 3)),
        ],
    )
    def test_is_overlapping_trivial(self, closed, left, right):
        # GH 23309
        tree = IntervalTree(left, right, closed=closed)
        assert tree.is_overlapping is False

    @pytest.mark.skipif(not IS64, reason="GH 23440")
    def test_construction_overflow(self):
        # GH 25485
        left, right = np.arange(101, dtype="int64"), [np.iinfo(np.int64).max] * 101
        tree = IntervalTree(left, right)

        # pivot should be average of left/right medians
        result = tree.root.pivot
        expected = (50 + np.iinfo(np.int64).max) / 2
        assert result == expected

    @pytest.mark.xfail(not IS64, reason="GH 23440")
    @pytest.mark.parametrize(
        "left, right, expected",
        [
            ([-np.inf, 1.0], [1.0, 2.0], 0.0),
            ([-np.inf, -2.0], [-2.0, -1.0], -2.0),
            ([-2.0, -1.0], [-1.0, np.inf], 0.0),
            ([1.0, 2.0], [2.0, np.inf], 2.0),
        ],
    )
    def test_inf_bound_infinite_recursion(self, left, right, expected):
        # GH 46658

        tree = IntervalTree(left * 101, right * 101)

        result = tree.root.pivot
        assert result == expected
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/interval/test_join.py
ADDED
@@ -0,0 +1,44 @@
import pytest

from pandas import (
    IntervalIndex,
    MultiIndex,
    RangeIndex,
)
import pandas._testing as tm


@pytest.fixture
def range_index():
    return RangeIndex(3, name="range_index")


@pytest.fixture
def interval_index():
    return IntervalIndex.from_tuples(
        [(0.0, 1.0), (1.0, 2.0), (1.5, 2.5)], name="interval_index"
    )


def test_join_overlapping_in_mi_to_same_intervalindex(range_index, interval_index):
    # GH-45661
    multi_index = MultiIndex.from_product([interval_index, range_index])
    result = multi_index.join(interval_index)

    tm.assert_index_equal(result, multi_index)


def test_join_overlapping_to_multiindex_with_same_interval(range_index, interval_index):
    # GH-45661
    multi_index = MultiIndex.from_product([interval_index, range_index])
    result = interval_index.join(multi_index)

    tm.assert_index_equal(result, multi_index)


def test_join_overlapping_interval_to_another_intervalindex(interval_index):
    # GH-45661
    flipped_interval_index = interval_index[::-1]
    result = interval_index.join(flipped_interval_index)

    tm.assert_index_equal(result, interval_index)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_analytics.py
ADDED
@@ -0,0 +1,263 @@
import numpy as np
import pytest

import pandas as pd
from pandas import (
    Index,
    MultiIndex,
    date_range,
    period_range,
)
import pandas._testing as tm


def test_infer_objects(idx):
    with pytest.raises(NotImplementedError, match="to_frame"):
        idx.infer_objects()


def test_shift(idx):
    # GH8083 test the base class for shift
    msg = (
        "This method is only implemented for DatetimeIndex, PeriodIndex and "
        "TimedeltaIndex; Got type MultiIndex"
    )
    with pytest.raises(NotImplementedError, match=msg):
        idx.shift(1)
    with pytest.raises(NotImplementedError, match=msg):
        idx.shift(1, 2)


def test_groupby(idx):
    groups = idx.groupby(np.array([1, 1, 1, 2, 2, 2]))
    labels = idx.tolist()
    exp = {1: labels[:3], 2: labels[3:]}
    tm.assert_dict_equal(groups, exp)

    # GH5620
    groups = idx.groupby(idx)
    exp = {key: [key] for key in idx}
    tm.assert_dict_equal(groups, exp)


def test_truncate_multiindex():
    # GH 34564 for MultiIndex level names check
    major_axis = Index(list(range(4)))
    minor_axis = Index(list(range(2)))

    major_codes = np.array([0, 0, 1, 2, 3, 3])
    minor_codes = np.array([0, 1, 0, 1, 0, 1])

    index = MultiIndex(
        levels=[major_axis, minor_axis],
        codes=[major_codes, minor_codes],
        names=["L1", "L2"],
    )

    result = index.truncate(before=1)
    assert "foo" not in result.levels[0]
    assert 1 in result.levels[0]
    assert index.names == result.names

    result = index.truncate(after=1)
    assert 2 not in result.levels[0]
    assert 1 in result.levels[0]
    assert index.names == result.names

    result = index.truncate(before=1, after=2)
    assert len(result.levels[0]) == 2
    assert index.names == result.names

    msg = "after < before"
    with pytest.raises(ValueError, match=msg):
        index.truncate(3, 1)


# TODO: reshape


def test_reorder_levels(idx):
    # this blows up
    with pytest.raises(IndexError, match="^Too many levels"):
        idx.reorder_levels([2, 1, 0])


def test_numpy_repeat():
    reps = 2
    numbers = [1, 2, 3]
    names = np.array(["foo", "bar"])

    m = MultiIndex.from_product([numbers, names], names=names)
    expected = MultiIndex.from_product([numbers, names.repeat(reps)], names=names)
    tm.assert_index_equal(np.repeat(m, reps), expected)

    msg = "the 'axis' parameter is not supported"
    with pytest.raises(ValueError, match=msg):
        np.repeat(m, reps, axis=1)


def test_append_mixed_dtypes():
    # GH 13660
    dti = date_range("2011-01-01", freq="ME", periods=3)
    dti_tz = date_range("2011-01-01", freq="ME", periods=3, tz="US/Eastern")
    pi = period_range("2011-01", freq="M", periods=3)

    mi = MultiIndex.from_arrays(
        [[1, 2, 3], [1.1, np.nan, 3.3], ["a", "b", "c"], dti, dti_tz, pi]
    )
    assert mi.nlevels == 6

    res = mi.append(mi)
    exp = MultiIndex.from_arrays(
        [
            [1, 2, 3, 1, 2, 3],
            [1.1, np.nan, 3.3, 1.1, np.nan, 3.3],
            ["a", "b", "c", "a", "b", "c"],
            dti.append(dti),
            dti_tz.append(dti_tz),
            pi.append(pi),
        ]
    )
    tm.assert_index_equal(res, exp)

    other = MultiIndex.from_arrays(
        [
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
            ["x", "y", "z"],
        ]
    )

    res = mi.append(other)
    exp = MultiIndex.from_arrays(
        [
            [1, 2, 3, "x", "y", "z"],
            [1.1, np.nan, 3.3, "x", "y", "z"],
            ["a", "b", "c", "x", "y", "z"],
            dti.append(Index(["x", "y", "z"])),
            dti_tz.append(Index(["x", "y", "z"])),
            pi.append(Index(["x", "y", "z"])),
        ]
    )
    tm.assert_index_equal(res, exp)


def test_iter(idx):
    result = list(idx)
    expected = [
        ("foo", "one"),
        ("foo", "two"),
        ("bar", "one"),
        ("baz", "two"),
        ("qux", "one"),
        ("qux", "two"),
    ]
    assert result == expected


def test_sub(idx):
    first = idx

    # - now raises (previously was set op difference)
    msg = "cannot perform __sub__ with this index type: MultiIndex"
    with pytest.raises(TypeError, match=msg):
        first - idx[-3:]
    with pytest.raises(TypeError, match=msg):
        idx[-3:] - first
    with pytest.raises(TypeError, match=msg):
        idx[-3:] - first.tolist()
    msg = "cannot perform __rsub__ with this index type: MultiIndex"
    with pytest.raises(TypeError, match=msg):
        first.tolist() - idx[-3:]


def test_map(idx):
    # callable
    index = idx

    result = index.map(lambda x: x)
    tm.assert_index_equal(result, index)


@pytest.mark.parametrize(
    "mapper",
    [
        lambda values, idx: {i: e for e, i in zip(values, idx)},
        lambda values, idx: pd.Series(values, idx),
    ],
)
def test_map_dictlike(idx, mapper):
    identity = mapper(idx.values, idx)

    # we don't infer to uint64 dtype for a dict
    if idx.dtype == np.uint64 and isinstance(identity, dict):
        expected = idx.astype("int64")
    else:
        expected = idx

    result = idx.map(identity)
    tm.assert_index_equal(result, expected)

    # empty mappable
    expected = Index([np.nan] * len(idx))
    result = idx.map(mapper(expected, idx))
    tm.assert_index_equal(result, expected)


@pytest.mark.parametrize(
    "func",
    [
        np.exp,
        np.exp2,
        np.expm1,
        np.log,
        np.log2,
        np.log10,
        np.log1p,
        np.sqrt,
        np.sin,
        np.cos,
        np.tan,
        np.arcsin,
        np.arccos,
        np.arctan,
        np.sinh,
        np.cosh,
        np.tanh,
        np.arcsinh,
        np.arccosh,
        np.arctanh,
        np.deg2rad,
        np.rad2deg,
    ],
    ids=lambda func: func.__name__,
)
def test_numpy_ufuncs(idx, func):
    # test ufuncs of numpy. see:
    # https://numpy.org/doc/stable/reference/ufuncs.html

    expected_exception = TypeError
    msg = (
        "loop of ufunc does not support argument 0 of type tuple which "
        f"has no callable {func.__name__} method"
    )
    with pytest.raises(expected_exception, match=msg):
        func(idx)


@pytest.mark.parametrize(
    "func",
    [np.isfinite, np.isinf, np.isnan, np.signbit],
    ids=lambda func: func.__name__,
)
def test_numpy_type_funcs(idx, func):
    msg = (
        f"ufunc '{func.__name__}' not supported for the input types, and the inputs "
        "could not be safely coerced to any supported types according to "
        "the casting rule ''safe''"
    )
    with pytest.raises(TypeError, match=msg):
        func(idx)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_compat.py
ADDED
@@ -0,0 +1,122 @@
import numpy as np
import pytest

import pandas as pd
from pandas import MultiIndex
import pandas._testing as tm


def test_numeric_compat(idx):
    with pytest.raises(TypeError, match="cannot perform __mul__"):
        idx * 1

    with pytest.raises(TypeError, match="cannot perform __rmul__"):
        1 * idx

    div_err = "cannot perform __truediv__"
    with pytest.raises(TypeError, match=div_err):
        idx / 1

    div_err = div_err.replace(" __", " __r")
    with pytest.raises(TypeError, match=div_err):
        1 / idx

    with pytest.raises(TypeError, match="cannot perform __floordiv__"):
        idx // 1

    with pytest.raises(TypeError, match="cannot perform __rfloordiv__"):
        1 // idx


@pytest.mark.parametrize("method", ["all", "any", "__invert__"])
def test_logical_compat(idx, method):
    msg = f"cannot perform {method}"

    with pytest.raises(TypeError, match=msg):
        getattr(idx, method)()


def test_inplace_mutation_resets_values():
    levels = [["a", "b", "c"], [4]]
    levels2 = [[1, 2, 3], ["a"]]
    codes = [[0, 1, 0, 2, 2, 0], [0, 0, 0, 0, 0, 0]]

    mi1 = MultiIndex(levels=levels, codes=codes)
    mi2 = MultiIndex(levels=levels2, codes=codes)

    # instantiating MultiIndex should not access/cache _.values
    assert "_values" not in mi1._cache
    assert "_values" not in mi2._cache

    vals = mi1.values.copy()
    vals2 = mi2.values.copy()

    # accessing .values should cache ._values
    assert mi1._values is mi1._cache["_values"]
    assert mi1.values is mi1._cache["_values"]
    assert isinstance(mi1._cache["_values"], np.ndarray)

    # Make sure level setting works
    new_vals = mi1.set_levels(levels2).values
    tm.assert_almost_equal(vals2, new_vals)

    # Doesn't drop _values from _cache [implementation detail]
    tm.assert_almost_equal(mi1._cache["_values"], vals)

    # ...and values is still same too
    tm.assert_almost_equal(mi1.values, vals)

    # Make sure label setting works too
    codes2 = [[0, 0, 0, 0, 0, 0], [0, 0, 0, 0, 0, 0]]
    exp_values = np.empty((6,), dtype=object)
    exp_values[:] = [(1, "a")] * 6

    # Must be 1d array of tuples
    assert exp_values.shape == (6,)

    new_mi = mi2.set_codes(codes2)
    assert "_values" not in new_mi._cache
    new_values = new_mi.values
    assert "_values" in new_mi._cache

    # Shouldn't change cache
    tm.assert_almost_equal(mi2._cache["_values"], vals2)

    # Should have correct values
    tm.assert_almost_equal(exp_values, new_values)


def test_boxable_categorical_values():
    cat = pd.Categorical(pd.date_range("2012-01-01", periods=3, freq="h"))
    result = MultiIndex.from_product([["a", "b", "c"], cat]).values
    expected = pd.Series(
        [
            ("a", pd.Timestamp("2012-01-01 00:00:00")),
            ("a", pd.Timestamp("2012-01-01 01:00:00")),
            ("a", pd.Timestamp("2012-01-01 02:00:00")),
            ("b", pd.Timestamp("2012-01-01 00:00:00")),
            ("b", pd.Timestamp("2012-01-01 01:00:00")),
            ("b", pd.Timestamp("2012-01-01 02:00:00")),
            ("c", pd.Timestamp("2012-01-01 00:00:00")),
            ("c", pd.Timestamp("2012-01-01 01:00:00")),
            ("c", pd.Timestamp("2012-01-01 02:00:00")),
        ]
    ).values
    tm.assert_numpy_array_equal(result, expected)
    result = pd.DataFrame({"a": ["a", "b", "c"], "b": cat, "c": np.array(cat)}).values
    expected = pd.DataFrame(
        {
            "a": ["a", "b", "c"],
            "b": [
                pd.Timestamp("2012-01-01 00:00:00"),
                pd.Timestamp("2012-01-01 01:00:00"),
                pd.Timestamp("2012-01-01 02:00:00"),
            ],
            "c": [
                pd.Timestamp("2012-01-01 00:00:00"),
                pd.Timestamp("2012-01-01 01:00:00"),
                pd.Timestamp("2012-01-01 02:00:00"),
            ],
        }
    ).values
    tm.assert_numpy_array_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_drop.py
ADDED
@@ -0,0 +1,190 @@
import numpy as np
import pytest

from pandas.errors import PerformanceWarning

import pandas as pd
from pandas import (
    Index,
    MultiIndex,
)
import pandas._testing as tm


def test_drop(idx):
    dropped = idx.drop([("foo", "two"), ("qux", "one")])

    index = MultiIndex.from_tuples([("foo", "two"), ("qux", "one")])
    dropped2 = idx.drop(index)

    expected = idx[[0, 2, 3, 5]]
    tm.assert_index_equal(dropped, expected)
    tm.assert_index_equal(dropped2, expected)

    dropped = idx.drop(["bar"])
    expected = idx[[0, 1, 3, 4, 5]]
    tm.assert_index_equal(dropped, expected)

    dropped = idx.drop("foo")
    expected = idx[[2, 3, 4, 5]]
    tm.assert_index_equal(dropped, expected)

    index = MultiIndex.from_tuples([("bar", "two")])
    with pytest.raises(KeyError, match=r"^\('bar', 'two'\)$"):
        idx.drop([("bar", "two")])
    with pytest.raises(KeyError, match=r"^\('bar', 'two'\)$"):
        idx.drop(index)
    with pytest.raises(KeyError, match=r"^'two'$"):
        idx.drop(["foo", "two"])

    # partially correct argument
    mixed_index = MultiIndex.from_tuples([("qux", "one"), ("bar", "two")])
    with pytest.raises(KeyError, match=r"^\('bar', 'two'\)$"):
        idx.drop(mixed_index)

    # error='ignore'
    dropped = idx.drop(index, errors="ignore")
    expected = idx[[0, 1, 2, 3, 4, 5]]
    tm.assert_index_equal(dropped, expected)

    dropped = idx.drop(mixed_index, errors="ignore")
    expected = idx[[0, 1, 2, 3, 5]]
    tm.assert_index_equal(dropped, expected)

    dropped = idx.drop(["foo", "two"], errors="ignore")
    expected = idx[[2, 3, 4, 5]]
    tm.assert_index_equal(dropped, expected)

    # mixed partial / full drop
    dropped = idx.drop(["foo", ("qux", "one")])
    expected = idx[[2, 3, 5]]
    tm.assert_index_equal(dropped, expected)

    # mixed partial / full drop / error='ignore'
    mixed_index = ["foo", ("qux", "one"), "two"]
    with pytest.raises(KeyError, match=r"^'two'$"):
        idx.drop(mixed_index)
    dropped = idx.drop(mixed_index, errors="ignore")
    expected = idx[[2, 3, 5]]
    tm.assert_index_equal(dropped, expected)


def test_droplevel_with_names(idx):
    index = idx[idx.get_loc("foo")]
    dropped = index.droplevel(0)
    assert dropped.name == "second"

    index = MultiIndex(
        levels=[Index(range(4)), Index(range(4)), Index(range(4))],
        codes=[
            np.array([0, 0, 1, 2, 2, 2, 3, 3]),
            np.array([0, 1, 0, 0, 0, 1, 0, 1]),
            np.array([1, 0, 1, 1, 0, 0, 1, 0]),
        ],
        names=["one", "two", "three"],
    )
    dropped = index.droplevel(0)
    assert dropped.names == ("two", "three")

    dropped = index.droplevel("two")
    expected = index.droplevel(1)
    assert dropped.equals(expected)


def test_droplevel_list():
    index = MultiIndex(
        levels=[Index(range(4)), Index(range(4)), Index(range(4))],
        codes=[
            np.array([0, 0, 1, 2, 2, 2, 3, 3]),
            np.array([0, 1, 0, 0, 0, 1, 0, 1]),
            np.array([1, 0, 1, 1, 0, 0, 1, 0]),
        ],
        names=["one", "two", "three"],
    )

    dropped = index[:2].droplevel(["three", "one"])
    expected = index[:2].droplevel(2).droplevel(0)
    assert dropped.equals(expected)

    dropped = index[:2].droplevel([])
    expected = index[:2]
    assert dropped.equals(expected)

    msg = (
        "Cannot remove 3 levels from an index with 3 levels: "
        "at least one level must be left"
    )
    with pytest.raises(ValueError, match=msg):
        index[:2].droplevel(["one", "two", "three"])

    with pytest.raises(KeyError, match="'Level four not found'"):
        index[:2].droplevel(["one", "four"])


def test_drop_not_lexsorted():
    # GH 12078

    # define the lexsorted version of the multi-index
    tuples = [("a", ""), ("b1", "c1"), ("b2", "c2")]
    lexsorted_mi = MultiIndex.from_tuples(tuples, names=["b", "c"])
    assert lexsorted_mi._is_lexsorted()

    # and the not-lexsorted version
    df = pd.DataFrame(
        columns=["a", "b", "c", "d"], data=[[1, "b1", "c1", 3], [1, "b2", "c2", 4]]
    )
    df = df.pivot_table(index="a", columns=["b", "c"], values="d")
    df = df.reset_index()
    not_lexsorted_mi = df.columns
    assert not not_lexsorted_mi._is_lexsorted()

    # compare the results
    tm.assert_index_equal(lexsorted_mi, not_lexsorted_mi)
    with tm.assert_produces_warning(PerformanceWarning):
        tm.assert_index_equal(lexsorted_mi.drop("a"), not_lexsorted_mi.drop("a"))


def test_drop_with_nan_in_index(nulls_fixture):
    # GH#18853
    mi = MultiIndex.from_tuples([("blah", nulls_fixture)], names=["name", "date"])
    msg = r"labels \[Timestamp\('2001-01-01 00:00:00'\)\] not found in level"
    with pytest.raises(KeyError, match=msg):
        mi.drop(pd.Timestamp("2001"), level="date")


@pytest.mark.filterwarnings("ignore::pandas.errors.PerformanceWarning")
def test_drop_with_non_monotonic_duplicates():
    # GH#33494
    mi = MultiIndex.from_tuples([(1, 2), (2, 3), (1, 2)])
    result = mi.drop((1, 2))
    expected = MultiIndex.from_tuples([(2, 3)])
    tm.assert_index_equal(result, expected)


def test_single_level_drop_partially_missing_elements():
    # GH 37820

    mi = MultiIndex.from_tuples([(1, 2), (2, 2), (3, 2)])
    msg = r"labels \[4\] not found in level"
    with pytest.raises(KeyError, match=msg):
        mi.drop(4, level=0)
    with pytest.raises(KeyError, match=msg):
        mi.drop([1, 4], level=0)
    msg = r"labels \[nan\] not found in level"
    with pytest.raises(KeyError, match=msg):
        mi.drop([np.nan], level=0)
    with pytest.raises(KeyError, match=msg):
        mi.drop([np.nan, 1, 2, 3], level=0)

    mi = MultiIndex.from_tuples([(np.nan, 1), (1, 2)])
    msg = r"labels \['a'\] not found in level"
    with pytest.raises(KeyError, match=msg):
        mi.drop([np.nan, 1, "a"], level=0)


def test_droplevel_multiindex_one_level():
    # GH#37208
    index = MultiIndex.from_tuples([(2,)], names=("b",))
    result = index.droplevel([])
    expected = Index([2], name="b")
    tm.assert_index_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_duplicates.py
ADDED
@@ -0,0 +1,363 @@
from itertools import product

import numpy as np
import pytest

from pandas._libs import (
    hashtable,
    index as libindex,
)

from pandas import (
    NA,
    DatetimeIndex,
    Index,
    MultiIndex,
    Series,
)
import pandas._testing as tm


@pytest.fixture
def idx_dup():
    # compare tests/indexes/multi/conftest.py
    major_axis = Index(["foo", "bar", "baz", "qux"])
    minor_axis = Index(["one", "two"])

    major_codes = np.array([0, 0, 1, 0, 1, 1])
    minor_codes = np.array([0, 1, 0, 1, 0, 1])
    index_names = ["first", "second"]
    mi = MultiIndex(
        levels=[major_axis, minor_axis],
        codes=[major_codes, minor_codes],
        names=index_names,
        verify_integrity=False,
    )
    return mi


@pytest.mark.parametrize("names", [None, ["first", "second"]])
def test_unique(names):
    mi = MultiIndex.from_arrays([[1, 2, 1, 2], [1, 1, 1, 2]], names=names)

    res = mi.unique()
    exp = MultiIndex.from_arrays([[1, 2, 2], [1, 1, 2]], names=mi.names)
    tm.assert_index_equal(res, exp)

    mi = MultiIndex.from_arrays([list("aaaa"), list("abab")], names=names)
    res = mi.unique()
    exp = MultiIndex.from_arrays([list("aa"), list("ab")], names=mi.names)
    tm.assert_index_equal(res, exp)

    mi = MultiIndex.from_arrays([list("aaaa"), list("aaaa")], names=names)
    res = mi.unique()
    exp = MultiIndex.from_arrays([["a"], ["a"]], names=mi.names)
    tm.assert_index_equal(res, exp)

    # GH #20568 - empty MI
    mi = MultiIndex.from_arrays([[], []], names=names)
    res = mi.unique()
    tm.assert_index_equal(mi, res)


def test_unique_datetimelike():
    idx1 = DatetimeIndex(
        ["2015-01-01", "2015-01-01", "2015-01-01", "2015-01-01", "NaT", "NaT"]
    )
    idx2 = DatetimeIndex(
        ["2015-01-01", "2015-01-01", "2015-01-02", "2015-01-02", "NaT", "2015-01-01"],
        tz="Asia/Tokyo",
    )
    result = MultiIndex.from_arrays([idx1, idx2]).unique()

    eidx1 = DatetimeIndex(["2015-01-01", "2015-01-01", "NaT", "NaT"])
    eidx2 = DatetimeIndex(
        ["2015-01-01", "2015-01-02", "NaT", "2015-01-01"], tz="Asia/Tokyo"
    )
    exp = MultiIndex.from_arrays([eidx1, eidx2])
    tm.assert_index_equal(result, exp)


@pytest.mark.parametrize("level", [0, "first", 1, "second"])
def test_unique_level(idx, level):
    # GH #17896 - with level= argument
    result = idx.unique(level=level)
    expected = idx.get_level_values(level).unique()
    tm.assert_index_equal(result, expected)

    # With already unique level
    mi = MultiIndex.from_arrays([[1, 3, 2, 4], [1, 3, 2, 5]], names=["first", "second"])
    result = mi.unique(level=level)
    expected = mi.get_level_values(level)
    tm.assert_index_equal(result, expected)

    # With empty MI
    mi = MultiIndex.from_arrays([[], []], names=["first", "second"])
    result = mi.unique(level=level)
    expected = mi.get_level_values(level)
    tm.assert_index_equal(result, expected)


def test_duplicate_multiindex_codes():
    # GH 17464
    # Make sure that a MultiIndex with duplicate levels throws a ValueError
    msg = r"Level values must be unique: \[[A', ]+\] on level 0"
    with pytest.raises(ValueError, match=msg):
        mi = MultiIndex([["A"] * 10, range(10)], [[0] * 10, range(10)])

    # And that using set_levels with duplicate levels fails
    mi = MultiIndex.from_arrays([["A", "A", "B", "B", "B"], [1, 2, 1, 2, 3]])
    msg = r"Level values must be unique: \[[AB', ]+\] on level 0"
    with pytest.raises(ValueError, match=msg):
        mi.set_levels([["A", "B", "A", "A", "B"], [2, 1, 3, -2, 5]])


@pytest.mark.parametrize("names", [["a", "b", "a"], [1, 1, 2], [1, "a", 1]])
def test_duplicate_level_names(names):
    # GH18872, GH19029
    mi = MultiIndex.from_product([[0, 1]] * 3, names=names)
    assert mi.names == names

    # With .rename()
    mi = MultiIndex.from_product([[0, 1]] * 3)
    mi = mi.rename(names)
    assert mi.names == names

    # With .rename(., level=)
    mi.rename(names[1], level=1, inplace=True)
    mi = mi.rename([names[0], names[2]], level=[0, 2])
    assert mi.names == names


def test_duplicate_meta_data():
    # GH 10115
    mi = MultiIndex(
        levels=[[0, 1], [0, 1, 2]], codes=[[0, 0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 0, 1, 2]]
    )

    for idx in [
        mi,
        mi.set_names([None, None]),
        mi.set_names([None, "Num"]),
        mi.set_names(["Upper", "Num"]),
    ]:
        assert idx.has_duplicates
        assert idx.drop_duplicates().names == idx.names


def test_has_duplicates(idx, idx_dup):
    # see fixtures
    assert idx.is_unique is True
    assert idx.has_duplicates is False
    assert idx_dup.is_unique is False
    assert idx_dup.has_duplicates is True

    mi = MultiIndex(
        levels=[[0, 1], [0, 1, 2]], codes=[[0, 0, 0, 0, 1, 1, 1], [0, 1, 2, 0, 0, 1, 2]]
    )
    assert mi.is_unique is False
    assert mi.has_duplicates is True

    # single instance of NaN
    mi_nan = MultiIndex(
        levels=[["a", "b"], [0, 1]], codes=[[-1, 0, 0, 1, 1], [-1, 0, 1, 0, 1]]
    )
    assert mi_nan.is_unique is True
    assert mi_nan.has_duplicates is False

    # multiple instances of NaN
    mi_nan_dup = MultiIndex(
        levels=[["a", "b"], [0, 1]], codes=[[-1, -1, 0, 0, 1, 1], [-1, -1, 0, 1, 0, 1]]
    )
    assert mi_nan_dup.is_unique is False
    assert mi_nan_dup.has_duplicates is True


def test_has_duplicates_from_tuples():
    # GH 9075
    t = [
        ("x", "out", "z", 5, "y", "in", "z", 169),
        ("x", "out", "z", 7, "y", "in", "z", 119),
        ("x", "out", "z", 9, "y", "in", "z", 135),
        ("x", "out", "z", 13, "y", "in", "z", 145),
        ("x", "out", "z", 14, "y", "in", "z", 158),
        ("x", "out", "z", 16, "y", "in", "z", 122),
        ("x", "out", "z", 17, "y", "in", "z", 160),
        ("x", "out", "z", 18, "y", "in", "z", 180),
        ("x", "out", "z", 20, "y", "in", "z", 143),
        ("x", "out", "z", 21, "y", "in", "z", 128),
        ("x", "out", "z", 22, "y", "in", "z", 129),
        ("x", "out", "z", 25, "y", "in", "z", 111),
        ("x", "out", "z", 28, "y", "in", "z", 114),
        ("x", "out", "z", 29, "y", "in", "z", 121),
        ("x", "out", "z", 31, "y", "in", "z", 126),
        ("x", "out", "z", 32, "y", "in", "z", 155),
        ("x", "out", "z", 33, "y", "in", "z", 123),
        ("x", "out", "z", 12, "y", "in", "z", 144),
    ]

    mi = MultiIndex.from_tuples(t)
    assert not mi.has_duplicates


@pytest.mark.parametrize("nlevels", [4, 8])
@pytest.mark.parametrize("with_nulls", [True, False])
def test_has_duplicates_overflow(nlevels, with_nulls):
    # handle int64 overflow if possible
    # no overflow with 4
    # overflow possible with 8
    codes = np.tile(np.arange(500), 2)
    level = np.arange(500)

    if with_nulls:  # inject some null values
        codes[500] = -1  # common nan value
        codes = [codes.copy() for i in range(nlevels)]
        for i in range(nlevels):
            codes[i][500 + i - nlevels // 2] = -1

        codes += [np.array([-1, 1]).repeat(500)]
    else:
        codes = [codes] * nlevels + [np.arange(2).repeat(500)]

    levels = [level] * nlevels + [[0, 1]]

    # no dups
    mi = MultiIndex(levels=levels, codes=codes)
    assert not mi.has_duplicates

    # with a dup
    if with_nulls:

        def f(a):
            return np.insert(a, 1000, a[0])

        codes = list(map(f, codes))
        mi = MultiIndex(levels=levels, codes=codes)
    else:
        values = mi.values.tolist()
        mi = MultiIndex.from_tuples(values + [values[0]])

    assert mi.has_duplicates


@pytest.mark.parametrize(
    "keep, expected",
    [
        ("first", np.array([False, False, False, True, True, False])),
        ("last", np.array([False, True, True, False, False, False])),
        (False, np.array([False, True, True, True, True, False])),
    ],
)
def test_duplicated(idx_dup, keep, expected):
    result = idx_dup.duplicated(keep=keep)
    tm.assert_numpy_array_equal(result, expected)


@pytest.mark.arm_slow
def test_duplicated_hashtable_impl(keep, monkeypatch):
    # GH 9125
    n, k = 6, 10
    levels = [np.arange(n), [str(i) for i in range(n)], 1000 + np.arange(n)]
    codes = [np.random.default_rng(2).choice(n, k * n) for _ in levels]
    with monkeypatch.context() as m:
        m.setattr(libindex, "_SIZE_CUTOFF", 50)
        mi = MultiIndex(levels=levels, codes=codes)

        result = mi.duplicated(keep=keep)
        expected = hashtable.duplicated(mi.values, keep=keep)
    tm.assert_numpy_array_equal(result, expected)


@pytest.mark.parametrize("val", [101, 102])
def test_duplicated_with_nan(val):
    # GH5873
    mi = MultiIndex.from_arrays([[101, val], [3.5, np.nan]])
    assert not mi.has_duplicates

    tm.assert_numpy_array_equal(mi.duplicated(), np.zeros(2, dtype="bool"))


@pytest.mark.parametrize("n", range(1, 6))
@pytest.mark.parametrize("m", range(1, 5))
def test_duplicated_with_nan_multi_shape(n, m):
    # GH5873
    # all possible unique combinations, including nan
    codes = product(range(-1, n), range(-1, m))
    mi = MultiIndex(
        levels=[list("abcde")[:n], list("WXYZ")[:m]],
        codes=np.random.default_rng(2).permutation(list(codes)).T,
    )
    assert len(mi) == (n + 1) * (m + 1)
    assert not mi.has_duplicates

    tm.assert_numpy_array_equal(mi.duplicated(), np.zeros(len(mi), dtype="bool"))


def test_duplicated_drop_duplicates():
    # GH#4060
    idx = MultiIndex.from_arrays(([1, 2, 3, 1, 2, 3], [1, 1, 1, 1, 2, 2]))

    expected = np.array([False, False, False, True, False, False], dtype=bool)
    duplicated = idx.duplicated()
    tm.assert_numpy_array_equal(duplicated, expected)
    assert duplicated.dtype == bool
    expected = MultiIndex.from_arrays(([1, 2, 3, 2, 3], [1, 1, 1, 2, 2]))
    tm.assert_index_equal(idx.drop_duplicates(), expected)

    expected = np.array([True, False, False, False, False, False])
    duplicated = idx.duplicated(keep="last")
    tm.assert_numpy_array_equal(duplicated, expected)
    assert duplicated.dtype == bool
    expected = MultiIndex.from_arrays(([2, 3, 1, 2, 3], [1, 1, 1, 2, 2]))
    tm.assert_index_equal(idx.drop_duplicates(keep="last"), expected)

    expected = np.array([True, False, False, True, False, False])
    duplicated = idx.duplicated(keep=False)
    tm.assert_numpy_array_equal(duplicated, expected)
    assert duplicated.dtype == bool
    expected = MultiIndex.from_arrays(([2, 3, 2, 3], [1, 1, 2, 2]))
    tm.assert_index_equal(idx.drop_duplicates(keep=False), expected)


@pytest.mark.parametrize(
    "dtype",
    [
        np.complex64,
        np.complex128,
    ],
)
def test_duplicated_series_complex_numbers(dtype):
    # GH 17927
    expected = Series(
        [False, False, False, True, False, False, False, True, False, True],
        dtype=bool,
    )
    result = Series(
        [
            np.nan + np.nan * 1j,
            0,
            1j,
            1j,
            1,
            1 + 1j,
            1 + 2j,
            1 + 1j,
            np.nan,
            np.nan + np.nan * 1j,
        ],
        dtype=dtype,
    ).duplicated()
    tm.assert_series_equal(result, expected)


def test_midx_unique_ea_dtype():
    # GH#48335
    vals_a = Series([1, 2, NA, NA], dtype="Int64")
    vals_b = np.array([1, 2, 3, 3])
    midx = MultiIndex.from_arrays([vals_a, vals_b], names=["a", "b"])
    result = midx.unique()

    exp_vals_a = Series([1, 2, NA], dtype="Int64")
    exp_vals_b = np.array([1, 2, 3])
    expected = MultiIndex.from_arrays([exp_vals_a, exp_vals_b], names=["a", "b"])
    tm.assert_index_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_formats.py
ADDED
@@ -0,0 +1,249 @@
import numpy as np
import pytest

import pandas as pd
from pandas import (
    Index,
    MultiIndex,
)
import pandas._testing as tm


def test_format(idx):
    msg = "MultiIndex.format is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=msg):
        idx.format()
        idx[:0].format()


def test_format_integer_names():
    index = MultiIndex(
        levels=[[0, 1], [0, 1]], codes=[[0, 0, 1, 1], [0, 1, 0, 1]], names=[0, 1]
    )
    msg = "MultiIndex.format is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=msg):
        index.format(names=True)


def test_format_sparse_config(idx):
    # GH1538
    msg = "MultiIndex.format is deprecated"
    with pd.option_context("display.multi_sparse", False):
        with tm.assert_produces_warning(FutureWarning, match=msg):
            result = idx.format()
    assert result[1] == "foo two"


def test_format_sparse_display():
    index = MultiIndex(
        levels=[[0, 1], [0, 1], [0, 1], [0]],
        codes=[
            [0, 0, 0, 1, 1, 1],
            [0, 0, 1, 0, 0, 1],
            [0, 1, 0, 0, 1, 0],
            [0, 0, 0, 0, 0, 0],
        ],
    )
    msg = "MultiIndex.format is deprecated"
    with tm.assert_produces_warning(FutureWarning, match=msg):
        result = index.format()
    assert result[3] == "1 0 0 0"


def test_repr_with_unicode_data():
    with pd.option_context("display.encoding", "UTF-8"):
        d = {"a": ["\u05d0", 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
        index = pd.DataFrame(d).set_index(["a", "b"]).index
        assert "\\" not in repr(index)  # we don't want unicode-escaped


def test_repr_roundtrip_raises():
    mi = MultiIndex.from_product([list("ab"), range(3)], names=["first", "second"])
    msg = "Must pass both levels and codes"
    with pytest.raises(TypeError, match=msg):
        eval(repr(mi))


def test_unicode_string_with_unicode():
    d = {"a": ["\u05d0", 2, 3], "b": [4, 5, 6], "c": [7, 8, 9]}
    idx = pd.DataFrame(d).set_index(["a", "b"]).index
    str(idx)


def test_repr_max_seq_item_setting(idx):
    # GH10182
    idx = idx.repeat(50)
    with pd.option_context("display.max_seq_items", None):
        repr(idx)
        assert "..." not in str(idx)


class TestRepr:
    def test_unicode_repr_issues(self):
        levels = [Index(["a/\u03c3", "b/\u03c3", "c/\u03c3"]), Index([0, 1])]
        codes = [np.arange(3).repeat(2), np.tile(np.arange(2), 3)]
        index = MultiIndex(levels=levels, codes=codes)

        repr(index.levels)
        repr(index.get_level_values(1))

    def test_repr_max_seq_items_equal_to_n(self, idx):
        # display.max_seq_items == n
        with pd.option_context("display.max_seq_items", 6):
            result = idx.__repr__()
            expected = """\
MultiIndex([('foo', 'one'),
            ('foo', 'two'),
            ('bar', 'one'),
            ('baz', 'two'),
            ('qux', 'one'),
            ('qux', 'two')],
           names=['first', 'second'])"""
            assert result == expected

    def test_repr(self, idx):
        result = idx[:1].__repr__()
        expected = """\
MultiIndex([('foo', 'one')],
           names=['first', 'second'])"""
        assert result == expected

        result = idx.__repr__()
        expected = """\
MultiIndex([('foo', 'one'),
            ('foo', 'two'),
            ('bar', 'one'),
            ('baz', 'two'),
            ('qux', 'one'),
            ('qux', 'two')],
           names=['first', 'second'])"""
        assert result == expected

        with pd.option_context("display.max_seq_items", 5):
            result = idx.__repr__()
            expected = """\
MultiIndex([('foo', 'one'),
            ('foo', 'two'),
            ...
            ('qux', 'one'),
            ('qux', 'two')],
           names=['first', 'second'], length=6)"""
            assert result == expected

        # display.max_seq_items == 1
        with pd.option_context("display.max_seq_items", 1):
            result = idx.__repr__()
            expected = """\
MultiIndex([...
            ('qux', 'two')],
           names=['first', ...], length=6)"""
            assert result == expected

    def test_rjust(self):
        n = 1000
        ci = pd.CategoricalIndex(list("a" * n) + (["abc"] * n))
        dti = pd.date_range("2000-01-01", freq="s", periods=n * 2)
        mi = MultiIndex.from_arrays([ci, ci.codes + 9, dti], names=["a", "b", "dti"])
        result = mi[:1].__repr__()
        expected = """\
MultiIndex([('a', 9, '2000-01-01 00:00:00')],
           names=['a', 'b', 'dti'])"""
        assert result == expected

        result = mi[::500].__repr__()
        expected = """\
MultiIndex([(  'a',  9, '2000-01-01 00:00:00'),
            (  'a',  9, '2000-01-01 00:08:20'),
            ('abc', 10, '2000-01-01 00:16:40'),
            ('abc', 10, '2000-01-01 00:25:00')],
           names=['a', 'b', 'dti'])"""
        assert result == expected

        result = mi.__repr__()
        expected = """\
MultiIndex([(  'a',  9, '2000-01-01 00:00:00'),
            (  'a',  9, '2000-01-01 00:00:01'),
            (  'a',  9, '2000-01-01 00:00:02'),
            (  'a',  9, '2000-01-01 00:00:03'),
            (  'a',  9, '2000-01-01 00:00:04'),
            (  'a',  9, '2000-01-01 00:00:05'),
            (  'a',  9, '2000-01-01 00:00:06'),
            (  'a',  9, '2000-01-01 00:00:07'),
            (  'a',  9, '2000-01-01 00:00:08'),
            (  'a',  9, '2000-01-01 00:00:09'),
            ...
            ('abc', 10, '2000-01-01 00:33:10'),
            ('abc', 10, '2000-01-01 00:33:11'),
            ('abc', 10, '2000-01-01 00:33:12'),
            ('abc', 10, '2000-01-01 00:33:13'),
            ('abc', 10, '2000-01-01 00:33:14'),
            ('abc', 10, '2000-01-01 00:33:15'),
            ('abc', 10, '2000-01-01 00:33:16'),
            ('abc', 10, '2000-01-01 00:33:17'),
            ('abc', 10, '2000-01-01 00:33:18'),
            ('abc', 10, '2000-01-01 00:33:19')],
           names=['a', 'b', 'dti'], length=2000)"""
        assert result == expected

    def test_tuple_width(self):
        n = 1000
        ci = pd.CategoricalIndex(list("a" * n) + (["abc"] * n))
        dti = pd.date_range("2000-01-01", freq="s", periods=n * 2)
        levels = [ci, ci.codes + 9, dti, dti, dti]
        names = ["a", "b", "dti_1", "dti_2", "dti_3"]
        mi = MultiIndex.from_arrays(levels, names=names)
        result = mi[:1].__repr__()
        expected = """MultiIndex([('a', 9, '2000-01-01 00:00:00', '2000-01-01 00:00:00', ...)],
           names=['a', 'b', 'dti_1', 'dti_2', 'dti_3'])"""  # noqa: E501
        assert result == expected

        result = mi[:10].__repr__()
        expected = """\
MultiIndex([('a', 9, '2000-01-01 00:00:00', '2000-01-01 00:00:00', ...),
            ('a', 9, '2000-01-01 00:00:01', '2000-01-01 00:00:01', ...),
            ('a', 9, '2000-01-01 00:00:02', '2000-01-01 00:00:02', ...),
            ('a', 9, '2000-01-01 00:00:03', '2000-01-01 00:00:03', ...),
            ('a', 9, '2000-01-01 00:00:04', '2000-01-01 00:00:04', ...),
            ('a', 9, '2000-01-01 00:00:05', '2000-01-01 00:00:05', ...),
            ('a', 9, '2000-01-01 00:00:06', '2000-01-01 00:00:06', ...),
            ('a', 9, '2000-01-01 00:00:07', '2000-01-01 00:00:07', ...),
            ('a', 9, '2000-01-01 00:00:08', '2000-01-01 00:00:08', ...),
            ('a', 9, '2000-01-01 00:00:09', '2000-01-01 00:00:09', ...)],
           names=['a', 'b', 'dti_1', 'dti_2', 'dti_3'])"""
        assert result == expected

        result = mi.__repr__()
        expected = """\
MultiIndex([(  'a',  9, '2000-01-01 00:00:00', '2000-01-01 00:00:00', ...),
            (  'a',  9, '2000-01-01 00:00:01', '2000-01-01 00:00:01', ...),
            (  'a',  9, '2000-01-01 00:00:02', '2000-01-01 00:00:02', ...),
            (  'a',  9, '2000-01-01 00:00:03', '2000-01-01 00:00:03', ...),
            (  'a',  9, '2000-01-01 00:00:04', '2000-01-01 00:00:04', ...),
            (  'a',  9, '2000-01-01 00:00:05', '2000-01-01 00:00:05', ...),
            (  'a',  9, '2000-01-01 00:00:06', '2000-01-01 00:00:06', ...),
            (  'a',  9, '2000-01-01 00:00:07', '2000-01-01 00:00:07', ...),
            (  'a',  9, '2000-01-01 00:00:08', '2000-01-01 00:00:08', ...),
            (  'a',  9, '2000-01-01 00:00:09', '2000-01-01 00:00:09', ...),
            ...
            ('abc', 10, '2000-01-01 00:33:10', '2000-01-01 00:33:10', ...),
            ('abc', 10, '2000-01-01 00:33:11', '2000-01-01 00:33:11', ...),
            ('abc', 10, '2000-01-01 00:33:12', '2000-01-01 00:33:12', ...),
            ('abc', 10, '2000-01-01 00:33:13', '2000-01-01 00:33:13', ...),
            ('abc', 10, '2000-01-01 00:33:14', '2000-01-01 00:33:14', ...),
            ('abc', 10, '2000-01-01 00:33:15', '2000-01-01 00:33:15', ...),
            ('abc', 10, '2000-01-01 00:33:16', '2000-01-01 00:33:16', ...),
            ('abc', 10, '2000-01-01 00:33:17', '2000-01-01 00:33:17', ...),
            ('abc', 10, '2000-01-01 00:33:18', '2000-01-01 00:33:18', ...),
            ('abc', 10, '2000-01-01 00:33:19', '2000-01-01 00:33:19', ...)],
           names=['a', 'b', 'dti_1', 'dti_2', 'dti_3'], length=2000)"""
        assert result == expected

    def test_multiindex_long_element(self):
        # Non-regression test towards GH#52960
        data = MultiIndex.from_tuples([("c" * 62,)])

        expected = (
            "MultiIndex([('cccccccccccccccccccccccccccccccccccccccc"
            "cccccccccccccccccccccc',)],\n )"
        )
        assert str(data) == expected
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_get_level_values.py
ADDED
@@ -0,0 +1,124 @@
import numpy as np

import pandas as pd
from pandas import (
    CategoricalIndex,
    Index,
    MultiIndex,
    Timestamp,
    date_range,
)
import pandas._testing as tm


class TestGetLevelValues:
    def test_get_level_values_box_datetime64(self):
        dates = date_range("1/1/2000", periods=4)
        levels = [dates, [0, 1]]
        codes = [[0, 0, 1, 1, 2, 2, 3, 3], [0, 1, 0, 1, 0, 1, 0, 1]]

        index = MultiIndex(levels=levels, codes=codes)

        assert isinstance(index.get_level_values(0)[0], Timestamp)


def test_get_level_values(idx):
    result = idx.get_level_values(0)
    expected = Index(["foo", "foo", "bar", "baz", "qux", "qux"], name="first")
    tm.assert_index_equal(result, expected)
    assert result.name == "first"

    result = idx.get_level_values("first")
    expected = idx.get_level_values(0)
    tm.assert_index_equal(result, expected)

    # GH 10460
    index = MultiIndex(
        levels=[CategoricalIndex(["A", "B"]), CategoricalIndex([1, 2, 3])],
        codes=[np.array([0, 0, 0, 1, 1, 1]), np.array([0, 1, 2, 0, 1, 2])],
    )

    exp = CategoricalIndex(["A", "A", "A", "B", "B", "B"])
    tm.assert_index_equal(index.get_level_values(0), exp)
    exp = CategoricalIndex([1, 2, 3, 1, 2, 3])
    tm.assert_index_equal(index.get_level_values(1), exp)


def test_get_level_values_all_na():
    # GH#17924 when level entirely consists of nan
    arrays = [[np.nan, np.nan, np.nan], ["a", np.nan, 1]]
    index = MultiIndex.from_arrays(arrays)
    result = index.get_level_values(0)
    expected = Index([np.nan, np.nan, np.nan], dtype=np.float64)
    tm.assert_index_equal(result, expected)

    result = index.get_level_values(1)
    expected = Index(["a", np.nan, 1], dtype=object)
    tm.assert_index_equal(result, expected)


def test_get_level_values_int_with_na():
    # GH#17924
    arrays = [["a", "b", "b"], [1, np.nan, 2]]
    index = MultiIndex.from_arrays(arrays)
    result = index.get_level_values(1)
    expected = Index([1, np.nan, 2])
    tm.assert_index_equal(result, expected)

    arrays = [["a", "b", "b"], [np.nan, np.nan, 2]]
    index = MultiIndex.from_arrays(arrays)
    result = index.get_level_values(1)
    expected = Index([np.nan, np.nan, 2])
    tm.assert_index_equal(result, expected)


def test_get_level_values_na():
    arrays = [[np.nan, np.nan, np.nan], ["a", np.nan, 1]]
    index = MultiIndex.from_arrays(arrays)
    result = index.get_level_values(0)
    expected = Index([np.nan, np.nan, np.nan])
    tm.assert_index_equal(result, expected)

    result = index.get_level_values(1)
    expected = Index(["a", np.nan, 1])
    tm.assert_index_equal(result, expected)

    arrays = [["a", "b", "b"], pd.DatetimeIndex([0, 1, pd.NaT])]
    index = MultiIndex.from_arrays(arrays)
    result = index.get_level_values(1)
    expected = pd.DatetimeIndex([0, 1, pd.NaT])
    tm.assert_index_equal(result, expected)

    arrays = [[], []]
    index = MultiIndex.from_arrays(arrays)
    result = index.get_level_values(0)
    expected = Index([], dtype=object)
    tm.assert_index_equal(result, expected)


def test_get_level_values_when_periods():
    # GH33131. See also discussion in GH32669.
    # This test can probably be removed when PeriodIndex._engine is removed.
    from pandas import (
        Period,
        PeriodIndex,
    )

    idx = MultiIndex.from_arrays(
        [PeriodIndex([Period("2019Q1"), Period("2019Q2")], name="b")]
    )
    idx2 = MultiIndex.from_arrays(
        [idx._get_level_values(level) for level in range(idx.nlevels)]
    )
    assert all(x.is_monotonic_increasing for x in idx2.levels)


def test_values_loses_freq_of_underlying_index():
    # GH#49054
    idx = pd.DatetimeIndex(date_range("20200101", periods=3, freq="BME"))
    expected = idx.copy(deep=True)
    idx2 = Index([1, 2, 3])
    midx = MultiIndex(levels=[idx, idx2], codes=[[0, 1, 2], [0, 1, 2]])
    midx.values
    assert idx.freq is not None
    tm.assert_index_equal(idx, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_isin.py
ADDED
@@ -0,0 +1,103 @@
import numpy as np
import pytest

from pandas import MultiIndex
import pandas._testing as tm


def test_isin_nan():
    idx = MultiIndex.from_arrays([["foo", "bar"], [1.0, np.nan]])
    tm.assert_numpy_array_equal(idx.isin([("bar", np.nan)]), np.array([False, True]))
    tm.assert_numpy_array_equal(
        idx.isin([("bar", float("nan"))]), np.array([False, True])
    )


def test_isin_missing(nulls_fixture):
    # GH48905
    mi1 = MultiIndex.from_tuples([(1, nulls_fixture)])
    mi2 = MultiIndex.from_tuples([(1, 1), (1, 2)])
    result = mi2.isin(mi1)
    expected = np.array([False, False])
    tm.assert_numpy_array_equal(result, expected)


def test_isin():
    values = [("foo", 2), ("bar", 3), ("quux", 4)]

    idx = MultiIndex.from_arrays([["qux", "baz", "foo", "bar"], np.arange(4)])
    result = idx.isin(values)
    expected = np.array([False, False, True, True])
    tm.assert_numpy_array_equal(result, expected)

    # empty, return dtype bool
    idx = MultiIndex.from_arrays([[], []])
    result = idx.isin(values)
    assert len(result) == 0
    assert result.dtype == np.bool_


def test_isin_level_kwarg():
    idx = MultiIndex.from_arrays([["qux", "baz", "foo", "bar"], np.arange(4)])

    vals_0 = ["foo", "bar", "quux"]
    vals_1 = [2, 3, 10]

    expected = np.array([False, False, True, True])
    tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level=0))
    tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level=-2))

    tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level=1))
    tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level=-1))

    msg = "Too many levels: Index has only 2 levels, not 6"
    with pytest.raises(IndexError, match=msg):
        idx.isin(vals_0, level=5)
    msg = "Too many levels: Index has only 2 levels, -5 is not a valid level number"
    with pytest.raises(IndexError, match=msg):
        idx.isin(vals_0, level=-5)

    with pytest.raises(KeyError, match=r"'Level 1\.0 not found'"):
        idx.isin(vals_0, level=1.0)
    with pytest.raises(KeyError, match=r"'Level -1\.0 not found'"):
        idx.isin(vals_1, level=-1.0)
    with pytest.raises(KeyError, match="'Level A not found'"):
        idx.isin(vals_1, level="A")

    idx.names = ["A", "B"]
    tm.assert_numpy_array_equal(expected, idx.isin(vals_0, level="A"))
    tm.assert_numpy_array_equal(expected, idx.isin(vals_1, level="B"))

    with pytest.raises(KeyError, match="'Level C not found'"):
        idx.isin(vals_1, level="C")


@pytest.mark.parametrize(
    "labels,expected,level",
    [
        ([("b", np.nan)], np.array([False, False, True]), None),
        ([np.nan, "a"], np.array([True, True, False]), 0),
        (["d", np.nan], np.array([False, True, True]), 1),
    ],
)
def test_isin_multi_index_with_missing_value(labels, expected, level):
    # GH 19132
    midx = MultiIndex.from_arrays([[np.nan, "a", "b"], ["c", "d", np.nan]])
    result = midx.isin(labels, level=level)
    tm.assert_numpy_array_equal(result, expected)


def test_isin_empty():
    # GH#51599
    midx = MultiIndex.from_arrays([[1, 2], [3, 4]])
    result = midx.isin([])
    expected = np.array([False, False])
    tm.assert_numpy_array_equal(result, expected)


def test_isin_generator():
    # GH#52568
    midx = MultiIndex.from_tuples([(1, 2)])
    result = midx.isin(x for x in [(1, 2)])
    expected = np.array([True])
    tm.assert_numpy_array_equal(result, expected)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_monotonic.py
ADDED
@@ -0,0 +1,188 @@
1 |
+
import numpy as np
|
2 |
+
import pytest
|
3 |
+
|
4 |
+
from pandas import (
|
5 |
+
Index,
|
6 |
+
MultiIndex,
|
7 |
+
)
|
8 |
+
|
9 |
+
|
10 |
+
def test_is_monotonic_increasing_lexsorted(lexsorted_two_level_string_multiindex):
|
11 |
+
# string ordering
|
12 |
+
mi = lexsorted_two_level_string_multiindex
|
13 |
+
assert mi.is_monotonic_increasing is False
|
14 |
+
assert Index(mi.values).is_monotonic_increasing is False
|
15 |
+
assert mi._is_strictly_monotonic_increasing is False
|
16 |
+
assert Index(mi.values)._is_strictly_monotonic_increasing is False
|
17 |
+
|
18 |
+
|
19 |
+
def test_is_monotonic_increasing():
|
20 |
+
i = MultiIndex.from_product([np.arange(10), np.arange(10)], names=["one", "two"])
|
21 |
+
assert i.is_monotonic_increasing is True
|
22 |
+
assert i._is_strictly_monotonic_increasing is True
|
23 |
+
assert Index(i.values).is_monotonic_increasing is True
|
24 |
+
assert i._is_strictly_monotonic_increasing is True
|

    i = MultiIndex.from_product(
        [np.arange(10, 0, -1), np.arange(10)], names=["one", "two"]
    )
    assert i.is_monotonic_increasing is False
    assert i._is_strictly_monotonic_increasing is False
    assert Index(i.values).is_monotonic_increasing is False
    assert Index(i.values)._is_strictly_monotonic_increasing is False

    i = MultiIndex.from_product(
        [np.arange(10), np.arange(10, 0, -1)], names=["one", "two"]
    )
    assert i.is_monotonic_increasing is False
    assert i._is_strictly_monotonic_increasing is False
    assert Index(i.values).is_monotonic_increasing is False
    assert Index(i.values)._is_strictly_monotonic_increasing is False

    i = MultiIndex.from_product([[1.0, np.nan, 2.0], ["a", "b", "c"]])
    assert i.is_monotonic_increasing is False
    assert i._is_strictly_monotonic_increasing is False
    assert Index(i.values).is_monotonic_increasing is False
    assert Index(i.values)._is_strictly_monotonic_increasing is False

    i = MultiIndex(
        levels=[["bar", "baz", "foo", "qux"], ["mom", "next", "zenith"]],
        codes=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3], [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
        names=["first", "second"],
    )
    assert i.is_monotonic_increasing is True
    assert Index(i.values).is_monotonic_increasing is True
    assert i._is_strictly_monotonic_increasing is True
    assert Index(i.values)._is_strictly_monotonic_increasing is True

    # mixed levels, hits the TypeError
    i = MultiIndex(
        levels=[
            [1, 2, 3, 4],
            [
                "gb00b03mlx29",
                "lu0197800237",
                "nl0000289783",
                "nl0000289965",
                "nl0000301109",
            ],
        ],
        codes=[[0, 1, 1, 2, 2, 2, 3], [4, 2, 0, 0, 1, 3, -1]],
        names=["household_id", "asset_id"],
    )

    assert i.is_monotonic_increasing is False
    assert i._is_strictly_monotonic_increasing is False

    # empty
    i = MultiIndex.from_arrays([[], []])
    assert i.is_monotonic_increasing is True
    assert Index(i.values).is_monotonic_increasing is True
    assert i._is_strictly_monotonic_increasing is True
    assert Index(i.values)._is_strictly_monotonic_increasing is True


def test_is_monotonic_decreasing():
    i = MultiIndex.from_product(
        [np.arange(9, -1, -1), np.arange(9, -1, -1)], names=["one", "two"]
    )
    assert i.is_monotonic_decreasing is True
    assert i._is_strictly_monotonic_decreasing is True
    assert Index(i.values).is_monotonic_decreasing is True
    assert Index(i.values)._is_strictly_monotonic_decreasing is True

    i = MultiIndex.from_product(
        [np.arange(10), np.arange(10, 0, -1)], names=["one", "two"]
    )
    assert i.is_monotonic_decreasing is False
    assert i._is_strictly_monotonic_decreasing is False
    assert Index(i.values).is_monotonic_decreasing is False
    assert Index(i.values)._is_strictly_monotonic_decreasing is False

    i = MultiIndex.from_product(
        [np.arange(10, 0, -1), np.arange(10)], names=["one", "two"]
    )
    assert i.is_monotonic_decreasing is False
    assert i._is_strictly_monotonic_decreasing is False
    assert Index(i.values).is_monotonic_decreasing is False
    assert Index(i.values)._is_strictly_monotonic_decreasing is False

    i = MultiIndex.from_product([[2.0, np.nan, 1.0], ["c", "b", "a"]])
    assert i.is_monotonic_decreasing is False
    assert i._is_strictly_monotonic_decreasing is False
    assert Index(i.values).is_monotonic_decreasing is False
    assert Index(i.values)._is_strictly_monotonic_decreasing is False

    # string ordering
    i = MultiIndex(
        levels=[["qux", "foo", "baz", "bar"], ["three", "two", "one"]],
        codes=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3], [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
        names=["first", "second"],
    )
    assert i.is_monotonic_decreasing is False
    assert Index(i.values).is_monotonic_decreasing is False
    assert i._is_strictly_monotonic_decreasing is False
    assert Index(i.values)._is_strictly_monotonic_decreasing is False

    i = MultiIndex(
        levels=[["qux", "foo", "baz", "bar"], ["zenith", "next", "mom"]],
        codes=[[0, 0, 0, 1, 1, 2, 2, 3, 3, 3], [0, 1, 2, 0, 1, 1, 2, 0, 1, 2]],
        names=["first", "second"],
    )
    assert i.is_monotonic_decreasing is True
    assert Index(i.values).is_monotonic_decreasing is True
    assert i._is_strictly_monotonic_decreasing is True
    assert Index(i.values)._is_strictly_monotonic_decreasing is True

    # mixed levels, hits the TypeError
    i = MultiIndex(
        levels=[
            [4, 3, 2, 1],
            [
                "nl0000301109",
                "nl0000289965",
                "nl0000289783",
                "lu0197800237",
                "gb00b03mlx29",
            ],
        ],
        codes=[[0, 1, 1, 2, 2, 2, 3], [4, 2, 0, 0, 1, 3, -1]],
        names=["household_id", "asset_id"],
    )

    assert i.is_monotonic_decreasing is False
    assert i._is_strictly_monotonic_decreasing is False

    # empty
    i = MultiIndex.from_arrays([[], []])
    assert i.is_monotonic_decreasing is True
    assert Index(i.values).is_monotonic_decreasing is True
    assert i._is_strictly_monotonic_decreasing is True
    assert Index(i.values)._is_strictly_monotonic_decreasing is True


def test_is_strictly_monotonic_increasing():
    idx = MultiIndex(
        levels=[["bar", "baz"], ["mom", "next"]], codes=[[0, 0, 1, 1], [0, 0, 0, 1]]
    )
    assert idx.is_monotonic_increasing is True
    assert idx._is_strictly_monotonic_increasing is False


def test_is_strictly_monotonic_decreasing():
    idx = MultiIndex(
        levels=[["baz", "bar"], ["next", "mom"]], codes=[[0, 0, 1, 1], [0, 0, 0, 1]]
    )
    assert idx.is_monotonic_decreasing is True
    assert idx._is_strictly_monotonic_decreasing is False


@pytest.mark.parametrize("attr", ["is_monotonic_increasing", "is_monotonic_decreasing"])
@pytest.mark.parametrize(
    "values",
    [[(np.nan,), (1,), (2,)], [(1,), (np.nan,), (2,)], [(1,), (2,), (np.nan,)]],
)
def test_is_monotonic_with_nans(values, attr):
    # GH: 37220
    idx = MultiIndex.from_tuples(values, names=["test"])
    assert getattr(idx, attr) is False
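A minimal sketch of the monotonicity properties checked above (illustrative only, assuming a standard pandas/NumPy install; not part of the uploaded file): is_monotonic_increasing tolerates ties, the private _is_strictly_monotonic_increasing does not, and a NaN anywhere in the index makes both directions report False.

import numpy as np
from pandas import MultiIndex

mi = MultiIndex.from_tuples([("bar", "mom"), ("bar", "mom"), ("baz", "next")])
print(mi.is_monotonic_increasing)            # True  (non-decreasing with ties)
print(mi._is_strictly_monotonic_increasing)  # False (first entry repeats)

mi_nan = MultiIndex.from_tuples([(1,), (np.nan,), (2,)])
print(mi_nan.is_monotonic_increasing)        # False (NaN breaks monotonicity)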
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/multi/test_reshape.py
ADDED
@@ -0,0 +1,224 @@
from datetime import datetime

import numpy as np
import pytest
import pytz

import pandas as pd
from pandas import (
    Index,
    MultiIndex,
)
import pandas._testing as tm


def test_insert(idx):
    # key contained in all levels
    new_index = idx.insert(0, ("bar", "two"))
    assert new_index.equal_levels(idx)
    assert new_index[0] == ("bar", "two")

    # key not contained in all levels
    new_index = idx.insert(0, ("abc", "three"))

    exp0 = Index(list(idx.levels[0]) + ["abc"], name="first")
    tm.assert_index_equal(new_index.levels[0], exp0)
    assert new_index.names == ["first", "second"]

    exp1 = Index(list(idx.levels[1]) + ["three"], name="second")
    tm.assert_index_equal(new_index.levels[1], exp1)
    assert new_index[0] == ("abc", "three")

    # key wrong length
    msg = "Item must have length equal to number of levels"
    with pytest.raises(ValueError, match=msg):
        idx.insert(0, ("foo2",))

    left = pd.DataFrame([["a", "b", 0], ["b", "d", 1]], columns=["1st", "2nd", "3rd"])
    left.set_index(["1st", "2nd"], inplace=True)
    ts = left["3rd"].copy(deep=True)

    left.loc[("b", "x"), "3rd"] = 2
    left.loc[("b", "a"), "3rd"] = -1
    left.loc[("b", "b"), "3rd"] = 3
    left.loc[("a", "x"), "3rd"] = 4
    left.loc[("a", "w"), "3rd"] = 5
    left.loc[("a", "a"), "3rd"] = 6

    ts.loc[("b", "x")] = 2
    ts.loc["b", "a"] = -1
    ts.loc[("b", "b")] = 3
    ts.loc["a", "x"] = 4
    ts.loc[("a", "w")] = 5
    ts.loc["a", "a"] = 6

    right = pd.DataFrame(
        [
            ["a", "b", 0],
            ["b", "d", 1],
            ["b", "x", 2],
            ["b", "a", -1],
            ["b", "b", 3],
            ["a", "x", 4],
            ["a", "w", 5],
            ["a", "a", 6],
        ],
        columns=["1st", "2nd", "3rd"],
    )
    right.set_index(["1st", "2nd"], inplace=True)
    # FIXME: data types change to float
    # because of intermediate nan insertion
    tm.assert_frame_equal(left, right, check_dtype=False)
    tm.assert_series_equal(ts, right["3rd"])


def test_insert2():
    # GH9250
    idx = (
        [("test1", i) for i in range(5)]
        + [("test2", i) for i in range(6)]
        + [("test", 17), ("test", 18)]
    )

    left = pd.Series(np.linspace(0, 10, 11), MultiIndex.from_tuples(idx[:-2]))

    left.loc[("test", 17)] = 11
    left.loc[("test", 18)] = 12

    right = pd.Series(np.linspace(0, 12, 13), MultiIndex.from_tuples(idx))

    tm.assert_series_equal(left, right)


def test_append(idx):
    result = idx[:3].append(idx[3:])
    assert result.equals(idx)

    foos = [idx[:1], idx[1:3], idx[3:]]
    result = foos[0].append(foos[1:])
    assert result.equals(idx)

    # empty
    result = idx.append([])
    assert result.equals(idx)


def test_append_index():
    idx1 = Index([1.1, 1.2, 1.3])
    idx2 = pd.date_range("2011-01-01", freq="D", periods=3, tz="Asia/Tokyo")
    idx3 = Index(["A", "B", "C"])

    midx_lv2 = MultiIndex.from_arrays([idx1, idx2])
    midx_lv3 = MultiIndex.from_arrays([idx1, idx2, idx3])

    result = idx1.append(midx_lv2)

    # see gh-7112
    tz = pytz.timezone("Asia/Tokyo")
    expected_tuples = [
        (1.1, tz.localize(datetime(2011, 1, 1))),
        (1.2, tz.localize(datetime(2011, 1, 2))),
        (1.3, tz.localize(datetime(2011, 1, 3))),
    ]
    expected = Index([1.1, 1.2, 1.3] + expected_tuples)
    tm.assert_index_equal(result, expected)

    result = midx_lv2.append(idx1)
    expected = Index(expected_tuples + [1.1, 1.2, 1.3])
    tm.assert_index_equal(result, expected)

    result = midx_lv2.append(midx_lv2)
    expected = MultiIndex.from_arrays([idx1.append(idx1), idx2.append(idx2)])
    tm.assert_index_equal(result, expected)

    result = midx_lv2.append(midx_lv3)
    tm.assert_index_equal(result, expected)

    result = midx_lv3.append(midx_lv2)
    expected = Index._simple_new(
        np.array(
            [
                (1.1, tz.localize(datetime(2011, 1, 1)), "A"),
                (1.2, tz.localize(datetime(2011, 1, 2)), "B"),
                (1.3, tz.localize(datetime(2011, 1, 3)), "C"),
            ]
            + expected_tuples,
            dtype=object,
        ),
        None,
    )
    tm.assert_index_equal(result, expected)


@pytest.mark.parametrize("name, exp", [("b", "b"), ("c", None)])
def test_append_names_match(name, exp):
    # GH#48288
    midx = MultiIndex.from_arrays([[1, 2], [3, 4]], names=["a", "b"])
    midx2 = MultiIndex.from_arrays([[3], [5]], names=["a", name])
    result = midx.append(midx2)
    expected = MultiIndex.from_arrays([[1, 2, 3], [3, 4, 5]], names=["a", exp])
    tm.assert_index_equal(result, expected)


def test_append_names_dont_match():
    # GH#48288
    midx = MultiIndex.from_arrays([[1, 2], [3, 4]], names=["a", "b"])
    midx2 = MultiIndex.from_arrays([[3], [5]], names=["x", "y"])
    result = midx.append(midx2)
    expected = MultiIndex.from_arrays([[1, 2, 3], [3, 4, 5]], names=None)
    tm.assert_index_equal(result, expected)


def test_append_overlapping_interval_levels():
    # GH 54934
    ivl1 = pd.IntervalIndex.from_breaks([0.0, 1.0, 2.0])
    ivl2 = pd.IntervalIndex.from_breaks([0.5, 1.5, 2.5])
    mi1 = MultiIndex.from_product([ivl1, ivl1])
    mi2 = MultiIndex.from_product([ivl2, ivl2])
    result = mi1.append(mi2)
    expected = MultiIndex.from_tuples(
        [
            (pd.Interval(0.0, 1.0), pd.Interval(0.0, 1.0)),
            (pd.Interval(0.0, 1.0), pd.Interval(1.0, 2.0)),
            (pd.Interval(1.0, 2.0), pd.Interval(0.0, 1.0)),
            (pd.Interval(1.0, 2.0), pd.Interval(1.0, 2.0)),
            (pd.Interval(0.5, 1.5), pd.Interval(0.5, 1.5)),
            (pd.Interval(0.5, 1.5), pd.Interval(1.5, 2.5)),
            (pd.Interval(1.5, 2.5), pd.Interval(0.5, 1.5)),
            (pd.Interval(1.5, 2.5), pd.Interval(1.5, 2.5)),
        ]
    )
    tm.assert_index_equal(result, expected)


def test_repeat():
    reps = 2
    numbers = [1, 2, 3]
    names = np.array(["foo", "bar"])

    m = MultiIndex.from_product([numbers, names], names=names)
    expected = MultiIndex.from_product([numbers, names.repeat(reps)], names=names)
    tm.assert_index_equal(m.repeat(reps), expected)


def test_insert_base(idx):
    result = idx[1:4]

    # test 0th element
    assert idx[0:4].equals(result.insert(0, idx[0]))


def test_delete_base(idx):
    expected = idx[1:]
    result = idx.delete(0)
    assert result.equals(expected)
    assert result.name == expected.name

    expected = idx[:-1]
    result = idx.delete(-1)
    assert result.equals(expected)
    assert result.name == expected.name

    msg = "index 6 is out of bounds for axis 0 with size 6"
    with pytest.raises(IndexError, match=msg):
        idx.delete(len(idx))
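A minimal sketch of the insert/append behaviour exercised above (illustrative only, assuming a standard pandas install; not part of the uploaded file): insert expects a tuple with one item per level, and append keeps the level names only when they match.

from pandas import MultiIndex

midx = MultiIndex.from_arrays([[1, 2], [3, 4]], names=["a", "b"])
print(list(midx.insert(0, (0, 0))))   # [(0, 0), (1, 3), (2, 4)]

other = MultiIndex.from_arrays([[3], [5]], names=["a", "b"])
appended = midx.append(other)
print(list(appended))                 # [(1, 3), (2, 4), (3, 5)]
print(appended.names)                 # FrozenList(['a', 'b'])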
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_asfreq.cpython-310.pyc
ADDED
Binary file (5.84 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_astype.cpython-310.pyc
ADDED
Binary file (5.02 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_factorize.cpython-310.pyc
ADDED
Binary file (1.55 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_fillna.cpython-310.pyc
ADDED
Binary file (1.07 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_insert.cpython-310.pyc
ADDED
Binary file (923 Bytes)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_is_full.cpython-310.pyc
ADDED
Binary file (767 Bytes)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_repeat.cpython-310.pyc
ADDED
Binary file (1.23 kB)
llmeval-env/lib/python3.10/site-packages/pandas/tests/indexes/period/methods/__pycache__/test_shift.cpython-310.pyc
ADDED
Binary file (3.57 kB)