applied-ai-018 commited on
Commit
d020ce8
·
verified ·
1 Parent(s): 302c5db

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. llmeval-env/lib/python3.10/site-packages/pyarrow/interchange/buffer.py +107 -0
  2. llmeval-env/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_interop.h +103 -0
  3. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/__init__.cpython-310.pyc +0 -0
  4. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/arrow_16597.cpython-310.pyc +0 -0
  5. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/arrow_39313.cpython-310.pyc +0 -0
  6. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/arrow_7980.cpython-310.pyc +0 -0
  7. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/conftest.cpython-310.pyc +0 -0
  8. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/pandas_examples.cpython-310.pyc +0 -0
  9. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/read_record_batch.cpython-310.pyc +0 -0
  10. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/strategies.cpython-310.pyc +0 -0
  11. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_adhoc_memory_leak.cpython-310.pyc +0 -0
  12. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_array.cpython-310.pyc +0 -0
  13. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_builder.cpython-310.pyc +0 -0
  14. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_cffi.cpython-310.pyc +0 -0
  15. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_compute.cpython-310.pyc +0 -0
  16. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_csv.cpython-310.pyc +0 -0
  17. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_cuda.cpython-310.pyc +0 -0
  18. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_dataset_encryption.cpython-310.pyc +0 -0
  19. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_deprecations.cpython-310.pyc +0 -0
  20. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_exec_plan.cpython-310.pyc +0 -0
  21. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_extension_type.cpython-310.pyc +0 -0
  22. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_feather.cpython-310.pyc +0 -0
  23. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_flight.cpython-310.pyc +0 -0
  24. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_flight_async.cpython-310.pyc +0 -0
  25. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_fs.cpython-310.pyc +0 -0
  26. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_gandiva.cpython-310.pyc +0 -0
  27. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_gdb.cpython-310.pyc +0 -0
  28. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_io.cpython-310.pyc +0 -0
  29. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_ipc.cpython-310.pyc +0 -0
  30. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_json.cpython-310.pyc +0 -0
  31. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_jvm.cpython-310.pyc +0 -0
  32. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_misc.cpython-310.pyc +0 -0
  33. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_orc.cpython-310.pyc +0 -0
  34. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_schema.cpython-310.pyc +0 -0
  35. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_tensor.cpython-310.pyc +0 -0
  36. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_types.cpython-310.pyc +0 -0
  37. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_udf.cpython-310.pyc +0 -0
  38. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/util.cpython-310.pyc +0 -0
  39. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/data/feather/v0.17.0.version.2-compression.lz4.feather +0 -0
  40. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/data/orc/README.md +22 -0
  41. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/data/orc/TestOrcFile.testDate1900.orc +0 -0
  42. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__init__.py +16 -0
  43. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__pycache__/__init__.cpython-310.pyc +0 -0
  44. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__pycache__/test_conversion.cpython-310.pyc +0 -0
  45. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__pycache__/test_interchange_spec.cpython-310.pyc +0 -0
  46. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/test_conversion.py +522 -0
  47. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/test_interchange_spec.py +288 -0
  48. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/parquet/__init__.py +24 -0
  49. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/parquet/__pycache__/__init__.cpython-310.pyc +0 -0
  50. llmeval-env/lib/python3.10/site-packages/pyarrow/tests/parquet/__pycache__/common.cpython-310.pyc +0 -0
llmeval-env/lib/python3.10/site-packages/pyarrow/interchange/buffer.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ from __future__ import annotations
19
+ import enum
20
+
21
+ import pyarrow as pa
22
+
23
+
24
+ class DlpackDeviceType(enum.IntEnum):
25
+ """Integer enum for device type codes matching DLPack."""
26
+
27
+ CPU = 1
28
+ CUDA = 2
29
+ CPU_PINNED = 3
30
+ OPENCL = 4
31
+ VULKAN = 7
32
+ METAL = 8
33
+ VPI = 9
34
+ ROCM = 10
35
+
36
+
37
+ class _PyArrowBuffer:
38
+ """
39
+ Data in the buffer is guaranteed to be contiguous in memory.
40
+
41
+ Note that there is no dtype attribute present, a buffer can be thought of
42
+ as simply a block of memory. However, if the column that the buffer is
43
+ attached to has a dtype that's supported by DLPack and ``__dlpack__`` is
44
+ implemented, then that dtype information will be contained in the return
45
+ value from ``__dlpack__``.
46
+
47
+ This distinction is useful to support both data exchange via DLPack on a
48
+ buffer and (b) dtypes like variable-length strings which do not have a
49
+ fixed number of bytes per element.
50
+ """
51
+
52
+ def __init__(self, x: pa.Buffer, allow_copy: bool = True) -> None:
53
+ """
54
+ Handle PyArrow Buffers.
55
+ """
56
+ self._x = x
57
+
58
+ @property
59
+ def bufsize(self) -> int:
60
+ """
61
+ Buffer size in bytes.
62
+ """
63
+ return self._x.size
64
+
65
+ @property
66
+ def ptr(self) -> int:
67
+ """
68
+ Pointer to start of the buffer as an integer.
69
+ """
70
+ return self._x.address
71
+
72
+ def __dlpack__(self):
73
+ """
74
+ Produce DLPack capsule (see array API standard).
75
+
76
+ Raises:
77
+ - TypeError : if the buffer contains unsupported dtypes.
78
+ - NotImplementedError : if DLPack support is not implemented
79
+
80
+ Useful to have to connect to array libraries. Support optional because
81
+ it's not completely trivial to implement for a Python-only library.
82
+ """
83
+ raise NotImplementedError("__dlpack__")
84
+
85
+ def __dlpack_device__(self) -> tuple[DlpackDeviceType, int | None]:
86
+ """
87
+ Device type and device ID for where the data in the buffer resides.
88
+ Uses device type codes matching DLPack.
89
+ Note: must be implemented even if ``__dlpack__`` is not.
90
+ """
91
+ if self._x.is_cpu:
92
+ return (DlpackDeviceType.CPU, None)
93
+ else:
94
+ raise NotImplementedError("__dlpack_device__")
95
+
96
+ def __repr__(self) -> str:
97
+ return (
98
+ "PyArrowBuffer(" +
99
+ str(
100
+ {
101
+ "bufsize": self.bufsize,
102
+ "ptr": self.ptr,
103
+ "device": self.__dlpack_device__()[0].name,
104
+ }
105
+ ) +
106
+ ")"
107
+ )
llmeval-env/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_interop.h ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
#pragma once

#include "arrow/python/platform.h" // IWYU pragma: export

#include <numpy/numpyconfig.h> // IWYU pragma: export

// Don't use the deprecated Numpy functions
#ifdef NPY_1_7_API_VERSION
#define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
#else
// Pre-1.7 NumPy lacks the NPY_ARRAY_* spellings; alias them to the old names.
#define NPY_ARRAY_NOTSWAPPED NPY_NOTSWAPPED
#define NPY_ARRAY_ALIGNED NPY_ALIGNED
#define NPY_ARRAY_WRITEABLE NPY_WRITEABLE
#define NPY_ARRAY_UPDATEIFCOPY NPY_UPDATEIFCOPY
#endif

// This is required to be able to access the NumPy C API properly in C++ files
// other than init.cc.
#define PY_ARRAY_UNIQUE_SYMBOL arrow_ARRAY_API
#ifndef NUMPY_IMPORT_ARRAY
// All translation units except the one that defines NUMPY_IMPORT_ARRAY share
// the API table named by PY_ARRAY_UNIQUE_SYMBOL instead of importing their own.
#define NO_IMPORT_ARRAY
#endif

#include <numpy/arrayobject.h> // IWYU pragma: export
#include <numpy/arrayscalars.h> // IWYU pragma: export
#include <numpy/ufuncobject.h> // IWYU pragma: export

// A bit subtle. Numpy has 5 canonical integer types:
// (or, rather, type pairs: signed and unsigned)
// NPY_BYTE, NPY_SHORT, NPY_INT, NPY_LONG, NPY_LONGLONG
// It also has 4 fixed-width integer aliases.
// When mapping Arrow integer types to these 4 fixed-width aliases,
// we always miss one of the canonical types (even though it may
// have the same width as one of the aliases).
// Which one depends on the platform...
// On a LP64 system, NPY_INT64 maps to NPY_LONG and
// NPY_LONGLONG needs to be handled separately.
// On a LLP64 system, NPY_INT32 maps to NPY_LONG and
// NPY_INT needs to be handled separately.

#if NPY_BITSOF_LONG == 32 && NPY_BITSOF_LONGLONG == 64
#define NPY_INT64_IS_LONG_LONG 1
#else
#define NPY_INT64_IS_LONG_LONG 0
#endif

#if NPY_BITSOF_INT == 32 && NPY_BITSOF_LONG == 64
#define NPY_INT32_IS_INT 1
#else
#define NPY_INT32_IS_INT 0
#endif

// Backported NumPy 2 API (can be removed if numpy 2 is required)
#if NPY_ABI_VERSION < 0x02000000
#define PyDataType_ELSIZE(descr) ((descr)->elsize)
#define PyDataType_C_METADATA(descr) ((descr)->c_metadata)
#define PyDataType_FIELDS(descr) ((descr)->fields)
#endif

namespace arrow {
namespace py {

// Initialize the NumPy array and ufunc C APIs in the translation unit that
// defines NUMPY_IMPORT_ARRAY; a no-op returning 0 everywhere else.
// The import_array1/import_umath1 macros make this function return -1
// (with a Python exception set) if the import fails.
inline int import_numpy() {
#ifdef NUMPY_IMPORT_ARRAY
  import_array1(-1);
  import_umath1(-1);
#endif

  return 0;
}

// See above about the missing Numpy integer type numbers
inline int fix_numpy_type_num(int type_num) {
#if !NPY_INT32_IS_INT && NPY_BITSOF_INT == 32
  if (type_num == NPY_INT) return NPY_INT32;
  if (type_num == NPY_UINT) return NPY_UINT32;
#endif
#if !NPY_INT64_IS_LONG_LONG && NPY_BITSOF_LONGLONG == 64
  if (type_num == NPY_LONGLONG) return NPY_INT64;
  if (type_num == NPY_ULONGLONG) return NPY_UINT64;
#endif
  return type_num;
}

}  // namespace py
}  // namespace arrow
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (186 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/arrow_16597.cpython-310.pyc ADDED
Binary file (909 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/arrow_39313.cpython-310.pyc ADDED
Binary file (924 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/arrow_7980.cpython-310.pyc ADDED
Binary file (376 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/conftest.cpython-310.pyc ADDED
Binary file (8.17 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/pandas_examples.cpython-310.pyc ADDED
Binary file (3.51 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/read_record_batch.cpython-310.pyc ADDED
Binary file (370 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/strategies.cpython-310.pyc ADDED
Binary file (9.39 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_adhoc_memory_leak.cpython-310.pyc ADDED
Binary file (1.17 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_array.cpython-310.pyc ADDED
Binary file (98.4 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_builder.cpython-310.pyc ADDED
Binary file (2.07 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_cffi.cpython-310.pyc ADDED
Binary file (17.3 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_compute.cpython-310.pyc ADDED
Binary file (101 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_csv.cpython-310.pyc ADDED
Binary file (52.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_cuda.cpython-310.pyc ADDED
Binary file (18.7 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_dataset_encryption.cpython-310.pyc ADDED
Binary file (5.66 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_deprecations.cpython-310.pyc ADDED
Binary file (236 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_exec_plan.cpython-310.pyc ADDED
Binary file (7.09 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_extension_type.cpython-310.pyc ADDED
Binary file (44.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_feather.cpython-310.pyc ADDED
Binary file (22 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_flight.cpython-310.pyc ADDED
Binary file (81.9 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_flight_async.cpython-310.pyc ADDED
Binary file (2.73 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_fs.cpython-310.pyc ADDED
Binary file (49.4 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_gandiva.cpython-310.pyc ADDED
Binary file (10.7 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_gdb.cpython-310.pyc ADDED
Binary file (33.3 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_io.cpython-310.pyc ADDED
Binary file (56.9 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_ipc.cpython-310.pyc ADDED
Binary file (35.4 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_json.cpython-310.pyc ADDED
Binary file (10.7 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_jvm.cpython-310.pyc ADDED
Binary file (9.1 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_misc.cpython-310.pyc ADDED
Binary file (6.84 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_orc.cpython-310.pyc ADDED
Binary file (12.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_schema.cpython-310.pyc ADDED
Binary file (18.3 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_tensor.cpython-310.pyc ADDED
Binary file (5.77 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_types.cpython-310.pyc ADDED
Binary file (37.5 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/test_udf.cpython-310.pyc ADDED
Binary file (24.8 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/__pycache__/util.cpython-310.pyc ADDED
Binary file (14.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/data/feather/v0.17.0.version.2-compression.lz4.feather ADDED
Binary file (594 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/data/orc/README.md ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ <!---
2
+ Licensed to the Apache Software Foundation (ASF) under one
3
+ or more contributor license agreements. See the NOTICE file
4
+ distributed with this work for additional information
5
+ regarding copyright ownership. The ASF licenses this file
6
+ to you under the Apache License, Version 2.0 (the
7
+ "License"); you may not use this file except in compliance
8
+ with the License. You may obtain a copy of the License at
9
+
10
+ http://www.apache.org/licenses/LICENSE-2.0
11
+
12
+ Unless required by applicable law or agreed to in writing,
13
+ software distributed under the License is distributed on an
14
+ "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
15
+ KIND, either express or implied. See the License for the
16
+ specific language governing permissions and limitations
17
+ under the License.
18
+ -->
19
+
20
+ The ORC and JSON files come from the `examples` directory in the Apache ORC
21
+ source tree:
22
+ https://github.com/apache/orc/tree/main/examples
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/data/orc/TestOrcFile.testDate1900.orc ADDED
Binary file (30.9 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__init__.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (198 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__pycache__/test_conversion.cpython-310.pyc ADDED
Binary file (13.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/__pycache__/test_interchange_spec.cpython-310.pyc ADDED
Binary file (7.38 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/test_conversion.py ADDED
@@ -0,0 +1,522 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ from datetime import datetime as dt
19
+ import numpy as np
20
+ import pyarrow as pa
21
+ from pyarrow.vendored.version import Version
22
+ import pytest
23
+
24
+ import pyarrow.interchange as pi
25
+ from pyarrow.interchange.column import (
26
+ _PyArrowColumn,
27
+ ColumnNullType,
28
+ DtypeKind,
29
+ )
30
+ from pyarrow.interchange.from_dataframe import _from_dataframe
31
+
32
+ try:
33
+ import pandas as pd
34
+ # import pandas.testing as tm
35
+ except ImportError:
36
+ pass
37
+
38
+
39
+ @pytest.mark.parametrize("unit", ['s', 'ms', 'us', 'ns'])
40
+ @pytest.mark.parametrize("tz", ['', 'America/New_York', '+07:30', '-04:30'])
41
+ def test_datetime(unit, tz):
42
+ dt_arr = [dt(2007, 7, 13), dt(2007, 7, 14), None]
43
+ table = pa.table({"A": pa.array(dt_arr, type=pa.timestamp(unit, tz=tz))})
44
+ col = table.__dataframe__().get_column_by_name("A")
45
+
46
+ assert col.size() == 3
47
+ assert col.offset == 0
48
+ assert col.null_count == 1
49
+ assert col.dtype[0] == DtypeKind.DATETIME
50
+ assert col.describe_null == (ColumnNullType.USE_BITMASK, 0)
51
+
52
+
53
+ @pytest.mark.parametrize(
54
+ ["test_data", "kind"],
55
+ [
56
+ (["foo", "bar"], 21),
57
+ ([1.5, 2.5, 3.5], 2),
58
+ ([1, 2, 3, 4], 0),
59
+ ],
60
+ )
61
+ def test_array_to_pyarrowcolumn(test_data, kind):
62
+ arr = pa.array(test_data)
63
+ arr_column = _PyArrowColumn(arr)
64
+
65
+ assert arr_column._col == arr
66
+ assert arr_column.size() == len(test_data)
67
+ assert arr_column.dtype[0] == kind
68
+ assert arr_column.num_chunks() == 1
69
+ assert arr_column.null_count == 0
70
+ assert arr_column.get_buffers()["validity"] is None
71
+ assert len(list(arr_column.get_chunks())) == 1
72
+
73
+ for chunk in arr_column.get_chunks():
74
+ assert chunk == arr_column
75
+
76
+
77
+ def test_offset_of_sliced_array():
78
+ arr = pa.array([1, 2, 3, 4])
79
+ arr_sliced = arr.slice(2, 2)
80
+
81
+ table = pa.table([arr], names=["arr"])
82
+ table_sliced = pa.table([arr_sliced], names=["arr_sliced"])
83
+
84
+ col = table_sliced.__dataframe__().get_column(0)
85
+ assert col.offset == 2
86
+
87
+ result = _from_dataframe(table_sliced.__dataframe__())
88
+ assert table_sliced.equals(result)
89
+ assert not table.equals(result)
90
+
91
+ # pandas hardcodes offset to 0:
92
+ # https://github.com/pandas-dev/pandas/blob/5c66e65d7b9fef47ccb585ce2fd0b3ea18dc82ea/pandas/core/interchange/from_dataframe.py#L247
93
+ # so conversion to pandas can't be tested currently
94
+
95
+ # df = pandas_from_dataframe(table)
96
+ # df_sliced = pandas_from_dataframe(table_sliced)
97
+
98
+ # tm.assert_series_equal(df["arr"][2:4], df_sliced["arr_sliced"],
99
+ # check_index=False, check_names=False)
100
+
101
+
102
+ @pytest.mark.pandas
103
+ @pytest.mark.parametrize(
104
+ "uint", [pa.uint8(), pa.uint16(), pa.uint32()]
105
+ )
106
+ @pytest.mark.parametrize(
107
+ "int", [pa.int8(), pa.int16(), pa.int32(), pa.int64()]
108
+ )
109
+ @pytest.mark.parametrize(
110
+ "float, np_float", [
111
+ # (pa.float16(), np.float16), #not supported by pandas
112
+ (pa.float32(), np.float32),
113
+ (pa.float64(), np.float64)
114
+ ]
115
+ )
116
+ def test_pandas_roundtrip(uint, int, float, np_float):
117
+ if Version(pd.__version__) < Version("1.5.0"):
118
+ pytest.skip("__dataframe__ added to pandas in 1.5.0")
119
+
120
+ arr = [1, 2, 3]
121
+ table = pa.table(
122
+ {
123
+ "a": pa.array(arr, type=uint),
124
+ "b": pa.array(arr, type=int),
125
+ "c": pa.array(np.array(arr, dtype=np_float), type=float),
126
+ "d": [True, False, True],
127
+ }
128
+ )
129
+ from pandas.api.interchange import (
130
+ from_dataframe as pandas_from_dataframe
131
+ )
132
+ pandas_df = pandas_from_dataframe(table)
133
+ result = pi.from_dataframe(pandas_df)
134
+ assert table.equals(result)
135
+
136
+ table_protocol = table.__dataframe__()
137
+ result_protocol = result.__dataframe__()
138
+
139
+ assert table_protocol.num_columns() == result_protocol.num_columns()
140
+ assert table_protocol.num_rows() == result_protocol.num_rows()
141
+ assert table_protocol.num_chunks() == result_protocol.num_chunks()
142
+ assert table_protocol.column_names() == result_protocol.column_names()
143
+
144
+
145
+ @pytest.mark.pandas
146
+ def test_pandas_roundtrip_string():
147
+ # See https://github.com/pandas-dev/pandas/issues/50554
148
+ if Version(pd.__version__) < Version("1.6"):
149
+ pytest.skip("Column.size() bug in pandas")
150
+
151
+ arr = ["a", "", "c"]
152
+ table = pa.table({"a": pa.array(arr)})
153
+
154
+ from pandas.api.interchange import (
155
+ from_dataframe as pandas_from_dataframe
156
+ )
157
+
158
+ pandas_df = pandas_from_dataframe(table)
159
+ result = pi.from_dataframe(pandas_df)
160
+
161
+ assert result["a"].to_pylist() == table["a"].to_pylist()
162
+ assert pa.types.is_string(table["a"].type)
163
+ assert pa.types.is_large_string(result["a"].type)
164
+
165
+ table_protocol = table.__dataframe__()
166
+ result_protocol = result.__dataframe__()
167
+
168
+ assert table_protocol.num_columns() == result_protocol.num_columns()
169
+ assert table_protocol.num_rows() == result_protocol.num_rows()
170
+ assert table_protocol.num_chunks() == result_protocol.num_chunks()
171
+ assert table_protocol.column_names() == result_protocol.column_names()
172
+
173
+
174
+ @pytest.mark.pandas
175
+ def test_pandas_roundtrip_large_string():
176
+ # See https://github.com/pandas-dev/pandas/issues/50554
177
+ if Version(pd.__version__) < Version("1.6"):
178
+ pytest.skip("Column.size() bug in pandas")
179
+
180
+ arr = ["a", "", "c"]
181
+ table = pa.table({"a_large": pa.array(arr, type=pa.large_string())})
182
+
183
+ from pandas.api.interchange import (
184
+ from_dataframe as pandas_from_dataframe
185
+ )
186
+
187
+ if Version(pd.__version__) >= Version("2.0.1"):
188
+ pandas_df = pandas_from_dataframe(table)
189
+ result = pi.from_dataframe(pandas_df)
190
+
191
+ assert result["a_large"].to_pylist() == table["a_large"].to_pylist()
192
+ assert pa.types.is_large_string(table["a_large"].type)
193
+ assert pa.types.is_large_string(result["a_large"].type)
194
+
195
+ table_protocol = table.__dataframe__()
196
+ result_protocol = result.__dataframe__()
197
+
198
+ assert table_protocol.num_columns() == result_protocol.num_columns()
199
+ assert table_protocol.num_rows() == result_protocol.num_rows()
200
+ assert table_protocol.num_chunks() == result_protocol.num_chunks()
201
+ assert table_protocol.column_names() == result_protocol.column_names()
202
+
203
+ else:
204
+ # large string not supported by pandas implementation for
205
+ # older versions of pandas
206
+ # https://github.com/pandas-dev/pandas/issues/52795
207
+ with pytest.raises(AssertionError):
208
+ pandas_from_dataframe(table)
209
+
210
+
211
+ @pytest.mark.pandas
212
+ def test_pandas_roundtrip_string_with_missing():
213
+ # See https://github.com/pandas-dev/pandas/issues/50554
214
+ if Version(pd.__version__) < Version("1.6"):
215
+ pytest.skip("Column.size() bug in pandas")
216
+
217
+ arr = ["a", "", "c", None]
218
+ table = pa.table({"a": pa.array(arr),
219
+ "a_large": pa.array(arr, type=pa.large_string())})
220
+
221
+ from pandas.api.interchange import (
222
+ from_dataframe as pandas_from_dataframe
223
+ )
224
+
225
+ if Version(pd.__version__) >= Version("2.0.2"):
226
+ pandas_df = pandas_from_dataframe(table)
227
+ result = pi.from_dataframe(pandas_df)
228
+
229
+ assert result["a"].to_pylist() == table["a"].to_pylist()
230
+ assert pa.types.is_string(table["a"].type)
231
+ assert pa.types.is_large_string(result["a"].type)
232
+
233
+ assert result["a_large"].to_pylist() == table["a_large"].to_pylist()
234
+ assert pa.types.is_large_string(table["a_large"].type)
235
+ assert pa.types.is_large_string(result["a_large"].type)
236
+ else:
237
+ # older versions of pandas do not have bitmask support
238
+ # https://github.com/pandas-dev/pandas/issues/49888
239
+ with pytest.raises(NotImplementedError):
240
+ pandas_from_dataframe(table)
241
+
242
+
243
+ @pytest.mark.pandas
244
+ def test_pandas_roundtrip_categorical():
245
+ if Version(pd.__version__) < Version("2.0.2"):
246
+ pytest.skip("Bitmasks not supported in pandas interchange implementation")
247
+
248
+ arr = ["Mon", "Tue", "Mon", "Wed", "Mon", "Thu", "Fri", "Sat", None]
249
+ table = pa.table(
250
+ {"weekday": pa.array(arr).dictionary_encode()}
251
+ )
252
+
253
+ from pandas.api.interchange import (
254
+ from_dataframe as pandas_from_dataframe
255
+ )
256
+ pandas_df = pandas_from_dataframe(table)
257
+ result = pi.from_dataframe(pandas_df)
258
+
259
+ assert result["weekday"].to_pylist() == table["weekday"].to_pylist()
260
+ assert pa.types.is_dictionary(table["weekday"].type)
261
+ assert pa.types.is_dictionary(result["weekday"].type)
262
+ assert pa.types.is_string(table["weekday"].chunk(0).dictionary.type)
263
+ assert pa.types.is_large_string(result["weekday"].chunk(0).dictionary.type)
264
+ assert pa.types.is_int32(table["weekday"].chunk(0).indices.type)
265
+ assert pa.types.is_int8(result["weekday"].chunk(0).indices.type)
266
+
267
+ table_protocol = table.__dataframe__()
268
+ result_protocol = result.__dataframe__()
269
+
270
+ assert table_protocol.num_columns() == result_protocol.num_columns()
271
+ assert table_protocol.num_rows() == result_protocol.num_rows()
272
+ assert table_protocol.num_chunks() == result_protocol.num_chunks()
273
+ assert table_protocol.column_names() == result_protocol.column_names()
274
+
275
+ col_table = table_protocol.get_column(0)
276
+ col_result = result_protocol.get_column(0)
277
+
278
+ assert col_result.dtype[0] == DtypeKind.CATEGORICAL
279
+ assert col_result.dtype[0] == col_table.dtype[0]
280
+ assert col_result.size() == col_table.size()
281
+ assert col_result.offset == col_table.offset
282
+
283
+ desc_cat_table = col_result.describe_categorical
284
+ desc_cat_result = col_result.describe_categorical
285
+
286
+ assert desc_cat_table["is_ordered"] == desc_cat_result["is_ordered"]
287
+ assert desc_cat_table["is_dictionary"] == desc_cat_result["is_dictionary"]
288
+ assert isinstance(desc_cat_result["categories"]._col, pa.Array)
289
+
290
+
291
@pytest.mark.pandas
@pytest.mark.parametrize("unit", ['s', 'ms', 'us', 'ns'])
def test_pandas_roundtrip_datetime(unit):
    """Roundtrip timestamp columns pyarrow -> pandas -> pyarrow per unit."""
    if Version(pd.__version__) < Version("1.5.0"):
        pytest.skip("__dataframe__ added to pandas in 1.5.0")
    from datetime import datetime as dt

    # Timezone-aware timestamps are excluded: not yet supported by the
    # pandas interchange implementation.
    dt_arr = [dt(2007, 7, 13), dt(2007, 7, 14), dt(2007, 7, 15)]
    table = pa.table({"a": pa.array(dt_arr, type=pa.timestamp(unit))})

    if Version(pd.__version__) >= Version("1.6"):
        expected = table
    else:
        # pandas < 2.0 always creates datetime64 in "ns" resolution, so the
        # original unit does not survive the roundtrip there.
        # (NOTE(review): the "1.6" bound presumably also covers the
        # pre-rename 2.0 dev builds — confirm.)
        expected = pa.table({"a": pa.array(dt_arr, type=pa.timestamp('ns'))})

    from pandas.api.interchange import (
        from_dataframe as pandas_from_dataframe
    )
    result = pi.from_dataframe(pandas_from_dataframe(table))

    assert expected.equals(result)

    expected_protocol = expected.__dataframe__()
    result_protocol = result.__dataframe__()

    assert expected_protocol.num_columns() == result_protocol.num_columns()
    assert expected_protocol.num_rows() == result_protocol.num_rows()
    assert expected_protocol.num_chunks() == result_protocol.num_chunks()
    assert expected_protocol.column_names() == result_protocol.column_names()
325
+
326
+
327
@pytest.mark.pandas
@pytest.mark.parametrize(
    "np_float", [np.float32, np.float64]
)
def test_pandas_to_pyarrow_with_missing(np_float):
    """Convert a pandas frame with missing values into a pyarrow table."""
    if Version(pd.__version__) < Version("1.5.0"):
        pytest.skip("__dataframe__ added to pandas in 1.5.0")

    # NOTE(review): ``dt`` is assumed to be imported at the top of this
    # module (other tests here import it locally) — confirm.
    floats = np.array([0, np.nan, 2], dtype=np_float)   # ColumnNullType.USE_NAN
    stamps = [None, dt(2007, 7, 14), dt(2007, 7, 15)]   # ColumnNullType.USE_SENTINEL
    df = pd.DataFrame({"a": floats, "dt": stamps})
    expected = pa.table({
        "a": pa.array(floats, from_pandas=True),
        "dt": pa.array(stamps, type=pa.timestamp("ns"))
    })

    result = pi.from_dataframe(df)
    assert result.equals(expected)
348
+
349
+
350
@pytest.mark.pandas
def test_pandas_to_pyarrow_float16_with_missing():
    """float16 with NaN is rejected: no ``is_nan`` kernel for halffloat."""
    if Version(pd.__version__) < Version("1.5.0"):
        pytest.skip("__dataframe__ added to pandas in 1.5.0")

    # np.float16 errors if pa.is_nan is used:
    # pyarrow.lib.ArrowNotImplementedError: Function 'is_nan' has no kernel
    # matching input types (halffloat)
    half = np.array([0, np.nan, 2], dtype=np.float16)
    with pytest.raises(NotImplementedError):
        pi.from_dataframe(pd.DataFrame({"a": half}))
363
+
364
+
365
@pytest.mark.parametrize(
    "uint", [pa.uint8(), pa.uint16(), pa.uint32()]
)
@pytest.mark.parametrize(
    "int", [pa.int8(), pa.int16(), pa.int32(), pa.int64()]
)
@pytest.mark.parametrize(
    "float, np_float", [
        (pa.float16(), np.float16),
        (pa.float32(), np.float32),
        (pa.float64(), np.float64)
    ]
)
@pytest.mark.parametrize("unit", ['s', 'ms', 'us', 'ns'])
@pytest.mark.parametrize("tz", ['America/New_York', '+07:30', '-04:30'])
@pytest.mark.parametrize("offset, length", [(0, 3), (0, 2), (1, 2), (2, 1)])
def test_pyarrow_roundtrip(uint, int, float, np_float,
                           unit, tz, offset, length):
    """Roundtrip a sliced, mixed-dtype table through the protocol."""
    from datetime import datetime as dt
    arr = [1, 2, None]
    dt_arr = [dt(2007, 7, 13), None, dt(2007, 7, 15)]

    table = pa.table(
        {
            "a": pa.array(arr, type=uint),
            "b": pa.array(arr, type=int),
            "c": pa.array(np.array(arr, dtype=np_float),
                          type=float, from_pandas=True),
            "d": [True, False, True],
            "e": [True, False, None],
            "f": ["a", None, "c"],
            "g": pa.array(dt_arr, type=pa.timestamp(unit, tz=tz))
        }
    ).slice(offset, length)
    result = _from_dataframe(table.__dataframe__())

    assert table.equals(result)

    # The reconstructed table must expose the same protocol-level shape.
    table_protocol = table.__dataframe__()
    result_protocol = result.__dataframe__()
    for probe in ("num_columns", "num_rows", "num_chunks", "column_names"):
        assert getattr(table_protocol, probe)() == \
            getattr(result_protocol, probe)()
412
+
413
+
414
@pytest.mark.parametrize("offset, length", [(0, 10), (0, 2), (7, 3), (2, 1)])
def test_pyarrow_roundtrip_categorical(offset, length):
    """Roundtrip a sliced dictionary-encoded column through the protocol."""
    arr = ["Mon", "Tue", "Mon", "Wed", "Mon", "Thu", "Fri", None, "Sun"]
    table = pa.table(
        {"weekday": pa.array(arr).dictionary_encode()}
    ).slice(offset, length)
    result = _from_dataframe(table.__dataframe__())

    assert table.equals(result)

    table_protocol = table.__dataframe__()
    result_protocol = result.__dataframe__()

    for probe in ("num_columns", "num_rows", "num_chunks", "column_names"):
        assert getattr(table_protocol, probe)() == \
            getattr(result_protocol, probe)()

    col_table = table_protocol.get_column(0)
    col_result = result_protocol.get_column(0)

    assert col_result.dtype[0] == DtypeKind.CATEGORICAL
    assert col_result.dtype[0] == col_table.dtype[0]
    assert col_result.size() == col_table.size()
    assert col_result.offset == col_table.offset

    # The category description must survive the roundtrip unchanged.
    desc_cat_table = col_table.describe_categorical
    desc_cat_result = col_result.describe_categorical

    assert desc_cat_table["is_ordered"] == desc_cat_result["is_ordered"]
    assert desc_cat_table["is_dictionary"] == desc_cat_result["is_dictionary"]
    assert isinstance(desc_cat_result["categories"]._col, pa.Array)
447
+
448
+
449
@pytest.mark.large_memory
def test_pyarrow_roundtrip_large_string():
    """Roundtrip a >2 GB large_string column without losing the type."""
    # 3 GB of byte data forces the 64-bit (large_string) offsets path.
    n_rows = 3 * 1024**2
    data = np.array([b'x' * 1024] * n_rows, dtype='object')
    table = pa.table([pa.array(data, type=pa.large_string())],
                     names=["large_string"])

    result = _from_dataframe(table.__dataframe__())
    col = result.__dataframe__().get_column(0)

    assert col.size() == n_rows
    assert pa.types.is_large_string(table[0].type)
    assert pa.types.is_large_string(result[0].type)

    assert table.equals(result)
464
+
465
+
466
def test_nan_as_null():
    """Requesting nan_as_null=True is unsupported and raises RuntimeError."""
    tbl = pa.table({"a": [1, 2, 3, 4]})
    with pytest.raises(RuntimeError):
        tbl.__dataframe__(nan_as_null=True)
470
+
471
+
472
@pytest.mark.pandas
def test_allow_copy_false():
    """allow_copy=False raises when a copy is needed to create a bitmask."""
    if Version(pd.__version__) < Version("1.5.0"):
        pytest.skip("__dataframe__ added to pandas in 1.5.0")

    # NOTE(review): ``dt`` is assumed to be imported at module level — confirm.
    frames = [
        pd.DataFrame({"a": [0, 1.0, 2.0]}),
        pd.DataFrame({
            "dt": [None, dt(2007, 7, 14), dt(2007, 7, 15)]
        }),
    ]
    for df in frames:
        with pytest.raises(RuntimeError):
            pi.from_dataframe(df, allow_copy=False)
489
+
490
+
491
@pytest.mark.pandas
def test_allow_copy_false_bool_categorical():
    """allow_copy=False raises for boolean and categorical dtypes.

    A copy is always made for these dtypes, so conversion must fail
    regardless of whether the column contains missing values.
    """
    if Version(pd.__version__) < Version("1.5.0"):
        pytest.skip("__dataframe__ added to pandas in 1.5.0")

    bool_frames = [
        pd.DataFrame({"a": [None, False, True]}),
        pd.DataFrame({"a": [True, False, True]}),
    ]
    cat_frames = [
        pd.DataFrame({"weekday": ["a", "b", None]}).astype("category"),
        pd.DataFrame({"weekday": ["a", "b", "c"]}).astype("category"),
    ]
    for df in bool_frames + cat_frames:
        with pytest.raises(RuntimeError):
            pi.from_dataframe(df, allow_copy=False)
516
+
517
+
518
def test_empty_dataframe():
    """A zero-row table survives the interchange roundtrip unchanged."""
    schema = pa.schema([('col1', pa.int8())])
    empty = pa.table([[]], schema=schema)
    assert pi.from_dataframe(empty.__dataframe__()) == empty
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/interchange/test_interchange_spec.py ADDED
@@ -0,0 +1,288 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ import ctypes
19
+ import hypothesis as h
20
+ import hypothesis.strategies as st
21
+
22
+ import numpy as np
23
+ import pyarrow as pa
24
+ import pyarrow.tests.strategies as past
25
+ import pytest
26
+
27
+
28
# Hypothesis strategy producing every primitive arrow dtype exercised by
# the spec tests below; datetime and dictionary types are covered by
# dedicated tests instead.  ``st.deferred`` delays strategy construction
# until first use.
all_types = st.deferred(
    lambda: (
        past.signed_integer_types |
        past.unsigned_integer_types |
        past.floating_types |
        past.bool_type |
        past.string_type |
        past.large_string_type
    )
)
38
+
39
+
40
# datetime is tested in test_extra.py
# dictionary is tested in test_categorical()
@h.given(past.arrays(all_types, size=3))
def test_dtypes(arr):
    """Column metadata (null_count, size, offset) matches the source array."""
    df = pa.table([arr], names=["a"]).__dataframe__()
    col = df.get_column(0)

    assert col.null_count == arr.null_count
    assert isinstance(col.null_count, int)
    assert col.size() == 3
    assert col.offset == 0
52
+
53
+
54
@pytest.mark.parametrize(
    "uint, uint_bw",
    [
        (pa.uint8(), 8),
        (pa.uint16(), 16),
        (pa.uint32(), 32)
    ]
)
@pytest.mark.parametrize(
    "int, int_bw", [
        (pa.int8(), 8),
        (pa.int16(), 16),
        (pa.int32(), 32),
        (pa.int64(), 64)
    ]
)
@pytest.mark.parametrize(
    "float, float_bw, np_float", [
        (pa.float16(), 16, np.float16),
        (pa.float32(), 32, np.float32),
        (pa.float64(), 64, np.float64)
    ]
)
@pytest.mark.parametrize("unit", ['s', 'ms', 'us', 'ns'])
@pytest.mark.parametrize("tz", ['', 'America/New_York', '+07:30', '-04:30'])
@pytest.mark.parametrize("use_batch", [False, True])
def test_mixed_dtypes(uint, uint_bw, int, int_bw,
                      float, float_bw, np_float, unit, tz,
                      use_batch):
    """Each column of a mixed-dtype table reports the right kind/bit width."""
    from datetime import datetime as dt
    arr = [1, 2, 3]
    dt_arr = [dt(2007, 7, 13), dt(2007, 7, 14), dt(2007, 7, 15)]
    table = pa.table(
        {
            "a": pa.array(arr, type=uint),
            "b": pa.array(arr, type=int),
            "c": pa.array(np.array(arr, dtype=np_float), type=float),
            "d": [True, False, True],
            "e": ["a", "", "c"],
            "f": pa.array(dt_arr, type=pa.timestamp(unit, tz=tz))
        }
    )
    if use_batch:
        table = table.to_batches()[0]
    df = table.__dataframe__()

    # Expected DtypeKind per column (see DtypeKind class in column.py):
    # 0 = INT, 1 = UINT, 2 = FLOAT, 20 = BOOL, 21 = STRING, 22 = DATETIME
    expected_kinds = {"a": 1, "b": 0, "c": 2, "d": 20, "e": 21, "f": 22}

    for name, kind in expected_kinds.items():
        col = df.get_column_by_name(name)
        assert col.null_count == 0
        assert col.size() == 3
        assert col.offset == 0
        assert col.dtype[0] == kind

    # Bit widths of the numeric columns follow the parametrized types.
    assert df.get_column_by_name("a").dtype[1] == uint_bw
    assert df.get_column_by_name("b").dtype[1] == int_bw
    assert df.get_column_by_name("c").dtype[1] == float_bw
115
+
116
+
117
def test_na_float():
    """A float column with one None reports null_count == 1 as a plain int."""
    df = pa.table({"a": [1.0, None, 2.0]}).__dataframe__()
    col = df.get_column_by_name("a")
    assert col.null_count == 1
    assert isinstance(col.null_count, int)
123
+
124
+
125
def test_noncategorical():
    """describe_categorical on a non-categorical column raises TypeError."""
    df = pa.table({"a": [1, 2, 3]}).__dataframe__()
    col = df.get_column_by_name("a")
    with pytest.raises(TypeError, match=".*categorical.*"):
        col.describe_categorical
131
+
132
+
133
@pytest.mark.parametrize("use_batch", [False, True])
def test_categorical(use_batch):
    """describe_categorical exposes boolean is_ordered/is_dictionary flags.

    (The redundant function-local ``import pyarrow as pa`` was removed;
    this module already imports pyarrow at top level.)
    """
    arr = ["Mon", "Tue", "Mon", "Wed", "Mon", "Thu", "Fri", "Sat", None]
    table = pa.table(
        {"weekday": pa.array(arr).dictionary_encode()}
    )
    if use_batch:
        table = table.to_batches()[0]

    col = table.__dataframe__().get_column_by_name("weekday")
    categorical = col.describe_categorical
    assert isinstance(categorical["is_ordered"], bool)
    assert isinstance(categorical["is_dictionary"], bool)
147
+
148
+
149
@pytest.mark.parametrize("use_batch", [False, True])
def test_dataframe(use_batch):
    """DataFrame-level protocol accessors report shape, chunks, and names."""
    legs = pa.chunked_array([[2, 2, 4], [4, 5, 100]])
    animals = pa.chunked_array([["Flamingo", "Parrot", "Cow"],
                                ["Horse", "Brittle stars", "Centipede"]])
    table = pa.table([legs, animals], names=['n_legs', 'animals'])
    if use_batch:
        # A record batch is a single chunk by construction.
        table = table.combine_chunks().to_batches()[0]
    df = table.__dataframe__()

    assert df.num_columns() == 2
    assert df.num_rows() == 6
    assert df.num_chunks() == (1 if use_batch else 2)
    assert list(df.column_names()) == ['n_legs', 'animals']
    # Selecting column 1 by position and by name must agree.
    assert list(df.select_columns((1,)).column_names()) == list(
        df.select_columns_by_name(("animals",)).column_names()
    )
169
+
170
+
171
@pytest.mark.parametrize("use_batch", [False, True])
@pytest.mark.parametrize(["size", "n_chunks"], [(10, 3), (12, 3), (12, 5)])
def test_df_get_chunks(use_batch, size, n_chunks):
    """get_chunks(n) yields exactly n chunks whose row counts sum to size."""
    table = pa.table({"x": list(range(size))})
    if use_batch:
        table = table.to_batches()[0]
    chunks = list(table.__dataframe__().get_chunks(n_chunks))
    assert len(chunks) == n_chunks
    assert sum(chunk.num_rows() for chunk in chunks) == size
181
+
182
+
183
@pytest.mark.parametrize("use_batch", [False, True])
@pytest.mark.parametrize(["size", "n_chunks"], [(10, 3), (12, 3), (12, 5)])
def test_column_get_chunks(use_batch, size, n_chunks):
    """Column.get_chunks(n) yields n chunks whose sizes sum to size."""
    table = pa.table({"x": list(range(size))})
    if use_batch:
        table = table.to_batches()[0]
    column = table.__dataframe__().get_column(0)
    chunks = list(column.get_chunks(n_chunks))
    assert len(chunks) == n_chunks
    assert sum(chunk.size() for chunk in chunks) == size
193
+
194
+
195
@pytest.mark.pandas
@pytest.mark.parametrize(
    "uint", [pa.uint8(), pa.uint16(), pa.uint32()]
)
@pytest.mark.parametrize(
    "int", [pa.int8(), pa.int16(), pa.int32(), pa.int64()]
)
@pytest.mark.parametrize(
    "float, np_float", [
        (pa.float16(), np.float16),
        (pa.float32(), np.float32),
        (pa.float64(), np.float64)
    ]
)
@pytest.mark.parametrize("use_batch", [False, True])
def test_get_columns(uint, int, float, np_float, use_batch):
    """get_columns() yields one single-chunk column per field, right kind."""
    chunked = [[1, 2, 3], [4, 5]]
    arr_float = np.array([1, 2, 3, 4, 5], dtype=np_float)
    table = pa.table(
        {
            "a": pa.chunked_array(chunked, type=uint),
            "b": pa.chunked_array(chunked, type=int),
            "c": pa.array(arr_float, type=float)
        }
    )
    if use_batch:
        table = table.combine_chunks().to_batches()[0]
    df = table.__dataframe__()

    for col in df.get_columns():
        assert col.size() == 5
        assert col.num_chunks() == 1

    # DtypeKind values (see DtypeKind class in column.py):
    # 0 = INT, 1 = UINT, 2 = FLOAT
    assert df.get_column(0).dtype[0] == 1  # UINT
    assert df.get_column(1).dtype[0] == 0  # INT
    assert df.get_column(2).dtype[0] == 2  # FLOAT
232
+
233
+
234
@pytest.mark.parametrize(
    "int", [pa.int8(), pa.int16(), pa.int32(), pa.int64()]
)
@pytest.mark.parametrize("use_batch", [False, True])
def test_buffer(int, use_batch):
    """The data buffer exposes a valid pointer whose raw bytes match."""
    arr = [0, 1, -1]
    table = pa.table({"a": pa.array(arr, type=int)})
    if use_batch:
        table = table.to_batches()[0]
    col = table.__dataframe__().get_column(0)
    dataBuf, dataDtype = col.get_buffers()["data"]

    assert dataBuf.bufsize > 0
    assert dataBuf.ptr != 0
    device, _ = dataBuf.__dlpack_device__()

    # 0 = DtypeKind.INT (see DtypeKind class in column.py)
    assert dataDtype[0] == 0

    # Only dereference the raw pointer when the buffer is on the CPU
    # (DLPack device type 1).
    if device == 1:
        bitwidth = dataDtype[1]
        ctype = {
            8: ctypes.c_int8,
            16: ctypes.c_int16,
            32: ctypes.c_int32,
            64: ctypes.c_int64,
        }[bitwidth]
        stride = bitwidth // 8

        for idx, truth in enumerate(arr):
            val = ctype.from_address(dataBuf.ptr + idx * stride).value
            assert val == truth, f"Buffer at index {idx} mismatch"
269
+
270
+
271
@pytest.mark.parametrize(
    "indices_type, bitwidth, f_string", [
        (pa.int8(), 8, "c"),
        (pa.int16(), 16, "s"),
        (pa.int32(), 32, "i"),
        (pa.int64(), 64, "l")
    ]
)
def test_categorical_dtype(indices_type, bitwidth, f_string):
    """Categorical columns report CATEGORICAL kind plus index width/format.

    The local previously named ``type`` was renamed to ``dict_type`` to
    stop shadowing the builtin; behavior is unchanged.
    """
    dict_type = pa.dictionary(indices_type, pa.string())
    arr = pa.array(["a", "b", None, "d"], dict_type)
    table = pa.table({'a': arr})

    col = table.__dataframe__().get_column(0)
    assert col.dtype[0] == 23  # <DtypeKind.CATEGORICAL: 23>
    assert col.dtype[1] == bitwidth
    assert col.dtype[2] == f_string
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/parquet/__init__.py ADDED
@@ -0,0 +1,24 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Licensed to the Apache Software Foundation (ASF) under one
2
+ # or more contributor license agreements. See the NOTICE file
3
+ # distributed with this work for additional information
4
+ # regarding copyright ownership. The ASF licenses this file
5
+ # to you under the Apache License, Version 2.0 (the
6
+ # "License"); you may not use this file except in compliance
7
+ # with the License. You may obtain a copy of the License at
8
+ #
9
+ # http://www.apache.org/licenses/LICENSE-2.0
10
+ #
11
+ # Unless required by applicable law or agreed to in writing,
12
+ # software distributed under the License is distributed on an
13
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ # KIND, either express or implied. See the License for the
15
+ # specific language governing permissions and limitations
16
+ # under the License.
17
+
18
+ import pytest
19
+
20
# Apply the ``parquet`` marker to every test collected from this package;
# deselect them with: pytest ... -m 'not parquet'
pytestmark = [
    pytest.mark.parquet,
]
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/parquet/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (253 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pyarrow/tests/parquet/__pycache__/common.cpython-310.pyc ADDED
Binary file (4.65 kB). View file