applied-ai-018 committed on
Commit a1e6eab · verified · 1 Parent(s): a605e33

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.
Files changed (50):
  1. ckpts/universal/global_step80/zero/19.mlp.dense_4h_to_h.weight/exp_avg.pt +3 -0
  2. ckpts/universal/global_step80/zero/19.mlp.dense_4h_to_h.weight/exp_avg_sq.pt +3 -0
  3. ckpts/universal/global_step80/zero/19.mlp.dense_4h_to_h.weight/fp32.pt +3 -0
  4. ckpts/universal/global_step80/zero/6.attention.dense.weight/fp32.pt +3 -0
  5. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/CMakeLists.txt +18 -0
  6. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/api.h +30 -0
  7. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/arrow_to_pandas.cc +2645 -0
  8. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/arrow_to_pandas.h +146 -0
  9. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/arrow_to_python_internal.h +49 -0
  10. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/async.h +60 -0
  11. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/benchmark.cc +38 -0
  12. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/benchmark.h +36 -0
  13. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/common.cc +246 -0
  14. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/common.h +458 -0
  15. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/csv.cc +62 -0
  16. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/csv.h +42 -0
  17. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/datetime.cc +663 -0
  18. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/datetime.h +231 -0
  19. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/decimal.cc +246 -0
  20. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/decimal.h +128 -0
  21. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/deserialize.cc +495 -0
  22. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/extension_type.cc +217 -0
  23. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/extension_type.h +85 -0
  24. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/filesystem.h +130 -0
  25. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/flight.cc +388 -0
  26. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/flight.h +350 -0
  27. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/gdb.cc +530 -0
  28. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/gdb.h +29 -0
  29. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/helpers.cc +472 -0
  30. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/inference.cc +745 -0
  31. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/inference.h +64 -0
  32. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/init.cc +24 -0
  33. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/init.h +26 -0
  34. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/io.cc +387 -0
  35. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/io.h +121 -0
  36. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/ipc.cc +133 -0
  37. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/ipc.h +72 -0
  38. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/iterators.h +194 -0
  39. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_convert.cc +551 -0
  40. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_internal.h +182 -0
  41. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_interop.h +103 -0
  42. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/parquet_encryption.cc +98 -0
  43. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/parquet_encryption.h +132 -0
  44. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pch.h +24 -0
  45. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow.cc +100 -0
  46. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow.h +89 -0
  47. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow_api.h +19 -0
  48. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow_lib.h +19 -0
  49. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/python_test.cc +895 -0
  50. venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/python_test.h +42 -0
ckpts/universal/global_step80/zero/19.mlp.dense_4h_to_h.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:5247983daf5cb154e2f46264d9275340e60efc19ba15ad6e9bc5e962cf1884c9
+ size 33555612
ckpts/universal/global_step80/zero/19.mlp.dense_4h_to_h.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5a892de5296152399b959d23ca217c2ba08b08f2bcf5dc35c2bb352dbd72e1f
+ size 33555627
ckpts/universal/global_step80/zero/19.mlp.dense_4h_to_h.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e2c5f6edcf47485a9d413b1caddcba93664a09425f47010c9735344698043288
+ size 33555533
ckpts/universal/global_step80/zero/6.attention.dense.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8b1e96ed1246db8201d29883058bdd689bc571dec86180d695783263ebf557e3
+ size 16778317
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/CMakeLists.txt ADDED
@@ -0,0 +1,18 @@
+ # Licensed to the Apache Software Foundation (ASF) under one
+ # or more contributor license agreements. See the NOTICE file
+ # distributed with this work for additional information
+ # regarding copyright ownership. The ASF licenses this file
+ # to you under the Apache License, Version 2.0 (the
+ # "License"); you may not use this file except in compliance
+ # with the License. You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing,
+ # software distributed under the License is distributed on an
+ # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ # KIND, either express or implied. See the License for the
+ # specific language governing permissions and limitations
+ # under the License.
+
+ arrow_install_all_headers("arrow/python")
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/api.h ADDED
@@ -0,0 +1,30 @@
+ // Licensed to the Apache Software Foundation (ASF) under one
+ // or more contributor license agreements. See the NOTICE file
+ // distributed with this work for additional information
+ // regarding copyright ownership. The ASF licenses this file
+ // to you under the Apache License, Version 2.0 (the
+ // "License"); you may not use this file except in compliance
+ // with the License. You may obtain a copy of the License at
+ //
+ // http://www.apache.org/licenses/LICENSE-2.0
+ //
+ // Unless required by applicable law or agreed to in writing,
+ // software distributed under the License is distributed on an
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ // KIND, either express or implied. See the License for the
+ // specific language governing permissions and limitations
+ // under the License.
+
+ #pragma once
+
+ #include "arrow/python/arrow_to_pandas.h"
+ #include "arrow/python/common.h"
+ #include "arrow/python/datetime.h"
+ #include "arrow/python/deserialize.h"
+ #include "arrow/python/helpers.h"
+ #include "arrow/python/inference.h"
+ #include "arrow/python/io.h"
+ #include "arrow/python/numpy_convert.h"
+ #include "arrow/python/numpy_to_arrow.h"
+ #include "arrow/python/python_to_arrow.h"
+ #include "arrow/python/serialize.h"
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/arrow_to_pandas.cc ADDED
@@ -0,0 +1,2645 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // Functions for pandas conversion via NumPy
19
+
20
+ #include "arrow/python/arrow_to_pandas.h"
21
+ #include "arrow/python/numpy_interop.h" // IWYU pragma: expand
22
+
23
+ #include <cmath>
24
+ #include <cstdint>
25
+ #include <iostream>
26
+ #include <memory>
27
+ #include <mutex>
28
+ #include <string>
29
+ #include <string_view>
30
+ #include <unordered_map>
31
+ #include <utility>
32
+ #include <vector>
33
+
34
+ #include "arrow/array.h"
35
+ #include "arrow/buffer.h"
36
+ #include "arrow/datum.h"
37
+ #include "arrow/status.h"
38
+ #include "arrow/table.h"
39
+ #include "arrow/type.h"
40
+ #include "arrow/type_traits.h"
41
+ #include "arrow/util/checked_cast.h"
42
+ #include "arrow/util/hashing.h"
43
+ #include "arrow/util/int_util.h"
44
+ #include "arrow/util/logging.h"
45
+ #include "arrow/util/macros.h"
46
+ #include "arrow/util/parallel.h"
47
+ #include "arrow/visit_type_inline.h"
48
+
49
+ #include "arrow/compute/api.h"
50
+
51
+ #include "arrow/python/arrow_to_python_internal.h"
52
+ #include "arrow/python/common.h"
53
+ #include "arrow/python/datetime.h"
54
+ #include "arrow/python/decimal.h"
55
+ #include "arrow/python/helpers.h"
56
+ #include "arrow/python/numpy_convert.h"
57
+ #include "arrow/python/numpy_internal.h"
58
+ #include "arrow/python/pyarrow.h"
59
+ #include "arrow/python/python_to_arrow.h"
60
+ #include "arrow/python/type_traits.h"
61
+
62
+ namespace arrow {
63
+
64
+ class MemoryPool;
65
+
66
+ using internal::checked_cast;
67
+ using internal::CheckIndexBounds;
68
+ using internal::OptionalParallelFor;
69
+
70
+ namespace py {
71
+ namespace {
72
+
73
+ // Fix options for conversion of an inner (child) array.
74
+ PandasOptions MakeInnerOptions(PandasOptions options) {
75
+ // Make sure conversion of inner dictionary arrays always returns an array,
76
+ // not a dict {'indices': array, 'dictionary': array, 'ordered': bool}
77
+ options.decode_dictionaries = true;
78
+ options.categorical_columns.clear();
79
+ options.strings_to_categorical = false;
80
+
81
+ // In ARROW-7723, we found as a result of ARROW-3789 that second
82
+ // through microsecond resolution tz-aware timestamps were being promoted to
83
+ // use the DATETIME_NANO_TZ conversion path, yielding a datetime64[ns] NumPy
84
+ // array in this function. PyArray_GETITEM returns datetime.datetime for
85
+ // units second through microsecond but PyLong for nanosecond (because
86
+ // datetime.datetime does not support nanoseconds).
87
+ // We force the object conversion to preserve the value of the timezone.
88
+ // Nanoseconds are returned as integers.
89
+ options.coerce_temporal_nanoseconds = false;
90
+
91
+ return options;
92
+ }
93
+
94
+ // ----------------------------------------------------------------------
95
+ // PyCapsule code for setting ndarray base to reference C++ object
96
+
97
+ struct ArrayCapsule {
98
+ std::shared_ptr<Array> array;
99
+ };
100
+
101
+ struct BufferCapsule {
102
+ std::shared_ptr<Buffer> buffer;
103
+ };
104
+
105
+ void ArrayCapsule_Destructor(PyObject* capsule) {
106
+ delete reinterpret_cast<ArrayCapsule*>(PyCapsule_GetPointer(capsule, "arrow::Array"));
107
+ }
108
+
109
+ void BufferCapsule_Destructor(PyObject* capsule) {
110
+ delete reinterpret_cast<BufferCapsule*>(PyCapsule_GetPointer(capsule, "arrow::Buffer"));
111
+ }
112
+
113
+ // ----------------------------------------------------------------------
114
+ // pandas 0.x DataFrame conversion internals
115
+
116
+ using internal::arrow_traits;
117
+ using internal::npy_traits;
118
+
119
+ template <typename T>
120
+ struct WrapBytes {};
121
+
122
+ template <>
123
+ struct WrapBytes<StringType> {
124
+ static inline PyObject* Wrap(const char* data, int64_t length) {
125
+ return PyUnicode_FromStringAndSize(data, length);
126
+ }
127
+ };
128
+
129
+ template <>
130
+ struct WrapBytes<LargeStringType> {
131
+ static inline PyObject* Wrap(const char* data, int64_t length) {
132
+ return PyUnicode_FromStringAndSize(data, length);
133
+ }
134
+ };
135
+
136
+ template <>
137
+ struct WrapBytes<StringViewType> {
138
+ static inline PyObject* Wrap(const char* data, int64_t length) {
139
+ return PyUnicode_FromStringAndSize(data, length);
140
+ }
141
+ };
142
+
143
+ template <>
144
+ struct WrapBytes<BinaryType> {
145
+ static inline PyObject* Wrap(const char* data, int64_t length) {
146
+ return PyBytes_FromStringAndSize(data, length);
147
+ }
148
+ };
149
+
150
+ template <>
151
+ struct WrapBytes<LargeBinaryType> {
152
+ static inline PyObject* Wrap(const char* data, int64_t length) {
153
+ return PyBytes_FromStringAndSize(data, length);
154
+ }
155
+ };
156
+
157
+ template <>
158
+ struct WrapBytes<BinaryViewType> {
159
+ static inline PyObject* Wrap(const char* data, int64_t length) {
160
+ return PyBytes_FromStringAndSize(data, length);
161
+ }
162
+ };
163
+
164
+ template <>
165
+ struct WrapBytes<FixedSizeBinaryType> {
166
+ static inline PyObject* Wrap(const char* data, int64_t length) {
167
+ return PyBytes_FromStringAndSize(data, length);
168
+ }
169
+ };
170
+
171
+ static inline bool ListTypeSupported(const DataType& type) {
172
+ switch (type.id()) {
173
+ case Type::BOOL:
174
+ case Type::UINT8:
175
+ case Type::INT8:
176
+ case Type::UINT16:
177
+ case Type::INT16:
178
+ case Type::UINT32:
179
+ case Type::INT32:
180
+ case Type::INT64:
181
+ case Type::UINT64:
182
+ case Type::HALF_FLOAT:
183
+ case Type::FLOAT:
184
+ case Type::DOUBLE:
185
+ case Type::DECIMAL128:
186
+ case Type::DECIMAL256:
187
+ case Type::BINARY:
188
+ case Type::LARGE_BINARY:
189
+ case Type::STRING:
190
+ case Type::LARGE_STRING:
191
+ case Type::DATE32:
192
+ case Type::DATE64:
193
+ case Type::STRUCT:
194
+ case Type::MAP:
195
+ case Type::TIME32:
196
+ case Type::TIME64:
197
+ case Type::TIMESTAMP:
198
+ case Type::DURATION:
199
+ case Type::DICTIONARY:
200
+ case Type::INTERVAL_MONTH_DAY_NANO:
201
+ case Type::NA: // empty list
202
+ // The above types are all supported.
203
+ return true;
204
+ case Type::FIXED_SIZE_LIST:
205
+ case Type::LIST:
206
+ case Type::LARGE_LIST:
207
+ case Type::LIST_VIEW:
208
+ case Type::LARGE_LIST_VIEW: {
209
+ const auto& list_type = checked_cast<const BaseListType&>(type);
210
+ return ListTypeSupported(*list_type.value_type());
211
+ }
212
+ case Type::EXTENSION: {
213
+ const auto& ext = checked_cast<const ExtensionType&>(*type.GetSharedPtr());
214
+ return ListTypeSupported(*(ext.storage_type()));
215
+ }
216
+ default:
217
+ break;
218
+ }
219
+ return false;
220
+ }
221
+
222
+ Status CapsulizeArray(const std::shared_ptr<Array>& arr, PyObject** out) {
223
+ auto capsule = new ArrayCapsule{{arr}};
224
+ *out = PyCapsule_New(reinterpret_cast<void*>(capsule), "arrow::Array",
225
+ &ArrayCapsule_Destructor);
226
+ if (*out == nullptr) {
227
+ delete capsule;
228
+ RETURN_IF_PYERROR();
229
+ }
230
+ return Status::OK();
231
+ }
232
+
233
+ Status CapsulizeBuffer(const std::shared_ptr<Buffer>& buffer, PyObject** out) {
234
+ auto capsule = new BufferCapsule{{buffer}};
235
+ *out = PyCapsule_New(reinterpret_cast<void*>(capsule), "arrow::Buffer",
236
+ &BufferCapsule_Destructor);
237
+ if (*out == nullptr) {
238
+ delete capsule;
239
+ RETURN_IF_PYERROR();
240
+ }
241
+ return Status::OK();
242
+ }
243
+
244
+ Status SetNdarrayBase(PyArrayObject* arr, PyObject* base) {
245
+ if (PyArray_SetBaseObject(arr, base) == -1) {
246
+ // Error occurred, trust that SetBaseObject sets the error state
247
+ Py_XDECREF(base);
248
+ RETURN_IF_PYERROR();
249
+ }
250
+ return Status::OK();
251
+ }
252
+
253
+ Status SetBufferBase(PyArrayObject* arr, const std::shared_ptr<Buffer>& buffer) {
254
+ PyObject* base;
255
+ RETURN_NOT_OK(CapsulizeBuffer(buffer, &base));
256
+ return SetNdarrayBase(arr, base);
257
+ }
258
+
259
+ inline void set_numpy_metadata(int type, const DataType* datatype, PyArray_Descr* out) {
260
+ auto metadata =
261
+ reinterpret_cast<PyArray_DatetimeDTypeMetaData*>(PyDataType_C_METADATA(out));
262
+ if (type == NPY_DATETIME) {
263
+ if (datatype->id() == Type::TIMESTAMP) {
264
+ const auto& timestamp_type = checked_cast<const TimestampType&>(*datatype);
265
+ metadata->meta.base = internal::NumPyFrequency(timestamp_type.unit());
266
+ } else {
267
+ DCHECK(false) << "NPY_DATETIME views only supported for Arrow TIMESTAMP types";
268
+ }
269
+ } else if (type == NPY_TIMEDELTA) {
270
+ DCHECK_EQ(datatype->id(), Type::DURATION);
271
+ const auto& duration_type = checked_cast<const DurationType&>(*datatype);
272
+ metadata->meta.base = internal::NumPyFrequency(duration_type.unit());
273
+ }
274
+ }
275
+
276
+ Status PyArray_NewFromPool(int nd, npy_intp* dims, PyArray_Descr* descr, MemoryPool* pool,
277
+ PyObject** out) {
278
+ // ARROW-6570: Allocate memory from MemoryPool for a couple reasons
279
+ //
280
+ // * Track allocations
281
+ // * Get better performance through custom allocators
282
+ int64_t total_size = PyDataType_ELSIZE(descr);
283
+ for (int i = 0; i < nd; ++i) {
284
+ total_size *= dims[i];
285
+ }
286
+
287
+ ARROW_ASSIGN_OR_RAISE(auto buffer, AllocateBuffer(total_size, pool));
288
+ *out = PyArray_NewFromDescr(&PyArray_Type, descr, nd, dims,
289
+ /*strides=*/nullptr,
290
+ /*data=*/buffer->mutable_data(),
291
+ /*flags=*/NPY_ARRAY_CARRAY | NPY_ARRAY_WRITEABLE,
292
+ /*obj=*/nullptr);
293
+ if (*out == nullptr) {
294
+ RETURN_IF_PYERROR();
295
+ // Trust that error set if NULL returned
296
+ }
297
+ return SetBufferBase(reinterpret_cast<PyArrayObject*>(*out), std::move(buffer));
298
+ }
299
+
300
+ template <typename T = void>
301
+ inline const T* GetPrimitiveValues(const Array& arr) {
302
+ if (arr.length() == 0) {
303
+ return nullptr;
304
+ }
305
+ const int elsize = arr.type()->byte_width();
306
+ const auto& prim_arr = checked_cast<const PrimitiveArray&>(arr);
307
+ return reinterpret_cast<const T*>(prim_arr.values()->data() + arr.offset() * elsize);
308
+ }
309
+
310
+ Status MakeNumPyView(std::shared_ptr<Array> arr, PyObject* py_ref, int npy_type, int ndim,
311
+ npy_intp* dims, PyObject** out) {
312
+ PyAcquireGIL lock;
313
+
314
+ PyArray_Descr* descr = internal::GetSafeNumPyDtype(npy_type);
315
+ set_numpy_metadata(npy_type, arr->type().get(), descr);
316
+ PyObject* result = PyArray_NewFromDescr(
317
+ &PyArray_Type, descr, ndim, dims, /*strides=*/nullptr,
318
+ const_cast<void*>(GetPrimitiveValues(*arr)), /*flags=*/0, nullptr);
319
+ PyArrayObject* np_arr = reinterpret_cast<PyArrayObject*>(result);
320
+ if (np_arr == nullptr) {
321
+ // Error occurred, trust that error set
322
+ return Status::OK();
323
+ }
324
+
325
+ PyObject* base;
326
+ if (py_ref == nullptr) {
327
+ // Capsule will be owned by the ndarray, no incref necessary. See
328
+ // ARROW-1973
329
+ RETURN_NOT_OK(CapsulizeArray(arr, &base));
330
+ } else {
331
+ Py_INCREF(py_ref);
332
+ base = py_ref;
333
+ }
334
+ RETURN_NOT_OK(SetNdarrayBase(np_arr, base));
335
+
336
+ // Do not allow Arrow data to be mutated
337
+ PyArray_CLEARFLAGS(np_arr, NPY_ARRAY_WRITEABLE);
338
+ *out = result;
339
+ return Status::OK();
340
+ }
341
+
342
+ class PandasWriter {
343
+ public:
344
+ enum type {
345
+ OBJECT,
346
+ UINT8,
347
+ INT8,
348
+ UINT16,
349
+ INT16,
350
+ UINT32,
351
+ INT32,
352
+ UINT64,
353
+ INT64,
354
+ HALF_FLOAT,
355
+ FLOAT,
356
+ DOUBLE,
357
+ BOOL,
358
+ DATETIME_DAY,
359
+ DATETIME_SECOND,
360
+ DATETIME_MILLI,
361
+ DATETIME_MICRO,
362
+ DATETIME_NANO,
363
+ DATETIME_SECOND_TZ,
364
+ DATETIME_MILLI_TZ,
365
+ DATETIME_MICRO_TZ,
366
+ DATETIME_NANO_TZ,
367
+ TIMEDELTA_SECOND,
368
+ TIMEDELTA_MILLI,
369
+ TIMEDELTA_MICRO,
370
+ TIMEDELTA_NANO,
371
+ CATEGORICAL,
372
+ EXTENSION
373
+ };
374
+
375
+ PandasWriter(const PandasOptions& options, int64_t num_rows, int num_columns)
376
+ : options_(options), num_rows_(num_rows), num_columns_(num_columns) {
377
+ PyAcquireGIL lock;
378
+ internal::InitPandasStaticData();
379
+ }
380
+ virtual ~PandasWriter() {}
381
+
382
+ void SetBlockData(PyObject* arr) {
383
+ block_arr_.reset(arr);
384
+ block_data_ =
385
+ reinterpret_cast<uint8_t*>(PyArray_DATA(reinterpret_cast<PyArrayObject*>(arr)));
386
+ }
387
+
388
+ /// \brief Either copy or wrap single array to create pandas-compatible array
389
+ /// for Series or DataFrame. num_columns_ can only be 1. Will try to zero
390
+ /// copy if possible (or error if not possible and zero_copy_only=True)
391
+ virtual Status TransferSingle(std::shared_ptr<ChunkedArray> data, PyObject* py_ref) = 0;
392
+
393
+ /// \brief Copy ChunkedArray into a multi-column block
394
+ virtual Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) = 0;
395
+
396
+ Status EnsurePlacementAllocated() {
397
+ std::lock_guard<std::mutex> guard(allocation_lock_);
398
+ if (placement_data_ != nullptr) {
399
+ return Status::OK();
400
+ }
401
+ PyAcquireGIL lock;
402
+ npy_intp placement_dims[1] = {num_columns_};
403
+ PyObject* placement_arr = PyArray_SimpleNew(1, placement_dims, NPY_INT64);
404
+ RETURN_IF_PYERROR();
405
+ placement_arr_.reset(placement_arr);
406
+ placement_data_ = reinterpret_cast<int64_t*>(
407
+ PyArray_DATA(reinterpret_cast<PyArrayObject*>(placement_arr)));
408
+ return Status::OK();
409
+ }
410
+
411
+ Status EnsureAllocated() {
412
+ std::lock_guard<std::mutex> guard(allocation_lock_);
413
+ if (block_data_ != nullptr) {
414
+ return Status::OK();
415
+ }
416
+ RETURN_NOT_OK(Allocate());
417
+ return Status::OK();
418
+ }
419
+
420
+ virtual bool CanZeroCopy(const ChunkedArray& data) const { return false; }
421
+
422
+ virtual Status Write(std::shared_ptr<ChunkedArray> data, int64_t abs_placement,
423
+ int64_t rel_placement) {
424
+ RETURN_NOT_OK(EnsurePlacementAllocated());
425
+ if (num_columns_ == 1 && options_.allow_zero_copy_blocks) {
426
+ RETURN_NOT_OK(TransferSingle(data, /*py_ref=*/nullptr));
427
+ } else {
428
+ RETURN_NOT_OK(
429
+ CheckNoZeroCopy("Cannot do zero copy conversion into "
430
+ "multi-column DataFrame block"));
431
+ RETURN_NOT_OK(EnsureAllocated());
432
+ RETURN_NOT_OK(CopyInto(data, rel_placement));
433
+ }
434
+ placement_data_[rel_placement] = abs_placement;
435
+ return Status::OK();
436
+ }
437
+
438
+ virtual Status GetDataFrameResult(PyObject** out) {
439
+ PyObject* result = PyDict_New();
440
+ RETURN_IF_PYERROR();
441
+
442
+ PyObject* block;
443
+ RETURN_NOT_OK(GetResultBlock(&block));
444
+
445
+ PyDict_SetItemString(result, "block", block);
446
+ PyDict_SetItemString(result, "placement", placement_arr_.obj());
447
+
448
+ RETURN_NOT_OK(AddResultMetadata(result));
449
+ *out = result;
450
+ return Status::OK();
451
+ }
452
+
453
+ // Caller steals the reference to this object
454
+ virtual Status GetSeriesResult(PyObject** out) {
455
+ RETURN_NOT_OK(MakeBlock1D());
456
+ // Caller owns the object now
457
+ *out = block_arr_.detach();
458
+ return Status::OK();
459
+ }
460
+
461
+ protected:
462
+ virtual Status AddResultMetadata(PyObject* result) { return Status::OK(); }
463
+
464
+ Status MakeBlock1D() {
465
+ // For Series or for certain DataFrame block types, we need to shape to a
466
+ // 1D array when there is only one column
467
+ PyAcquireGIL lock;
468
+
469
+ DCHECK_EQ(1, num_columns_);
470
+
471
+ npy_intp new_dims[1] = {static_cast<npy_intp>(num_rows_)};
472
+ PyArray_Dims dims;
473
+ dims.ptr = new_dims;
474
+ dims.len = 1;
475
+
476
+ PyObject* reshaped = PyArray_Newshape(
477
+ reinterpret_cast<PyArrayObject*>(block_arr_.obj()), &dims, NPY_ANYORDER);
478
+ RETURN_IF_PYERROR();
479
+
480
+ // ARROW-8801: Here a PyArrayObject is created that is not being managed by
481
+ // any OwnedRef object. This object is then put in the resulting object
482
+ // with PyDict_SetItemString, which increments the reference count, so a
483
+ // memory leak ensues. There are several ways to fix the memory leak but a
484
+ // simple one is to put the reshaped 1D block array in this OwnedRefNoGIL
485
+ // so it will be correctly decref'd when this class is destructed.
486
+ block_arr_.reset(reshaped);
487
+ return Status::OK();
488
+ }
489
+
490
+ virtual Status GetResultBlock(PyObject** out) {
491
+ *out = block_arr_.obj();
492
+ return Status::OK();
493
+ }
494
+
495
+ Status CheckNoZeroCopy(const std::string& message) {
496
+ if (options_.zero_copy_only) {
497
+ return Status::Invalid(message);
498
+ }
499
+ return Status::OK();
500
+ }
501
+
502
+ Status CheckNotZeroCopyOnly(const ChunkedArray& data) {
503
+ if (options_.zero_copy_only) {
504
+ return Status::Invalid("Needed to copy ", data.num_chunks(), " chunks with ",
505
+ data.null_count(), " nulls, but zero_copy_only was True");
506
+ }
507
+ return Status::OK();
508
+ }
509
+
510
+ virtual Status Allocate() {
511
+ return Status::NotImplemented("Override Allocate in subclasses");
512
+ }
513
+
514
+ Status AllocateNDArray(int npy_type, int ndim = 2) {
515
+ PyAcquireGIL lock;
516
+
517
+ PyObject* block_arr = nullptr;
518
+ npy_intp block_dims[2] = {0, 0};
519
+
520
+ if (ndim == 2) {
521
+ block_dims[0] = num_columns_;
522
+ block_dims[1] = num_rows_;
523
+ } else {
524
+ block_dims[0] = num_rows_;
525
+ }
526
+ PyArray_Descr* descr = internal::GetSafeNumPyDtype(npy_type);
527
+ if (PyDataType_REFCHK(descr)) {
528
+ // ARROW-6876: if the array has refcounted items, let Numpy
529
+ // own the array memory so as to decref elements on array destruction
530
+ block_arr = PyArray_SimpleNewFromDescr(ndim, block_dims, descr);
531
+ RETURN_IF_PYERROR();
532
+ } else {
533
+ RETURN_NOT_OK(
534
+ PyArray_NewFromPool(ndim, block_dims, descr, options_.pool, &block_arr));
535
+ }
536
+
537
+ SetBlockData(block_arr);
538
+ return Status::OK();
539
+ }
540
+
541
+ void SetDatetimeUnit(NPY_DATETIMEUNIT unit) {
542
+ PyAcquireGIL lock;
543
+ auto date_dtype =
544
+ reinterpret_cast<PyArray_DatetimeDTypeMetaData*>(PyDataType_C_METADATA(
545
+ PyArray_DESCR(reinterpret_cast<PyArrayObject*>(block_arr_.obj()))));
546
+ date_dtype->meta.base = unit;
547
+ }
548
+
549
+ PandasOptions options_;
550
+
551
+ std::mutex allocation_lock_;
552
+
553
+ int64_t num_rows_;
554
+ int num_columns_;
555
+
556
+ OwnedRefNoGIL block_arr_;
557
+ uint8_t* block_data_ = nullptr;
558
+
559
+ // ndarray<int32>
560
+ OwnedRefNoGIL placement_arr_;
561
+ int64_t* placement_data_ = nullptr;
562
+
563
+ private:
564
+ ARROW_DISALLOW_COPY_AND_ASSIGN(PandasWriter);
565
+ };
566
+
567
+ template <typename InType, typename OutType>
568
+ inline void ConvertIntegerWithNulls(const PandasOptions& options,
569
+ const ChunkedArray& data, OutType* out_values) {
570
+ for (int c = 0; c < data.num_chunks(); c++) {
571
+ const auto& arr = *data.chunk(c);
572
+ const InType* in_values = GetPrimitiveValues<InType>(arr);
573
+ // Upcast to double, set NaN as appropriate
574
+
575
+ for (int i = 0; i < arr.length(); ++i) {
576
+ *out_values++ =
577
+ arr.IsNull(i) ? static_cast<OutType>(NAN) : static_cast<OutType>(in_values[i]);
578
+ }
579
+ }
580
+ }
581
+
582
+ template <typename T>
583
+ inline void ConvertIntegerNoNullsSameType(const PandasOptions& options,
584
+ const ChunkedArray& data, T* out_values) {
585
+ for (int c = 0; c < data.num_chunks(); c++) {
586
+ const auto& arr = *data.chunk(c);
587
+ if (arr.length() > 0) {
588
+ const T* in_values = GetPrimitiveValues<T>(arr);
589
+ memcpy(out_values, in_values, sizeof(T) * arr.length());
590
+ out_values += arr.length();
591
+ }
592
+ }
593
+ }
594
+
595
+ template <typename InType, typename OutType>
596
+ inline void ConvertIntegerNoNullsCast(const PandasOptions& options,
597
+ const ChunkedArray& data, OutType* out_values) {
598
+ for (int c = 0; c < data.num_chunks(); c++) {
599
+ const auto& arr = *data.chunk(c);
600
+ const InType* in_values = GetPrimitiveValues<InType>(arr);
601
+ for (int64_t i = 0; i < arr.length(); ++i) {
602
+ *out_values = in_values[i];
603
+ }
604
+ }
605
+ }
606
+
607
+ template <typename T, typename Enable = void>
608
+ struct MemoizationTraits {
609
+ using Scalar = typename T::c_type;
610
+ };
611
+
612
+ template <typename T>
613
+ struct MemoizationTraits<T, enable_if_has_string_view<T>> {
614
+ // For binary, we memoize string_view as a scalar value to avoid having to
615
+ // unnecessarily copy the memory into the memo table data structure
616
+ using Scalar = std::string_view;
617
+ };
618
+
619
+ // Generic Array -> PyObject** converter that handles object deduplication, if
620
+ // requested
621
+ template <typename Type, typename WrapFunction>
622
+ inline Status ConvertAsPyObjects(const PandasOptions& options, const ChunkedArray& data,
623
+ WrapFunction&& wrap_func, PyObject** out_values) {
624
+ using ArrayType = typename TypeTraits<Type>::ArrayType;
625
+ using Scalar = typename MemoizationTraits<Type>::Scalar;
626
+
627
+ auto convert_chunks = [&](auto&& wrap_func) -> Status {
628
+ for (int c = 0; c < data.num_chunks(); c++) {
629
+ const auto& arr = arrow::internal::checked_cast<const ArrayType&>(*data.chunk(c));
630
+ RETURN_NOT_OK(internal::WriteArrayObjects(arr, wrap_func, out_values));
631
+ out_values += arr.length();
632
+ }
633
+ return Status::OK();
634
+ };
635
+
636
+ if (options.deduplicate_objects) {
637
+ // GH-40316: only allocate a memo table if deduplication is enabled.
638
+ ::arrow::internal::ScalarMemoTable<Scalar> memo_table(options.pool);
639
+ std::vector<PyObject*> unique_values;
640
+ int32_t memo_size = 0;
641
+
642
+ auto WrapMemoized = [&](const Scalar& value, PyObject** out_values) {
643
+ int32_t memo_index;
644
+ RETURN_NOT_OK(memo_table.GetOrInsert(value, &memo_index));
645
+ if (memo_index == memo_size) {
646
+ // New entry
647
+ RETURN_NOT_OK(wrap_func(value, out_values));
648
+ unique_values.push_back(*out_values);
649
+ ++memo_size;
650
+ } else {
651
+ // Duplicate entry
652
+ Py_INCREF(unique_values[memo_index]);
653
+ *out_values = unique_values[memo_index];
654
+ }
655
+ return Status::OK();
656
+ };
657
+ return convert_chunks(std::move(WrapMemoized));
658
+ } else {
659
+ return convert_chunks(std::forward<WrapFunction>(wrap_func));
660
+ }
661
+ }
662
+
663
+ Status ConvertStruct(PandasOptions options, const ChunkedArray& data,
664
+ PyObject** out_values) {
665
+ if (data.num_chunks() == 0) {
666
+ return Status::OK();
667
+ }
668
+ // ChunkedArray has at least one chunk
669
+ auto arr = checked_cast<const StructArray*>(data.chunk(0).get());
670
+ // Use it to cache the struct type and number of fields for all chunks
671
+ int32_t num_fields = arr->num_fields();
672
+ auto array_type = arr->type();
673
+ std::vector<OwnedRef> fields_data(num_fields * data.num_chunks());
674
+ OwnedRef dict_item;
675
+
676
+ // See notes in MakeInnerOptions.
677
+ options = MakeInnerOptions(std::move(options));
678
+ // Don't blindly convert because timestamps in lists are handled differently.
679
+ options.timestamp_as_object = true;
680
+
681
+ for (int c = 0; c < data.num_chunks(); c++) {
682
+ auto fields_data_offset = c * num_fields;
683
+ auto arr = checked_cast<const StructArray*>(data.chunk(c).get());
684
+ // Convert the struct arrays first
685
+ for (int32_t i = 0; i < num_fields; i++) {
686
+ auto field = arr->field(static_cast<int>(i));
687
+ // In case the field is an extension array, use .storage() to convert to Pandas
688
+ if (field->type()->id() == Type::EXTENSION) {
689
+ const ExtensionArray& arr_ext = checked_cast<const ExtensionArray&>(*field);
690
+ field = arr_ext.storage();
691
+ }
692
+ RETURN_NOT_OK(ConvertArrayToPandas(options, field, nullptr,
693
+ fields_data[i + fields_data_offset].ref()));
694
+ DCHECK(PyArray_Check(fields_data[i + fields_data_offset].obj()));
695
+ }
696
+
697
+ // Construct a dictionary for each row
698
+ const bool has_nulls = data.null_count() > 0;
699
+ for (int64_t i = 0; i < arr->length(); ++i) {
700
+ if (has_nulls && arr->IsNull(i)) {
701
+ Py_INCREF(Py_None);
702
+ *out_values = Py_None;
703
+ } else {
704
+ // Build the new dict object for the row
705
+ dict_item.reset(PyDict_New());
706
+ RETURN_IF_PYERROR();
707
+ for (int32_t field_idx = 0; field_idx < num_fields; ++field_idx) {
708
+ OwnedRef field_value;
709
+ auto name = array_type->field(static_cast<int>(field_idx))->name();
710
+ if (!arr->field(static_cast<int>(field_idx))->IsNull(i)) {
711
+ // Value exists in child array, obtain it
712
+ auto array = reinterpret_cast<PyArrayObject*>(
713
+ fields_data[field_idx + fields_data_offset].obj());
714
+ auto ptr = reinterpret_cast<const char*>(PyArray_GETPTR1(array, i));
715
+ field_value.reset(PyArray_GETITEM(array, ptr));
716
+ RETURN_IF_PYERROR();
717
+ } else {
718
+ // Translate the Null to a None
719
+ Py_INCREF(Py_None);
720
+ field_value.reset(Py_None);
721
+ }
722
+ // PyDict_SetItemString increments reference count
723
+ auto setitem_result =
724
+ PyDict_SetItemString(dict_item.obj(), name.c_str(), field_value.obj());
725
+ RETURN_IF_PYERROR();
726
+ DCHECK_EQ(setitem_result, 0);
727
+ }
728
+ *out_values = dict_item.obj();
729
+ // Grant ownership to the resulting array
730
+ Py_INCREF(*out_values);
731
+ }
732
+ ++out_values;
733
+ }
734
+ }
735
+ return Status::OK();
736
+ }
737
+
738
+ Status DecodeDictionaries(MemoryPool* pool, const std::shared_ptr<DataType>& dense_type,
739
+ ArrayVector* arrays) {
740
+ compute::ExecContext ctx(pool);
741
+ compute::CastOptions options;
742
+ for (size_t i = 0; i < arrays->size(); ++i) {
743
+ ARROW_ASSIGN_OR_RAISE((*arrays)[i],
744
+ compute::Cast(*(*arrays)[i], dense_type, options, &ctx));
745
+ }
746
+ return Status::OK();
747
+ }
748
+
749
+ Status DecodeDictionaries(MemoryPool* pool, const std::shared_ptr<DataType>& dense_type,
750
+ std::shared_ptr<ChunkedArray>* array) {
751
+ auto chunks = (*array)->chunks();
752
+ RETURN_NOT_OK(DecodeDictionaries(pool, dense_type, &chunks));
753
+ *array = std::make_shared<ChunkedArray>(std::move(chunks), dense_type);
754
+ return Status::OK();
755
+ }
756
+
757
+ template <typename T>
758
+ enable_if_list_like<T, Status> ConvertListsLike(PandasOptions options,
759
+ const ChunkedArray& data,
760
+ PyObject** out_values) {
761
+ using ListArrayT = typename TypeTraits<T>::ArrayType;
762
+ // Get column of underlying value arrays
763
+ ArrayVector value_arrays;
764
+ for (int c = 0; c < data.num_chunks(); c++) {
765
+ const auto& arr = checked_cast<const ListArrayT&>(*data.chunk(c));
766
+ // values() does not account for offsets, so we need to slice into it.
767
+ // We can't use Flatten(), because it removes the values behind a null list
768
+ // value, and that makes the offsets into original list values and our
769
+ // flattened_values array different.
770
+ std::shared_ptr<Array> flattened_values = arr.values()->Slice(
771
+ arr.value_offset(0), arr.value_offset(arr.length()) - arr.value_offset(0));
772
+ if (arr.value_type()->id() == Type::EXTENSION) {
773
+ const auto& arr_ext = checked_cast<const ExtensionArray&>(*flattened_values);
774
+ value_arrays.emplace_back(arr_ext.storage());
775
+ } else {
776
+ value_arrays.emplace_back(flattened_values);
777
+ }
778
+ }
779
+
780
+ using ListArrayType = typename ListArrayT::TypeClass;
781
+ const auto& list_type = checked_cast<const ListArrayType&>(*data.type());
782
+ auto value_type = list_type.value_type();
783
+ if (value_type->id() == Type::EXTENSION) {
784
+ value_type = checked_cast<const ExtensionType&>(*value_type).storage_type();
785
+ }
786
+
787
+ auto flat_column = std::make_shared<ChunkedArray>(value_arrays, value_type);
788
+
789
+ options = MakeInnerOptions(std::move(options));
790
+
791
+ OwnedRefNoGIL owned_numpy_array;
792
+ RETURN_NOT_OK(ConvertChunkedArrayToPandas(options, flat_column, nullptr,
793
+ owned_numpy_array.ref()));
794
+ PyObject* numpy_array = owned_numpy_array.obj();
795
+ DCHECK(PyArray_Check(numpy_array));
796
+
797
+ int64_t chunk_offset = 0;
798
+ for (int c = 0; c < data.num_chunks(); c++) {
799
+ const auto& arr = checked_cast<const ListArrayT&>(*data.chunk(c));
800
+ const bool has_nulls = data.null_count() > 0;
801
+ for (int64_t i = 0; i < arr.length(); ++i) {
802
+ if (has_nulls && arr.IsNull(i)) {
803
+ Py_INCREF(Py_None);
804
+ *out_values = Py_None;
805
+ } else {
806
+ // Need to subtract value_offset(0) since the original chunk might be a slice
807
+ // into another array.
808
+ OwnedRef start(PyLong_FromLongLong(arr.value_offset(i) + chunk_offset -
809
+ arr.value_offset(0)));
810
+ OwnedRef end(PyLong_FromLongLong(arr.value_offset(i + 1) + chunk_offset -
811
+ arr.value_offset(0)));
812
+ OwnedRef slice(PySlice_New(start.obj(), end.obj(), nullptr));
813
+
814
+ if (ARROW_PREDICT_FALSE(slice.obj() == nullptr)) {
815
+ // Fall out of loop, will return from RETURN_IF_PYERROR
816
+ break;
817
+ }
818
+ *out_values = PyObject_GetItem(numpy_array, slice.obj());
819
+
820
+ if (*out_values == nullptr) {
821
+ // Fall out of loop, will return from RETURN_IF_PYERROR
822
+ break;
823
+ }
824
+ }
825
+ ++out_values;
826
+ }
827
+ RETURN_IF_PYERROR();
828
+
829
+ chunk_offset += arr.value_offset(arr.length()) - arr.value_offset(0);
830
+ }
831
+
832
+ return Status::OK();
833
+ }
834
+
835
+ // TODO GH-40579: optimize ListView conversion to avoid unnecessary copies
836
+ template <typename T>
837
+ enable_if_list_view<T, Status> ConvertListsLike(PandasOptions options,
838
+ const ChunkedArray& data,
839
+ PyObject** out_values) {
840
+ using ListViewArrayType = typename TypeTraits<T>::ArrayType;
841
+ using NonViewType =
842
+ std::conditional_t<T::type_id == Type::LIST_VIEW, ListType, LargeListType>;
843
+ using NonViewClass = typename TypeTraits<NonViewType>::ArrayType;
844
+ ArrayVector list_arrays;
845
+ for (int c = 0; c < data.num_chunks(); c++) {
846
+ const auto& arr = checked_cast<const ListViewArrayType&>(*data.chunk(c));
847
+ ARROW_ASSIGN_OR_RAISE(auto non_view_array,
848
+ NonViewClass::FromListView(arr, options.pool));
849
+ list_arrays.emplace_back(non_view_array);
850
+ }
851
+ auto chunked_array = std::make_shared<ChunkedArray>(list_arrays);
852
+ return ConvertListsLike<NonViewType>(options, *chunked_array, out_values);
853
+ }
854
+
855
+ template <typename F1, typename F2, typename F3>
856
+ Status ConvertMapHelper(F1 resetRow, F2 addPairToRow, F3 stealRow,
857
+ const ChunkedArray& data, PyArrayObject* py_keys,
858
+ PyArrayObject* py_items,
859
+ // needed for null checks in items
860
+ const std::vector<std::shared_ptr<Array>> item_arrays,
861
+ PyObject** out_values) {
862
+ OwnedRef key_value;
863
+ OwnedRef item_value;
864
+
865
+ int64_t chunk_offset = 0;
866
+ for (int c = 0; c < data.num_chunks(); ++c) {
867
+ const auto& arr = checked_cast<const MapArray&>(*data.chunk(c));
868
+ const bool has_nulls = data.null_count() > 0;
869
+
870
+ // Make a list of key/item pairs for each row in array
871
+ for (int64_t i = 0; i < arr.length(); ++i) {
872
+ if (has_nulls && arr.IsNull(i)) {
873
+ Py_INCREF(Py_None);
874
+ *out_values = Py_None;
875
+ } else {
876
+ int64_t entry_offset = arr.value_offset(i);
877
+ int64_t num_pairs = arr.value_offset(i + 1) - entry_offset;
878
+
879
+ // Build the new list object for the row of Python pairs
880
+ RETURN_NOT_OK(resetRow(num_pairs));
881
+
882
+ // Add each key/item pair in the row
883
+ for (int64_t j = 0; j < num_pairs; ++j) {
884
+ // Get key value, key is non-nullable for a valid row
885
+ auto ptr_key = reinterpret_cast<const char*>(
886
+ PyArray_GETPTR1(py_keys, chunk_offset + entry_offset + j));
887
+ key_value.reset(PyArray_GETITEM(py_keys, ptr_key));
888
+ RETURN_IF_PYERROR();
889
+
890
+ if (item_arrays[c]->IsNull(entry_offset + j)) {
891
+ // Translate the Null to a None
892
+ Py_INCREF(Py_None);
893
+ item_value.reset(Py_None);
894
+ } else {
895
+ // Get valid value from item array
896
+ auto ptr_item = reinterpret_cast<const char*>(
897
+ PyArray_GETPTR1(py_items, chunk_offset + entry_offset + j));
898
+ item_value.reset(PyArray_GETITEM(py_items, ptr_item));
899
+ RETURN_IF_PYERROR();
900
+ }
901
+
902
+ // Add the key/item pair to the row
903
+ RETURN_NOT_OK(addPairToRow(j, key_value, item_value));
904
+ }
905
+
906
+ // Pass ownership to the resulting array
907
+ *out_values = stealRow();
908
+ }
909
+ ++out_values;
910
+ }
911
+ RETURN_IF_PYERROR();
912
+
913
+ chunk_offset += arr.values()->length();
914
+ }
915
+
916
+ return Status::OK();
917
+ }
918
+
919
+ // A more helpful error message around TypeErrors that may stem from unhashable keys
920
+ Status CheckMapAsPydictsTypeError() {
921
+ if (ARROW_PREDICT_TRUE(!PyErr_Occurred())) {
922
+ return Status::OK();
923
+ }
924
+ if (PyErr_ExceptionMatches(PyExc_TypeError)) {
925
+ // Modify the error string directly, so it is re-raised
926
+ // with our additional info.
927
+ //
928
+ // There are not many interesting things happening when this
929
+ // is hit. This is intended to only be called directly after
930
+ // PyDict_SetItem, where a finite set of errors could occur.
931
+ PyObject *type, *value, *traceback;
932
+ PyErr_Fetch(&type, &value, &traceback);
933
+ std::string message;
934
+ RETURN_NOT_OK(internal::PyObject_StdStringStr(value, &message));
935
+ message +=
936
+ ". If keys are not hashable, then you must use the option "
937
+ "[maps_as_pydicts=None (default)]";
938
+
939
+ // resets the error
940
+ PyErr_SetString(PyExc_TypeError, message.c_str());
941
+ }
942
+ return ConvertPyError();
943
+ }
944
+
945
+ Status CheckForDuplicateKeys(bool error_on_duplicate_keys, Py_ssize_t total_dict_len,
946
+ Py_ssize_t total_raw_len) {
947
+ if (total_dict_len < total_raw_len) {
948
+ const char* message =
949
+ "[maps_as_pydicts] "
950
+ "After conversion of Arrow maps to pydicts, "
951
+ "detected data loss due to duplicate keys. "
952
+ "Original input length is [%lld], total converted pydict length is [%lld].";
953
+ std::array<char, 256> buf;
954
+ std::snprintf(buf.data(), buf.size(), message, total_raw_len, total_dict_len);
955
+
956
+ if (error_on_duplicate_keys) {
957
+ return Status::UnknownError(buf.data());
958
+ } else {
959
+ ARROW_LOG(WARNING) << buf.data();
960
+ }
961
+ }
962
+ return Status::OK();
963
+ }
964
+
965
+ Status ConvertMap(PandasOptions options, const ChunkedArray& data,
966
+ PyObject** out_values) {
967
+ // Get columns of underlying key/item arrays
968
+ std::vector<std::shared_ptr<Array>> key_arrays;
969
+ std::vector<std::shared_ptr<Array>> item_arrays;
970
+ for (int c = 0; c < data.num_chunks(); ++c) {
971
+ const auto& map_arr = checked_cast<const MapArray&>(*data.chunk(c));
972
+ key_arrays.emplace_back(map_arr.keys());
973
+ item_arrays.emplace_back(map_arr.items());
974
+ }
975
+
976
+ const auto& map_type = checked_cast<const MapType&>(*data.type());
977
+ auto key_type = map_type.key_type();
978
+ auto item_type = map_type.item_type();
979
+
980
+ // ARROW-6899: Convert dictionary-encoded children to dense instead of
981
+ // failing below. A more efficient conversion than this could be done later
982
+ if (key_type->id() == Type::DICTIONARY) {
983
+ auto dense_type = checked_cast<const DictionaryType&>(*key_type).value_type();
984
+ RETURN_NOT_OK(DecodeDictionaries(options.pool, dense_type, &key_arrays));
985
+ key_type = dense_type;
986
+ }
987
+ if (item_type->id() == Type::DICTIONARY) {
988
+ auto dense_type = checked_cast<const DictionaryType&>(*item_type).value_type();
989
+ RETURN_NOT_OK(DecodeDictionaries(options.pool, dense_type, &item_arrays));
990
+ item_type = dense_type;
991
+ }
992
+
993
+ // See notes in MakeInnerOptions.
994
+ options = MakeInnerOptions(std::move(options));
995
+ // Don't blindly convert because timestamps in lists are handled differently.
996
+ options.timestamp_as_object = true;
997
+
998
+ auto flat_keys = std::make_shared<ChunkedArray>(key_arrays, key_type);
999
+ auto flat_items = std::make_shared<ChunkedArray>(item_arrays, item_type);
1000
+ OwnedRefNoGIL owned_numpy_keys;
1001
+ RETURN_NOT_OK(
1002
+ ConvertChunkedArrayToPandas(options, flat_keys, nullptr, owned_numpy_keys.ref()));
1003
+ OwnedRefNoGIL owned_numpy_items;
1004
+ RETURN_NOT_OK(
1005
+ ConvertChunkedArrayToPandas(options, flat_items, nullptr, owned_numpy_items.ref()));
1006
+ PyArrayObject* py_keys = reinterpret_cast<PyArrayObject*>(owned_numpy_keys.obj());
1007
+ PyArrayObject* py_items = reinterpret_cast<PyArrayObject*>(owned_numpy_items.obj());
1008
+
1009
+ if (options.maps_as_pydicts == MapConversionType::DEFAULT) {
1010
+ // The default behavior to express an Arrow MAP as a list of [(key, value), ...] pairs
1011
+ OwnedRef list_item;
1012
+ return ConvertMapHelper(
1013
+ [&list_item](int64_t num_pairs) {
1014
+ list_item.reset(PyList_New(num_pairs));
1015
+ return CheckPyError();
1016
+ },
1017
+ [&list_item](int64_t idx, OwnedRef& key_value, OwnedRef& item_value) {
1018
+ PyList_SET_ITEM(list_item.obj(), idx,
1019
+ PyTuple_Pack(2, key_value.obj(), item_value.obj()));
1020
+ return CheckPyError();
1021
+ },
1022
+ [&list_item] { return list_item.detach(); }, data, py_keys, py_items, item_arrays,
1023
+ out_values);
1024
+ } else {
1025
+ // Use a native pydict
1026
+ OwnedRef dict_item;
1027
+ Py_ssize_t total_dict_len{0};
1028
+ Py_ssize_t total_raw_len{0};
1029
+
1030
+ bool error_on_duplicate_keys;
1031
+ if (options.maps_as_pydicts == MapConversionType::LOSSY) {
1032
+ error_on_duplicate_keys = false;
1033
+ } else if (options.maps_as_pydicts == MapConversionType::STRICT_) {
1034
+ error_on_duplicate_keys = true;
1035
+ } else {
1036
+ auto val = std::underlying_type_t<MapConversionType>(options.maps_as_pydicts);
1037
+ return Status::UnknownError("Received unknown option for maps_as_pydicts: " +
1038
+ std::to_string(val));
1039
+ }
1040
+
1041
+ auto status = ConvertMapHelper(
1042
+ [&dict_item, &total_raw_len](int64_t num_pairs) {
1043
+ total_raw_len += num_pairs;
1044
+ dict_item.reset(PyDict_New());
1045
+ return CheckPyError();
1046
+ },
1047
+ [&dict_item]([[maybe_unused]] int64_t idx, OwnedRef& key_value,
1048
+ OwnedRef& item_value) {
1049
+ auto setitem_result =
1050
+ PyDict_SetItem(dict_item.obj(), key_value.obj(), item_value.obj());
1051
+ ARROW_RETURN_NOT_OK(CheckMapAsPydictsTypeError());
1052
+ // returns -1 if there are internal errors around hashing/resizing
1053
+ return setitem_result == 0 ? Status::OK()
1054
+ : Status::UnknownError(
1055
+ "[maps_as_pydicts] "
1056
+ "Unexpected failure inserting Arrow (key, "
1057
+ "value) pair into Python dict");
1058
+ },
1059
+ [&dict_item, &total_dict_len] {
1060
+ total_dict_len += PyDict_Size(dict_item.obj());
1061
+ return dict_item.detach();
1062
+ },
1063
+ data, py_keys, py_items, item_arrays, out_values);
1064
+
1065
+ ARROW_RETURN_NOT_OK(status);
1066
+ // If there were no errors generating the pydicts,
1067
+ // then check if we detected any data loss from duplicate keys.
1068
+ return CheckForDuplicateKeys(error_on_duplicate_keys, total_dict_len, total_raw_len);
1069
+ }
1070
+ }
1071
+
1072
+ template <typename InType, typename OutType>
1073
+ inline void ConvertNumericNullable(const ChunkedArray& data, InType na_value,
1074
+ OutType* out_values) {
1075
+ for (int c = 0; c < data.num_chunks(); c++) {
1076
+ const auto& arr = *data.chunk(c);
1077
+ const InType* in_values = GetPrimitiveValues<InType>(arr);
1078
+
1079
+ if (arr.null_count() > 0) {
1080
+ for (int64_t i = 0; i < arr.length(); ++i) {
1081
+ *out_values++ = arr.IsNull(i) ? na_value : in_values[i];
1082
+ }
1083
+ } else {
1084
+ memcpy(out_values, in_values, sizeof(InType) * arr.length());
1085
+ out_values += arr.length();
1086
+ }
1087
+ }
1088
+ }
1089
+
1090
+ template <typename InType, typename OutType>
1091
+ inline void ConvertNumericNullableCast(const ChunkedArray& data, InType na_value,
1092
+ OutType* out_values) {
1093
+ for (int c = 0; c < data.num_chunks(); c++) {
1094
+ const auto& arr = *data.chunk(c);
1095
+ const InType* in_values = GetPrimitiveValues<InType>(arr);
1096
+
1097
+ for (int64_t i = 0; i < arr.length(); ++i) {
1098
+ *out_values++ = arr.IsNull(i) ? static_cast<OutType>(na_value)
1099
+ : static_cast<OutType>(in_values[i]);
1100
+ }
1101
+ }
1102
+ }
1103
+
1104
+ template <int NPY_TYPE>
1105
+ class TypedPandasWriter : public PandasWriter {
1106
+ public:
1107
+ using T = typename npy_traits<NPY_TYPE>::value_type;
1108
+
1109
+ using PandasWriter::PandasWriter;
1110
+
1111
+ Status TransferSingle(std::shared_ptr<ChunkedArray> data, PyObject* py_ref) override {
1112
+ if (CanZeroCopy(*data)) {
1113
+ PyObject* wrapped;
1114
+ npy_intp dims[2] = {static_cast<npy_intp>(num_columns_),
1115
+ static_cast<npy_intp>(num_rows_)};
1116
+ RETURN_NOT_OK(
1117
+ MakeNumPyView(data->chunk(0), py_ref, NPY_TYPE, /*ndim=*/2, dims, &wrapped));
1118
+ SetBlockData(wrapped);
1119
+ return Status::OK();
1120
+ } else {
1121
+ RETURN_NOT_OK(CheckNotZeroCopyOnly(*data));
1122
+ RETURN_NOT_OK(EnsureAllocated());
1123
+ return CopyInto(data, /*rel_placement=*/0);
1124
+ }
1125
+ }
1126
+
1127
+ Status CheckTypeExact(const DataType& type, Type::type expected) {
1128
+ if (type.id() != expected) {
1129
+ // TODO(wesm): stringify NumPy / pandas type
1130
+ return Status::NotImplemented("Cannot write Arrow data of type ", type.ToString());
1131
+ }
1132
+ return Status::OK();
1133
+ }
1134
+
1135
+ T* GetBlockColumnStart(int64_t rel_placement) {
1136
+ return reinterpret_cast<T*>(block_data_) + rel_placement * num_rows_;
1137
+ }
1138
+
1139
+ protected:
1140
+ Status Allocate() override { return AllocateNDArray(NPY_TYPE); }
1141
+ };
1142
+
1143
+ struct ObjectWriterVisitor {
1144
+ const PandasOptions& options;
1145
+ const ChunkedArray& data;
1146
+ PyObject** out_values;
1147
+
1148
+ Status Visit(const NullType& type) {
1149
+ for (int c = 0; c < data.num_chunks(); c++) {
1150
+ std::shared_ptr<Array> arr = data.chunk(c);
1151
+
1152
+ for (int64_t i = 0; i < arr->length(); ++i) {
1153
+ // All values are null
1154
+ Py_INCREF(Py_None);
1155
+ *out_values = Py_None;
1156
+ ++out_values;
1157
+ }
1158
+ }
1159
+ return Status::OK();
1160
+ }
1161
+
1162
+ Status Visit(const BooleanType& type) {
1163
+ for (int c = 0; c < data.num_chunks(); c++) {
1164
+ const auto& arr = checked_cast<const BooleanArray&>(*data.chunk(c));
1165
+
1166
+ for (int64_t i = 0; i < arr.length(); ++i) {
1167
+ if (arr.IsNull(i)) {
1168
+ Py_INCREF(Py_None);
1169
+ *out_values++ = Py_None;
1170
+ } else if (arr.Value(i)) {
1171
+ // True
1172
+ Py_INCREF(Py_True);
1173
+ *out_values++ = Py_True;
1174
+ } else {
1175
+ // False
1176
+ Py_INCREF(Py_False);
1177
+ *out_values++ = Py_False;
1178
+ }
1179
+ }
1180
+ }
1181
+ return Status::OK();
1182
+ }
1183
+
1184
+ template <typename Type>
1185
+ enable_if_integer<Type, Status> Visit(const Type& type) {
1186
+ using T = typename Type::c_type;
1187
+ auto WrapValue = [](T value, PyObject** out) {
1188
+ *out = std::is_signed<T>::value ? PyLong_FromLongLong(value)
1189
+ : PyLong_FromUnsignedLongLong(value);
1190
+ RETURN_IF_PYERROR();
1191
+ return Status::OK();
1192
+ };
1193
+ return ConvertAsPyObjects<Type>(options, data, WrapValue, out_values);
1194
+ }
1195
+
1196
+ template <typename Type>
1197
+ enable_if_t<is_base_binary_type<Type>::value || is_binary_view_like_type<Type>::value ||
1198
+ is_fixed_size_binary_type<Type>::value,
1199
+ Status>
1200
+ Visit(const Type& type) {
1201
+ auto WrapValue = [](const std::string_view& view, PyObject** out) {
1202
+ *out = WrapBytes<Type>::Wrap(view.data(), view.length());
1203
+ if (*out == nullptr) {
1204
+ PyErr_Clear();
1205
+ return Status::UnknownError("Wrapping ", view, " failed");
1206
+ }
1207
+ return Status::OK();
1208
+ };
1209
+ return ConvertAsPyObjects<Type>(options, data, WrapValue, out_values);
1210
+ }
1211
+
1212
+ template <typename Type>
1213
+ enable_if_date<Type, Status> Visit(const Type& type) {
1214
+ auto WrapValue = [](typename Type::c_type value, PyObject** out) {
1215
+ RETURN_NOT_OK(internal::PyDate_from_int(value, Type::UNIT, out));
1216
+ RETURN_IF_PYERROR();
1217
+ return Status::OK();
1218
+ };
1219
+ return ConvertAsPyObjects<Type>(options, data, WrapValue, out_values);
1220
+ }
1221
+
1222
+ template <typename Type>
1223
+ enable_if_time<Type, Status> Visit(const Type& type) {
1224
+ const TimeUnit::type unit = type.unit();
1225
+ auto WrapValue = [unit](typename Type::c_type value, PyObject** out) {
1226
+ RETURN_NOT_OK(internal::PyTime_from_int(value, unit, out));
1227
+ RETURN_IF_PYERROR();
1228
+ return Status::OK();
1229
+ };
1230
+ return ConvertAsPyObjects<Type>(options, data, WrapValue, out_values);
1231
+ }
1232
+
1233
+ template <typename Type>
1234
+ enable_if_timestamp<Type, Status> Visit(const Type& type) {
1235
+ const TimeUnit::type unit = type.unit();
1236
+ OwnedRef tzinfo;
1237
+
1238
+ auto ConvertTimezoneNaive = [&](typename Type::c_type value, PyObject** out) {
1239
+ RETURN_NOT_OK(internal::PyDateTime_from_int(value, unit, out));
1240
+ RETURN_IF_PYERROR();
1241
+ return Status::OK();
1242
+ };
1243
+ auto ConvertTimezoneAware = [&](typename Type::c_type value, PyObject** out) {
1244
+ PyObject* naive_datetime;
1245
+ RETURN_NOT_OK(ConvertTimezoneNaive(value, &naive_datetime));
1246
+
1247
+ // convert the timezone naive datetime object to timezone aware
1248
+ // two step conversion of the datetime mimics Python's code:
1249
+ // dt.replace(tzinfo=datetime.timezone.utc).astimezone(tzinfo)
1250
+ // first step: replacing timezone with timezone.utc (replace method)
1251
+ OwnedRef args(PyTuple_New(0));
1252
+ OwnedRef keywords(PyDict_New());
1253
+ PyDict_SetItemString(keywords.obj(), "tzinfo", PyDateTime_TimeZone_UTC);
1254
+ OwnedRef naive_datetime_replace(PyObject_GetAttrString(naive_datetime, "replace"));
1255
+ OwnedRef datetime_utc(
1256
+ PyObject_Call(naive_datetime_replace.obj(), args.obj(), keywords.obj()));
1257
+ // second step: adjust the datetime to tzinfo timezone (astimezone method)
1258
+ *out = PyObject_CallMethod(datetime_utc.obj(), "astimezone", "O", tzinfo.obj());
1259
+
1260
+ // the timezone naive object is no longer required
1261
+ Py_DECREF(naive_datetime);
1262
+ RETURN_IF_PYERROR();
1263
+
1264
+ return Status::OK();
1265
+ };
1266
+
1267
+ if (!type.timezone().empty() && !options.ignore_timezone) {
1268
+ // convert timezone aware
1269
+ PyObject* tzobj;
1270
+ ARROW_ASSIGN_OR_RAISE(tzobj, internal::StringToTzinfo(type.timezone()));
1271
+ tzinfo.reset(tzobj);
1272
+ RETURN_IF_PYERROR();
1273
+ RETURN_NOT_OK(
1274
+ ConvertAsPyObjects<Type>(options, data, ConvertTimezoneAware, out_values));
1275
+ } else {
1276
+ // convert timezone naive
1277
+ RETURN_NOT_OK(
1278
+ ConvertAsPyObjects<Type>(options, data, ConvertTimezoneNaive, out_values));
1279
+ }
1280
+
1281
+ return Status::OK();
1282
+ }
1283
+
1284
+ template <typename Type>
1285
+ enable_if_t<std::is_same<Type, MonthDayNanoIntervalType>::value, Status> Visit(
1286
+ const Type& type) {
1287
+ OwnedRef args(PyTuple_New(0));
1288
+ OwnedRef kwargs(PyDict_New());
1289
+ RETURN_IF_PYERROR();
1290
+ auto to_date_offset = [&](const MonthDayNanoIntervalType::MonthDayNanos& interval,
1291
+ PyObject** out) {
1292
+ DCHECK(internal::BorrowPandasDataOffsetType() != nullptr);
1293
+ // DateOffset objects do not add nanoseconds component to pd.Timestamp.
1294
+ // as of Pandas 1.3.3
1295
+ // (https://github.com/pandas-dev/pandas/issues/43892).
1296
+ // So convert microseconds and remainder to preserve data
1297
+ // but give users more expected results.
1298
+ int64_t microseconds = interval.nanoseconds / 1000;
1299
+ int64_t nanoseconds;
1300
+ if (interval.nanoseconds >= 0) {
1301
+ nanoseconds = interval.nanoseconds % 1000;
1302
+ } else {
1303
+ nanoseconds = -((-interval.nanoseconds) % 1000);
1304
+ }
1305
+
1306
+ PyDict_SetItemString(kwargs.obj(), "months", PyLong_FromLong(interval.months));
1307
+ PyDict_SetItemString(kwargs.obj(), "days", PyLong_FromLong(interval.days));
1308
+ PyDict_SetItemString(kwargs.obj(), "microseconds",
1309
+ PyLong_FromLongLong(microseconds));
1310
+ PyDict_SetItemString(kwargs.obj(), "nanoseconds", PyLong_FromLongLong(nanoseconds));
1311
+ *out =
1312
+ PyObject_Call(internal::BorrowPandasDataOffsetType(), args.obj(), kwargs.obj());
1313
+ RETURN_IF_PYERROR();
1314
+ return Status::OK();
1315
+ };
1316
+ return ConvertAsPyObjects<MonthDayNanoIntervalType>(options, data, to_date_offset,
1317
+ out_values);
1318
+ }
1319
+
1320
+ Status Visit(const Decimal128Type& type) {
1321
+ OwnedRef decimal;
1322
+ OwnedRef Decimal;
1323
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal));
1324
+ RETURN_NOT_OK(internal::ImportFromModule(decimal.obj(), "Decimal", &Decimal));
1325
+ PyObject* decimal_constructor = Decimal.obj();
1326
+
1327
+ for (int c = 0; c < data.num_chunks(); c++) {
1328
+ const auto& arr = checked_cast<const arrow::Decimal128Array&>(*data.chunk(c));
1329
+
1330
+ for (int64_t i = 0; i < arr.length(); ++i) {
1331
+ if (arr.IsNull(i)) {
1332
+ Py_INCREF(Py_None);
1333
+ *out_values++ = Py_None;
1334
+ } else {
1335
+ *out_values++ =
1336
+ internal::DecimalFromString(decimal_constructor, arr.FormatValue(i));
1337
+ RETURN_IF_PYERROR();
1338
+ }
1339
+ }
1340
+ }
1341
+
1342
+ return Status::OK();
1343
+ }
1344
+
1345
+ Status Visit(const Decimal256Type& type) {
1346
+ OwnedRef decimal;
1347
+ OwnedRef Decimal;
1348
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal));
1349
+ RETURN_NOT_OK(internal::ImportFromModule(decimal.obj(), "Decimal", &Decimal));
1350
+ PyObject* decimal_constructor = Decimal.obj();
1351
+
1352
+ for (int c = 0; c < data.num_chunks(); c++) {
1353
+ const auto& arr = checked_cast<const arrow::Decimal256Array&>(*data.chunk(c));
1354
+
1355
+ for (int64_t i = 0; i < arr.length(); ++i) {
1356
+ if (arr.IsNull(i)) {
1357
+ Py_INCREF(Py_None);
1358
+ *out_values++ = Py_None;
1359
+ } else {
1360
+ *out_values++ =
1361
+ internal::DecimalFromString(decimal_constructor, arr.FormatValue(i));
1362
+ RETURN_IF_PYERROR();
1363
+ }
1364
+ }
1365
+ }
1366
+
1367
+ return Status::OK();
1368
+ }
1369
+
1370
+ template <typename T>
1371
+ enable_if_t<is_list_like_type<T>::value || is_list_view_type<T>::value, Status> Visit(
1372
+ const T& type) {
1373
+ if (!ListTypeSupported(*type.value_type())) {
1374
+ return Status::NotImplemented(
1375
+ "Not implemented type for conversion from List to Pandas: ",
1376
+ type.value_type()->ToString());
1377
+ }
1378
+ return ConvertListsLike<T>(options, data, out_values);
1379
+ }
1380
+
1381
+ Status Visit(const MapType& type) { return ConvertMap(options, data, out_values); }
1382
+
1383
+ Status Visit(const StructType& type) {
1384
+ return ConvertStruct(options, data, out_values);
1385
+ }
1386
+
1387
+ template <typename Type>
1388
+ enable_if_t<is_floating_type<Type>::value ||
1389
+ std::is_same<DictionaryType, Type>::value ||
1390
+ std::is_same<DurationType, Type>::value ||
1391
+ std::is_same<RunEndEncodedType, Type>::value ||
1392
+ std::is_same<ExtensionType, Type>::value ||
1393
+ (std::is_base_of<IntervalType, Type>::value &&
1394
+ !std::is_same<MonthDayNanoIntervalType, Type>::value) ||
1395
+ std::is_base_of<UnionType, Type>::value,
1396
+ Status>
1397
+ Visit(const Type& type) {
1398
+ return Status::NotImplemented("No implemented conversion to object dtype: ",
1399
+ type.ToString());
1400
+ }
1401
+ };
1402
+
1403
+ class ObjectWriter : public TypedPandasWriter<NPY_OBJECT> {
1404
+ public:
1405
+ using TypedPandasWriter<NPY_OBJECT>::TypedPandasWriter;
1406
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1407
+ PyAcquireGIL lock;
1408
+ ObjectWriterVisitor visitor{this->options_, *data,
1409
+ this->GetBlockColumnStart(rel_placement)};
1410
+ return VisitTypeInline(*data->type(), &visitor);
1411
+ }
1412
+ };
1413
+
1414
+ static inline bool IsNonNullContiguous(const ChunkedArray& data) {
1415
+ return data.num_chunks() == 1 && data.null_count() == 0;
1416
+ }
1417
+
1418
+ template <int NPY_TYPE>
1419
+ class IntWriter : public TypedPandasWriter<NPY_TYPE> {
1420
+ public:
1421
+ using ArrowType = typename npy_traits<NPY_TYPE>::TypeClass;
1422
+ using TypedPandasWriter<NPY_TYPE>::TypedPandasWriter;
1423
+
1424
+ bool CanZeroCopy(const ChunkedArray& data) const override {
1425
+ return IsNonNullContiguous(data);
1426
+ }
1427
+
1428
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1429
+ RETURN_NOT_OK(this->CheckTypeExact(*data->type(), ArrowType::type_id));
1430
+ ConvertIntegerNoNullsSameType<typename ArrowType::c_type>(
1431
+ this->options_, *data, this->GetBlockColumnStart(rel_placement));
1432
+ return Status::OK();
1433
+ }
1434
+ };
1435
+
1436
+ template <int NPY_TYPE>
1437
+ class FloatWriter : public TypedPandasWriter<NPY_TYPE> {
1438
+ public:
1439
+ using ArrowType = typename npy_traits<NPY_TYPE>::TypeClass;
1440
+ using TypedPandasWriter<NPY_TYPE>::TypedPandasWriter;
1441
+ using T = typename ArrowType::c_type;
1442
+
1443
+ bool CanZeroCopy(const ChunkedArray& data) const override {
1444
+ return IsNonNullContiguous(data) && data.type()->id() == ArrowType::type_id;
1445
+ }
1446
+
1447
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1448
+ Type::type in_type = data->type()->id();
1449
+ auto out_values = this->GetBlockColumnStart(rel_placement);
1450
+
1451
+ #define INTEGER_CASE(IN_TYPE) \
1452
+ ConvertIntegerWithNulls<IN_TYPE, T>(this->options_, *data, out_values); \
1453
+ break;
1454
+
1455
+ switch (in_type) {
1456
+ case Type::UINT8:
1457
+ INTEGER_CASE(uint8_t);
1458
+ case Type::INT8:
1459
+ INTEGER_CASE(int8_t);
1460
+ case Type::UINT16:
1461
+ INTEGER_CASE(uint16_t);
1462
+ case Type::INT16:
1463
+ INTEGER_CASE(int16_t);
1464
+ case Type::UINT32:
1465
+ INTEGER_CASE(uint32_t);
1466
+ case Type::INT32:
1467
+ INTEGER_CASE(int32_t);
1468
+ case Type::UINT64:
1469
+ INTEGER_CASE(uint64_t);
1470
+ case Type::INT64:
1471
+ INTEGER_CASE(int64_t);
1472
+ case Type::HALF_FLOAT:
1473
+ ConvertNumericNullableCast(*data, npy_traits<NPY_TYPE>::na_sentinel, out_values);
+ break;
1474
+ case Type::FLOAT:
1475
+ ConvertNumericNullableCast(*data, npy_traits<NPY_TYPE>::na_sentinel, out_values);
1476
+ break;
1477
+ case Type::DOUBLE:
1478
+ ConvertNumericNullableCast(*data, npy_traits<NPY_TYPE>::na_sentinel, out_values);
1479
+ break;
1480
+ default:
1481
+ return Status::NotImplemented("Cannot write Arrow data of type ",
1482
+ data->type()->ToString(),
1483
+ " to a Pandas floating point block");
1484
+ }
1485
+
1486
+ #undef INTEGER_CASE
1487
+
1488
+ return Status::OK();
1489
+ }
1490
+ };
1491
+
1492
+ using UInt8Writer = IntWriter<NPY_UINT8>;
1493
+ using Int8Writer = IntWriter<NPY_INT8>;
1494
+ using UInt16Writer = IntWriter<NPY_UINT16>;
1495
+ using Int16Writer = IntWriter<NPY_INT16>;
1496
+ using UInt32Writer = IntWriter<NPY_UINT32>;
1497
+ using Int32Writer = IntWriter<NPY_INT32>;
1498
+ using UInt64Writer = IntWriter<NPY_UINT64>;
1499
+ using Int64Writer = IntWriter<NPY_INT64>;
1500
+ using Float16Writer = FloatWriter<NPY_FLOAT16>;
1501
+ using Float32Writer = FloatWriter<NPY_FLOAT32>;
1502
+ using Float64Writer = FloatWriter<NPY_FLOAT64>;
1503
+
1504
+ class BoolWriter : public TypedPandasWriter<NPY_BOOL> {
1505
+ public:
1506
+ using TypedPandasWriter<NPY_BOOL>::TypedPandasWriter;
1507
+
1508
+ Status TransferSingle(std::shared_ptr<ChunkedArray> data, PyObject* py_ref) override {
1509
+ RETURN_NOT_OK(
1510
+ CheckNoZeroCopy("Zero copy conversions not possible with "
1511
+ "boolean types"));
1512
+ RETURN_NOT_OK(EnsureAllocated());
1513
+ return CopyInto(data, /*rel_placement=*/0);
1514
+ }
1515
+
1516
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1517
+ RETURN_NOT_OK(this->CheckTypeExact(*data->type(), Type::BOOL));
1518
+ auto out_values = this->GetBlockColumnStart(rel_placement);
1519
+ for (int c = 0; c < data->num_chunks(); c++) {
1520
+ const auto& arr = checked_cast<const BooleanArray&>(*data->chunk(c));
1521
+ for (int64_t i = 0; i < arr.length(); ++i) {
1522
+ *out_values++ = static_cast<uint8_t>(arr.Value(i));
1523
+ }
1524
+ }
1525
+ return Status::OK();
1526
+ }
1527
+ };
1528
+
1529
+ // ----------------------------------------------------------------------
1530
+ // Date / timestamp types
1531
+
1532
+ template <typename T, int64_t SHIFT>
1533
+ inline void ConvertDatetime(const ChunkedArray& data, int64_t* out_values) {
1534
+ for (int c = 0; c < data.num_chunks(); c++) {
1535
+ const auto& arr = *data.chunk(c);
1536
+ const T* in_values = GetPrimitiveValues<T>(arr);
1537
+
1538
+ for (int64_t i = 0; i < arr.length(); ++i) {
1539
+ *out_values++ = arr.IsNull(i) ? kPandasTimestampNull
1540
+ : (static_cast<int64_t>(in_values[i]) * SHIFT);
1541
+ }
1542
+ }
1543
+ }
1544
+
1545
+ template <typename T, int SHIFT>
1546
+ void ConvertDatesShift(const ChunkedArray& data, int64_t* out_values) {
1547
+ for (int c = 0; c < data.num_chunks(); c++) {
1548
+ const auto& arr = *data.chunk(c);
1549
+ const T* in_values = GetPrimitiveValues<T>(arr);
1550
+ for (int64_t i = 0; i < arr.length(); ++i) {
1551
+ *out_values++ = arr.IsNull(i) ? kPandasTimestampNull
1552
+ : static_cast<int64_t>(in_values[i]) / SHIFT;
1553
+ }
1554
+ }
1555
+ }
1556
+
1557
+ class DatetimeDayWriter : public TypedPandasWriter<NPY_DATETIME> {
1558
+ public:
1559
+ using TypedPandasWriter<NPY_DATETIME>::TypedPandasWriter;
1560
+
1561
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1562
+ int64_t* out_values = this->GetBlockColumnStart(rel_placement);
1563
+ const auto& type = checked_cast<const DateType&>(*data->type());
1564
+ switch (type.unit()) {
1565
+ case DateUnit::DAY:
1566
+ ConvertDatesShift<int32_t, 1LL>(*data, out_values);
1567
+ break;
1568
+ case DateUnit::MILLI:
1569
+ ConvertDatesShift<int64_t, 86400000LL>(*data, out_values);
1570
+ break;
1571
+ }
1572
+ return Status::OK();
1573
+ }
1574
+
1575
+ protected:
1576
+ Status Allocate() override {
1577
+ RETURN_NOT_OK(this->AllocateNDArray(NPY_DATETIME));
1578
+ SetDatetimeUnit(NPY_FR_D);
1579
+ return Status::OK();
1580
+ }
1581
+ };
1582
+
1583
+ template <TimeUnit::type UNIT>
1584
+ class DatetimeWriter : public TypedPandasWriter<NPY_DATETIME> {
1585
+ public:
1586
+ using TypedPandasWriter<NPY_DATETIME>::TypedPandasWriter;
1587
+
1588
+ bool CanZeroCopy(const ChunkedArray& data) const override {
1589
+ if (data.type()->id() == Type::TIMESTAMP) {
1590
+ const auto& type = checked_cast<const TimestampType&>(*data.type());
1591
+ return IsNonNullContiguous(data) && type.unit() == UNIT;
1592
+ } else {
1593
+ return false;
1594
+ }
1595
+ }
1596
+
1597
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1598
+ const auto& ts_type = checked_cast<const TimestampType&>(*data->type());
1599
+ DCHECK_EQ(UNIT, ts_type.unit()) << "Should only call instances of this writer "
1600
+ << "with arrays of the correct unit";
1601
+ ConvertNumericNullable<int64_t>(*data, kPandasTimestampNull,
1602
+ this->GetBlockColumnStart(rel_placement));
1603
+ return Status::OK();
1604
+ }
1605
+
1606
+ protected:
1607
+ Status Allocate() override {
1608
+ RETURN_NOT_OK(this->AllocateNDArray(NPY_DATETIME));
1609
+ SetDatetimeUnit(internal::NumPyFrequency(UNIT));
1610
+ return Status::OK();
1611
+ }
1612
+ };
1613
+
1614
+ using DatetimeSecondWriter = DatetimeWriter<TimeUnit::SECOND>;
1615
+
1616
+ class DatetimeMilliWriter : public DatetimeWriter<TimeUnit::MILLI> {
1617
+ public:
1618
+ using DatetimeWriter<TimeUnit::MILLI>::DatetimeWriter;
1619
+
1620
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1621
+ Type::type type = data->type()->id();
1622
+ int64_t* out_values = this->GetBlockColumnStart(rel_placement);
1623
+ if (type == Type::DATE32) {
1624
+ // Convert from days since epoch to datetime64[ms]
1625
+ ConvertDatetime<int32_t, 86400000L>(*data, out_values);
1626
+ } else if (type == Type::DATE64) {
1627
+ ConvertNumericNullable<int64_t>(*data, kPandasTimestampNull, out_values);
1628
+ } else {
1629
+ const auto& ts_type = checked_cast<const TimestampType&>(*data->type());
1630
+ DCHECK_EQ(TimeUnit::MILLI, ts_type.unit())
1631
+ << "Should only call instances of this writer "
1632
+ << "with arrays of the correct unit";
1633
+ ConvertNumericNullable<int64_t>(*data, kPandasTimestampNull, out_values);
1634
+ }
1635
+ return Status::OK();
1636
+ }
1637
+ };
1638
+
1639
+ using DatetimeMicroWriter = DatetimeWriter<TimeUnit::MICRO>;
1640
+
1641
+ class DatetimeNanoWriter : public DatetimeWriter<TimeUnit::NANO> {
1642
+ public:
1643
+ using DatetimeWriter<TimeUnit::NANO>::DatetimeWriter;
1644
+
1645
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1646
+ Type::type type = data->type()->id();
1647
+ int64_t* out_values = this->GetBlockColumnStart(rel_placement);
1648
+ compute::ExecContext ctx(options_.pool);
1649
+ compute::CastOptions options;
1650
+ if (options_.safe_cast) {
1651
+ options = compute::CastOptions::Safe();
1652
+ } else {
1653
+ options = compute::CastOptions::Unsafe();
1654
+ }
1655
+ Datum out;
1656
+ auto target_type = timestamp(TimeUnit::NANO);
1657
+
1658
+ if (type == Type::DATE32) {
1659
+ // Convert from days since epoch to datetime64[ns]
1660
+ ConvertDatetime<int32_t, kNanosecondsInDay>(*data, out_values);
1661
+ } else if (type == Type::DATE64) {
1662
+ // Date64Type is millisecond timestamp stored as int64_t
1663
+ // TODO(wesm): Do we want to make sure to zero out the milliseconds?
1664
+ ConvertDatetime<int64_t, 1000000L>(*data, out_values);
1665
+ } else if (type == Type::TIMESTAMP) {
1666
+ const auto& ts_type = checked_cast<const TimestampType&>(*data->type());
1667
+
1668
+ if (ts_type.unit() == TimeUnit::NANO) {
1669
+ ConvertNumericNullable<int64_t>(*data, kPandasTimestampNull, out_values);
1670
+ } else if (ts_type.unit() == TimeUnit::MICRO || ts_type.unit() == TimeUnit::MILLI ||
1671
+ ts_type.unit() == TimeUnit::SECOND) {
1672
+ ARROW_ASSIGN_OR_RAISE(out, compute::Cast(data, target_type, options, &ctx));
1673
+ ConvertNumericNullable<int64_t>(*out.chunked_array(), kPandasTimestampNull,
1674
+ out_values);
1675
+ } else {
1676
+ return Status::NotImplemented("Unsupported time unit");
1677
+ }
1678
+ } else {
1679
+ return Status::NotImplemented("Cannot write Arrow data of type ",
1680
+ data->type()->ToString(),
1681
+ " to a Pandas datetime block.");
1682
+ }
1683
+ return Status::OK();
1684
+ }
1685
+ };
1686
+
1687
+ template <typename BASE>
1688
+ class DatetimeTZWriter : public BASE {
1689
+ public:
1690
+ DatetimeTZWriter(const PandasOptions& options, const std::string& timezone,
1691
+ int64_t num_rows)
1692
+ : BASE(options, num_rows, 1), timezone_(timezone) {}
1693
+
1694
+ protected:
1695
+ Status GetResultBlock(PyObject** out) override {
1696
+ RETURN_NOT_OK(this->MakeBlock1D());
1697
+ *out = this->block_arr_.obj();
1698
+ return Status::OK();
1699
+ }
1700
+
1701
+ Status AddResultMetadata(PyObject* result) override {
1702
+ PyObject* py_tz = PyUnicode_FromStringAndSize(
1703
+ timezone_.c_str(), static_cast<Py_ssize_t>(timezone_.size()));
1704
+ RETURN_IF_PYERROR();
1705
+ PyDict_SetItemString(result, "timezone", py_tz);
1706
+ Py_DECREF(py_tz);
1707
+ return Status::OK();
1708
+ }
1709
+
1710
+ private:
1711
+ std::string timezone_;
1712
+ };
1713
+
1714
+ using DatetimeSecondTZWriter = DatetimeTZWriter<DatetimeSecondWriter>;
1715
+ using DatetimeMilliTZWriter = DatetimeTZWriter<DatetimeMilliWriter>;
1716
+ using DatetimeMicroTZWriter = DatetimeTZWriter<DatetimeMicroWriter>;
1717
+ using DatetimeNanoTZWriter = DatetimeTZWriter<DatetimeNanoWriter>;
1718
+
1719
+ template <TimeUnit::type UNIT>
1720
+ class TimedeltaWriter : public TypedPandasWriter<NPY_TIMEDELTA> {
1721
+ public:
1722
+ using TypedPandasWriter<NPY_TIMEDELTA>::TypedPandasWriter;
1723
+
1724
+ Status AllocateTimedelta(int ndim) {
1725
+ RETURN_NOT_OK(this->AllocateNDArray(NPY_TIMEDELTA, ndim));
1726
+ SetDatetimeUnit(internal::NumPyFrequency(UNIT));
1727
+ return Status::OK();
1728
+ }
1729
+
1730
+ bool CanZeroCopy(const ChunkedArray& data) const override {
1731
+ const auto& type = checked_cast<const DurationType&>(*data.type());
1732
+ return IsNonNullContiguous(data) && type.unit() == UNIT;
1733
+ }
1734
+
1735
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1736
+ const auto& type = checked_cast<const DurationType&>(*data->type());
1737
+ DCHECK_EQ(UNIT, type.unit()) << "Should only call instances of this writer "
1738
+ << "with arrays of the correct unit";
1739
+ ConvertNumericNullable<int64_t>(*data, kPandasTimestampNull,
1740
+ this->GetBlockColumnStart(rel_placement));
1741
+ return Status::OK();
1742
+ }
1743
+
1744
+ protected:
1745
+ Status Allocate() override { return AllocateTimedelta(2); }
1746
+ };
1747
+
1748
+ using TimedeltaSecondWriter = TimedeltaWriter<TimeUnit::SECOND>;
1749
+ using TimedeltaMilliWriter = TimedeltaWriter<TimeUnit::MILLI>;
1750
+ using TimedeltaMicroWriter = TimedeltaWriter<TimeUnit::MICRO>;
1751
+
1752
+ class TimedeltaNanoWriter : public TimedeltaWriter<TimeUnit::NANO> {
1753
+ public:
1754
+ using TimedeltaWriter<TimeUnit::NANO>::TimedeltaWriter;
1755
+
1756
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1757
+ Type::type type = data->type()->id();
1758
+ int64_t* out_values = this->GetBlockColumnStart(rel_placement);
1759
+ if (type == Type::DURATION) {
1760
+ const auto& ts_type = checked_cast<const DurationType&>(*data->type());
1761
+ if (ts_type.unit() == TimeUnit::NANO) {
1762
+ ConvertNumericNullable<int64_t>(*data, kPandasTimestampNull, out_values);
1763
+ } else if (ts_type.unit() == TimeUnit::MICRO) {
1764
+ ConvertDatetime<int64_t, 1000L>(*data, out_values);
1765
+ } else if (ts_type.unit() == TimeUnit::MILLI) {
1766
+ ConvertDatetime<int64_t, 1000000L>(*data, out_values);
1767
+ } else if (ts_type.unit() == TimeUnit::SECOND) {
1768
+ ConvertDatetime<int64_t, 1000000000L>(*data, out_values);
1769
+ } else {
1770
+ return Status::NotImplemented("Unsupported time unit");
1771
+ }
1772
+ } else {
1773
+ return Status::NotImplemented("Cannot write Arrow data of type ",
1774
+ data->type()->ToString(),
1775
+ " to a Pandas timedelta block.");
1776
+ }
1777
+ return Status::OK();
1778
+ }
1779
+ };
1780
+
1781
+ Status MakeZeroLengthArray(const std::shared_ptr<DataType>& type,
1782
+ std::shared_ptr<Array>* out) {
1783
+ std::unique_ptr<ArrayBuilder> builder;
1784
+ RETURN_NOT_OK(MakeBuilder(default_memory_pool(), type, &builder));
1785
+ RETURN_NOT_OK(builder->Resize(0));
1786
+ return builder->Finish(out);
1787
+ }
1788
+
1789
+ bool NeedDictionaryUnification(const ChunkedArray& data) {
1790
+ if (data.num_chunks() < 2) {
1791
+ return false;
1792
+ }
1793
+ const auto& arr_first = checked_cast<const DictionaryArray&>(*data.chunk(0));
1794
+ for (int c = 1; c < data.num_chunks(); c++) {
1795
+ const auto& arr = checked_cast<const DictionaryArray&>(*data.chunk(c));
1796
+ if (!(arr_first.dictionary()->Equals(arr.dictionary()))) {
1797
+ return true;
1798
+ }
1799
+ }
1800
+ return false;
1801
+ }
1802
+
1803
+ template <typename IndexType>
1804
+ class CategoricalWriter
1805
+ : public TypedPandasWriter<arrow_traits<IndexType::type_id>::npy_type> {
1806
+ public:
1807
+ using TRAITS = arrow_traits<IndexType::type_id>;
1808
+ using ArrayType = typename TypeTraits<IndexType>::ArrayType;
1809
+ using T = typename TRAITS::T;
1810
+
1811
+ explicit CategoricalWriter(const PandasOptions& options, int64_t num_rows)
1812
+ : TypedPandasWriter<TRAITS::npy_type>(options, num_rows, 1),
1813
+ ordered_(false),
1814
+ needs_copy_(false) {}
1815
+
1816
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1817
+ return Status::NotImplemented("categorical type");
1818
+ }
1819
+
1820
+ Status TransferSingle(std::shared_ptr<ChunkedArray> data, PyObject* py_ref) override {
1821
+ const auto& dict_type = checked_cast<const DictionaryType&>(*data->type());
1822
+ std::shared_ptr<Array> dict;
1823
+ if (data->num_chunks() == 0) {
1824
+ // no dictionary values => create empty array
1825
+ RETURN_NOT_OK(this->AllocateNDArray(TRAITS::npy_type, 1));
1826
+ RETURN_NOT_OK(MakeZeroLengthArray(dict_type.value_type(), &dict));
1827
+ } else {
1828
+ DCHECK_EQ(IndexType::type_id, dict_type.index_type()->id());
1829
+ RETURN_NOT_OK(WriteIndices(*data, &dict));
1830
+ }
1831
+
1832
+ PyObject* pydict;
1833
+ RETURN_NOT_OK(ConvertArrayToPandas(this->options_, dict, nullptr, &pydict));
1834
+ dictionary_.reset(pydict);
1835
+ ordered_ = dict_type.ordered();
1836
+ return Status::OK();
1837
+ }
1838
+
1839
+ Status Write(std::shared_ptr<ChunkedArray> data, int64_t abs_placement,
1840
+ int64_t rel_placement) override {
1841
+ RETURN_NOT_OK(this->EnsurePlacementAllocated());
1842
+ RETURN_NOT_OK(TransferSingle(data, /*py_ref=*/nullptr));
1843
+ this->placement_data_[rel_placement] = abs_placement;
1844
+ return Status::OK();
1845
+ }
1846
+
1847
+ Status GetSeriesResult(PyObject** out) override {
1848
+ PyAcquireGIL lock;
1849
+
1850
+ PyObject* result = PyDict_New();
1851
+ RETURN_IF_PYERROR();
1852
+
1853
+ // Expected single array dictionary layout
1854
+ PyDict_SetItemString(result, "indices", this->block_arr_.obj());
1855
+ RETURN_IF_PYERROR();
1856
+ RETURN_NOT_OK(AddResultMetadata(result));
1857
+
1858
+ *out = result;
1859
+ return Status::OK();
1860
+ }
1861
+
1862
+ protected:
1863
+ Status AddResultMetadata(PyObject* result) override {
1864
+ PyDict_SetItemString(result, "dictionary", dictionary_.obj());
1865
+ PyObject* py_ordered = ordered_ ? Py_True : Py_False;
1866
+ Py_INCREF(py_ordered);
1867
+ PyDict_SetItemString(result, "ordered", py_ordered);
1868
+ return Status::OK();
1869
+ }
1870
+
1871
+ Status WriteIndicesUniform(const ChunkedArray& data) {
1872
+ RETURN_NOT_OK(this->AllocateNDArray(TRAITS::npy_type, 1));
1873
+ T* out_values = reinterpret_cast<T*>(this->block_data_);
1874
+
1875
+ for (int c = 0; c < data.num_chunks(); c++) {
1876
+ const auto& arr = checked_cast<const DictionaryArray&>(*data.chunk(c));
1877
+ const auto& indices = checked_cast<const ArrayType&>(*arr.indices());
1878
+ auto values = reinterpret_cast<const T*>(indices.raw_values());
1879
+
1880
+ RETURN_NOT_OK(CheckIndexBounds(*indices.data(), arr.dictionary()->length()));
1881
+ // Null is -1 in CategoricalBlock
1882
+ for (int i = 0; i < arr.length(); ++i) {
1883
+ if (indices.IsValid(i)) {
1884
+ *out_values++ = values[i];
1885
+ } else {
1886
+ *out_values++ = -1;
1887
+ }
1888
+ }
1889
+ }
1890
+ return Status::OK();
1891
+ }
1892
+
1893
+ Status WriteIndicesVarying(const ChunkedArray& data, std::shared_ptr<Array>* out_dict) {
1894
+ // Yield int32 indices to allow for dictionary outgrowing the current index
1895
+ // type
1896
+ RETURN_NOT_OK(this->AllocateNDArray(NPY_INT32, 1));
1897
+ auto out_values = reinterpret_cast<int32_t*>(this->block_data_);
1898
+
1899
+ const auto& dict_type = checked_cast<const DictionaryType&>(*data.type());
1900
+
1901
+ ARROW_ASSIGN_OR_RAISE(auto unifier, DictionaryUnifier::Make(dict_type.value_type(),
1902
+ this->options_.pool));
1903
+ for (int c = 0; c < data.num_chunks(); c++) {
1904
+ const auto& arr = checked_cast<const DictionaryArray&>(*data.chunk(c));
1905
+ const auto& indices = checked_cast<const ArrayType&>(*arr.indices());
1906
+ auto values = reinterpret_cast<const T*>(indices.raw_values());
1907
+
1908
+ std::shared_ptr<Buffer> transpose_buffer;
1909
+ RETURN_NOT_OK(unifier->Unify(*arr.dictionary(), &transpose_buffer));
1910
+
1911
+ auto transpose = reinterpret_cast<const int32_t*>(transpose_buffer->data());
1912
+ int64_t dict_length = arr.dictionary()->length();
1913
+
1914
+ RETURN_NOT_OK(CheckIndexBounds(*indices.data(), dict_length));
1915
+
1916
+ // Null is -1 in CategoricalBlock
1917
+ for (int i = 0; i < arr.length(); ++i) {
1918
+ if (indices.IsValid(i)) {
1919
+ *out_values++ = transpose[values[i]];
1920
+ } else {
1921
+ *out_values++ = -1;
1922
+ }
1923
+ }
1924
+ }
1925
+
1926
+ std::shared_ptr<DataType> unused_type;
1927
+ return unifier->GetResult(&unused_type, out_dict);
1928
+ }
1929
+
1930
+ Status WriteIndices(const ChunkedArray& data, std::shared_ptr<Array>* out_dict) {
1931
+ DCHECK_GT(data.num_chunks(), 0);
1932
+
1933
+ // Sniff the first chunk
1934
+ const auto& arr_first = checked_cast<const DictionaryArray&>(*data.chunk(0));
1935
+ const auto indices_first = std::static_pointer_cast<ArrayType>(arr_first.indices());
1936
+
1937
+ if (data.num_chunks() == 1 && indices_first->null_count() == 0) {
1938
+ RETURN_NOT_OK(
1939
+ CheckIndexBounds(*indices_first->data(), arr_first.dictionary()->length()));
1940
+
1941
+ PyObject* wrapped;
1942
+ npy_intp dims[1] = {static_cast<npy_intp>(this->num_rows_)};
1943
+ RETURN_NOT_OK(MakeNumPyView(indices_first, /*py_ref=*/nullptr, TRAITS::npy_type,
1944
+ /*ndim=*/1, dims, &wrapped));
1945
+ this->SetBlockData(wrapped);
1946
+ *out_dict = arr_first.dictionary();
1947
+ } else {
1948
+ RETURN_NOT_OK(this->CheckNotZeroCopyOnly(data));
1949
+ if (NeedDictionaryUnification(data)) {
1950
+ RETURN_NOT_OK(WriteIndicesVarying(data, out_dict));
1951
+ } else {
1952
+ RETURN_NOT_OK(WriteIndicesUniform(data));
1953
+ *out_dict = arr_first.dictionary();
1954
+ }
1955
+ }
1956
+ return Status::OK();
1957
+ }
1958
+
1959
+ OwnedRefNoGIL dictionary_;
1960
+ bool ordered_;
1961
+ bool needs_copy_;
1962
+ };
1963
+
1964
+ class ExtensionWriter : public PandasWriter {
1965
+ public:
1966
+ using PandasWriter::PandasWriter;
1967
+
1968
+ Status Allocate() override {
1969
+ // no-op
1970
+ return Status::OK();
1971
+ }
1972
+
1973
+ Status TransferSingle(std::shared_ptr<ChunkedArray> data, PyObject* py_ref) override {
1974
+ PyAcquireGIL lock;
1975
+ PyObject* py_array;
1976
+ py_array = wrap_chunked_array(data);
1977
+ py_array_.reset(py_array);
1978
+
1979
+ return Status::OK();
1980
+ }
1981
+
1982
+ Status CopyInto(std::shared_ptr<ChunkedArray> data, int64_t rel_placement) override {
1983
+ return TransferSingle(data, nullptr);
1984
+ }
1985
+
1986
+ Status GetDataFrameResult(PyObject** out) override {
1987
+ PyAcquireGIL lock;
1988
+ PyObject* result = PyDict_New();
1989
+ RETURN_IF_PYERROR();
1990
+
1991
+ PyDict_SetItemString(result, "py_array", py_array_.obj());
1992
+ PyDict_SetItemString(result, "placement", placement_arr_.obj());
1993
+ *out = result;
1994
+ return Status::OK();
1995
+ }
1996
+
1997
+ Status GetSeriesResult(PyObject** out) override {
1998
+ *out = py_array_.detach();
1999
+ return Status::OK();
2000
+ }
2001
+
2002
+ protected:
2003
+ OwnedRefNoGIL py_array_;
2004
+ };
2005
+
2006
+ Status MakeWriter(const PandasOptions& options, PandasWriter::type writer_type,
2007
+ const DataType& type, int64_t num_rows, int num_columns,
2008
+ std::shared_ptr<PandasWriter>* writer) {
2009
+ #define BLOCK_CASE(NAME, TYPE) \
2010
+ case PandasWriter::NAME: \
2011
+ *writer = std::make_shared<TYPE>(options, num_rows, num_columns); \
2012
+ break;
2013
+
2014
+ #define CATEGORICAL_CASE(TYPE) \
2015
+ case TYPE::type_id: \
2016
+ *writer = std::make_shared<CategoricalWriter<TYPE>>(options, num_rows); \
2017
+ break;
2018
+
2019
+ #define TZ_CASE(NAME, TYPE) \
2020
+ case PandasWriter::NAME: { \
2021
+ const auto& ts_type = checked_cast<const TimestampType&>(type); \
2022
+ *writer = std::make_shared<TYPE>(options, ts_type.timezone(), num_rows); \
2023
+ } break;
2024
+
2025
+ switch (writer_type) {
2026
+ case PandasWriter::CATEGORICAL: {
2027
+ const auto& index_type = *checked_cast<const DictionaryType&>(type).index_type();
2028
+ switch (index_type.id()) {
2029
+ CATEGORICAL_CASE(Int8Type);
2030
+ CATEGORICAL_CASE(Int16Type);
2031
+ CATEGORICAL_CASE(Int32Type);
2032
+ CATEGORICAL_CASE(Int64Type);
2033
+ case Type::UINT8:
2034
+ case Type::UINT16:
2035
+ case Type::UINT32:
2036
+ case Type::UINT64:
2037
+ return Status::TypeError(
2038
+ "Converting unsigned dictionary indices to pandas",
2039
+ " not yet supported, index type: ", index_type.ToString());
2040
+ default:
2041
+ // Unreachable
2042
+ DCHECK(false);
2043
+ break;
2044
+ }
2045
+ } break;
2046
+ case PandasWriter::EXTENSION:
2047
+ *writer = std::make_shared<ExtensionWriter>(options, num_rows, num_columns);
2048
+ break;
2049
+ BLOCK_CASE(OBJECT, ObjectWriter);
2050
+ BLOCK_CASE(UINT8, UInt8Writer);
2051
+ BLOCK_CASE(INT8, Int8Writer);
2052
+ BLOCK_CASE(UINT16, UInt16Writer);
2053
+ BLOCK_CASE(INT16, Int16Writer);
2054
+ BLOCK_CASE(UINT32, UInt32Writer);
2055
+ BLOCK_CASE(INT32, Int32Writer);
2056
+ BLOCK_CASE(UINT64, UInt64Writer);
2057
+ BLOCK_CASE(INT64, Int64Writer);
2058
+ BLOCK_CASE(HALF_FLOAT, Float16Writer);
2059
+ BLOCK_CASE(FLOAT, Float32Writer);
2060
+ BLOCK_CASE(DOUBLE, Float64Writer);
2061
+ BLOCK_CASE(BOOL, BoolWriter);
2062
+ BLOCK_CASE(DATETIME_DAY, DatetimeDayWriter);
2063
+ BLOCK_CASE(DATETIME_SECOND, DatetimeSecondWriter);
2064
+ BLOCK_CASE(DATETIME_MILLI, DatetimeMilliWriter);
2065
+ BLOCK_CASE(DATETIME_MICRO, DatetimeMicroWriter);
2066
+ BLOCK_CASE(DATETIME_NANO, DatetimeNanoWriter);
2067
+ BLOCK_CASE(TIMEDELTA_SECOND, TimedeltaSecondWriter);
2068
+ BLOCK_CASE(TIMEDELTA_MILLI, TimedeltaMilliWriter);
2069
+ BLOCK_CASE(TIMEDELTA_MICRO, TimedeltaMicroWriter);
2070
+ BLOCK_CASE(TIMEDELTA_NANO, TimedeltaNanoWriter);
2071
+ TZ_CASE(DATETIME_SECOND_TZ, DatetimeSecondTZWriter);
2072
+ TZ_CASE(DATETIME_MILLI_TZ, DatetimeMilliTZWriter);
2073
+ TZ_CASE(DATETIME_MICRO_TZ, DatetimeMicroTZWriter);
2074
+ TZ_CASE(DATETIME_NANO_TZ, DatetimeNanoTZWriter);
2075
+ default:
2076
+ return Status::NotImplemented("Unsupported block type");
2077
+ }
2078
+
2079
+ #undef BLOCK_CASE
2080
+ #undef CATEGORICAL_CASE
2081
+
2082
+ return Status::OK();
2083
+ }
2084
+
2085
+ static Status GetPandasWriterType(const ChunkedArray& data, const PandasOptions& options,
2086
+ PandasWriter::type* output_type) {
2087
+ #define INTEGER_CASE(NAME) \
2088
+ *output_type = \
2089
+ data.null_count() > 0 \
2090
+ ? options.integer_object_nulls ? PandasWriter::OBJECT : PandasWriter::DOUBLE \
2091
+ : PandasWriter::NAME; \
2092
+ break;
2093
+
2094
+ switch (data.type()->id()) {
2095
+ case Type::BOOL:
2096
+ *output_type = data.null_count() > 0 ? PandasWriter::OBJECT : PandasWriter::BOOL;
2097
+ break;
2098
+ case Type::UINT8:
2099
+ INTEGER_CASE(UINT8);
2100
+ case Type::INT8:
2101
+ INTEGER_CASE(INT8);
2102
+ case Type::UINT16:
2103
+ INTEGER_CASE(UINT16);
2104
+ case Type::INT16:
2105
+ INTEGER_CASE(INT16);
2106
+ case Type::UINT32:
2107
+ INTEGER_CASE(UINT32);
2108
+ case Type::INT32:
2109
+ INTEGER_CASE(INT32);
2110
+ case Type::UINT64:
2111
+ INTEGER_CASE(UINT64);
2112
+ case Type::INT64:
2113
+ INTEGER_CASE(INT64);
2114
+ case Type::HALF_FLOAT:
2115
+ *output_type = PandasWriter::HALF_FLOAT;
2116
+ break;
2117
+ case Type::FLOAT:
2118
+ *output_type = PandasWriter::FLOAT;
2119
+ break;
2120
+ case Type::DOUBLE:
2121
+ *output_type = PandasWriter::DOUBLE;
2122
+ break;
2123
+ case Type::STRING: // fall through
2124
+ case Type::LARGE_STRING: // fall through
2125
+ case Type::STRING_VIEW: // fall through
2126
+ case Type::BINARY: // fall through
2127
+ case Type::LARGE_BINARY:
2128
+ case Type::BINARY_VIEW:
2129
+ case Type::NA: // fall through
2130
+ case Type::FIXED_SIZE_BINARY: // fall through
2131
+ case Type::STRUCT: // fall through
2132
+ case Type::TIME32: // fall through
2133
+ case Type::TIME64: // fall through
2134
+ case Type::DECIMAL128: // fall through
2135
+ case Type::DECIMAL256: // fall through
2136
+ case Type::INTERVAL_MONTH_DAY_NANO: // fall through
2137
+ *output_type = PandasWriter::OBJECT;
2138
+ break;
2139
+ case Type::DATE32:
2140
+ if (options.date_as_object) {
2141
+ *output_type = PandasWriter::OBJECT;
2142
+ } else if (options.coerce_temporal_nanoseconds) {
2143
+ *output_type = PandasWriter::DATETIME_NANO;
2144
+ } else if (options.to_numpy) {
2145
+ // Numpy supports Day, but Pandas does not
2146
+ *output_type = PandasWriter::DATETIME_DAY;
2147
+ } else {
2148
+ *output_type = PandasWriter::DATETIME_MILLI;
2149
+ }
2150
+ break;
2151
+ case Type::DATE64:
2152
+ if (options.date_as_object) {
2153
+ *output_type = PandasWriter::OBJECT;
2154
+ } else if (options.coerce_temporal_nanoseconds) {
2155
+ *output_type = PandasWriter::DATETIME_NANO;
2156
+ } else {
2157
+ *output_type = PandasWriter::DATETIME_MILLI;
2158
+ }
2159
+ break;
2160
+ case Type::TIMESTAMP: {
2161
+ const auto& ts_type = checked_cast<const TimestampType&>(*data.type());
2162
+ if (options.timestamp_as_object && ts_type.unit() != TimeUnit::NANO) {
2163
+ // Nanoseconds are never out of bounds for pandas, so in that case
2164
+ // we don't convert to object
2165
+ *output_type = PandasWriter::OBJECT;
2166
+ } else if (options.coerce_temporal_nanoseconds) {
2167
+ if (!ts_type.timezone().empty()) {
2168
+ *output_type = PandasWriter::DATETIME_NANO_TZ;
2169
+ } else {
2170
+ *output_type = PandasWriter::DATETIME_NANO;
2171
+ }
2172
+ } else {
2173
+ if (!ts_type.timezone().empty()) {
2174
+ switch (ts_type.unit()) {
2175
+ case TimeUnit::SECOND:
2176
+ *output_type = PandasWriter::DATETIME_SECOND_TZ;
2177
+ break;
2178
+ case TimeUnit::MILLI:
2179
+ *output_type = PandasWriter::DATETIME_MILLI_TZ;
2180
+ break;
2181
+ case TimeUnit::MICRO:
2182
+ *output_type = PandasWriter::DATETIME_MICRO_TZ;
2183
+ break;
2184
+ case TimeUnit::NANO:
2185
+ *output_type = PandasWriter::DATETIME_NANO_TZ;
2186
+ break;
2187
+ }
2188
+ } else {
2189
+ switch (ts_type.unit()) {
2190
+ case TimeUnit::SECOND:
2191
+ *output_type = PandasWriter::DATETIME_SECOND;
2192
+ break;
2193
+ case TimeUnit::MILLI:
2194
+ *output_type = PandasWriter::DATETIME_MILLI;
2195
+ break;
2196
+ case TimeUnit::MICRO:
2197
+ *output_type = PandasWriter::DATETIME_MICRO;
2198
+ break;
2199
+ case TimeUnit::NANO:
2200
+ *output_type = PandasWriter::DATETIME_NANO;
2201
+ break;
2202
+ }
2203
+ }
2204
+ }
2205
+ } break;
2206
+ case Type::DURATION: {
2207
+ const auto& dur_type = checked_cast<const DurationType&>(*data.type());
2208
+ if (options.coerce_temporal_nanoseconds) {
2209
+ *output_type = PandasWriter::TIMEDELTA_NANO;
2210
+ } else {
2211
+ switch (dur_type.unit()) {
2212
+ case TimeUnit::SECOND:
2213
+ *output_type = PandasWriter::TIMEDELTA_SECOND;
2214
+ break;
2215
+ case TimeUnit::MILLI:
2216
+ *output_type = PandasWriter::TIMEDELTA_MILLI;
2217
+ break;
2218
+ case TimeUnit::MICRO:
2219
+ *output_type = PandasWriter::TIMEDELTA_MICRO;
2220
+ break;
2221
+ case TimeUnit::NANO:
2222
+ *output_type = PandasWriter::TIMEDELTA_NANO;
2223
+ break;
2224
+ }
2225
+ }
2226
+ } break;
2227
+ case Type::FIXED_SIZE_LIST:
2228
+ case Type::LIST:
2229
+ case Type::LARGE_LIST:
2230
+ case Type::LIST_VIEW:
2231
+ case Type::LARGE_LIST_VIEW:
2232
+ case Type::MAP: {
2233
+ auto list_type = std::static_pointer_cast<BaseListType>(data.type());
2234
+ if (!ListTypeSupported(*list_type->value_type())) {
2235
+ return Status::NotImplemented("Not implemented type for Arrow list to pandas: ",
2236
+ list_type->value_type()->ToString());
2237
+ }
2238
+ *output_type = PandasWriter::OBJECT;
2239
+ } break;
2240
+ case Type::DICTIONARY:
2241
+ *output_type = PandasWriter::CATEGORICAL;
2242
+ break;
2243
+ case Type::EXTENSION:
2244
+ *output_type = PandasWriter::EXTENSION;
2245
+ break;
2246
+ default:
2247
+ return Status::NotImplemented(
2248
+ "No known equivalent Pandas block for Arrow data of type ",
2249
+ data.type()->ToString());
2250
+ }
2251
+ return Status::OK();
2252
+ }
2253
+
2254
+ // Construct the exact pandas "BlockManager" memory layout
2255
+ //
2256
+ // * For each column determine the correct output pandas type
2257
+ // * Allocate 2D blocks (ncols x nrows) for each distinct data type in output
2258
+ // * Allocate block placement arrays
2259
+ // * Write Arrow columns out into each slice of memory; populate block
2260
+ // * placement arrays as we go
2261
+ class PandasBlockCreator {
2262
+ public:
2263
+ using WriterMap = std::unordered_map<int, std::shared_ptr<PandasWriter>>;
2264
+
2265
+ explicit PandasBlockCreator(const PandasOptions& options, FieldVector fields,
2266
+ ChunkedArrayVector arrays)
2267
+ : options_(options), fields_(std::move(fields)), arrays_(std::move(arrays)) {
2268
+ num_columns_ = static_cast<int>(arrays_.size());
2269
+ if (num_columns_ > 0) {
2270
+ num_rows_ = arrays_[0]->length();
2271
+ }
2272
+ column_block_placement_.resize(num_columns_);
2273
+ }
2274
+ virtual ~PandasBlockCreator() = default;
2275
+
2276
+ virtual Status Convert(PyObject** out) = 0;
2277
+
2278
+ Status AppendBlocks(const WriterMap& blocks, PyObject* list) {
2279
+ for (const auto& it : blocks) {
2280
+ PyObject* item;
2281
+ RETURN_NOT_OK(it.second->GetDataFrameResult(&item));
2282
+ if (PyList_Append(list, item) < 0) {
2283
+ RETURN_IF_PYERROR();
2284
+ }
2285
+
2286
+ // ARROW-1017; PyList_Append increments object refcount
2287
+ Py_DECREF(item);
2288
+ }
2289
+ return Status::OK();
2290
+ }
2291
+
2292
+ protected:
2293
+ PandasOptions options_;
2294
+
2295
+ FieldVector fields_;
2296
+ ChunkedArrayVector arrays_;
2297
+ int num_columns_;
2298
+ int64_t num_rows_;
2299
+
2300
+ // column num -> relative placement within internal block
2301
+ std::vector<int> column_block_placement_;
2302
+ };
2303
+
2304
+ // Helper function for extension chunked arrays
2305
+ // Constructing a storage chunked array of an extension chunked array
2306
+ std::shared_ptr<ChunkedArray> GetStorageChunkedArray(std::shared_ptr<ChunkedArray> arr) {
2307
+ auto value_type = checked_cast<const ExtensionType&>(*arr->type()).storage_type();
2308
+ ArrayVector storage_arrays;
2309
+ for (int c = 0; c < arr->num_chunks(); c++) {
2310
+ const auto& arr_ext = checked_cast<const ExtensionArray&>(*arr->chunk(c));
2311
+ storage_arrays.emplace_back(arr_ext.storage());
2312
+ }
2313
+ return std::make_shared<ChunkedArray>(std::move(storage_arrays), value_type);
2314
+ };
2315
+
2316
+ // Helper function to decode RunEndEncodedArray
2317
+ Result<std::shared_ptr<ChunkedArray>> GetDecodedChunkedArray(
2318
+ std::shared_ptr<ChunkedArray> arr) {
2319
+ ARROW_ASSIGN_OR_RAISE(Datum decoded, compute::RunEndDecode(arr));
2320
+ DCHECK(decoded.is_chunked_array());
2321
+ return decoded.chunked_array();
2322
+ };
2323
+
2324
+ class ConsolidatedBlockCreator : public PandasBlockCreator {
2325
+ public:
2326
+ using PandasBlockCreator::PandasBlockCreator;
2327
+
2328
+ Status Convert(PyObject** out) override {
2329
+ column_types_.resize(num_columns_);
2330
+ RETURN_NOT_OK(CreateBlocks());
2331
+ RETURN_NOT_OK(WriteTableToBlocks());
2332
+ PyAcquireGIL lock;
2333
+
2334
+ PyObject* result = PyList_New(0);
2335
+ RETURN_IF_PYERROR();
2336
+
2337
+ RETURN_NOT_OK(AppendBlocks(blocks_, result));
2338
+ RETURN_NOT_OK(AppendBlocks(singleton_blocks_, result));
2339
+
2340
+ *out = result;
2341
+ return Status::OK();
2342
+ }
2343
+
2344
+ Status GetBlockType(int column_index, PandasWriter::type* out) {
2345
+ if (options_.extension_columns.count(fields_[column_index]->name())) {
2346
+ *out = PandasWriter::EXTENSION;
2347
+ return Status::OK();
2348
+ } else {
2349
+ // In case of an extension array default to the storage type
2350
+ if (arrays_[column_index]->type()->id() == Type::EXTENSION) {
2351
+ arrays_[column_index] = GetStorageChunkedArray(arrays_[column_index]);
2352
+ }
2353
+ // In case of a RunEndEncodedArray default to the values type
2354
+ else if (arrays_[column_index]->type()->id() == Type::RUN_END_ENCODED) {
2355
+ ARROW_ASSIGN_OR_RAISE(arrays_[column_index],
2356
+ GetDecodedChunkedArray(arrays_[column_index]));
2357
+ }
2358
+ return GetPandasWriterType(*arrays_[column_index], options_, out);
2359
+ }
2360
+ }
2361
+
2362
+ Status CreateBlocks() {
2363
+ for (int i = 0; i < num_columns_; ++i) {
2364
+ const DataType& type = *arrays_[i]->type();
2365
+ PandasWriter::type output_type;
2366
+ RETURN_NOT_OK(GetBlockType(i, &output_type));
2367
+
2368
+ int block_placement = 0;
2369
+ std::shared_ptr<PandasWriter> writer;
2370
+ if (output_type == PandasWriter::CATEGORICAL ||
2371
+ output_type == PandasWriter::DATETIME_SECOND_TZ ||
2372
+ output_type == PandasWriter::DATETIME_MILLI_TZ ||
2373
+ output_type == PandasWriter::DATETIME_MICRO_TZ ||
2374
+ output_type == PandasWriter::DATETIME_NANO_TZ ||
2375
+ output_type == PandasWriter::EXTENSION) {
2376
+ RETURN_NOT_OK(MakeWriter(options_, output_type, type, num_rows_,
2377
+ /*num_columns=*/1, &writer));
2378
+ singleton_blocks_[i] = writer;
2379
+ } else {
2380
+ auto it = block_sizes_.find(output_type);
2381
+ if (it != block_sizes_.end()) {
2382
+ block_placement = it->second;
2383
+ // Increment count
2384
+ ++it->second;
2385
+ } else {
2386
+ // Add key to map
2387
+ block_sizes_[output_type] = 1;
2388
+ }
2389
+ }
2390
+ column_types_[i] = output_type;
2391
+ column_block_placement_[i] = block_placement;
2392
+ }
2393
+
2394
+ // Create normal non-categorical blocks
2395
+ for (const auto& it : this->block_sizes_) {
2396
+ PandasWriter::type output_type = static_cast<PandasWriter::type>(it.first);
2397
+ std::shared_ptr<PandasWriter> block;
2398
+ RETURN_NOT_OK(MakeWriter(this->options_, output_type, /*unused*/ *null(), num_rows_,
2399
+ it.second, &block));
2400
+ this->blocks_[output_type] = block;
2401
+ }
2402
+ return Status::OK();
2403
+ }
2404
+
2405
+ Status GetWriter(int i, std::shared_ptr<PandasWriter>* block) {
2406
+ PandasWriter::type output_type = this->column_types_[i];
2407
+ switch (output_type) {
2408
+ case PandasWriter::CATEGORICAL:
2409
+ case PandasWriter::DATETIME_SECOND_TZ:
2410
+ case PandasWriter::DATETIME_MILLI_TZ:
2411
+ case PandasWriter::DATETIME_MICRO_TZ:
2412
+ case PandasWriter::DATETIME_NANO_TZ:
2413
+ case PandasWriter::EXTENSION: {
2414
+ auto it = this->singleton_blocks_.find(i);
2415
+ if (it == this->singleton_blocks_.end()) {
2416
+ return Status::KeyError("No block allocated");
2417
+ }
2418
+ *block = it->second;
2419
+ } break;
2420
+ default:
2421
+ auto it = this->blocks_.find(output_type);
2422
+ if (it == this->blocks_.end()) {
2423
+ return Status::KeyError("No block allocated");
2424
+ }
2425
+ *block = it->second;
2426
+ break;
2427
+ }
2428
+ return Status::OK();
2429
+ }
2430
+
2431
+ Status WriteTableToBlocks() {
2432
+ auto WriteColumn = [this](int i) {
2433
+ std::shared_ptr<PandasWriter> block;
2434
+ RETURN_NOT_OK(this->GetWriter(i, &block));
2435
+ // ARROW-3789 Use std::move on the array to permit self-destructing
2436
+ return block->Write(std::move(arrays_[i]), i, this->column_block_placement_[i]);
2437
+ };
2438
+
2439
+ return OptionalParallelFor(options_.use_threads, num_columns_, WriteColumn);
2440
+ }
2441
+
2442
+ private:
2443
+ // column num -> block type id
2444
+ std::vector<PandasWriter::type> column_types_;
2445
+
2446
+ // block type -> type count
2447
+ std::unordered_map<int, int> block_sizes_;
2448
+ std::unordered_map<int, const DataType*> block_types_;
2449
+
2450
+ // block type -> block
2451
+ WriterMap blocks_;
2452
+
2453
+ WriterMap singleton_blocks_;
2454
+ };
2455
+
2456
+ /// \brief Create blocks for pandas.DataFrame block manager using one block per
2457
+ /// column strategy. This permits some zero-copy optimizations as well as the
2458
+ /// ability for the table to "self-destruct" if selected by the user.
2459
+ class SplitBlockCreator : public PandasBlockCreator {
2460
+ public:
2461
+ using PandasBlockCreator::PandasBlockCreator;
2462
+
2463
+ Status GetWriter(int i, std::shared_ptr<PandasWriter>* writer) {
2464
+ PandasWriter::type output_type = PandasWriter::OBJECT;
2465
+ const DataType& type = *arrays_[i]->type();
2466
+ if (options_.extension_columns.count(fields_[i]->name())) {
2467
+ output_type = PandasWriter::EXTENSION;
2468
+ } else {
2469
+ // Null count needed to determine output type
2470
+ RETURN_NOT_OK(GetPandasWriterType(*arrays_[i], options_, &output_type));
2471
+ }
2472
+ return MakeWriter(this->options_, output_type, type, num_rows_, 1, writer);
2473
+ }
2474
+
2475
+ Status Convert(PyObject** out) override {
2476
+ PyAcquireGIL lock;
2477
+
2478
+ PyObject* result = PyList_New(0);
2479
+ RETURN_IF_PYERROR();
2480
+
2481
+ for (int i = 0; i < num_columns_; ++i) {
2482
+ std::shared_ptr<PandasWriter> writer;
2483
+ RETURN_NOT_OK(GetWriter(i, &writer));
2484
+ // ARROW-3789 Use std::move on the array to permit self-destructing
2485
+ RETURN_NOT_OK(writer->Write(std::move(arrays_[i]), i, /*rel_placement=*/0));
2486
+
2487
+ PyObject* item;
2488
+ RETURN_NOT_OK(writer->GetDataFrameResult(&item));
2489
+ if (PyList_Append(result, item) < 0) {
2490
+ RETURN_IF_PYERROR();
2491
+ }
2492
+ // PyList_Append increments object refcount
2493
+ Py_DECREF(item);
2494
+ }
2495
+
2496
+ *out = result;
2497
+ return Status::OK();
2498
+ }
2499
+
2500
+ private:
2501
+ std::vector<std::shared_ptr<PandasWriter>> writers_;
2502
+ };
2503
+
2504
+ Status ConvertCategoricals(const PandasOptions& options, ChunkedArrayVector* arrays,
2505
+ FieldVector* fields) {
2506
+ std::vector<int> columns_to_encode;
2507
+
2508
+ // For Categorical conversions
2509
+ auto EncodeColumn = [&](int j) {
2510
+ int i = columns_to_encode[j];
2511
+ if (options.zero_copy_only) {
2512
+ return Status::Invalid("Need to dictionary encode a column, but ",
2513
+ "only zero-copy conversions allowed");
2514
+ }
2515
+ compute::ExecContext ctx(options.pool);
2516
+ ARROW_ASSIGN_OR_RAISE(
2517
+ Datum out, DictionaryEncode((*arrays)[i],
2518
+ compute::DictionaryEncodeOptions::Defaults(), &ctx));
2519
+ (*arrays)[i] = out.chunked_array();
2520
+ (*fields)[i] = (*fields)[i]->WithType((*arrays)[i]->type());
2521
+ return Status::OK();
2522
+ };
2523
+
2524
+ if (!options.categorical_columns.empty()) {
2525
+ for (int i = 0; i < static_cast<int>(arrays->size()); i++) {
2526
+ if ((*arrays)[i]->type()->id() != Type::DICTIONARY &&
2527
+ options.categorical_columns.count((*fields)[i]->name())) {
2528
+ columns_to_encode.push_back(i);
2529
+ }
2530
+ }
2531
+ }
2532
+ if (options.strings_to_categorical) {
2533
+ for (int i = 0; i < static_cast<int>(arrays->size()); i++) {
2534
+ if (is_base_binary_like((*arrays)[i]->type()->id())) {
2535
+ columns_to_encode.push_back(i);
2536
+ }
2537
+ }
2538
+ }
2539
+ return OptionalParallelFor(options.use_threads,
2540
+ static_cast<int>(columns_to_encode.size()), EncodeColumn);
2541
+ }
2542
+
2543
+ } // namespace
2544
+
2545
+ Status ConvertArrayToPandas(const PandasOptions& options, std::shared_ptr<Array> arr,
2546
+ PyObject* py_ref, PyObject** out) {
2547
+ return ConvertChunkedArrayToPandas(
2548
+ options, std::make_shared<ChunkedArray>(std::move(arr)), py_ref, out);
2549
+ }
2550
+
2551
+ Status ConvertChunkedArrayToPandas(const PandasOptions& options,
2552
+ std::shared_ptr<ChunkedArray> arr, PyObject* py_ref,
2553
+ PyObject** out) {
2554
+ if (options.decode_dictionaries && arr->type()->id() == Type::DICTIONARY) {
2555
+ // XXX we should return an error as below if options.zero_copy_only
2556
+ // is true, but that would break compatibility with existing tests.
2557
+ const auto& dense_type =
2558
+ checked_cast<const DictionaryType&>(*arr->type()).value_type();
2559
+ RETURN_NOT_OK(DecodeDictionaries(options.pool, dense_type, &arr));
2560
+ DCHECK_NE(arr->type()->id(), Type::DICTIONARY);
2561
+
2562
+ // The original Python DictionaryArray won't own the memory anymore
2563
+ // as we actually built a new array when we decoded the DictionaryArray
2564
+ // thus let the final resulting numpy array own the memory through a Capsule
2565
+ py_ref = nullptr;
2566
+ }
2567
+
2568
+ if (options.strings_to_categorical && is_base_binary_like(arr->type()->id())) {
2569
+ if (options.zero_copy_only) {
2570
+ return Status::Invalid("Need to dictionary encode a column, but ",
2571
+ "only zero-copy conversions allowed");
2572
+ }
2573
+ compute::ExecContext ctx(options.pool);
2574
+ ARROW_ASSIGN_OR_RAISE(
2575
+ Datum out,
2576
+ DictionaryEncode(arr, compute::DictionaryEncodeOptions::Defaults(), &ctx));
2577
+ arr = out.chunked_array();
2578
+ }
2579
+
2580
+ PandasOptions modified_options = options;
2581
+ modified_options.strings_to_categorical = false;
2582
+
2583
+ // ARROW-7596: We permit the hybrid Series/DataFrame code path to do zero copy
2584
+ // optimizations that we do not allow in the default case when converting
2585
+ // Table->DataFrame
2586
+ modified_options.allow_zero_copy_blocks = true;
2587
+
2588
+ // In case of an extension array default to the storage type
2589
+ if (arr->type()->id() == Type::EXTENSION) {
2590
+ arr = GetStorageChunkedArray(arr);
2591
+ }
2592
+ // In case of a RunEndEncodedArray decode the array
2593
+ else if (arr->type()->id() == Type::RUN_END_ENCODED) {
2594
+ if (options.zero_copy_only) {
2595
+ return Status::Invalid("Need to dencode a RunEndEncodedArray, but ",
2596
+ "only zero-copy conversions allowed");
2597
+ }
2598
+ ARROW_ASSIGN_OR_RAISE(arr, GetDecodedChunkedArray(arr));
2599
+
2600
+ // Because we built a new array when we decoded the RunEndEncodedArray
2601
+ // the final resulting numpy array should own the memory through a Capsule
2602
+ py_ref = nullptr;
2603
+ }
2604
+
2605
+ PandasWriter::type output_type;
2606
+ RETURN_NOT_OK(GetPandasWriterType(*arr, modified_options, &output_type));
2607
+ if (options.decode_dictionaries) {
2608
+ DCHECK_NE(output_type, PandasWriter::CATEGORICAL);
2609
+ }
2610
+
2611
+ std::shared_ptr<PandasWriter> writer;
2612
+ RETURN_NOT_OK(MakeWriter(modified_options, output_type, *arr->type(), arr->length(),
2613
+ /*num_columns=*/1, &writer));
2614
+ RETURN_NOT_OK(writer->TransferSingle(std::move(arr), py_ref));
2615
+ return writer->GetSeriesResult(out);
2616
+ }
2617
+
2618
+ Status ConvertTableToPandas(const PandasOptions& options, std::shared_ptr<Table> table,
2619
+ PyObject** out) {
2620
+ ChunkedArrayVector arrays = table->columns();
2621
+ FieldVector fields = table->fields();
2622
+
2623
+ // ARROW-3789: allow "self-destructing" by releasing references to columns as
2624
+ // we convert them to pandas
2625
+ table = nullptr;
2626
+
2627
+ RETURN_NOT_OK(ConvertCategoricals(options, &arrays, &fields));
2628
+
2629
+ PandasOptions modified_options = options;
2630
+ modified_options.strings_to_categorical = false;
2631
+ modified_options.categorical_columns.clear();
2632
+
2633
+ if (options.split_blocks) {
2634
+ modified_options.allow_zero_copy_blocks = true;
2635
+ SplitBlockCreator helper(modified_options, std::move(fields), std::move(arrays));
2636
+ return helper.Convert(out);
2637
+ } else {
2638
+ ConsolidatedBlockCreator helper(modified_options, std::move(fields),
2639
+ std::move(arrays));
2640
+ return helper.Convert(out);
2641
+ }
2642
+ }
2643
+
2644
+ } // namespace py
2645
+ } // namespace arrow
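
For context, a minimal sketch of how the ConvertTableToPandas entry point defined above might be driven from embedding C++ code. This is an illustrative example, not part of the file: it assumes the Python interpreter and NumPy have already been initialized (for example via arrow::py::import_pyarrow()), and the function name TableToPandasBlocks is a hypothetical wrapper. The returned PyObject is the list of per-block dicts that pyarrow's pandas_compat layer assembles into a pandas.DataFrame.

// Sketch only: convert an arrow::Table into the list of pandas "block" dicts
// produced by ConvertTableToPandas. Assumes the Python runtime is initialized.
#include <memory>
#include <utility>

#include "arrow/python/arrow_to_pandas.h"
#include "arrow/table.h"

arrow::Status TableToPandasBlocks(std::shared_ptr<arrow::Table> table,
                                  PyObject** out) {
  arrow::py::PandasOptions options;  // defaults: safe casts, single-threaded
  options.use_threads = true;        // opt into parallel per-column conversion
  // ConvertTableToPandas releases its reference to the table internally,
  // which is what enables the "self-destruct" optimization mentioned above.
  return arrow::py::ConvertTableToPandas(options, std::move(table), out);
}
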
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/arrow_to_pandas.h ADDED
@@ -0,0 +1,146 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // Functions for converting between pandas's NumPy-based data representation
19
+ // and Arrow data structures
20
+
21
+ #pragma once
22
+
23
+ #include "arrow/python/platform.h"
24
+
25
+ #include <memory>
26
+ #include <string>
27
+ #include <unordered_set>
28
+
29
+ #include "arrow/memory_pool.h"
30
+ #include "arrow/python/visibility.h"
31
+
32
+ namespace arrow {
33
+
34
+ class Array;
35
+ class ChunkedArray;
36
+ class Column;
37
+ class DataType;
38
+ class MemoryPool;
39
+ class Status;
40
+ class Table;
41
+
42
+ namespace py {
43
+
44
+ enum class MapConversionType {
45
+ DEFAULT, // convert arrow maps to assoc lists (list of key-value tuples) in Pandas
46
+ LOSSY, // report warnings when lossiness is encountered due to duplicate keys
47
+ STRICT_, // raise a Python exception when lossiness is encountered due to duplicate
48
+ // keys
49
+ };
50
+
51
+ struct PandasOptions {
52
+ /// arrow::MemoryPool to use for memory allocations
53
+ MemoryPool* pool = default_memory_pool();
54
+
55
+ /// If true, we will convert all string columns to categoricals
56
+ bool strings_to_categorical = false;
57
+ bool zero_copy_only = false;
58
+ bool integer_object_nulls = false;
59
+ bool date_as_object = false;
60
+ bool timestamp_as_object = false;
61
+ bool use_threads = false;
62
+
63
+ /// Coerce all date and timestamp to datetime64[ns]
64
+ bool coerce_temporal_nanoseconds = false;
65
+
66
+ /// Used to maintain backwards compatibility for
67
+ /// timezone bugs (see ARROW-9528). Should be removed
68
+ /// after Arrow 2.0 release.
69
+ bool ignore_timezone = false;
70
+
71
+ /// \brief If true, do not create duplicate PyObject versions of equal
72
+ /// objects. This only applies to immutable objects like strings or datetime
73
+ /// objects
74
+ bool deduplicate_objects = false;
75
+
76
+ /// \brief For certain data types, a cast is needed in order to store the
77
+ /// data in a pandas DataFrame or Series (e.g. timestamps are always stored
78
+ /// as nanoseconds in pandas). This option controls whether it is a safe
79
+ /// cast or not.
80
+ bool safe_cast = true;
81
+
82
+ /// \brief If true, create one block per column rather than consolidated
83
+ /// blocks (1 per data type). Do zero-copy wrapping when there are no
84
+ /// nulls. pandas currently will consolidate the blocks on its own, causing
85
+ /// increased memory use, so keep this in mind if you are working in a
86
+ /// memory-constrained situation.
87
+ bool split_blocks = false;
88
+
89
+ /// \brief If true, allow non-writable zero-copy views to be created for
90
+ /// single column blocks. This option is also used to provide zero copy for
91
+ /// Series data
92
+ bool allow_zero_copy_blocks = false;
93
+
94
+ /// \brief If true, attempt to deallocate buffers in passed Arrow object if
95
+ /// it is the only remaining shared_ptr copy of it. See ARROW-3789 for
96
+ /// original context for this feature. Only currently implemented for Table
97
+ /// conversions
98
+ bool self_destruct = false;
99
+
100
+ /// \brief The default behavior (DEFAULT), is to convert Arrow Map arrays to
101
+ /// Python association lists (list-of-tuples) in the same order as the Arrow
102
+ /// Map, as in [(key1, value1), (key2, value2), ...]
103
+ /// If LOSSY or STRICT, convert Arrow Map arrays to native Python dicts.
104
+ /// This can change the ordering of (key, value) pairs, and will deduplicate
105
+ /// multiple keys, resulting in a possible loss of data.
106
+ /// If 'lossy', this key deduplication results in a warning printed
107
+ /// when detected. If 'strict', this instead results in an exception
108
+ /// being raised when detected.
109
+ MapConversionType maps_as_pydicts = MapConversionType::DEFAULT;
110
+
111
+ // Used internally for nested arrays.
112
+ bool decode_dictionaries = false;
113
+
114
+ // Columns that should be cast to categorical
115
+ std::unordered_set<std::string> categorical_columns;
116
+
117
+ // Columns that should be passed through to be converted to
118
+ // ExtensionArray/Block
119
+ std::unordered_set<std::string> extension_columns;
120
+
121
+ // Used internally to distinguish between to_numpy() and to_pandas() when
122
+ // the expected output differs
123
+ bool to_numpy = false;
124
+ };
125
+
126
+ ARROW_PYTHON_EXPORT
127
+ Status ConvertArrayToPandas(const PandasOptions& options, std::shared_ptr<Array> arr,
128
+ PyObject* py_ref, PyObject** out);
129
+
130
+ ARROW_PYTHON_EXPORT
131
+ Status ConvertChunkedArrayToPandas(const PandasOptions& options,
132
+ std::shared_ptr<ChunkedArray> col, PyObject* py_ref,
133
+ PyObject** out);
134
+
135
+ // Convert a whole table as efficiently as possible to a pandas.DataFrame.
136
+ //
137
+ // The returned Python object is a list of tuples consisting of the exact 2D
138
+ // BlockManager structure of the pandas.DataFrame used as of pandas 0.19.x.
139
+ //
140
+ // tuple item: (indices: ndarray[int32], block: ndarray[TYPE, ndim=2])
141
+ ARROW_PYTHON_EXPORT
142
+ Status ConvertTableToPandas(const PandasOptions& options, std::shared_ptr<Table> table,
143
+ PyObject** out);
144
+
145
+ } // namespace py
146
+ } // namespace arrow
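Illustrative usage of the declarations above (assumptions: `table` is a std::shared_ptr<arrow::Table>, pyarrow is initialized, and the GIL is held by the caller):

    arrow::py::PandasOptions options;
    options.use_threads = true;
    options.split_blocks = true;   // one block per column, zero-copy where possible
    options.self_destruct = true;  // release column buffers as they are converted
    PyObject* out = nullptr;
    arrow::Status st = arrow::py::ConvertTableToPandas(options, std::move(table), &out);
    // On success, `out` holds the block description consumed on the Python side.

As the comments above note, split_blocks together with self_destruct lowers peak memory at the cost of pandas consolidating the blocks again later.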
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/arrow_to_python_internal.h ADDED
@@ -0,0 +1,49 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include "arrow/array.h"
21
+ #include "arrow/python/platform.h"
22
+
23
+ namespace arrow {
24
+ namespace py {
25
+ namespace internal {
26
+ // TODO(ARROW-12976): See if we can refactor Pandas ObjectWriter logic
27
+ // to the .cc file and move this there as well if we can.
28
+
29
+ // Converts an array to a sequence of Python objects.
30
+ template <typename ArrayType, typename WriteValue, typename Assigner>
31
+ inline Status WriteArrayObjects(const ArrayType& arr, WriteValue&& write_func,
32
+ Assigner out_values) {
33
+ // TODO(ARROW-12976): Use visitor here?
34
+ const bool has_nulls = arr.null_count() > 0;
35
+ for (int64_t i = 0; i < arr.length(); ++i) {
36
+ if (has_nulls && arr.IsNull(i)) {
37
+ Py_INCREF(Py_None);
38
+ *out_values = Py_None;
39
+ } else {
40
+ RETURN_NOT_OK(write_func(arr.GetView(i), out_values));
41
+ }
42
+ ++out_values;
43
+ }
44
+ return Status::OK();
45
+ }
46
+
47
+ } // namespace internal
48
+ } // namespace py
49
+ } // namespace arrow
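A hypothetical adapter built on WriteArrayObjects (assumed: `out_values` points to a pre-allocated buffer of arr.length() PyObject* slots, the GIL is held, and StringArray::GetView returns a std::string_view):

    arrow::Status WriteStrings(const arrow::StringArray& arr, PyObject** out_values) {
      return arrow::py::internal::WriteArrayObjects(
          arr,
          [](std::string_view view, PyObject** out) {
            // Nulls are handled by WriteArrayObjects itself (Py_None is assigned).
            *out = PyUnicode_FromStringAndSize(view.data(), view.size());
            return *out != nullptr ? arrow::Status::OK()
                                   : arrow::Status::UnknownError("PyUnicode allocation failed");
          },
          out_values);
    }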
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/async.h ADDED
@@ -0,0 +1,60 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <utility>
21
+
22
+ #include "arrow/python/common.h"
23
+ #include "arrow/status.h"
24
+ #include "arrow/util/future.h"
25
+
26
+ namespace arrow::py {
27
+
28
+ /// \brief Bind a Python callback to an arrow::Future.
29
+ ///
30
+ /// If the Future finishes successfully, py_wrapper is called with its
31
+ /// result value and should return a PyObject*. If py_wrapper is successful,
32
+ /// py_cb is called with its return value.
33
+ ///
34
+ /// If either the Future or py_wrapper fails, py_cb is called with the
35
+ /// associated Python exception.
36
+ ///
37
+ /// \param future The future to bind to.
38
+ /// \param py_cb The Python callback function. Will be passed the result of
39
+ /// py_wrapper, or a Python exception if the future failed or one was
40
+ /// raised by py_wrapper.
41
+ /// \param py_wrapper A function (likely defined in Cython) to convert the C++
42
+ /// result of the future to a Python object.
43
+ template <typename T, typename PyWrapper = PyObject* (*)(T)>
44
+ void BindFuture(Future<T> future, PyObject* py_cb, PyWrapper py_wrapper) {
45
+ Py_INCREF(py_cb);
46
+ OwnedRefNoGIL cb_ref(py_cb);
47
+
48
+ auto future_cb = [cb_ref = std::move(cb_ref),
49
+ py_wrapper = std::move(py_wrapper)](Result<T> result) {
50
+ SafeCallIntoPythonVoid([&]() {
51
+ OwnedRef py_value_or_exc{WrapResult(std::move(result), std::move(py_wrapper))};
52
+ Py_XDECREF(
53
+ PyObject_CallFunctionObjArgs(cb_ref.obj(), py_value_or_exc.obj(), NULLPTR));
54
+ ARROW_WARN_NOT_OK(CheckPyError(), "Internal error in async call");
55
+ });
56
+ };
57
+ future.AddCallback(std::move(future_cb));
58
+ }
59
+
60
+ } // namespace arrow::py
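A hypothetical binding built on BindFuture (the function name and `py_cb` are illustrative): if the future succeeds, the Python callable receives a Python int; otherwise it receives the corresponding Python exception object, as described in the comment above.

    void BindCountFuture(arrow::Future<int64_t> fut, PyObject* py_cb) {
      arrow::py::BindFuture(std::move(fut), py_cb,
                            [](int64_t value) { return PyLong_FromLongLong(value); });
    }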
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/benchmark.cc ADDED
@@ -0,0 +1,38 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "arrow/python/benchmark.h"
19
+ #include "arrow/python/helpers.h"
20
+
21
+ namespace arrow {
22
+ namespace py {
23
+ namespace benchmark {
24
+
25
+ void Benchmark_PandasObjectIsNull(PyObject* list) {
26
+ if (!PyList_CheckExact(list)) {
27
+ PyErr_SetString(PyExc_TypeError, "expected a list");
28
+ return;
29
+ }
30
+ Py_ssize_t i, n = PyList_GET_SIZE(list);
31
+ for (i = 0; i < n; i++) {
32
+ internal::PandasObjectIsNull(PyList_GET_ITEM(list, i));
33
+ }
34
+ }
35
+
36
+ } // namespace benchmark
37
+ } // namespace py
38
+ } // namespace arrow
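A minimal sketch of a caller (for instance a Cython-generated wrapper), assuming the GIL is held:

    PyObject* values = PyList_New(0);
    PyList_Append(values, Py_None);  // any objects whose null-ness should be probed
    arrow::py::benchmark::Benchmark_PandasObjectIsNull(values);
    Py_DECREF(values);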
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/benchmark.h ADDED
@@ -0,0 +1,36 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include "arrow/python/platform.h"
21
+
22
+ #include "arrow/python/visibility.h"
23
+
24
+ namespace arrow {
25
+ namespace py {
26
+ namespace benchmark {
27
+
28
+ // Micro-benchmark routines for use from ASV
29
+
30
+ // Run PandasObjectIsNull() once over every object in *list*
31
+ ARROW_PYTHON_EXPORT
32
+ void Benchmark_PandasObjectIsNull(PyObject* list);
33
+
34
+ } // namespace benchmark
35
+ } // namespace py
36
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/common.cc ADDED
@@ -0,0 +1,246 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "arrow/python/common.h"
19
+
20
+ #include <cstdlib>
21
+ #include <mutex>
22
+ #include <sstream>
23
+ #include <string>
24
+
25
+ #include "arrow/memory_pool.h"
26
+ #include "arrow/status.h"
27
+ #include "arrow/util/checked_cast.h"
28
+ #include "arrow/util/logging.h"
29
+
30
+ #include "arrow/python/helpers.h"
31
+
32
+ namespace arrow {
33
+
34
+ using internal::checked_cast;
35
+
36
+ namespace py {
37
+
38
+ static std::mutex memory_pool_mutex;
39
+ static MemoryPool* default_python_pool = nullptr;
40
+
41
+ void set_default_memory_pool(MemoryPool* pool) {
42
+ std::lock_guard<std::mutex> guard(memory_pool_mutex);
43
+ default_python_pool = pool;
44
+ }
45
+
46
+ MemoryPool* get_memory_pool() {
47
+ std::lock_guard<std::mutex> guard(memory_pool_mutex);
48
+ if (default_python_pool) {
49
+ return default_python_pool;
50
+ } else {
51
+ return default_memory_pool();
52
+ }
53
+ }
54
+
55
+ // ----------------------------------------------------------------------
56
+ // PythonErrorDetail
57
+
58
+ namespace {
59
+
60
+ const char kErrorDetailTypeId[] = "arrow::py::PythonErrorDetail";
61
+
62
+ // Try to match the Python exception type with an appropriate Status code
63
+ StatusCode MapPyError(PyObject* exc_type) {
64
+ StatusCode code;
65
+
66
+ if (PyErr_GivenExceptionMatches(exc_type, PyExc_MemoryError)) {
67
+ code = StatusCode::OutOfMemory;
68
+ } else if (PyErr_GivenExceptionMatches(exc_type, PyExc_IndexError)) {
69
+ code = StatusCode::IndexError;
70
+ } else if (PyErr_GivenExceptionMatches(exc_type, PyExc_KeyError)) {
71
+ code = StatusCode::KeyError;
72
+ } else if (PyErr_GivenExceptionMatches(exc_type, PyExc_TypeError)) {
73
+ code = StatusCode::TypeError;
74
+ } else if (PyErr_GivenExceptionMatches(exc_type, PyExc_ValueError) ||
75
+ PyErr_GivenExceptionMatches(exc_type, PyExc_OverflowError)) {
76
+ code = StatusCode::Invalid;
77
+ } else if (PyErr_GivenExceptionMatches(exc_type, PyExc_EnvironmentError)) {
78
+ code = StatusCode::IOError;
79
+ } else if (PyErr_GivenExceptionMatches(exc_type, PyExc_NotImplementedError)) {
80
+ code = StatusCode::NotImplemented;
81
+ } else {
82
+ code = StatusCode::UnknownError;
83
+ }
84
+ return code;
85
+ }
86
+
87
+ // PythonErrorDetail indicates a Python exception was raised.
88
+ class PythonErrorDetail : public StatusDetail {
89
+ public:
90
+ const char* type_id() const override { return kErrorDetailTypeId; }
91
+
92
+ std::string ToString() const override {
93
+ // This is simple enough not to need the GIL
94
+ Result<std::string> result = FormatImpl();
95
+
96
+ if (result.ok()) {
97
+ return result.ValueOrDie();
98
+ } else {
99
+ // Fallback to just the exception type
100
+ const auto ty = reinterpret_cast<const PyTypeObject*>(exc_type_.obj());
101
+ return std::string("Python exception: ") + ty->tp_name;
102
+ }
103
+ }
104
+
105
+ void RestorePyError() const {
106
+ Py_INCREF(exc_type_.obj());
107
+ Py_INCREF(exc_value_.obj());
108
+ Py_INCREF(exc_traceback_.obj());
109
+ PyErr_Restore(exc_type_.obj(), exc_value_.obj(), exc_traceback_.obj());
110
+ }
111
+
112
+ PyObject* exc_type() const { return exc_type_.obj(); }
113
+
114
+ PyObject* exc_value() const { return exc_value_.obj(); }
115
+
116
+ static std::shared_ptr<PythonErrorDetail> FromPyError() {
117
+ PyObject* exc_type = nullptr;
118
+ PyObject* exc_value = nullptr;
119
+ PyObject* exc_traceback = nullptr;
120
+
121
+ PyErr_Fetch(&exc_type, &exc_value, &exc_traceback);
122
+ PyErr_NormalizeException(&exc_type, &exc_value, &exc_traceback);
123
+ ARROW_CHECK(exc_type)
124
+ << "PythonErrorDetail::FromPyError called without a Python error set";
125
+ DCHECK(PyType_Check(exc_type));
126
+ DCHECK(exc_value); // Ensured by PyErr_NormalizeException, double-check
127
+ if (exc_traceback == nullptr) {
128
+ // Needed by PyErr_Restore()
129
+ Py_INCREF(Py_None);
130
+ exc_traceback = Py_None;
131
+ }
132
+
133
+ std::shared_ptr<PythonErrorDetail> detail(new PythonErrorDetail);
134
+ detail->exc_type_.reset(exc_type);
135
+ detail->exc_value_.reset(exc_value);
136
+ detail->exc_traceback_.reset(exc_traceback);
137
+ return detail;
138
+ }
139
+
140
+ protected:
141
+ Result<std::string> FormatImpl() const {
142
+ PyAcquireGIL lock;
143
+
144
+ // Use traceback.format_exception()
145
+ OwnedRef traceback_module;
146
+ RETURN_NOT_OK(internal::ImportModule("traceback", &traceback_module));
147
+
148
+ OwnedRef fmt_exception;
149
+ RETURN_NOT_OK(internal::ImportFromModule(traceback_module.obj(), "format_exception",
150
+ &fmt_exception));
151
+
152
+ OwnedRef formatted;
153
+ formatted.reset(PyObject_CallFunctionObjArgs(fmt_exception.obj(), exc_type_.obj(),
154
+ exc_value_.obj(), exc_traceback_.obj(),
155
+ NULL));
156
+ RETURN_IF_PYERROR();
157
+
158
+ std::stringstream ss;
159
+ ss << "Python exception: ";
160
+ Py_ssize_t num_lines = PySequence_Length(formatted.obj());
161
+ RETURN_IF_PYERROR();
162
+
163
+ for (Py_ssize_t i = 0; i < num_lines; ++i) {
164
+ Py_ssize_t line_size;
165
+
166
+ PyObject* line = PySequence_GetItem(formatted.obj(), i);
167
+ RETURN_IF_PYERROR();
168
+
169
+ const char* data = PyUnicode_AsUTF8AndSize(line, &line_size);
170
+ RETURN_IF_PYERROR();
171
+
172
+ ss << std::string_view(data, line_size);
173
+ }
174
+ return ss.str();
175
+ }
176
+
177
+ PythonErrorDetail() = default;
178
+
179
+ OwnedRefNoGIL exc_type_, exc_value_, exc_traceback_;
180
+ };
181
+
182
+ } // namespace
183
+
184
+ // ----------------------------------------------------------------------
185
+ // Python exception <-> Status
186
+
187
+ Status ConvertPyError(StatusCode code) {
188
+ auto detail = PythonErrorDetail::FromPyError();
189
+ if (code == StatusCode::UnknownError) {
190
+ code = MapPyError(detail->exc_type());
191
+ }
192
+
193
+ std::string message;
194
+ RETURN_NOT_OK(internal::PyObject_StdStringStr(detail->exc_value(), &message));
195
+ return Status(code, message, detail);
196
+ }
197
+
198
+ bool IsPyError(const Status& status) {
199
+ if (status.ok()) {
200
+ return false;
201
+ }
202
+ auto detail = status.detail();
203
+ bool result = detail != nullptr && detail->type_id() == kErrorDetailTypeId;
204
+ return result;
205
+ }
206
+
207
+ void RestorePyError(const Status& status) {
208
+ ARROW_CHECK(IsPyError(status));
209
+ const auto& detail = checked_cast<const PythonErrorDetail&>(*status.detail());
210
+ detail.RestorePyError();
211
+ }
212
+
213
+ // ----------------------------------------------------------------------
214
+ // PyBuffer
215
+
216
+ PyBuffer::PyBuffer() : Buffer(nullptr, 0) {}
217
+
218
+ Status PyBuffer::Init(PyObject* obj) {
219
+ if (!PyObject_GetBuffer(obj, &py_buf_, PyBUF_ANY_CONTIGUOUS)) {
220
+ data_ = reinterpret_cast<const uint8_t*>(py_buf_.buf);
221
+ ARROW_CHECK_NE(data_, nullptr) << "Null pointer in Py_buffer";
222
+ size_ = py_buf_.len;
223
+ capacity_ = py_buf_.len;
224
+ is_mutable_ = !py_buf_.readonly;
225
+ return Status::OK();
226
+ } else {
227
+ return ConvertPyError(StatusCode::Invalid);
228
+ }
229
+ }
230
+
231
+ Result<std::shared_ptr<Buffer>> PyBuffer::FromPyObject(PyObject* obj) {
232
+ PyBuffer* buf = new PyBuffer();
233
+ std::shared_ptr<Buffer> res(buf);
234
+ RETURN_NOT_OK(buf->Init(obj));
235
+ return res;
236
+ }
237
+
238
+ PyBuffer::~PyBuffer() {
239
+ if (data_ != nullptr) {
240
+ PyAcquireGIL lock;
241
+ PyBuffer_Release(&py_buf_);
242
+ }
243
+ }
244
+
245
+ } // namespace py
246
+ } // namespace arrow
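A minimal sketch of the exception/Status round-trip implemented above (GIL held; the KeyError text is illustrative):

    PyErr_SetString(PyExc_KeyError, "missing column");
    arrow::Status st = arrow::py::ConvertPyError();  // fetches and clears the Python error
    // MapPyError() maps PyExc_KeyError to StatusCode::KeyError
    if (arrow::py::IsPyError(st)) {
      arrow::py::RestorePyError(st);  // the original exception is set again
      PyErr_Clear();
    }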
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/common.h ADDED
@@ -0,0 +1,458 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <functional>
21
+ #include <memory>
22
+ #include <optional>
23
+ #include <utility>
24
+
25
+ #include "arrow/buffer.h"
26
+ #include "arrow/python/pyarrow.h"
27
+ #include "arrow/python/visibility.h"
28
+ #include "arrow/result.h"
29
+ #include "arrow/util/macros.h"
30
+
31
+ namespace arrow {
32
+
33
+ class MemoryPool;
34
+ template <class T>
35
+ class Result;
36
+
37
+ namespace py {
38
+
39
+ // Convert current Python error to a Status. The Python error state is cleared
40
+ // and can be restored with RestorePyError().
41
+ ARROW_PYTHON_EXPORT Status ConvertPyError(StatusCode code = StatusCode::UnknownError);
42
+ // Query whether the given Status is a Python error (as wrapped by ConvertPyError()).
43
+ ARROW_PYTHON_EXPORT bool IsPyError(const Status& status);
44
+ // Restore a Python error wrapped in a Status.
45
+ ARROW_PYTHON_EXPORT void RestorePyError(const Status& status);
46
+
47
+ // Catch a pending Python exception and return the corresponding Status.
48
+ // If no exception is pending, Status::OK() is returned.
49
+ inline Status CheckPyError(StatusCode code = StatusCode::UnknownError) {
50
+ if (ARROW_PREDICT_TRUE(!PyErr_Occurred())) {
51
+ return Status::OK();
52
+ } else {
53
+ return ConvertPyError(code);
54
+ }
55
+ }
56
+
57
+ #define RETURN_IF_PYERROR() ARROW_RETURN_NOT_OK(CheckPyError())
58
+
59
+ #define PY_RETURN_IF_ERROR(CODE) ARROW_RETURN_NOT_OK(CheckPyError(CODE))
60
+
61
+ // For Cython, as you can't define template C++ functions in Cython, only use them.
62
+ // This function can set a Python exception. It assumes that T has a (cheap)
63
+ // default constructor.
64
+ template <class T>
65
+ T GetResultValue(Result<T> result) {
66
+ if (ARROW_PREDICT_TRUE(result.ok())) {
67
+ return *std::move(result);
68
+ } else {
69
+ int r = internal::check_status(result.status()); // takes the GIL
70
+ assert(r == -1); // should have errored out
71
+ ARROW_UNUSED(r);
72
+ return {};
73
+ }
74
+ }
75
+
76
+ /// \brief Wrap a Result and return the corresponding Python object.
77
+ ///
78
+ /// If the Result is successful, py_wrapper is called with its result value
79
+ /// and should return a PyObject*. If py_wrapper is successful (returns
80
+ /// a non-NULL value), its return value is returned.
81
+ ///
82
+ /// If either the Result or py_wrapper fails, the associated Python exception
83
+ /// is raised and NULL is returned.
84
+ //
85
+ /// \param result The Result whose value to wrap in a Python object.
86
+ /// \param py_wrapper A function (likely defined in Cython) to convert the C++
87
+ /// value of the Result to a Python object.
88
+ /// \return A new Python reference, or NULL if an exception occurred
89
+ template <typename T, typename PyWrapper = PyObject* (*)(T)>
90
+ PyObject* WrapResult(Result<T> result, PyWrapper&& py_wrapper) {
91
+ static_assert(std::is_same_v<PyObject*, decltype(py_wrapper(std::declval<T>()))>,
92
+ "PyWrapper argument to WrapResult should return a PyObject* "
93
+ "when called with a T*");
94
+ Status st = result.status();
95
+ if (st.ok()) {
96
+ PyObject* py_value = py_wrapper(result.MoveValueUnsafe());
97
+ st = CheckPyError();
98
+ if (st.ok()) {
99
+ return py_value;
100
+ }
101
+ Py_XDECREF(py_value); // should be null, but who knows
102
+ }
103
+ // Status is an error, convert it to an exception.
104
+ return internal::convert_status(st);
105
+ }
106
+
107
+ // A RAII-style helper that ensures the GIL is acquired inside a lexical block.
108
+ class ARROW_PYTHON_EXPORT PyAcquireGIL {
109
+ public:
110
+ PyAcquireGIL() : acquired_gil_(false) { acquire(); }
111
+
112
+ ~PyAcquireGIL() { release(); }
113
+
114
+ void acquire() {
115
+ if (!acquired_gil_) {
116
+ state_ = PyGILState_Ensure();
117
+ acquired_gil_ = true;
118
+ }
119
+ }
120
+
121
+ // idempotent
122
+ void release() {
123
+ if (acquired_gil_) {
124
+ PyGILState_Release(state_);
125
+ acquired_gil_ = false;
126
+ }
127
+ }
128
+
129
+ private:
130
+ bool acquired_gil_;
131
+ PyGILState_STATE state_;
132
+ ARROW_DISALLOW_COPY_AND_ASSIGN(PyAcquireGIL);
133
+ };
134
+
135
+ // A RAII-style helper that releases the GIL until the end of a lexical block
136
+ class ARROW_PYTHON_EXPORT PyReleaseGIL {
137
+ public:
138
+ PyReleaseGIL() : ptr_(PyEval_SaveThread(), &unique_ptr_deleter) {}
139
+
140
+ private:
141
+ static void unique_ptr_deleter(PyThreadState* state) {
142
+ if (state) {
143
+ PyEval_RestoreThread(state);
144
+ }
145
+ }
146
+ std::unique_ptr<PyThreadState, decltype(&unique_ptr_deleter)> ptr_;
147
+ };
148
+
149
+ // A helper to call safely into the Python interpreter from arbitrary C++ code.
150
+ // The GIL is acquired, and the current thread's error status is preserved.
151
+ template <typename Function>
152
+ auto SafeCallIntoPython(Function&& func) -> decltype(func()) {
153
+ PyAcquireGIL lock;
154
+ PyObject* exc_type;
155
+ PyObject* exc_value;
156
+ PyObject* exc_traceback;
157
+ PyErr_Fetch(&exc_type, &exc_value, &exc_traceback);
158
+ auto maybe_status = std::forward<Function>(func)();
159
+ // If the return Status is a "Python error", the current Python error status
160
+ // describes the error and shouldn't be clobbered.
161
+ if (!IsPyError(::arrow::internal::GenericToStatus(maybe_status)) &&
162
+ exc_type != NULLPTR) {
163
+ PyErr_Restore(exc_type, exc_value, exc_traceback);
164
+ }
165
+ return maybe_status;
166
+ }
167
+
168
+ template <typename Function>
169
+ auto SafeCallIntoPythonVoid(Function&& func) -> decltype(func()) {
170
+ PyAcquireGIL lock;
171
+ PyObject* exc_type;
172
+ PyObject* exc_value;
173
+ PyObject* exc_traceback;
174
+ PyErr_Fetch(&exc_type, &exc_value, &exc_traceback);
175
+ func();
176
+ if (exc_type != NULLPTR) {
177
+ PyErr_Restore(exc_type, exc_value, exc_traceback);
178
+ }
179
+ }
180
+
181
+ // A RAII primitive that DECREFs the underlying PyObject* when it
182
+ // goes out of scope.
183
+ class ARROW_PYTHON_EXPORT OwnedRef {
184
+ public:
185
+ OwnedRef() : obj_(NULLPTR) {}
186
+ OwnedRef(OwnedRef&& other) : OwnedRef(other.detach()) {}
187
+ explicit OwnedRef(PyObject* obj) : obj_(obj) {}
188
+
189
+ OwnedRef& operator=(OwnedRef&& other) {
190
+ obj_ = other.detach();
191
+ return *this;
192
+ }
193
+
194
+ ~OwnedRef() {
195
+ // GH-38626: destructor may be called after the Python interpreter is finalized.
196
+ if (Py_IsInitialized()) {
197
+ reset();
198
+ }
199
+ }
200
+
201
+ void reset(PyObject* obj) {
202
+ Py_XDECREF(obj_);
203
+ obj_ = obj;
204
+ }
205
+
206
+ void reset() { reset(NULLPTR); }
207
+
208
+ PyObject* detach() {
209
+ PyObject* result = obj_;
210
+ obj_ = NULLPTR;
211
+ return result;
212
+ }
213
+
214
+ PyObject* obj() const { return obj_; }
215
+
216
+ PyObject** ref() { return &obj_; }
217
+
218
+ operator bool() const { return obj_ != NULLPTR; }
219
+
220
+ private:
221
+ ARROW_DISALLOW_COPY_AND_ASSIGN(OwnedRef);
222
+
223
+ PyObject* obj_;
224
+ };
225
+
226
+ // Same as OwnedRef, but ensures the GIL is taken when it goes out of scope.
227
+ // This is for situations where the GIL is not always known to be held
228
+ // (e.g. if it is released in the middle of a function for performance reasons)
229
+ class ARROW_PYTHON_EXPORT OwnedRefNoGIL : public OwnedRef {
230
+ public:
231
+ OwnedRefNoGIL() : OwnedRef() {}
232
+ OwnedRefNoGIL(OwnedRefNoGIL&& other) : OwnedRef(other.detach()) {}
233
+ explicit OwnedRefNoGIL(PyObject* obj) : OwnedRef(obj) {}
234
+
235
+ ~OwnedRefNoGIL() {
236
+ // GH-38626: destructor may be called after the Python interpreter is finalized.
237
+ if (Py_IsInitialized() && obj() != NULLPTR) {
238
+ PyAcquireGIL lock;
239
+ reset();
240
+ }
241
+ }
242
+ };
243
+
244
+ template <template <typename...> typename SmartPtr, typename... Ts>
245
+ class SmartPtrNoGIL : public SmartPtr<Ts...> {
246
+ using Base = SmartPtr<Ts...>;
247
+
248
+ public:
249
+ template <typename... Args>
250
+ SmartPtrNoGIL(Args&&... args) : Base(std::forward<Args>(args)...) {}
251
+
252
+ ~SmartPtrNoGIL() { reset(); }
253
+
254
+ template <typename... Args>
255
+ void reset(Args&&... args) {
256
+ auto release_guard = optional_gil_release();
257
+ Base::reset(std::forward<Args>(args)...);
258
+ }
259
+
260
+ template <typename V>
261
+ SmartPtrNoGIL& operator=(V&& v) {
262
+ auto release_guard = optional_gil_release();
263
+ Base::operator=(std::forward<V>(v));
264
+ return *this;
265
+ }
266
+
267
+ private:
268
+ // Only release the GIL if we own an object *and* the Python runtime is
269
+ // valid *and* the GIL is held.
270
+ std::optional<PyReleaseGIL> optional_gil_release() const {
271
+ if (this->get() != nullptr && Py_IsInitialized() && PyGILState_Check()) {
272
+ return PyReleaseGIL();
273
+ }
274
+ return {};
275
+ }
276
+ };
277
+
278
+ /// \brief A std::shared_ptr<T, ...> subclass that releases the GIL when destroying T
279
+ template <typename... Ts>
280
+ using SharedPtrNoGIL = SmartPtrNoGIL<std::shared_ptr, Ts...>;
281
+
282
+ /// \brief A std::unique_ptr<T, ...> subclass that releases the GIL when destroying T
283
+ template <typename... Ts>
284
+ using UniquePtrNoGIL = SmartPtrNoGIL<std::unique_ptr, Ts...>;
285
+
286
+ template <typename Fn>
287
+ struct BoundFunction;
288
+
289
+ template <typename... Args>
290
+ struct BoundFunction<void(PyObject*, Args...)> {
291
+ // We bind `cdef void fn(object, ...)` to get a `Status(...)`
292
+ // where the Status contains any Python error raised by `fn`
293
+ using Unbound = void(PyObject*, Args...);
294
+ using Bound = Status(Args...);
295
+
296
+ BoundFunction(Unbound* unbound, PyObject* bound_arg)
297
+ : unbound_(unbound), bound_arg_(bound_arg) {}
298
+
299
+ Status Invoke(Args... args) const {
300
+ PyAcquireGIL lock;
301
+ unbound_(bound_arg_.obj(), std::forward<Args>(args)...);
302
+ RETURN_IF_PYERROR();
303
+ return Status::OK();
304
+ }
305
+
306
+ Unbound* unbound_;
307
+ OwnedRefNoGIL bound_arg_;
308
+ };
309
+
310
+ template <typename Return, typename... Args>
311
+ struct BoundFunction<Return(PyObject*, Args...)> {
312
+ // We bind `cdef Return fn(object, ...)` to get a `Result<Return>(...)`
313
+ // where the Result contains any Python error raised by `fn` or the
314
+ // return value from `fn`.
315
+ using Unbound = Return(PyObject*, Args...);
316
+ using Bound = Result<Return>(Args...);
317
+
318
+ BoundFunction(Unbound* unbound, PyObject* bound_arg)
319
+ : unbound_(unbound), bound_arg_(bound_arg) {}
320
+
321
+ Result<Return> Invoke(Args... args) const {
322
+ PyAcquireGIL lock;
323
+ Return ret = unbound_(bound_arg_.obj(), std::forward<Args>(args)...);
324
+ RETURN_IF_PYERROR();
325
+ return ret;
326
+ }
327
+
328
+ Unbound* unbound_;
329
+ OwnedRefNoGIL bound_arg_;
330
+ };
331
+
332
+ template <typename OutFn, typename Return, typename... Args>
333
+ std::function<OutFn> BindFunction(Return (*unbound)(PyObject*, Args...),
334
+ PyObject* bound_arg) {
335
+ using Fn = BoundFunction<Return(PyObject*, Args...)>;
336
+
337
+ static_assert(std::is_same<typename Fn::Bound, OutFn>::value,
338
+ "requested bound function of unsupported type");
339
+
340
+ Py_XINCREF(bound_arg);
341
+ auto bound_fn = std::make_shared<Fn>(unbound, bound_arg);
342
+ return
343
+ [bound_fn](Args... args) { return bound_fn->Invoke(std::forward<Args>(args)...); };
344
+ }
345
+
346
+ // A temporary conversion of a Python object to a bytes area.
347
+ struct PyBytesView {
348
+ const char* bytes;
349
+ Py_ssize_t size;
350
+ bool is_utf8;
351
+
352
+ static Result<PyBytesView> FromString(PyObject* obj, bool check_utf8 = false) {
353
+ PyBytesView self;
354
+ ARROW_RETURN_NOT_OK(self.ParseString(obj, check_utf8));
355
+ return std::move(self);
356
+ }
357
+
358
+ static Result<PyBytesView> FromUnicode(PyObject* obj) {
359
+ PyBytesView self;
360
+ ARROW_RETURN_NOT_OK(self.ParseUnicode(obj));
361
+ return std::move(self);
362
+ }
363
+
364
+ static Result<PyBytesView> FromBinary(PyObject* obj) {
365
+ PyBytesView self;
366
+ ARROW_RETURN_NOT_OK(self.ParseBinary(obj));
367
+ return std::move(self);
368
+ }
369
+
370
+ // View the given Python object as string-like, i.e. str or (utf8) bytes
371
+ Status ParseString(PyObject* obj, bool check_utf8 = false) {
372
+ if (PyUnicode_Check(obj)) {
373
+ return ParseUnicode(obj);
374
+ } else {
375
+ ARROW_RETURN_NOT_OK(ParseBinary(obj));
376
+ if (check_utf8) {
377
+ // Check whether the bytes are valid UTF-8
378
+ OwnedRef decoded(PyUnicode_FromStringAndSize(bytes, size));
379
+ if (ARROW_PREDICT_TRUE(!PyErr_Occurred())) {
380
+ is_utf8 = true;
381
+ } else {
382
+ PyErr_Clear();
383
+ is_utf8 = false;
384
+ }
385
+ }
386
+ return Status::OK();
387
+ }
388
+ }
389
+
390
+ // View the given Python object as unicode string
391
+ Status ParseUnicode(PyObject* obj) {
392
+ // The utf-8 representation is cached on the unicode object
393
+ bytes = PyUnicode_AsUTF8AndSize(obj, &size);
394
+ RETURN_IF_PYERROR();
395
+ is_utf8 = true;
396
+ return Status::OK();
397
+ }
398
+
399
+ // View the given Python object as binary-like, i.e. bytes
400
+ Status ParseBinary(PyObject* obj) {
401
+ if (PyBytes_Check(obj)) {
402
+ bytes = PyBytes_AS_STRING(obj);
403
+ size = PyBytes_GET_SIZE(obj);
404
+ is_utf8 = false;
405
+ } else if (PyByteArray_Check(obj)) {
406
+ bytes = PyByteArray_AS_STRING(obj);
407
+ size = PyByteArray_GET_SIZE(obj);
408
+ is_utf8 = false;
409
+ } else if (PyMemoryView_Check(obj)) {
410
+ PyObject* ref = PyMemoryView_GetContiguous(obj, PyBUF_READ, 'C');
411
+ RETURN_IF_PYERROR();
412
+ Py_buffer* buffer = PyMemoryView_GET_BUFFER(ref);
413
+ bytes = reinterpret_cast<const char*>(buffer->buf);
414
+ size = buffer->len;
415
+ is_utf8 = false;
416
+ } else {
417
+ return Status::TypeError("Expected bytes, got a '", Py_TYPE(obj)->tp_name,
418
+ "' object");
419
+ }
420
+ return Status::OK();
421
+ }
422
+
423
+ protected:
424
+ OwnedRef ref;
425
+ };
426
+
427
+ class ARROW_PYTHON_EXPORT PyBuffer : public Buffer {
428
+ public:
429
+ /// While memoryview objects support multi-dimensional buffers, PyBuffer only supports
430
+ /// one-dimensional byte buffers.
431
+ ~PyBuffer();
432
+
433
+ static Result<std::shared_ptr<Buffer>> FromPyObject(PyObject* obj);
434
+
435
+ private:
436
+ PyBuffer();
437
+ Status Init(PyObject*);
438
+
439
+ Py_buffer py_buf_;
440
+ };
441
+
442
+ // Return the common PyArrow memory pool
443
+ ARROW_PYTHON_EXPORT void set_default_memory_pool(MemoryPool* pool);
444
+ ARROW_PYTHON_EXPORT MemoryPool* get_memory_pool();
445
+
446
+ // This is annoying: because C++11 does not allow implicit conversion of string
447
+ // literals to non-const char*, we need to go through some gymnastics to use
448
+ // PyObject_CallMethod without a lot of pain (its arguments are non-const
449
+ // char*)
450
+ template <typename... ArgTypes>
451
+ static inline PyObject* cpp_PyObject_CallMethod(PyObject* obj, const char* method_name,
452
+ const char* argspec, ArgTypes... args) {
453
+ return PyObject_CallMethod(obj, const_cast<char*>(method_name),
454
+ const_cast<char*>(argspec), args...);
455
+ }
456
+
457
+ } // namespace py
458
+ } // namespace arrow
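An illustrative helper built on the utilities above (the name NotifyPython and the `callback` object are assumptions; `callback` is any Python callable):

    arrow::Status NotifyPython(PyObject* callback) {
      return arrow::py::SafeCallIntoPython([&]() -> arrow::Status {
        arrow::py::OwnedRef result(PyObject_CallFunctionObjArgs(callback, nullptr));
        RETURN_IF_PYERROR();  // converts a raised exception into a Status
        return arrow::Status::OK();
      });
    }

SafeCallIntoPython acquires the GIL and preserves any Python error already pending in the caller, while OwnedRef drops the result reference when it goes out of scope.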
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/csv.cc ADDED
@@ -0,0 +1,62 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "csv.h"
19
+
20
+ #include <memory>
21
+
22
+ #include "arrow/python/common.h"
23
+
24
+ namespace arrow {
25
+
26
+ using csv::InvalidRow;
27
+ using csv::InvalidRowHandler;
28
+ using csv::InvalidRowResult;
29
+
30
+ namespace py {
31
+ namespace csv {
32
+
33
+ InvalidRowHandler MakeInvalidRowHandler(PyInvalidRowCallback cb, PyObject* py_handler) {
34
+ if (cb == nullptr) {
35
+ return InvalidRowHandler{};
36
+ }
37
+
38
+ struct Handler {
39
+ PyInvalidRowCallback cb;
40
+ std::shared_ptr<OwnedRefNoGIL> handler_ref;
41
+
42
+ InvalidRowResult operator()(const InvalidRow& invalid_row) {
43
+ InvalidRowResult result;
44
+ auto st = SafeCallIntoPython([&]() -> Status {
45
+ result = cb(handler_ref->obj(), invalid_row);
46
+ if (PyErr_Occurred()) {
47
+ PyErr_WriteUnraisable(handler_ref->obj());
48
+ }
49
+ return Status::OK();
50
+ });
51
+ ARROW_UNUSED(st);
52
+ return result;
53
+ }
54
+ };
55
+
56
+ Py_INCREF(py_handler);
57
+ return Handler{cb, std::make_shared<OwnedRefNoGIL>(py_handler)};
58
+ }
59
+
60
+ } // namespace csv
61
+ } // namespace py
62
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/csv.h ADDED
@@ -0,0 +1,42 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <functional>
21
+ #include <memory>
22
+ #include <string>
23
+ #include <vector>
24
+
25
+ #include "arrow/csv/options.h"
26
+ #include "arrow/python/common.h"
27
+ #include "arrow/util/macros.h"
28
+
29
+ namespace arrow {
30
+ namespace py {
31
+ namespace csv {
32
+
33
+ using PyInvalidRowCallback = std::function<::arrow::csv::InvalidRowResult(
34
+ PyObject*, const ::arrow::csv::InvalidRow&)>;
35
+
36
+ ARROW_PYTHON_EXPORT
37
+ ::arrow::csv::InvalidRowHandler MakeInvalidRowHandler(PyInvalidRowCallback,
38
+ PyObject* handler);
39
+
40
+ } // namespace csv
41
+ } // namespace py
42
+ } // namespace arrow
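A hypothetical wiring of this adapter into CSV parsing (`handler_obj` is an assumed PyObject*; a real binding would call it from inside the callback instead of always skipping):

    auto cpp_callback = [](PyObject* handler, const arrow::csv::InvalidRow& row)
        -> arrow::csv::InvalidRowResult {
      return arrow::csv::InvalidRowResult::Skip;  // placeholder decision
    };
    auto parse_options = arrow::csv::ParseOptions::Defaults();
    parse_options.invalid_row_handler =
        arrow::py::csv::MakeInvalidRowHandler(cpp_callback, handler_obj);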
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/datetime.cc ADDED
@@ -0,0 +1,663 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+ #include "datetime.h"
18
+
19
+ #include <algorithm>
20
+ #include <chrono>
21
+ #include <iomanip>
22
+ #include <regex>
23
+ #include <string_view>
24
+
25
+ #include "arrow/array.h"
26
+ #include "arrow/python/arrow_to_python_internal.h"
27
+ #include "arrow/python/common.h"
28
+ #include "arrow/python/helpers.h"
29
+ #include "arrow/python/platform.h"
30
+ #include "arrow/scalar.h"
31
+ #include "arrow/status.h"
32
+ #include "arrow/type.h"
33
+ #include "arrow/util/logging.h"
34
+ #include "arrow/util/regex.h"
35
+ #include "arrow/util/value_parsing.h"
36
+
37
+ namespace arrow {
38
+
39
+ using internal::RegexMatch;
40
+
41
+ namespace py {
42
+ namespace internal {
43
+
44
+ namespace {
45
+
46
+ bool MatchFixedOffset(const std::string& tz, std::string_view* sign,
47
+ std::string_view* hour, std::string_view* minute) {
48
+ static const std::regex regex("^([+-])(0[0-9]|1[0-9]|2[0-3]):([0-5][0-9])$");
49
+ if (tz.size() < 5) {
50
+ return false;
51
+ }
52
+ return RegexMatch(regex, tz, {sign, hour, minute});
53
+ }
54
+
55
+ constexpr char* NonConst(const char* st) {
56
+ // Hack for Python versions < 3.7 where PyStructSequence members
57
+ // were non-const (C++ doesn't like assigning string literals to these types)
58
+ return const_cast<char*>(st);
59
+ }
60
+
61
+ static PyTypeObject MonthDayNanoTupleType = {};
62
+
63
+ static PyStructSequence_Field MonthDayNanoField[] = {
64
+ {NonConst("months"), NonConst("The number of months in the interval")},
65
+ {NonConst("days"), NonConst("The number days in the interval")},
66
+ {NonConst("nanoseconds"), NonConst("The number of nanoseconds in the interval")},
67
+ {nullptr, nullptr}};
68
+
69
+ static PyStructSequence_Desc MonthDayNanoTupleDesc = {
70
+ NonConst("MonthDayNano"),
71
+ NonConst("A calendar interval consisting of months, days and nanoseconds."),
72
+ MonthDayNanoField,
73
+ /*n_in_sequence=*/3};
74
+
75
+ } // namespace
76
+
77
+ #ifndef PYPY_VERSION
78
+ PyDateTime_CAPI* datetime_api = nullptr;
79
+
80
+ void InitDatetime() {
81
+ PyAcquireGIL lock;
82
+ datetime_api =
83
+ reinterpret_cast<PyDateTime_CAPI*>(PyCapsule_Import(PyDateTime_CAPSULE_NAME, 0));
84
+ if (datetime_api == nullptr) {
85
+ Py_FatalError("Could not import datetime C API");
86
+ }
87
+ }
88
+ #endif
89
+
90
+ // The following code is adapted from
91
+ // https://github.com/numpy/numpy/blob/main/numpy/core/src/multiarray/datetime.c
92
+
93
+ // Days per month, regular year and leap year
94
+ static int64_t _days_per_month_table[2][12] = {
95
+ {31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31},
96
+ {31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31}};
97
+
98
+ static bool is_leapyear(int64_t year) {
99
+ return (year & 0x3) == 0 && // year % 4 == 0
100
+ ((year % 100) != 0 || (year % 400) == 0);
101
+ }
102
+
103
+ // Calculates the days offset from the 1970 epoch.
104
+ static int64_t get_days_from_date(int64_t date_year, int64_t date_month,
105
+ int64_t date_day) {
106
+ int64_t i, month;
107
+ int64_t year, days = 0;
108
+ int64_t* month_lengths;
109
+
110
+ year = date_year - 1970;
111
+ days = year * 365;
112
+
113
+ // Adjust for leap years
114
+ if (days >= 0) {
115
+ // 1968 is the closest leap year before 1970.
116
+ // Exclude the current year, so add 1.
117
+ year += 1;
118
+ // Add one day for each 4 years
119
+ days += year / 4;
120
+ // 1900 is the closest previous year divisible by 100
121
+ year += 68;
122
+ // Subtract one day for each 100 years
123
+ days -= year / 100;
124
+ // 1600 is the closest previous year divisible by 400
125
+ year += 300;
126
+ // Add one day for each 400 years
127
+ days += year / 400;
128
+ } else {
129
+ // 1972 is the closest later year after 1970.
130
+ // Include the current year, so subtract 2.
131
+ year -= 2;
132
+ // Subtract one day for each 4 years
133
+ days += year / 4;
134
+ // 2000 is the closest later year divisible by 100
135
+ year -= 28;
136
+ // Add one day for each 100 years
137
+ days -= year / 100;
138
+ // 2000 is also the closest later year divisible by 400
139
+ // Subtract one day for each 400 years
140
+ days += year / 400;
141
+ }
142
+
143
+ month_lengths = _days_per_month_table[is_leapyear(date_year)];
144
+ month = date_month - 1;
145
+
146
+ // Add the months
147
+ for (i = 0; i < month; ++i) {
148
+ days += month_lengths[i];
149
+ }
150
+
151
+ // Add the days
152
+ days += date_day - 1;
153
+
154
+ return days;
155
+ }
156
+
157
+ // Modifies '*days_' to be the day offset within the year,
158
+ // and returns the year.
159
+ static int64_t days_to_yearsdays(int64_t* days_) {
160
+ const int64_t days_per_400years = (400 * 365 + 100 - 4 + 1);
161
+ // Adjust so it's relative to the year 2000 (divisible by 400)
162
+ int64_t days = (*days_) - (365 * 30 + 7);
163
+ int64_t year;
164
+
165
+ // Break down the 400 year cycle to get the year and day within the year
166
+ if (days >= 0) {
167
+ year = 400 * (days / days_per_400years);
168
+ days = days % days_per_400years;
169
+ } else {
170
+ year = 400 * ((days - (days_per_400years - 1)) / days_per_400years);
171
+ days = days % days_per_400years;
172
+ if (days < 0) {
173
+ days += days_per_400years;
174
+ }
175
+ }
176
+
177
+ // Work out the year/day within the 400 year cycle
178
+ if (days >= 366) {
179
+ year += 100 * ((days - 1) / (100 * 365 + 25 - 1));
180
+ days = (days - 1) % (100 * 365 + 25 - 1);
181
+ if (days >= 365) {
182
+ year += 4 * ((days + 1) / (4 * 365 + 1));
183
+ days = (days + 1) % (4 * 365 + 1);
184
+ if (days >= 366) {
185
+ year += (days - 1) / 365;
186
+ days = (days - 1) % 365;
187
+ }
188
+ }
189
+ }
190
+
191
+ *days_ = days;
192
+ return year + 2000;
193
+ }
194
+
195
+ // Extracts the month and year and day number from a number of days
196
+ static void get_date_from_days(int64_t days, int64_t* date_year, int64_t* date_month,
197
+ int64_t* date_day) {
198
+ int64_t *month_lengths, i;
199
+
200
+ *date_year = days_to_yearsdays(&days);
201
+ month_lengths = _days_per_month_table[is_leapyear(*date_year)];
202
+
203
+ for (i = 0; i < 12; ++i) {
204
+ if (days < month_lengths[i]) {
205
+ *date_month = i + 1;
206
+ *date_day = days + 1;
207
+ return;
208
+ } else {
209
+ days -= month_lengths[i];
210
+ }
211
+ }
212
+
213
+ // Should never get here
214
+ return;
215
+ }
216
+
217
+ // Splitting time quantities, for example splitting total seconds into
218
+ // minutes and remaining seconds. After we run
219
+ // int64_t remaining = split_time(total, quotient, &next)
220
+ // we have
221
+ // total = next * quotient + remaining. Handles negative values by propagating
222
+ // them: If total is negative, next will be negative and remaining will
223
+ // always be non-negative.
224
+ static inline int64_t split_time(int64_t total, int64_t quotient, int64_t* next) {
225
+ int64_t r = total % quotient;
226
+ if (r < 0) {
227
+ *next = total / quotient - 1;
228
+ return r + quotient;
229
+ } else {
230
+ *next = total / quotient;
231
+ return r;
232
+ }
233
+ }
234
+
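A worked example of split_time() (illustrative, not part of the file): splitting -75 total seconds into minutes and leftover seconds.

    int64_t minutes = 0;
    int64_t secs = split_time(-75, 60, &minutes);
    // minutes == -2 and secs == 45, so -75 == -2 * 60 + 45:
    // the remainder stays non-negative even for negative totals, as documented above.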
235
+ static inline Status PyTime_convert_int(int64_t val, const TimeUnit::type unit,
236
+ int64_t* hour, int64_t* minute, int64_t* second,
237
+ int64_t* microsecond) {
238
+ switch (unit) {
239
+ case TimeUnit::NANO:
240
+ if (val % 1000 != 0) {
241
+ return Status::Invalid("Value ", val, " has non-zero nanoseconds");
242
+ }
243
+ val /= 1000;
244
+ // fall through
245
+ case TimeUnit::MICRO:
246
+ *microsecond = split_time(val, 1000000LL, &val);
247
+ *second = split_time(val, 60, &val);
248
+ *minute = split_time(val, 60, hour);
249
+ break;
250
+ case TimeUnit::MILLI:
251
+ *microsecond = split_time(val, 1000, &val) * 1000;
252
+ // fall through
253
+ case TimeUnit::SECOND:
254
+ *second = split_time(val, 60, &val);
255
+ *minute = split_time(val, 60, hour);
256
+ break;
257
+ default:
258
+ break;
259
+ }
260
+ return Status::OK();
261
+ }
262
+
263
+ static inline Status PyDate_convert_int(int64_t val, const DateUnit unit, int64_t* year,
264
+ int64_t* month, int64_t* day) {
265
+ switch (unit) {
266
+ case DateUnit::MILLI:
267
+ val /= 86400000LL; // fall through
268
+ case DateUnit::DAY:
269
+ get_date_from_days(val, year, month, day);
270
+ default:
271
+ break;
272
+ }
273
+ return Status::OK();
274
+ }
275
+
276
+ PyObject* NewMonthDayNanoTupleType() {
277
+ if (MonthDayNanoTupleType.tp_name == nullptr) {
278
+ if (PyStructSequence_InitType2(&MonthDayNanoTupleType, &MonthDayNanoTupleDesc) != 0) {
279
+ Py_FatalError("Could not initialize MonthDayNanoTuple");
280
+ }
281
+ }
282
+ Py_INCREF(&MonthDayNanoTupleType);
283
+ return (PyObject*)&MonthDayNanoTupleType;
284
+ }
285
+
286
+ Status PyTime_from_int(int64_t val, const TimeUnit::type unit, PyObject** out) {
287
+ int64_t hour = 0, minute = 0, second = 0, microsecond = 0;
288
+ RETURN_NOT_OK(PyTime_convert_int(val, unit, &hour, &minute, &second, &microsecond));
289
+ *out = PyTime_FromTime(static_cast<int32_t>(hour), static_cast<int32_t>(minute),
290
+ static_cast<int32_t>(second), static_cast<int32_t>(microsecond));
291
+ return Status::OK();
292
+ }
293
+
294
+ Status PyDate_from_int(int64_t val, const DateUnit unit, PyObject** out) {
295
+ int64_t year = 0, month = 0, day = 0;
296
+ RETURN_NOT_OK(PyDate_convert_int(val, unit, &year, &month, &day));
297
+ *out = PyDate_FromDate(static_cast<int32_t>(year), static_cast<int32_t>(month),
298
+ static_cast<int32_t>(day));
299
+ return Status::OK();
300
+ }
301
+
302
+ Status PyDateTime_from_int(int64_t val, const TimeUnit::type unit, PyObject** out) {
303
+ int64_t hour = 0, minute = 0, second = 0, microsecond = 0;
304
+ RETURN_NOT_OK(PyTime_convert_int(val, unit, &hour, &minute, &second, &microsecond));
305
+ int64_t total_days = 0;
306
+ hour = split_time(hour, 24, &total_days);
307
+ int64_t year = 0, month = 0, day = 0;
308
+ get_date_from_days(total_days, &year, &month, &day);
309
+ *out = PyDateTime_FromDateAndTime(
310
+ static_cast<int32_t>(year), static_cast<int32_t>(month), static_cast<int32_t>(day),
311
+ static_cast<int32_t>(hour), static_cast<int32_t>(minute),
312
+ static_cast<int32_t>(second), static_cast<int32_t>(microsecond));
313
+ return Status::OK();
314
+ }
315
+
316
+ int64_t PyDate_to_days(PyDateTime_Date* pydate) {
317
+ return get_days_from_date(PyDateTime_GET_YEAR(pydate), PyDateTime_GET_MONTH(pydate),
318
+ PyDateTime_GET_DAY(pydate));
319
+ }
320
+
321
+ Result<int64_t> PyDateTime_utcoffset_s(PyObject* obj) {
322
+ // calculate offset from UTC timezone in seconds
323
+ // supports only PyDateTime_DateTime and PyDateTime_Time objects
324
+ OwnedRef pyoffset(PyObject_CallMethod(obj, "utcoffset", NULL));
325
+ RETURN_IF_PYERROR();
326
+ if (pyoffset.obj() != nullptr && pyoffset.obj() != Py_None) {
327
+ auto delta = reinterpret_cast<PyDateTime_Delta*>(pyoffset.obj());
328
+ return internal::PyDelta_to_s(delta);
329
+ } else {
330
+ return 0;
331
+ }
332
+ }
333
+
334
+ Result<std::string> PyTZInfo_utcoffset_hhmm(PyObject* pytzinfo) {
335
+ // attempt to convert timezone offset objects to "+/-{hh}:{mm}" format
336
+ OwnedRef pydelta_object(PyObject_CallMethod(pytzinfo, "utcoffset", "O", Py_None));
337
+ RETURN_IF_PYERROR();
338
+
339
+ if (!PyDelta_Check(pydelta_object.obj())) {
340
+ return Status::Invalid(
341
+ "Object returned by tzinfo.utcoffset(None) is not an instance of "
342
+ "datetime.timedelta");
343
+ }
344
+ auto pydelta = reinterpret_cast<PyDateTime_Delta*>(pydelta_object.obj());
345
+
346
+ // retrieve the offset as seconds
347
+ auto total_seconds = internal::PyDelta_to_s(pydelta);
348
+
349
+ // determine whether the offset is positive or negative
350
+ auto sign = (total_seconds < 0) ? "-" : "+";
351
+ total_seconds = abs(total_seconds);
352
+
353
+ // calculate offset components
354
+ int64_t hours, minutes, seconds;
355
+ seconds = split_time(total_seconds, 60, &minutes);
356
+ minutes = split_time(minutes, 60, &hours);
357
+ if (seconds > 0) {
358
+ // check there are no remaining seconds
359
+ return Status::Invalid("Offset must represent whole number of minutes");
360
+ }
361
+
362
+ // construct the timezone string
363
+ std::stringstream stream;
364
+ stream << sign << std::setfill('0') << std::setw(2) << hours << ":" << std::setfill('0')
365
+ << std::setw(2) << minutes;
366
+ return stream.str();
367
+ }
368
+
369
+ // Converted from python. See https://github.com/apache/arrow/pull/7604
370
+ // for details.
371
+ Result<PyObject*> StringToTzinfo(const std::string& tz) {
372
+ std::string_view sign_str, hour_str, minute_str;
373
+ OwnedRef pytz;
374
+ OwnedRef zoneinfo;
375
+ OwnedRef datetime;
376
+
377
+ if (internal::ImportModule("pytz", &pytz).ok()) {
378
+ if (MatchFixedOffset(tz, &sign_str, &hour_str, &minute_str)) {
379
+ int sign = -1;
380
+ if (sign_str == "+") {
381
+ sign = 1;
382
+ }
383
+ OwnedRef fixed_offset;
384
+ RETURN_NOT_OK(internal::ImportFromModule(pytz.obj(), "FixedOffset", &fixed_offset));
385
+ uint32_t minutes, hours;
386
+ if (!::arrow::internal::ParseUnsigned(hour_str.data(), hour_str.size(), &hours) ||
387
+ !::arrow::internal::ParseUnsigned(minute_str.data(), minute_str.size(),
388
+ &minutes)) {
389
+ return Status::Invalid("Invalid timezone: ", tz);
390
+ }
391
+ OwnedRef total_minutes(PyLong_FromLong(
392
+ sign * ((static_cast<int>(hours) * 60) + static_cast<int>(minutes))));
393
+ RETURN_IF_PYERROR();
394
+ auto tzinfo =
395
+ PyObject_CallFunctionObjArgs(fixed_offset.obj(), total_minutes.obj(), NULL);
396
+ RETURN_IF_PYERROR();
397
+ return tzinfo;
398
+ }
399
+
400
+ OwnedRef timezone;
401
+ RETURN_NOT_OK(internal::ImportFromModule(pytz.obj(), "timezone", &timezone));
402
+ OwnedRef py_tz_string(
403
+ PyUnicode_FromStringAndSize(tz.c_str(), static_cast<Py_ssize_t>(tz.size())));
404
+ auto tzinfo = PyObject_CallFunctionObjArgs(timezone.obj(), py_tz_string.obj(), NULL);
405
+ RETURN_IF_PYERROR();
406
+ return tzinfo;
407
+ }
408
+
409
+ // catch fixed offset if pytz is not present
410
+ if (MatchFixedOffset(tz, &sign_str, &hour_str, &minute_str)) {
411
+ RETURN_NOT_OK(internal::ImportModule("datetime", &datetime));
412
+ int sign = -1;
413
+ if (sign_str == "+") {
414
+ sign = 1;
415
+ }
416
+
417
+ // import timezone and timedelta module to create a tzinfo object
418
+ OwnedRef class_timezone;
419
+ OwnedRef class_timedelta;
420
+ RETURN_NOT_OK(
421
+ internal::ImportFromModule(datetime.obj(), "timezone", &class_timezone));
422
+ RETURN_NOT_OK(
423
+ internal::ImportFromModule(datetime.obj(), "timedelta", &class_timedelta));
424
+
425
+ // check input
426
+ uint32_t minutes, hours;
427
+ if (!::arrow::internal::ParseUnsigned(hour_str.data(), hour_str.size(), &hours) ||
428
+ !::arrow::internal::ParseUnsigned(minute_str.data(), minute_str.size(),
429
+ &minutes)) {
430
+ return Status::Invalid("Invalid timezone: ", tz);
431
+ }
432
+
433
+ // save offset as a signed integer
434
+ OwnedRef total_minutes(PyLong_FromLong(
435
+ sign * ((static_cast<int>(hours) * 60) + static_cast<int>(minutes))));
436
+ // create zero integers for empty arguments in datetime.timedelta
437
+ OwnedRef zero(PyLong_FromLong(static_cast<int>(0)));
438
+
439
+ // call datetime.timedelta to get correct offset object for datetime.timezone
440
+ auto offset =
441
+ PyObject_CallFunctionObjArgs(class_timedelta.obj(), zero.obj(), zero.obj(),
442
+ zero.obj(), zero.obj(), total_minutes.obj(), NULL);
443
+ RETURN_IF_PYERROR();
444
+ // call datetime.timezone
445
+ auto tzinfo = PyObject_CallFunctionObjArgs(class_timezone.obj(), offset, NULL);
446
+ RETURN_IF_PYERROR();
447
+ return tzinfo;
448
+ }
449
+
450
+ // fallback on zoneinfo if tz is string and pytz is not present
451
+ if (internal::ImportModule("zoneinfo", &zoneinfo).ok()) {
452
+ OwnedRef class_zoneinfo;
453
+ RETURN_NOT_OK(
454
+ internal::ImportFromModule(zoneinfo.obj(), "ZoneInfo", &class_zoneinfo));
455
+ OwnedRef py_tz_string(
456
+ PyUnicode_FromStringAndSize(tz.c_str(), static_cast<Py_ssize_t>(tz.size())));
457
+ auto tzinfo =
458
+ PyObject_CallFunctionObjArgs(class_zoneinfo.obj(), py_tz_string.obj(), NULL);
459
+ RETURN_IF_PYERROR();
460
+ return tzinfo;
461
+ }
462
+
463
+ return Status::Invalid(
464
+ "Pytz package or Python>=3.9 for zoneinfo module must be installed.");
465
+ }
466
+
467
+ Result<std::string> TzinfoToString(PyObject* tzinfo) {
468
+ OwnedRef module_pytz; // import pytz
469
+ OwnedRef module_datetime; // import datetime
470
+ OwnedRef module_zoneinfo; // import zoneinfo
471
+ OwnedRef module_dateutil; // import dateutil
472
+ OwnedRef class_timezone; // from datetime import timezone
473
+ OwnedRef class_fixedoffset; // from pytz import _FixedOffset
474
+ OwnedRef class_basetzinfo; // from pytz import BaseTzInfo
475
+ OwnedRef class_zoneinfo; // from zoneinfo import ZoneInfo
476
+ OwnedRef class_tzfile; // from zoneinfo import tzfile
477
+
478
+ // import necessary modules
479
+ RETURN_NOT_OK(internal::ImportModule("datetime", &module_datetime));
480
+ // import necessary classes
481
+ RETURN_NOT_OK(
482
+ internal::ImportFromModule(module_datetime.obj(), "timezone", &class_timezone));
483
+
484
+ // check that it's a valid tzinfo object
485
+ if (!PyTZInfo_Check(tzinfo)) {
486
+ return Status::TypeError("Not an instance of datetime.tzinfo");
487
+ }
488
+
489
+ // if tzinfo is an instance of datetime.timezone return the
490
+ // HH:MM offset string representation
491
+ if (PyObject_IsInstance(tzinfo, class_timezone.obj())) {
492
+ // still recognize datetime.timezone.utc as UTC (instead of +00:00)
493
+ OwnedRef tzname_object(PyObject_CallMethod(tzinfo, "tzname", "O", Py_None));
494
+ RETURN_IF_PYERROR();
495
+ if (PyUnicode_Check(tzname_object.obj())) {
496
+ std::string result;
497
+ RETURN_NOT_OK(internal::PyUnicode_AsStdString(tzname_object.obj(), &result));
498
+ if (result == "UTC") {
499
+ return result;
500
+ }
501
+ }
502
+ return PyTZInfo_utcoffset_hhmm(tzinfo);
503
+ }
504
+
505
+ // Try to import pytz if it is available
506
+ if (internal::ImportModule("pytz", &module_pytz).ok()) {
507
+ RETURN_NOT_OK(internal::ImportFromModule(module_pytz.obj(), "_FixedOffset",
508
+ &class_fixedoffset));
509
+ RETURN_NOT_OK(
510
+ internal::ImportFromModule(module_pytz.obj(), "BaseTzInfo", &class_basetzinfo));
511
+ }
512
+
513
+ // if tzinfo is an instance of pytz._FixedOffset return the
514
+ // HH:MM offset string representation
515
+ if (module_pytz.obj() != nullptr &&
516
+ PyObject_IsInstance(tzinfo, class_fixedoffset.obj())) {
517
+ OwnedRef tzname_object(PyObject_CallMethod(tzinfo, "tzname", "O", Py_None));
518
+ RETURN_IF_PYERROR();
519
+ return PyTZInfo_utcoffset_hhmm(tzinfo);
520
+ }
521
+
522
+ // if pytz is installed and tzinfo is an instance of pytz.BaseTzInfo
523
+ if (module_pytz.obj() != nullptr &&
524
+ PyObject_IsInstance(tzinfo, class_basetzinfo.obj())) {
525
+ OwnedRef zone(PyObject_GetAttrString(tzinfo, "zone"));
526
+ RETURN_IF_PYERROR();
527
+ std::string result;
528
+ RETURN_NOT_OK(internal::PyUnicode_AsStdString(zone.obj(), &result));
529
+ return result;
530
+ }
531
+
532
+ // Try to import zoneinfo if it is available
533
+ if (internal::ImportModule("zoneinfo", &module_zoneinfo).ok()) {
534
+ RETURN_NOT_OK(
535
+ internal::ImportFromModule(module_zoneinfo.obj(), "ZoneInfo", &class_zoneinfo));
536
+ }
537
+
538
+ // if zoneinfo is installed and tzinfo is an instance of zoneinfo.ZoneInfo
539
+ if (module_zoneinfo.obj() != nullptr &&
540
+ PyObject_IsInstance(tzinfo, class_zoneinfo.obj())) {
541
+ OwnedRef key(PyObject_GetAttrString(tzinfo, "key"));
542
+ RETURN_IF_PYERROR();
543
+ std::string result;
544
+ RETURN_NOT_OK(internal::PyUnicode_AsStdString(key.obj(), &result));
545
+ return result;
546
+ }
547
+
548
+ // Try to import dateutil if it is available
549
+ if (internal::ImportModule("dateutil.tz", &module_dateutil).ok()) {
550
+ RETURN_NOT_OK(
551
+ internal::ImportFromModule(module_dateutil.obj(), "tzfile", &class_tzfile));
552
+ }
553
+
554
+ // if dateutil is installed and tzinfo is an instance of dateutil.tz.tzfile
555
+ if (module_dateutil.obj() != nullptr &&
556
+ PyObject_IsInstance(tzinfo, class_tzfile.obj())) {
557
+ OwnedRef _filename(PyObject_GetAttrString(tzinfo, "_filename"));
558
+ RETURN_IF_PYERROR();
559
+ std::string result;
560
+ RETURN_NOT_OK(internal::PyUnicode_AsStdString(_filename.obj(), &result));
561
+ // _filename returns a full path in general ('/usr/share/zoneinfo/Europe/Paris')
562
+ // or POSIX name on Windows ('Europe/Paris') - we need a substring in first case
563
+ std::size_t pos = result.find("zoneinfo/");
564
+ if (pos != std::string::npos) {
565
+ return result.substr(pos + 9);
566
+ }
567
+ return result;
568
+ }
569
+
570
+ // attempt to call tzinfo.tzname(None)
571
+ OwnedRef tzname_object(PyObject_CallMethod(tzinfo, "tzname", "O", Py_None));
572
+ RETURN_IF_PYERROR();
573
+ if (PyUnicode_Check(tzname_object.obj())) {
574
+ std::string result;
575
+ RETURN_NOT_OK(internal::PyUnicode_AsStdString(tzname_object.obj(), &result));
576
+ return result;
577
+ }
578
+
579
+ // fall back to HH:MM offset string representation based on tzinfo.utcoffset(None)
580
+ return PyTZInfo_utcoffset_hhmm(tzinfo);
581
+ }
582
+
583
+ PyObject* MonthDayNanoIntervalToNamedTuple(
584
+ const MonthDayNanoIntervalType::MonthDayNanos& interval) {
585
+ OwnedRef tuple(PyStructSequence_New(&MonthDayNanoTupleType));
586
+ if (ARROW_PREDICT_FALSE(tuple.obj() == nullptr)) {
587
+ return nullptr;
588
+ }
589
+ PyStructSequence_SetItem(tuple.obj(), /*pos=*/0, PyLong_FromLong(interval.months));
590
+ PyStructSequence_SetItem(tuple.obj(), /*pos=*/1, PyLong_FromLong(interval.days));
591
+ PyStructSequence_SetItem(tuple.obj(), /*pos=*/2,
592
+ PyLong_FromLongLong(interval.nanoseconds));
593
+ return tuple.detach();
594
+ }
595
+
596
+ namespace {
597
+
598
+ // Wrapper around a Python list object that mimics dereference and assignment
599
+ // operations.
600
+ struct PyListAssigner {
601
+ public:
602
+ explicit PyListAssigner(PyObject* list) : list_(list) { DCHECK(PyList_Check(list_)); }
603
+
604
+ PyListAssigner& operator*() { return *this; }
605
+
606
+ void operator=(PyObject* obj) {
607
+ if (ARROW_PREDICT_FALSE(PyList_SetItem(list_, current_index_, obj) == -1)) {
608
+ Py_FatalError("list did not have the correct preallocated size.");
609
+ }
610
+ }
611
+
612
+ PyListAssigner& operator++() {
613
+ current_index_++;
614
+ return *this;
615
+ }
616
+
617
+ PyListAssigner& operator+=(int64_t offset) {
618
+ current_index_ += offset;
619
+ return *this;
620
+ }
621
+
622
+ private:
623
+ PyObject* list_;
624
+ int64_t current_index_ = 0;
625
+ };
626
+
627
+ } // namespace
628
+
629
+ Result<PyObject*> MonthDayNanoIntervalArrayToPyList(
630
+ const MonthDayNanoIntervalArray& array) {
631
+ OwnedRef out_list(PyList_New(array.length()));
632
+ RETURN_IF_PYERROR();
633
+ PyListAssigner out_objects(out_list.obj());
634
+ auto& interval_array =
635
+ arrow::internal::checked_cast<const MonthDayNanoIntervalArray&>(array);
636
+ RETURN_NOT_OK(internal::WriteArrayObjects(
637
+ interval_array,
638
+ [&](const MonthDayNanoIntervalType::MonthDayNanos& interval, PyListAssigner& out) {
639
+ PyObject* tuple = internal::MonthDayNanoIntervalToNamedTuple(interval);
640
+ if (ARROW_PREDICT_FALSE(tuple == nullptr)) {
641
+ RETURN_IF_PYERROR();
642
+ }
643
+
644
+ *out = tuple;
645
+ return Status::OK();
646
+ },
647
+ out_objects));
648
+ return out_list.detach();
649
+ }
650
+
651
+ Result<PyObject*> MonthDayNanoIntervalScalarToPyObject(
652
+ const MonthDayNanoIntervalScalar& scalar) {
653
+ if (scalar.is_valid) {
654
+ return internal::MonthDayNanoIntervalToNamedTuple(scalar.value);
655
+ } else {
656
+ Py_INCREF(Py_None);
657
+ return Py_None;
658
+ }
659
+ }
660
+
661
+ } // namespace internal
662
+ } // namespace py
663
+ } // namespace arrow
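
The timezone helpers defined above (StringToTzinfo, TzinfoToString) round-trip between string names and Python tzinfo objects. A minimal usage sketch, assuming an embedded CPython interpreter and a build linked against the arrow_python library (the example itself is not part of the committed file):

    #include <iostream>
    #include "arrow/python/datetime.h"

    int main() {
      Py_Initialize();  // the GIL is held by the main thread after initialization
      {
        // Parse a fixed-offset timezone string, then convert it back.
        PyObject* tzinfo = arrow::py::internal::StringToTzinfo("+07:30").ValueOrDie();
        std::string name = arrow::py::internal::TzinfoToString(tzinfo).ValueOrDie();
        std::cout << name << std::endl;  // prints "+07:30"
        Py_DECREF(tzinfo);
      }
      Py_Finalize();
      return 0;
    }
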
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/datetime.h ADDED
@@ -0,0 +1,231 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <algorithm>
21
+ #include <chrono>
22
+
23
+ #include "arrow/python/platform.h"
24
+ #include "arrow/python/visibility.h"
25
+ #include "arrow/result.h"
26
+ #include "arrow/status.h"
27
+ #include "arrow/type.h"
28
+ #include "arrow/type_fwd.h"
29
+ #include "arrow/util/int_util_overflow.h"
30
+ #include "arrow/util/logging.h"
31
+
32
+ // By default, PyDateTimeAPI is a *static* variable. This forces
33
+ // PyDateTime_IMPORT to be called in every C/C++ module using the
34
+ // C datetime API. This is error-prone and potentially costly.
35
+ // Instead, we redefine PyDateTimeAPI to point to a global variable,
36
+ // which is initialized once by calling InitDatetime().
37
+ #ifdef PYPY_VERSION
38
+ #include "datetime.h"
39
+ #else
40
+ #define PyDateTimeAPI ::arrow::py::internal::datetime_api
41
+ #endif
42
+
43
+ namespace arrow {
44
+ using internal::AddWithOverflow;
45
+ using internal::MultiplyWithOverflow;
46
+ namespace py {
47
+ namespace internal {
48
+
49
+ #ifndef PYPY_VERSION
50
+ extern PyDateTime_CAPI* datetime_api;
51
+
52
+ ARROW_PYTHON_EXPORT
53
+ void InitDatetime();
54
+ #endif
55
+
56
+ // Returns the MonthDayNano namedtuple type (increments the reference count).
57
+ ARROW_PYTHON_EXPORT
58
+ PyObject* NewMonthDayNanoTupleType();
59
+
60
+ ARROW_PYTHON_EXPORT
61
+ inline int64_t PyTime_to_us(PyObject* pytime) {
62
+ return (PyDateTime_TIME_GET_HOUR(pytime) * 3600000000LL +
63
+ PyDateTime_TIME_GET_MINUTE(pytime) * 60000000LL +
64
+ PyDateTime_TIME_GET_SECOND(pytime) * 1000000LL +
65
+ PyDateTime_TIME_GET_MICROSECOND(pytime));
66
+ }
67
+
68
+ ARROW_PYTHON_EXPORT
69
+ inline int64_t PyTime_to_s(PyObject* pytime) { return PyTime_to_us(pytime) / 1000000; }
70
+
71
+ ARROW_PYTHON_EXPORT
72
+ inline int64_t PyTime_to_ms(PyObject* pytime) { return PyTime_to_us(pytime) / 1000; }
73
+
74
+ ARROW_PYTHON_EXPORT
75
+ inline int64_t PyTime_to_ns(PyObject* pytime) { return PyTime_to_us(pytime) * 1000; }
76
+
77
+ ARROW_PYTHON_EXPORT
78
+ Status PyTime_from_int(int64_t val, const TimeUnit::type unit, PyObject** out);
79
+
80
+ ARROW_PYTHON_EXPORT
81
+ Status PyDate_from_int(int64_t val, const DateUnit unit, PyObject** out);
82
+
83
+ // WARNING: This function returns a naive datetime.
84
+ ARROW_PYTHON_EXPORT
85
+ Status PyDateTime_from_int(int64_t val, const TimeUnit::type unit, PyObject** out);
86
+
87
+ // This declaration must be the same as in filesystem/filesystem.h
88
+ using TimePoint =
89
+ std::chrono::time_point<std::chrono::system_clock, std::chrono::nanoseconds>;
90
+
91
+ ARROW_PYTHON_EXPORT
92
+ int64_t PyDate_to_days(PyDateTime_Date* pydate);
93
+
94
+ ARROW_PYTHON_EXPORT
95
+ inline int64_t PyDate_to_s(PyDateTime_Date* pydate) {
96
+ return PyDate_to_days(pydate) * 86400LL;
97
+ }
98
+
99
+ ARROW_PYTHON_EXPORT
100
+ inline int64_t PyDate_to_ms(PyDateTime_Date* pydate) {
101
+ return PyDate_to_days(pydate) * 86400000LL;
102
+ }
103
+
104
+ ARROW_PYTHON_EXPORT
105
+ inline int64_t PyDateTime_to_s(PyDateTime_DateTime* pydatetime) {
106
+ return (PyDate_to_s(reinterpret_cast<PyDateTime_Date*>(pydatetime)) +
107
+ PyDateTime_DATE_GET_HOUR(pydatetime) * 3600LL +
108
+ PyDateTime_DATE_GET_MINUTE(pydatetime) * 60LL +
109
+ PyDateTime_DATE_GET_SECOND(pydatetime));
110
+ }
111
+
112
+ ARROW_PYTHON_EXPORT
113
+ inline int64_t PyDateTime_to_ms(PyDateTime_DateTime* pydatetime) {
114
+ return (PyDateTime_to_s(pydatetime) * 1000LL +
115
+ PyDateTime_DATE_GET_MICROSECOND(pydatetime) / 1000);
116
+ }
117
+
118
+ ARROW_PYTHON_EXPORT
119
+ inline int64_t PyDateTime_to_us(PyDateTime_DateTime* pydatetime) {
120
+ return (PyDateTime_to_s(pydatetime) * 1000000LL +
121
+ PyDateTime_DATE_GET_MICROSECOND(pydatetime));
122
+ }
123
+
124
+ ARROW_PYTHON_EXPORT
125
+ inline int64_t PyDateTime_to_ns(PyDateTime_DateTime* pydatetime) {
126
+ return PyDateTime_to_us(pydatetime) * 1000LL;
127
+ }
128
+
129
+ ARROW_PYTHON_EXPORT
130
+ inline TimePoint PyDateTime_to_TimePoint(PyDateTime_DateTime* pydatetime) {
131
+ return TimePoint(TimePoint::duration(PyDateTime_to_ns(pydatetime)));
132
+ }
133
+
134
+ ARROW_PYTHON_EXPORT
135
+ inline int64_t TimePoint_to_ns(TimePoint val) { return val.time_since_epoch().count(); }
136
+
137
+ ARROW_PYTHON_EXPORT
138
+ inline TimePoint TimePoint_from_s(double val) {
139
+ return TimePoint(TimePoint::duration(static_cast<int64_t>(1e9 * val)));
140
+ }
141
+
142
+ ARROW_PYTHON_EXPORT
143
+ inline TimePoint TimePoint_from_ns(int64_t val) {
144
+ return TimePoint(TimePoint::duration(val));
145
+ }
146
+
147
+ ARROW_PYTHON_EXPORT
148
+ inline int64_t PyDelta_to_s(PyDateTime_Delta* pytimedelta) {
149
+ return (PyDateTime_DELTA_GET_DAYS(pytimedelta) * 86400LL +
150
+ PyDateTime_DELTA_GET_SECONDS(pytimedelta));
151
+ }
152
+
153
+ ARROW_PYTHON_EXPORT
154
+ inline int64_t PyDelta_to_ms(PyDateTime_Delta* pytimedelta) {
155
+ return (PyDelta_to_s(pytimedelta) * 1000LL +
156
+ PyDateTime_DELTA_GET_MICROSECONDS(pytimedelta) / 1000);
157
+ }
158
+
159
+ ARROW_PYTHON_EXPORT
160
+ inline Result<int64_t> PyDelta_to_us(PyDateTime_Delta* pytimedelta) {
161
+ int64_t result = PyDelta_to_s(pytimedelta);
162
+ if (MultiplyWithOverflow(result, 1000000LL, &result)) {
163
+ return Status::Invalid("Timedelta too large to fit in 64-bit integer");
164
+ }
165
+ if (AddWithOverflow(result, PyDateTime_DELTA_GET_MICROSECONDS(pytimedelta), &result)) {
166
+ return Status::Invalid("Timedelta too large to fit in 64-bit integer");
167
+ }
168
+ return result;
169
+ }
170
+
171
+ ARROW_PYTHON_EXPORT
172
+ inline Result<int64_t> PyDelta_to_ns(PyDateTime_Delta* pytimedelta) {
173
+ ARROW_ASSIGN_OR_RAISE(int64_t result, PyDelta_to_us(pytimedelta));
174
+ if (MultiplyWithOverflow(result, 1000LL, &result)) {
175
+ return Status::Invalid("Timedelta too large to fit in 64-bit integer");
176
+ }
177
+ return result;
178
+ }
179
+
180
+ ARROW_PYTHON_EXPORT
181
+ Result<int64_t> PyDateTime_utcoffset_s(PyObject* pydatetime);
182
+
183
+ /// \brief Convert a time zone name into a time zone object.
184
+ ///
185
+ /// Supported input strings are:
186
+ /// * As used in the Olson time zone database (the "tz database" or
187
+ /// "tzdata"), such as "America/New_York"
188
+ /// * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30
189
+ /// GIL must be held when calling this method.
190
+ ARROW_PYTHON_EXPORT
191
+ Result<PyObject*> StringToTzinfo(const std::string& tz);
192
+
193
+ /// \brief Convert a time zone object to a string representation.
194
+ ///
195
+ /// The output strings are:
196
+ /// * An absolute time zone offset of the form +XX:XX or -XX:XX, such as +07:30
197
+ /// if the input object is either an instance of pytz._FixedOffset or
198
+ /// datetime.timedelta
199
+ /// * The timezone's name if the input object's tzname() method returns a
200
+ /// non-empty timezone name such as "UTC" or "America/New_York"
201
+ ///
202
+ /// GIL must be held when calling this method.
203
+ ARROW_PYTHON_EXPORT
204
+ Result<std::string> TzinfoToString(PyObject* pytzinfo);
205
+
206
+ /// \brief Convert MonthDayNano to a python namedtuple.
207
+ ///
208
+ /// Return a named tuple (pyarrow.MonthDayNano) containing attributes
209
+ /// "months", "days", "nanoseconds" in the given order
210
+ /// with values extracted from the fields on interval.
211
+ ///
212
+ /// GIL must be held when calling this method.
213
+ ARROW_PYTHON_EXPORT
214
+ PyObject* MonthDayNanoIntervalToNamedTuple(
215
+ const MonthDayNanoIntervalType::MonthDayNanos& interval);
216
+
217
+ /// \brief Convert the given Array to a PyList object containing
218
+ /// pyarrow.MonthDayNano objects.
219
+ ARROW_PYTHON_EXPORT
220
+ Result<PyObject*> MonthDayNanoIntervalArrayToPyList(
221
+ const MonthDayNanoIntervalArray& array);
222
+
223
+ /// \brief Convert the Scalar object to a pyarrow.MonthDayNano (or None if
224
+ /// it isn't valid).
225
+ ARROW_PYTHON_EXPORT
226
+ Result<PyObject*> MonthDayNanoIntervalScalarToPyObject(
227
+ const MonthDayNanoIntervalScalar& scalar);
228
+
229
+ } // namespace internal
230
+ } // namespace py
231
+ } // namespace arrow
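
As a rough illustration of the conversion helpers declared in this header, the sketch below (again assuming an embedded interpreter and the arrow_python library; not part of the header) turns epoch-based integers into Python date and datetime objects:

    #include "arrow/python/datetime.h"
    #include "arrow/util/logging.h"

    int main() {
      Py_Initialize();
      arrow::py::internal::InitDatetime();  // set up the shared PyDateTime C API
      PyObject* date = nullptr;
      // 18993 days since 1970-01-01 -> datetime.date(2022, 1, 1)
      ARROW_CHECK_OK(
          arrow::py::internal::PyDate_from_int(18993, arrow::DateUnit::DAY, &date));
      PyObject* ts = nullptr;
      // Microseconds since the epoch -> naive datetime.datetime(2022, 1, 1, 0, 0)
      ARROW_CHECK_OK(arrow::py::internal::PyDateTime_from_int(
          1640995200000000LL, arrow::TimeUnit::MICRO, &ts));
      Py_XDECREF(date);
      Py_XDECREF(ts);
      Py_Finalize();
      return 0;
    }
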
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/decimal.cc ADDED
@@ -0,0 +1,246 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include <algorithm>
19
+ #include <limits>
20
+
21
+ #include "arrow/python/common.h"
22
+ #include "arrow/python/decimal.h"
23
+ #include "arrow/python/helpers.h"
24
+ #include "arrow/type_fwd.h"
25
+ #include "arrow/util/decimal.h"
26
+ #include "arrow/util/logging.h"
27
+
28
+ namespace arrow {
29
+ namespace py {
30
+ namespace internal {
31
+
32
+ Status ImportDecimalType(OwnedRef* decimal_type) {
33
+ OwnedRef decimal_module;
34
+ RETURN_NOT_OK(ImportModule("decimal", &decimal_module));
35
+ RETURN_NOT_OK(ImportFromModule(decimal_module.obj(), "Decimal", decimal_type));
36
+ return Status::OK();
37
+ }
38
+
39
+ Status PythonDecimalToString(PyObject* python_decimal, std::string* out) {
40
+ // Call Python's str(decimal_object)
41
+ return PyObject_StdStringStr(python_decimal, out);
42
+ }
43
+
44
+ // \brief Infer the precision and scale of a Python decimal.Decimal instance
45
+ // \param python_decimal[in] An instance of decimal.Decimal
46
+ // \param precision[out] The value of the inferred precision
47
+ // \param scale[out] The value of the inferred scale
48
+ // \return The status of the operation
49
+ static Status InferDecimalPrecisionAndScale(PyObject* python_decimal, int32_t* precision,
50
+ int32_t* scale) {
51
+ DCHECK_NE(python_decimal, NULLPTR);
52
+ DCHECK_NE(precision, NULLPTR);
53
+ DCHECK_NE(scale, NULLPTR);
54
+
55
+ // TODO(phillipc): Make sure we perform PyDecimal_Check(python_decimal) as a DCHECK
56
+ OwnedRef as_tuple(PyObject_CallMethod(python_decimal, const_cast<char*>("as_tuple"),
57
+ const_cast<char*>("")));
58
+ RETURN_IF_PYERROR();
59
+ DCHECK(PyTuple_Check(as_tuple.obj()));
60
+
61
+ OwnedRef digits(PyObject_GetAttrString(as_tuple.obj(), "digits"));
62
+ RETURN_IF_PYERROR();
63
+ DCHECK(PyTuple_Check(digits.obj()));
64
+
65
+ const auto num_digits = static_cast<int32_t>(PyTuple_Size(digits.obj()));
66
+ RETURN_IF_PYERROR();
67
+
68
+ OwnedRef py_exponent(PyObject_GetAttrString(as_tuple.obj(), "exponent"));
69
+ RETURN_IF_PYERROR();
70
+ DCHECK(IsPyInteger(py_exponent.obj()));
71
+
72
+ const auto exponent = static_cast<int32_t>(PyLong_AsLong(py_exponent.obj()));
73
+ RETURN_IF_PYERROR();
74
+
75
+ if (exponent < 0) {
76
+ // If exponent > num_digits, we have a number with leading zeros
77
+ // such as 0.01234. Ensure we have enough precision for leading zeros
78
+ // (which are not included in num_digits).
79
+ *precision = std::max(num_digits, -exponent);
80
+ *scale = -exponent;
81
+ } else {
82
+ // Trailing zeros are not included in num_digits, need to add to precision.
83
+ // Note we don't generate negative scales as they are poorly supported
84
+ // in non-Arrow systems.
85
+ *precision = num_digits + exponent;
86
+ *scale = 0;
87
+ }
88
+ return Status::OK();
89
+ }
90
+
91
+ PyObject* DecimalFromString(PyObject* decimal_constructor,
92
+ const std::string& decimal_string) {
93
+ DCHECK_NE(decimal_constructor, nullptr);
94
+
95
+ auto string_size = decimal_string.size();
96
+ DCHECK_GT(string_size, 0);
97
+
98
+ auto string_bytes = decimal_string.c_str();
99
+ DCHECK_NE(string_bytes, nullptr);
100
+
101
+ return PyObject_CallFunction(decimal_constructor, const_cast<char*>("s#"), string_bytes,
102
+ static_cast<Py_ssize_t>(string_size));
103
+ }
104
+
105
+ namespace {
106
+
107
+ template <typename ArrowDecimal>
108
+ Status DecimalFromStdString(const std::string& decimal_string,
109
+ const DecimalType& arrow_type, ArrowDecimal* out) {
110
+ int32_t inferred_precision;
111
+ int32_t inferred_scale;
112
+
113
+ RETURN_NOT_OK(ArrowDecimal::FromString(decimal_string, out, &inferred_precision,
114
+ &inferred_scale));
115
+
116
+ const int32_t precision = arrow_type.precision();
117
+ const int32_t scale = arrow_type.scale();
118
+
119
+ if (scale != inferred_scale) {
120
+ DCHECK_NE(out, NULLPTR);
121
+ ARROW_ASSIGN_OR_RAISE(*out, out->Rescale(inferred_scale, scale));
122
+ }
123
+
124
+ auto inferred_scale_delta = inferred_scale - scale;
125
+ if (ARROW_PREDICT_FALSE((inferred_precision - inferred_scale_delta) > precision)) {
126
+ return Status::Invalid(
127
+ "Decimal type with precision ", inferred_precision,
128
+ " does not fit into precision inferred from first array element: ", precision);
129
+ }
130
+
131
+ return Status::OK();
132
+ }
133
+
134
+ template <typename ArrowDecimal>
135
+ Status InternalDecimalFromPythonDecimal(PyObject* python_decimal,
136
+ const DecimalType& arrow_type,
137
+ ArrowDecimal* out) {
138
+ DCHECK_NE(python_decimal, NULLPTR);
139
+ DCHECK_NE(out, NULLPTR);
140
+
141
+ std::string string;
142
+ RETURN_NOT_OK(PythonDecimalToString(python_decimal, &string));
143
+ return DecimalFromStdString(string, arrow_type, out);
144
+ }
145
+
146
+ template <typename ArrowDecimal>
147
+ Status InternalDecimalFromPyObject(PyObject* obj, const DecimalType& arrow_type,
148
+ ArrowDecimal* out) {
149
+ DCHECK_NE(obj, NULLPTR);
150
+ DCHECK_NE(out, NULLPTR);
151
+
152
+ if (IsPyInteger(obj)) {
153
+ // TODO: add a fast path for small-ish ints
154
+ std::string string;
155
+ RETURN_NOT_OK(PyObject_StdStringStr(obj, &string));
156
+ return DecimalFromStdString(string, arrow_type, out);
157
+ } else if (PyDecimal_Check(obj)) {
158
+ return InternalDecimalFromPythonDecimal<ArrowDecimal>(obj, arrow_type, out);
159
+ } else {
160
+ return Status::TypeError("int or Decimal object expected, got ",
161
+ Py_TYPE(obj)->tp_name);
162
+ }
163
+ }
164
+
165
+ } // namespace
166
+
167
+ Status DecimalFromPythonDecimal(PyObject* python_decimal, const DecimalType& arrow_type,
168
+ Decimal128* out) {
169
+ return InternalDecimalFromPythonDecimal(python_decimal, arrow_type, out);
170
+ }
171
+
172
+ Status DecimalFromPyObject(PyObject* obj, const DecimalType& arrow_type,
173
+ Decimal128* out) {
174
+ return InternalDecimalFromPyObject(obj, arrow_type, out);
175
+ }
176
+
177
+ Status DecimalFromPythonDecimal(PyObject* python_decimal, const DecimalType& arrow_type,
178
+ Decimal256* out) {
179
+ return InternalDecimalFromPythonDecimal(python_decimal, arrow_type, out);
180
+ }
181
+
182
+ Status DecimalFromPyObject(PyObject* obj, const DecimalType& arrow_type,
183
+ Decimal256* out) {
184
+ return InternalDecimalFromPyObject(obj, arrow_type, out);
185
+ }
186
+
187
+ bool PyDecimal_Check(PyObject* obj) {
188
+ static OwnedRef decimal_type;
189
+ if (!decimal_type.obj()) {
190
+ ARROW_CHECK_OK(ImportDecimalType(&decimal_type));
191
+ DCHECK(PyType_Check(decimal_type.obj()));
192
+ }
193
+ // PyObject_IsInstance() is slower as it has to check for virtual subclasses
194
+ const int result =
195
+ PyType_IsSubtype(Py_TYPE(obj), reinterpret_cast<PyTypeObject*>(decimal_type.obj()));
196
+ ARROW_CHECK_NE(result, -1) << " error during PyType_IsSubtype check";
197
+ return result == 1;
198
+ }
199
+
200
+ bool PyDecimal_ISNAN(PyObject* obj) {
201
+ DCHECK(PyDecimal_Check(obj)) << "obj is not an instance of decimal.Decimal";
202
+ OwnedRef is_nan(
203
+ PyObject_CallMethod(obj, const_cast<char*>("is_nan"), const_cast<char*>("")));
204
+ return PyObject_IsTrue(is_nan.obj()) == 1;
205
+ }
206
+
207
+ DecimalMetadata::DecimalMetadata()
208
+ : DecimalMetadata(std::numeric_limits<int32_t>::min(),
209
+ std::numeric_limits<int32_t>::min()) {}
210
+
211
+ DecimalMetadata::DecimalMetadata(int32_t precision, int32_t scale)
212
+ : precision_(precision), scale_(scale) {}
213
+
214
+ Status DecimalMetadata::Update(int32_t suggested_precision, int32_t suggested_scale) {
215
+ const int32_t current_scale = scale_;
216
+ scale_ = std::max(current_scale, suggested_scale);
217
+
218
+ const int32_t current_precision = precision_;
219
+
220
+ if (current_precision == std::numeric_limits<int32_t>::min()) {
221
+ precision_ = suggested_precision;
222
+ } else {
223
+ auto num_digits = std::max(current_precision - current_scale,
224
+ suggested_precision - suggested_scale);
225
+ precision_ = std::max(num_digits + scale_, current_precision);
226
+ }
227
+
228
+ return Status::OK();
229
+ }
230
+
231
+ Status DecimalMetadata::Update(PyObject* object) {
232
+ bool is_decimal = PyDecimal_Check(object);
233
+
234
+ if (ARROW_PREDICT_FALSE(!is_decimal || PyDecimal_ISNAN(object))) {
235
+ return Status::OK();
236
+ }
237
+
238
+ int32_t precision = 0;
239
+ int32_t scale = 0;
240
+ RETURN_NOT_OK(InferDecimalPrecisionAndScale(object, &precision, &scale));
241
+ return Update(precision, scale);
242
+ }
243
+
244
+ } // namespace internal
245
+ } // namespace py
246
+ } // namespace arrow
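
DecimalFromStdString above parses the string with its inferred precision and scale, then rescales to the target type's scale. The same two steps can be reproduced with the public Decimal128 API; the following is an illustrative sketch, not part of the file:

    #include <iostream>
    #include "arrow/util/decimal.h"
    #include "arrow/util/logging.h"

    int main() {
      arrow::Decimal128 value;
      int32_t precision = 0, scale = 0;
      // "123.4" parses with inferred precision 4 and scale 1.
      ARROW_CHECK_OK(arrow::Decimal128::FromString("123.4", &value, &precision, &scale));
      // Rescaling to a decimal(5, 2) target multiplies the unscaled value by 10.
      value = value.Rescale(scale, /*new_scale=*/2).ValueOrDie();
      std::cout << value.ToString(/*scale=*/2) << std::endl;  // 123.40
      return 0;
    }
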
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/decimal.h ADDED
@@ -0,0 +1,128 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <string>
21
+
22
+ #include "arrow/python/visibility.h"
23
+ #include "arrow/type.h"
24
+
25
+ namespace arrow {
26
+
27
+ class Decimal128;
28
+ class Decimal256;
29
+
30
+ namespace py {
31
+
32
+ class OwnedRef;
33
+
34
+ //
35
+ // Python Decimal support
36
+ //
37
+
38
+ namespace internal {
39
+
40
+ // \brief Import the Python Decimal type
41
+ ARROW_PYTHON_EXPORT
42
+ Status ImportDecimalType(OwnedRef* decimal_type);
43
+
44
+ // \brief Convert a Python Decimal object to a C++ string
45
+ // \param[in] python_decimal A Python decimal.Decimal instance
46
+ // \param[out] out The string representation of the Python Decimal instance
47
+ // \return The status of the operation
48
+ ARROW_PYTHON_EXPORT
49
+ Status PythonDecimalToString(PyObject* python_decimal, std::string* out);
50
+
51
+ // \brief Convert a C++ std::string to a Python Decimal instance
52
+ // \param[in] decimal_constructor The decimal type object
53
+ // \param[in] decimal_string A decimal string
54
+ // \return An instance of decimal.Decimal
55
+ ARROW_PYTHON_EXPORT
56
+ PyObject* DecimalFromString(PyObject* decimal_constructor,
57
+ const std::string& decimal_string);
58
+
59
+ // \brief Convert a Python decimal to an Arrow Decimal128 object
60
+ // \param[in] python_decimal A Python decimal.Decimal instance
61
+ // \param[in] arrow_type An instance of arrow::DecimalType
62
+ // \param[out] out A pointer to a Decimal128
63
+ // \return The status of the operation
64
+ ARROW_PYTHON_EXPORT
65
+ Status DecimalFromPythonDecimal(PyObject* python_decimal, const DecimalType& arrow_type,
66
+ Decimal128* out);
67
+
68
+ // \brief Convert a Python object to an Arrow Decimal128 object
69
+ // \param[in] obj A Python int or decimal.Decimal instance
70
+ // \param[in] arrow_type An instance of arrow::DecimalType
71
+ // \param[out] out A pointer to a Decimal128
72
+ // \return The status of the operation
73
+ ARROW_PYTHON_EXPORT
74
+ Status DecimalFromPyObject(PyObject* obj, const DecimalType& arrow_type, Decimal128* out);
75
+
76
+ // \brief Convert a Python decimal to an Arrow Decimal256 object
77
+ // \param[in] python_decimal A Python decimal.Decimal instance
78
+ // \param[in] arrow_type An instance of arrow::DecimalType
79
+ // \param[out] out A pointer to a Decimal256
80
+ // \return The status of the operation
81
+ ARROW_PYTHON_EXPORT
82
+ Status DecimalFromPythonDecimal(PyObject* python_decimal, const DecimalType& arrow_type,
83
+ Decimal256* out);
84
+
85
+ // \brief Convert a Python object to an Arrow Decimal256 object
86
+ // \param[in] obj A Python int or decimal.Decimal instance
87
+ // \param[in] arrow_type An instance of arrow::DecimalType
88
+ // \param[out] out A pointer to a Decimal256
89
+ // \return The status of the operation
90
+ ARROW_PYTHON_EXPORT
91
+ Status DecimalFromPyObject(PyObject* obj, const DecimalType& arrow_type, Decimal256* out);
92
+
93
+ // \brief Check whether obj is an instance of Decimal
94
+ ARROW_PYTHON_EXPORT
95
+ bool PyDecimal_Check(PyObject* obj);
96
+
97
+ // \brief Check whether obj is nan. This function will abort the program if the argument
98
+ // is not a Decimal instance
99
+ ARROW_PYTHON_EXPORT
100
+ bool PyDecimal_ISNAN(PyObject* obj);
101
+
102
+ // \brief Helper class to track and update the precision and scale of a decimal
103
+ class ARROW_PYTHON_EXPORT DecimalMetadata {
104
+ public:
105
+ DecimalMetadata();
106
+ DecimalMetadata(int32_t precision, int32_t scale);
107
+
108
+ // \brief Adjust the precision and scale of a decimal type given a new precision and a
109
+ // new scale \param[in] suggested_precision A candidate precision \param[in]
110
+ // suggested_scale A candidate scale \return The status of the operation
111
+ Status Update(int32_t suggested_precision, int32_t suggested_scale);
112
+
113
+ // \brief A convenient interface for updating the precision and scale based on a Python
114
+ // Decimal object \param object A Python Decimal object \return The status of the
115
+ // operation
116
+ Status Update(PyObject* object);
117
+
118
+ int32_t precision() const { return precision_; }
119
+ int32_t scale() const { return scale_; }
120
+
121
+ private:
122
+ int32_t precision_;
123
+ int32_t scale_;
124
+ };
125
+
126
+ } // namespace internal
127
+ } // namespace py
128
+ } // namespace arrow
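
DecimalMetadata widens its precision and scale with every Update() call so that all values observed so far still fit. A small sketch of that arithmetic, assuming a build linked against the arrow_python library (not part of the header):

    #include <iostream>
    #include "arrow/python/decimal.h"
    #include "arrow/util/logging.h"

    int main() {
      arrow::py::internal::DecimalMetadata meta;
      ARROW_CHECK_OK(meta.Update(/*suggested_precision=*/4, /*suggested_scale=*/1));  // e.g. "123.4"
      ARROW_CHECK_OK(meta.Update(/*suggested_precision=*/3, /*suggested_scale=*/3));  // e.g. "0.123"
      // Integer digits: max(4 - 1, 3 - 3) = 3; scale: max(1, 3) = 3 -> decimal(6, 3).
      std::cout << meta.precision() << ", " << meta.scale() << std::endl;  // 6, 3
      return 0;
    }
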
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/deserialize.cc ADDED
@@ -0,0 +1,495 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "arrow/python/deserialize.h"
19
+
20
+ #include "arrow/python/numpy_interop.h"
21
+
22
+ #include <cstdint>
23
+ #include <memory>
24
+ #include <string>
25
+ #include <utility>
26
+ #include <vector>
27
+
28
+ #include <numpy/arrayobject.h>
29
+ #include <numpy/arrayscalars.h>
30
+
31
+ #include "arrow/array.h"
32
+ #include "arrow/io/interfaces.h"
33
+ #include "arrow/io/memory.h"
34
+ #include "arrow/ipc/options.h"
35
+ #include "arrow/ipc/reader.h"
36
+ #include "arrow/ipc/util.h"
37
+ #include "arrow/ipc/writer.h"
38
+ #include "arrow/table.h"
39
+ #include "arrow/util/checked_cast.h"
40
+ #include "arrow/util/logging.h"
41
+ #include "arrow/util/value_parsing.h"
42
+
43
+ #include "arrow/python/common.h"
44
+ #include "arrow/python/datetime.h"
45
+ #include "arrow/python/helpers.h"
46
+ #include "arrow/python/numpy_convert.h"
47
+ #include "arrow/python/pyarrow.h"
48
+ #include "arrow/python/serialize.h"
49
+
50
+ namespace arrow {
51
+
52
+ using internal::checked_cast;
53
+ using internal::ParseValue;
54
+
55
+ namespace py {
56
+
57
+ Status CallDeserializeCallback(PyObject* context, PyObject* value,
58
+ PyObject** deserialized_object);
59
+
60
+ Status DeserializeTuple(PyObject* context, const Array& array, int64_t start_idx,
61
+ int64_t stop_idx, PyObject* base, const SerializedPyObject& blobs,
62
+ PyObject** out);
63
+
64
+ Status DeserializeList(PyObject* context, const Array& array, int64_t start_idx,
65
+ int64_t stop_idx, PyObject* base, const SerializedPyObject& blobs,
66
+ PyObject** out);
67
+
68
+ Status DeserializeSet(PyObject* context, const Array& array, int64_t start_idx,
69
+ int64_t stop_idx, PyObject* base, const SerializedPyObject& blobs,
70
+ PyObject** out);
71
+
72
+ Status DeserializeDict(PyObject* context, const Array& array, int64_t start_idx,
73
+ int64_t stop_idx, PyObject* base, const SerializedPyObject& blobs,
74
+ PyObject** out) {
75
+ const auto& data = checked_cast<const StructArray&>(array);
76
+ OwnedRef keys, vals;
77
+ OwnedRef result(PyDict_New());
78
+ RETURN_IF_PYERROR();
79
+
80
+ DCHECK_EQ(2, data.num_fields());
81
+
82
+ RETURN_NOT_OK(DeserializeList(context, *data.field(0), start_idx, stop_idx, base, blobs,
83
+ keys.ref()));
84
+ RETURN_NOT_OK(DeserializeList(context, *data.field(1), start_idx, stop_idx, base, blobs,
85
+ vals.ref()));
86
+ for (int64_t i = start_idx; i < stop_idx; ++i) {
87
+ // PyDict_SetItem behaves differently from PyList_SetItem and PyTuple_SetItem.
88
+ // The latter two steal references whereas PyDict_SetItem does not. So we need
89
+ // to make sure the reference count is decremented by letting the OwnedRef
90
+ // go out of scope at the end.
91
+ int ret = PyDict_SetItem(result.obj(), PyList_GET_ITEM(keys.obj(), i - start_idx),
92
+ PyList_GET_ITEM(vals.obj(), i - start_idx));
93
+ if (ret != 0) {
94
+ return ConvertPyError();
95
+ }
96
+ }
97
+ static PyObject* py_type = PyUnicode_FromString("_pytype_");
98
+ if (PyDict_Contains(result.obj(), py_type)) {
99
+ RETURN_NOT_OK(CallDeserializeCallback(context, result.obj(), out));
100
+ } else {
101
+ *out = result.detach();
102
+ }
103
+ return Status::OK();
104
+ }
105
+
106
+ Status DeserializeArray(int32_t index, PyObject* base, const SerializedPyObject& blobs,
107
+ PyObject** out) {
108
+ RETURN_NOT_OK(py::TensorToNdarray(blobs.ndarrays[index], base, out));
109
+ // Mark the array as immutable
110
+ OwnedRef flags(PyObject_GetAttrString(*out, "flags"));
111
+ if (flags.obj() == NULL) {
112
+ return ConvertPyError();
113
+ }
114
+ if (PyObject_SetAttrString(flags.obj(), "writeable", Py_False) < 0) {
115
+ return ConvertPyError();
116
+ }
117
+ return Status::OK();
118
+ }
119
+
120
+ Status GetValue(PyObject* context, const Array& arr, int64_t index, int8_t type,
121
+ PyObject* base, const SerializedPyObject& blobs, PyObject** result) {
122
+ switch (type) {
123
+ case PythonType::NONE:
124
+ Py_INCREF(Py_None);
125
+ *result = Py_None;
126
+ return Status::OK();
127
+ case PythonType::BOOL:
128
+ *result = PyBool_FromLong(checked_cast<const BooleanArray&>(arr).Value(index));
129
+ return Status::OK();
130
+ case PythonType::PY2INT:
131
+ case PythonType::INT: {
132
+ *result = PyLong_FromSsize_t(checked_cast<const Int64Array&>(arr).Value(index));
133
+ return Status::OK();
134
+ }
135
+ case PythonType::BYTES: {
136
+ auto view = checked_cast<const BinaryArray&>(arr).GetView(index);
137
+ *result = PyBytes_FromStringAndSize(view.data(), view.length());
138
+ return CheckPyError();
139
+ }
140
+ case PythonType::STRING: {
141
+ auto view = checked_cast<const StringArray&>(arr).GetView(index);
142
+ *result = PyUnicode_FromStringAndSize(view.data(), view.length());
143
+ return CheckPyError();
144
+ }
145
+ case PythonType::HALF_FLOAT: {
146
+ *result = PyHalf_FromHalf(checked_cast<const HalfFloatArray&>(arr).Value(index));
147
+ RETURN_IF_PYERROR();
148
+ return Status::OK();
149
+ }
150
+ case PythonType::FLOAT:
151
+ *result = PyFloat_FromDouble(checked_cast<const FloatArray&>(arr).Value(index));
152
+ return Status::OK();
153
+ case PythonType::DOUBLE:
154
+ *result = PyFloat_FromDouble(checked_cast<const DoubleArray&>(arr).Value(index));
155
+ return Status::OK();
156
+ case PythonType::DATE64: {
157
+ RETURN_NOT_OK(internal::PyDateTime_from_int(
158
+ checked_cast<const Date64Array&>(arr).Value(index), TimeUnit::MICRO, result));
159
+ RETURN_IF_PYERROR();
160
+ return Status::OK();
161
+ }
162
+ case PythonType::LIST: {
163
+ const auto& l = checked_cast<const ListArray&>(arr);
164
+ return DeserializeList(context, *l.values(), l.value_offset(index),
165
+ l.value_offset(index + 1), base, blobs, result);
166
+ }
167
+ case PythonType::DICT: {
168
+ const auto& l = checked_cast<const ListArray&>(arr);
169
+ return DeserializeDict(context, *l.values(), l.value_offset(index),
170
+ l.value_offset(index + 1), base, blobs, result);
171
+ }
172
+ case PythonType::TUPLE: {
173
+ const auto& l = checked_cast<const ListArray&>(arr);
174
+ return DeserializeTuple(context, *l.values(), l.value_offset(index),
175
+ l.value_offset(index + 1), base, blobs, result);
176
+ }
177
+ case PythonType::SET: {
178
+ const auto& l = checked_cast<const ListArray&>(arr);
179
+ return DeserializeSet(context, *l.values(), l.value_offset(index),
180
+ l.value_offset(index + 1), base, blobs, result);
181
+ }
182
+ case PythonType::TENSOR: {
183
+ int32_t ref = checked_cast<const Int32Array&>(arr).Value(index);
184
+ *result = wrap_tensor(blobs.tensors[ref]);
185
+ return Status::OK();
186
+ }
187
+ case PythonType::SPARSECOOTENSOR: {
188
+ int32_t ref = checked_cast<const Int32Array&>(arr).Value(index);
189
+ const std::shared_ptr<SparseCOOTensor>& sparse_coo_tensor =
190
+ arrow::internal::checked_pointer_cast<SparseCOOTensor>(
191
+ blobs.sparse_tensors[ref]);
192
+ *result = wrap_sparse_coo_tensor(sparse_coo_tensor);
193
+ return Status::OK();
194
+ }
195
+ case PythonType::SPARSECSRMATRIX: {
196
+ int32_t ref = checked_cast<const Int32Array&>(arr).Value(index);
197
+ const std::shared_ptr<SparseCSRMatrix>& sparse_csr_matrix =
198
+ arrow::internal::checked_pointer_cast<SparseCSRMatrix>(
199
+ blobs.sparse_tensors[ref]);
200
+ *result = wrap_sparse_csr_matrix(sparse_csr_matrix);
201
+ return Status::OK();
202
+ }
203
+ case PythonType::SPARSECSCMATRIX: {
204
+ int32_t ref = checked_cast<const Int32Array&>(arr).Value(index);
205
+ const std::shared_ptr<SparseCSCMatrix>& sparse_csc_matrix =
206
+ arrow::internal::checked_pointer_cast<SparseCSCMatrix>(
207
+ blobs.sparse_tensors[ref]);
208
+ *result = wrap_sparse_csc_matrix(sparse_csc_matrix);
209
+ return Status::OK();
210
+ }
211
+ case PythonType::SPARSECSFTENSOR: {
212
+ int32_t ref = checked_cast<const Int32Array&>(arr).Value(index);
213
+ const std::shared_ptr<SparseCSFTensor>& sparse_csf_tensor =
214
+ arrow::internal::checked_pointer_cast<SparseCSFTensor>(
215
+ blobs.sparse_tensors[ref]);
216
+ *result = wrap_sparse_csf_tensor(sparse_csf_tensor);
217
+ return Status::OK();
218
+ }
219
+ case PythonType::NDARRAY: {
220
+ int32_t ref = checked_cast<const Int32Array&>(arr).Value(index);
221
+ return DeserializeArray(ref, base, blobs, result);
222
+ }
223
+ case PythonType::BUFFER: {
224
+ int32_t ref = checked_cast<const Int32Array&>(arr).Value(index);
225
+ *result = wrap_buffer(blobs.buffers[ref]);
226
+ return Status::OK();
227
+ }
228
+ default: {
229
+ ARROW_CHECK(false) << "union tag '" << static_cast<int>(type) << "' not recognized";
230
+ }
231
+ }
232
+ return Status::OK();
233
+ }
234
+
235
+ Status GetPythonTypes(const UnionArray& data, std::vector<int8_t>* result) {
236
+ ARROW_CHECK(result != nullptr);
237
+ auto type = data.type();
238
+ for (int i = 0; i < type->num_fields(); ++i) {
239
+ int8_t tag = 0;
240
+ const std::string& data = type->field(i)->name();
241
+ if (!ParseValue<Int8Type>(data.c_str(), data.size(), &tag)) {
242
+ return Status::SerializationError("Cannot convert string: \"",
243
+ type->field(i)->name(), "\" to int8_t");
244
+ }
245
+ result->push_back(tag);
246
+ }
247
+ return Status::OK();
248
+ }
249
+
250
+ template <typename CreateSequenceFn, typename SetItemFn>
251
+ Status DeserializeSequence(PyObject* context, const Array& array, int64_t start_idx,
252
+ int64_t stop_idx, PyObject* base,
253
+ const SerializedPyObject& blobs,
254
+ CreateSequenceFn&& create_sequence, SetItemFn&& set_item,
255
+ PyObject** out) {
256
+ const auto& data = checked_cast<const DenseUnionArray&>(array);
257
+ OwnedRef result(create_sequence(stop_idx - start_idx));
258
+ RETURN_IF_PYERROR();
259
+ const int8_t* type_codes = data.raw_type_codes();
260
+ const int32_t* value_offsets = data.raw_value_offsets();
261
+ std::vector<int8_t> python_types;
262
+ RETURN_NOT_OK(GetPythonTypes(data, &python_types));
263
+ for (int64_t i = start_idx; i < stop_idx; ++i) {
264
+ const int64_t offset = value_offsets[i];
265
+ const uint8_t type = type_codes[i];
266
+ PyObject* value;
267
+ RETURN_NOT_OK(GetValue(context, *data.field(type), offset, python_types[type], base,
268
+ blobs, &value));
269
+ RETURN_NOT_OK(set_item(result.obj(), i - start_idx, value));
270
+ }
271
+ *out = result.detach();
272
+ return Status::OK();
273
+ }
274
+
275
+ Status DeserializeList(PyObject* context, const Array& array, int64_t start_idx,
276
+ int64_t stop_idx, PyObject* base, const SerializedPyObject& blobs,
277
+ PyObject** out) {
278
+ return DeserializeSequence(
279
+ context, array, start_idx, stop_idx, base, blobs,
280
+ [](int64_t size) { return PyList_New(size); },
281
+ [](PyObject* seq, int64_t index, PyObject* item) {
282
+ PyList_SET_ITEM(seq, index, item);
283
+ return Status::OK();
284
+ },
285
+ out);
286
+ }
287
+
288
+ Status DeserializeTuple(PyObject* context, const Array& array, int64_t start_idx,
289
+ int64_t stop_idx, PyObject* base, const SerializedPyObject& blobs,
290
+ PyObject** out) {
291
+ return DeserializeSequence(
292
+ context, array, start_idx, stop_idx, base, blobs,
293
+ [](int64_t size) { return PyTuple_New(size); },
294
+ [](PyObject* seq, int64_t index, PyObject* item) {
295
+ PyTuple_SET_ITEM(seq, index, item);
296
+ return Status::OK();
297
+ },
298
+ out);
299
+ }
300
+
301
+ Status DeserializeSet(PyObject* context, const Array& array, int64_t start_idx,
302
+ int64_t stop_idx, PyObject* base, const SerializedPyObject& blobs,
303
+ PyObject** out) {
304
+ return DeserializeSequence(
305
+ context, array, start_idx, stop_idx, base, blobs,
306
+ [](int64_t size) { return PySet_New(nullptr); },
307
+ [](PyObject* seq, int64_t index, PyObject* item) {
308
+ int err = PySet_Add(seq, item);
309
+ Py_DECREF(item);
310
+ if (err < 0) {
311
+ RETURN_IF_PYERROR();
312
+ }
313
+ return Status::OK();
314
+ },
315
+ out);
316
+ }
317
+
318
+ Status ReadSerializedObject(io::RandomAccessFile* src, SerializedPyObject* out) {
319
+ int32_t num_tensors;
320
+ int32_t num_sparse_tensors;
321
+ int32_t num_ndarrays;
322
+ int32_t num_buffers;
323
+
324
+ // Read number of tensors
325
+ RETURN_NOT_OK(src->Read(sizeof(int32_t), reinterpret_cast<uint8_t*>(&num_tensors)));
326
+ RETURN_NOT_OK(
327
+ src->Read(sizeof(int32_t), reinterpret_cast<uint8_t*>(&num_sparse_tensors)));
328
+ RETURN_NOT_OK(src->Read(sizeof(int32_t), reinterpret_cast<uint8_t*>(&num_ndarrays)));
329
+ RETURN_NOT_OK(src->Read(sizeof(int32_t), reinterpret_cast<uint8_t*>(&num_buffers)));
330
+
331
+ // Align stream to 8-byte offset
332
+ RETURN_NOT_OK(ipc::AlignStream(src, ipc::kArrowIpcAlignment));
333
+ std::shared_ptr<RecordBatchReader> reader;
334
+ ARROW_ASSIGN_OR_RAISE(reader, ipc::RecordBatchStreamReader::Open(src));
335
+ RETURN_NOT_OK(reader->ReadNext(&out->batch));
336
+
337
+ /// Skip EOS marker
338
+ RETURN_NOT_OK(src->Advance(4));
339
+
340
+ /// Align stream so tensor bodies are 64-byte aligned
341
+ RETURN_NOT_OK(ipc::AlignStream(src, ipc::kTensorAlignment));
342
+
343
+ for (int i = 0; i < num_tensors; ++i) {
344
+ std::shared_ptr<Tensor> tensor;
345
+ ARROW_ASSIGN_OR_RAISE(tensor, ipc::ReadTensor(src));
346
+ RETURN_NOT_OK(ipc::AlignStream(src, ipc::kTensorAlignment));
347
+ out->tensors.push_back(tensor);
348
+ }
349
+
350
+ for (int i = 0; i < num_sparse_tensors; ++i) {
351
+ std::shared_ptr<SparseTensor> sparse_tensor;
352
+ ARROW_ASSIGN_OR_RAISE(sparse_tensor, ipc::ReadSparseTensor(src));
353
+ RETURN_NOT_OK(ipc::AlignStream(src, ipc::kTensorAlignment));
354
+ out->sparse_tensors.push_back(sparse_tensor);
355
+ }
356
+
357
+ for (int i = 0; i < num_ndarrays; ++i) {
358
+ std::shared_ptr<Tensor> ndarray;
359
+ ARROW_ASSIGN_OR_RAISE(ndarray, ipc::ReadTensor(src));
360
+ RETURN_NOT_OK(ipc::AlignStream(src, ipc::kTensorAlignment));
361
+ out->ndarrays.push_back(ndarray);
362
+ }
363
+
364
+ ARROW_ASSIGN_OR_RAISE(int64_t offset, src->Tell());
365
+ for (int i = 0; i < num_buffers; ++i) {
366
+ int64_t size;
367
+ RETURN_NOT_OK(src->ReadAt(offset, sizeof(int64_t), &size));
368
+ offset += sizeof(int64_t);
369
+ ARROW_ASSIGN_OR_RAISE(auto buffer, src->ReadAt(offset, size));
370
+ out->buffers.push_back(buffer);
371
+ offset += size;
372
+ }
373
+
374
+ return Status::OK();
375
+ }
376
+
377
+ Status DeserializeObject(PyObject* context, const SerializedPyObject& obj, PyObject* base,
378
+ PyObject** out) {
379
+ PyAcquireGIL lock;
380
+ return DeserializeList(context, *obj.batch->column(0), 0, obj.batch->num_rows(), base,
381
+ obj, out);
382
+ }
383
+
384
+ Status GetSerializedFromComponents(int num_tensors,
385
+ const SparseTensorCounts& num_sparse_tensors,
386
+ int num_ndarrays, int num_buffers, PyObject* data,
387
+ SerializedPyObject* out) {
388
+ PyAcquireGIL gil;
389
+ const Py_ssize_t data_length = PyList_Size(data);
390
+ RETURN_IF_PYERROR();
391
+
392
+ const Py_ssize_t expected_data_length = 1 + num_tensors * 2 +
393
+ num_sparse_tensors.num_total_buffers() +
394
+ num_ndarrays * 2 + num_buffers;
395
+ if (data_length != expected_data_length) {
396
+ return Status::Invalid("Invalid number of buffers in data");
397
+ }
398
+
399
+ auto GetBuffer = [&data](Py_ssize_t index, std::shared_ptr<Buffer>* out) {
400
+ ARROW_CHECK_LE(index, PyList_Size(data));
401
+ PyObject* py_buf = PyList_GET_ITEM(data, index);
402
+ return unwrap_buffer(py_buf).Value(out);
403
+ };
404
+
405
+ Py_ssize_t buffer_index = 0;
406
+
407
+ // Read the union batch describing object structure
408
+ {
409
+ std::shared_ptr<Buffer> data_buffer;
410
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &data_buffer));
411
+ gil.release();
412
+ io::BufferReader buf_reader(data_buffer);
413
+ std::shared_ptr<RecordBatchReader> reader;
414
+ ARROW_ASSIGN_OR_RAISE(reader, ipc::RecordBatchStreamReader::Open(&buf_reader));
415
+ RETURN_NOT_OK(reader->ReadNext(&out->batch));
416
+ gil.acquire();
417
+ }
418
+
419
+ // Zero-copy reconstruct tensors
420
+ for (int i = 0; i < num_tensors; ++i) {
421
+ std::shared_ptr<Buffer> metadata;
422
+ std::shared_ptr<Buffer> body;
423
+ std::shared_ptr<Tensor> tensor;
424
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &metadata));
425
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &body));
426
+
427
+ ipc::Message message(metadata, body);
428
+
429
+ ARROW_ASSIGN_OR_RAISE(tensor, ipc::ReadTensor(message));
430
+ out->tensors.emplace_back(std::move(tensor));
431
+ }
432
+
433
+ // Zero-copy reconstruct sparse tensors
434
+ for (int i = 0, n = num_sparse_tensors.num_total_tensors(); i < n; ++i) {
435
+ ipc::IpcPayload payload;
436
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &payload.metadata));
437
+
438
+ ARROW_ASSIGN_OR_RAISE(
439
+ size_t num_bodies,
440
+ ipc::internal::ReadSparseTensorBodyBufferCount(*payload.metadata));
441
+
442
+ payload.body_buffers.reserve(num_bodies);
443
+ for (size_t i = 0; i < num_bodies; ++i) {
444
+ std::shared_ptr<Buffer> body;
445
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &body));
446
+ payload.body_buffers.emplace_back(body);
447
+ }
448
+
449
+ std::shared_ptr<SparseTensor> sparse_tensor;
450
+ ARROW_ASSIGN_OR_RAISE(sparse_tensor, ipc::internal::ReadSparseTensorPayload(payload));
451
+ out->sparse_tensors.emplace_back(std::move(sparse_tensor));
452
+ }
453
+
454
+ // Zero-copy reconstruct tensors for numpy ndarrays
455
+ for (int i = 0; i < num_ndarrays; ++i) {
456
+ std::shared_ptr<Buffer> metadata;
457
+ std::shared_ptr<Buffer> body;
458
+ std::shared_ptr<Tensor> tensor;
459
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &metadata));
460
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &body));
461
+
462
+ ipc::Message message(metadata, body);
463
+
464
+ ARROW_ASSIGN_OR_RAISE(tensor, ipc::ReadTensor(message));
465
+ out->ndarrays.emplace_back(std::move(tensor));
466
+ }
467
+
468
+ // Unwrap and append buffers
469
+ for (int i = 0; i < num_buffers; ++i) {
470
+ std::shared_ptr<Buffer> buffer;
471
+ RETURN_NOT_OK(GetBuffer(buffer_index++, &buffer));
472
+ out->buffers.emplace_back(std::move(buffer));
473
+ }
474
+
475
+ return Status::OK();
476
+ }
477
+
478
+ Status DeserializeNdarray(const SerializedPyObject& object,
479
+ std::shared_ptr<Tensor>* out) {
480
+ if (object.ndarrays.size() != 1) {
481
+ return Status::Invalid("Object is not an Ndarray");
482
+ }
483
+ *out = object.ndarrays[0];
484
+ return Status::OK();
485
+ }
486
+
487
+ Status NdarrayFromBuffer(std::shared_ptr<Buffer> src, std::shared_ptr<Tensor>* out) {
488
+ io::BufferReader reader(src);
489
+ SerializedPyObject object;
490
+ RETURN_NOT_OK(ReadSerializedObject(&reader, &object));
491
+ return DeserializeNdarray(object, out);
492
+ }
493
+
494
+ } // namespace py
495
+ } // namespace arrow
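A quick worked example of the length check in GetSerializedFromComponents above (an illustrative sketch, not part of the original file): the `data` list must contain one buffer for the union batch, two buffers (IPC metadata plus body) per dense tensor and per ndarray, one buffer per sparse-tensor body buffer, and one per plain buffer, in exactly that order.

    // Illustrative only: 2 tensors, no sparse tensors, 1 ndarray, 3 plain buffers.
    const int num_tensors = 2;
    const int num_sparse_buffers = 0;  // num_sparse_tensors.num_total_buffers()
    const int num_ndarrays = 1;
    const int num_buffers = 3;
    const int expected_data_length =
        1 + num_tensors * 2 + num_sparse_buffers + num_ndarrays * 2 + num_buffers;
    // expected_data_length == 10, so PyList_Size(data) must also be 10.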
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/extension_type.cc ADDED
@@ -0,0 +1,217 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include <memory>
19
+ #include <sstream>
20
+ #include <utility>
21
+
22
+ #include "arrow/python/extension_type.h"
23
+ #include "arrow/python/helpers.h"
24
+ #include "arrow/python/pyarrow.h"
25
+ #include "arrow/util/checked_cast.h"
26
+ #include "arrow/util/logging.h"
27
+
28
+ namespace arrow {
29
+
30
+ using internal::checked_cast;
31
+
32
+ namespace py {
33
+
34
+ namespace {
35
+
36
+ // Serialize a Python ExtensionType instance
37
+ Status SerializeExtInstance(PyObject* type_instance, std::string* out) {
38
+ OwnedRef res(
39
+ cpp_PyObject_CallMethod(type_instance, "__arrow_ext_serialize__", nullptr));
40
+ if (!res) {
41
+ return ConvertPyError();
42
+ }
43
+ if (!PyBytes_Check(res.obj())) {
44
+ return Status::TypeError(
45
+ "__arrow_ext_serialize__ should return bytes object, "
46
+ "got ",
47
+ internal::PyObject_StdStringRepr(res.obj()));
48
+ }
49
+ *out = internal::PyBytes_AsStdString(res.obj());
50
+ return Status::OK();
51
+ }
52
+
53
+ // Deserialize a Python ExtensionType instance
54
+ PyObject* DeserializeExtInstance(PyObject* type_class,
55
+ std::shared_ptr<DataType> storage_type,
56
+ const std::string& serialized_data) {
57
+ OwnedRef storage_ref(wrap_data_type(storage_type));
58
+ if (!storage_ref) {
59
+ return nullptr;
60
+ }
61
+ OwnedRef data_ref(PyBytes_FromStringAndSize(
62
+ serialized_data.data(), static_cast<Py_ssize_t>(serialized_data.size())));
63
+ if (!data_ref) {
64
+ return nullptr;
65
+ }
66
+
67
+ return cpp_PyObject_CallMethod(type_class, "__arrow_ext_deserialize__", "OO",
68
+ storage_ref.obj(), data_ref.obj());
69
+ }
70
+
71
+ } // namespace
72
+
73
+ static const char* kExtensionName = "arrow.py_extension_type";
74
+
75
+ std::string PyExtensionType::ToString(bool show_metadata) const {
76
+ PyAcquireGIL lock;
77
+
78
+ std::stringstream ss;
79
+ OwnedRef instance(GetInstance());
80
+ ss << "extension<" << this->extension_name() << "<" << Py_TYPE(instance.obj())->tp_name
81
+ << ">>";
82
+ return ss.str();
83
+ }
84
+
85
+ PyExtensionType::PyExtensionType(std::shared_ptr<DataType> storage_type, PyObject* typ,
86
+ PyObject* inst)
87
+ : ExtensionType(storage_type),
88
+ extension_name_(kExtensionName),
89
+ type_class_(typ),
90
+ type_instance_(inst) {}
91
+
92
+ PyExtensionType::PyExtensionType(std::shared_ptr<DataType> storage_type,
93
+ std::string extension_name, PyObject* typ,
94
+ PyObject* inst)
95
+ : ExtensionType(storage_type),
96
+ extension_name_(std::move(extension_name)),
97
+ type_class_(typ),
98
+ type_instance_(inst) {}
99
+
100
+ bool PyExtensionType::ExtensionEquals(const ExtensionType& other) const {
101
+ PyAcquireGIL lock;
102
+
103
+ if (other.extension_name() != extension_name()) {
104
+ return false;
105
+ }
106
+ const auto& other_ext = checked_cast<const PyExtensionType&>(other);
107
+ int res = -1;
108
+ if (!type_instance_) {
109
+ if (other_ext.type_instance_) {
110
+ return false;
111
+ }
112
+ // Compare Python types
113
+ res = PyObject_RichCompareBool(type_class_.obj(), other_ext.type_class_.obj(), Py_EQ);
114
+ } else {
115
+ if (!other_ext.type_instance_) {
116
+ return false;
117
+ }
118
+ // Compare Python instances
119
+ OwnedRef left(GetInstance());
120
+ OwnedRef right(other_ext.GetInstance());
121
+ if (!left || !right) {
122
+ goto error;
123
+ }
124
+ res = PyObject_RichCompareBool(left.obj(), right.obj(), Py_EQ);
125
+ }
126
+ if (res == -1) {
127
+ goto error;
128
+ }
129
+ return res == 1;
130
+
131
+ error:
132
+ // Cannot propagate error
133
+ PyErr_WriteUnraisable(nullptr);
134
+ return false;
135
+ }
136
+
137
+ std::shared_ptr<Array> PyExtensionType::MakeArray(std::shared_ptr<ArrayData> data) const {
138
+ DCHECK_EQ(data->type->id(), Type::EXTENSION);
139
+ return std::make_shared<ExtensionArray>(data);
140
+ }
141
+
142
+ std::string PyExtensionType::Serialize() const {
143
+ DCHECK(type_instance_);
144
+ return serialized_;
145
+ }
146
+
147
+ Result<std::shared_ptr<DataType>> PyExtensionType::Deserialize(
148
+ std::shared_ptr<DataType> storage_type, const std::string& serialized_data) const {
149
+ PyAcquireGIL lock;
150
+
151
+ if (import_pyarrow()) {
152
+ return ConvertPyError();
153
+ }
154
+ OwnedRef res(DeserializeExtInstance(type_class_.obj(), storage_type, serialized_data));
155
+ if (!res) {
156
+ return ConvertPyError();
157
+ }
158
+ return unwrap_data_type(res.obj());
159
+ }
160
+
161
+ PyObject* PyExtensionType::GetInstance() const {
162
+ if (!type_instance_) {
163
+ PyErr_SetString(PyExc_TypeError, "Not an instance");
164
+ return nullptr;
165
+ }
166
+ DCHECK(PyWeakref_CheckRef(type_instance_.obj()));
167
+ PyObject* inst = PyWeakref_GET_OBJECT(type_instance_.obj());
168
+ if (inst != Py_None) {
169
+ // Cached instance still alive
170
+ Py_INCREF(inst);
171
+ return inst;
172
+ } else {
173
+ // Must reconstruct from serialized form
174
+ // XXX cache again?
175
+ return DeserializeExtInstance(type_class_.obj(), storage_type_, serialized_);
176
+ }
177
+ }
178
+
179
+ Status PyExtensionType::SetInstance(PyObject* inst) const {
180
+ // Check we have the right type
181
+ PyObject* typ = reinterpret_cast<PyObject*>(Py_TYPE(inst));
182
+ if (typ != type_class_.obj()) {
183
+ return Status::TypeError("Unexpected Python ExtensionType class ",
184
+ internal::PyObject_StdStringRepr(typ), " expected ",
185
+ internal::PyObject_StdStringRepr(type_class_.obj()));
186
+ }
187
+
188
+ PyObject* wr = PyWeakref_NewRef(inst, nullptr);
189
+ if (wr == NULL) {
190
+ return ConvertPyError();
191
+ }
192
+ type_instance_.reset(wr);
193
+ return SerializeExtInstance(inst, &serialized_);
194
+ }
195
+
196
+ Status PyExtensionType::FromClass(const std::shared_ptr<DataType> storage_type,
197
+ const std::string extension_name, PyObject* typ,
198
+ std::shared_ptr<ExtensionType>* out) {
199
+ Py_INCREF(typ);
200
+ out->reset(new PyExtensionType(storage_type, std::move(extension_name), typ));
201
+ return Status::OK();
202
+ }
203
+
204
+ Status RegisterPyExtensionType(const std::shared_ptr<DataType>& type) {
205
+ DCHECK_EQ(type->id(), Type::EXTENSION);
206
+ auto ext_type = std::dynamic_pointer_cast<ExtensionType>(type);
207
+ return RegisterExtensionType(ext_type);
208
+ }
209
+
210
+ Status UnregisterPyExtensionType(const std::string& type_name) {
211
+ return UnregisterExtensionType(type_name);
212
+ }
213
+
214
+ std::string PyExtensionName() { return kExtensionName; }
215
+
216
+ } // namespace py
217
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/extension_type.h ADDED
@@ -0,0 +1,85 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <memory>
21
+ #include <string>
22
+
23
+ #include "arrow/extension_type.h"
24
+ #include "arrow/python/common.h"
25
+ #include "arrow/python/visibility.h"
26
+ #include "arrow/util/macros.h"
27
+
28
+ namespace arrow {
29
+ namespace py {
30
+
31
+ class ARROW_PYTHON_EXPORT PyExtensionType : public ExtensionType {
32
+ public:
33
+ // Implement the ExtensionType API
34
+ std::string extension_name() const override { return extension_name_; }
35
+
36
+ std::string ToString(bool show_metadata = false) const override;
37
+
38
+ bool ExtensionEquals(const ExtensionType& other) const override;
39
+
40
+ std::shared_ptr<Array> MakeArray(std::shared_ptr<ArrayData> data) const override;
41
+
42
+ Result<std::shared_ptr<DataType>> Deserialize(
43
+ std::shared_ptr<DataType> storage_type,
44
+ const std::string& serialized) const override;
45
+
46
+ std::string Serialize() const override;
47
+
48
+ // For use from Cython
49
+ // Assumes that `typ` is borrowed
50
+ static Status FromClass(const std::shared_ptr<DataType> storage_type,
51
+ const std::string extension_name, PyObject* typ,
52
+ std::shared_ptr<ExtensionType>* out);
53
+
54
+ // Return new ref
55
+ PyObject* GetInstance() const;
56
+ Status SetInstance(PyObject*) const;
57
+
58
+ protected:
59
+ PyExtensionType(std::shared_ptr<DataType> storage_type, PyObject* typ,
60
+ PyObject* inst = NULLPTR);
61
+ PyExtensionType(std::shared_ptr<DataType> storage_type, std::string extension_name,
62
+ PyObject* typ, PyObject* inst = NULLPTR);
63
+
64
+ std::string extension_name_;
65
+
66
+ // These fields are mutable because of two-step initialization.
67
+ mutable OwnedRefNoGIL type_class_;
68
+ // A weakref or null. Storing a strong reference to the Python extension type
69
+ // instance would create an unreclaimable reference cycle between Python and C++
70
+ // (the Python instance has to keep a strong reference to the C++ ExtensionType
71
+ // in the other direction). Instead, we store a weakref to the instance.
72
+ // If the weakref is dead, we reconstruct the instance from its serialized form.
73
+ mutable OwnedRefNoGIL type_instance_;
74
+ // Empty if type_instance_ is null
75
+ mutable std::string serialized_;
76
+ };
77
+
78
+ ARROW_PYTHON_EXPORT std::string PyExtensionName();
79
+
80
+ ARROW_PYTHON_EXPORT Status RegisterPyExtensionType(const std::shared_ptr<DataType>&);
81
+
82
+ ARROW_PYTHON_EXPORT Status UnregisterPyExtensionType(const std::string& type_name);
83
+
84
+ } // namespace py
85
+ } // namespace arrow
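A minimal usage sketch of the API declared above, assuming the GIL is held and `py_type_class` is a borrowed PyObject* for a Python class that implements __arrow_ext_serialize__ and __arrow_ext_deserialize__; the wrapper function name is hypothetical.

    #include "arrow/python/extension_type.h"

    arrow::Status RegisterPythonDefinedType(PyObject* py_type_class) {
      std::shared_ptr<arrow::ExtensionType> ext_type;
      // Wrap the Python class as a C++ ExtensionType with a 16-byte binary storage type.
      ARROW_RETURN_NOT_OK(arrow::py::PyExtensionType::FromClass(
          arrow::fixed_size_binary(16), arrow::py::PyExtensionName(), py_type_class,
          &ext_type));
      // Registering lets IPC readers reconstruct the Python-defined type by name.
      return arrow::py::RegisterPyExtensionType(ext_type);
    }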
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/filesystem.h ADDED
@@ -0,0 +1,130 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <memory>
21
+ #include <string>
22
+ #include <vector>
23
+
24
+ #include "arrow/filesystem/filesystem.h"
25
+ #include "arrow/python/common.h"
26
+ #include "arrow/python/visibility.h"
27
+ #include "arrow/util/macros.h"
28
+
29
+ namespace arrow::py::fs {
30
+
31
+ class ARROW_PYTHON_EXPORT PyFileSystemVtable {
32
+ public:
33
+ std::function<void(PyObject*, std::string* out)> get_type_name;
34
+ std::function<bool(PyObject*, const arrow::fs::FileSystem& other)> equals;
35
+
36
+ std::function<void(PyObject*, const std::string& path, arrow::fs::FileInfo* out)>
37
+ get_file_info;
38
+ std::function<void(PyObject*, const std::vector<std::string>& paths,
39
+ std::vector<arrow::fs::FileInfo>* out)>
40
+ get_file_info_vector;
41
+ std::function<void(PyObject*, const arrow::fs::FileSelector&,
42
+ std::vector<arrow::fs::FileInfo>* out)>
43
+ get_file_info_selector;
44
+
45
+ std::function<void(PyObject*, const std::string& path, bool)> create_dir;
46
+ std::function<void(PyObject*, const std::string& path)> delete_dir;
47
+ std::function<void(PyObject*, const std::string& path, bool)> delete_dir_contents;
48
+ std::function<void(PyObject*)> delete_root_dir_contents;
49
+ std::function<void(PyObject*, const std::string& path)> delete_file;
50
+ std::function<void(PyObject*, const std::string& src, const std::string& dest)> move;
51
+ std::function<void(PyObject*, const std::string& src, const std::string& dest)>
52
+ copy_file;
53
+
54
+ std::function<void(PyObject*, const std::string& path,
55
+ std::shared_ptr<io::InputStream>* out)>
56
+ open_input_stream;
57
+ std::function<void(PyObject*, const std::string& path,
58
+ std::shared_ptr<io::RandomAccessFile>* out)>
59
+ open_input_file;
60
+ std::function<void(PyObject*, const std::string& path,
61
+ const std::shared_ptr<const KeyValueMetadata>&,
62
+ std::shared_ptr<io::OutputStream>* out)>
63
+ open_output_stream;
64
+ std::function<void(PyObject*, const std::string& path,
65
+ const std::shared_ptr<const KeyValueMetadata>&,
66
+ std::shared_ptr<io::OutputStream>* out)>
67
+ open_append_stream;
68
+
69
+ std::function<void(PyObject*, const std::string& path, std::string* out)>
70
+ normalize_path;
71
+ };
72
+
73
+ class ARROW_PYTHON_EXPORT PyFileSystem : public arrow::fs::FileSystem {
74
+ public:
75
+ PyFileSystem(PyObject* handler, PyFileSystemVtable vtable);
76
+ ~PyFileSystem() override;
77
+
78
+ static std::shared_ptr<PyFileSystem> Make(PyObject* handler, PyFileSystemVtable vtable);
79
+
80
+ std::string type_name() const override;
81
+
82
+ bool Equals(const FileSystem& other) const override;
83
+
84
+ /// \cond FALSE
85
+ using FileSystem::CreateDir;
86
+ using FileSystem::DeleteDirContents;
87
+ using FileSystem::GetFileInfo;
88
+ using FileSystem::OpenAppendStream;
89
+ using FileSystem::OpenOutputStream;
90
+ /// \endcond
91
+
92
+ Result<arrow::fs::FileInfo> GetFileInfo(const std::string& path) override;
93
+ Result<std::vector<arrow::fs::FileInfo>> GetFileInfo(
94
+ const std::vector<std::string>& paths) override;
95
+ Result<std::vector<arrow::fs::FileInfo>> GetFileInfo(
96
+ const arrow::fs::FileSelector& select) override;
97
+
98
+ Status CreateDir(const std::string& path, bool recursive) override;
99
+
100
+ Status DeleteDir(const std::string& path) override;
101
+ Status DeleteDirContents(const std::string& path, bool missing_dir_ok) override;
102
+ Status DeleteRootDirContents() override;
103
+
104
+ Status DeleteFile(const std::string& path) override;
105
+
106
+ Status Move(const std::string& src, const std::string& dest) override;
107
+
108
+ Status CopyFile(const std::string& src, const std::string& dest) override;
109
+
110
+ Result<std::shared_ptr<io::InputStream>> OpenInputStream(
111
+ const std::string& path) override;
112
+ Result<std::shared_ptr<io::RandomAccessFile>> OpenInputFile(
113
+ const std::string& path) override;
114
+ Result<std::shared_ptr<io::OutputStream>> OpenOutputStream(
115
+ const std::string& path,
116
+ const std::shared_ptr<const KeyValueMetadata>& metadata) override;
117
+ Result<std::shared_ptr<io::OutputStream>> OpenAppendStream(
118
+ const std::string& path,
119
+ const std::shared_ptr<const KeyValueMetadata>& metadata) override;
120
+
121
+ Result<std::string> NormalizePath(std::string path) override;
122
+
123
+ PyObject* handler() const { return handler_.obj(); }
124
+
125
+ private:
126
+ OwnedRefNoGIL handler_;
127
+ PyFileSystemVtable vtable_;
128
+ };
129
+
130
+ } // namespace arrow::py::fs
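A minimal wiring sketch for the declarations above; only two vtable slots are filled, and the rest would be populated the same way (in pyarrow this is done from Cython). `py_handler` is assumed to be a live PyObject* and the helper name is hypothetical.

    #include "arrow/python/filesystem.h"

    std::shared_ptr<arrow::py::fs::PyFileSystem> MakeDemoFileSystem(PyObject* py_handler) {
      arrow::py::fs::PyFileSystemVtable vtable;
      // Each slot forwards a FileSystem call to the Python handler object.
      vtable.get_type_name = [](PyObject* handler, std::string* out) { *out = "demo"; };
      vtable.equals = [](PyObject* handler, const arrow::fs::FileSystem& other) {
        return false;  // a real bridge would compare against `other` via Python
      };
      return arrow::py::fs::PyFileSystem::Make(py_handler, std::move(vtable));
    }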
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/flight.cc ADDED
@@ -0,0 +1,388 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include <signal.h>
19
+ #include <utility>
20
+
21
+ #include "arrow/python/flight.h"
22
+ #include "arrow/util/io_util.h"
23
+ #include "arrow/util/logging.h"
24
+
25
+ using arrow::flight::FlightPayload;
26
+
27
+ namespace arrow {
28
+ namespace py {
29
+ namespace flight {
30
+
31
+ const char* kPyServerMiddlewareName = "arrow.py_server_middleware";
32
+
33
+ PyServerAuthHandler::PyServerAuthHandler(PyObject* handler,
34
+ const PyServerAuthHandlerVtable& vtable)
35
+ : vtable_(vtable) {
36
+ Py_INCREF(handler);
37
+ handler_.reset(handler);
38
+ }
39
+
40
+ Status PyServerAuthHandler::Authenticate(arrow::flight::ServerAuthSender* outgoing,
41
+ arrow::flight::ServerAuthReader* incoming) {
42
+ return SafeCallIntoPython([=] {
43
+ const Status status = vtable_.authenticate(handler_.obj(), outgoing, incoming);
44
+ RETURN_NOT_OK(CheckPyError());
45
+ return status;
46
+ });
47
+ }
48
+
49
+ Status PyServerAuthHandler::IsValid(const std::string& token,
50
+ std::string* peer_identity) {
51
+ return SafeCallIntoPython([=] {
52
+ const Status status = vtable_.is_valid(handler_.obj(), token, peer_identity);
53
+ RETURN_NOT_OK(CheckPyError());
54
+ return status;
55
+ });
56
+ }
57
+
58
+ PyClientAuthHandler::PyClientAuthHandler(PyObject* handler,
59
+ const PyClientAuthHandlerVtable& vtable)
60
+ : vtable_(vtable) {
61
+ Py_INCREF(handler);
62
+ handler_.reset(handler);
63
+ }
64
+
65
+ Status PyClientAuthHandler::Authenticate(arrow::flight::ClientAuthSender* outgoing,
66
+ arrow::flight::ClientAuthReader* incoming) {
67
+ return SafeCallIntoPython([=] {
68
+ const Status status = vtable_.authenticate(handler_.obj(), outgoing, incoming);
69
+ RETURN_NOT_OK(CheckPyError());
70
+ return status;
71
+ });
72
+ }
73
+
74
+ Status PyClientAuthHandler::GetToken(std::string* token) {
75
+ return SafeCallIntoPython([=] {
76
+ const Status status = vtable_.get_token(handler_.obj(), token);
77
+ RETURN_NOT_OK(CheckPyError());
78
+ return status;
79
+ });
80
+ }
81
+
82
+ PyFlightServer::PyFlightServer(PyObject* server, const PyFlightServerVtable& vtable)
83
+ : vtable_(vtable) {
84
+ Py_INCREF(server);
85
+ server_.reset(server);
86
+ }
87
+
88
+ Status PyFlightServer::ListFlights(
89
+ const arrow::flight::ServerCallContext& context,
90
+ const arrow::flight::Criteria* criteria,
91
+ std::unique_ptr<arrow::flight::FlightListing>* listings) {
92
+ return SafeCallIntoPython([&] {
93
+ const Status status =
94
+ vtable_.list_flights(server_.obj(), context, criteria, listings);
95
+ RETURN_NOT_OK(CheckPyError());
96
+ return status;
97
+ });
98
+ }
99
+
100
+ Status PyFlightServer::GetFlightInfo(const arrow::flight::ServerCallContext& context,
101
+ const arrow::flight::FlightDescriptor& request,
102
+ std::unique_ptr<arrow::flight::FlightInfo>* info) {
103
+ return SafeCallIntoPython([&] {
104
+ const Status status = vtable_.get_flight_info(server_.obj(), context, request, info);
105
+ RETURN_NOT_OK(CheckPyError());
106
+ return status;
107
+ });
108
+ }
109
+
110
+ Status PyFlightServer::GetSchema(const arrow::flight::ServerCallContext& context,
111
+ const arrow::flight::FlightDescriptor& request,
112
+ std::unique_ptr<arrow::flight::SchemaResult>* result) {
113
+ return SafeCallIntoPython([&] {
114
+ const Status status = vtable_.get_schema(server_.obj(), context, request, result);
115
+ RETURN_NOT_OK(CheckPyError());
116
+ return status;
117
+ });
118
+ }
119
+
120
+ Status PyFlightServer::DoGet(const arrow::flight::ServerCallContext& context,
121
+ const arrow::flight::Ticket& request,
122
+ std::unique_ptr<arrow::flight::FlightDataStream>* stream) {
123
+ return SafeCallIntoPython([&] {
124
+ const Status status = vtable_.do_get(server_.obj(), context, request, stream);
125
+ RETURN_NOT_OK(CheckPyError());
126
+ return status;
127
+ });
128
+ }
129
+
130
+ Status PyFlightServer::DoPut(
131
+ const arrow::flight::ServerCallContext& context,
132
+ std::unique_ptr<arrow::flight::FlightMessageReader> reader,
133
+ std::unique_ptr<arrow::flight::FlightMetadataWriter> writer) {
134
+ return SafeCallIntoPython([&] {
135
+ const Status status =
136
+ vtable_.do_put(server_.obj(), context, std::move(reader), std::move(writer));
137
+ RETURN_NOT_OK(CheckPyError());
138
+ return status;
139
+ });
140
+ }
141
+
142
+ Status PyFlightServer::DoExchange(
143
+ const arrow::flight::ServerCallContext& context,
144
+ std::unique_ptr<arrow::flight::FlightMessageReader> reader,
145
+ std::unique_ptr<arrow::flight::FlightMessageWriter> writer) {
146
+ return SafeCallIntoPython([&] {
147
+ const Status status =
148
+ vtable_.do_exchange(server_.obj(), context, std::move(reader), std::move(writer));
149
+ RETURN_NOT_OK(CheckPyError());
150
+ return status;
151
+ });
152
+ }
153
+
154
+ Status PyFlightServer::DoAction(const arrow::flight::ServerCallContext& context,
155
+ const arrow::flight::Action& action,
156
+ std::unique_ptr<arrow::flight::ResultStream>* result) {
157
+ return SafeCallIntoPython([&] {
158
+ const Status status = vtable_.do_action(server_.obj(), context, action, result);
159
+ RETURN_NOT_OK(CheckPyError());
160
+ return status;
161
+ });
162
+ }
163
+
164
+ Status PyFlightServer::ListActions(const arrow::flight::ServerCallContext& context,
165
+ std::vector<arrow::flight::ActionType>* actions) {
166
+ return SafeCallIntoPython([&] {
167
+ const Status status = vtable_.list_actions(server_.obj(), context, actions);
168
+ RETURN_NOT_OK(CheckPyError());
169
+ return status;
170
+ });
171
+ }
172
+
173
+ Status PyFlightServer::ServeWithSignals() {
174
+ // Respect the current Python settings, i.e. only interrupt the server if there is
175
+ // an active signal handler for SIGINT and SIGTERM.
176
+ std::vector<int> signals;
177
+ for (const int signum : {SIGINT, SIGTERM}) {
178
+ ARROW_ASSIGN_OR_RAISE(auto handler, ::arrow::internal::GetSignalHandler(signum));
179
+ auto cb = handler.callback();
180
+ if (cb != SIG_DFL && cb != SIG_IGN) {
181
+ signals.push_back(signum);
182
+ }
183
+ }
184
+ RETURN_NOT_OK(SetShutdownOnSignals(signals));
185
+
186
+ // Serve until we are told to shut down or a signal interrupts us
187
+ RETURN_NOT_OK(Serve());
188
+ int signum = GotSignal();
189
+ if (signum != 0) {
190
+ // Issue the signal again with Python's signal handlers restored
191
+ PyAcquireGIL lock;
192
+ raise(signum);
193
+ // XXX Ideally we would loop and serve again if no exception was raised.
194
+ // Unfortunately, gRPC will return immediately if Serve() is called again.
195
+ ARROW_UNUSED(PyErr_CheckSignals());
196
+ }
197
+
198
+ return Status::OK();
199
+ }
200
+
201
+ PyFlightResultStream::PyFlightResultStream(PyObject* generator,
202
+ PyFlightResultStreamCallback callback)
203
+ : callback_(callback) {
204
+ Py_INCREF(generator);
205
+ generator_.reset(generator);
206
+ }
207
+
208
+ arrow::Result<std::unique_ptr<arrow::flight::Result>> PyFlightResultStream::Next() {
209
+ return SafeCallIntoPython(
210
+ [=]() -> arrow::Result<std::unique_ptr<arrow::flight::Result>> {
211
+ std::unique_ptr<arrow::flight::Result> result;
212
+ const Status status = callback_(generator_.obj(), &result);
213
+ RETURN_NOT_OK(CheckPyError());
214
+ RETURN_NOT_OK(status);
215
+ return result;
216
+ });
217
+ }
218
+
219
+ PyFlightDataStream::PyFlightDataStream(
220
+ PyObject* data_source, std::unique_ptr<arrow::flight::FlightDataStream> stream)
221
+ : stream_(std::move(stream)) {
222
+ Py_INCREF(data_source);
223
+ data_source_.reset(data_source);
224
+ }
225
+
226
+ std::shared_ptr<Schema> PyFlightDataStream::schema() { return stream_->schema(); }
227
+
228
+ arrow::Result<FlightPayload> PyFlightDataStream::GetSchemaPayload() {
229
+ return stream_->GetSchemaPayload();
230
+ }
231
+
232
+ arrow::Result<FlightPayload> PyFlightDataStream::Next() { return stream_->Next(); }
233
+
234
+ PyGeneratorFlightDataStream::PyGeneratorFlightDataStream(
235
+ PyObject* generator, std::shared_ptr<arrow::Schema> schema,
236
+ PyGeneratorFlightDataStreamCallback callback, const ipc::IpcWriteOptions& options)
237
+ : schema_(schema), mapper_(*schema_), options_(options), callback_(callback) {
238
+ Py_INCREF(generator);
239
+ generator_.reset(generator);
240
+ }
241
+
242
+ std::shared_ptr<Schema> PyGeneratorFlightDataStream::schema() { return schema_; }
243
+
244
+ arrow::Result<FlightPayload> PyGeneratorFlightDataStream::GetSchemaPayload() {
245
+ FlightPayload payload;
246
+ RETURN_NOT_OK(ipc::GetSchemaPayload(*schema_, options_, mapper_, &payload.ipc_message));
247
+ return payload;
248
+ }
249
+
250
+ arrow::Result<FlightPayload> PyGeneratorFlightDataStream::Next() {
251
+ return SafeCallIntoPython([=]() -> arrow::Result<FlightPayload> {
252
+ FlightPayload payload;
253
+ const Status status = callback_(generator_.obj(), &payload);
254
+ RETURN_NOT_OK(CheckPyError());
255
+ RETURN_NOT_OK(status);
256
+ return payload;
257
+ });
258
+ }
259
+
260
+ // Flight Server Middleware
261
+
262
+ PyServerMiddlewareFactory::PyServerMiddlewareFactory(PyObject* factory,
263
+ StartCallCallback start_call)
264
+ : start_call_(start_call) {
265
+ Py_INCREF(factory);
266
+ factory_.reset(factory);
267
+ }
268
+
269
+ Status PyServerMiddlewareFactory::StartCall(
270
+ const arrow::flight::CallInfo& info,
271
+ const arrow::flight::CallHeaders& incoming_headers,
272
+ std::shared_ptr<arrow::flight::ServerMiddleware>* middleware) {
273
+ return SafeCallIntoPython([&] {
274
+ const Status status = start_call_(factory_.obj(), info, incoming_headers, middleware);
275
+ RETURN_NOT_OK(CheckPyError());
276
+ return status;
277
+ });
278
+ }
279
+
280
+ PyServerMiddleware::PyServerMiddleware(PyObject* middleware, Vtable vtable)
281
+ : vtable_(vtable) {
282
+ Py_INCREF(middleware);
283
+ middleware_.reset(middleware);
284
+ }
285
+
286
+ void PyServerMiddleware::SendingHeaders(arrow::flight::AddCallHeaders* outgoing_headers) {
287
+ const Status& status = SafeCallIntoPython([&] {
288
+ const Status status = vtable_.sending_headers(middleware_.obj(), outgoing_headers);
289
+ RETURN_NOT_OK(CheckPyError());
290
+ return status;
291
+ });
292
+
293
+ ARROW_WARN_NOT_OK(status, "Python server middleware failed in SendingHeaders");
294
+ }
295
+
296
+ void PyServerMiddleware::CallCompleted(const Status& call_status) {
297
+ const Status& status = SafeCallIntoPython([&] {
298
+ const Status status = vtable_.call_completed(middleware_.obj(), call_status);
299
+ RETURN_NOT_OK(CheckPyError());
300
+ return status;
301
+ });
302
+
303
+ ARROW_WARN_NOT_OK(status, "Python server middleware failed in CallCompleted");
304
+ }
305
+
306
+ std::string PyServerMiddleware::name() const { return kPyServerMiddlewareName; }
307
+
308
+ PyObject* PyServerMiddleware::py_object() const { return middleware_.obj(); }
309
+
310
+ // Flight Client Middleware
311
+
312
+ PyClientMiddlewareFactory::PyClientMiddlewareFactory(PyObject* factory,
313
+ StartCallCallback start_call)
314
+ : start_call_(start_call) {
315
+ Py_INCREF(factory);
316
+ factory_.reset(factory);
317
+ }
318
+
319
+ void PyClientMiddlewareFactory::StartCall(
320
+ const arrow::flight::CallInfo& info,
321
+ std::unique_ptr<arrow::flight::ClientMiddleware>* middleware) {
322
+ const Status& status = SafeCallIntoPython([&] {
323
+ const Status status = start_call_(factory_.obj(), info, middleware);
324
+ RETURN_NOT_OK(CheckPyError());
325
+ return status;
326
+ });
327
+
328
+ ARROW_WARN_NOT_OK(status, "Python client middleware failed in StartCall");
329
+ }
330
+
331
+ PyClientMiddleware::PyClientMiddleware(PyObject* middleware, Vtable vtable)
332
+ : vtable_(vtable) {
333
+ Py_INCREF(middleware);
334
+ middleware_.reset(middleware);
335
+ }
336
+
337
+ void PyClientMiddleware::SendingHeaders(arrow::flight::AddCallHeaders* outgoing_headers) {
338
+ const Status& status = SafeCallIntoPython([&] {
339
+ const Status status = vtable_.sending_headers(middleware_.obj(), outgoing_headers);
340
+ RETURN_NOT_OK(CheckPyError());
341
+ return status;
342
+ });
343
+
344
+ ARROW_WARN_NOT_OK(status, "Python client middleware failed in SendingHeaders");
345
+ }
346
+
347
+ void PyClientMiddleware::ReceivedHeaders(
348
+ const arrow::flight::CallHeaders& incoming_headers) {
349
+ const Status& status = SafeCallIntoPython([&] {
350
+ const Status status = vtable_.received_headers(middleware_.obj(), incoming_headers);
351
+ RETURN_NOT_OK(CheckPyError());
352
+ return status;
353
+ });
354
+
355
+ ARROW_WARN_NOT_OK(status, "Python client middleware failed in ReceivedHeaders");
356
+ }
357
+
358
+ void PyClientMiddleware::CallCompleted(const Status& call_status) {
359
+ const Status& status = SafeCallIntoPython([&] {
360
+ const Status status = vtable_.call_completed(middleware_.obj(), call_status);
361
+ RETURN_NOT_OK(CheckPyError());
362
+ return status;
363
+ });
364
+
365
+ ARROW_WARN_NOT_OK(status, "Python client middleware failed in CallCompleted");
366
+ }
367
+
368
+ Status CreateFlightInfo(const std::shared_ptr<arrow::Schema>& schema,
369
+ const arrow::flight::FlightDescriptor& descriptor,
370
+ const std::vector<arrow::flight::FlightEndpoint>& endpoints,
371
+ int64_t total_records, int64_t total_bytes,
372
+ std::unique_ptr<arrow::flight::FlightInfo>* out) {
373
+ ARROW_ASSIGN_OR_RAISE(auto result,
374
+ arrow::flight::FlightInfo::Make(*schema, descriptor, endpoints,
375
+ total_records, total_bytes));
376
+ *out = std::unique_ptr<arrow::flight::FlightInfo>(
377
+ new arrow::flight::FlightInfo(std::move(result)));
378
+ return Status::OK();
379
+ }
380
+
381
+ Status CreateSchemaResult(const std::shared_ptr<arrow::Schema>& schema,
382
+ std::unique_ptr<arrow::flight::SchemaResult>* out) {
383
+ return arrow::flight::SchemaResult::Make(*schema).Value(out);
384
+ }
385
+
386
+ } // namespace flight
387
+ } // namespace py
388
+ } // namespace arrow
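The handlers above all follow one pattern; the sketch below distills it (illustrative only, not part of the file): re-enter Python safely, run the vtable callback, then surface either a pending Python exception or the callback's own Status. The helper name is hypothetical.

    #include "arrow/python/common.h"

    template <typename Callback>
    arrow::Status CallIntoPythonHandler(PyObject* handler, Callback&& callback) {
      return arrow::py::SafeCallIntoPython([&]() -> arrow::Status {
        const arrow::Status status = callback(handler);
        // A raised Python exception takes precedence over the returned Status.
        ARROW_RETURN_NOT_OK(arrow::py::CheckPyError());
        return status;
      });
    }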
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/flight.h ADDED
@@ -0,0 +1,350 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <memory>
21
+ #include <string>
22
+ #include <vector>
23
+
24
+ #include "arrow/flight/api.h"
25
+ #include "arrow/ipc/dictionary.h"
26
+ #include "arrow/python/common.h"
27
+
28
+ #if defined(_WIN32) || defined(__CYGWIN__) // Windows
29
+ #if defined(_MSC_VER)
30
+ #pragma warning(disable : 4251)
31
+ #else
32
+ #pragma GCC diagnostic ignored "-Wattributes"
33
+ #endif
34
+
35
+ #ifdef ARROW_PYTHON_STATIC
36
+ #define ARROW_PYFLIGHT_EXPORT
37
+ #elif defined(ARROW_PYFLIGHT_EXPORTING)
38
+ #define ARROW_PYFLIGHT_EXPORT __declspec(dllexport)
39
+ #else
40
+ #define ARROW_PYFLIGHT_EXPORT __declspec(dllimport)
41
+ #endif
42
+
43
+ #else // Not Windows
44
+ #ifndef ARROW_PYFLIGHT_EXPORT
45
+ #define ARROW_PYFLIGHT_EXPORT __attribute__((visibility("default")))
46
+ #endif
47
+ #endif // Non-Windows
48
+
49
+ namespace arrow {
50
+
51
+ namespace py {
52
+
53
+ namespace flight {
54
+
55
+ ARROW_PYFLIGHT_EXPORT
56
+ extern const char* kPyServerMiddlewareName;
57
+
58
+ /// \brief A table of function pointers for calling from C++ into
59
+ /// Python.
60
+ class ARROW_PYFLIGHT_EXPORT PyFlightServerVtable {
61
+ public:
62
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
63
+ const arrow::flight::Criteria*,
64
+ std::unique_ptr<arrow::flight::FlightListing>*)>
65
+ list_flights;
66
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
67
+ const arrow::flight::FlightDescriptor&,
68
+ std::unique_ptr<arrow::flight::FlightInfo>*)>
69
+ get_flight_info;
70
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
71
+ const arrow::flight::FlightDescriptor&,
72
+ std::unique_ptr<arrow::flight::SchemaResult>*)>
73
+ get_schema;
74
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
75
+ const arrow::flight::Ticket&,
76
+ std::unique_ptr<arrow::flight::FlightDataStream>*)>
77
+ do_get;
78
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
79
+ std::unique_ptr<arrow::flight::FlightMessageReader>,
80
+ std::unique_ptr<arrow::flight::FlightMetadataWriter>)>
81
+ do_put;
82
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
83
+ std::unique_ptr<arrow::flight::FlightMessageReader>,
84
+ std::unique_ptr<arrow::flight::FlightMessageWriter>)>
85
+ do_exchange;
86
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
87
+ const arrow::flight::Action&,
88
+ std::unique_ptr<arrow::flight::ResultStream>*)>
89
+ do_action;
90
+ std::function<Status(PyObject*, const arrow::flight::ServerCallContext&,
91
+ std::vector<arrow::flight::ActionType>*)>
92
+ list_actions;
93
+ };
94
+
95
+ class ARROW_PYFLIGHT_EXPORT PyServerAuthHandlerVtable {
96
+ public:
97
+ std::function<Status(PyObject*, arrow::flight::ServerAuthSender*,
98
+ arrow::flight::ServerAuthReader*)>
99
+ authenticate;
100
+ std::function<Status(PyObject*, const std::string&, std::string*)> is_valid;
101
+ };
102
+
103
+ class ARROW_PYFLIGHT_EXPORT PyClientAuthHandlerVtable {
104
+ public:
105
+ std::function<Status(PyObject*, arrow::flight::ClientAuthSender*,
106
+ arrow::flight::ClientAuthReader*)>
107
+ authenticate;
108
+ std::function<Status(PyObject*, std::string*)> get_token;
109
+ };
110
+
111
+ /// \brief A helper to implement an auth mechanism in Python.
112
+ class ARROW_PYFLIGHT_EXPORT PyServerAuthHandler
113
+ : public arrow::flight::ServerAuthHandler {
114
+ public:
115
+ explicit PyServerAuthHandler(PyObject* handler,
116
+ const PyServerAuthHandlerVtable& vtable);
117
+ Status Authenticate(arrow::flight::ServerAuthSender* outgoing,
118
+ arrow::flight::ServerAuthReader* incoming) override;
119
+ Status IsValid(const std::string& token, std::string* peer_identity) override;
120
+
121
+ private:
122
+ OwnedRefNoGIL handler_;
123
+ PyServerAuthHandlerVtable vtable_;
124
+ };
125
+
126
+ /// \brief A helper to implement an auth mechanism in Python.
127
+ class ARROW_PYFLIGHT_EXPORT PyClientAuthHandler
128
+ : public arrow::flight::ClientAuthHandler {
129
+ public:
130
+ explicit PyClientAuthHandler(PyObject* handler,
131
+ const PyClientAuthHandlerVtable& vtable);
132
+ Status Authenticate(arrow::flight::ClientAuthSender* outgoing,
133
+ arrow::flight::ClientAuthReader* incoming) override;
134
+ Status GetToken(std::string* token) override;
135
+
136
+ private:
137
+ OwnedRefNoGIL handler_;
138
+ PyClientAuthHandlerVtable vtable_;
139
+ };
140
+
141
+ class ARROW_PYFLIGHT_EXPORT PyFlightServer : public arrow::flight::FlightServerBase {
142
+ public:
143
+ explicit PyFlightServer(PyObject* server, const PyFlightServerVtable& vtable);
144
+
145
+ // Like Serve(), but set up signals and invoke Python signal handlers
146
+ // if necessary. This function may return with a Python exception set.
147
+ Status ServeWithSignals();
148
+
149
+ Status ListFlights(const arrow::flight::ServerCallContext& context,
150
+ const arrow::flight::Criteria* criteria,
151
+ std::unique_ptr<arrow::flight::FlightListing>* listings) override;
152
+ Status GetFlightInfo(const arrow::flight::ServerCallContext& context,
153
+ const arrow::flight::FlightDescriptor& request,
154
+ std::unique_ptr<arrow::flight::FlightInfo>* info) override;
155
+ Status GetSchema(const arrow::flight::ServerCallContext& context,
156
+ const arrow::flight::FlightDescriptor& request,
157
+ std::unique_ptr<arrow::flight::SchemaResult>* result) override;
158
+ Status DoGet(const arrow::flight::ServerCallContext& context,
159
+ const arrow::flight::Ticket& request,
160
+ std::unique_ptr<arrow::flight::FlightDataStream>* stream) override;
161
+ Status DoPut(const arrow::flight::ServerCallContext& context,
162
+ std::unique_ptr<arrow::flight::FlightMessageReader> reader,
163
+ std::unique_ptr<arrow::flight::FlightMetadataWriter> writer) override;
164
+ Status DoExchange(const arrow::flight::ServerCallContext& context,
165
+ std::unique_ptr<arrow::flight::FlightMessageReader> reader,
166
+ std::unique_ptr<arrow::flight::FlightMessageWriter> writer) override;
167
+ Status DoAction(const arrow::flight::ServerCallContext& context,
168
+ const arrow::flight::Action& action,
169
+ std::unique_ptr<arrow::flight::ResultStream>* result) override;
170
+ Status ListActions(const arrow::flight::ServerCallContext& context,
171
+ std::vector<arrow::flight::ActionType>* actions) override;
172
+
173
+ private:
174
+ OwnedRefNoGIL server_;
175
+ PyFlightServerVtable vtable_;
176
+ };
177
+
178
+ /// \brief A callback that obtains the next result from a Flight action.
179
+ typedef std::function<Status(PyObject*, std::unique_ptr<arrow::flight::Result>*)>
180
+ PyFlightResultStreamCallback;
181
+
182
+ /// \brief A ResultStream built around a Python callback.
183
+ class ARROW_PYFLIGHT_EXPORT PyFlightResultStream : public arrow::flight::ResultStream {
184
+ public:
185
+ /// \brief Construct a FlightResultStream from a Python object and callback.
186
+ /// Must only be called while holding the GIL.
187
+ explicit PyFlightResultStream(PyObject* generator,
188
+ PyFlightResultStreamCallback callback);
189
+ arrow::Result<std::unique_ptr<arrow::flight::Result>> Next() override;
190
+
191
+ private:
192
+ OwnedRefNoGIL generator_;
193
+ PyFlightResultStreamCallback callback_;
194
+ };
195
+
196
+ /// \brief A wrapper around a FlightDataStream that keeps alive a
197
+ /// Python object backing it.
198
+ class ARROW_PYFLIGHT_EXPORT PyFlightDataStream : public arrow::flight::FlightDataStream {
199
+ public:
200
+ /// \brief Construct a FlightDataStream from a Python object and underlying stream.
201
+ /// Must only be called while holding the GIL.
202
+ explicit PyFlightDataStream(PyObject* data_source,
203
+ std::unique_ptr<arrow::flight::FlightDataStream> stream);
204
+
205
+ std::shared_ptr<Schema> schema() override;
206
+ arrow::Result<arrow::flight::FlightPayload> GetSchemaPayload() override;
207
+ arrow::Result<arrow::flight::FlightPayload> Next() override;
208
+
209
+ private:
210
+ OwnedRefNoGIL data_source_;
211
+ std::unique_ptr<arrow::flight::FlightDataStream> stream_;
212
+ };
213
+
214
+ class ARROW_PYFLIGHT_EXPORT PyServerMiddlewareFactory
215
+ : public arrow::flight::ServerMiddlewareFactory {
216
+ public:
217
+ /// \brief A callback to create the middleware instance in Python
218
+ typedef std::function<Status(
219
+ PyObject*, const arrow::flight::CallInfo& info,
220
+ const arrow::flight::CallHeaders& incoming_headers,
221
+ std::shared_ptr<arrow::flight::ServerMiddleware>* middleware)>
222
+ StartCallCallback;
223
+
224
+ /// \brief Must only be called while holding the GIL.
225
+ explicit PyServerMiddlewareFactory(PyObject* factory, StartCallCallback start_call);
226
+
227
+ Status StartCall(const arrow::flight::CallInfo& info,
228
+ const arrow::flight::CallHeaders& incoming_headers,
229
+ std::shared_ptr<arrow::flight::ServerMiddleware>* middleware) override;
230
+
231
+ private:
232
+ OwnedRefNoGIL factory_;
233
+ StartCallCallback start_call_;
234
+ };
235
+
236
+ class ARROW_PYFLIGHT_EXPORT PyServerMiddleware : public arrow::flight::ServerMiddleware {
237
+ public:
238
+ typedef std::function<Status(PyObject*,
239
+ arrow::flight::AddCallHeaders* outgoing_headers)>
240
+ SendingHeadersCallback;
241
+ typedef std::function<Status(PyObject*, const Status& status)> CallCompletedCallback;
242
+
243
+ struct Vtable {
244
+ SendingHeadersCallback sending_headers;
245
+ CallCompletedCallback call_completed;
246
+ };
247
+
248
+ /// \brief Must only be called while holding the GIL.
249
+ explicit PyServerMiddleware(PyObject* middleware, Vtable vtable);
250
+
251
+ void SendingHeaders(arrow::flight::AddCallHeaders* outgoing_headers) override;
252
+ void CallCompleted(const Status& status) override;
253
+ std::string name() const override;
254
+ /// \brief Get the underlying Python object.
255
+ PyObject* py_object() const;
256
+
257
+ private:
258
+ OwnedRefNoGIL middleware_;
259
+ Vtable vtable_;
260
+ };
261
+
262
+ class ARROW_PYFLIGHT_EXPORT PyClientMiddlewareFactory
263
+ : public arrow::flight::ClientMiddlewareFactory {
264
+ public:
265
+ /// \brief A callback to create the middleware instance in Python
266
+ typedef std::function<Status(
267
+ PyObject*, const arrow::flight::CallInfo& info,
268
+ std::unique_ptr<arrow::flight::ClientMiddleware>* middleware)>
269
+ StartCallCallback;
270
+
271
+ /// \brief Must only be called while holding the GIL.
272
+ explicit PyClientMiddlewareFactory(PyObject* factory, StartCallCallback start_call);
273
+
274
+ void StartCall(const arrow::flight::CallInfo& info,
275
+ std::unique_ptr<arrow::flight::ClientMiddleware>* middleware) override;
276
+
277
+ private:
278
+ OwnedRefNoGIL factory_;
279
+ StartCallCallback start_call_;
280
+ };
281
+
282
+ class ARROW_PYFLIGHT_EXPORT PyClientMiddleware : public arrow::flight::ClientMiddleware {
283
+ public:
284
+ typedef std::function<Status(PyObject*,
285
+ arrow::flight::AddCallHeaders* outgoing_headers)>
286
+ SendingHeadersCallback;
287
+ typedef std::function<Status(PyObject*,
288
+ const arrow::flight::CallHeaders& incoming_headers)>
289
+ ReceivedHeadersCallback;
290
+ typedef std::function<Status(PyObject*, const Status& status)> CallCompletedCallback;
291
+
292
+ struct Vtable {
293
+ SendingHeadersCallback sending_headers;
294
+ ReceivedHeadersCallback received_headers;
295
+ CallCompletedCallback call_completed;
296
+ };
297
+
298
+ /// \brief Must only be called while holding the GIL.
299
+ explicit PyClientMiddleware(PyObject* middleware, Vtable vtable);
300
+
301
+ void SendingHeaders(arrow::flight::AddCallHeaders* outgoing_headers) override;
302
+ void ReceivedHeaders(const arrow::flight::CallHeaders& incoming_headers) override;
303
+ void CallCompleted(const Status& status) override;
304
+
305
+ private:
306
+ OwnedRefNoGIL middleware_;
307
+ Vtable vtable_;
308
+ };
309
+
310
+ /// \brief A callback that obtains the next payload from a Flight result stream.
311
+ typedef std::function<Status(PyObject*, arrow::flight::FlightPayload*)>
312
+ PyGeneratorFlightDataStreamCallback;
313
+
314
+ /// \brief A FlightDataStream built around a Python callback.
315
+ class ARROW_PYFLIGHT_EXPORT PyGeneratorFlightDataStream
316
+ : public arrow::flight::FlightDataStream {
317
+ public:
318
+ /// \brief Construct a FlightDataStream from a Python object and underlying stream.
319
+ /// Must only be called while holding the GIL.
320
+ explicit PyGeneratorFlightDataStream(PyObject* generator,
321
+ std::shared_ptr<arrow::Schema> schema,
322
+ PyGeneratorFlightDataStreamCallback callback,
323
+ const ipc::IpcWriteOptions& options);
324
+ std::shared_ptr<Schema> schema() override;
325
+ arrow::Result<arrow::flight::FlightPayload> GetSchemaPayload() override;
326
+ arrow::Result<arrow::flight::FlightPayload> Next() override;
327
+
328
+ private:
329
+ OwnedRefNoGIL generator_;
330
+ std::shared_ptr<arrow::Schema> schema_;
331
+ ipc::DictionaryFieldMapper mapper_;
332
+ ipc::IpcWriteOptions options_;
333
+ PyGeneratorFlightDataStreamCallback callback_;
334
+ };
335
+
336
+ ARROW_PYFLIGHT_EXPORT
337
+ Status CreateFlightInfo(const std::shared_ptr<arrow::Schema>& schema,
338
+ const arrow::flight::FlightDescriptor& descriptor,
339
+ const std::vector<arrow::flight::FlightEndpoint>& endpoints,
340
+ int64_t total_records, int64_t total_bytes,
341
+ std::unique_ptr<arrow::flight::FlightInfo>* out);
342
+
343
+ /// \brief Create a SchemaResult from schema.
344
+ ARROW_PYFLIGHT_EXPORT
345
+ Status CreateSchemaResult(const std::shared_ptr<arrow::Schema>& schema,
346
+ std::unique_ptr<arrow::flight::SchemaResult>* out);
347
+
348
+ } // namespace flight
349
+ } // namespace py
350
+ } // namespace arrow
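A short usage sketch of the CreateFlightInfo helper declared above; the descriptor path and the empty endpoint list are placeholders, and the wrapper function name is hypothetical.

    #include "arrow/python/flight.h"

    arrow::Status MakeExampleFlightInfo(const std::shared_ptr<arrow::Schema>& schema,
                                        std::unique_ptr<arrow::flight::FlightInfo>* out) {
      auto descriptor = arrow::flight::FlightDescriptor::Path({"example-dataset"});
      std::vector<arrow::flight::FlightEndpoint> endpoints;  // none advertised here
      return arrow::py::flight::CreateFlightInfo(schema, descriptor, endpoints,
                                                 /*total_records=*/-1,
                                                 /*total_bytes=*/-1, out);
    }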
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/gdb.cc ADDED
@@ -0,0 +1,530 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include <cstdlib>
19
+ #include <memory>
20
+ #include <utility>
21
+
22
+ #include "arrow/array.h"
23
+ #include "arrow/chunked_array.h"
24
+ #include "arrow/datum.h"
25
+ #include "arrow/extension_type.h"
26
+ #include "arrow/ipc/json_simple.h"
27
+ #include "arrow/python/gdb.h"
28
+ #include "arrow/record_batch.h"
29
+ #include "arrow/scalar.h"
30
+ #include "arrow/table.h"
31
+ #include "arrow/type.h"
32
+ #include "arrow/util/debug.h"
33
+ #include "arrow/util/decimal.h"
34
+ #include "arrow/util/key_value_metadata.h"
35
+ #include "arrow/util/logging.h"
36
+ #include "arrow/util/macros.h"
37
+
38
+ namespace arrow {
39
+
40
+ using ipc::internal::json::ArrayFromJSON;
41
+ using ipc::internal::json::ChunkedArrayFromJSON;
42
+ using ipc::internal::json::ScalarFromJSON;
43
+
44
+ namespace gdb {
45
+
46
+ // Add a nested `arrow` namespace to exercise type lookup from GDB (ARROW-15652)
47
+ namespace arrow {
48
+ void DummyFunction() {}
49
+ } // namespace arrow
50
+
51
+ namespace {
52
+
53
+ class CustomStatusDetail : public StatusDetail {
54
+ public:
55
+ const char* type_id() const override { return "custom-detail-id"; }
56
+ std::string ToString() const override { return "This is a detail"; }
57
+ };
58
+
59
+ class UuidType : public ExtensionType {
60
+ public:
61
+ UuidType() : ExtensionType(fixed_size_binary(16)) {}
62
+
63
+ std::string extension_name() const override { return "uuid"; }
64
+
65
+ bool ExtensionEquals(const ExtensionType& other) const override {
66
+ return (other.extension_name() == this->extension_name());
67
+ }
68
+
69
+ std::shared_ptr<Array> MakeArray(std::shared_ptr<ArrayData> data) const override {
70
+ return std::make_shared<ExtensionArray>(data);
71
+ }
72
+
73
+ Result<std::shared_ptr<DataType>> Deserialize(
74
+ std::shared_ptr<DataType> storage_type,
75
+ const std::string& serialized) const override {
76
+ return Status::NotImplemented("");
77
+ }
78
+
79
+ std::string Serialize() const override { return "uuid-serialized"; }
80
+ };
81
+
82
+ std::shared_ptr<Array> SliceArrayFromJSON(const std::shared_ptr<DataType>& ty,
83
+ std::string_view json, int64_t offset = 0,
84
+ int64_t length = -1) {
85
+ auto array = *ArrayFromJSON(ty, json);
86
+ if (length != -1) {
87
+ return array->Slice(offset, length);
88
+ } else {
89
+ return array->Slice(offset);
90
+ }
91
+ }
92
+
93
+ } // namespace
94
+
95
+ void TestSession() {
96
+ // We define local variables for all types for which we want to test
97
+ // pretty-printing.
98
+ // Then, at the end of this function, we trap to the debugger, so that
99
+ // test instrumentation can print values from this frame by interacting
100
+ // with the debugger.
101
+ // The test instrumentation is in pyarrow/tests/test_gdb.py
102
+
103
+ #ifdef __clang__
104
+ _Pragma("clang diagnostic push");
105
+ _Pragma("clang diagnostic ignored \"-Wunused-variable\"");
106
+ #elif defined(__GNUC__)
107
+ _Pragma("GCC diagnostic push");
108
+ _Pragma("GCC diagnostic ignored \"-Wunused-variable\"");
109
+ #endif
110
+
111
+ arrow::DummyFunction();
112
+
113
+ // Status & Result
114
+ auto ok_status = Status::OK();
115
+ auto error_status = Status::IOError("This is an error");
116
+ auto error_detail_status =
117
+ error_status.WithDetail(std::make_shared<CustomStatusDetail>());
118
+ auto ok_result = Result<int>(42);
119
+ auto error_result = Result<int>(error_status);
120
+ auto error_detail_result = Result<int>(error_detail_status);
121
+
122
+ // String views
123
+ std::string_view string_view_abc{"abc"};
124
+ std::string special_chars = std::string("foo\"bar") + '\x00' + "\r\n\t\x1f";
125
+ std::string_view string_view_special_chars(special_chars);
126
+
127
+ // Buffers
128
+ Buffer buffer_null{nullptr, 0};
129
+ Buffer buffer_abc{string_view_abc};
130
+ Buffer buffer_special_chars{string_view_special_chars};
131
+ char mutable_array[3] = {'a', 'b', 'c'};
132
+ MutableBuffer buffer_mutable{reinterpret_cast<uint8_t*>(mutable_array), 3};
133
+ auto heap_buffer = std::make_shared<Buffer>(string_view_abc);
134
+ auto heap_buffer_mutable = *AllocateBuffer(buffer_abc.size());
135
+ memcpy(heap_buffer_mutable->mutable_data(), buffer_abc.data(), buffer_abc.size());
136
+
137
+ // KeyValueMetadata
138
+ auto empty_metadata = key_value_metadata({}, {});
139
+ auto metadata = key_value_metadata(
140
+ {"key_text", "key_binary"}, {"some value", std::string("z") + '\x00' + "\x1f\xff"});
141
+
142
+ // Decimals
143
+ Decimal128 decimal128_zero{};
144
+ Decimal128 decimal128_pos{"98765432109876543210987654321098765432"};
145
+ Decimal128 decimal128_neg{"-98765432109876543210987654321098765432"};
146
+ BasicDecimal128 basic_decimal128_zero{};
147
+ BasicDecimal128 basic_decimal128_pos{decimal128_pos.native_endian_array()};
148
+ BasicDecimal128 basic_decimal128_neg{decimal128_neg.native_endian_array()};
149
+ Decimal256 decimal256_zero{};
150
+ Decimal256 decimal256_pos{
151
+ "9876543210987654321098765432109876543210987654321098765432109876543210987654"};
152
+ Decimal256 decimal256_neg{
153
+ "-9876543210987654321098765432109876543210987654321098765432109876543210987654"};
154
+ BasicDecimal256 basic_decimal256_zero{};
155
+ BasicDecimal256 basic_decimal256_pos{decimal256_pos.native_endian_array()};
156
+ BasicDecimal256 basic_decimal256_neg{decimal256_neg.native_endian_array()};
157
+
158
+ // Data types
159
+ NullType null_type;
160
+ auto heap_null_type = null();
161
+ BooleanType bool_type;
162
+ auto heap_bool_type = boolean();
163
+
164
+ Date32Type date32_type;
165
+ Date64Type date64_type;
166
+ Time32Type time_type_s(TimeUnit::SECOND);
167
+ Time32Type time_type_ms(TimeUnit::MILLI);
168
+ Time64Type time_type_us(TimeUnit::MICRO);
169
+ Time64Type time_type_ns(TimeUnit::NANO);
170
+ auto heap_time_type_ns = time64(TimeUnit::NANO);
171
+
172
+ TimestampType timestamp_type_s(TimeUnit::SECOND);
173
+ TimestampType timestamp_type_ms_timezone(TimeUnit::MILLI, "Europe/Paris");
174
+ TimestampType timestamp_type_us(TimeUnit::MICRO);
175
+ TimestampType timestamp_type_ns_timezone(TimeUnit::NANO, "Europe/Paris");
176
+ auto heap_timestamp_type_ns_timezone = timestamp(TimeUnit::NANO, "Europe/Paris");
177
+
178
+ DayTimeIntervalType day_time_interval_type;
179
+ MonthIntervalType month_interval_type;
180
+ MonthDayNanoIntervalType month_day_nano_interval_type;
181
+
182
+ DurationType duration_type_s(TimeUnit::SECOND);
183
+ DurationType duration_type_ns(TimeUnit::NANO);
184
+
185
+ BinaryType binary_type;
186
+ StringType string_type;
187
+ LargeBinaryType large_binary_type;
188
+ LargeStringType large_string_type;
189
+ FixedSizeBinaryType fixed_size_binary_type(10);
190
+ auto heap_fixed_size_binary_type = fixed_size_binary(10);
191
+
192
+ Decimal128Type decimal128_type(16, 5);
193
+ Decimal256Type decimal256_type(42, 12);
194
+ auto heap_decimal128_type = decimal128(16, 5);
195
+
196
+ ListType list_type(uint8());
197
+ LargeListType large_list_type(large_utf8());
198
+ auto heap_list_type = list(uint8());
199
+ auto heap_large_list_type = large_list(large_utf8());
200
+
201
+ FixedSizeListType fixed_size_list_type(float64(), 3);
202
+ auto heap_fixed_size_list_type = fixed_size_list(float64(), 3);
203
+
204
+ DictionaryType dict_type_unordered(int16(), utf8());
205
+ DictionaryType dict_type_ordered(int16(), utf8(), /*ordered=*/true);
206
+ auto heap_dict_type = dictionary(int16(), utf8());
207
+
208
+ MapType map_type_unsorted(utf8(), binary());
209
+ MapType map_type_sorted(utf8(), binary(), /*keys_sorted=*/true);
210
+ auto heap_map_type = map(utf8(), binary());
211
+
212
+ StructType struct_type_empty({});
213
+ StructType struct_type(
214
+ {field("ints", int8()), field("strs", utf8(), /*nullable=*/false)});
215
+ auto heap_struct_type =
216
+ struct_({field("ints", int8()), field("strs", utf8(), /*nullable=*/false)});
217
+
218
+ std::vector<int8_t> union_type_codes({7, 42});
219
+ FieldVector union_fields(
220
+ {field("ints", int8()), field("strs", utf8(), /*nullable=*/false)});
221
+ SparseUnionType sparse_union_type(union_fields, union_type_codes);
222
+ DenseUnionType dense_union_type(union_fields, union_type_codes);
223
+
224
+ UuidType uuid_type{};
225
+ std::shared_ptr<DataType> heap_uuid_type = std::make_shared<UuidType>();
226
+
227
+ // Schema
228
+ auto schema_empty = schema({});
229
+ auto schema_non_empty = schema({field("ints", int8()), field("strs", utf8())});
230
+ auto schema_with_metadata = schema_non_empty->WithMetadata(
231
+ key_value_metadata({"key1", "key2"}, {"value1", "value2"}));
232
+
233
+ // Fields
234
+ Field int_field("ints", int64());
235
+ Field float_field("floats", float32(), /*nullable=*/false);
236
+ auto heap_int_field = field("ints", int64());
237
+
238
+ // Scalars
239
+ NullScalar null_scalar;
240
+ auto heap_null_scalar = MakeNullScalar(null());
241
+
242
+ BooleanScalar bool_scalar_null{};
243
+ BooleanScalar bool_scalar{true};
244
+ auto heap_bool_scalar = *MakeScalar(boolean(), true);
245
+
246
+ Int8Scalar int8_scalar_null{};
247
+ UInt8Scalar uint8_scalar_null{};
248
+ Int64Scalar int64_scalar_null{};
249
+ UInt64Scalar uint64_scalar_null{};
250
+ Int8Scalar int8_scalar{-42};
251
+ UInt8Scalar uint8_scalar{234};
252
+ Int64Scalar int64_scalar{-9223372036854775807LL - 1};
253
+ UInt64Scalar uint64_scalar{18446744073709551615ULL};
254
+ HalfFloatScalar half_float_scalar{48640}; // -1.5
255
+ FloatScalar float_scalar{1.25f};
256
+ DoubleScalar double_scalar{2.5};
257
+
258
+ Time32Scalar time_scalar_s{100, TimeUnit::SECOND};
259
+ Time32Scalar time_scalar_ms{1000, TimeUnit::MILLI};
260
+ Time64Scalar time_scalar_us{10000, TimeUnit::MICRO};
261
+ Time64Scalar time_scalar_ns{100000, TimeUnit::NANO};
262
+ Time64Scalar time_scalar_null{time64(TimeUnit::NANO)};
263
+
264
+ DurationScalar duration_scalar_s{-100, TimeUnit::SECOND};
265
+ DurationScalar duration_scalar_ms{-1000, TimeUnit::MILLI};
266
+ DurationScalar duration_scalar_us{-10000, TimeUnit::MICRO};
267
+ DurationScalar duration_scalar_ns{-100000, TimeUnit::NANO};
268
+ DurationScalar duration_scalar_null{duration(TimeUnit::NANO)};
269
+
270
+ TimestampScalar timestamp_scalar_s{12345, timestamp(TimeUnit::SECOND)};
271
+ TimestampScalar timestamp_scalar_ms{-123456, timestamp(TimeUnit::MILLI)};
272
+ TimestampScalar timestamp_scalar_us{1234567, timestamp(TimeUnit::MICRO)};
273
+ TimestampScalar timestamp_scalar_ns{-12345678, timestamp(TimeUnit::NANO)};
274
+ TimestampScalar timestamp_scalar_null{timestamp(TimeUnit::NANO)};
275
+
276
+ TimestampScalar timestamp_scalar_s_tz{12345,
277
+ timestamp(TimeUnit::SECOND, "Europe/Paris")};
278
+ TimestampScalar timestamp_scalar_ms_tz{-123456,
279
+ timestamp(TimeUnit::MILLI, "Europe/Paris")};
280
+ TimestampScalar timestamp_scalar_us_tz{1234567,
281
+ timestamp(TimeUnit::MICRO, "Europe/Paris")};
282
+ TimestampScalar timestamp_scalar_ns_tz{-12345678,
283
+ timestamp(TimeUnit::NANO, "Europe/Paris")};
284
+ TimestampScalar timestamp_scalar_null_tz{timestamp(TimeUnit::NANO, "Europe/Paris")};
285
+
286
+ MonthIntervalScalar month_interval_scalar{23};
287
+ MonthIntervalScalar month_interval_scalar_null{};
288
+ DayTimeIntervalScalar day_time_interval_scalar{{23, -456}};
289
+ DayTimeIntervalScalar day_time_interval_scalar_null{};
290
+ MonthDayNanoIntervalScalar month_day_nano_interval_scalar{{1, 23, -456}};
291
+ MonthDayNanoIntervalScalar month_day_nano_interval_scalar_null{};
292
+
293
+ Date32Scalar date32_scalar{23};
294
+ Date32Scalar date32_scalar_null{};
295
+ Date64Scalar date64_scalar{45 * 86400000LL};
296
+ Date64Scalar date64_scalar_null{};
297
+
298
+ Decimal128Scalar decimal128_scalar_pos_scale_pos{Decimal128("1234567"),
299
+ decimal128(10, 4)};
300
+ Decimal128Scalar decimal128_scalar_pos_scale_neg{Decimal128("-1234567"),
301
+ decimal128(10, 4)};
302
+ Decimal128Scalar decimal128_scalar_neg_scale_pos{Decimal128("1234567"),
303
+ decimal128(10, -4)};
304
+ Decimal128Scalar decimal128_scalar_neg_scale_neg{Decimal128("-1234567"),
305
+ decimal128(10, -4)};
306
+ Decimal128Scalar decimal128_scalar_null{decimal128(10, 4)};
307
+ auto heap_decimal128_scalar = *MakeScalar(decimal128(10, 4), Decimal128("1234567"));
308
+
309
+ Decimal256Scalar decimal256_scalar_pos_scale_pos{
310
+ Decimal256("1234567890123456789012345678901234567890123456"), decimal256(50, 4)};
311
+ Decimal256Scalar decimal256_scalar_pos_scale_neg{
312
+ Decimal256("-1234567890123456789012345678901234567890123456"), decimal256(50, 4)};
313
+ Decimal256Scalar decimal256_scalar_neg_scale_pos{
314
+ Decimal256("1234567890123456789012345678901234567890123456"), decimal256(50, -4)};
315
+ Decimal256Scalar decimal256_scalar_neg_scale_neg{
316
+ Decimal256("-1234567890123456789012345678901234567890123456"), decimal256(50, -4)};
317
+ Decimal256Scalar decimal256_scalar_null{decimal256(50, 4)};
318
+ auto heap_decimal256_scalar = *MakeScalar(
319
+ decimal256(50, 4), Decimal256("1234567890123456789012345678901234567890123456"));
320
+
321
+ BinaryScalar binary_scalar_null{};
322
+ BinaryScalar binary_scalar_unallocated{std::shared_ptr<Buffer>{nullptr}};
323
+ BinaryScalar binary_scalar_empty{Buffer::FromString("")};
324
+ BinaryScalar binary_scalar_abc{Buffer::FromString("abc")};
325
+ BinaryScalar binary_scalar_bytes{
326
+ Buffer::FromString(std::string() + '\x00' + "\x1f\xff")};
327
+
328
+ StringScalar string_scalar_null{};
329
+ StringScalar string_scalar_unallocated{std::shared_ptr<Buffer>{nullptr}};
330
+ StringScalar string_scalar_empty{Buffer::FromString("")};
331
+ StringScalar string_scalar_hehe{Buffer::FromString("héhé")};
332
+ StringScalar string_scalar_invalid_chars{
333
+ Buffer::FromString(std::string("abc") + '\x00' + "def\xffghi")};
334
+
335
+ LargeBinaryScalar large_binary_scalar_abc{Buffer::FromString("abc")};
336
+ LargeStringScalar large_string_scalar_hehe{Buffer::FromString("héhé")};
337
+
338
+ FixedSizeBinaryScalar fixed_size_binary_scalar{Buffer::FromString("abc"),
339
+ fixed_size_binary(3)};
340
+ FixedSizeBinaryScalar fixed_size_binary_scalar_null{
341
+ Buffer::FromString(" "), fixed_size_binary(3), /*is_valid=*/false};
342
+
343
+ std::shared_ptr<Array> dict_array;
344
+ dict_array = *ArrayFromJSON(utf8(), R"(["foo", "bar", "quux"])");
345
+ DictionaryScalar dict_scalar{{std::make_shared<Int8Scalar>(42), dict_array},
346
+ dictionary(int8(), utf8())};
347
+ DictionaryScalar dict_scalar_null{dictionary(int8(), utf8())};
348
+
349
+ std::shared_ptr<Array> list_value_array = *ArrayFromJSON(int32(), R"([4, 5, 6])");
350
+ std::shared_ptr<Array> list_zero_length = *ArrayFromJSON(int32(), R"([])");
351
+ ListScalar list_scalar{list_value_array};
352
+ ListScalar list_scalar_null{list_zero_length, list(int32()), /*is_valid=*/false};
353
+ LargeListScalar large_list_scalar{list_value_array};
354
+ LargeListScalar large_list_scalar_null{list_zero_length, large_list(int32()),
355
+ /*is_valid=*/false};
356
+ FixedSizeListScalar fixed_size_list_scalar{list_value_array};
357
+ FixedSizeListScalar fixed_size_list_scalar_null{
358
+ list_value_array, fixed_size_list(int32(), 3), /*is_valid=*/false};
359
+
360
+ auto struct_scalar_type = struct_({field("ints", int32()), field("strs", utf8())});
361
+ StructScalar struct_scalar{
362
+ ScalarVector{MakeScalar(int32_t(42)), MakeScalar("some text")}, struct_scalar_type};
363
+ StructScalar struct_scalar_null{struct_scalar.value, struct_scalar_type,
364
+ /*is_valid=*/false};
365
+
366
+ auto sparse_union_scalar_type =
367
+ sparse_union(FieldVector{field("ints", int32()), field("strs", utf8())}, {7, 42});
368
+ auto dense_union_scalar_type =
369
+ dense_union(FieldVector{field("ints", int32()), field("strs", utf8())}, {7, 42});
370
+ std::vector<std::shared_ptr<Scalar>> union_values = {MakeScalar(int32_t(43)),
371
+ MakeNullScalar(utf8())};
372
+ SparseUnionScalar sparse_union_scalar{union_values, 7, sparse_union_scalar_type};
373
+ DenseUnionScalar dense_union_scalar{union_values[0], 7, dense_union_scalar_type};
374
+
375
+ union_values[0] = MakeNullScalar(int32());
376
+ SparseUnionScalar sparse_union_scalar_null{union_values, 7, sparse_union_scalar_type};
377
+ DenseUnionScalar dense_union_scalar_null{union_values[0], 7, dense_union_scalar_type};
378
+
379
+ auto extension_scalar_type = std::make_shared<UuidType>();
380
+ ExtensionScalar extension_scalar{
381
+ std::make_shared<FixedSizeBinaryScalar>(Buffer::FromString("0123456789abcdef"),
382
+ extension_scalar_type->storage_type()),
383
+ extension_scalar_type};
384
+ ExtensionScalar extension_scalar_null{extension_scalar.value, extension_scalar_type,
385
+ /*is_valid=*/false};
386
+
387
+ std::shared_ptr<Scalar> heap_map_scalar;
388
+ ARROW_CHECK_OK(
389
+ ScalarFromJSON(map(utf8(), int32()), R"([["a", 5], ["b", 6]])", &heap_map_scalar));
390
+ auto heap_map_scalar_null = MakeNullScalar(heap_map_scalar->type);
391
+
392
+ // Array and ArrayData
393
+ auto heap_null_array = SliceArrayFromJSON(null(), "[null, null]");
394
+
395
+ auto heap_int32_array = SliceArrayFromJSON(int32(), "[-5, 6, null, 42]");
396
+ ArrayData int32_array_data{*heap_int32_array->data()};
397
+ Int32Array int32_array{heap_int32_array->data()->Copy()};
398
+
399
+ auto heap_int32_array_no_nulls = SliceArrayFromJSON(int32(), "[-5, 6, 3, 42]");
400
+
401
+ const char* json_int32_array = "[-1, 2, -3, 4, null, -5, 6, -7, 8, null, -9, -10]";
402
+ auto heap_int32_array_sliced_1_9 = SliceArrayFromJSON(int32(), json_int32_array, 1, 9);
403
+ auto heap_int32_array_sliced_2_6 = SliceArrayFromJSON(int32(), json_int32_array, 2, 6);
404
+ auto heap_int32_array_sliced_8_4 = SliceArrayFromJSON(int32(), json_int32_array, 8, 4);
405
+ auto heap_int32_array_sliced_empty =
406
+ SliceArrayFromJSON(int32(), json_int32_array, 6, 0);
407
+
408
+ const char* json_bool_array =
409
+ "[false, false, true, true, null, null, false, false, true, true, "
410
+ "null, null, false, false, true, true, null, null]";
411
+ auto heap_bool_array = SliceArrayFromJSON(boolean(), json_bool_array);
412
+ auto heap_bool_array_sliced_1_9 = SliceArrayFromJSON(boolean(), json_bool_array, 1, 9);
413
+ auto heap_bool_array_sliced_2_6 = SliceArrayFromJSON(boolean(), json_bool_array, 2, 6);
414
+ auto heap_bool_array_sliced_empty =
415
+ SliceArrayFromJSON(boolean(), json_bool_array, 6, 0);
416
+
417
+ auto heap_list_array = SliceArrayFromJSON(list(int64()), "[[1, 2], null, []]");
418
+ ListArray list_array{heap_list_array->data()};
419
+
420
+ const char* json_double_array = "[-1.5, null]";
421
+ auto heap_double_array = SliceArrayFromJSON(float64(), json_double_array);
422
+
423
+ const char* json_float16_array = "[0, 48640]";
424
+ auto heap_float16_array =
425
+ *SliceArrayFromJSON(uint16(), json_float16_array)->View(float16());
426
+
427
+ auto heap_date32_array =
428
+ SliceArrayFromJSON(date32(), "[0, null, 18336, -9004, -719162, -719163]");
429
+ auto heap_date64_array = SliceArrayFromJSON(
430
+ date64(), "[1584230400000, -777945600000, -62135596800000, -62135683200000, 123]");
431
+
432
+ const char* json_time_array = "[null, -123, 456]";
433
+ auto heap_time32_array_s =
434
+ SliceArrayFromJSON(time32(TimeUnit::SECOND), json_time_array);
435
+ auto heap_time32_array_ms =
436
+ SliceArrayFromJSON(time32(TimeUnit::MILLI), json_time_array);
437
+ auto heap_time64_array_us =
438
+ SliceArrayFromJSON(time64(TimeUnit::MICRO), json_time_array);
439
+ auto heap_time64_array_ns = SliceArrayFromJSON(time64(TimeUnit::NANO), json_time_array);
440
+
441
+ auto heap_month_interval_array =
442
+ SliceArrayFromJSON(month_interval(), "[123, -456, null]");
443
+ auto heap_day_time_interval_array =
444
+ SliceArrayFromJSON(day_time_interval(), "[[1, -600], null]");
445
+ auto heap_month_day_nano_interval_array =
446
+ SliceArrayFromJSON(month_day_nano_interval(), "[[1, -600, 5000], null]");
447
+
448
+ const char* json_duration_array = "[null, -1234567890123456789]";
449
+ auto heap_duration_array_s =
450
+ SliceArrayFromJSON(duration(TimeUnit::SECOND), json_duration_array);
451
+ auto heap_duration_array_ns =
452
+ SliceArrayFromJSON(duration(TimeUnit::NANO), json_duration_array);
453
+
454
+ auto heap_timestamp_array_s = SliceArrayFromJSON(
455
+ timestamp(TimeUnit::SECOND),
456
+ R"([null, "1970-01-01 00:00:00", "1900-02-28 12:34:56", "3989-07-14 00:00:00"])");
457
+ auto heap_timestamp_array_ms = SliceArrayFromJSON(
458
+ timestamp(TimeUnit::MILLI),
459
+ R"([null, "1900-02-28 12:34:56.123", "3989-07-14 00:00:00.789"])");
460
+ auto heap_timestamp_array_us = SliceArrayFromJSON(
461
+ timestamp(TimeUnit::MICRO),
462
+ R"([null, "1900-02-28 12:34:56.654321", "3989-07-14 00:00:00.456789"])");
463
+ auto heap_timestamp_array_ns = SliceArrayFromJSON(
464
+ timestamp(TimeUnit::NANO), R"([null, "1900-02-28 12:34:56.987654321"])");
465
+
466
+ auto heap_decimal128_array = SliceArrayFromJSON(
467
+ decimal128(30, 6),
468
+ R"([null, "-1234567890123456789.012345", "1234567890123456789.012345"])");
469
+ auto heap_decimal256_array = SliceArrayFromJSON(
470
+ decimal256(50, 6), R"([null, "-123456789012345678901234567890123456789.012345"])");
471
+ auto heap_decimal128_array_sliced = heap_decimal128_array->Slice(1, 1);
472
+
473
+ auto heap_fixed_size_binary_array =
474
+ SliceArrayFromJSON(fixed_size_binary(3), "[null, \"abc\", \"\\u0000\\u001f\xff\"]");
475
+ auto heap_fixed_size_binary_array_zero_width =
476
+ SliceArrayFromJSON(fixed_size_binary(0), R"([null, ""])");
477
+ auto heap_fixed_size_binary_array_sliced = heap_fixed_size_binary_array->Slice(1, 1);
478
+
479
+ const char* json_binary_array = "[null, \"abcd\", \"\\u0000\\u001f\xff\"]";
480
+ auto heap_binary_array = SliceArrayFromJSON(binary(), json_binary_array);
481
+ auto heap_large_binary_array = SliceArrayFromJSON(large_binary(), json_binary_array);
482
+ const char* json_string_array = "[null, \"héhé\", \"invalid \xff char\"]";
483
+ auto heap_string_array = SliceArrayFromJSON(utf8(), json_string_array);
484
+ auto heap_large_string_array = SliceArrayFromJSON(large_utf8(), json_string_array);
485
+ auto heap_binary_array_sliced = heap_binary_array->Slice(1, 1);
486
+
487
+ // ChunkedArray
488
+ ArrayVector array_chunks(2);
489
+ array_chunks[0] = *ArrayFromJSON(int32(), "[1, 2]");
490
+ array_chunks[1] = *ArrayFromJSON(int32(), "[3, null, 4]");
491
+ ChunkedArray chunked_array{array_chunks};
492
+
493
+ // RecordBatch
494
+ auto batch_schema = schema({field("ints", int32()), field("strs", utf8())});
495
+ ArrayVector batch_columns{2};
496
+ batch_columns[0] = *ArrayFromJSON(int32(), "[1, 2, 3]");
497
+ batch_columns[1] = *ArrayFromJSON(utf8(), R"(["abc", null, "def"])");
498
+ auto batch = RecordBatch::Make(batch_schema, /*num_rows=*/3, batch_columns);
499
+ auto batch_with_metadata = batch->ReplaceSchemaMetadata(
500
+ key_value_metadata({"key1", "key2", "key3"}, {"value1", "value2", "value3"}));
501
+
502
+ // Table
503
+ ChunkedArrayVector table_columns{2};
504
+ ARROW_CHECK_OK(
505
+ ChunkedArrayFromJSON(int32(), {"[1, 2, 3]", "[4, 5]"}, &table_columns[0]));
506
+ ARROW_CHECK_OK(ChunkedArrayFromJSON(
507
+ utf8(), {R"(["abc", null])", R"(["def"])", R"(["ghi", "jkl"])"},
508
+ &table_columns[1]));
509
+ auto table = Table::Make(batch_schema, table_columns);
510
+
511
+ // Datum
512
+ Datum empty_datum{};
513
+ Datum scalar_datum{MakeNullScalar(boolean())};
514
+ Datum array_datum{heap_int32_array};
515
+ Datum chunked_array_datum{chunked_array};
516
+ Datum batch_datum{batch};
517
+ Datum table_datum{table};
518
+
519
+ #ifdef __clang__
520
+ _Pragma("clang diagnostic pop");
521
+ #elif defined(__GNUC__)
522
+ _Pragma("GCC diagnostic pop");
523
+ #endif
524
+
525
+ // Hook into debugger
526
+ ::arrow::internal::DebugTrap();
527
+ }
528
+
529
+ } // namespace gdb
530
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/gdb.h ADDED
@@ -0,0 +1,29 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include "arrow/python/visibility.h"
21
+
22
+ namespace arrow {
23
+ namespace gdb {
24
+
25
+ ARROW_PYTHON_EXPORT
26
+ void TestSession();
27
+
28
+ } // namespace gdb
29
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/helpers.cc ADDED
@@ -0,0 +1,472 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // helpers.h includes a NumPy header, so we include this first
19
+ #include "arrow/python/numpy_interop.h"
20
+
21
+ #include "arrow/python/helpers.h"
22
+
23
+ #include <cmath>
24
+ #include <limits>
25
+ #include <sstream>
26
+ #include <type_traits>
27
+
28
+ #include "arrow/python/common.h"
29
+ #include "arrow/python/decimal.h"
30
+ #include "arrow/type_fwd.h"
31
+ #include "arrow/util/checked_cast.h"
32
+ #include "arrow/util/logging.h"
33
+
34
+ namespace arrow {
35
+
36
+ using internal::checked_cast;
37
+
38
+ namespace py {
39
+
40
+ #define GET_PRIMITIVE_TYPE(NAME, FACTORY) \
41
+ case Type::NAME: \
42
+ return FACTORY()
43
+
44
+ std::shared_ptr<DataType> GetPrimitiveType(Type::type type) {
45
+ switch (type) {
46
+ case Type::NA:
47
+ return null();
48
+ GET_PRIMITIVE_TYPE(UINT8, uint8);
49
+ GET_PRIMITIVE_TYPE(INT8, int8);
50
+ GET_PRIMITIVE_TYPE(UINT16, uint16);
51
+ GET_PRIMITIVE_TYPE(INT16, int16);
52
+ GET_PRIMITIVE_TYPE(UINT32, uint32);
53
+ GET_PRIMITIVE_TYPE(INT32, int32);
54
+ GET_PRIMITIVE_TYPE(UINT64, uint64);
55
+ GET_PRIMITIVE_TYPE(INT64, int64);
56
+ GET_PRIMITIVE_TYPE(DATE32, date32);
57
+ GET_PRIMITIVE_TYPE(DATE64, date64);
58
+ GET_PRIMITIVE_TYPE(BOOL, boolean);
59
+ GET_PRIMITIVE_TYPE(HALF_FLOAT, float16);
60
+ GET_PRIMITIVE_TYPE(FLOAT, float32);
61
+ GET_PRIMITIVE_TYPE(DOUBLE, float64);
62
+ GET_PRIMITIVE_TYPE(BINARY, binary);
63
+ GET_PRIMITIVE_TYPE(STRING, utf8);
64
+ GET_PRIMITIVE_TYPE(LARGE_BINARY, large_binary);
65
+ GET_PRIMITIVE_TYPE(LARGE_STRING, large_utf8);
66
+ GET_PRIMITIVE_TYPE(BINARY_VIEW, binary_view);
67
+ GET_PRIMITIVE_TYPE(STRING_VIEW, utf8_view);
68
+ GET_PRIMITIVE_TYPE(INTERVAL_MONTH_DAY_NANO, month_day_nano_interval);
69
+ default:
70
+ return nullptr;
71
+ }
72
+ }
73
+
74
+ PyObject* PyHalf_FromHalf(npy_half value) {
75
+ PyObject* result = PyArrayScalar_New(Half);
76
+ if (result != NULL) {
77
+ PyArrayScalar_ASSIGN(result, Half, value);
78
+ }
79
+ return result;
80
+ }
81
+
82
+ Status PyFloat_AsHalf(PyObject* obj, npy_half* out) {
83
+ if (PyArray_IsScalar(obj, Half)) {
84
+ *out = PyArrayScalar_VAL(obj, Half);
85
+ return Status::OK();
86
+ } else {
87
+ // XXX: cannot use npy_double_to_half() without linking with Numpy
88
+ return Status::TypeError("Expected np.float16 instance");
89
+ }
90
+ }
91
+
92
+ namespace internal {
93
+
94
+ std::string PyBytes_AsStdString(PyObject* obj) {
95
+ DCHECK(PyBytes_Check(obj));
96
+ return std::string(PyBytes_AS_STRING(obj), PyBytes_GET_SIZE(obj));
97
+ }
98
+
99
+ Status PyUnicode_AsStdString(PyObject* obj, std::string* out) {
100
+ DCHECK(PyUnicode_Check(obj));
101
+ Py_ssize_t size;
102
+ // The utf-8 representation is cached on the unicode object
103
+ const char* data = PyUnicode_AsUTF8AndSize(obj, &size);
104
+ RETURN_IF_PYERROR();
105
+ *out = std::string(data, size);
106
+ return Status::OK();
107
+ }
108
+
109
+ std::string PyObject_StdStringRepr(PyObject* obj) {
110
+ OwnedRef unicode_ref(PyObject_Repr(obj));
111
+ OwnedRef bytes_ref;
112
+
113
+ if (unicode_ref) {
114
+ bytes_ref.reset(
115
+ PyUnicode_AsEncodedString(unicode_ref.obj(), "utf8", "backslashreplace"));
116
+ }
117
+ if (!bytes_ref) {
118
+ PyErr_Clear();
119
+ std::stringstream ss;
120
+ ss << "<object of type '" << Py_TYPE(obj)->tp_name << "' repr() failed>";
121
+ return ss.str();
122
+ }
123
+ return PyBytes_AsStdString(bytes_ref.obj());
124
+ }
125
+
126
+ Status PyObject_StdStringStr(PyObject* obj, std::string* out) {
127
+ OwnedRef string_ref(PyObject_Str(obj));
128
+ RETURN_IF_PYERROR();
129
+ return PyUnicode_AsStdString(string_ref.obj(), out);
130
+ }
131
+
132
+ Result<bool> IsModuleImported(const std::string& module_name) {
133
+ // PyImport_GetModuleDict returns with a borrowed reference
134
+ OwnedRef key(PyUnicode_FromString(module_name.c_str()));
135
+ auto is_imported = PyDict_Contains(PyImport_GetModuleDict(), key.obj());
136
+ RETURN_IF_PYERROR();
137
+ return is_imported;
138
+ }
139
+
140
+ Status ImportModule(const std::string& module_name, OwnedRef* ref) {
141
+ PyObject* module = PyImport_ImportModule(module_name.c_str());
142
+ RETURN_IF_PYERROR();
143
+ ref->reset(module);
144
+ return Status::OK();
145
+ }
146
+
147
+ Status ImportFromModule(PyObject* module, const std::string& name, OwnedRef* ref) {
148
+ PyObject* attr = PyObject_GetAttrString(module, name.c_str());
149
+ RETURN_IF_PYERROR();
150
+ ref->reset(attr);
151
+ return Status::OK();
152
+ }
153
+
154
+ namespace {
155
+
156
+ Status IntegerOverflowStatus(PyObject* obj, const std::string& overflow_message) {
157
+ if (overflow_message.empty()) {
158
+ std::string obj_as_stdstring;
159
+ RETURN_NOT_OK(PyObject_StdStringStr(obj, &obj_as_stdstring));
160
+ return Status::Invalid("Value ", obj_as_stdstring,
161
+ " too large to fit in C integer type");
162
+ } else {
163
+ return Status::Invalid(overflow_message);
164
+ }
165
+ }
166
+
167
+ Result<OwnedRef> PyObjectToPyInt(PyObject* obj) {
168
+ // Try to call __index__ or __int__ on `obj`
169
+ // (starting from Python 3.10, the latter isn't done anymore by PyLong_AsLong*).
170
+ OwnedRef ref(PyNumber_Index(obj));
171
+ if (ref) {
172
+ return std::move(ref);
173
+ }
174
+ PyErr_Clear();
175
+ const auto nb = Py_TYPE(obj)->tp_as_number;
176
+ if (nb && nb->nb_int) {
177
+ ref.reset(nb->nb_int(obj));
178
+ if (!ref) {
179
+ RETURN_IF_PYERROR();
180
+ }
181
+ DCHECK(ref);
182
+ return std::move(ref);
183
+ }
184
+ return Status::TypeError(
185
+ "object of type ",
186
+ PyObject_StdStringRepr(reinterpret_cast<PyObject*>(Py_TYPE(obj))),
187
+ " cannot be converted to int");
188
+ }
189
+
190
+ // Extract C signed int from Python object
191
+ template <typename Int, enable_if_t<std::is_signed<Int>::value, Int> = 0>
192
+ Status CIntFromPythonImpl(PyObject* obj, Int* out, const std::string& overflow_message) {
193
+ static_assert(sizeof(Int) <= sizeof(long long), // NOLINT
194
+ "integer type larger than long long");
195
+
196
+ OwnedRef ref;
197
+ if (!PyLong_Check(obj)) {
198
+ ARROW_ASSIGN_OR_RAISE(ref, PyObjectToPyInt(obj));
199
+ obj = ref.obj();
200
+ }
201
+
202
+ if (sizeof(Int) > sizeof(long)) { // NOLINT
203
+ const auto value = PyLong_AsLongLong(obj);
204
+ if (ARROW_PREDICT_FALSE(value == -1)) {
205
+ RETURN_IF_PYERROR();
206
+ }
207
+ if (ARROW_PREDICT_FALSE(value < std::numeric_limits<Int>::min() ||
208
+ value > std::numeric_limits<Int>::max())) {
209
+ return IntegerOverflowStatus(obj, overflow_message);
210
+ }
211
+ *out = static_cast<Int>(value);
212
+ } else {
213
+ const auto value = PyLong_AsLong(obj);
214
+ if (ARROW_PREDICT_FALSE(value == -1)) {
215
+ RETURN_IF_PYERROR();
216
+ }
217
+ if (ARROW_PREDICT_FALSE(value < std::numeric_limits<Int>::min() ||
218
+ value > std::numeric_limits<Int>::max())) {
219
+ return IntegerOverflowStatus(obj, overflow_message);
220
+ }
221
+ *out = static_cast<Int>(value);
222
+ }
223
+ return Status::OK();
224
+ }
225
+
226
+ // Extract C unsigned int from Python object
227
+ template <typename Int, enable_if_t<std::is_unsigned<Int>::value, Int> = 0>
228
+ Status CIntFromPythonImpl(PyObject* obj, Int* out, const std::string& overflow_message) {
229
+ static_assert(sizeof(Int) <= sizeof(unsigned long long), // NOLINT
230
+ "integer type larger than unsigned long long");
231
+
232
+ OwnedRef ref;
233
+ if (!PyLong_Check(obj)) {
234
+ ARROW_ASSIGN_OR_RAISE(ref, PyObjectToPyInt(obj));
235
+ obj = ref.obj();
236
+ }
237
+
238
+ if (sizeof(Int) > sizeof(unsigned long)) { // NOLINT
239
+ const auto value = PyLong_AsUnsignedLongLong(obj);
240
+ if (ARROW_PREDICT_FALSE(value == static_cast<decltype(value)>(-1))) {
241
+ RETURN_IF_PYERROR();
242
+ }
243
+ if (ARROW_PREDICT_FALSE(value > std::numeric_limits<Int>::max())) {
244
+ return IntegerOverflowStatus(obj, overflow_message);
245
+ }
246
+ *out = static_cast<Int>(value);
247
+ } else {
248
+ const auto value = PyLong_AsUnsignedLong(obj);
249
+ if (ARROW_PREDICT_FALSE(value == static_cast<decltype(value)>(-1))) {
250
+ RETURN_IF_PYERROR();
251
+ }
252
+ if (ARROW_PREDICT_FALSE(value > std::numeric_limits<Int>::max())) {
253
+ return IntegerOverflowStatus(obj, overflow_message);
254
+ }
255
+ *out = static_cast<Int>(value);
256
+ }
257
+ return Status::OK();
258
+ }
259
+
260
+ } // namespace
261
+
262
+ template <typename Int>
263
+ Status CIntFromPython(PyObject* obj, Int* out, const std::string& overflow_message) {
264
+ if (PyBool_Check(obj)) {
265
+ return Status::TypeError("Expected integer, got bool");
266
+ }
267
+ return CIntFromPythonImpl(obj, out, overflow_message);
268
+ }
269
+
270
+ template Status CIntFromPython(PyObject*, int8_t*, const std::string&);
271
+ template Status CIntFromPython(PyObject*, int16_t*, const std::string&);
272
+ template Status CIntFromPython(PyObject*, int32_t*, const std::string&);
273
+ template Status CIntFromPython(PyObject*, int64_t*, const std::string&);
274
+ template Status CIntFromPython(PyObject*, uint8_t*, const std::string&);
275
+ template Status CIntFromPython(PyObject*, uint16_t*, const std::string&);
276
+ template Status CIntFromPython(PyObject*, uint32_t*, const std::string&);
277
+ template Status CIntFromPython(PyObject*, uint64_t*, const std::string&);
278
+
279
+ inline bool MayHaveNaN(PyObject* obj) {
280
+ // Some core types can be very quickly type-checked and do not allow NaN values
281
+ const int64_t non_nan_tpflags = Py_TPFLAGS_LONG_SUBCLASS | Py_TPFLAGS_LIST_SUBCLASS |
282
+ Py_TPFLAGS_TUPLE_SUBCLASS | Py_TPFLAGS_BYTES_SUBCLASS |
283
+ Py_TPFLAGS_UNICODE_SUBCLASS | Py_TPFLAGS_DICT_SUBCLASS |
284
+ Py_TPFLAGS_BASE_EXC_SUBCLASS | Py_TPFLAGS_TYPE_SUBCLASS;
285
+ return !PyType_HasFeature(Py_TYPE(obj), non_nan_tpflags);
286
+ }
287
+
288
+ bool PyFloat_IsNaN(PyObject* obj) {
289
+ return PyFloat_Check(obj) && std::isnan(PyFloat_AsDouble(obj));
290
+ }
291
+
292
+ namespace {
293
+
294
+ static bool pandas_static_initialized = false;
295
+
296
+ // Once initialized, these variables hold borrowed references to Pandas static data.
297
+ // We should not use OwnedRef here because Python destructors would be
298
+ // called on a finalized interpreter.
299
+ static PyObject* pandas_NA = nullptr;
300
+ static PyObject* pandas_NaT = nullptr;
301
+ static PyObject* pandas_Timedelta = nullptr;
302
+ static PyObject* pandas_Timestamp = nullptr;
303
+ static PyTypeObject* pandas_NaTType = nullptr;
304
+ static PyObject* pandas_DateOffset = nullptr;
305
+
306
+ } // namespace
307
+
308
+ void InitPandasStaticData() {
309
+ // NOTE: This is called with the GIL held. We needn't (and shouldn't,
310
+ // to avoid deadlocks) use an additional C++ lock (ARROW-10519).
311
+ if (pandas_static_initialized) {
312
+ return;
313
+ }
314
+
315
+ OwnedRef pandas;
316
+
317
+ // Import pandas
318
+ Status s = ImportModule("pandas", &pandas);
319
+ if (!s.ok()) {
320
+ return;
321
+ }
322
+
323
+ // Since ImportModule can release the GIL, another thread could have
324
+ // already initialized the static data.
325
+ if (pandas_static_initialized) {
326
+ return;
327
+ }
328
+ OwnedRef ref;
329
+
330
+ // set NaT sentinel and its type
331
+ if (ImportFromModule(pandas.obj(), "NaT", &ref).ok()) {
332
+ pandas_NaT = ref.obj();
333
+ // PyObject_Type returns a new reference but we trust that pandas.NaT will
334
+ // outlive our use of this PyObject*
335
+ pandas_NaTType = Py_TYPE(ref.obj());
336
+ }
337
+
338
+ // retain a reference to Timedelta
339
+ if (ImportFromModule(pandas.obj(), "Timedelta", &ref).ok()) {
340
+ pandas_Timedelta = ref.obj();
341
+ }
342
+
343
+ // retain a reference to Timestamp
344
+ if (ImportFromModule(pandas.obj(), "Timestamp", &ref).ok()) {
345
+ pandas_Timestamp = ref.obj();
346
+ }
347
+
348
+ // if pandas.NA exists, retain a reference to it
349
+ if (ImportFromModule(pandas.obj(), "NA", &ref).ok()) {
350
+ pandas_NA = ref.obj();
351
+ }
352
+
353
+ // Import DateOffset type
354
+ if (ImportFromModule(pandas.obj(), "DateOffset", &ref).ok()) {
355
+ pandas_DateOffset = ref.obj();
356
+ }
357
+
358
+ pandas_static_initialized = true;
359
+ }
360
+
361
+ bool PandasObjectIsNull(PyObject* obj) {
362
+ if (!MayHaveNaN(obj)) {
363
+ return false;
364
+ }
365
+ if (obj == Py_None) {
366
+ return true;
367
+ }
368
+ if (PyFloat_IsNaN(obj) || (pandas_NA && obj == pandas_NA) ||
369
+ (pandas_NaTType && PyObject_TypeCheck(obj, pandas_NaTType)) ||
370
+ (internal::PyDecimal_Check(obj) && internal::PyDecimal_ISNAN(obj))) {
371
+ return true;
372
+ }
373
+ return false;
374
+ }
375
+
376
+ bool IsPandasTimedelta(PyObject* obj) {
377
+ return pandas_Timedelta && PyObject_IsInstance(obj, pandas_Timedelta);
378
+ }
379
+
380
+ bool IsPandasTimestamp(PyObject* obj) {
381
+ return pandas_Timestamp && PyObject_IsInstance(obj, pandas_Timestamp);
382
+ }
383
+
384
+ PyObject* BorrowPandasDataOffsetType() { return pandas_DateOffset; }
385
+
386
+ Status InvalidValue(PyObject* obj, const std::string& why) {
387
+ auto obj_as_str = PyObject_StdStringRepr(obj);
388
+ return Status::Invalid("Could not convert ", std::move(obj_as_str), " with type ",
389
+ Py_TYPE(obj)->tp_name, ": ", why);
390
+ }
391
+
392
+ Status InvalidType(PyObject* obj, const std::string& why) {
393
+ auto obj_as_str = PyObject_StdStringRepr(obj);
394
+ return Status::TypeError("Could not convert ", std::move(obj_as_str), " with type ",
395
+ Py_TYPE(obj)->tp_name, ": ", why);
396
+ }
397
+
398
+ Status UnboxIntegerAsInt64(PyObject* obj, int64_t* out) {
399
+ if (PyLong_Check(obj)) {
400
+ int overflow = 0;
401
+ *out = PyLong_AsLongLongAndOverflow(obj, &overflow);
402
+ if (overflow) {
403
+ return Status::Invalid("PyLong is too large to fit int64");
404
+ }
405
+ } else if (PyArray_IsScalar(obj, Byte)) {
406
+ *out = reinterpret_cast<PyByteScalarObject*>(obj)->obval;
407
+ } else if (PyArray_IsScalar(obj, UByte)) {
408
+ *out = reinterpret_cast<PyUByteScalarObject*>(obj)->obval;
409
+ } else if (PyArray_IsScalar(obj, Short)) {
410
+ *out = reinterpret_cast<PyShortScalarObject*>(obj)->obval;
411
+ } else if (PyArray_IsScalar(obj, UShort)) {
412
+ *out = reinterpret_cast<PyUShortScalarObject*>(obj)->obval;
413
+ } else if (PyArray_IsScalar(obj, Int)) {
414
+ *out = reinterpret_cast<PyIntScalarObject*>(obj)->obval;
415
+ } else if (PyArray_IsScalar(obj, UInt)) {
416
+ *out = reinterpret_cast<PyUIntScalarObject*>(obj)->obval;
417
+ } else if (PyArray_IsScalar(obj, Long)) {
418
+ *out = reinterpret_cast<PyLongScalarObject*>(obj)->obval;
419
+ } else if (PyArray_IsScalar(obj, ULong)) {
420
+ *out = reinterpret_cast<PyULongScalarObject*>(obj)->obval;
421
+ } else if (PyArray_IsScalar(obj, LongLong)) {
422
+ *out = reinterpret_cast<PyLongLongScalarObject*>(obj)->obval;
423
+ } else if (PyArray_IsScalar(obj, Int64)) {
424
+ *out = reinterpret_cast<PyInt64ScalarObject*>(obj)->obval;
425
+ } else if (PyArray_IsScalar(obj, ULongLong)) {
426
+ *out = reinterpret_cast<PyULongLongScalarObject*>(obj)->obval;
427
+ } else if (PyArray_IsScalar(obj, UInt64)) {
428
+ *out = reinterpret_cast<PyUInt64ScalarObject*>(obj)->obval;
429
+ } else {
430
+ return Status::Invalid("Integer scalar type not recognized");
431
+ }
432
+ return Status::OK();
433
+ }
434
+
435
+ Status IntegerScalarToDoubleSafe(PyObject* obj, double* out) {
436
+ int64_t value = 0;
437
+ RETURN_NOT_OK(UnboxIntegerAsInt64(obj, &value));
438
+
439
+ constexpr int64_t kDoubleMax = 1LL << 53;
440
+ constexpr int64_t kDoubleMin = -(1LL << 53);
441
+
442
+ if (value < kDoubleMin || value > kDoubleMax) {
443
+ return Status::Invalid("Integer value ", value, " is outside of the range exactly",
444
+ " representable by an IEEE 754 double precision value");
445
+ }
446
+ *out = static_cast<double>(value);
447
+ return Status::OK();
448
+ }
449
+
450
+ Status IntegerScalarToFloat32Safe(PyObject* obj, float* out) {
451
+ int64_t value = 0;
452
+ RETURN_NOT_OK(UnboxIntegerAsInt64(obj, &value));
453
+
454
+ constexpr int64_t kFloatMax = 1LL << 24;
455
+ constexpr int64_t kFloatMin = -(1LL << 24);
456
+
457
+ if (value < kFloatMin || value > kFloatMax) {
458
+ return Status::Invalid("Integer value ", value, " is outside of the range exactly",
459
+ " representable by an IEEE 754 single precision value");
460
+ }
461
+ *out = static_cast<float>(value);
462
+ return Status::OK();
463
+ }
464
+
465
+ void DebugPrint(PyObject* obj) {
466
+ std::string repr = PyObject_StdStringRepr(obj);
467
+ PySys_WriteStderr("%s\n", repr.c_str());
468
+ }
469
+
470
+ } // namespace internal
471
+ } // namespace py
472
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/inference.cc ADDED
@@ -0,0 +1,745 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "arrow/python/inference.h"
19
+ #include "arrow/python/numpy_interop.h"
20
+
21
+ #include <datetime.h>
22
+
23
+ #include <algorithm>
24
+ #include <limits>
25
+ #include <map>
26
+ #include <string>
27
+ #include <utility>
28
+ #include <vector>
29
+
30
+ #include "arrow/scalar.h"
31
+ #include "arrow/status.h"
32
+ #include "arrow/util/decimal.h"
33
+ #include "arrow/util/logging.h"
34
+
35
+ #include "arrow/python/datetime.h"
36
+ #include "arrow/python/decimal.h"
37
+ #include "arrow/python/helpers.h"
38
+ #include "arrow/python/iterators.h"
39
+ #include "arrow/python/numpy_convert.h"
40
+
41
+ namespace arrow {
42
+ namespace py {
43
+ namespace {
44
+ // Assigns a tuple to interval_types_tuple containing the namedtuple for
45
+ // MonthDayNanoIntervalType and if present dateutil's relativedelta and
46
+ // pandas DateOffset.
47
+ Status ImportPresentIntervalTypes(OwnedRefNoGIL* interval_types_tuple) {
48
+ OwnedRef relative_delta_module;
49
+ // These are optional imports, so swallow errors.
50
+ OwnedRef relative_delta_type;
51
+ // Try to import pandas to get types.
52
+ internal::InitPandasStaticData();
53
+ if (internal::ImportModule("dateutil.relativedelta", &relative_delta_module).ok()) {
54
+ RETURN_NOT_OK(internal::ImportFromModule(relative_delta_module.obj(), "relativedelta",
55
+ &relative_delta_type));
56
+ }
57
+
58
+ PyObject* date_offset_type = internal::BorrowPandasDataOffsetType();
59
+ interval_types_tuple->reset(
60
+ PyTuple_New(1 + (date_offset_type != nullptr ? 1 : 0) +
61
+ (relative_delta_type.obj() != nullptr ? 1 : 0)));
62
+ RETURN_IF_PYERROR();
63
+ int index = 0;
64
+ PyTuple_SetItem(interval_types_tuple->obj(), index++,
65
+ internal::NewMonthDayNanoTupleType());
66
+ RETURN_IF_PYERROR();
67
+ if (date_offset_type != nullptr) {
68
+ Py_XINCREF(date_offset_type);
69
+ PyTuple_SetItem(interval_types_tuple->obj(), index++, date_offset_type);
70
+ RETURN_IF_PYERROR();
71
+ }
72
+ if (relative_delta_type.obj() != nullptr) {
73
+ PyTuple_SetItem(interval_types_tuple->obj(), index++, relative_delta_type.detach());
74
+ RETURN_IF_PYERROR();
75
+ }
76
+ return Status::OK();
77
+ }
78
+
79
+ } // namespace
80
+
81
+ #define _NUMPY_UNIFY_NOOP(DTYPE) \
82
+ case NPY_##DTYPE: \
83
+ return OK;
84
+
85
+ #define _NUMPY_UNIFY_PROMOTE(DTYPE) \
86
+ case NPY_##DTYPE: \
87
+ current_type_num_ = dtype; \
88
+ current_dtype_ = descr; \
89
+ return OK;
90
+
91
+ #define _NUMPY_UNIFY_PROMOTE_TO(DTYPE, NEW_TYPE) \
92
+ case NPY_##DTYPE: \
93
+ current_type_num_ = NPY_##NEW_TYPE; \
94
+ current_dtype_ = PyArray_DescrFromType(current_type_num_); \
95
+ return OK;
96
+
97
+ // Form a consensus NumPy dtype to use for Arrow conversion for a
98
+ // collection of dtype objects observed one at a time
99
+ class NumPyDtypeUnifier {
100
+ public:
101
+ enum Action { OK, INVALID };
102
+
103
+ NumPyDtypeUnifier() : current_type_num_(-1), current_dtype_(nullptr) {}
104
+
105
+ Status InvalidMix(int new_dtype) {
106
+ return Status::Invalid("Cannot mix NumPy dtypes ",
107
+ GetNumPyTypeName(current_type_num_), " and ",
108
+ GetNumPyTypeName(new_dtype));
109
+ }
110
+
111
+ int Observe_BOOL(PyArray_Descr* descr, int dtype) { return INVALID; }
112
+
113
+ int Observe_INT8(PyArray_Descr* descr, int dtype) {
114
+ switch (dtype) {
115
+ _NUMPY_UNIFY_PROMOTE(INT16);
116
+ _NUMPY_UNIFY_PROMOTE(INT32);
117
+ _NUMPY_UNIFY_PROMOTE(INT64);
118
+ _NUMPY_UNIFY_PROMOTE(FLOAT32);
119
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
120
+ default:
121
+ return INVALID;
122
+ }
123
+ }
124
+
125
+ int Observe_INT16(PyArray_Descr* descr, int dtype) {
126
+ switch (dtype) {
127
+ _NUMPY_UNIFY_NOOP(INT8);
128
+ _NUMPY_UNIFY_PROMOTE(INT32);
129
+ _NUMPY_UNIFY_PROMOTE(INT64);
130
+ _NUMPY_UNIFY_NOOP(UINT8);
131
+ _NUMPY_UNIFY_PROMOTE(FLOAT32);
132
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
133
+ default:
134
+ return INVALID;
135
+ }
136
+ }
137
+
138
+ int Observe_INT32(PyArray_Descr* descr, int dtype) {
139
+ switch (dtype) {
140
+ _NUMPY_UNIFY_NOOP(INT8);
141
+ _NUMPY_UNIFY_NOOP(INT16);
142
+ _NUMPY_UNIFY_PROMOTE(INT32);
143
+ _NUMPY_UNIFY_PROMOTE(INT64);
144
+ _NUMPY_UNIFY_NOOP(UINT8);
145
+ _NUMPY_UNIFY_NOOP(UINT16);
146
+ _NUMPY_UNIFY_PROMOTE_TO(FLOAT32, FLOAT64);
147
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
148
+ default:
149
+ return INVALID;
150
+ }
151
+ }
152
+
153
+ int Observe_INT64(PyArray_Descr* descr, int dtype) {
154
+ switch (dtype) {
155
+ _NUMPY_UNIFY_NOOP(INT8);
156
+ _NUMPY_UNIFY_NOOP(INT16);
157
+ _NUMPY_UNIFY_NOOP(INT32);
158
+ _NUMPY_UNIFY_NOOP(INT64);
159
+ _NUMPY_UNIFY_NOOP(UINT8);
160
+ _NUMPY_UNIFY_NOOP(UINT16);
161
+ _NUMPY_UNIFY_NOOP(UINT32);
162
+ _NUMPY_UNIFY_PROMOTE_TO(FLOAT32, FLOAT64);
163
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
164
+ default:
165
+ return INVALID;
166
+ }
167
+ }
168
+
169
+ int Observe_UINT8(PyArray_Descr* descr, int dtype) {
170
+ switch (dtype) {
171
+ _NUMPY_UNIFY_PROMOTE(UINT16);
172
+ _NUMPY_UNIFY_PROMOTE(UINT32);
173
+ _NUMPY_UNIFY_PROMOTE(UINT64);
174
+ _NUMPY_UNIFY_PROMOTE(FLOAT32);
175
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
176
+ default:
177
+ return INVALID;
178
+ }
179
+ }
180
+
181
+ int Observe_UINT16(PyArray_Descr* descr, int dtype) {
182
+ switch (dtype) {
183
+ _NUMPY_UNIFY_NOOP(UINT8);
184
+ _NUMPY_UNIFY_PROMOTE(UINT32);
185
+ _NUMPY_UNIFY_PROMOTE(UINT64);
186
+ _NUMPY_UNIFY_PROMOTE(FLOAT32);
187
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
188
+ default:
189
+ return INVALID;
190
+ }
191
+ }
192
+
193
+ int Observe_UINT32(PyArray_Descr* descr, int dtype) {
194
+ switch (dtype) {
195
+ _NUMPY_UNIFY_NOOP(UINT8);
196
+ _NUMPY_UNIFY_NOOP(UINT16);
197
+ _NUMPY_UNIFY_PROMOTE(UINT64);
198
+ _NUMPY_UNIFY_PROMOTE_TO(FLOAT32, FLOAT64);
199
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
200
+ default:
201
+ return INVALID;
202
+ }
203
+ }
204
+
205
+ int Observe_UINT64(PyArray_Descr* descr, int dtype) {
206
+ switch (dtype) {
207
+ _NUMPY_UNIFY_NOOP(UINT8);
208
+ _NUMPY_UNIFY_NOOP(UINT16);
209
+ _NUMPY_UNIFY_NOOP(UINT32);
210
+ _NUMPY_UNIFY_PROMOTE_TO(FLOAT32, FLOAT64);
211
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
212
+ default:
213
+ return INVALID;
214
+ }
215
+ }
216
+
217
+ int Observe_FLOAT16(PyArray_Descr* descr, int dtype) {
218
+ switch (dtype) {
219
+ _NUMPY_UNIFY_PROMOTE(FLOAT32);
220
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
221
+ default:
222
+ return INVALID;
223
+ }
224
+ }
225
+
226
+ int Observe_FLOAT32(PyArray_Descr* descr, int dtype) {
227
+ switch (dtype) {
228
+ _NUMPY_UNIFY_NOOP(INT8);
229
+ _NUMPY_UNIFY_NOOP(INT16);
230
+ _NUMPY_UNIFY_NOOP(INT32);
231
+ _NUMPY_UNIFY_NOOP(INT64);
232
+ _NUMPY_UNIFY_NOOP(UINT8);
233
+ _NUMPY_UNIFY_NOOP(UINT16);
234
+ _NUMPY_UNIFY_NOOP(UINT32);
235
+ _NUMPY_UNIFY_NOOP(UINT64);
236
+ _NUMPY_UNIFY_PROMOTE(FLOAT64);
237
+ default:
238
+ return INVALID;
239
+ }
240
+ }
241
+
242
+ int Observe_FLOAT64(PyArray_Descr* descr, int dtype) {
243
+ switch (dtype) {
244
+ _NUMPY_UNIFY_NOOP(INT8);
245
+ _NUMPY_UNIFY_NOOP(INT16);
246
+ _NUMPY_UNIFY_NOOP(INT32);
247
+ _NUMPY_UNIFY_NOOP(INT64);
248
+ _NUMPY_UNIFY_NOOP(UINT8);
249
+ _NUMPY_UNIFY_NOOP(UINT16);
250
+ _NUMPY_UNIFY_NOOP(UINT32);
251
+ _NUMPY_UNIFY_NOOP(UINT64);
252
+ default:
253
+ return INVALID;
254
+ }
255
+ }
256
+
257
+ int Observe_DATETIME(PyArray_Descr* dtype_obj) {
258
+ // TODO: check that units are all the same
259
+ return OK;
260
+ }
261
+
262
+ Status Observe(PyArray_Descr* descr) {
263
+ int dtype = fix_numpy_type_num(descr->type_num);
264
+
265
+ if (current_type_num_ == -1) {
266
+ current_dtype_ = descr;
267
+ current_type_num_ = dtype;
268
+ return Status::OK();
269
+ } else if (current_type_num_ == dtype) {
270
+ return Status::OK();
271
+ }
272
+
273
+ #define OBSERVE_CASE(DTYPE) \
274
+ case NPY_##DTYPE: \
275
+ action = Observe_##DTYPE(descr, dtype); \
276
+ break;
277
+
278
+ int action = OK;
279
+ switch (current_type_num_) {
280
+ OBSERVE_CASE(BOOL);
281
+ OBSERVE_CASE(INT8);
282
+ OBSERVE_CASE(INT16);
283
+ OBSERVE_CASE(INT32);
284
+ OBSERVE_CASE(INT64);
285
+ OBSERVE_CASE(UINT8);
286
+ OBSERVE_CASE(UINT16);
287
+ OBSERVE_CASE(UINT32);
288
+ OBSERVE_CASE(UINT64);
289
+ OBSERVE_CASE(FLOAT16);
290
+ OBSERVE_CASE(FLOAT32);
291
+ OBSERVE_CASE(FLOAT64);
292
+ case NPY_DATETIME:
293
+ action = Observe_DATETIME(descr);
294
+ break;
295
+ default:
296
+ return Status::NotImplemented("Unsupported numpy type ", GetNumPyTypeName(dtype));
297
+ }
298
+
299
+ if (action == INVALID) {
300
+ return InvalidMix(dtype);
301
+ }
302
+ return Status::OK();
303
+ }
304
+
305
+ bool dtype_was_observed() const { return current_type_num_ != -1; }
306
+
307
+ PyArray_Descr* current_dtype() const { return current_dtype_; }
308
+
309
+ int current_type_num() const { return current_type_num_; }
310
+
311
+ private:
312
+ int current_type_num_;
313
+ PyArray_Descr* current_dtype_;
314
+ };
315
+
316
+ class TypeInferrer {
317
+ // A type inference visitor for Python values
318
+ public:
319
+ // \param validate_interval the number of elements to observe before checking
320
+ // whether the data is mixed type or has other problems. This helps avoid
321
+ // excess computation for each element while also making sure we "bail out"
322
+ // early with long sequences that may have problems up front
323
+ // \param make_unions permit mixed-type data by creating union types (not yet
324
+ // implemented)
325
+ explicit TypeInferrer(bool pandas_null_sentinels = false,
326
+ int64_t validate_interval = 100, bool make_unions = false)
327
+ : pandas_null_sentinels_(pandas_null_sentinels),
328
+ validate_interval_(validate_interval),
329
+ make_unions_(make_unions),
330
+ total_count_(0),
331
+ none_count_(0),
332
+ bool_count_(0),
333
+ int_count_(0),
334
+ date_count_(0),
335
+ time_count_(0),
336
+ timestamp_micro_count_(0),
337
+ duration_count_(0),
338
+ float_count_(0),
339
+ binary_count_(0),
340
+ unicode_count_(0),
341
+ decimal_count_(0),
342
+ list_count_(0),
343
+ struct_count_(0),
344
+ arrow_scalar_count_(0),
345
+ numpy_dtype_count_(0),
346
+ interval_count_(0),
347
+ max_decimal_metadata_(std::numeric_limits<int32_t>::min(),
348
+ std::numeric_limits<int32_t>::min()),
349
+ decimal_type_() {
350
+ ARROW_CHECK_OK(internal::ImportDecimalType(&decimal_type_));
351
+ ARROW_CHECK_OK(ImportPresentIntervalTypes(&interval_types_));
352
+ }
353
+
354
+ /// \param[in] obj a Python object in the sequence
355
+ /// \param[out] keep_going if sufficient information has been gathered to
356
+ /// attempt to begin converting the sequence, *keep_going will be set to true
357
+ /// to signal to the calling visitor loop to terminate
358
+ Status Visit(PyObject* obj, bool* keep_going) {
359
+ ++total_count_;
360
+
361
+ if (obj == Py_None || (pandas_null_sentinels_ && internal::PandasObjectIsNull(obj))) {
362
+ ++none_count_;
363
+ } else if (PyBool_Check(obj)) {
364
+ ++bool_count_;
365
+ *keep_going = make_unions_;
366
+ } else if (PyFloat_Check(obj)) {
367
+ ++float_count_;
368
+ *keep_going = make_unions_;
369
+ } else if (internal::IsPyInteger(obj)) {
370
+ ++int_count_;
371
+ } else if (PyDateTime_Check(obj)) {
372
+ // infer timezone from the first encountered datetime object
373
+ if (!timestamp_micro_count_) {
374
+ OwnedRef tzinfo(PyObject_GetAttrString(obj, "tzinfo"));
375
+ if (tzinfo.obj() != nullptr && tzinfo.obj() != Py_None) {
376
+ ARROW_ASSIGN_OR_RAISE(timezone_, internal::TzinfoToString(tzinfo.obj()));
377
+ }
378
+ }
379
+ ++timestamp_micro_count_;
380
+ *keep_going = make_unions_;
381
+ } else if (PyDelta_Check(obj)) {
382
+ ++duration_count_;
383
+ *keep_going = make_unions_;
384
+ } else if (PyDate_Check(obj)) {
385
+ ++date_count_;
386
+ *keep_going = make_unions_;
387
+ } else if (PyTime_Check(obj)) {
388
+ ++time_count_;
389
+ *keep_going = make_unions_;
390
+ } else if (internal::IsPyBinary(obj)) {
391
+ ++binary_count_;
392
+ *keep_going = make_unions_;
393
+ } else if (PyUnicode_Check(obj)) {
394
+ ++unicode_count_;
395
+ *keep_going = make_unions_;
396
+ } else if (arrow::py::is_scalar(obj)) {
397
+ RETURN_NOT_OK(VisitArrowScalar(obj, keep_going));
398
+ } else if (PyArray_CheckAnyScalarExact(obj)) {
399
+ RETURN_NOT_OK(VisitDType(PyArray_DescrFromScalar(obj), keep_going));
400
+ } else if (PySet_Check(obj) || (Py_TYPE(obj) == &PyDictValues_Type)) {
401
+ RETURN_NOT_OK(VisitSet(obj, keep_going));
402
+ } else if (PyArray_Check(obj)) {
403
+ RETURN_NOT_OK(VisitNdarray(obj, keep_going));
404
+ } else if (PyDict_Check(obj)) {
405
+ RETURN_NOT_OK(VisitDict(obj));
406
+ } else if (PyList_Check(obj) ||
407
+ (PyTuple_Check(obj) &&
408
+ !PyObject_IsInstance(obj, PyTuple_GetItem(interval_types_.obj(), 0)))) {
409
+ RETURN_NOT_OK(VisitList(obj, keep_going));
410
+ } else if (PyObject_IsInstance(obj, decimal_type_.obj())) {
411
+ RETURN_NOT_OK(max_decimal_metadata_.Update(obj));
412
+ ++decimal_count_;
413
+ } else if (PyObject_IsInstance(obj, interval_types_.obj())) {
414
+ ++interval_count_;
415
+ } else {
416
+ return internal::InvalidValue(obj,
417
+ "did not recognize Python value type when inferring "
418
+ "an Arrow data type");
419
+ }
420
+
421
+ if (total_count_ % validate_interval_ == 0) {
422
+ RETURN_NOT_OK(Validate());
423
+ }
424
+
425
+ return Status::OK();
426
+ }
427
+
428
+ // Infer value type from a sequence of values
429
+ Status VisitSequence(PyObject* obj, PyObject* mask = nullptr) {
430
+ if (mask == nullptr || mask == Py_None) {
431
+ return internal::VisitSequence(
432
+ obj, /*offset=*/0,
433
+ [this](PyObject* value, bool* keep_going) { return Visit(value, keep_going); });
434
+ } else {
435
+ return internal::VisitSequenceMasked(
436
+ obj, mask, /*offset=*/0,
437
+ [this](PyObject* value, uint8_t masked, bool* keep_going) {
438
+ if (!masked) {
439
+ return Visit(value, keep_going);
440
+ } else {
441
+ return Status::OK();
442
+ }
443
+ });
444
+ }
445
+ }
446
+
447
+ // Infer value type from a sequence of values
448
+ Status VisitIterable(PyObject* obj) {
449
+ return internal::VisitIterable(obj, [this](PyObject* value, bool* keep_going) {
450
+ return Visit(value, keep_going);
451
+ });
452
+ }
453
+
454
+ Status GetType(std::shared_ptr<DataType>* out) {
455
+ // TODO(wesm): handle forming unions
456
+ if (make_unions_) {
457
+ return Status::NotImplemented("Creating union types not yet supported");
458
+ }
459
+
460
+ RETURN_NOT_OK(Validate());
461
+
462
+ if (arrow_scalar_count_ > 0 && arrow_scalar_count_ + none_count_ != total_count_) {
463
+ return Status::Invalid(
464
+ "pyarrow scalars cannot be mixed "
465
+ "with other Python scalar values currently");
466
+ }
467
+
468
+ if (numpy_dtype_count_ > 0) {
469
+ // All NumPy scalars and Nones/nulls
470
+ if (numpy_dtype_count_ + none_count_ == total_count_) {
471
+ return NumPyDtypeToArrow(numpy_unifier_.current_dtype()).Value(out);
472
+ }
473
+
474
+ // The "bad path": data contains a mix of NumPy scalars and
475
+ // other kinds of scalars. Note this can happen innocuously
476
+ // because numpy.nan is not a NumPy scalar (it's a built-in
477
+ // PyFloat)
478
+
479
+ // TODO(ARROW-5564): Merge together type unification so this
480
+ // hack is not necessary
481
+ switch (numpy_unifier_.current_type_num()) {
482
+ case NPY_BOOL:
483
+ bool_count_ += numpy_dtype_count_;
484
+ break;
485
+ case NPY_INT8:
486
+ case NPY_INT16:
487
+ case NPY_INT32:
488
+ case NPY_INT64:
489
+ case NPY_UINT8:
490
+ case NPY_UINT16:
491
+ case NPY_UINT32:
492
+ case NPY_UINT64:
493
+ int_count_ += numpy_dtype_count_;
494
+ break;
495
+ case NPY_FLOAT32:
496
+ case NPY_FLOAT64:
497
+ float_count_ += numpy_dtype_count_;
498
+ break;
499
+ case NPY_DATETIME:
500
+ return Status::Invalid(
501
+ "numpy.datetime64 scalars cannot be mixed "
502
+ "with other Python scalar values currently");
503
+ }
504
+ }
505
+
506
+ if (list_count_) {
507
+ std::shared_ptr<DataType> value_type;
508
+ RETURN_NOT_OK(list_inferrer_->GetType(&value_type));
509
+ *out = list(value_type);
510
+ } else if (struct_count_) {
511
+ RETURN_NOT_OK(GetStructType(out));
512
+ } else if (decimal_count_) {
513
+ if (max_decimal_metadata_.precision() > Decimal128Type::kMaxPrecision) {
514
+ // the default constructor does not validate the precision and scale
515
+ ARROW_ASSIGN_OR_RAISE(*out,
516
+ Decimal256Type::Make(max_decimal_metadata_.precision(),
517
+ max_decimal_metadata_.scale()));
518
+ } else {
519
+ ARROW_ASSIGN_OR_RAISE(*out,
520
+ Decimal128Type::Make(max_decimal_metadata_.precision(),
521
+ max_decimal_metadata_.scale()));
522
+ }
523
+ } else if (float_count_) {
524
+ // Prioritize floats before integers
525
+ *out = float64();
526
+ } else if (int_count_) {
527
+ *out = int64();
528
+ } else if (date_count_) {
529
+ *out = date32();
530
+ } else if (time_count_) {
531
+ *out = time64(TimeUnit::MICRO);
532
+ } else if (timestamp_micro_count_) {
533
+ *out = timestamp(TimeUnit::MICRO, timezone_);
534
+ } else if (duration_count_) {
535
+ *out = duration(TimeUnit::MICRO);
536
+ } else if (bool_count_) {
537
+ *out = boolean();
538
+ } else if (binary_count_) {
539
+ *out = binary();
540
+ } else if (unicode_count_) {
541
+ *out = utf8();
542
+ } else if (interval_count_) {
543
+ *out = month_day_nano_interval();
544
+ } else if (arrow_scalar_count_) {
545
+ *out = scalar_type_;
546
+ } else {
547
+ *out = null();
548
+ }
549
+ return Status::OK();
550
+ }
551
+
552
+ int64_t total_count() const { return total_count_; }
553
+
554
+ protected:
555
+ Status Validate() const {
556
+ if (list_count_ > 0) {
557
+ if (list_count_ + none_count_ != total_count_) {
558
+ return Status::Invalid("cannot mix list and non-list, non-null values");
559
+ }
560
+ RETURN_NOT_OK(list_inferrer_->Validate());
561
+ } else if (struct_count_ > 0) {
562
+ if (struct_count_ + none_count_ != total_count_) {
563
+ return Status::Invalid("cannot mix struct and non-struct, non-null values");
564
+ }
565
+ for (const auto& it : struct_inferrers_) {
566
+ RETURN_NOT_OK(it.second.Validate());
567
+ }
568
+ }
569
+ return Status::OK();
570
+ }
571
+
572
+ Status VisitArrowScalar(PyObject* obj, bool* keep_going /* unused */) {
573
+ ARROW_ASSIGN_OR_RAISE(auto scalar, arrow::py::unwrap_scalar(obj));
574
+ // Check that all the scalar types for the sequence are the same
575
+ if (arrow_scalar_count_ > 0 && *scalar->type != *scalar_type_) {
576
+ return internal::InvalidValue(obj, "cannot mix scalars with different types");
577
+ }
578
+ scalar_type_ = scalar->type;
579
+ ++arrow_scalar_count_;
580
+ return Status::OK();
581
+ }
582
+
583
+ Status VisitDType(PyArray_Descr* dtype, bool* keep_going) {
584
+ // Continue visiting dtypes for now.
585
+ // TODO(wesm): devise approach for unions
586
+ ++numpy_dtype_count_;
587
+ *keep_going = true;
588
+ return numpy_unifier_.Observe(dtype);
589
+ }
590
+
591
+ Status VisitList(PyObject* obj, bool* keep_going /* unused */) {
592
+ if (!list_inferrer_) {
593
+ list_inferrer_.reset(
594
+ new TypeInferrer(pandas_null_sentinels_, validate_interval_, make_unions_));
595
+ }
596
+ ++list_count_;
597
+ return list_inferrer_->VisitSequence(obj);
598
+ }
599
+
600
+ Status VisitSet(PyObject* obj, bool* keep_going /* unused */) {
601
+ if (!list_inferrer_) {
602
+ list_inferrer_.reset(
603
+ new TypeInferrer(pandas_null_sentinels_, validate_interval_, make_unions_));
604
+ }
605
+ ++list_count_;
606
+ return list_inferrer_->VisitIterable(obj);
607
+ }
608
+
609
+ Status VisitNdarray(PyObject* obj, bool* keep_going) {
610
+ PyArray_Descr* dtype = PyArray_DESCR(reinterpret_cast<PyArrayObject*>(obj));
611
+ if (dtype->type_num == NPY_OBJECT) {
612
+ return VisitList(obj, keep_going);
613
+ }
614
+ // Not an object array: infer child Arrow type from dtype
615
+ if (!list_inferrer_) {
616
+ list_inferrer_.reset(
617
+ new TypeInferrer(pandas_null_sentinels_, validate_interval_, make_unions_));
618
+ }
619
+ ++list_count_;
620
+
621
+ // XXX(wesm): In ARROW-4324 I added accounting to check whether
622
+ // all of the non-null values have NumPy dtypes, but the
623
+ // total_count_ was not being properly incremented here
624
+ ++(*list_inferrer_).total_count_;
625
+ return list_inferrer_->VisitDType(dtype, keep_going);
626
+ }
627
+
628
+ Status VisitDict(PyObject* obj) {
629
+ PyObject* key_obj;
630
+ PyObject* value_obj;
631
+ Py_ssize_t pos = 0;
632
+
633
+ while (PyDict_Next(obj, &pos, &key_obj, &value_obj)) {
634
+ std::string key;
635
+ if (PyUnicode_Check(key_obj)) {
636
+ RETURN_NOT_OK(internal::PyUnicode_AsStdString(key_obj, &key));
637
+ } else if (PyBytes_Check(key_obj)) {
638
+ key = internal::PyBytes_AsStdString(key_obj);
639
+ } else {
640
+ return Status::TypeError("Expected dict key of type str or bytes, got '",
641
+ Py_TYPE(key_obj)->tp_name, "'");
642
+ }
643
+ // Get or create visitor for this key
644
+ auto it = struct_inferrers_.find(key);
645
+ if (it == struct_inferrers_.end()) {
646
+ it = struct_inferrers_
647
+ .insert(
648
+ std::make_pair(key, TypeInferrer(pandas_null_sentinels_,
649
+ validate_interval_, make_unions_)))
650
+ .first;
651
+ }
652
+ TypeInferrer* visitor = &it->second;
653
+
654
+ // We ignore termination signals from child visitors for now
655
+ //
656
+ // TODO(wesm): keep track of whether type inference has terminated for
657
+ // the child visitors to avoid doing unneeded work
658
+ bool keep_going = true;
659
+ RETURN_NOT_OK(visitor->Visit(value_obj, &keep_going));
660
+ }
661
+
662
+ // We do not terminate visiting dicts since we want the union of all
663
+ // observed keys
664
+ ++struct_count_;
665
+ return Status::OK();
666
+ }
667
+
668
+ Status GetStructType(std::shared_ptr<DataType>* out) {
669
+ std::vector<std::shared_ptr<Field>> fields;
670
+ for (auto&& it : struct_inferrers_) {
671
+ std::shared_ptr<DataType> field_type;
672
+ RETURN_NOT_OK(it.second.GetType(&field_type));
673
+ fields.emplace_back(field(it.first, field_type));
674
+ }
675
+ *out = struct_(fields);
676
+ return Status::OK();
677
+ }
678
+
679
+ private:
680
+ bool pandas_null_sentinels_;
681
+ int64_t validate_interval_;
682
+ bool make_unions_;
683
+ int64_t total_count_;
684
+ int64_t none_count_;
685
+ int64_t bool_count_;
686
+ int64_t int_count_;
687
+ int64_t date_count_;
688
+ int64_t time_count_;
689
+ int64_t timestamp_micro_count_;
690
+ std::string timezone_;
691
+ int64_t duration_count_;
692
+ int64_t float_count_;
693
+ int64_t binary_count_;
694
+ int64_t unicode_count_;
695
+ int64_t decimal_count_;
696
+ int64_t list_count_;
697
+ int64_t struct_count_;
698
+ int64_t arrow_scalar_count_;
699
+ int64_t numpy_dtype_count_;
700
+ int64_t interval_count_;
701
+ std::unique_ptr<TypeInferrer> list_inferrer_;
702
+ std::map<std::string, TypeInferrer> struct_inferrers_;
703
+ std::shared_ptr<DataType> scalar_type_;
704
+
705
+ // If we observe a strongly-typed value in e.g. a NumPy array, we can store
706
+ // it here to skip the type counting logic above
707
+ NumPyDtypeUnifier numpy_unifier_;
708
+
709
+ internal::DecimalMetadata max_decimal_metadata_;
710
+
711
+ OwnedRefNoGIL decimal_type_;
712
+ OwnedRefNoGIL interval_types_;
713
+ };
714
+
715
+ // Non-exhaustive type inference
716
+ Result<std::shared_ptr<DataType>> InferArrowType(PyObject* obj, PyObject* mask,
717
+ bool pandas_null_sentinels) {
718
+ if (pandas_null_sentinels) {
719
+ // ARROW-842: If pandas is not installed then null checks will be less
720
+ // comprehensive, but that is okay.
721
+ internal::InitPandasStaticData();
722
+ }
723
+
724
+ std::shared_ptr<DataType> out_type;
725
+ TypeInferrer inferrer(pandas_null_sentinels);
726
+ RETURN_NOT_OK(inferrer.VisitSequence(obj, mask));
727
+ RETURN_NOT_OK(inferrer.GetType(&out_type));
728
+ if (out_type == nullptr) {
729
+ return Status::TypeError("Unable to determine data type");
730
+ } else {
731
+ return std::move(out_type);
732
+ }
733
+ }
734
+
735
+ ARROW_PYTHON_EXPORT
736
+ bool IsPyBool(PyObject* obj) { return internal::PyBoolScalar_Check(obj); }
737
+
738
+ ARROW_PYTHON_EXPORT
739
+ bool IsPyInt(PyObject* obj) { return internal::PyIntScalar_Check(obj); }
740
+
741
+ ARROW_PYTHON_EXPORT
742
+ bool IsPyFloat(PyObject* obj) { return internal::PyFloatScalar_Check(obj); }
743
+
744
+ } // namespace py
745
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/inference.h ADDED
@@ -0,0 +1,64 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // Functions for converting between CPython built-in data structures and Arrow
19
+ // data structures
20
+
21
+ #pragma once
22
+
23
+ #include "arrow/python/platform.h"
24
+
25
+ #include <memory>
26
+
27
+ #include "arrow/python/visibility.h"
28
+ #include "arrow/type.h"
29
+ #include "arrow/util/macros.h"
30
+
31
+ #include "common.h"
32
+
33
+ namespace arrow {
34
+
35
+ class Array;
36
+ class Status;
37
+
38
+ namespace py {
39
+
40
+ // These functions take a sequence input, not arbitrary iterables
41
+
42
+ /// \brief Infer Arrow type from a Python sequence
43
+ /// \param[in] obj the sequence of values
44
+ /// \param[in] mask an optional mask where True values are null. May
45
+ /// be nullptr
46
+ /// \param[in] pandas_null_sentinels use pandas's null value markers
47
+ ARROW_PYTHON_EXPORT
48
+ Result<std::shared_ptr<arrow::DataType>> InferArrowType(PyObject* obj, PyObject* mask,
49
+ bool pandas_null_sentinels);
50
+
51
+ /// Checks whether the passed Python object is a boolean scalar
52
+ ARROW_PYTHON_EXPORT
53
+ bool IsPyBool(PyObject* obj);
54
+
55
+ /// Checks whether the passed Python object is an integer scalar
56
+ ARROW_PYTHON_EXPORT
57
+ bool IsPyInt(PyObject* obj);
58
+
59
+ /// Checks whether the passed Python object is a float scalar
60
+ ARROW_PYTHON_EXPORT
61
+ bool IsPyFloat(PyObject* obj);
62
+
63
+ } // namespace py
64
+ } // namespace arrow
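
A minimal usage sketch of the inference API declared above (illustrative only, not part of the added file). It assumes an embedded interpreter with arrow_init_numpy() already called and the GIL held; the helper name InferTypeOfPyList is invented for the example.

#include <Python.h>

#include <iostream>

#include "arrow/python/inference.h"
#include "arrow/result.h"

// Hypothetical helper: infer the Arrow type of a small mixed int/float list.
arrow::Status InferTypeOfPyList() {
  PyObject* values = Py_BuildValue("[iid]", 1, 2, 3.5);
  if (values == nullptr) {
    return arrow::Status::Invalid("could not build Python list");
  }
  auto maybe_type = arrow::py::InferArrowType(values, /*mask=*/nullptr,
                                              /*pandas_null_sentinels=*/false);
  Py_DECREF(values);
  ARROW_ASSIGN_OR_RAISE(auto type, std::move(maybe_type));
  // Per the counters in TypeInferrer::GetType(), floats win over ints: expect float64.
  std::cout << type->ToString() << std::endl;
  return arrow::Status::OK();
}
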
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/init.cc ADDED
@@ -0,0 +1,24 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // Trigger the array import (inversion of NO_IMPORT_ARRAY)
19
+ #define NUMPY_IMPORT_ARRAY
20
+
21
+ #include "arrow/python/init.h"
22
+ #include "arrow/python/numpy_interop.h"
23
+
24
+ int arrow_init_numpy() { return arrow::py::import_numpy(); }
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/init.h ADDED
@@ -0,0 +1,26 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include "arrow/python/platform.h"
21
+ #include "arrow/python/visibility.h"
22
+
23
+ extern "C" {
24
+ ARROW_PYTHON_EXPORT
25
+ int arrow_init_numpy();
26
+ }
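
The declaration above is the one-time NumPy C API bootstrap. A minimal sketch of how an embedding program might call it is shown below; the exact failure value is an assumption, the sketch only relies on zero meaning success.

#include <Python.h>

#include "arrow/python/init.h"

int main() {
  Py_Initialize();
  // Must run once before any NumPy-dependent arrow::py helpers are used.
  if (arrow_init_numpy() != 0) {
    PyErr_Print();  // assumed behaviour: a Python error is set when the NumPy import fails
    return 1;
  }
  // ... use arrow::py APIs here ...
  Py_Finalize();
  return 0;
}
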
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/io.cc ADDED
@@ -0,0 +1,387 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "io.h"
19
+
20
+ #include <cstdint>
21
+ #include <cstdlib>
22
+ #include <memory>
23
+ #include <mutex>
24
+ #include <string>
25
+
26
+ #include "arrow/io/memory.h"
27
+ #include "arrow/memory_pool.h"
28
+ #include "arrow/status.h"
29
+ #include "arrow/util/logging.h"
30
+
31
+ #include "arrow/python/common.h"
32
+ #include "arrow/python/pyarrow.h"
33
+
34
+ namespace arrow {
35
+
36
+ using arrow::io::TransformInputStream;
37
+
38
+ namespace py {
39
+
40
+ // ----------------------------------------------------------------------
41
+ // Python file
42
+
43
+ // A common interface to a Python file-like object. Must acquire GIL before
44
+ // calling any methods
45
+ class PythonFile {
46
+ public:
47
+ explicit PythonFile(PyObject* file) : file_(file), checked_read_buffer_(false) {
48
+ Py_INCREF(file);
49
+ }
50
+
51
+ Status CheckClosed() const {
52
+ if (!file_) {
53
+ return Status::Invalid("operation on closed Python file");
54
+ }
55
+ return Status::OK();
56
+ }
57
+
58
+ Status Close() {
59
+ if (file_) {
60
+ PyObject* result = cpp_PyObject_CallMethod(file_.obj(), "close", "()");
61
+ Py_XDECREF(result);
62
+ file_.reset();
63
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
64
+ }
65
+ return Status::OK();
66
+ }
67
+
68
+ Status Abort() {
69
+ file_.reset();
70
+ return Status::OK();
71
+ }
72
+
73
+ bool closed() const {
74
+ if (!file_) {
75
+ return true;
76
+ }
77
+ PyObject* result = PyObject_GetAttrString(file_.obj(), "closed");
78
+ if (result == NULL) {
79
+ // Can't propagate the error, so write it out and return an arbitrary value
80
+ PyErr_WriteUnraisable(NULL);
81
+ return true;
82
+ }
83
+ int ret = PyObject_IsTrue(result);
84
+ Py_XDECREF(result);
85
+ if (ret < 0) {
86
+ PyErr_WriteUnraisable(NULL);
87
+ return true;
88
+ }
89
+ return ret != 0;
90
+ }
91
+
92
+ Status Seek(int64_t position, int whence) {
93
+ RETURN_NOT_OK(CheckClosed());
94
+
95
+ // NOTE: `long long` is at least 64 bits in the C standard, the cast below is
96
+ // therefore safe.
97
+
98
+ // whence: 0 for relative to start of file, 2 for end of file
99
+ PyObject* result = cpp_PyObject_CallMethod(file_.obj(), "seek", "(Li)",
100
+ static_cast<long long>(position), whence);
101
+ Py_XDECREF(result);
102
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
103
+ return Status::OK();
104
+ }
105
+
106
+ Status Read(int64_t nbytes, PyObject** out) {
107
+ RETURN_NOT_OK(CheckClosed());
108
+
109
+ PyObject* result = cpp_PyObject_CallMethod(file_.obj(), "read", "(L)",
110
+ static_cast<long long>(nbytes));
111
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
112
+ *out = result;
113
+ return Status::OK();
114
+ }
115
+
116
+ Status ReadBuffer(int64_t nbytes, PyObject** out) {
117
+ PyObject* result = cpp_PyObject_CallMethod(file_.obj(), "read_buffer", "(L)",
118
+ static_cast<long long>(nbytes));
119
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
120
+ *out = result;
121
+ return Status::OK();
122
+ }
123
+
124
+ Status Write(const void* data, int64_t nbytes) {
125
+ RETURN_NOT_OK(CheckClosed());
126
+
127
+ // Since the data isn't owned, we have to make a copy
128
+ PyObject* py_data =
129
+ PyBytes_FromStringAndSize(reinterpret_cast<const char*>(data), nbytes);
130
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
131
+
132
+ PyObject* result = cpp_PyObject_CallMethod(file_.obj(), "write", "(O)", py_data);
133
+ Py_XDECREF(py_data);
134
+ Py_XDECREF(result);
135
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
136
+ return Status::OK();
137
+ }
138
+
139
+ Status Write(const std::shared_ptr<Buffer>& buffer) {
140
+ RETURN_NOT_OK(CheckClosed());
141
+
142
+ PyObject* py_data = wrap_buffer(buffer);
143
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
144
+
145
+ PyObject* result = cpp_PyObject_CallMethod(file_.obj(), "write", "(O)", py_data);
146
+ Py_XDECREF(py_data);
147
+ Py_XDECREF(result);
148
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
149
+ return Status::OK();
150
+ }
151
+
152
+ Result<int64_t> Tell() {
153
+ RETURN_NOT_OK(CheckClosed());
154
+
155
+ PyObject* result = cpp_PyObject_CallMethod(file_.obj(), "tell", "()");
156
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
157
+
158
+ int64_t position = PyLong_AsLongLong(result);
159
+ Py_DECREF(result);
160
+
161
+ // PyLong_AsLongLong can raise OverflowError
162
+ PY_RETURN_IF_ERROR(StatusCode::IOError);
163
+ return position;
164
+ }
165
+
166
+ std::mutex& lock() { return lock_; }
167
+
168
+ bool HasReadBuffer() {
169
+ if (!checked_read_buffer_) { // we don't want to check this each time
170
+ has_read_buffer_ = PyObject_HasAttrString(file_.obj(), "read_buffer") == 1;
171
+ checked_read_buffer_ = true;
172
+ }
173
+ return has_read_buffer_;
174
+ }
175
+
176
+ private:
177
+ std::mutex lock_;
178
+ OwnedRefNoGIL file_;
179
+ bool has_read_buffer_;
180
+ bool checked_read_buffer_;
181
+ };
182
+
183
+ // ----------------------------------------------------------------------
184
+ // Seekable input stream
185
+
186
+ PyReadableFile::PyReadableFile(PyObject* file) { file_.reset(new PythonFile(file)); }
187
+
188
+ // The destructor does not close the underlying Python file object, as
189
+ // there may be multiple references to it. Instead let the Python
190
+ // destructor do its job.
191
+ PyReadableFile::~PyReadableFile() {}
192
+
193
+ Status PyReadableFile::Abort() {
194
+ return SafeCallIntoPython([this]() { return file_->Abort(); });
195
+ }
196
+
197
+ Status PyReadableFile::Close() {
198
+ return SafeCallIntoPython([this]() { return file_->Close(); });
199
+ }
200
+
201
+ bool PyReadableFile::closed() const {
202
+ bool res;
203
+ Status st = SafeCallIntoPython([this, &res]() {
204
+ res = file_->closed();
205
+ return Status::OK();
206
+ });
207
+ return res;
208
+ }
209
+
210
+ Status PyReadableFile::Seek(int64_t position) {
211
+ return SafeCallIntoPython([=] { return file_->Seek(position, 0); });
212
+ }
213
+
214
+ Result<int64_t> PyReadableFile::Tell() const {
215
+ return SafeCallIntoPython([=]() -> Result<int64_t> { return file_->Tell(); });
216
+ }
217
+
218
+ Result<int64_t> PyReadableFile::Read(int64_t nbytes, void* out) {
219
+ return SafeCallIntoPython([=]() -> Result<int64_t> {
220
+ OwnedRef bytes;
221
+ RETURN_NOT_OK(file_->Read(nbytes, bytes.ref()));
222
+ PyObject* bytes_obj = bytes.obj();
223
+ DCHECK(bytes_obj != NULL);
224
+
225
+ Py_buffer py_buf;
226
+ if (!PyObject_GetBuffer(bytes_obj, &py_buf, PyBUF_ANY_CONTIGUOUS)) {
227
+ const uint8_t* data = reinterpret_cast<const uint8_t*>(py_buf.buf);
228
+ std::memcpy(out, data, py_buf.len);
229
+ int64_t len = py_buf.len;
230
+ PyBuffer_Release(&py_buf);
231
+ return len;
232
+ } else {
233
+ return Status::TypeError(
234
+ "Python file read() should have returned a bytes object or an object "
235
+ "supporting the buffer protocol, got '",
236
+ Py_TYPE(bytes_obj)->tp_name, "' (did you open the file in binary mode?)");
237
+ }
238
+ });
239
+ }
240
+
241
+ Result<std::shared_ptr<Buffer>> PyReadableFile::Read(int64_t nbytes) {
242
+ return SafeCallIntoPython([=]() -> Result<std::shared_ptr<Buffer>> {
243
+ OwnedRef buffer_obj;
244
+ if (file_->HasReadBuffer()) {
245
+ RETURN_NOT_OK(file_->ReadBuffer(nbytes, buffer_obj.ref()));
246
+ } else {
247
+ RETURN_NOT_OK(file_->Read(nbytes, buffer_obj.ref()));
248
+ }
249
+ DCHECK(buffer_obj.obj() != NULL);
250
+
251
+ return PyBuffer::FromPyObject(buffer_obj.obj());
252
+ });
253
+ }
254
+
255
+ Result<int64_t> PyReadableFile::ReadAt(int64_t position, int64_t nbytes, void* out) {
256
+ std::lock_guard<std::mutex> guard(file_->lock());
257
+ return SafeCallIntoPython([=]() -> Result<int64_t> {
258
+ RETURN_NOT_OK(Seek(position));
259
+ return Read(nbytes, out);
260
+ });
261
+ }
262
+
263
+ Result<std::shared_ptr<Buffer>> PyReadableFile::ReadAt(int64_t position, int64_t nbytes) {
264
+ std::lock_guard<std::mutex> guard(file_->lock());
265
+ return SafeCallIntoPython([=]() -> Result<std::shared_ptr<Buffer>> {
266
+ RETURN_NOT_OK(Seek(position));
267
+ return Read(nbytes);
268
+ });
269
+ }
270
+
271
+ Result<int64_t> PyReadableFile::GetSize() {
272
+ return SafeCallIntoPython([=]() -> Result<int64_t> {
273
+ ARROW_ASSIGN_OR_RAISE(int64_t current_position, file_->Tell());
274
+ RETURN_NOT_OK(file_->Seek(0, 2));
275
+
276
+ ARROW_ASSIGN_OR_RAISE(int64_t file_size, file_->Tell());
277
+ // Restore previous file position
278
+ RETURN_NOT_OK(file_->Seek(current_position, 0));
279
+
280
+ return file_size;
281
+ });
282
+ }
283
+
284
+ // ----------------------------------------------------------------------
285
+ // Output stream
286
+
287
+ PyOutputStream::PyOutputStream(PyObject* file) : position_(0) {
288
+ file_.reset(new PythonFile(file));
289
+ }
290
+
291
+ // The destructor does not close the underlying Python file object, as
292
+ // there may be multiple references to it. Instead let the Python
293
+ // destructor do its job.
294
+ PyOutputStream::~PyOutputStream() {}
295
+
296
+ Status PyOutputStream::Abort() {
297
+ return SafeCallIntoPython([=]() { return file_->Abort(); });
298
+ }
299
+
300
+ Status PyOutputStream::Close() {
301
+ return SafeCallIntoPython([=]() { return file_->Close(); });
302
+ }
303
+
304
+ bool PyOutputStream::closed() const {
305
+ bool res;
306
+ Status st = SafeCallIntoPython([this, &res]() {
307
+ res = file_->closed();
308
+ return Status::OK();
309
+ });
310
+ return res;
311
+ }
312
+
313
+ Result<int64_t> PyOutputStream::Tell() const { return position_; }
314
+
315
+ Status PyOutputStream::Write(const void* data, int64_t nbytes) {
316
+ return SafeCallIntoPython([=]() {
317
+ position_ += nbytes;
318
+ return file_->Write(data, nbytes);
319
+ });
320
+ }
321
+
322
+ Status PyOutputStream::Write(const std::shared_ptr<Buffer>& buffer) {
323
+ return SafeCallIntoPython([=]() {
324
+ position_ += buffer->size();
325
+ return file_->Write(buffer);
326
+ });
327
+ }
328
+
329
+ // ----------------------------------------------------------------------
330
+ // Foreign buffer
331
+
332
+ Status PyForeignBuffer::Make(const uint8_t* data, int64_t size, PyObject* base,
333
+ std::shared_ptr<Buffer>* out) {
334
+ PyForeignBuffer* buf = new PyForeignBuffer(data, size, base);
335
+ if (buf == NULL) {
336
+ return Status::OutOfMemory("could not allocate foreign buffer object");
337
+ } else {
338
+ *out = std::shared_ptr<Buffer>(buf);
339
+ return Status::OK();
340
+ }
341
+ }
342
+
343
+ // ----------------------------------------------------------------------
344
+ // TransformInputStream::TransformFunc wrapper
345
+
346
+ struct TransformFunctionWrapper {
347
+ TransformFunctionWrapper(TransformCallback cb, PyObject* arg)
348
+ : cb_(std::move(cb)), arg_(std::make_shared<OwnedRefNoGIL>(arg)) {
349
+ Py_INCREF(arg);
350
+ }
351
+
352
+ Result<std::shared_ptr<Buffer>> operator()(const std::shared_ptr<Buffer>& src) {
353
+ return SafeCallIntoPython([=]() -> Result<std::shared_ptr<Buffer>> {
354
+ std::shared_ptr<Buffer> dest;
355
+ cb_(arg_->obj(), src, &dest);
356
+ RETURN_NOT_OK(CheckPyError());
357
+ return dest;
358
+ });
359
+ }
360
+
361
+ protected:
362
+ // Need to wrap OwnedRefNoGIL because std::function needs the callable
363
+ // to be copy-constructible...
364
+ TransformCallback cb_;
365
+ std::shared_ptr<OwnedRefNoGIL> arg_;
366
+ };
367
+
368
+ std::shared_ptr<::arrow::io::InputStream> MakeTransformInputStream(
369
+ std::shared_ptr<::arrow::io::InputStream> wrapped, TransformInputStreamVTable vtable,
370
+ PyObject* handler) {
371
+ TransformInputStream::TransformFunc transform(
372
+ TransformFunctionWrapper{std::move(vtable.transform), handler});
373
+ return std::make_shared<TransformInputStream>(std::move(wrapped), std::move(transform));
374
+ }
375
+
376
+ std::shared_ptr<StreamWrapFunc> MakeStreamTransformFunc(TransformInputStreamVTable vtable,
377
+ PyObject* handler) {
378
+ TransformInputStream::TransformFunc transform(
379
+ TransformFunctionWrapper{std::move(vtable.transform), handler});
380
+ StreamWrapFunc func = [transform](std::shared_ptr<::arrow::io::InputStream> wrapped) {
381
+ return std::make_shared<TransformInputStream>(wrapped, transform);
382
+ };
383
+ return std::make_shared<StreamWrapFunc>(func);
384
+ }
385
+
386
+ } // namespace py
387
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/io.h ADDED
@@ -0,0 +1,121 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <memory>
21
+
22
+ #include "arrow/io/interfaces.h"
23
+ #include "arrow/io/transform.h"
24
+
25
+ #include "arrow/python/common.h"
26
+ #include "arrow/python/visibility.h"
27
+
28
+ namespace arrow {
29
+ namespace py {
30
+
31
+ class ARROW_NO_EXPORT PythonFile;
32
+
33
+ class ARROW_PYTHON_EXPORT PyReadableFile : public io::RandomAccessFile {
34
+ public:
35
+ explicit PyReadableFile(PyObject* file);
36
+ ~PyReadableFile() override;
37
+
38
+ Status Close() override;
39
+ Status Abort() override;
40
+ bool closed() const override;
41
+
42
+ Result<int64_t> Read(int64_t nbytes, void* out) override;
43
+ Result<std::shared_ptr<Buffer>> Read(int64_t nbytes) override;
44
+
45
+ // Thread-safe version
46
+ Result<int64_t> ReadAt(int64_t position, int64_t nbytes, void* out) override;
47
+
48
+ // Thread-safe version
49
+ Result<std::shared_ptr<Buffer>> ReadAt(int64_t position, int64_t nbytes) override;
50
+
51
+ Result<int64_t> GetSize() override;
52
+
53
+ Status Seek(int64_t position) override;
54
+
55
+ Result<int64_t> Tell() const override;
56
+
57
+ private:
58
+ std::unique_ptr<PythonFile> file_;
59
+ };
60
+
61
+ class ARROW_PYTHON_EXPORT PyOutputStream : public io::OutputStream {
62
+ public:
63
+ explicit PyOutputStream(PyObject* file);
64
+ ~PyOutputStream() override;
65
+
66
+ Status Close() override;
67
+ Status Abort() override;
68
+ bool closed() const override;
69
+ Result<int64_t> Tell() const override;
70
+ Status Write(const void* data, int64_t nbytes) override;
71
+ Status Write(const std::shared_ptr<Buffer>& buffer) override;
72
+
73
+ private:
74
+ std::unique_ptr<PythonFile> file_;
75
+ int64_t position_;
76
+ };
77
+
78
+ // TODO(wesm): seekable output files
79
+
80
+ // A Buffer subclass that keeps a PyObject reference throughout its
81
+ // lifetime, such that the Python object is kept alive as long as the
82
+ // C++ buffer is still needed.
83
+ // Keeping the reference in a Python wrapper would be incorrect as
84
+ // the Python wrapper can get destroyed even though the wrapped C++
85
+ // buffer is still alive (ARROW-2270).
86
+ class ARROW_PYTHON_EXPORT PyForeignBuffer : public Buffer {
87
+ public:
88
+ static Status Make(const uint8_t* data, int64_t size, PyObject* base,
89
+ std::shared_ptr<Buffer>* out);
90
+
91
+ private:
92
+ PyForeignBuffer(const uint8_t* data, int64_t size, PyObject* base)
93
+ : Buffer(data, size) {
94
+ Py_INCREF(base);
95
+ base_.reset(base);
96
+ }
97
+
98
+ OwnedRefNoGIL base_;
99
+ };
100
+
101
+ // All this rigamarole because Cython is really poor with std::function<>
102
+
103
+ using TransformCallback = std::function<void(
104
+ PyObject*, const std::shared_ptr<Buffer>& src, std::shared_ptr<Buffer>* out)>;
105
+
106
+ struct TransformInputStreamVTable {
107
+ TransformCallback transform;
108
+ };
109
+
110
+ ARROW_PYTHON_EXPORT
111
+ std::shared_ptr<::arrow::io::InputStream> MakeTransformInputStream(
112
+ std::shared_ptr<::arrow::io::InputStream> wrapped, TransformInputStreamVTable vtable,
113
+ PyObject* arg);
114
+
115
+ using StreamWrapFunc = std::function<Result<std::shared_ptr<io::InputStream>>(
116
+ std::shared_ptr<io::InputStream>)>;
117
+ ARROW_PYTHON_EXPORT
118
+ std::shared_ptr<StreamWrapFunc> MakeStreamTransformFunc(TransformInputStreamVTable vtable,
119
+ PyObject* handler);
120
+ } // namespace py
121
+ } // namespace arrow
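
As a rough illustration of the stream wrappers declared above, the sketch below writes through a PyOutputStream that wraps a Python file-like object (for example an io.BytesIO). It assumes the GIL is held by the caller and the object was opened in binary mode; WriteThroughPython is an invented name for the example.

#include <Python.h>

#include "arrow/python/io.h"
#include "arrow/result.h"
#include "arrow/status.h"

arrow::Status WriteThroughPython(PyObject* binary_file_like) {
  auto stream = std::make_shared<arrow::py::PyOutputStream>(binary_file_like);
  ARROW_RETURN_NOT_OK(stream->Write("hello", 5));      // forwards to file.write(b"hello")
  ARROW_ASSIGN_OR_RAISE(int64_t pos, stream->Tell());  // position is tracked on the C++ side
  return pos == 5 ? arrow::Status::OK()
                  : arrow::Status::Invalid("unexpected stream position");
}
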
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/ipc.cc ADDED
@@ -0,0 +1,133 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "ipc.h"
19
+
20
+ #include <memory>
21
+
22
+ #include "arrow/compute/cast.h"
23
+ #include "arrow/python/pyarrow.h"
24
+
25
+ namespace arrow {
26
+ namespace py {
27
+
28
+ PyRecordBatchReader::PyRecordBatchReader() {}
29
+
30
+ Status PyRecordBatchReader::Init(std::shared_ptr<Schema> schema, PyObject* iterable) {
31
+ schema_ = std::move(schema);
32
+
33
+ iterator_.reset(PyObject_GetIter(iterable));
34
+ return CheckPyError();
35
+ }
36
+
37
+ std::shared_ptr<Schema> PyRecordBatchReader::schema() const { return schema_; }
38
+
39
+ Status PyRecordBatchReader::ReadNext(std::shared_ptr<RecordBatch>* batch) {
40
+ PyAcquireGIL lock;
41
+
42
+ if (!iterator_) {
43
+ // End of stream
44
+ batch->reset();
45
+ return Status::OK();
46
+ }
47
+
48
+ OwnedRef py_batch(PyIter_Next(iterator_.obj()));
49
+ if (!py_batch) {
50
+ RETURN_IF_PYERROR();
51
+ // End of stream
52
+ batch->reset();
53
+ iterator_.reset();
54
+ return Status::OK();
55
+ }
56
+
57
+ return unwrap_batch(py_batch.obj()).Value(batch);
58
+ }
59
+
60
+ Result<std::shared_ptr<RecordBatchReader>> PyRecordBatchReader::Make(
61
+ std::shared_ptr<Schema> schema, PyObject* iterable) {
62
+ auto reader = std::shared_ptr<PyRecordBatchReader>(new PyRecordBatchReader());
63
+ RETURN_NOT_OK(reader->Init(std::move(schema), iterable));
64
+ return reader;
65
+ }
66
+
67
+ CastingRecordBatchReader::CastingRecordBatchReader() = default;
68
+
69
+ Status CastingRecordBatchReader::Init(std::shared_ptr<RecordBatchReader> parent,
70
+ std::shared_ptr<Schema> schema) {
71
+ std::shared_ptr<Schema> src = parent->schema();
72
+
73
+ // The check for names has already been done in Python where it's easier to
74
+ // generate a nice error message.
75
+ int num_fields = schema->num_fields();
76
+ if (src->num_fields() != num_fields) {
77
+ return Status::Invalid("Number of fields not equal");
78
+ }
79
+
80
+ // Ensure all columns can be cast before succeeding
81
+ for (int i = 0; i < num_fields; i++) {
82
+ if (!compute::CanCast(*src->field(i)->type(), *schema->field(i)->type())) {
83
+ return Status::TypeError("Field ", i, " cannot be cast from ",
84
+ src->field(i)->type()->ToString(), " to ",
85
+ schema->field(i)->type()->ToString());
86
+ }
87
+ }
88
+
89
+ parent_ = std::move(parent);
90
+ schema_ = std::move(schema);
91
+
92
+ return Status::OK();
93
+ }
94
+
95
+ std::shared_ptr<Schema> CastingRecordBatchReader::schema() const { return schema_; }
96
+
97
+ Status CastingRecordBatchReader::ReadNext(std::shared_ptr<RecordBatch>* batch) {
98
+ std::shared_ptr<RecordBatch> out;
99
+ ARROW_RETURN_NOT_OK(parent_->ReadNext(&out));
100
+ if (!out) {
101
+ batch->reset();
102
+ return Status::OK();
103
+ }
104
+
105
+ auto num_columns = out->num_columns();
106
+ auto options = compute::CastOptions::Safe();
107
+ ArrayVector columns(num_columns);
108
+ for (int i = 0; i < num_columns; i++) {
109
+ const Array& src = *out->column(i);
110
+ if (!schema_->field(i)->nullable() && src.null_count() > 0) {
111
+ return Status::Invalid(
112
+ "Can't cast array that contains nulls to non-nullable field at index ", i);
113
+ }
114
+
115
+ ARROW_ASSIGN_OR_RAISE(columns[i],
116
+ compute::Cast(src, schema_->field(i)->type(), options));
117
+ }
118
+
119
+ *batch = RecordBatch::Make(schema_, out->num_rows(), std::move(columns));
120
+ return Status::OK();
121
+ }
122
+
123
+ Result<std::shared_ptr<RecordBatchReader>> CastingRecordBatchReader::Make(
124
+ std::shared_ptr<RecordBatchReader> parent, std::shared_ptr<Schema> schema) {
125
+ auto reader = std::shared_ptr<CastingRecordBatchReader>(new CastingRecordBatchReader());
126
+ ARROW_RETURN_NOT_OK(reader->Init(parent, schema));
127
+ return reader;
128
+ }
129
+
130
+ Status CastingRecordBatchReader::Close() { return parent_->Close(); }
131
+
132
+ } // namespace py
133
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/ipc.h ADDED
@@ -0,0 +1,72 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <memory>
21
+
22
+ #include "arrow/python/common.h"
23
+ #include "arrow/python/visibility.h"
24
+ #include "arrow/record_batch.h"
25
+ #include "arrow/result.h"
26
+ #include "arrow/util/macros.h"
27
+
28
+ namespace arrow {
29
+ namespace py {
30
+
31
+ class ARROW_PYTHON_EXPORT PyRecordBatchReader : public RecordBatchReader {
32
+ public:
33
+ std::shared_ptr<Schema> schema() const override;
34
+
35
+ Status ReadNext(std::shared_ptr<RecordBatch>* batch) override;
36
+
37
+ // For use from Cython
38
+ // Assumes that `iterable` is borrowed
39
+ static Result<std::shared_ptr<RecordBatchReader>> Make(std::shared_ptr<Schema>,
40
+ PyObject* iterable);
41
+
42
+ protected:
43
+ PyRecordBatchReader();
44
+
45
+ Status Init(std::shared_ptr<Schema>, PyObject* iterable);
46
+
47
+ std::shared_ptr<Schema> schema_;
48
+ OwnedRefNoGIL iterator_;
49
+ };
50
+
51
+ class ARROW_PYTHON_EXPORT CastingRecordBatchReader : public RecordBatchReader {
52
+ public:
53
+ std::shared_ptr<Schema> schema() const override;
54
+
55
+ Status ReadNext(std::shared_ptr<RecordBatch>* batch) override;
56
+
57
+ static Result<std::shared_ptr<RecordBatchReader>> Make(
58
+ std::shared_ptr<RecordBatchReader> parent, std::shared_ptr<Schema> schema);
59
+
60
+ Status Close() override;
61
+
62
+ protected:
63
+ CastingRecordBatchReader();
64
+
65
+ Status Init(std::shared_ptr<RecordBatchReader> parent, std::shared_ptr<Schema> schema);
66
+
67
+ std::shared_ptr<RecordBatchReader> parent_;
68
+ std::shared_ptr<Schema> schema_;
69
+ };
70
+
71
+ } // namespace py
72
+ } // namespace arrow
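
A brief sketch of the casting reader declared above: it wraps an existing RecordBatchReader so that each batch is cast to a target schema as it is read. The single int32 column named "x" and the function name WidenToInt64 are assumptions made for the example.

#include "arrow/python/ipc.h"
#include "arrow/type.h"

arrow::Result<std::shared_ptr<arrow::RecordBatchReader>> WidenToInt64(
    std::shared_ptr<arrow::RecordBatchReader> source) {
  // The target schema must have the same number of fields, with only castable types.
  auto target = arrow::schema({arrow::field("x", arrow::int64())});
  return arrow::py::CastingRecordBatchReader::Make(std::move(source), target);
}
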
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/iterators.h ADDED
@@ -0,0 +1,194 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <utility>
21
+
22
+ #include "arrow/array/array_primitive.h"
23
+
24
+ #include "arrow/python/common.h"
25
+ #include "arrow/python/numpy_internal.h"
26
+
27
+ namespace arrow {
28
+ namespace py {
29
+ namespace internal {
30
+
31
+ using arrow::internal::checked_cast;
32
+
33
+ // Visit the Python sequence, calling the given callable on each element. If
34
+ // the callable returns a non-OK status, iteration stops and the status is
35
+ // returned.
36
+ //
37
+ // The call signature for Visitor must be
38
+ //
39
+ // Visit(PyObject* obj, int64_t index, bool* keep_going)
40
+ //
41
+ // If keep_going is set to false, the iteration terminates
42
+ template <class VisitorFunc>
43
+ inline Status VisitSequenceGeneric(PyObject* obj, int64_t offset, VisitorFunc&& func) {
44
+ // VisitorFunc may set to false to terminate iteration
45
+ bool keep_going = true;
46
+
47
+ if (PyArray_Check(obj)) {
48
+ PyArrayObject* arr_obj = reinterpret_cast<PyArrayObject*>(obj);
49
+ if (PyArray_NDIM(arr_obj) != 1) {
50
+ return Status::Invalid("Only 1D arrays accepted");
51
+ }
52
+
53
+ if (PyArray_DESCR(arr_obj)->type_num == NPY_OBJECT) {
54
+ // It's an array object, we can fetch object pointers directly
55
+ const Ndarray1DIndexer<PyObject*> objects(arr_obj);
56
+ for (int64_t i = offset; keep_going && i < objects.size(); ++i) {
57
+ RETURN_NOT_OK(func(objects[i], i, &keep_going));
58
+ }
59
+ return Status::OK();
60
+ }
61
+ // It's a non-object array, fall back on regular sequence access.
62
+ // (note PyArray_GETITEM() is slightly different: it returns standard
63
+ // Python types, not Numpy scalar types)
64
+ // This code path is inefficient: callers should implement dedicated
65
+ // logic for non-object arrays.
66
+ }
67
+ if (PySequence_Check(obj)) {
68
+ if (PyList_Check(obj) || PyTuple_Check(obj)) {
69
+ // Use fast item access
70
+ const Py_ssize_t size = PySequence_Fast_GET_SIZE(obj);
71
+ for (Py_ssize_t i = offset; keep_going && i < size; ++i) {
72
+ PyObject* value = PySequence_Fast_GET_ITEM(obj, i);
73
+ RETURN_NOT_OK(func(value, static_cast<int64_t>(i), &keep_going));
74
+ }
75
+ } else {
76
+ // Regular sequence: avoid making a potentially large copy
77
+ const Py_ssize_t size = PySequence_Size(obj);
78
+ RETURN_IF_PYERROR();
79
+ for (Py_ssize_t i = offset; keep_going && i < size; ++i) {
80
+ OwnedRef value_ref(PySequence_ITEM(obj, i));
81
+ RETURN_IF_PYERROR();
82
+ RETURN_NOT_OK(func(value_ref.obj(), static_cast<int64_t>(i), &keep_going));
83
+ }
84
+ }
85
+ } else {
86
+ return Status::TypeError("Object is not a sequence");
87
+ }
88
+ return Status::OK();
89
+ }
90
+
91
+ // Visit sequence with no null mask
92
+ template <class VisitorFunc>
93
+ inline Status VisitSequence(PyObject* obj, int64_t offset, VisitorFunc&& func) {
94
+ return VisitSequenceGeneric(
95
+ obj, offset, [&func](PyObject* value, int64_t i /* unused */, bool* keep_going) {
96
+ return func(value, keep_going);
97
+ });
98
+ }
99
+
100
+ /// Visit sequence with null mask
101
+ template <class VisitorFunc>
102
+ inline Status VisitSequenceMasked(PyObject* obj, PyObject* mo, int64_t offset,
103
+ VisitorFunc&& func) {
104
+ if (PyArray_Check(mo)) {
105
+ PyArrayObject* mask = reinterpret_cast<PyArrayObject*>(mo);
106
+ if (PyArray_NDIM(mask) != 1) {
107
+ return Status::Invalid("Mask must be 1D array");
108
+ }
109
+ if (PyArray_SIZE(mask) != static_cast<int64_t>(PySequence_Size(obj))) {
110
+ return Status::Invalid("Mask was a different length from sequence being converted");
111
+ }
112
+
113
+ const int dtype = fix_numpy_type_num(PyArray_DESCR(mask)->type_num);
114
+ if (dtype == NPY_BOOL) {
115
+ Ndarray1DIndexer<uint8_t> mask_values(mask);
116
+
117
+ return VisitSequenceGeneric(
118
+ obj, offset,
119
+ [&func, &mask_values](PyObject* value, int64_t i, bool* keep_going) {
120
+ return func(value, mask_values[i], keep_going);
121
+ });
122
+ } else {
123
+ return Status::TypeError("Mask must be boolean dtype");
124
+ }
125
+ } else if (py::is_array(mo)) {
126
+ auto unwrap_mask_result = unwrap_array(mo);
127
+ ARROW_RETURN_NOT_OK(unwrap_mask_result);
128
+ std::shared_ptr<Array> mask_ = unwrap_mask_result.ValueOrDie();
129
+ if (mask_->type_id() != Type::type::BOOL) {
130
+ return Status::TypeError("Mask must be an array of booleans");
131
+ }
132
+
133
+ if (mask_->length() != PySequence_Size(obj)) {
134
+ return Status::Invalid("Mask was a different length from sequence being converted");
135
+ }
136
+
137
+ if (mask_->null_count() != 0) {
138
+ return Status::TypeError("Mask must be an array of booleans");
139
+ }
140
+
141
+ BooleanArray* boolmask = checked_cast<BooleanArray*>(mask_.get());
142
+ return VisitSequenceGeneric(
143
+ obj, offset, [&func, &boolmask](PyObject* value, int64_t i, bool* keep_going) {
144
+ return func(value, boolmask->Value(i), keep_going);
145
+ });
146
+ } else if (PySequence_Check(mo)) {
147
+ if (PySequence_Size(mo) != PySequence_Size(obj)) {
148
+ return Status::Invalid("Mask was a different length from sequence being converted");
149
+ }
150
+ RETURN_IF_PYERROR();
151
+
152
+ return VisitSequenceGeneric(
153
+ obj, offset, [&func, &mo](PyObject* value, int64_t i, bool* keep_going) {
154
+ OwnedRef value_ref(PySequence_ITEM(mo, i));
155
+ if (!PyBool_Check(value_ref.obj()))
156
+ return Status::TypeError("Mask must be a sequence of booleans");
157
+ return func(value, value_ref.obj() == Py_True, keep_going);
158
+ });
159
+ } else {
160
+ return Status::Invalid("Null mask must be a NumPy array, Arrow array or a Sequence");
161
+ }
162
+
163
+ return Status::OK();
164
+ }
165
+
166
+ // Like VisitSequence, but accepts any generic iterable (including
167
+ // non-restartable iterators, e.g. generators).
168
+ //
169
+ // The call signature for VisitorFunc must be Visit(PyObject*, bool*
170
+ // keep_going). If keep_going is set to false, the iteration terminates
171
+ template <class VisitorFunc>
172
+ inline Status VisitIterable(PyObject* obj, VisitorFunc&& func) {
173
+ if (PySequence_Check(obj)) {
174
+ // Numpy arrays fall here as well
175
+ return VisitSequence(obj, /*offset=*/0, std::forward<VisitorFunc>(func));
176
+ }
177
+ // Fall back on the iterator protocol
178
+ OwnedRef iter_ref(PyObject_GetIter(obj));
179
+ PyObject* iter = iter_ref.obj();
180
+ RETURN_IF_PYERROR();
181
+ PyObject* value;
182
+
183
+ bool keep_going = true;
184
+ while (keep_going && (value = PyIter_Next(iter))) {
185
+ OwnedRef value_ref(value);
186
+ RETURN_NOT_OK(func(value_ref.obj(), &keep_going));
187
+ }
188
+ RETURN_IF_PYERROR(); // __next__() might have raised
189
+ return Status::OK();
190
+ }
191
+
192
+ } // namespace internal
193
+ } // namespace py
194
+ } // namespace arrow
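
To make the visitor contract above concrete, here is a small sketch (GIL assumed held) that uses internal::VisitIterable to count the non-None elements of an arbitrary Python iterable; CountNonNone is a made-up name for the example.

#include <Python.h>

#include "arrow/python/iterators.h"
#include "arrow/result.h"
#include "arrow/status.h"

arrow::Result<int64_t> CountNonNone(PyObject* iterable) {
  int64_t n = 0;
  ARROW_RETURN_NOT_OK(arrow::py::internal::VisitIterable(
      iterable, [&n](PyObject* value, bool* keep_going) {
        if (value != Py_None) ++n;
        *keep_going = true;  // never stop early in this example
        return arrow::Status::OK();
      }));
  return n;
}
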
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_convert.cc ADDED
@@ -0,0 +1,551 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "arrow/python/numpy_interop.h"
19
+
20
+ #include "arrow/python/numpy_convert.h"
21
+
22
+ #include <cstdint>
23
+ #include <memory>
24
+ #include <string>
25
+ #include <vector>
26
+
27
+ #include "arrow/buffer.h"
28
+ #include "arrow/sparse_tensor.h"
29
+ #include "arrow/tensor.h"
30
+ #include "arrow/type.h"
31
+ #include "arrow/util/logging.h"
32
+
33
+ #include "arrow/python/common.h"
34
+ #include "arrow/python/pyarrow.h"
35
+ #include "arrow/python/type_traits.h"
36
+
37
+ namespace arrow {
38
+ namespace py {
39
+
40
+ NumPyBuffer::NumPyBuffer(PyObject* ao) : Buffer(nullptr, 0) {
41
+ PyAcquireGIL lock;
42
+ arr_ = ao;
43
+ Py_INCREF(ao);
44
+
45
+ if (PyArray_Check(ao)) {
46
+ PyArrayObject* ndarray = reinterpret_cast<PyArrayObject*>(ao);
47
+ auto ptr = reinterpret_cast<uint8_t*>(PyArray_DATA(ndarray));
48
+ data_ = const_cast<const uint8_t*>(ptr);
49
+ size_ = PyArray_NBYTES(ndarray);
50
+ capacity_ = size_;
51
+ is_mutable_ = !!(PyArray_FLAGS(ndarray) & NPY_ARRAY_WRITEABLE);
52
+ }
53
+ }
54
+
55
+ NumPyBuffer::~NumPyBuffer() {
56
+ PyAcquireGIL lock;
57
+ Py_XDECREF(arr_);
58
+ }
59
+
60
+ #define TO_ARROW_TYPE_CASE(NPY_NAME, FACTORY) \
61
+ case NPY_##NPY_NAME: \
62
+ return FACTORY();
63
+
64
+ namespace {
65
+
66
+ Result<std::shared_ptr<DataType>> GetTensorType(PyObject* dtype) {
67
+ if (!PyObject_TypeCheck(dtype, &PyArrayDescr_Type)) {
68
+ return Status::TypeError("Did not pass numpy.dtype object");
69
+ }
70
+ PyArray_Descr* descr = reinterpret_cast<PyArray_Descr*>(dtype);
71
+ int type_num = fix_numpy_type_num(descr->type_num);
72
+
73
+ switch (type_num) {
74
+ TO_ARROW_TYPE_CASE(BOOL, uint8);
75
+ TO_ARROW_TYPE_CASE(INT8, int8);
76
+ TO_ARROW_TYPE_CASE(INT16, int16);
77
+ TO_ARROW_TYPE_CASE(INT32, int32);
78
+ TO_ARROW_TYPE_CASE(INT64, int64);
79
+ TO_ARROW_TYPE_CASE(UINT8, uint8);
80
+ TO_ARROW_TYPE_CASE(UINT16, uint16);
81
+ TO_ARROW_TYPE_CASE(UINT32, uint32);
82
+ TO_ARROW_TYPE_CASE(UINT64, uint64);
83
+ TO_ARROW_TYPE_CASE(FLOAT16, float16);
84
+ TO_ARROW_TYPE_CASE(FLOAT32, float32);
85
+ TO_ARROW_TYPE_CASE(FLOAT64, float64);
86
+ }
87
+ return Status::NotImplemented("Unsupported numpy type ", descr->type_num);
88
+ }
89
+
90
+ Status GetNumPyType(const DataType& type, int* type_num) {
91
+ #define NUMPY_TYPE_CASE(ARROW_NAME, NPY_NAME) \
92
+ case Type::ARROW_NAME: \
93
+ *type_num = NPY_##NPY_NAME; \
94
+ break;
95
+
96
+ switch (type.id()) {
97
+ NUMPY_TYPE_CASE(UINT8, UINT8);
98
+ NUMPY_TYPE_CASE(INT8, INT8);
99
+ NUMPY_TYPE_CASE(UINT16, UINT16);
100
+ NUMPY_TYPE_CASE(INT16, INT16);
101
+ NUMPY_TYPE_CASE(UINT32, UINT32);
102
+ NUMPY_TYPE_CASE(INT32, INT32);
103
+ NUMPY_TYPE_CASE(UINT64, UINT64);
104
+ NUMPY_TYPE_CASE(INT64, INT64);
105
+ NUMPY_TYPE_CASE(HALF_FLOAT, FLOAT16);
106
+ NUMPY_TYPE_CASE(FLOAT, FLOAT32);
107
+ NUMPY_TYPE_CASE(DOUBLE, FLOAT64);
108
+ default: {
109
+ return Status::NotImplemented("Unsupported tensor type: ", type.ToString());
110
+ }
111
+ }
112
+ #undef NUMPY_TYPE_CASE
113
+
114
+ return Status::OK();
115
+ }
116
+
117
+ } // namespace
118
+
119
+ Result<std::shared_ptr<DataType>> NumPyScalarToArrowDataType(PyObject* scalar) {
120
+ PyArray_Descr* descr = PyArray_DescrFromScalar(scalar);
121
+ OwnedRef descr_ref(reinterpret_cast<PyObject*>(descr));
122
+ return NumPyDtypeToArrow(descr);
123
+ }
124
+
125
+ Result<std::shared_ptr<DataType>> NumPyDtypeToArrow(PyObject* dtype) {
126
+ if (!PyObject_TypeCheck(dtype, &PyArrayDescr_Type)) {
127
+ return Status::TypeError("Did not pass numpy.dtype object");
128
+ }
129
+ PyArray_Descr* descr = reinterpret_cast<PyArray_Descr*>(dtype);
130
+ return NumPyDtypeToArrow(descr);
131
+ }
132
+
133
+ Result<std::shared_ptr<DataType>> NumPyDtypeToArrow(PyArray_Descr* descr) {
134
+ int type_num = fix_numpy_type_num(descr->type_num);
135
+
136
+ switch (type_num) {
137
+ TO_ARROW_TYPE_CASE(BOOL, boolean);
138
+ TO_ARROW_TYPE_CASE(INT8, int8);
139
+ TO_ARROW_TYPE_CASE(INT16, int16);
140
+ TO_ARROW_TYPE_CASE(INT32, int32);
141
+ TO_ARROW_TYPE_CASE(INT64, int64);
142
+ TO_ARROW_TYPE_CASE(UINT8, uint8);
143
+ TO_ARROW_TYPE_CASE(UINT16, uint16);
144
+ TO_ARROW_TYPE_CASE(UINT32, uint32);
145
+ TO_ARROW_TYPE_CASE(UINT64, uint64);
146
+ TO_ARROW_TYPE_CASE(FLOAT16, float16);
147
+ TO_ARROW_TYPE_CASE(FLOAT32, float32);
148
+ TO_ARROW_TYPE_CASE(FLOAT64, float64);
149
+ TO_ARROW_TYPE_CASE(STRING, binary);
150
+ TO_ARROW_TYPE_CASE(UNICODE, utf8);
151
+ case NPY_DATETIME: {
152
+ auto date_dtype =
153
+ reinterpret_cast<PyArray_DatetimeDTypeMetaData*>(PyDataType_C_METADATA(descr));
154
+ switch (date_dtype->meta.base) {
155
+ case NPY_FR_s:
156
+ return timestamp(TimeUnit::SECOND);
157
+ case NPY_FR_ms:
158
+ return timestamp(TimeUnit::MILLI);
159
+ case NPY_FR_us:
160
+ return timestamp(TimeUnit::MICRO);
161
+ case NPY_FR_ns:
162
+ return timestamp(TimeUnit::NANO);
163
+ case NPY_FR_D:
164
+ return date32();
165
+ case NPY_FR_GENERIC:
166
+ return Status::NotImplemented("Unbound or generic datetime64 time unit");
167
+ default:
168
+ return Status::NotImplemented("Unsupported datetime64 time unit");
169
+ }
170
+ } break;
171
+ case NPY_TIMEDELTA: {
172
+ auto timedelta_dtype =
173
+ reinterpret_cast<PyArray_DatetimeDTypeMetaData*>(PyDataType_C_METADATA(descr));
174
+ switch (timedelta_dtype->meta.base) {
175
+ case NPY_FR_s:
176
+ return duration(TimeUnit::SECOND);
177
+ case NPY_FR_ms:
178
+ return duration(TimeUnit::MILLI);
179
+ case NPY_FR_us:
180
+ return duration(TimeUnit::MICRO);
181
+ case NPY_FR_ns:
182
+ return duration(TimeUnit::NANO);
183
+ case NPY_FR_GENERIC:
184
+ return Status::NotImplemented("Unbound or generic timedelta64 time unit");
185
+ default:
186
+ return Status::NotImplemented("Unsupported timedelta64 time unit");
187
+ }
188
+ } break;
189
+ }
190
+
191
+ return Status::NotImplemented("Unsupported numpy type ", descr->type_num);
192
+ }
193
+
194
+ #undef TO_ARROW_TYPE_CASE
195
+
196
+ Status NdarrayToTensor(MemoryPool* pool, PyObject* ao,
197
+ const std::vector<std::string>& dim_names,
198
+ std::shared_ptr<Tensor>* out) {
199
+ if (!PyArray_Check(ao)) {
200
+ return Status::TypeError("Did not pass ndarray object");
201
+ }
202
+
203
+ PyArrayObject* ndarray = reinterpret_cast<PyArrayObject*>(ao);
204
+
205
+ // TODO(wesm): What do we want to do with non-contiguous memory and negative strides?
206
+
207
+ int ndim = PyArray_NDIM(ndarray);
208
+
209
+ std::shared_ptr<Buffer> data = std::make_shared<NumPyBuffer>(ao);
210
+ std::vector<int64_t> shape(ndim);
211
+ std::vector<int64_t> strides(ndim);
212
+
213
+ npy_intp* array_strides = PyArray_STRIDES(ndarray);
214
+ npy_intp* array_shape = PyArray_SHAPE(ndarray);
215
+ for (int i = 0; i < ndim; ++i) {
216
+ if (array_strides[i] < 0) {
217
+ return Status::Invalid("Negative ndarray strides not supported");
218
+ }
219
+ shape[i] = array_shape[i];
220
+ strides[i] = array_strides[i];
221
+ }
222
+
223
+ ARROW_ASSIGN_OR_RAISE(
224
+ auto type, GetTensorType(reinterpret_cast<PyObject*>(PyArray_DESCR(ndarray))));
225
+ *out = std::make_shared<Tensor>(type, data, shape, strides, dim_names);
226
+ return Status::OK();
227
+ }
228
+
229
+ Status TensorToNdarray(const std::shared_ptr<Tensor>& tensor, PyObject* base,
230
+ PyObject** out) {
231
+ int type_num = 0;
232
+ RETURN_NOT_OK(GetNumPyType(*tensor->type(), &type_num));
233
+ PyArray_Descr* dtype = PyArray_DescrNewFromType(type_num);
234
+ RETURN_IF_PYERROR();
235
+
236
+ const int ndim = tensor->ndim();
237
+ std::vector<npy_intp> npy_shape(ndim);
238
+ std::vector<npy_intp> npy_strides(ndim);
239
+
240
+ for (int i = 0; i < ndim; ++i) {
241
+ npy_shape[i] = tensor->shape()[i];
242
+ npy_strides[i] = tensor->strides()[i];
243
+ }
244
+
245
+ const void* immutable_data = nullptr;
246
+ if (tensor->data()) {
247
+ immutable_data = tensor->data()->data();
248
+ }
249
+
250
+ // Remove const =(
251
+ void* mutable_data = const_cast<void*>(immutable_data);
252
+
253
+ int array_flags = 0;
254
+ if (tensor->is_row_major()) {
255
+ array_flags |= NPY_ARRAY_C_CONTIGUOUS;
256
+ }
257
+ if (tensor->is_column_major()) {
258
+ array_flags |= NPY_ARRAY_F_CONTIGUOUS;
259
+ }
260
+ if (tensor->is_mutable()) {
261
+ array_flags |= NPY_ARRAY_WRITEABLE;
262
+ }
263
+
264
+ PyObject* result =
265
+ PyArray_NewFromDescr(&PyArray_Type, dtype, ndim, npy_shape.data(),
266
+ npy_strides.data(), mutable_data, array_flags, nullptr);
267
+ RETURN_IF_PYERROR();
268
+
269
+ if (base == Py_None || base == nullptr) {
270
+ base = py::wrap_tensor(tensor);
271
+ } else {
272
+ Py_XINCREF(base);
273
+ }
274
+ PyArray_SetBaseObject(reinterpret_cast<PyArrayObject*>(result), base);
275
+ *out = result;
276
+ return Status::OK();
277
+ }
278
+
279
+ // Wrap the dense data of a sparse tensor in an ndarray
280
+ static Status SparseTensorDataToNdarray(const SparseTensor& sparse_tensor,
281
+ std::vector<npy_intp> data_shape, PyObject* base,
282
+ PyObject** out_data) {
283
+ int type_num_data = 0;
284
+ RETURN_NOT_OK(GetNumPyType(*sparse_tensor.type(), &type_num_data));
285
+ PyArray_Descr* dtype_data = PyArray_DescrNewFromType(type_num_data);
286
+ RETURN_IF_PYERROR();
287
+
288
+ const void* immutable_data = sparse_tensor.data()->data();
289
+ // Remove const =(
290
+ void* mutable_data = const_cast<void*>(immutable_data);
291
+ int array_flags = NPY_ARRAY_C_CONTIGUOUS | NPY_ARRAY_F_CONTIGUOUS;
292
+ if (sparse_tensor.is_mutable()) {
293
+ array_flags |= NPY_ARRAY_WRITEABLE;
294
+ }
295
+
296
+ *out_data = PyArray_NewFromDescr(&PyArray_Type, dtype_data,
297
+ static_cast<int>(data_shape.size()), data_shape.data(),
298
+ nullptr, mutable_data, array_flags, nullptr);
299
+ RETURN_IF_PYERROR();
300
+ Py_XINCREF(base);
301
+ PyArray_SetBaseObject(reinterpret_cast<PyArrayObject*>(*out_data), base);
302
+ return Status::OK();
303
+ }
304
+
305
+ Status SparseCOOTensorToNdarray(const std::shared_ptr<SparseCOOTensor>& sparse_tensor,
306
+ PyObject* base, PyObject** out_data,
307
+ PyObject** out_coords) {
308
+ const auto& sparse_index = arrow::internal::checked_cast<const SparseCOOIndex&>(
309
+ *sparse_tensor->sparse_index());
310
+
311
+ // Wrap tensor data
312
+ OwnedRef result_data;
313
+ RETURN_NOT_OK(SparseTensorDataToNdarray(
314
+ *sparse_tensor, {static_cast<npy_intp>(sparse_tensor->non_zero_length()), 1}, base,
315
+ result_data.ref()));
316
+
317
+ // Wrap indices
318
+ PyObject* result_coords;
319
+ RETURN_NOT_OK(TensorToNdarray(sparse_index.indices(), base, &result_coords));
320
+
321
+ *out_data = result_data.detach();
322
+ *out_coords = result_coords;
323
+ return Status::OK();
324
+ }
325
+
326
+ Status SparseCSXMatrixToNdarray(const std::shared_ptr<SparseTensor>& sparse_tensor,
327
+ PyObject* base, PyObject** out_data,
328
+ PyObject** out_indptr, PyObject** out_indices) {
329
+ // Wrap indices
330
+ OwnedRef result_indptr;
331
+ OwnedRef result_indices;
332
+
333
+ switch (sparse_tensor->format_id()) {
334
+ case SparseTensorFormat::CSR: {
335
+ const auto& sparse_index = arrow::internal::checked_cast<const SparseCSRIndex&>(
336
+ *sparse_tensor->sparse_index());
337
+ RETURN_NOT_OK(TensorToNdarray(sparse_index.indptr(), base, result_indptr.ref()));
338
+ RETURN_NOT_OK(TensorToNdarray(sparse_index.indices(), base, result_indices.ref()));
339
+ break;
340
+ }
341
+ case SparseTensorFormat::CSC: {
342
+ const auto& sparse_index = arrow::internal::checked_cast<const SparseCSCIndex&>(
343
+ *sparse_tensor->sparse_index());
344
+ RETURN_NOT_OK(TensorToNdarray(sparse_index.indptr(), base, result_indptr.ref()));
345
+ RETURN_NOT_OK(TensorToNdarray(sparse_index.indices(), base, result_indices.ref()));
346
+ break;
347
+ }
348
+ default:
349
+ return Status::NotImplemented("Invalid SparseTensor type.");
350
+ }
351
+
352
+ // Wrap tensor data
353
+ OwnedRef result_data;
354
+ RETURN_NOT_OK(SparseTensorDataToNdarray(
355
+ *sparse_tensor, {static_cast<npy_intp>(sparse_tensor->non_zero_length()), 1}, base,
356
+ result_data.ref()));
357
+
358
+ *out_data = result_data.detach();
359
+ *out_indptr = result_indptr.detach();
360
+ *out_indices = result_indices.detach();
361
+ return Status::OK();
362
+ }
363
+
364
+ Status SparseCSRMatrixToNdarray(const std::shared_ptr<SparseCSRMatrix>& sparse_tensor,
365
+ PyObject* base, PyObject** out_data,
366
+ PyObject** out_indptr, PyObject** out_indices) {
367
+ return SparseCSXMatrixToNdarray(sparse_tensor, base, out_data, out_indptr, out_indices);
368
+ }
369
+
370
+ Status SparseCSCMatrixToNdarray(const std::shared_ptr<SparseCSCMatrix>& sparse_tensor,
371
+ PyObject* base, PyObject** out_data,
372
+ PyObject** out_indptr, PyObject** out_indices) {
373
+ return SparseCSXMatrixToNdarray(sparse_tensor, base, out_data, out_indptr, out_indices);
374
+ }
375
+
376
+ Status SparseCSFTensorToNdarray(const std::shared_ptr<SparseCSFTensor>& sparse_tensor,
377
+ PyObject* base, PyObject** out_data,
378
+ PyObject** out_indptr, PyObject** out_indices) {
379
+ const auto& sparse_index = arrow::internal::checked_cast<const SparseCSFIndex&>(
380
+ *sparse_tensor->sparse_index());
381
+
382
+ // Wrap tensor data
383
+ OwnedRef result_data;
384
+ RETURN_NOT_OK(SparseTensorDataToNdarray(
385
+ *sparse_tensor, {static_cast<npy_intp>(sparse_tensor->non_zero_length()), 1}, base,
386
+ result_data.ref()));
387
+
388
+ // Wrap indices
389
+ int ndim = static_cast<int>(sparse_index.indices().size());
390
+ OwnedRef indptr(PyList_New(ndim - 1));
391
+ OwnedRef indices(PyList_New(ndim));
392
+ RETURN_IF_PYERROR();
393
+
394
+ for (int i = 0; i < ndim - 1; ++i) {
395
+ PyObject* item;
396
+ RETURN_NOT_OK(TensorToNdarray(sparse_index.indptr()[i], base, &item));
397
+ if (PyList_SetItem(indptr.obj(), i, item) < 0) {
398
+ Py_XDECREF(item);
399
+ RETURN_IF_PYERROR();
400
+ }
401
+ }
402
+ for (int i = 0; i < ndim; ++i) {
403
+ PyObject* item;
404
+ RETURN_NOT_OK(TensorToNdarray(sparse_index.indices()[i], base, &item));
405
+ if (PyList_SetItem(indices.obj(), i, item) < 0) {
406
+ Py_XDECREF(item);
407
+ RETURN_IF_PYERROR();
408
+ }
409
+ }
410
+
411
+ *out_indptr = indptr.detach();
412
+ *out_indices = indices.detach();
413
+ *out_data = result_data.detach();
414
+ return Status::OK();
415
+ }
416
+
417
+ Status NdarraysToSparseCOOTensor(MemoryPool* pool, PyObject* data_ao, PyObject* coords_ao,
418
+ const std::vector<int64_t>& shape,
419
+ const std::vector<std::string>& dim_names,
420
+ std::shared_ptr<SparseCOOTensor>* out) {
421
+ if (!PyArray_Check(data_ao) || !PyArray_Check(coords_ao)) {
422
+ return Status::TypeError("Did not pass ndarray object");
423
+ }
424
+
425
+ PyArrayObject* ndarray_data = reinterpret_cast<PyArrayObject*>(data_ao);
426
+ std::shared_ptr<Buffer> data = std::make_shared<NumPyBuffer>(data_ao);
427
+ ARROW_ASSIGN_OR_RAISE(
428
+ auto type_data,
429
+ GetTensorType(reinterpret_cast<PyObject*>(PyArray_DESCR(ndarray_data))));
430
+
431
+ std::shared_ptr<Tensor> coords;
432
+ RETURN_NOT_OK(NdarrayToTensor(pool, coords_ao, {}, &coords));
433
+ ARROW_CHECK_EQ(coords->type_id(), Type::INT64); // Should be ensured by caller
434
+
435
+ ARROW_ASSIGN_OR_RAISE(std::shared_ptr<SparseCOOIndex> sparse_index,
436
+ SparseCOOIndex::Make(coords));
437
+ *out = std::make_shared<SparseTensorImpl<SparseCOOIndex>>(sparse_index, type_data, data,
438
+ shape, dim_names);
439
+ return Status::OK();
440
+ }
441
+
442
+ template <class IndexType>
443
+ Status NdarraysToSparseCSXMatrix(MemoryPool* pool, PyObject* data_ao, PyObject* indptr_ao,
444
+ PyObject* indices_ao, const std::vector<int64_t>& shape,
445
+ const std::vector<std::string>& dim_names,
446
+ std::shared_ptr<SparseTensorImpl<IndexType>>* out) {
447
+ if (!PyArray_Check(data_ao) || !PyArray_Check(indptr_ao) ||
448
+ !PyArray_Check(indices_ao)) {
449
+ return Status::TypeError("Did not pass ndarray object");
450
+ }
451
+
452
+ PyArrayObject* ndarray_data = reinterpret_cast<PyArrayObject*>(data_ao);
453
+ std::shared_ptr<Buffer> data = std::make_shared<NumPyBuffer>(data_ao);
454
+ ARROW_ASSIGN_OR_RAISE(
455
+ auto type_data,
456
+ GetTensorType(reinterpret_cast<PyObject*>(PyArray_DESCR(ndarray_data))));
457
+
458
+ std::shared_ptr<Tensor> indptr, indices;
459
+ RETURN_NOT_OK(NdarrayToTensor(pool, indptr_ao, {}, &indptr));
460
+ RETURN_NOT_OK(NdarrayToTensor(pool, indices_ao, {}, &indices));
461
+ ARROW_CHECK_EQ(indptr->type_id(), Type::INT64); // Should be ensured by caller
462
+ ARROW_CHECK_EQ(indices->type_id(), Type::INT64); // Should be ensured by caller
463
+
464
+ auto sparse_index = std::make_shared<IndexType>(
465
+ std::static_pointer_cast<NumericTensor<Int64Type>>(indptr),
466
+ std::static_pointer_cast<NumericTensor<Int64Type>>(indices));
467
+ *out = std::make_shared<SparseTensorImpl<IndexType>>(sparse_index, type_data, data,
468
+ shape, dim_names);
469
+ return Status::OK();
470
+ }
471
+
472
+ Status NdarraysToSparseCSFTensor(MemoryPool* pool, PyObject* data_ao, PyObject* indptr_ao,
473
+ PyObject* indices_ao, const std::vector<int64_t>& shape,
474
+ const std::vector<int64_t>& axis_order,
475
+ const std::vector<std::string>& dim_names,
476
+ std::shared_ptr<SparseCSFTensor>* out) {
477
+ if (!PyArray_Check(data_ao)) {
478
+ return Status::TypeError("Did not pass ndarray object for data");
479
+ }
480
+ const int ndim = static_cast<const int>(shape.size());
481
+ PyArrayObject* ndarray_data = reinterpret_cast<PyArrayObject*>(data_ao);
482
+ std::shared_ptr<Buffer> data = std::make_shared<NumPyBuffer>(data_ao);
483
+ ARROW_ASSIGN_OR_RAISE(
484
+ auto type_data,
485
+ GetTensorType(reinterpret_cast<PyObject*>(PyArray_DESCR(ndarray_data))));
486
+
487
+ std::vector<std::shared_ptr<Tensor>> indptr(ndim - 1);
488
+ std::vector<std::shared_ptr<Tensor>> indices(ndim);
489
+
490
+ for (int i = 0; i < ndim - 1; ++i) {
491
+ PyObject* item = PySequence_Fast_GET_ITEM(indptr_ao, i);
492
+ if (!PyArray_Check(item)) {
493
+ return Status::TypeError("Did not pass ndarray object for indptr");
494
+ }
495
+ RETURN_NOT_OK(NdarrayToTensor(pool, item, {}, &indptr[i]));
496
+ ARROW_CHECK_EQ(indptr[i]->type_id(), Type::INT64); // Should be ensured by caller
497
+ }
498
+
499
+ for (int i = 0; i < ndim; ++i) {
500
+ PyObject* item = PySequence_Fast_GET_ITEM(indices_ao, i);
501
+ if (!PyArray_Check(item)) {
502
+ return Status::TypeError("Did not pass ndarray object for indices");
503
+ }
504
+ RETURN_NOT_OK(NdarrayToTensor(pool, item, {}, &indices[i]));
505
+ ARROW_CHECK_EQ(indices[i]->type_id(), Type::INT64); // Should be ensured by caller
506
+ }
507
+
508
+ auto sparse_index = std::make_shared<SparseCSFIndex>(indptr, indices, axis_order);
509
+ *out = std::make_shared<SparseTensorImpl<SparseCSFIndex>>(sparse_index, type_data, data,
510
+ shape, dim_names);
511
+ return Status::OK();
512
+ }
513
+
514
+ Status NdarraysToSparseCSRMatrix(MemoryPool* pool, PyObject* data_ao, PyObject* indptr_ao,
515
+ PyObject* indices_ao, const std::vector<int64_t>& shape,
516
+ const std::vector<std::string>& dim_names,
517
+ std::shared_ptr<SparseCSRMatrix>* out) {
518
+ return NdarraysToSparseCSXMatrix<SparseCSRIndex>(pool, data_ao, indptr_ao, indices_ao,
519
+ shape, dim_names, out);
520
+ }
521
+
522
+ Status NdarraysToSparseCSCMatrix(MemoryPool* pool, PyObject* data_ao, PyObject* indptr_ao,
523
+ PyObject* indices_ao, const std::vector<int64_t>& shape,
524
+ const std::vector<std::string>& dim_names,
525
+ std::shared_ptr<SparseCSCMatrix>* out) {
526
+ return NdarraysToSparseCSXMatrix<SparseCSCIndex>(pool, data_ao, indptr_ao, indices_ao,
527
+ shape, dim_names, out);
528
+ }
529
+
530
+ Status TensorToSparseCOOTensor(const std::shared_ptr<Tensor>& tensor,
531
+ std::shared_ptr<SparseCOOTensor>* out) {
532
+ return SparseCOOTensor::Make(*tensor).Value(out);
533
+ }
534
+
535
+ Status TensorToSparseCSRMatrix(const std::shared_ptr<Tensor>& tensor,
536
+ std::shared_ptr<SparseCSRMatrix>* out) {
537
+ return SparseCSRMatrix::Make(*tensor).Value(out);
538
+ }
539
+
540
+ Status TensorToSparseCSCMatrix(const std::shared_ptr<Tensor>& tensor,
541
+ std::shared_ptr<SparseCSCMatrix>* out) {
542
+ return SparseCSCMatrix::Make(*tensor).Value(out);
543
+ }
544
+
545
+ Status TensorToSparseCSFTensor(const std::shared_ptr<Tensor>& tensor,
546
+ std::shared_ptr<SparseCSFTensor>* out) {
547
+ return SparseCSFTensor::Make(*tensor).Value(out);
548
+ }
549
+
550
+ } // namespace py
551
+ } // namespace arrow
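
The conversions above are symmetric: NdarrayToTensor wraps the ndarray's buffer zero-copy (rejecting negative strides), while TensorToNdarray re-exposes a Tensor's buffer to NumPy and keeps the Tensor alive through the array's base object. Below is a minimal round-trip sketch that is not part of this commit; the helper name RoundTripNdarray is hypothetical, and it assumes the embedding application has already initialized Python, NumPy (import_numpy) and pyarrow (import_pyarrow).

    #include <memory>
    #include <vector>

    #include "arrow/memory_pool.h"
    #include "arrow/status.h"
    #include "arrow/tensor.h"
    #include "arrow/python/numpy_convert.h"

    // Hypothetical helper: ndarray -> arrow::Tensor -> ndarray, zero-copy.
    arrow::Status RoundTripNdarray(PyObject* ndarray, PyObject** out_ndarray) {
      std::shared_ptr<arrow::Tensor> tensor;
      // dim_names is optional metadata; an empty vector means unnamed dimensions.
      ARROW_RETURN_NOT_OK(arrow::py::NdarrayToTensor(
          arrow::default_memory_pool(), ndarray, /*dim_names=*/{}, &tensor));
      // With base == nullptr, the resulting ndarray owns a wrapped Tensor as its
      // keepalive object, as done at the end of TensorToNdarray above.
      return arrow::py::TensorToNdarray(tensor, /*base=*/nullptr, out_ndarray);
    }
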
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_internal.h ADDED
@@ -0,0 +1,182 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // Internal utilities for dealing with NumPy
19
+
20
+ #pragma once
21
+
22
+ #include "arrow/python/numpy_interop.h"
23
+
24
+ #include "arrow/status.h"
25
+
26
+ #include "arrow/python/platform.h"
27
+
28
+ #include <cstdint>
29
+ #include <sstream>
30
+ #include <string>
31
+
32
+ namespace arrow {
33
+ namespace py {
34
+
35
+ /// Indexing convenience for interacting with strided 1-dim ndarray objects
36
+ template <typename T>
37
+ class Ndarray1DIndexer {
38
+ public:
39
+ typedef int64_t size_type;
40
+
41
+ Ndarray1DIndexer() : arr_(NULLPTR), data_(NULLPTR) {}
42
+
43
+ explicit Ndarray1DIndexer(PyArrayObject* arr) : Ndarray1DIndexer() {
44
+ arr_ = arr;
45
+ DCHECK_EQ(1, PyArray_NDIM(arr)) << "Only works with 1-dimensional arrays";
46
+ data_ = reinterpret_cast<uint8_t*>(PyArray_DATA(arr));
47
+ stride_ = PyArray_STRIDES(arr)[0];
48
+ }
49
+
50
+ ~Ndarray1DIndexer() = default;
51
+
52
+ int64_t size() const { return PyArray_SIZE(arr_); }
53
+
54
+ const T* data() const { return reinterpret_cast<const T*>(data_); }
55
+
56
+ bool is_strided() const { return stride_ != sizeof(T); }
57
+
58
+ T& operator[](size_type index) {
59
+ return *reinterpret_cast<T*>(data_ + index * stride_);
60
+ }
61
+ const T& operator[](size_type index) const {
62
+ return *reinterpret_cast<const T*>(data_ + index * stride_);
63
+ }
64
+
65
+ private:
66
+ PyArrayObject* arr_;
67
+ uint8_t* data_;
68
+ int64_t stride_;
69
+ };
70
+
71
+ // Handling of Numpy Types by their static numbers
72
+ // (the NPY_TYPES enum and related defines)
73
+
74
+ static inline std::string GetNumPyTypeName(int npy_type) {
75
+ #define TYPE_CASE(TYPE, NAME) \
76
+ case NPY_##TYPE: \
77
+ return NAME;
78
+
79
+ switch (npy_type) {
80
+ TYPE_CASE(BOOL, "bool")
81
+ TYPE_CASE(INT8, "int8")
82
+ TYPE_CASE(INT16, "int16")
83
+ TYPE_CASE(INT32, "int32")
84
+ TYPE_CASE(INT64, "int64")
85
+ #if !NPY_INT32_IS_INT
86
+ TYPE_CASE(INT, "intc")
87
+ #endif
88
+ #if !NPY_INT64_IS_LONG_LONG
89
+ TYPE_CASE(LONGLONG, "longlong")
90
+ #endif
91
+ TYPE_CASE(UINT8, "uint8")
92
+ TYPE_CASE(UINT16, "uint16")
93
+ TYPE_CASE(UINT32, "uint32")
94
+ TYPE_CASE(UINT64, "uint64")
95
+ #if !NPY_INT32_IS_INT
96
+ TYPE_CASE(UINT, "uintc")
97
+ #endif
98
+ #if !NPY_INT64_IS_LONG_LONG
99
+ TYPE_CASE(ULONGLONG, "ulonglong")
100
+ #endif
101
+ TYPE_CASE(FLOAT16, "float16")
102
+ TYPE_CASE(FLOAT32, "float32")
103
+ TYPE_CASE(FLOAT64, "float64")
104
+ TYPE_CASE(DATETIME, "datetime64")
105
+ TYPE_CASE(TIMEDELTA, "timedelta64")
106
+ TYPE_CASE(OBJECT, "object")
107
+ TYPE_CASE(VOID, "void")
108
+ default:
109
+ break;
110
+ }
111
+
112
+ #undef TYPE_CASE
113
+ std::stringstream ss;
114
+ ss << "unrecognized type (" << npy_type << ") in GetNumPyTypeName";
115
+ return ss.str();
116
+ }
117
+
118
+ #define TYPE_VISIT_INLINE(TYPE) \
119
+ case NPY_##TYPE: \
120
+ return visitor->template Visit<NPY_##TYPE>(arr);
121
+
122
+ template <typename VISITOR>
123
+ inline Status VisitNumpyArrayInline(PyArrayObject* arr, VISITOR* visitor) {
124
+ switch (PyArray_TYPE(arr)) {
125
+ TYPE_VISIT_INLINE(BOOL);
126
+ TYPE_VISIT_INLINE(INT8);
127
+ TYPE_VISIT_INLINE(UINT8);
128
+ TYPE_VISIT_INLINE(INT16);
129
+ TYPE_VISIT_INLINE(UINT16);
130
+ TYPE_VISIT_INLINE(INT32);
131
+ TYPE_VISIT_INLINE(UINT32);
132
+ TYPE_VISIT_INLINE(INT64);
133
+ TYPE_VISIT_INLINE(UINT64);
134
+ #if !NPY_INT32_IS_INT
135
+ TYPE_VISIT_INLINE(INT);
136
+ TYPE_VISIT_INLINE(UINT);
137
+ #endif
138
+ #if !NPY_INT64_IS_LONG_LONG
139
+ TYPE_VISIT_INLINE(LONGLONG);
140
+ TYPE_VISIT_INLINE(ULONGLONG);
141
+ #endif
142
+ TYPE_VISIT_INLINE(FLOAT16);
143
+ TYPE_VISIT_INLINE(FLOAT32);
144
+ TYPE_VISIT_INLINE(FLOAT64);
145
+ TYPE_VISIT_INLINE(DATETIME);
146
+ TYPE_VISIT_INLINE(TIMEDELTA);
147
+ TYPE_VISIT_INLINE(OBJECT);
148
+ }
149
+ return Status::NotImplemented("NumPy type not implemented: ",
150
+ GetNumPyTypeName(PyArray_TYPE(arr)));
151
+ }
152
+
153
+ #undef TYPE_VISIT_INLINE
154
+
155
+ namespace internal {
156
+
157
+ inline bool PyFloatScalar_Check(PyObject* obj) {
158
+ return PyFloat_Check(obj) || PyArray_IsScalar(obj, Floating);
159
+ }
160
+
161
+ inline bool PyIntScalar_Check(PyObject* obj) {
162
+ return PyLong_Check(obj) || PyArray_IsScalar(obj, Integer);
163
+ }
164
+
165
+ inline bool PyBoolScalar_Check(PyObject* obj) {
166
+ return PyBool_Check(obj) || PyArray_IsScalar(obj, Bool);
167
+ }
168
+
169
+ static inline PyArray_Descr* GetSafeNumPyDtype(int type) {
170
+ if (type == NPY_DATETIME || type == NPY_TIMEDELTA) {
171
+ // It is not safe to mutate the result of DescrFromType for datetime and
172
+ // timedelta descriptors
173
+ return PyArray_DescrNewFromType(type);
174
+ } else {
175
+ return PyArray_DescrFromType(type);
176
+ }
177
+ }
178
+
179
+ } // namespace internal
180
+
181
+ } // namespace py
182
+ } // namespace arrow
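
Ndarray1DIndexer exists so callers can treat a possibly strided 1-D ndarray as if it were a dense C array: operator[] applies the byte stride on every access. A hedged usage sketch follows; SumInt64Ndarray is a hypothetical helper, not part of this commit, and it assumes the caller has already checked that the array is 1-D with int64 elements.

    #include <cstdint>

    #include "arrow/python/numpy_internal.h"

    // Hypothetical helper: sum a 1-D int64 ndarray, strided or contiguous.
    int64_t SumInt64Ndarray(PyArrayObject* arr) {
      arrow::py::Ndarray1DIndexer<int64_t> values(arr);  // DCHECKs ndim == 1
      int64_t total = 0;
      for (int64_t i = 0; i < values.size(); ++i) {
        total += values[i];  // operator[] applies the element stride
      }
      return total;
    }
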
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/numpy_interop.h ADDED
@@ -0,0 +1,103 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include "arrow/python/platform.h" // IWYU pragma: export
21
+
22
+ #include <numpy/numpyconfig.h> // IWYU pragma: export
23
+
24
+ // Don't use the deprecated Numpy functions
25
+ #ifdef NPY_1_7_API_VERSION
26
+ #define NPY_NO_DEPRECATED_API NPY_1_7_API_VERSION
27
+ #else
28
+ #define NPY_ARRAY_NOTSWAPPED NPY_NOTSWAPPED
29
+ #define NPY_ARRAY_ALIGNED NPY_ALIGNED
30
+ #define NPY_ARRAY_WRITEABLE NPY_WRITEABLE
31
+ #define NPY_ARRAY_UPDATEIFCOPY NPY_UPDATEIFCOPY
32
+ #endif
33
+
34
+ // This is required to be able to access the NumPy C API properly in C++ files
35
+ // other than init.cc.
36
+ #define PY_ARRAY_UNIQUE_SYMBOL arrow_ARRAY_API
37
+ #ifndef NUMPY_IMPORT_ARRAY
38
+ #define NO_IMPORT_ARRAY
39
+ #endif
40
+
41
+ #include <numpy/arrayobject.h> // IWYU pragma: export
42
+ #include <numpy/arrayscalars.h> // IWYU pragma: export
43
+ #include <numpy/ufuncobject.h> // IWYU pragma: export
44
+
45
+ // A bit subtle. Numpy has 5 canonical integer types:
46
+ // (or, rather, type pairs: signed and unsigned)
47
+ // NPY_BYTE, NPY_SHORT, NPY_INT, NPY_LONG, NPY_LONGLONG
48
+ // It also has 4 fixed-width integer aliases.
49
+ // When mapping Arrow integer types to these 4 fixed-width aliases,
50
+ // we always miss one of the canonical types (even though it may
51
+ // have the same width as one of the aliases).
52
+ // Which one depends on the platform...
53
+ // On a LP64 system, NPY_INT64 maps to NPY_LONG and
54
+ // NPY_LONGLONG needs to be handled separately.
55
+ // On a LLP64 system, NPY_INT32 maps to NPY_LONG and
56
+ // NPY_INT needs to be handled separately.
57
+
58
+ #if NPY_BITSOF_LONG == 32 && NPY_BITSOF_LONGLONG == 64
59
+ #define NPY_INT64_IS_LONG_LONG 1
60
+ #else
61
+ #define NPY_INT64_IS_LONG_LONG 0
62
+ #endif
63
+
64
+ #if NPY_BITSOF_INT == 32 && NPY_BITSOF_LONG == 64
65
+ #define NPY_INT32_IS_INT 1
66
+ #else
67
+ #define NPY_INT32_IS_INT 0
68
+ #endif
69
+
70
+ // Backported NumPy 2 API (can be removed if numpy 2 is required)
71
+ #if NPY_ABI_VERSION < 0x02000000
72
+ #define PyDataType_ELSIZE(descr) ((descr)->elsize)
73
+ #define PyDataType_C_METADATA(descr) ((descr)->c_metadata)
74
+ #define PyDataType_FIELDS(descr) ((descr)->fields)
75
+ #endif
76
+
77
+ namespace arrow {
78
+ namespace py {
79
+
80
+ inline int import_numpy() {
81
+ #ifdef NUMPY_IMPORT_ARRAY
82
+ import_array1(-1);
83
+ import_umath1(-1);
84
+ #endif
85
+
86
+ return 0;
87
+ }
88
+
89
+ // See above about the missing Numpy integer type numbers
90
+ inline int fix_numpy_type_num(int type_num) {
91
+ #if !NPY_INT32_IS_INT && NPY_BITSOF_INT == 32
92
+ if (type_num == NPY_INT) return NPY_INT32;
93
+ if (type_num == NPY_UINT) return NPY_UINT32;
94
+ #endif
95
+ #if !NPY_INT64_IS_LONG_LONG && NPY_BITSOF_LONGLONG == 64
96
+ if (type_num == NPY_LONGLONG) return NPY_INT64;
97
+ if (type_num == NPY_ULONGLONG) return NPY_UINT64;
98
+ #endif
99
+ return type_num;
100
+ }
101
+
102
+ } // namespace py
103
+ } // namespace arrow
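
The integer-type note above is the reason fix_numpy_type_num exists: depending on the platform, either NPY_INT/NPY_UINT or NPY_LONGLONG/NPY_ULONGLONG fall outside the fixed-width aliases and must be folded back onto them before switching on type numbers. The sketch below is illustrative only; IsFixedWidth64BitInteger is a hypothetical helper, not part of the commit.

    #include "arrow/python/numpy_interop.h"

    // Hypothetical helper: decide whether a descriptor is a 64-bit integer,
    // regardless of whether the platform spells it NPY_LONG or NPY_LONGLONG.
    bool IsFixedWidth64BitInteger(PyArray_Descr* descr) {
      const int type_num = arrow::py::fix_numpy_type_num(descr->type_num);
      return type_num == NPY_INT64 || type_num == NPY_UINT64;
    }
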
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/parquet_encryption.cc ADDED
@@ -0,0 +1,98 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "arrow/python/parquet_encryption.h"
19
+ #include "parquet/exception.h"
20
+
21
+ namespace arrow {
22
+ namespace py {
23
+ namespace parquet {
24
+ namespace encryption {
25
+
26
+ PyKmsClient::PyKmsClient(PyObject* handler, PyKmsClientVtable vtable)
27
+ : handler_(handler), vtable_(std::move(vtable)) {
28
+ Py_INCREF(handler);
29
+ }
30
+
31
+ PyKmsClient::~PyKmsClient() {}
32
+
33
+ std::string PyKmsClient::WrapKey(const std::string& key_bytes,
34
+ const std::string& master_key_identifier) {
35
+ std::string wrapped;
36
+ auto st = SafeCallIntoPython([&]() -> Status {
37
+ vtable_.wrap_key(handler_.obj(), key_bytes, master_key_identifier, &wrapped);
38
+ return CheckPyError();
39
+ });
40
+ if (!st.ok()) {
41
+ throw ::parquet::ParquetStatusException(st);
42
+ }
43
+ return wrapped;
44
+ }
45
+
46
+ std::string PyKmsClient::UnwrapKey(const std::string& wrapped_key,
47
+ const std::string& master_key_identifier) {
48
+ std::string unwrapped;
49
+ auto st = SafeCallIntoPython([&]() -> Status {
50
+ vtable_.unwrap_key(handler_.obj(), wrapped_key, master_key_identifier, &unwrapped);
51
+ return CheckPyError();
52
+ });
53
+ if (!st.ok()) {
54
+ throw ::parquet::ParquetStatusException(st);
55
+ }
56
+ return unwrapped;
57
+ }
58
+
59
+ PyKmsClientFactory::PyKmsClientFactory(PyObject* handler, PyKmsClientFactoryVtable vtable)
60
+ : handler_(handler), vtable_(std::move(vtable)) {
61
+ Py_INCREF(handler);
62
+ }
63
+
64
+ PyKmsClientFactory::~PyKmsClientFactory() {}
65
+
66
+ std::shared_ptr<::parquet::encryption::KmsClient> PyKmsClientFactory::CreateKmsClient(
67
+ const ::parquet::encryption::KmsConnectionConfig& kms_connection_config) {
68
+ std::shared_ptr<::parquet::encryption::KmsClient> kms_client;
69
+ auto st = SafeCallIntoPython([&]() -> Status {
70
+ vtable_.create_kms_client(handler_.obj(), kms_connection_config, &kms_client);
71
+ return CheckPyError();
72
+ });
73
+ if (!st.ok()) {
74
+ throw ::parquet::ParquetStatusException(st);
75
+ }
76
+ return kms_client;
77
+ }
78
+
79
+ arrow::Result<std::shared_ptr<::parquet::FileEncryptionProperties>>
80
+ PyCryptoFactory::SafeGetFileEncryptionProperties(
81
+ const ::parquet::encryption::KmsConnectionConfig& kms_connection_config,
82
+ const ::parquet::encryption::EncryptionConfiguration& encryption_config) {
83
+ PARQUET_CATCH_AND_RETURN(
84
+ this->GetFileEncryptionProperties(kms_connection_config, encryption_config));
85
+ }
86
+
87
+ arrow::Result<std::shared_ptr<::parquet::FileDecryptionProperties>>
88
+ PyCryptoFactory::SafeGetFileDecryptionProperties(
89
+ const ::parquet::encryption::KmsConnectionConfig& kms_connection_config,
90
+ const ::parquet::encryption::DecryptionConfiguration& decryption_config) {
91
+ PARQUET_CATCH_AND_RETURN(
92
+ this->GetFileDecryptionProperties(kms_connection_config, decryption_config));
93
+ }
94
+
95
+ } // namespace encryption
96
+ } // namespace parquet
97
+ } // namespace py
98
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/parquet_encryption.h ADDED
@@ -0,0 +1,132 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <string>
21
+
22
+ #include "arrow/python/common.h"
23
+ #include "arrow/python/visibility.h"
24
+ #include "arrow/util/macros.h"
25
+ #include "parquet/encryption/crypto_factory.h"
26
+ #include "parquet/encryption/kms_client.h"
27
+ #include "parquet/encryption/kms_client_factory.h"
28
+
29
+ #if defined(_WIN32) || defined(__CYGWIN__) // Windows
30
+ #if defined(_MSC_VER)
31
+ #pragma warning(disable : 4251)
32
+ #else
33
+ #pragma GCC diagnostic ignored "-Wattributes"
34
+ #endif
35
+
36
+ #ifdef ARROW_PYTHON_STATIC
37
+ #define ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT
38
+ #elif defined(ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORTING)
39
+ #define ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT __declspec(dllexport)
40
+ #else
41
+ #define ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT __declspec(dllimport)
42
+ #endif
43
+
44
+ #else // Not Windows
45
+ #ifndef ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT
46
+ #define ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT __attribute__((visibility("default")))
47
+ #endif
48
+ #endif // Non-Windows
49
+
50
+ namespace arrow {
51
+ namespace py {
52
+ namespace parquet {
53
+ namespace encryption {
54
+
55
+ /// \brief A table of function pointers for calling from C++ into
56
+ /// Python.
57
+ class ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT PyKmsClientVtable {
58
+ public:
59
+ std::function<void(PyObject*, const std::string& key_bytes,
60
+ const std::string& master_key_identifier, std::string* out)>
61
+ wrap_key;
62
+ std::function<void(PyObject*, const std::string& wrapped_key,
63
+ const std::string& master_key_identifier, std::string* out)>
64
+ unwrap_key;
65
+ };
66
+
67
+ /// \brief A helper for KmsClient implementation in Python.
68
+ class ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT PyKmsClient
69
+ : public ::parquet::encryption::KmsClient {
70
+ public:
71
+ PyKmsClient(PyObject* handler, PyKmsClientVtable vtable);
72
+ ~PyKmsClient() override;
73
+
74
+ std::string WrapKey(const std::string& key_bytes,
75
+ const std::string& master_key_identifier) override;
76
+
77
+ std::string UnwrapKey(const std::string& wrapped_key,
78
+ const std::string& master_key_identifier) override;
79
+
80
+ private:
81
+ OwnedRefNoGIL handler_;
82
+ PyKmsClientVtable vtable_;
83
+ };
84
+
85
+ /// \brief A table of function pointers for calling from C++ into
86
+ /// Python.
87
+ class ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT PyKmsClientFactoryVtable {
88
+ public:
89
+ std::function<void(
90
+ PyObject*, const ::parquet::encryption::KmsConnectionConfig& kms_connection_config,
91
+ std::shared_ptr<::parquet::encryption::KmsClient>* out)>
92
+ create_kms_client;
93
+ };
94
+
95
+ /// \brief A helper for KmsClientFactory implementation in Python.
96
+ class ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT PyKmsClientFactory
97
+ : public ::parquet::encryption::KmsClientFactory {
98
+ public:
99
+ PyKmsClientFactory(PyObject* handler, PyKmsClientFactoryVtable vtable);
100
+ ~PyKmsClientFactory() override;
101
+
102
+ std::shared_ptr<::parquet::encryption::KmsClient> CreateKmsClient(
103
+ const ::parquet::encryption::KmsConnectionConfig& kms_connection_config) override;
104
+
105
+ private:
106
+ OwnedRefNoGIL handler_;
107
+ PyKmsClientFactoryVtable vtable_;
108
+ };
109
+
110
+ /// \brief A CryptoFactory that returns Results instead of throwing exceptions.
111
+ class ARROW_PYTHON_PARQUET_ENCRYPTION_EXPORT PyCryptoFactory
112
+ : public ::parquet::encryption::CryptoFactory {
113
+ public:
114
+ arrow::Result<std::shared_ptr<::parquet::FileEncryptionProperties>>
115
+ SafeGetFileEncryptionProperties(
116
+ const ::parquet::encryption::KmsConnectionConfig& kms_connection_config,
117
+ const ::parquet::encryption::EncryptionConfiguration& encryption_config);
118
+
119
+ /// The returned FileDecryptionProperties object uses the cache inside
120
+ /// this CryptoFactory object, so keep the CryptoFactory alive for as
121
+ /// long as the returned FileDecryptionProperties object
122
+ /// is in use.
123
+ arrow::Result<std::shared_ptr<::parquet::FileDecryptionProperties>>
124
+ SafeGetFileDecryptionProperties(
125
+ const ::parquet::encryption::KmsConnectionConfig& kms_connection_config,
126
+ const ::parquet::encryption::DecryptionConfiguration& decryption_config);
127
+ };
128
+
129
+ } // namespace encryption
130
+ } // namespace parquet
131
+ } // namespace py
132
+ } // namespace arrow
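
PyKmsClient and PyKmsClientFactory never call Python directly; they forward through the std::function slots in the vtable structs, which the Cython layer fills with callables that acquire the GIL and dispatch to the user's Python KmsClient. The fragment below is a reduced sketch of that indirection with the Python side faked in plain C++; MakeFakeKmsClient and its lambdas are illustrative only and are not how the real binding wires things up.

    #include <memory>
    #include <string>
    #include <utility>

    #include "arrow/python/parquet_encryption.h"

    namespace pqe = arrow::py::parquet::encryption;

    // Illustrative only: in the real binding these lambdas call back into Python.
    std::shared_ptr<pqe::PyKmsClient> MakeFakeKmsClient(PyObject* handler) {
      pqe::PyKmsClientVtable vtable;
      vtable.wrap_key = [](PyObject* /*handler*/, const std::string& key_bytes,
                           const std::string& master_key_identifier,
                           std::string* out) {
        *out = master_key_identifier + ":" + key_bytes;  // stand-in for a KMS call
      };
      vtable.unwrap_key = [](PyObject* /*handler*/, const std::string& wrapped_key,
                             const std::string& master_key_identifier,
                             std::string* out) {
        *out = wrapped_key.substr(master_key_identifier.size() + 1);
      };
      return std::make_shared<pqe::PyKmsClient>(handler, std::move(vtable));
    }
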
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pch.h ADDED
@@ -0,0 +1,24 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // Often-used headers, for precompiling.
19
+ // If updating this header, please make sure you check compilation speed
20
+ // before checking in. Adding headers which are not used extremely often
21
+ // may incur a slowdown, since it makes the precompiled header heavier to load.
22
+
23
+ #include "arrow/pch.h"
24
+ #include "arrow/python/platform.h"
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow.cc ADDED
@@ -0,0 +1,100 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include "arrow/python/pyarrow.h"
19
+
20
+ #include <memory>
21
+ #include <utility>
22
+
23
+ #include "arrow/array.h"
24
+ #include "arrow/table.h"
25
+ #include "arrow/tensor.h"
26
+ #include "arrow/type.h"
27
+ #include "arrow/util/logging.h"
28
+
29
+ #include "arrow/python/common.h"
30
+ #include "arrow/python/datetime.h"
31
+ namespace {
32
+ #include "arrow/python/pyarrow_api.h"
33
+ }
34
+
35
+ namespace arrow {
36
+ namespace py {
37
+
38
+ static Status UnwrapError(PyObject* obj, const char* expected_type) {
39
+ return Status::TypeError("Could not unwrap ", expected_type,
40
+ " from Python object of type '", Py_TYPE(obj)->tp_name, "'");
41
+ }
42
+
43
+ int import_pyarrow() {
44
+ #ifdef PYPY_VERSION
45
+ PyDateTime_IMPORT;
46
+ #else
47
+ internal::InitDatetime();
48
+ #endif
49
+ return ::import_pyarrow__lib();
50
+ }
51
+
52
+ #define DEFINE_WRAP_FUNCTIONS(FUNC_SUFFIX, TYPE_NAME) \
53
+ bool is_##FUNC_SUFFIX(PyObject* obj) { return ::pyarrow_is_##FUNC_SUFFIX(obj) != 0; } \
54
+ \
55
+ PyObject* wrap_##FUNC_SUFFIX(const std::shared_ptr<TYPE_NAME>& src) { \
56
+ return ::pyarrow_wrap_##FUNC_SUFFIX(src); \
57
+ } \
58
+ Result<std::shared_ptr<TYPE_NAME>> unwrap_##FUNC_SUFFIX(PyObject* obj) { \
59
+ auto out = ::pyarrow_unwrap_##FUNC_SUFFIX(obj); \
60
+ if (out) { \
61
+ return std::move(out); \
62
+ } else { \
63
+ return UnwrapError(obj, #TYPE_NAME); \
64
+ } \
65
+ }
66
+
67
+ DEFINE_WRAP_FUNCTIONS(buffer, Buffer)
68
+
69
+ DEFINE_WRAP_FUNCTIONS(data_type, DataType)
70
+ DEFINE_WRAP_FUNCTIONS(field, Field)
71
+ DEFINE_WRAP_FUNCTIONS(schema, Schema)
72
+
73
+ DEFINE_WRAP_FUNCTIONS(scalar, Scalar)
74
+
75
+ DEFINE_WRAP_FUNCTIONS(array, Array)
76
+ DEFINE_WRAP_FUNCTIONS(chunked_array, ChunkedArray)
77
+
78
+ DEFINE_WRAP_FUNCTIONS(sparse_coo_tensor, SparseCOOTensor)
79
+ DEFINE_WRAP_FUNCTIONS(sparse_csc_matrix, SparseCSCMatrix)
80
+ DEFINE_WRAP_FUNCTIONS(sparse_csf_tensor, SparseCSFTensor)
81
+ DEFINE_WRAP_FUNCTIONS(sparse_csr_matrix, SparseCSRMatrix)
82
+ DEFINE_WRAP_FUNCTIONS(tensor, Tensor)
83
+
84
+ DEFINE_WRAP_FUNCTIONS(batch, RecordBatch)
85
+ DEFINE_WRAP_FUNCTIONS(table, Table)
86
+
87
+ #undef DEFINE_WRAP_FUNCTIONS
88
+
89
+ namespace internal {
90
+
91
+ int check_status(const Status& status) { return ::pyarrow_internal_check_status(status); }
92
+
93
+ PyObject* convert_status(const Status& status) {
94
+ DCHECK(!status.ok());
95
+ return ::pyarrow_internal_convert_status(status);
96
+ }
97
+
98
+ } // namespace internal
99
+ } // namespace py
100
+ } // namespace arrow
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow.h ADDED
@@ -0,0 +1,89 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include "arrow/python/platform.h"
21
+
22
+ #include <memory>
23
+
24
+ #include "arrow/python/visibility.h"
25
+
26
+ #include "arrow/sparse_tensor.h"
27
+
28
+ // Work around ARROW-2317 (C linkage warning from Cython)
29
+ extern "C++" {
30
+
31
+ namespace arrow {
32
+
33
+ class Array;
34
+ class Buffer;
35
+ class DataType;
36
+ class Field;
37
+ class RecordBatch;
38
+ class Schema;
39
+ class Status;
40
+ class Table;
41
+ class Tensor;
42
+
43
+ namespace py {
44
+
45
+ // Returns 0 on success, -1 on error.
46
+ ARROW_PYTHON_EXPORT int import_pyarrow();
47
+
48
+ #define DECLARE_WRAP_FUNCTIONS(FUNC_SUFFIX, TYPE_NAME) \
49
+ ARROW_PYTHON_EXPORT bool is_##FUNC_SUFFIX(PyObject*); \
50
+ ARROW_PYTHON_EXPORT Result<std::shared_ptr<TYPE_NAME>> unwrap_##FUNC_SUFFIX( \
51
+ PyObject*); \
52
+ ARROW_PYTHON_EXPORT PyObject* wrap_##FUNC_SUFFIX(const std::shared_ptr<TYPE_NAME>&);
53
+
54
+ DECLARE_WRAP_FUNCTIONS(buffer, Buffer)
55
+
56
+ DECLARE_WRAP_FUNCTIONS(data_type, DataType)
57
+ DECLARE_WRAP_FUNCTIONS(field, Field)
58
+ DECLARE_WRAP_FUNCTIONS(schema, Schema)
59
+
60
+ DECLARE_WRAP_FUNCTIONS(scalar, Scalar)
61
+
62
+ DECLARE_WRAP_FUNCTIONS(array, Array)
63
+ DECLARE_WRAP_FUNCTIONS(chunked_array, ChunkedArray)
64
+
65
+ DECLARE_WRAP_FUNCTIONS(sparse_coo_tensor, SparseCOOTensor)
66
+ DECLARE_WRAP_FUNCTIONS(sparse_csc_matrix, SparseCSCMatrix)
67
+ DECLARE_WRAP_FUNCTIONS(sparse_csf_tensor, SparseCSFTensor)
68
+ DECLARE_WRAP_FUNCTIONS(sparse_csr_matrix, SparseCSRMatrix)
69
+ DECLARE_WRAP_FUNCTIONS(tensor, Tensor)
70
+
71
+ DECLARE_WRAP_FUNCTIONS(batch, RecordBatch)
72
+ DECLARE_WRAP_FUNCTIONS(table, Table)
73
+
74
+ #undef DECLARE_WRAP_FUNCTIONS
75
+
76
+ namespace internal {
77
+
78
+ // If status is ok, return 0.
79
+ // If status is not ok, set Python error indicator and return -1.
80
+ ARROW_PYTHON_EXPORT int check_status(const Status& status);
81
+
82
+ // Convert status to a Python exception object. Status must not be ok.
83
+ ARROW_PYTHON_EXPORT PyObject* convert_status(const Status& status);
84
+
85
+ } // namespace internal
86
+ } // namespace py
87
+ } // namespace arrow
88
+
89
+ } // extern "C++"
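
These declarations are the C++ half of the embedding API: import_pyarrow() loads pyarrow.lib, after which the is_*/wrap_*/unwrap_* helpers move Arrow objects across the language boundary without copying. A minimal sketch follows; ArrayFromPython is a hypothetical helper, error handling is abbreviated, and real code would call import_pyarrow() once during startup rather than on every call.

    #include <memory>

    #include "arrow/array.h"
    #include "arrow/result.h"
    #include "arrow/status.h"
    #include "arrow/python/pyarrow.h"

    // Hypothetical helper: borrow a pyarrow.Array and unwrap it to arrow::Array.
    arrow::Result<std::shared_ptr<arrow::Array>> ArrayFromPython(PyObject* obj) {
      if (arrow::py::import_pyarrow() != 0) {
        return arrow::Status::Invalid("pyarrow failed to import");
      }
      if (!arrow::py::is_array(obj)) {
        return arrow::Status::TypeError("expected a pyarrow.Array");
      }
      return arrow::py::unwrap_array(obj);
    }
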
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow_api.h ADDED
@@ -0,0 +1,19 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // For backward compatibility.
19
+ #include "arrow/python/lib_api.h"
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/pyarrow_lib.h ADDED
@@ -0,0 +1,19 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ // For backward compatibility.
19
+ #include "arrow/python/lib.h"
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/python_test.cc ADDED
@@ -0,0 +1,895 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #include <memory>
19
+ #include <optional>
20
+ #include <sstream>
21
+ #include <string>
22
+
23
+ #include "platform.h"
24
+
25
+ #include "arrow/array.h"
26
+ #include "arrow/array/builder_binary.h"
27
+ #include "arrow/table.h"
28
+ #include "arrow/util/decimal.h"
29
+ #include "arrow/util/logging.h"
30
+
31
+ #include "arrow/python/arrow_to_pandas.h"
32
+ #include "arrow/python/decimal.h"
33
+ #include "arrow/python/helpers.h"
34
+ #include "arrow/python/numpy_convert.h"
35
+ #include "arrow/python/numpy_interop.h"
36
+ #include "arrow/python/python_test.h"
37
+ #include "arrow/python/python_to_arrow.h"
38
+
39
+ #define ASSERT_EQ(x, y) \
40
+ { \
41
+ auto&& _left = (x); \
42
+ auto&& _right = (y); \
43
+ if (_left != _right) { \
44
+ return Status::Invalid("Expected equality between `", #x, "` and `", #y, \
45
+ "`, but ", arrow::py::testing::ToString(_left), \
46
+ " != ", arrow::py::testing::ToString(_right)); \
47
+ } \
48
+ }
49
+
50
+ #define ASSERT_NE(x, y) \
51
+ { \
52
+ auto&& _left = (x); \
53
+ auto&& _right = (y); \
54
+ if (_left == _right) { \
55
+ return Status::Invalid("Expected inequality between `", #x, "` and `", #y, \
56
+ "`, but ", arrow::py::testing::ToString(_left), \
57
+ " == ", arrow::py::testing::ToString(_right)); \
58
+ } \
59
+ }
60
+
61
+ #define ASSERT_FALSE(v) \
62
+ { \
63
+ auto&& _v = (v); \
64
+ if (!!_v) { \
65
+ return Status::Invalid("Expected `", #v, "` to evaluate to false, but got ", \
66
+ arrow::py::testing::ToString(_v)); \
67
+ } \
68
+ }
69
+
70
+ #define ASSERT_TRUE(v) \
71
+ { \
72
+ auto&& _v = (v); \
73
+ if (!_v) { \
74
+ return Status::Invalid("Expected `", #v, "` to evaluate to true, but got ", \
75
+ arrow::py::testing::ToString(_v)); \
76
+ } \
77
+ }
78
+
79
+ #define ASSERT_FALSE_MSG(v, msg) \
80
+ { \
81
+ auto&& _v = (v); \
82
+ if (!!_v) { \
83
+ return Status::Invalid("Expected `", #v, "` to evaluate to false, but got ", \
84
+ arrow::py::testing::ToString(_v), ": ", msg); \
85
+ } \
86
+ }
87
+
88
+ #define ASSERT_TRUE_MSG(v, msg) \
89
+ { \
90
+ auto&& _v = (v); \
91
+ if (!_v) { \
92
+ return Status::Invalid("Expected `", #v, "` to evaluate to true, but got ", \
93
+ arrow::py::testing::ToString(_v), ": ", msg); \
94
+ } \
95
+ }
96
+
97
+ #define ASSERT_OK(expr) \
98
+ { \
99
+ for (::arrow::Status _st = ::arrow::internal::GenericToStatus((expr)); !_st.ok();) \
100
+ return Status::Invalid("`", #expr, "` failed with ", _st.ToString()); \
101
+ }
102
+
103
+ #define ASSERT_RAISES(code, expr) \
104
+ { \
105
+ for (::arrow::Status _st_expr = ::arrow::internal::GenericToStatus((expr)); \
106
+ !_st_expr.Is##code();) \
107
+ return Status::Invalid("Expected `", #expr, "` to fail with ", #code, \
108
+ ", but got ", _st_expr.ToString()); \
109
+ }
110
+
111
+ namespace arrow {
112
+
113
+ using internal::checked_cast;
114
+
115
+ namespace py {
116
+ namespace testing {
117
+
118
+ // ARROW-17938: Some standard libraries have ambiguous operator<<(nullptr_t),
119
+ // work around it using a custom printer function.
120
+
121
+ template <typename T>
122
+ std::string ToString(const T& t) {
123
+ std::stringstream ss;
124
+ ss << t;
125
+ return ss.str();
126
+ }
127
+
128
+ template <>
129
+ std::string ToString(const std::nullptr_t&) {
130
+ return "nullptr";
131
+ }
132
+
133
+ namespace {
134
+
135
+ Status TestOwnedRefMoves() {
136
+ std::vector<OwnedRef> vec;
137
+ PyObject *u, *v;
138
+ u = PyList_New(0);
139
+ v = PyList_New(0);
140
+
141
+ {
142
+ OwnedRef ref(u);
143
+ vec.push_back(std::move(ref));
144
+ ASSERT_EQ(ref.obj(), nullptr);
145
+ }
146
+ vec.emplace_back(v);
147
+ ASSERT_EQ(Py_REFCNT(u), 1);
148
+ ASSERT_EQ(Py_REFCNT(v), 1);
149
+ return Status::OK();
150
+ }
151
+
152
+ Status TestOwnedRefNoGILMoves() {
153
+ PyAcquireGIL lock;
154
+ lock.release();
155
+
156
+ {
157
+ std::vector<OwnedRef> vec;
158
+ PyObject *u, *v;
159
+ {
160
+ lock.acquire();
161
+ u = PyList_New(0);
162
+ v = PyList_New(0);
163
+ lock.release();
164
+ }
165
+ {
166
+ OwnedRefNoGIL ref(u);
167
+ vec.push_back(std::move(ref));
168
+ ASSERT_EQ(ref.obj(), nullptr);
169
+ }
170
+ vec.emplace_back(v);
171
+ ASSERT_EQ(Py_REFCNT(u), 1);
172
+ ASSERT_EQ(Py_REFCNT(v), 1);
173
+ return Status::OK();
174
+ }
175
+ }
176
+
177
+ std::string FormatPythonException(const std::string& exc_class_name,
178
+ const std::string& exc_value) {
179
+ std::stringstream ss;
180
+ ss << "Python exception: ";
181
+ ss << exc_class_name;
182
+ ss << ": ";
183
+ ss << exc_value;
184
+ ss << "\n";
185
+ return ss.str();
186
+ }
187
+
188
+ Status TestCheckPyErrorStatus() {
189
+ Status st;
190
+ std::string expected_detail = "";
191
+
192
+ auto check_error = [](Status& st, const char* expected_message = "some error",
193
+ std::string expected_detail = "") {
194
+ st = CheckPyError();
195
+ ASSERT_EQ(st.message(), expected_message);
196
+ ASSERT_FALSE(PyErr_Occurred());
197
+ if (expected_detail.size() > 0) {
198
+ auto detail = st.detail();
199
+ ASSERT_NE(detail, nullptr);
200
+ ASSERT_EQ(detail->ToString(), expected_detail);
201
+ }
202
+ return Status::OK();
203
+ };
204
+
205
+ for (PyObject* exc_type : {PyExc_Exception, PyExc_SyntaxError}) {
206
+ PyErr_SetString(exc_type, "some error");
207
+ ASSERT_OK(check_error(st));
208
+ ASSERT_TRUE(st.IsUnknownError());
209
+ }
210
+
211
+ PyErr_SetString(PyExc_TypeError, "some error");
212
+ ASSERT_OK(
213
+ check_error(st, "some error", FormatPythonException("TypeError", "some error")));
214
+ ASSERT_TRUE(st.IsTypeError());
215
+
216
+ PyErr_SetString(PyExc_ValueError, "some error");
217
+ ASSERT_OK(check_error(st));
218
+ ASSERT_TRUE(st.IsInvalid());
219
+
220
+ PyErr_SetString(PyExc_KeyError, "some error");
221
+ ASSERT_OK(check_error(st, "'some error'"));
222
+ ASSERT_TRUE(st.IsKeyError());
223
+
224
+ for (PyObject* exc_type : {PyExc_OSError, PyExc_IOError}) {
225
+ PyErr_SetString(exc_type, "some error");
226
+ ASSERT_OK(check_error(st));
227
+ ASSERT_TRUE(st.IsIOError());
228
+ }
229
+
230
+ PyErr_SetString(PyExc_NotImplementedError, "some error");
231
+ ASSERT_OK(check_error(st, "some error",
232
+ FormatPythonException("NotImplementedError", "some error")));
233
+ ASSERT_TRUE(st.IsNotImplemented());
234
+
235
+ // No override if a specific status code is given
236
+ PyErr_SetString(PyExc_TypeError, "some error");
237
+ st = CheckPyError(StatusCode::SerializationError);
238
+ ASSERT_TRUE(st.IsSerializationError());
239
+ ASSERT_EQ(st.message(), "some error");
240
+ ASSERT_FALSE(PyErr_Occurred());
241
+
242
+ return Status::OK();
243
+ }
244
+
245
+ Status TestCheckPyErrorStatusNoGIL() {
246
+ PyAcquireGIL lock;
247
+ {
248
+ Status st;
249
+ PyErr_SetString(PyExc_ZeroDivisionError, "zzzt");
250
+ st = ConvertPyError();
251
+ ASSERT_FALSE(PyErr_Occurred());
252
+ lock.release();
253
+ ASSERT_TRUE(st.IsUnknownError());
254
+ ASSERT_EQ(st.message(), "zzzt");
255
+ ASSERT_EQ(st.detail()->ToString(),
256
+ FormatPythonException("ZeroDivisionError", "zzzt"));
257
+ return Status::OK();
258
+ }
259
+ }
260
+
261
+ Status TestRestorePyErrorBasics() {
262
+ PyErr_SetString(PyExc_ZeroDivisionError, "zzzt");
263
+ auto st = ConvertPyError();
264
+ ASSERT_FALSE(PyErr_Occurred());
265
+ ASSERT_TRUE(st.IsUnknownError());
266
+ ASSERT_EQ(st.message(), "zzzt");
267
+ ASSERT_EQ(st.detail()->ToString(), FormatPythonException("ZeroDivisionError", "zzzt"));
268
+
269
+ RestorePyError(st);
270
+ ASSERT_TRUE(PyErr_Occurred());
271
+ PyObject* exc_type;
272
+ PyObject* exc_value;
273
+ PyObject* exc_traceback;
274
+ PyErr_Fetch(&exc_type, &exc_value, &exc_traceback);
275
+ ASSERT_TRUE(PyErr_GivenExceptionMatches(exc_type, PyExc_ZeroDivisionError));
276
+ std::string py_message;
277
+ ASSERT_OK(internal::PyObject_StdStringStr(exc_value, &py_message));
278
+ ASSERT_EQ(py_message, "zzzt");
279
+
280
+ return Status::OK();
281
+ }
282
+
283
+ Status TestPyBufferInvalidInputObject() {
284
+ std::shared_ptr<Buffer> res;
285
+ PyObject* input = Py_None;
286
+ auto old_refcnt = Py_REFCNT(input);
287
+ {
288
+ Status st = PyBuffer::FromPyObject(input).status();
289
+ ASSERT_TRUE_MSG(IsPyError(st), st.ToString());
290
+ ASSERT_FALSE(PyErr_Occurred());
291
+ }
292
+ ASSERT_EQ(old_refcnt, Py_REFCNT(input));
293
+ return Status::OK();
294
+ }
295
+
296
+ // Because of how it is declared, the Numpy C API instance initialized
297
+ // within libarrow_python.dll may not be visible in this test under Windows
298
+ // ("unresolved external symbol arrow_ARRAY_API referenced").
299
+ #ifndef _WIN32
300
+ Status TestPyBufferNumpyArray() {
301
+ npy_intp dims[1] = {10};
302
+
303
+ OwnedRef arr_ref(PyArray_SimpleNew(1, dims, NPY_FLOAT));
304
+ PyObject* arr = arr_ref.obj();
305
+ ASSERT_NE(arr, nullptr);
306
+ auto old_refcnt = Py_REFCNT(arr);
307
+ auto buf = std::move(PyBuffer::FromPyObject(arr)).ValueOrDie();
308
+
309
+ ASSERT_TRUE(buf->is_cpu());
310
+ ASSERT_EQ(buf->data(), PyArray_DATA(reinterpret_cast<PyArrayObject*>(arr)));
311
+ ASSERT_TRUE(buf->is_mutable());
312
+ ASSERT_EQ(buf->mutable_data(), buf->data());
313
+ ASSERT_EQ(old_refcnt + 1, Py_REFCNT(arr));
314
+ buf.reset();
315
+ ASSERT_EQ(old_refcnt, Py_REFCNT(arr));
316
+
317
+ // Read-only
318
+ PyArray_CLEARFLAGS(reinterpret_cast<PyArrayObject*>(arr), NPY_ARRAY_WRITEABLE);
319
+ buf = std::move(PyBuffer::FromPyObject(arr)).ValueOrDie();
320
+ ASSERT_TRUE(buf->is_cpu());
321
+ ASSERT_EQ(buf->data(), PyArray_DATA(reinterpret_cast<PyArrayObject*>(arr)));
322
+ ASSERT_FALSE(buf->is_mutable());
323
+ ASSERT_EQ(old_refcnt + 1, Py_REFCNT(arr));
324
+ buf.reset();
325
+ ASSERT_EQ(old_refcnt, Py_REFCNT(arr));
326
+
327
+ return Status::OK();
328
+ }
329
+
330
+ Status TestNumPyBufferNumpyArray() {
331
+ npy_intp dims[1] = {10};
332
+
333
+ OwnedRef arr_ref(PyArray_SimpleNew(1, dims, NPY_FLOAT));
334
+ PyObject* arr = arr_ref.obj();
335
+ ASSERT_NE(arr, nullptr);
336
+ auto old_refcnt = Py_REFCNT(arr);
337
+
338
+ auto buf = std::make_shared<NumPyBuffer>(arr);
339
+ ASSERT_TRUE(buf->is_cpu());
340
+ ASSERT_EQ(buf->data(), PyArray_DATA(reinterpret_cast<PyArrayObject*>(arr)));
341
+ ASSERT_TRUE(buf->is_mutable());
342
+ ASSERT_EQ(buf->mutable_data(), buf->data());
343
+ ASSERT_EQ(old_refcnt + 1, Py_REFCNT(arr));
344
+ buf.reset();
345
+ ASSERT_EQ(old_refcnt, Py_REFCNT(arr));
346
+
347
+ // Read-only
348
+ PyArray_CLEARFLAGS(reinterpret_cast<PyArrayObject*>(arr), NPY_ARRAY_WRITEABLE);
349
+ buf = std::make_shared<NumPyBuffer>(arr);
350
+ ASSERT_TRUE(buf->is_cpu());
351
+ ASSERT_EQ(buf->data(), PyArray_DATA(reinterpret_cast<PyArrayObject*>(arr)));
352
+ ASSERT_FALSE(buf->is_mutable());
353
+ ASSERT_EQ(old_refcnt + 1, Py_REFCNT(arr));
354
+ buf.reset();
355
+ ASSERT_EQ(old_refcnt, Py_REFCNT(arr));
356
+
357
+ return Status::OK();
358
+ }
359
+ #endif
360
+
361
+ Status TestPythonDecimalToString() {
362
+ OwnedRef decimal_constructor_;
363
+ OwnedRef decimal_module;
364
+
365
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
366
+ RETURN_NOT_OK(
367
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
368
+
369
+ std::string decimal_string("-39402950693754869342983");
370
+ PyObject* python_object =
371
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
372
+ ASSERT_NE(python_object, nullptr);
373
+
374
+ std::string string_result;
375
+ ASSERT_OK(internal::PythonDecimalToString(python_object, &string_result));
376
+
377
+ return Status::OK();
378
+ }
379
+
380
+ Status TestInferPrecisionAndScale() {
381
+ OwnedRef decimal_constructor_;
382
+ OwnedRef decimal_module;
383
+
384
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
385
+ RETURN_NOT_OK(
386
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
387
+
388
+ std::string decimal_string("-394029506937548693.42983");
389
+ PyObject* python_decimal =
390
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
391
+
392
+ internal::DecimalMetadata metadata;
393
+ ASSERT_OK(metadata.Update(python_decimal));
394
+
395
+ const auto expected_precision =
396
+ static_cast<int32_t>(decimal_string.size() - 2); // 1 for -, 1 for .
397
+ const int32_t expected_scale = 5;
398
+
399
+ ASSERT_EQ(expected_precision, metadata.precision());
400
+ ASSERT_EQ(expected_scale, metadata.scale());
401
+
402
+ return Status::OK();
403
+ }
404
+
405
+ Status TestInferPrecisionAndNegativeScale() {
406
+ OwnedRef decimal_constructor_;
407
+ OwnedRef decimal_module;
408
+
409
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
410
+ RETURN_NOT_OK(
411
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
412
+
413
+ std::string decimal_string("-3.94042983E+10");
414
+ PyObject* python_decimal =
415
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
416
+
417
+ internal::DecimalMetadata metadata;
418
+ ASSERT_OK(metadata.Update(python_decimal));
419
+
420
+ const auto expected_precision = 11;
421
+ const int32_t expected_scale = 0;
422
+
423
+ ASSERT_EQ(expected_precision, metadata.precision());
424
+ ASSERT_EQ(expected_scale, metadata.scale());
425
+
426
+ return Status::OK();
427
+ }
428
+
429
+ Status TestInferAllLeadingZeros() {
430
+ OwnedRef decimal_constructor_;
431
+ OwnedRef decimal_module;
432
+
433
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
434
+ RETURN_NOT_OK(
435
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
436
+
437
+ std::string decimal_string("0.001");
438
+ PyObject* python_decimal =
439
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
440
+
441
+ internal::DecimalMetadata metadata;
442
+ ASSERT_OK(metadata.Update(python_decimal));
443
+ ASSERT_EQ(3, metadata.precision());
444
+ ASSERT_EQ(3, metadata.scale());
445
+
446
+ return Status::OK();
447
+ }
448
+
449
+ Status TestInferAllLeadingZerosExponentialNotationPositive() {
450
+ OwnedRef decimal_constructor_;
451
+ OwnedRef decimal_module;
452
+
453
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
454
+ RETURN_NOT_OK(
455
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
456
+
457
+ std::string decimal_string("0.01E5");
458
+ PyObject* python_decimal =
459
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
460
+
461
+ internal::DecimalMetadata metadata;
462
+ ASSERT_OK(metadata.Update(python_decimal));
463
+ ASSERT_EQ(4, metadata.precision());
464
+ ASSERT_EQ(0, metadata.scale());
465
+
466
+ return Status::OK();
467
+ }
468
+
469
+ Status TestInferAllLeadingZerosExponentialNotationNegative() {
470
+ OwnedRef decimal_constructor_;
471
+ OwnedRef decimal_module;
472
+
473
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
474
+ RETURN_NOT_OK(
475
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
476
+
477
+ std::string decimal_string("0.01E3");
478
+ PyObject* python_decimal =
479
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
480
+ internal::DecimalMetadata metadata;
481
+ ASSERT_OK(metadata.Update(python_decimal));
482
+ ASSERT_EQ(2, metadata.precision());
483
+ ASSERT_EQ(0, metadata.scale());
484
+
485
+ return Status::OK();
486
+ }
487
+
488
+ Status TestObjectBlockWriteFails() {
489
+ StringBuilder builder;
490
+ const char value[] = {'\xf1', '\0'};
491
+
492
+ for (int i = 0; i < 1000; ++i) {
493
+ ASSERT_OK(builder.Append(value, static_cast<int32_t>(strlen(value))));
494
+ }
495
+
496
+ std::shared_ptr<Array> arr;
497
+ ASSERT_OK(builder.Finish(&arr));
498
+
499
+ auto f1 = field("f1", utf8());
500
+ auto f2 = field("f2", utf8());
501
+ auto f3 = field("f3", utf8());
502
+ std::vector<std::shared_ptr<Field>> fields = {f1, f2, f3};
503
+ std::vector<std::shared_ptr<Array>> cols = {arr, arr, arr};
504
+
505
+ auto schema = ::arrow::schema(fields);
506
+ auto table = Table::Make(schema, cols);
507
+
508
+ Status st;
509
+ Py_BEGIN_ALLOW_THREADS;
510
+ PyObject* out;
511
+ PandasOptions options;
512
+ options.use_threads = true;
513
+ st = ConvertTableToPandas(options, table, &out);
514
+ Py_END_ALLOW_THREADS;
515
+ ASSERT_RAISES(UnknownError, st);
516
+
517
+ return Status::OK();
518
+ }
519
+
520
+ Status TestMixedTypeFails() {
521
+ OwnedRef list_ref(PyList_New(3));
522
+ PyObject* list = list_ref.obj();
523
+
524
+ ASSERT_NE(list, nullptr);
525
+
526
+ PyObject* str = PyUnicode_FromString("abc");
527
+ ASSERT_NE(str, nullptr);
528
+
529
+ PyObject* integer = PyLong_FromLong(1234L);
530
+ ASSERT_NE(integer, nullptr);
531
+
532
+ PyObject* doub = PyFloat_FromDouble(123.0234);
533
+ ASSERT_NE(doub, nullptr);
534
+
535
+ // This steals a reference to each object, so we don't need to decref them later,
536
+ // just the list
537
+ ASSERT_EQ(PyList_SetItem(list, 0, str), 0);
538
+ ASSERT_EQ(PyList_SetItem(list, 1, integer), 0);
539
+ ASSERT_EQ(PyList_SetItem(list, 2, doub), 0);
540
+
541
+ ASSERT_RAISES(TypeError, ConvertPySequence(list, nullptr, {}));
542
+
543
+ return Status::OK();
544
+ }
545
+
546
+ template <typename DecimalValue>
547
+ Status DecimalTestFromPythonDecimalRescale(std::shared_ptr<DataType> type,
548
+ PyObject* python_decimal,
549
+ std::optional<int> expected) {
550
+ DecimalValue value;
551
+ const auto& decimal_type = checked_cast<const DecimalType&>(*type);
552
+
553
+ if (expected.has_value()) {
554
+ ASSERT_OK(internal::DecimalFromPythonDecimal(python_decimal, decimal_type, &value));
555
+ ASSERT_EQ(expected.value(), value);
556
+
557
+ ASSERT_OK(internal::DecimalFromPyObject(python_decimal, decimal_type, &value));
558
+ ASSERT_EQ(expected.value(), value);
559
+ } else {
560
+ ASSERT_RAISES(Invalid, internal::DecimalFromPythonDecimal(python_decimal,
561
+ decimal_type, &value));
562
+ ASSERT_RAISES(Invalid,
563
+ internal::DecimalFromPyObject(python_decimal, decimal_type, &value));
564
+ }
565
+ return Status::OK();
566
+ }
567
+
568
+ Status TestFromPythonDecimalRescaleNotTruncateable() {
569
+ OwnedRef decimal_constructor_;
570
+ OwnedRef decimal_module;
571
+
572
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
573
+ RETURN_NOT_OK(
574
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
575
+
576
+ std::string decimal_string("1.001");
577
+ PyObject* python_decimal =
578
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
579
+ // We fail when truncating values that would lose data if cast to a decimal type with
580
+ // lower scale
581
+ ASSERT_OK(DecimalTestFromPythonDecimalRescale<Decimal128>(::arrow::decimal128(10, 2),
582
+ python_decimal, {}));
583
+ ASSERT_OK(DecimalTestFromPythonDecimalRescale<Decimal256>(::arrow::decimal256(10, 2),
584
+ python_decimal, {}));
585
+
586
+ return Status::OK();
587
+ }
588
+
589
+ Status TestFromPythonDecimalRescaleTruncateable() {
590
+ OwnedRef decimal_constructor_;
591
+ OwnedRef decimal_module;
592
+
593
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
594
+ RETURN_NOT_OK(
595
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
596
+
597
+ std::string decimal_string("1.000");
598
+ PyObject* python_decimal =
599
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
600
+ // We allow truncation of values that do not lose precision when dividing by 10 to
601
+ // the power of the difference between the scales, e.g., 1.000 -> 1.00
602
+ ASSERT_OK(DecimalTestFromPythonDecimalRescale<Decimal128>(::arrow::decimal128(10, 2),
603
+ python_decimal, 100));
604
+ ASSERT_OK(DecimalTestFromPythonDecimalRescale<Decimal256>(::arrow::decimal256(10, 2),
605
+ python_decimal, 100));
606
+
607
+ return Status::OK();
608
+ }
609
+
610
+ Status TestFromPythonNegativeDecimalRescale() {
611
+ OwnedRef decimal_constructor_;
612
+ OwnedRef decimal_module;
613
+
614
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
615
+ RETURN_NOT_OK(
616
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
617
+
618
+ std::string decimal_string("-1.000");
619
+ PyObject* python_decimal =
620
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
621
+ ASSERT_OK(DecimalTestFromPythonDecimalRescale<Decimal128>(::arrow::decimal128(10, 9),
622
+ python_decimal, -1000000000));
623
+ ASSERT_OK(DecimalTestFromPythonDecimalRescale<Decimal256>(::arrow::decimal256(10, 9),
624
+ python_decimal, -1000000000));
625
+
626
+ return Status::OK();
627
+ }
628
+
629
+ Status TestDecimal128FromPythonInteger() {
630
+ Decimal128 value;
631
+ OwnedRef python_long(PyLong_FromLong(42));
632
+ auto type = ::arrow::decimal128(10, 2);
633
+ const auto& decimal_type = checked_cast<const DecimalType&>(*type);
634
+ ASSERT_OK(internal::DecimalFromPyObject(python_long.obj(), decimal_type, &value));
635
+ ASSERT_EQ(4200, value);
636
+ return Status::OK();
637
+ }
638
+
639
+ Status TestDecimal256FromPythonInteger() {
640
+ Decimal256 value;
641
+ OwnedRef python_long(PyLong_FromLong(42));
642
+ auto type = ::arrow::decimal256(10, 2);
643
+ const auto& decimal_type = checked_cast<const DecimalType&>(*type);
644
+ ASSERT_OK(internal::DecimalFromPyObject(python_long.obj(), decimal_type, &value));
645
+ ASSERT_EQ(4200, value);
646
+ return Status::OK();
647
+ }
648
+
649
+ Status TestDecimal128OverflowFails() {
650
+ Decimal128 value;
651
+ OwnedRef decimal_constructor_;
652
+ OwnedRef decimal_module;
653
+
654
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
655
+ RETURN_NOT_OK(
656
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
657
+
658
+ std::string decimal_string("9999999999999999999999999999999999999.9");
659
+ PyObject* python_decimal =
660
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
661
+ internal::DecimalMetadata metadata;
662
+ ASSERT_OK(metadata.Update(python_decimal));
663
+ ASSERT_EQ(38, metadata.precision());
664
+ ASSERT_EQ(1, metadata.scale());
665
+
666
+ auto type = ::arrow::decimal(38, 38);
667
+ const auto& decimal_type = checked_cast<const DecimalType&>(*type);
668
+ ASSERT_RAISES(Invalid,
669
+ internal::DecimalFromPythonDecimal(python_decimal, decimal_type, &value));
670
+ return Status::OK();
671
+ }
672
+
673
+ Status TestDecimal256OverflowFails() {
674
+ Decimal256 value;
675
+ OwnedRef decimal_constructor_;
676
+ OwnedRef decimal_module;
677
+
678
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
679
+ RETURN_NOT_OK(
680
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
681
+
682
+ std::string decimal_string(
683
+ "999999999999999999999999999999999999999999999999999999999999999999999999999.9");
684
+ PyObject* python_decimal =
685
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
686
+
687
+ internal::DecimalMetadata metadata;
688
+ ASSERT_OK(metadata.Update(python_decimal));
689
+ ASSERT_EQ(76, metadata.precision());
690
+ ASSERT_EQ(1, metadata.scale());
691
+
692
+ auto type = ::arrow::decimal(76, 76);
693
+ const auto& decimal_type = checked_cast<const DecimalType&>(*type);
694
+ ASSERT_RAISES(Invalid,
695
+ internal::DecimalFromPythonDecimal(python_decimal, decimal_type, &value));
696
+ return Status::OK();
697
+ }
698
+
699
+ Status TestNoneAndNaN() {
700
+ OwnedRef list_ref(PyList_New(4));
701
+ PyObject* list = list_ref.obj();
702
+
703
+ ASSERT_NE(list, nullptr);
704
+
705
+ OwnedRef decimal_constructor_;
706
+ OwnedRef decimal_module;
707
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
708
+ RETURN_NOT_OK(
709
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
710
+ PyObject* constructor = decimal_constructor_.obj();
711
+ PyObject* decimal_value = internal::DecimalFromString(constructor, "1.234");
712
+ ASSERT_NE(decimal_value, nullptr);
713
+
714
+ Py_INCREF(Py_None);
715
+ PyObject* missing_value1 = Py_None;
716
+ ASSERT_NE(missing_value1, nullptr);
717
+
718
+ PyObject* missing_value2 = PyFloat_FromDouble(NPY_NAN);
719
+ ASSERT_NE(missing_value2, nullptr);
720
+
721
+ PyObject* missing_value3 = internal::DecimalFromString(constructor, "nan");
722
+ ASSERT_NE(missing_value3, nullptr);
723
+
724
+ // This steals a reference to each object, so we don't need to decref them later,
725
+ // just the list
726
+ ASSERT_EQ(0, PyList_SetItem(list, 0, decimal_value));
727
+ ASSERT_EQ(0, PyList_SetItem(list, 1, missing_value1));
728
+ ASSERT_EQ(0, PyList_SetItem(list, 2, missing_value2));
729
+ ASSERT_EQ(0, PyList_SetItem(list, 3, missing_value3));
730
+
731
+ PyConversionOptions options;
732
+ ASSERT_RAISES(TypeError, ConvertPySequence(list, nullptr, options));
733
+
734
+ options.from_pandas = true;
735
+ auto chunked = std::move(ConvertPySequence(list, nullptr, options)).ValueOrDie();
736
+ ASSERT_EQ(chunked->num_chunks(), 1);
737
+
738
+ auto arr = chunked->chunk(0);
739
+ ASSERT_TRUE(arr->IsValid(0));
740
+ ASSERT_TRUE(arr->IsNull(1));
741
+ ASSERT_TRUE(arr->IsNull(2));
742
+ ASSERT_TRUE(arr->IsNull(3));
743
+
744
+ return Status::OK();
745
+ }
746
+
747
+ Status TestMixedPrecisionAndScale() {
748
+ std::vector<std::string> strings{{"0.001", "1.01E5", "1.01E5"}};
749
+
750
+ OwnedRef list_ref(PyList_New(static_cast<Py_ssize_t>(strings.size())));
751
+ PyObject* list = list_ref.obj();
752
+
753
+ ASSERT_NE(list, nullptr);
754
+
755
+ OwnedRef decimal_constructor_;
756
+ OwnedRef decimal_module;
757
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
758
+ RETURN_NOT_OK(
759
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
760
+ // PyList_SetItem steals a reference to the item so we don't decref it later
761
+ PyObject* decimal_constructor = decimal_constructor_.obj();
762
+ for (Py_ssize_t i = 0; i < static_cast<Py_ssize_t>(strings.size()); ++i) {
763
+ const int result = PyList_SetItem(
764
+ list, i, internal::DecimalFromString(decimal_constructor, strings.at(i)));
765
+ ASSERT_EQ(0, result);
766
+ }
767
+
768
+ auto arr = std::move(ConvertPySequence(list, nullptr, {})).ValueOrDie();
769
+ const auto& type = checked_cast<const DecimalType&>(*arr->type());
770
+
771
+ int32_t expected_precision = 9;
772
+ int32_t expected_scale = 3;
773
+ ASSERT_EQ(expected_precision, type.precision());
774
+ ASSERT_EQ(expected_scale, type.scale());
775
+
776
+ return Status::OK();
777
+ }
778
+
779
+ Status TestMixedPrecisionAndScaleSequenceConvert() {
780
+ OwnedRef decimal_constructor_;
781
+ OwnedRef decimal_module;
782
+
783
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
784
+ RETURN_NOT_OK(
785
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
786
+
787
+ std::string decimal_string_1("0.01");
788
+ PyObject* value1 =
789
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string_1);
790
+ ASSERT_NE(value1, nullptr);
791
+
792
+ std::string decimal_string_2("0.001");
793
+ PyObject* value2 =
794
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string_2);
795
+ ASSERT_NE(value2, nullptr);
796
+
797
+ OwnedRef list_ref(PyList_New(2));
798
+ PyObject* list = list_ref.obj();
799
+
800
+ // This steals a reference to each object, so we don't need to decref them later,
801
+ // just the list
802
+ ASSERT_EQ(PyList_SetItem(list, 0, value1), 0);
803
+ ASSERT_EQ(PyList_SetItem(list, 1, value2), 0);
804
+
805
+ auto arr = std::move(ConvertPySequence(list, nullptr, {})).ValueOrDie();
806
+ const auto& type = checked_cast<const Decimal128Type&>(*arr->type());
807
+ ASSERT_EQ(3, type.precision());
808
+ ASSERT_EQ(3, type.scale());
809
+
810
+ return Status::OK();
811
+ }
812
+
813
+ Status TestSimpleInference() {
814
+ OwnedRef decimal_constructor_;
815
+ OwnedRef decimal_module;
816
+
817
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
818
+ RETURN_NOT_OK(
819
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
820
+
821
+ std::string decimal_string("0.01");
822
+ PyObject* value =
823
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
824
+ ASSERT_NE(value, nullptr);
825
+ internal::DecimalMetadata metadata;
826
+ ASSERT_OK(metadata.Update(value));
827
+ ASSERT_EQ(2, metadata.precision());
828
+ ASSERT_EQ(2, metadata.scale());
829
+
830
+ return Status::OK();
831
+ }
832
+
833
+ Status TestUpdateWithNaN() {
834
+ internal::DecimalMetadata metadata;
835
+ OwnedRef decimal_constructor_;
836
+ OwnedRef decimal_module;
837
+ RETURN_NOT_OK(internal::ImportModule("decimal", &decimal_module));
838
+ RETURN_NOT_OK(
839
+ internal::ImportFromModule(decimal_module.obj(), "Decimal", &decimal_constructor_));
840
+ std::string decimal_string("nan");
841
+ PyObject* nan_value =
842
+ internal::DecimalFromString(decimal_constructor_.obj(), decimal_string);
843
+
844
+ ASSERT_OK(metadata.Update(nan_value));
845
+ ASSERT_EQ(std::numeric_limits<int32_t>::min(), metadata.precision());
846
+ ASSERT_EQ(std::numeric_limits<int32_t>::min(), metadata.scale());
847
+
848
+ return Status::OK();
849
+ }
850
+
851
+ } // namespace
852
+
853
+ std::vector<TestCase> GetCppTestCases() {
854
+ return {
855
+ {"test_owned_ref_moves", TestOwnedRefMoves},
856
+ {"test_owned_ref_nogil_moves", TestOwnedRefNoGILMoves},
857
+ {"test_check_pyerror_status", TestCheckPyErrorStatus},
858
+ {"test_check_pyerror_status_nogil", TestCheckPyErrorStatusNoGIL},
859
+ {"test_restore_pyerror_basics", TestRestorePyErrorBasics},
860
+ {"test_pybuffer_invalid_input_object", TestPyBufferInvalidInputObject},
861
+ #ifndef _WIN32
862
+ {"test_pybuffer_numpy_array", TestPyBufferNumpyArray},
863
+ {"test_numpybuffer_numpy_array", TestNumPyBufferNumpyArray},
864
+ #endif
865
+ {"test_python_decimal_to_string", TestPythonDecimalToString},
866
+ {"test_infer_precision_and_scale", TestInferPrecisionAndScale},
867
+ {"test_infer_precision_and_negative_scale", TestInferPrecisionAndNegativeScale},
868
+ {"test_infer_all_leading_zeros", TestInferAllLeadingZeros},
869
+ {"test_infer_all_leading_zeros_exponential_notation_positive",
870
+ TestInferAllLeadingZerosExponentialNotationPositive},
871
+ {"test_infer_all_leading_zeros_exponential_notation_negative",
872
+ TestInferAllLeadingZerosExponentialNotationNegative},
873
+ {"test_object_block_write_fails", TestObjectBlockWriteFails},
874
+ {"test_mixed_type_fails", TestMixedTypeFails},
875
+ {"test_from_python_decimal_rescale_not_truncateable",
876
+ TestFromPythonDecimalRescaleNotTruncateable},
877
+ {"test_from_python_decimal_rescale_truncateable",
878
+ TestFromPythonDecimalRescaleTruncateable},
879
+ {"test_from_python_negative_decimal_rescale", TestFromPythonNegativeDecimalRescale},
880
+ {"test_decimal128_from_python_integer", TestDecimal128FromPythonInteger},
881
+ {"test_decimal256_from_python_integer", TestDecimal256FromPythonInteger},
882
+ {"test_decimal128_overflow_fails", TestDecimal128OverflowFails},
883
+ {"test_decimal256_overflow_fails", TestDecimal256OverflowFails},
884
+ {"test_none_and_nan", TestNoneAndNaN},
885
+ {"test_mixed_precision_and_scale", TestMixedPrecisionAndScale},
886
+ {"test_mixed_precision_and_scale_sequence_convert",
887
+ TestMixedPrecisionAndScaleSequenceConvert},
888
+ {"test_simple_inference", TestSimpleInference},
889
+ {"test_update_with_nan", TestUpdateWithNaN},
890
+ };
891
+ }
892
+
893
+ } // namespace testing
894
+ } // namespace py
895
+ } // namespace arrow
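
The decimal conversion tests above all depend on Arrow inferring a precision and scale from Python decimal.Decimal values. The expected values they assert follow a simple rule based on the decimal's significant digits and exponent (as exposed by Decimal.as_tuple()). The standalone C++ sketch below restates that rule for illustration only; the struct and function names are invented here and this is not Arrow's actual DecimalMetadata implementation.

    // Illustration only: restates the precision/scale inference rule implied by the
    // expected values in the tests above. Not Arrow's actual implementation.
    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    struct InferredDecimalMetadata {  // hypothetical name, for illustration
      int32_t precision;
      int32_t scale;
    };

    // num_digits: count of significant digits; exponent: decimal exponent,
    // both as reported by Python's Decimal.as_tuple().
    InferredDecimalMetadata InferFromDigits(int32_t num_digits, int32_t exponent) {
      if (exponent >= 0) {
        // Whole-number values: scale clamps to 0 and the implied trailing zeros
        // count toward precision, e.g. Decimal("0.01E5") -> precision 4, scale 0.
        return {num_digits + exponent, 0};
      }
      const int32_t scale = -exponent;
      // Fractional values: scale is the number of fractional digits and
      // precision is at least the scale, e.g. Decimal("0.001") -> 3, 3.
      return {std::max(num_digits, scale), scale};
    }

    int main() {
      assert(InferFromDigits(23, -5).precision == 23);  // "-394029506937548693.42983"
      assert(InferFromDigits(9, 2).precision == 11);    // "-3.94042983E+10"
      assert(InferFromDigits(1, -3).scale == 3);        // "0.001"
      return 0;
    }
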
venv/lib/python3.10/site-packages/pyarrow/src/arrow/python/python_test.h ADDED
@@ -0,0 +1,42 @@
1
+ // Licensed to the Apache Software Foundation (ASF) under one
2
+ // or more contributor license agreements. See the NOTICE file
3
+ // distributed with this work for additional information
4
+ // regarding copyright ownership. The ASF licenses this file
5
+ // to you under the Apache License, Version 2.0 (the
6
+ // "License"); you may not use this file except in compliance
7
+ // with the License. You may obtain a copy of the License at
8
+ //
9
+ // http://www.apache.org/licenses/LICENSE-2.0
10
+ //
11
+ // Unless required by applicable law or agreed to in writing,
12
+ // software distributed under the License is distributed on an
13
+ // "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
14
+ // KIND, either express or implied. See the License for the
15
+ // specific language governing permissions and limitations
16
+ // under the License.
17
+
18
+ #pragma once
19
+
20
+ #include <functional>
21
+ #include <string>
22
+ #include <vector>
23
+
24
+ #include "arrow/status.h"
25
+
26
+ #include "arrow/python/visibility.h"
27
+
28
+ namespace arrow {
29
+ namespace py {
30
+ namespace testing {
31
+
32
+ struct TestCase {
33
+ std::string name;
34
+ std::function<Status()> func;
35
+ };
36
+
37
+ ARROW_PYTHON_EXPORT
38
+ std::vector<TestCase> GetCppTestCases();
39
+
40
+ } // namespace testing
41
+ } // namespace py
42
+ } // namespace arrow
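
The added python_test.h header only exposes the test registry: a TestCase pairs a name with a Status-returning callable, and GetCppTestCases() returns every case defined in python_test.cc. How pyarrow actually drives these cases is not part of this diff, so the driver below is only a sketch of the intended usage; it assumes a Python interpreter has already been initialized and that the caller holds the GIL where individual tests require it.

    // Sketch of a harness consuming the exported test registry; illustrative only.
    #include <iostream>

    #include "arrow/python/python_test.h"

    int RunAllCppTestCases() {
      int failures = 0;
      for (const auto& test_case : arrow::py::testing::GetCppTestCases()) {
        // Each case reports failure through arrow::Status rather than aborting.
        arrow::Status st = test_case.func();
        if (!st.ok()) {
          std::cerr << test_case.name << " failed: " << st.ToString() << std::endl;
          ++failures;
        }
      }
      return failures;
    }
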