Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/INSTALLER +1 -0
- llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/LICENSE.rst +30 -0
- llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/METADATA +48 -0
- llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/RECORD +11 -0
- llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/WHEEL +5 -0
- llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/top_level.txt +1 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so.12 +3 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cuda_runtime/lib/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cuda_runtime/lib/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cudalibxt.h +97 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cufft.h +317 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cufftXt.h +268 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cufftw.h +454 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/lib/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/lib/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/cufft/lib/libcufftw.so.11 +3 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/include/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/include/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/include/nvJitLink.h +522 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/lib/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/lib/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__init__.py +15 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/_manylinux.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/_parser.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/_structures.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/markers.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/requirements.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/tags.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/version.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/_elffile.py +108 -0
- llmeval-env/lib/python3.10/site-packages/packaging/_manylinux.py +260 -0
- llmeval-env/lib/python3.10/site-packages/packaging/_musllinux.py +83 -0
- llmeval-env/lib/python3.10/site-packages/packaging/_parser.py +356 -0
- llmeval-env/lib/python3.10/site-packages/packaging/_structures.py +61 -0
- llmeval-env/lib/python3.10/site-packages/packaging/_tokenizer.py +192 -0
- llmeval-env/lib/python3.10/site-packages/packaging/markers.py +252 -0
- llmeval-env/lib/python3.10/site-packages/packaging/metadata.py +825 -0
- llmeval-env/lib/python3.10/site-packages/packaging/py.typed +0 -0
- llmeval-env/lib/python3.10/site-packages/packaging/requirements.py +90 -0
- llmeval-env/lib/python3.10/site-packages/packaging/specifiers.py +1017 -0
- llmeval-env/lib/python3.10/site-packages/packaging/tags.py +571 -0
- llmeval-env/lib/python3.10/site-packages/packaging/utils.py +172 -0
- llmeval-env/lib/python3.10/site-packages/packaging/version.py +563 -0
.gitattributes
CHANGED
@@ -99,3 +99,5 @@ llmeval-env/lib/python3.10/site-packages/nvidia/cuda_cupti/lib/libnvperf_host.so
|
|
99 |
llmeval-env/lib/python3.10/site-packages/scipy.libs/libopenblasp-r0-24bff013.3.26.dev.so filter=lfs diff=lfs merge=lfs -text
|
100 |
llmeval-env/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc-builtins.so.12.1 filter=lfs diff=lfs merge=lfs -text
|
101 |
llmeval-env/lib/python3.10/site-packages/nvidia/cusolver/lib/libcusolverMg.so.11 filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
99 |
llmeval-env/lib/python3.10/site-packages/scipy.libs/libopenblasp-r0-24bff013.3.26.dev.so filter=lfs diff=lfs merge=lfs -text
|
100 |
llmeval-env/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc-builtins.so.12.1 filter=lfs diff=lfs merge=lfs -text
|
101 |
llmeval-env/lib/python3.10/site-packages/nvidia/cusolver/lib/libcusolverMg.so.11 filter=lfs diff=lfs merge=lfs -text
|
102 |
+
llmeval-env/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so.12 filter=lfs diff=lfs merge=lfs -text
|
103 |
+
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/lib/libcufftw.so.11 filter=lfs diff=lfs merge=lfs -text
|
llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/INSTALLER
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
pip
|
llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/LICENSE.rst
ADDED
@@ -0,0 +1,30 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*(This is the OSI approved 3-clause "New BSD License".)*
|
2 |
+
|
3 |
+
Copyright © 2016, wouter bolsterlee
|
4 |
+
|
5 |
+
All rights reserved.
|
6 |
+
|
7 |
+
Redistribution and use in source and binary forms, with or without
|
8 |
+
modification, are permitted provided that the following conditions are met:
|
9 |
+
|
10 |
+
* Redistributions of source code must retain the above copyright notice, this
|
11 |
+
list of conditions and the following disclaimer.
|
12 |
+
|
13 |
+
* Redistributions in binary form must reproduce the above copyright notice, this
|
14 |
+
list of conditions and the following disclaimer in the documentation and/or
|
15 |
+
other materials provided with the distribution.
|
16 |
+
|
17 |
+
* Neither the name of the author nor the names of the contributors may be used
|
18 |
+
to endorse or promote products derived from this software without specific
|
19 |
+
prior written permission.
|
20 |
+
|
21 |
+
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
22 |
+
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
23 |
+
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
24 |
+
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
25 |
+
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
26 |
+
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
27 |
+
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
28 |
+
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
29 |
+
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
30 |
+
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/METADATA
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Metadata-Version: 2.1
|
2 |
+
Name: jsonlines
|
3 |
+
Version: 4.0.0
|
4 |
+
Summary: Library with helpers for the jsonlines file format
|
5 |
+
Home-page: https://github.com/wbolster/jsonlines
|
6 |
+
Author: wouter bolsterlee
|
7 |
+
Author-email: [email protected]
|
8 |
+
License: BSD
|
9 |
+
Classifier: Development Status :: 5 - Production/Stable
|
10 |
+
Classifier: Intended Audience :: Developers
|
11 |
+
Classifier: Intended Audience :: System Administrators
|
12 |
+
Classifier: License :: OSI Approved :: BSD License
|
13 |
+
Classifier: Programming Language :: Python
|
14 |
+
Classifier: Programming Language :: Python :: 3
|
15 |
+
Classifier: Programming Language :: Python :: 3 :: Only
|
16 |
+
Classifier: Topic :: Internet :: Log Analysis
|
17 |
+
Classifier: Topic :: Software Development :: Libraries :: Python Modules
|
18 |
+
Classifier: Topic :: System :: Logging
|
19 |
+
Classifier: Topic :: Utilities
|
20 |
+
Requires-Python: >=3.8
|
21 |
+
License-File: LICENSE.rst
|
22 |
+
Requires-Dist: attrs >=19.2.0
|
23 |
+
|
24 |
+
.. image:: https://pepy.tech/badge/jsonlines
|
25 |
+
:target: https://pepy.tech/project/jsonlines
|
26 |
+
|
27 |
+
.. image:: https://pepy.tech/badge/jsonlines/month
|
28 |
+
:target: https://pepy.tech/project/jsonlines
|
29 |
+
|
30 |
+
.. image:: https://anaconda.org/anaconda/anaconda/badges/installer/conda.svg
|
31 |
+
:target: https://anaconda.org/anaconda/jsonlines
|
32 |
+
|
33 |
+
=========
|
34 |
+
jsonlines
|
35 |
+
=========
|
36 |
+
|
37 |
+
``jsonlines`` is a Python library to simplify working with jsonlines_
|
38 |
+
and ndjson_ data.
|
39 |
+
|
40 |
+
.. _jsonlines: http://jsonlines.org/
|
41 |
+
.. _ndjson: http://ndjson.org/
|
42 |
+
|
43 |
+
* Documentation: https://jsonlines.readthedocs.io/
|
44 |
+
|
45 |
+
* Python Package Index (PyPI): https://pypi.python.org/pypi/jsonlines/
|
46 |
+
|
47 |
+
* Source code and issue tracker: https://github.com/wbolster/jsonlines
|
48 |
+
|
llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/RECORD
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
jsonlines-4.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
|
2 |
+
jsonlines-4.0.0.dist-info/LICENSE.rst,sha256=vKNU5jkrJCH_sfHiNFRcUVQzuSkTYsG7n9EAkiuQ60I,1543
|
3 |
+
jsonlines-4.0.0.dist-info/METADATA,sha256=XDMhu0s_WdlpRSAcseysBZnpSInKa5EEMwyEZ-5ZtHE,1565
|
4 |
+
jsonlines-4.0.0.dist-info/RECORD,,
|
5 |
+
jsonlines-4.0.0.dist-info/WHEEL,sha256=yQN5g4mg4AybRjkgi-9yy4iQEFibGQmlz78Pik5Or-A,92
|
6 |
+
jsonlines-4.0.0.dist-info/top_level.txt,sha256=Y-KWmwRS4_Ci-mje2h6XC6xPeGV191NA6XhnbPot6eE,10
|
7 |
+
jsonlines/__init__.py,sha256=7R6ohpIk95mz93rtkWQLSK1_1UQWUd9ckaVxgFyfhsA,258
|
8 |
+
jsonlines/__pycache__/__init__.cpython-310.pyc,,
|
9 |
+
jsonlines/__pycache__/jsonlines.cpython-310.pyc,,
|
10 |
+
jsonlines/jsonlines.py,sha256=PpLVYlWwGiB4UoTVq2hkdp9oJ0ioSO6gW0k8_-P97-w,19895
|
11 |
+
jsonlines/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/WHEEL
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Wheel-Version: 1.0
|
2 |
+
Generator: bdist_wheel (0.41.2)
|
3 |
+
Root-Is-Purelib: true
|
4 |
+
Tag: py3-none-any
|
5 |
+
|
llmeval-env/lib/python3.10/site-packages/jsonlines-4.0.0.dist-info/top_level.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
jsonlines
|
llmeval-env/lib/python3.10/site-packages/nvidia/cuda_nvrtc/lib/libnvrtc.so.12
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:83ec9ad7775e89f6280286ba11eb9d28cafe49c2f777a3e051bcc881de7449fc
|
3 |
+
size 56875328
|
llmeval-env/lib/python3.10/site-packages/nvidia/cuda_runtime/lib/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/nvidia/cuda_runtime/lib/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (196 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (185 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (193 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cudalibxt.h
ADDED
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* Copyright 2013,2014 NVIDIA Corporation. All rights reserved.
|
2 |
+
*
|
3 |
+
* NOTICE TO LICENSEE:
|
4 |
+
*
|
5 |
+
* The source code and/or documentation ("Licensed Deliverables") are
|
6 |
+
* subject to NVIDIA intellectual property rights under U.S. and
|
7 |
+
* international Copyright laws.
|
8 |
+
*
|
9 |
+
* The Licensed Deliverables contained herein are PROPRIETARY and
|
10 |
+
* CONFIDENTIAL to NVIDIA and are being provided under the terms and
|
11 |
+
* conditions of a form of NVIDIA software license agreement by and
|
12 |
+
* between NVIDIA and Licensee ("License Agreement") or electronically
|
13 |
+
* accepted by Licensee. Notwithstanding any terms or conditions to
|
14 |
+
* the contrary in the License Agreement, reproduction or disclosure
|
15 |
+
* of the Licensed Deliverables to any third party without the express
|
16 |
+
* written consent of NVIDIA is prohibited.
|
17 |
+
*
|
18 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
19 |
+
* LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
|
20 |
+
* SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. THEY ARE
|
21 |
+
* PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
|
22 |
+
* NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
|
23 |
+
* DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
|
24 |
+
* NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
|
25 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
26 |
+
* LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
|
27 |
+
* SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
|
28 |
+
* DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
|
29 |
+
* WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
|
30 |
+
* ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
|
31 |
+
* OF THESE LICENSED DELIVERABLES.
|
32 |
+
*
|
33 |
+
* U.S. Government End Users. These Licensed Deliverables are a
|
34 |
+
* "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
|
35 |
+
* 1995), consisting of "commercial computer software" and "commercial
|
36 |
+
* computer software documentation" as such terms are used in 48
|
37 |
+
* C.F.R. 12.212 (SEPT 1995) and are provided to the U.S. Government
|
38 |
+
* only as a commercial end item. Consistent with 48 C.F.R.12.212 and
|
39 |
+
* 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
|
40 |
+
* U.S. Government End Users acquire the Licensed Deliverables with
|
41 |
+
* only those rights set forth herein.
|
42 |
+
*
|
43 |
+
* Any use of the Licensed Deliverables in individual and commercial
|
44 |
+
* software must include, in the user documentation and internal
|
45 |
+
* comments to the code, the above Disclaimer and U.S. Government End
|
46 |
+
* Users Notice.
|
47 |
+
*/
|
48 |
+
|
49 |
+
/*!
|
50 |
+
* \file cudalibxt.h
|
51 |
+
* \brief Public header file for the NVIDIA library multi-GPU support structures
|
52 |
+
*/
|
53 |
+
|
54 |
+
#ifndef _CUDA_LIB_XT_H_
|
55 |
+
#define _CUDA_LIB_XT_H_
|
56 |
+
#include <cuda_runtime.h>
|
57 |
+
|
58 |
+
#define CUDA_XT_DESCRIPTOR_VERSION 0x01000000 // This is added to CUDART_VERSION
|
59 |
+
|
60 |
+
enum cudaXtCopyType_t {
|
61 |
+
LIB_XT_COPY_HOST_TO_DEVICE,
|
62 |
+
LIB_XT_COPY_DEVICE_TO_HOST,
|
63 |
+
LIB_XT_COPY_DEVICE_TO_DEVICE
|
64 |
+
} ;
|
65 |
+
typedef enum cudaXtCopyType_t cudaLibXtCopyType;
|
66 |
+
|
67 |
+
enum libFormat_t {
|
68 |
+
LIB_FORMAT_CUFFT = 0x0,
|
69 |
+
LIB_FORMAT_UNDEFINED = 0x1
|
70 |
+
};
|
71 |
+
|
72 |
+
typedef enum libFormat_t libFormat;
|
73 |
+
|
74 |
+
#define MAX_CUDA_DESCRIPTOR_GPUS 64
|
75 |
+
|
76 |
+
struct cudaXtDesc_t{
|
77 |
+
int version; //descriptor version
|
78 |
+
int nGPUs; //number of GPUs
|
79 |
+
int GPUs[MAX_CUDA_DESCRIPTOR_GPUS]; //array of device IDs
|
80 |
+
void *data[MAX_CUDA_DESCRIPTOR_GPUS]; //array of pointers to data, one per GPU
|
81 |
+
size_t size[MAX_CUDA_DESCRIPTOR_GPUS]; //array of data sizes, one per GPU
|
82 |
+
void *cudaXtState; //opaque CUDA utility structure
|
83 |
+
};
|
84 |
+
typedef struct cudaXtDesc_t cudaXtDesc;
|
85 |
+
|
86 |
+
struct cudaLibXtDesc_t{
|
87 |
+
int version; //descriptor version
|
88 |
+
cudaXtDesc *descriptor; //multi-GPU memory descriptor
|
89 |
+
libFormat library; //which library recognizes the format
|
90 |
+
int subFormat; //library specific enumerator of sub formats
|
91 |
+
void *libDescriptor; //library specific descriptor e.g. FFT transform plan object
|
92 |
+
};
|
93 |
+
typedef struct cudaLibXtDesc_t cudaLibXtDesc;
|
94 |
+
|
95 |
+
|
96 |
+
#endif
|
97 |
+
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cufft.h
ADDED
@@ -0,0 +1,317 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/* Copyright 2005-2021 NVIDIA Corporation. All rights reserved.
|
2 |
+
*
|
3 |
+
* NOTICE TO LICENSEE:
|
4 |
+
*
|
5 |
+
* The source code and/or documentation ("Licensed Deliverables") are
|
6 |
+
* subject to NVIDIA intellectual property rights under U.S. and
|
7 |
+
* international Copyright laws.
|
8 |
+
*
|
9 |
+
* The Licensed Deliverables contained herein are PROPRIETARY and
|
10 |
+
* CONFIDENTIAL to NVIDIA and are being provided under the terms and
|
11 |
+
* conditions of a form of NVIDIA software license agreement by and
|
12 |
+
* between NVIDIA and Licensee ("License Agreement") or electronically
|
13 |
+
* accepted by Licensee. Notwithstanding any terms or conditions to
|
14 |
+
* the contrary in the License Agreement, reproduction or disclosure
|
15 |
+
* of the Licensed Deliverables to any third party without the express
|
16 |
+
* written consent of NVIDIA is prohibited.
|
17 |
+
*
|
18 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
19 |
+
* LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
|
20 |
+
* SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. THEY ARE
|
21 |
+
* PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
|
22 |
+
* NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
|
23 |
+
* DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
|
24 |
+
* NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
|
25 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
26 |
+
* LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
|
27 |
+
* SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
|
28 |
+
* DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
|
29 |
+
* WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
|
30 |
+
* ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
|
31 |
+
* OF THESE LICENSED DELIVERABLES.
|
32 |
+
*
|
33 |
+
* U.S. Government End Users. These Licensed Deliverables are a
|
34 |
+
* "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
|
35 |
+
* 1995), consisting of "commercial computer software" and "commercial
|
36 |
+
* computer software documentation" as such terms are used in 48
|
37 |
+
* C.F.R. 12.212 (SEPT 1995) and are provided to the U.S. Government
|
38 |
+
* only as a commercial end item. Consistent with 48 C.F.R.12.212 and
|
39 |
+
* 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
|
40 |
+
* U.S. Government End Users acquire the Licensed Deliverables with
|
41 |
+
* only those rights set forth herein.
|
42 |
+
*
|
43 |
+
* Any use of the Licensed Deliverables in individual and commercial
|
44 |
+
* software must include, in the user documentation and internal
|
45 |
+
* comments to the code, the above Disclaimer and U.S. Government End
|
46 |
+
* Users Notice.
|
47 |
+
*/
|
48 |
+
|
49 |
+
/*!
|
50 |
+
* \file cufft.h
|
51 |
+
* \brief Public header file for the NVIDIA CUDA FFT library (CUFFT)
|
52 |
+
*/
|
53 |
+
|
54 |
+
#ifndef _CUFFT_H_
|
55 |
+
#define _CUFFT_H_
|
56 |
+
|
57 |
+
|
58 |
+
#include "cuComplex.h"
|
59 |
+
#include "driver_types.h"
|
60 |
+
#include "library_types.h"
|
61 |
+
|
62 |
+
#ifndef CUFFTAPI
|
63 |
+
#ifdef _WIN32
|
64 |
+
#define CUFFTAPI __stdcall
|
65 |
+
#elif __GNUC__ >= 4
|
66 |
+
#define CUFFTAPI __attribute__ ((visibility ("default")))
|
67 |
+
#else
|
68 |
+
#define CUFFTAPI
|
69 |
+
#endif
|
70 |
+
#endif
|
71 |
+
|
72 |
+
#ifdef __cplusplus
|
73 |
+
extern "C" {
|
74 |
+
#endif
|
75 |
+
|
76 |
+
#define CUFFT_VER_MAJOR 11
|
77 |
+
#define CUFFT_VER_MINOR 0
|
78 |
+
#define CUFFT_VER_PATCH 2
|
79 |
+
#define CUFFT_VER_BUILD 54
|
80 |
+
|
81 |
+
#define CUFFT_VERSION 11002
|
82 |
+
|
83 |
+
// CUFFT API function return values
|
84 |
+
typedef enum cufftResult_t {
|
85 |
+
CUFFT_SUCCESS = 0x0,
|
86 |
+
CUFFT_INVALID_PLAN = 0x1,
|
87 |
+
CUFFT_ALLOC_FAILED = 0x2,
|
88 |
+
CUFFT_INVALID_TYPE = 0x3,
|
89 |
+
CUFFT_INVALID_VALUE = 0x4,
|
90 |
+
CUFFT_INTERNAL_ERROR = 0x5,
|
91 |
+
CUFFT_EXEC_FAILED = 0x6,
|
92 |
+
CUFFT_SETUP_FAILED = 0x7,
|
93 |
+
CUFFT_INVALID_SIZE = 0x8,
|
94 |
+
CUFFT_UNALIGNED_DATA = 0x9,
|
95 |
+
CUFFT_INCOMPLETE_PARAMETER_LIST = 0xA,
|
96 |
+
CUFFT_INVALID_DEVICE = 0xB,
|
97 |
+
CUFFT_PARSE_ERROR = 0xC,
|
98 |
+
CUFFT_NO_WORKSPACE = 0xD,
|
99 |
+
CUFFT_NOT_IMPLEMENTED = 0xE,
|
100 |
+
CUFFT_LICENSE_ERROR = 0x0F,
|
101 |
+
CUFFT_NOT_SUPPORTED = 0x10
|
102 |
+
|
103 |
+
} cufftResult;
|
104 |
+
|
105 |
+
#define MAX_CUFFT_ERROR 0x11
|
106 |
+
|
107 |
+
|
108 |
+
// CUFFT defines and supports the following data types
|
109 |
+
|
110 |
+
|
111 |
+
// cufftReal is a single-precision, floating-point real data type.
|
112 |
+
// cufftDoubleReal is a double-precision, real data type.
|
113 |
+
typedef float cufftReal;
|
114 |
+
typedef double cufftDoubleReal;
|
115 |
+
|
116 |
+
// cufftComplex is a single-precision, floating-point complex data type that
|
117 |
+
// consists of interleaved real and imaginary components.
|
118 |
+
// cufftDoubleComplex is the double-precision equivalent.
|
119 |
+
typedef cuComplex cufftComplex;
|
120 |
+
typedef cuDoubleComplex cufftDoubleComplex;
|
121 |
+
|
122 |
+
// CUFFT transform directions
|
123 |
+
#define CUFFT_FORWARD -1 // Forward FFT
|
124 |
+
#define CUFFT_INVERSE 1 // Inverse FFT
|
125 |
+
|
126 |
+
// CUFFT supports the following transform types
|
127 |
+
typedef enum cufftType_t {
|
128 |
+
CUFFT_R2C = 0x2a, // Real to Complex (interleaved)
|
129 |
+
CUFFT_C2R = 0x2c, // Complex (interleaved) to Real
|
130 |
+
CUFFT_C2C = 0x29, // Complex to Complex, interleaved
|
131 |
+
CUFFT_D2Z = 0x6a, // Double to Double-Complex
|
132 |
+
CUFFT_Z2D = 0x6c, // Double-Complex to Double
|
133 |
+
CUFFT_Z2Z = 0x69 // Double-Complex to Double-Complex
|
134 |
+
} cufftType;
|
135 |
+
|
136 |
+
// CUFFT supports the following data layouts
|
137 |
+
typedef enum cufftCompatibility_t {
|
138 |
+
CUFFT_COMPATIBILITY_FFTW_PADDING = 0x01 // The default value
|
139 |
+
} cufftCompatibility;
|
140 |
+
|
141 |
+
#define CUFFT_COMPATIBILITY_DEFAULT CUFFT_COMPATIBILITY_FFTW_PADDING
|
142 |
+
|
143 |
+
//
|
144 |
+
// structure definition used by the shim between old and new APIs
|
145 |
+
//
|
146 |
+
#define MAX_SHIM_RANK 3
|
147 |
+
|
148 |
+
// cufftHandle is a handle type used to store and access CUFFT plans.
|
149 |
+
typedef int cufftHandle;
|
150 |
+
|
151 |
+
|
152 |
+
cufftResult CUFFTAPI cufftPlan1d(cufftHandle *plan,
|
153 |
+
int nx,
|
154 |
+
cufftType type,
|
155 |
+
int batch);
|
156 |
+
|
157 |
+
cufftResult CUFFTAPI cufftPlan2d(cufftHandle *plan,
|
158 |
+
int nx, int ny,
|
159 |
+
cufftType type);
|
160 |
+
|
161 |
+
cufftResult CUFFTAPI cufftPlan3d(cufftHandle *plan,
|
162 |
+
int nx, int ny, int nz,
|
163 |
+
cufftType type);
|
164 |
+
|
165 |
+
cufftResult CUFFTAPI cufftPlanMany(cufftHandle *plan,
|
166 |
+
int rank,
|
167 |
+
int *n,
|
168 |
+
int *inembed, int istride, int idist,
|
169 |
+
int *onembed, int ostride, int odist,
|
170 |
+
cufftType type,
|
171 |
+
int batch);
|
172 |
+
|
173 |
+
cufftResult CUFFTAPI cufftMakePlan1d(cufftHandle plan,
|
174 |
+
int nx,
|
175 |
+
cufftType type,
|
176 |
+
int batch,
|
177 |
+
size_t *workSize);
|
178 |
+
|
179 |
+
cufftResult CUFFTAPI cufftMakePlan2d(cufftHandle plan,
|
180 |
+
int nx, int ny,
|
181 |
+
cufftType type,
|
182 |
+
size_t *workSize);
|
183 |
+
|
184 |
+
cufftResult CUFFTAPI cufftMakePlan3d(cufftHandle plan,
|
185 |
+
int nx, int ny, int nz,
|
186 |
+
cufftType type,
|
187 |
+
size_t *workSize);
|
188 |
+
|
189 |
+
cufftResult CUFFTAPI cufftMakePlanMany(cufftHandle plan,
|
190 |
+
int rank,
|
191 |
+
int *n,
|
192 |
+
int *inembed, int istride, int idist,
|
193 |
+
int *onembed, int ostride, int odist,
|
194 |
+
cufftType type,
|
195 |
+
int batch,
|
196 |
+
size_t *workSize);
|
197 |
+
|
198 |
+
cufftResult CUFFTAPI cufftMakePlanMany64(cufftHandle plan,
|
199 |
+
int rank,
|
200 |
+
long long int *n,
|
201 |
+
long long int *inembed,
|
202 |
+
long long int istride,
|
203 |
+
long long int idist,
|
204 |
+
long long int *onembed,
|
205 |
+
long long int ostride, long long int odist,
|
206 |
+
cufftType type,
|
207 |
+
long long int batch,
|
208 |
+
size_t * workSize);
|
209 |
+
|
210 |
+
cufftResult CUFFTAPI cufftGetSizeMany64(cufftHandle plan,
|
211 |
+
int rank,
|
212 |
+
long long int *n,
|
213 |
+
long long int *inembed,
|
214 |
+
long long int istride, long long int idist,
|
215 |
+
long long int *onembed,
|
216 |
+
long long int ostride, long long int odist,
|
217 |
+
cufftType type,
|
218 |
+
long long int batch,
|
219 |
+
size_t *workSize);
|
220 |
+
|
221 |
+
|
222 |
+
|
223 |
+
|
224 |
+
cufftResult CUFFTAPI cufftEstimate1d(int nx,
|
225 |
+
cufftType type,
|
226 |
+
int batch,
|
227 |
+
size_t *workSize);
|
228 |
+
|
229 |
+
cufftResult CUFFTAPI cufftEstimate2d(int nx, int ny,
|
230 |
+
cufftType type,
|
231 |
+
size_t *workSize);
|
232 |
+
|
233 |
+
cufftResult CUFFTAPI cufftEstimate3d(int nx, int ny, int nz,
|
234 |
+
cufftType type,
|
235 |
+
size_t *workSize);
|
236 |
+
|
237 |
+
cufftResult CUFFTAPI cufftEstimateMany(int rank,
|
238 |
+
int *n,
|
239 |
+
int *inembed, int istride, int idist,
|
240 |
+
int *onembed, int ostride, int odist,
|
241 |
+
cufftType type,
|
242 |
+
int batch,
|
243 |
+
size_t *workSize);
|
244 |
+
|
245 |
+
cufftResult CUFFTAPI cufftCreate(cufftHandle * handle);
|
246 |
+
|
247 |
+
cufftResult CUFFTAPI cufftGetSize1d(cufftHandle handle,
|
248 |
+
int nx,
|
249 |
+
cufftType type,
|
250 |
+
int batch,
|
251 |
+
size_t *workSize );
|
252 |
+
|
253 |
+
cufftResult CUFFTAPI cufftGetSize2d(cufftHandle handle,
|
254 |
+
int nx, int ny,
|
255 |
+
cufftType type,
|
256 |
+
size_t *workSize);
|
257 |
+
|
258 |
+
cufftResult CUFFTAPI cufftGetSize3d(cufftHandle handle,
|
259 |
+
int nx, int ny, int nz,
|
260 |
+
cufftType type,
|
261 |
+
size_t *workSize);
|
262 |
+
|
263 |
+
cufftResult CUFFTAPI cufftGetSizeMany(cufftHandle handle,
|
264 |
+
int rank, int *n,
|
265 |
+
int *inembed, int istride, int idist,
|
266 |
+
int *onembed, int ostride, int odist,
|
267 |
+
cufftType type, int batch, size_t *workArea);
|
268 |
+
|
269 |
+
cufftResult CUFFTAPI cufftGetSize(cufftHandle handle, size_t *workSize);
|
270 |
+
|
271 |
+
cufftResult CUFFTAPI cufftSetWorkArea(cufftHandle plan, void *workArea);
|
272 |
+
|
273 |
+
cufftResult CUFFTAPI cufftSetAutoAllocation(cufftHandle plan, int autoAllocate);
|
274 |
+
|
275 |
+
cufftResult CUFFTAPI cufftExecC2C(cufftHandle plan,
|
276 |
+
cufftComplex *idata,
|
277 |
+
cufftComplex *odata,
|
278 |
+
int direction);
|
279 |
+
|
280 |
+
cufftResult CUFFTAPI cufftExecR2C(cufftHandle plan,
|
281 |
+
cufftReal *idata,
|
282 |
+
cufftComplex *odata);
|
283 |
+
|
284 |
+
cufftResult CUFFTAPI cufftExecC2R(cufftHandle plan,
|
285 |
+
cufftComplex *idata,
|
286 |
+
cufftReal *odata);
|
287 |
+
|
288 |
+
cufftResult CUFFTAPI cufftExecZ2Z(cufftHandle plan,
|
289 |
+
cufftDoubleComplex *idata,
|
290 |
+
cufftDoubleComplex *odata,
|
291 |
+
int direction);
|
292 |
+
|
293 |
+
cufftResult CUFFTAPI cufftExecD2Z(cufftHandle plan,
|
294 |
+
cufftDoubleReal *idata,
|
295 |
+
cufftDoubleComplex *odata);
|
296 |
+
|
297 |
+
cufftResult CUFFTAPI cufftExecZ2D(cufftHandle plan,
|
298 |
+
cufftDoubleComplex *idata,
|
299 |
+
cufftDoubleReal *odata);
|
300 |
+
|
301 |
+
|
302 |
+
// utility functions
|
303 |
+
cufftResult CUFFTAPI cufftSetStream(cufftHandle plan,
|
304 |
+
cudaStream_t stream);
|
305 |
+
|
306 |
+
cufftResult CUFFTAPI cufftDestroy(cufftHandle plan);
|
307 |
+
|
308 |
+
cufftResult CUFFTAPI cufftGetVersion(int *version);
|
309 |
+
|
310 |
+
cufftResult CUFFTAPI cufftGetProperty(libraryPropertyType type,
|
311 |
+
int *value);
|
312 |
+
|
313 |
+
#ifdef __cplusplus
|
314 |
+
}
|
315 |
+
#endif
|
316 |
+
|
317 |
+
#endif /* _CUFFT_H_ */
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cufftXt.h
ADDED
@@ -0,0 +1,268 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
/* Copyright 2005-2021 NVIDIA Corporation. All rights reserved.
|
3 |
+
*
|
4 |
+
* NOTICE TO LICENSEE:
|
5 |
+
*
|
6 |
+
* The source code and/or documentation ("Licensed Deliverables") are
|
7 |
+
* subject to NVIDIA intellectual property rights under U.S. and
|
8 |
+
* international Copyright laws.
|
9 |
+
*
|
10 |
+
* The Licensed Deliverables contained herein are PROPRIETARY and
|
11 |
+
* CONFIDENTIAL to NVIDIA and are being provided under the terms and
|
12 |
+
* conditions of a form of NVIDIA software license agreement by and
|
13 |
+
* between NVIDIA and Licensee ("License Agreement") or electronically
|
14 |
+
* accepted by Licensee. Notwithstanding any terms or conditions to
|
15 |
+
* the contrary in the License Agreement, reproduction or disclosure
|
16 |
+
* of the Licensed Deliverables to any third party without the express
|
17 |
+
* written consent of NVIDIA is prohibited.
|
18 |
+
*
|
19 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
20 |
+
* LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
|
21 |
+
* SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. THEY ARE
|
22 |
+
* PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
|
23 |
+
* NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
|
24 |
+
* DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
|
25 |
+
* NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
|
26 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
27 |
+
* LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
|
28 |
+
* SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
|
29 |
+
* DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
|
30 |
+
* WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
|
31 |
+
* ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
|
32 |
+
* OF THESE LICENSED DELIVERABLES.
|
33 |
+
*
|
34 |
+
* U.S. Government End Users. These Licensed Deliverables are a
|
35 |
+
* "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
|
36 |
+
* 1995), consisting of "commercial computer software" and "commercial
|
37 |
+
* computer software documentation" as such terms are used in 48
|
38 |
+
* C.F.R. 12.212 (SEPT 1995) and are provided to the U.S. Government
|
39 |
+
* only as a commercial end item. Consistent with 48 C.F.R.12.212 and
|
40 |
+
* 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
|
41 |
+
* U.S. Government End Users acquire the Licensed Deliverables with
|
42 |
+
* only those rights set forth herein.
|
43 |
+
*
|
44 |
+
* Any use of the Licensed Deliverables in individual and commercial
|
45 |
+
* software must include, in the user documentation and internal
|
46 |
+
* comments to the code, the above Disclaimer and U.S. Government End
|
47 |
+
* Users Notice.
|
48 |
+
*/
|
49 |
+
|
50 |
+
/*!
|
51 |
+
* \file cufftXt.h
|
52 |
+
* \brief Public header file for the NVIDIA CUDA FFT library (CUFFT)
|
53 |
+
*/
|
54 |
+
|
55 |
+
#ifndef _CUFFTXT_H_
|
56 |
+
#define _CUFFTXT_H_
|
57 |
+
#include "cudalibxt.h"
|
58 |
+
#include "cufft.h"
|
59 |
+
|
60 |
+
|
61 |
+
#ifndef CUFFTAPI
|
62 |
+
#ifdef _WIN32
|
63 |
+
#define CUFFTAPI __stdcall
|
64 |
+
#else
|
65 |
+
#define CUFFTAPI
|
66 |
+
#endif
|
67 |
+
#endif
|
68 |
+
|
69 |
+
#ifdef __cplusplus
|
70 |
+
extern "C" {
|
71 |
+
#endif
|
72 |
+
|
73 |
+
//
|
74 |
+
// cufftXtSubFormat identifies the data layout of
|
75 |
+
// a memory descriptor owned by cufft.
|
76 |
+
// note that multi GPU cufft does not yet support out-of-place transforms
|
77 |
+
//
|
78 |
+
|
79 |
+
typedef enum cufftXtSubFormat_t {
|
80 |
+
CUFFT_XT_FORMAT_INPUT = 0x00, //by default input is in linear order across GPUs
|
81 |
+
CUFFT_XT_FORMAT_OUTPUT = 0x01, //by default output is in scrambled order depending on transform
|
82 |
+
CUFFT_XT_FORMAT_INPLACE = 0x02, //by default inplace is input order, which is linear across GPUs
|
83 |
+
CUFFT_XT_FORMAT_INPLACE_SHUFFLED = 0x03, //shuffled output order after execution of the transform
|
84 |
+
CUFFT_XT_FORMAT_1D_INPUT_SHUFFLED = 0x04, //shuffled input order prior to execution of 1D transforms
|
85 |
+
CUFFT_XT_FORMAT_DISTRIBUTED_INPUT = 0x05,
|
86 |
+
CUFFT_XT_FORMAT_DISTRIBUTED_OUTPUT = 0x06,
|
87 |
+
CUFFT_FORMAT_UNDEFINED = 0x07
|
88 |
+
} cufftXtSubFormat;
|
89 |
+
|
90 |
+
//
|
91 |
+
// cufftXtCopyType specifies the type of copy for cufftXtMemcpy
|
92 |
+
//
|
93 |
+
typedef enum cufftXtCopyType_t {
|
94 |
+
CUFFT_COPY_HOST_TO_DEVICE = 0x00,
|
95 |
+
CUFFT_COPY_DEVICE_TO_HOST = 0x01,
|
96 |
+
CUFFT_COPY_DEVICE_TO_DEVICE = 0x02,
|
97 |
+
CUFFT_COPY_UNDEFINED = 0x03
|
98 |
+
} cufftXtCopyType;
|
99 |
+
|
100 |
+
//
|
101 |
+
// cufftXtQueryType specifies the type of query for cufftXtQueryPlan
|
102 |
+
//
|
103 |
+
typedef enum cufftXtQueryType_t {
|
104 |
+
CUFFT_QUERY_1D_FACTORS = 0x00,
|
105 |
+
CUFFT_QUERY_UNDEFINED = 0x01
|
106 |
+
} cufftXtQueryType;
|
107 |
+
|
108 |
+
typedef struct cufftXt1dFactors_t {
|
109 |
+
long long int size;
|
110 |
+
long long int stringCount;
|
111 |
+
long long int stringLength;
|
112 |
+
long long int substringLength;
|
113 |
+
long long int factor1;
|
114 |
+
long long int factor2;
|
115 |
+
long long int stringMask;
|
116 |
+
long long int substringMask;
|
117 |
+
long long int factor1Mask;
|
118 |
+
long long int factor2Mask;
|
119 |
+
int stringShift;
|
120 |
+
int substringShift;
|
121 |
+
int factor1Shift;
|
122 |
+
int factor2Shift;
|
123 |
+
} cufftXt1dFactors;
|
124 |
+
|
125 |
+
//
|
126 |
+
// cufftXtWorkAreaPolicy specifies policy for cufftXtSetWorkAreaPolicy
|
127 |
+
//
|
128 |
+
typedef enum cufftXtWorkAreaPolicy_t {
|
129 |
+
CUFFT_WORKAREA_MINIMAL = 0, /* maximum reduction */
|
130 |
+
CUFFT_WORKAREA_USER = 1, /* use workSize parameter as limit */
|
131 |
+
CUFFT_WORKAREA_PERFORMANCE = 2, /* default - 1x overhead or more, maximum performance */
|
132 |
+
} cufftXtWorkAreaPolicy;
|
133 |
+
|
134 |
+
// multi-GPU routines
|
135 |
+
cufftResult CUFFTAPI cufftXtSetGPUs(cufftHandle handle, int nGPUs, int *whichGPUs);
|
136 |
+
|
137 |
+
cufftResult CUFFTAPI cufftXtMalloc(cufftHandle plan,
|
138 |
+
cudaLibXtDesc ** descriptor,
|
139 |
+
cufftXtSubFormat format);
|
140 |
+
|
141 |
+
cufftResult CUFFTAPI cufftXtMemcpy(cufftHandle plan,
|
142 |
+
void *dstPointer,
|
143 |
+
void *srcPointer,
|
144 |
+
cufftXtCopyType type);
|
145 |
+
|
146 |
+
cufftResult CUFFTAPI cufftXtFree(cudaLibXtDesc *descriptor);
|
147 |
+
|
148 |
+
cufftResult CUFFTAPI cufftXtSetWorkArea(cufftHandle plan, void **workArea);
|
149 |
+
|
150 |
+
cufftResult CUFFTAPI cufftXtExecDescriptorC2C(cufftHandle plan,
|
151 |
+
cudaLibXtDesc *input,
|
152 |
+
cudaLibXtDesc *output,
|
153 |
+
int direction);
|
154 |
+
|
155 |
+
cufftResult CUFFTAPI cufftXtExecDescriptorR2C(cufftHandle plan,
|
156 |
+
cudaLibXtDesc *input,
|
157 |
+
cudaLibXtDesc *output);
|
158 |
+
|
159 |
+
cufftResult CUFFTAPI cufftXtExecDescriptorC2R(cufftHandle plan,
|
160 |
+
cudaLibXtDesc *input,
|
161 |
+
cudaLibXtDesc *output);
|
162 |
+
|
163 |
+
cufftResult CUFFTAPI cufftXtExecDescriptorZ2Z(cufftHandle plan,
|
164 |
+
cudaLibXtDesc *input,
|
165 |
+
cudaLibXtDesc *output,
|
166 |
+
int direction);
|
167 |
+
|
168 |
+
cufftResult CUFFTAPI cufftXtExecDescriptorD2Z(cufftHandle plan,
|
169 |
+
cudaLibXtDesc *input,
|
170 |
+
cudaLibXtDesc *output);
|
171 |
+
|
172 |
+
cufftResult CUFFTAPI cufftXtExecDescriptorZ2D(cufftHandle plan,
|
173 |
+
cudaLibXtDesc *input,
|
174 |
+
cudaLibXtDesc *output);
|
175 |
+
|
176 |
+
// Utility functions
|
177 |
+
|
178 |
+
cufftResult CUFFTAPI cufftXtQueryPlan(cufftHandle plan, void *queryStruct, cufftXtQueryType queryType);
|
179 |
+
|
180 |
+
|
181 |
+
// callbacks
|
182 |
+
|
183 |
+
|
184 |
+
typedef enum cufftXtCallbackType_t {
|
185 |
+
CUFFT_CB_LD_COMPLEX = 0x0,
|
186 |
+
CUFFT_CB_LD_COMPLEX_DOUBLE = 0x1,
|
187 |
+
CUFFT_CB_LD_REAL = 0x2,
|
188 |
+
CUFFT_CB_LD_REAL_DOUBLE = 0x3,
|
189 |
+
CUFFT_CB_ST_COMPLEX = 0x4,
|
190 |
+
CUFFT_CB_ST_COMPLEX_DOUBLE = 0x5,
|
191 |
+
CUFFT_CB_ST_REAL = 0x6,
|
192 |
+
CUFFT_CB_ST_REAL_DOUBLE = 0x7,
|
193 |
+
CUFFT_CB_UNDEFINED = 0x8
|
194 |
+
|
195 |
+
} cufftXtCallbackType;
|
196 |
+
|
197 |
+
typedef cufftComplex (*cufftCallbackLoadC)(void *dataIn, size_t offset, void *callerInfo, void *sharedPointer);
|
198 |
+
typedef cufftDoubleComplex (*cufftCallbackLoadZ)(void *dataIn, size_t offset, void *callerInfo, void *sharedPointer);
|
199 |
+
typedef cufftReal (*cufftCallbackLoadR)(void *dataIn, size_t offset, void *callerInfo, void *sharedPointer);
|
200 |
+
typedef cufftDoubleReal(*cufftCallbackLoadD)(void *dataIn, size_t offset, void *callerInfo, void *sharedPointer);
|
201 |
+
|
202 |
+
typedef void (*cufftCallbackStoreC)(void *dataOut, size_t offset, cufftComplex element, void *callerInfo, void *sharedPointer);
|
203 |
+
typedef void (*cufftCallbackStoreZ)(void *dataOut, size_t offset, cufftDoubleComplex element, void *callerInfo, void *sharedPointer);
|
204 |
+
typedef void (*cufftCallbackStoreR)(void *dataOut, size_t offset, cufftReal element, void *callerInfo, void *sharedPointer);
|
205 |
+
typedef void (*cufftCallbackStoreD)(void *dataOut, size_t offset, cufftDoubleReal element, void *callerInfo, void *sharedPointer);
|
206 |
+
|
207 |
+
|
208 |
+
cufftResult CUFFTAPI cufftXtSetCallback(cufftHandle plan, void **callback_routine, cufftXtCallbackType cbType, void **caller_info);
|
209 |
+
cufftResult CUFFTAPI cufftXtClearCallback(cufftHandle plan, cufftXtCallbackType cbType);
|
210 |
+
cufftResult CUFFTAPI cufftXtSetCallbackSharedSize(cufftHandle plan, cufftXtCallbackType cbType, size_t sharedSize);
|
211 |
+
|
212 |
+
cufftResult CUFFTAPI cufftXtMakePlanMany(cufftHandle plan,
|
213 |
+
int rank,
|
214 |
+
long long int *n,
|
215 |
+
long long int *inembed,
|
216 |
+
long long int istride,
|
217 |
+
long long int idist,
|
218 |
+
cudaDataType inputtype,
|
219 |
+
long long int *onembed,
|
220 |
+
long long int ostride,
|
221 |
+
long long int odist,
|
222 |
+
cudaDataType outputtype,
|
223 |
+
long long int batch,
|
224 |
+
size_t *workSize,
|
225 |
+
cudaDataType executiontype);
|
226 |
+
|
227 |
+
cufftResult CUFFTAPI cufftXtGetSizeMany(cufftHandle plan,
|
228 |
+
int rank,
|
229 |
+
long long int *n,
|
230 |
+
long long int *inembed,
|
231 |
+
long long int istride,
|
232 |
+
long long int idist,
|
233 |
+
cudaDataType inputtype,
|
234 |
+
long long int *onembed,
|
235 |
+
long long int ostride,
|
236 |
+
long long int odist,
|
237 |
+
cudaDataType outputtype,
|
238 |
+
long long int batch,
|
239 |
+
size_t *workSize,
|
240 |
+
cudaDataType executiontype);
|
241 |
+
|
242 |
+
|
243 |
+
cufftResult CUFFTAPI cufftXtExec(cufftHandle plan,
|
244 |
+
void *input,
|
245 |
+
void *output,
|
246 |
+
int direction);
|
247 |
+
|
248 |
+
cufftResult CUFFTAPI cufftXtExecDescriptor(cufftHandle plan,
|
249 |
+
cudaLibXtDesc *input,
|
250 |
+
cudaLibXtDesc *output,
|
251 |
+
int direction);
|
252 |
+
|
253 |
+
cufftResult CUFFTAPI cufftXtSetWorkAreaPolicy(cufftHandle plan, cufftXtWorkAreaPolicy policy, size_t *workSize);
|
254 |
+
|
255 |
+
cufftResult CUFFTAPI cufftXtSetDistribution(cufftHandle plan,
|
256 |
+
int rank,
|
257 |
+
const long long int* lower_input,
|
258 |
+
const long long int* upper_input,
|
259 |
+
const long long int* lower_output,
|
260 |
+
const long long int* upper_output,
|
261 |
+
const long long int* strides_input,
|
262 |
+
const long long int* strides_output);
|
263 |
+
|
264 |
+
#ifdef __cplusplus
|
265 |
+
}
|
266 |
+
#endif
|
267 |
+
|
268 |
+
#endif
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/include/cufftw.h
ADDED
@@ -0,0 +1,454 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
/* Copyright 2005-2014 NVIDIA Corporation. All rights reserved.
|
3 |
+
*
|
4 |
+
* NOTICE TO LICENSEE:
|
5 |
+
*
|
6 |
+
* The source code and/or documentation ("Licensed Deliverables") are
|
7 |
+
* subject to NVIDIA intellectual property rights under U.S. and
|
8 |
+
* international Copyright laws.
|
9 |
+
*
|
10 |
+
* The Licensed Deliverables contained herein are PROPRIETARY and
|
11 |
+
* CONFIDENTIAL to NVIDIA and are being provided under the terms and
|
12 |
+
* conditions of a form of NVIDIA software license agreement by and
|
13 |
+
* between NVIDIA and Licensee ("License Agreement") or electronically
|
14 |
+
* accepted by Licensee. Notwithstanding any terms or conditions to
|
15 |
+
* the contrary in the License Agreement, reproduction or disclosure
|
16 |
+
* of the Licensed Deliverables to any third party without the express
|
17 |
+
* written consent of NVIDIA is prohibited.
|
18 |
+
*
|
19 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
20 |
+
* LICENSE AGREEMENT, NVIDIA MAKES NO REPRESENTATION ABOUT THE
|
21 |
+
* SUITABILITY OF THESE LICENSED DELIVERABLES FOR ANY PURPOSE. THEY ARE
|
22 |
+
* PROVIDED "AS IS" WITHOUT EXPRESS OR IMPLIED WARRANTY OF ANY KIND.
|
23 |
+
* NVIDIA DISCLAIMS ALL WARRANTIES WITH REGARD TO THESE LICENSED
|
24 |
+
* DELIVERABLES, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY,
|
25 |
+
* NONINFRINGEMENT, AND FITNESS FOR A PARTICULAR PURPOSE.
|
26 |
+
* NOTWITHSTANDING ANY TERMS OR CONDITIONS TO THE CONTRARY IN THE
|
27 |
+
* LICENSE AGREEMENT, IN NO EVENT SHALL NVIDIA BE LIABLE FOR ANY
|
28 |
+
* SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL DAMAGES, OR ANY
|
29 |
+
* DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
|
30 |
+
* WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
|
31 |
+
* ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
|
32 |
+
* OF THESE LICENSED DELIVERABLES.
|
33 |
+
*
|
34 |
+
* U.S. Government End Users. These Licensed Deliverables are a
|
35 |
+
* "commercial item" as that term is defined at 48 C.F.R. 2.101 (OCT
|
36 |
+
* 1995), consisting of "commercial computer software" and "commercial
|
37 |
+
* computer software documentation" as such terms are used in 48
|
38 |
+
* C.F.R. 12.212 (SEPT 1995) and are provided to the U.S. Government
|
39 |
+
* only as a commercial end item. Consistent with 48 C.F.R.12.212 and
|
40 |
+
* 48 C.F.R. 227.7202-1 through 227.7202-4 (JUNE 1995), all
|
41 |
+
* U.S. Government End Users acquire the Licensed Deliverables with
|
42 |
+
* only those rights set forth herein.
|
43 |
+
*
|
44 |
+
* Any use of the Licensed Deliverables in individual and commercial
|
45 |
+
* software must include, in the user documentation and internal
|
46 |
+
* comments to the code, the above Disclaimer and U.S. Government End
|
47 |
+
* Users Notice.
|
48 |
+
*/
|
49 |
+
|
50 |
+
/*!
|
51 |
+
* \file cufftw.h
|
52 |
+
* \brief Public header file for the NVIDIA CUDA FFTW library (CUFFTW)
|
53 |
+
*/
|
54 |
+
|
55 |
+
#ifndef _CUFFTW_H_
|
56 |
+
#define _CUFFTW_H_
|
57 |
+
|
58 |
+
|
59 |
+
#include <stdio.h>
|
60 |
+
#include "cufft.h"
|
61 |
+
|
62 |
+
#ifdef __cplusplus
|
63 |
+
extern "C" {
|
64 |
+
#endif
|
65 |
+
|
66 |
+
// transform direction
|
67 |
+
#define FFTW_FORWARD -1
|
68 |
+
#define FFTW_INVERSE 1
|
69 |
+
#define FFTW_BACKWARD 1
|
70 |
+
|
71 |
+
// Planner flags
|
72 |
+
|
73 |
+
#define FFTW_ESTIMATE 0x01
|
74 |
+
#define FFTW_MEASURE 0x02
|
75 |
+
#define FFTW_PATIENT 0x03
|
76 |
+
#define FFTW_EXHAUSTIVE 0x04
|
77 |
+
#define FFTW_WISDOM_ONLY 0x05
|
78 |
+
|
79 |
+
//Algorithm restriction flags
|
80 |
+
|
81 |
+
#define FFTW_DESTROY_INPUT 0x08
|
82 |
+
#define FFTW_PRESERVE_INPUT 0x0C
|
83 |
+
#define FFTW_UNALIGNED 0x10
|
84 |
+
|
85 |
+
// CUFFTW defines and supports the following data types
|
86 |
+
|
87 |
+
// note if complex.h has been included we use the C99 complex types
|
88 |
+
#if !defined(FFTW_NO_Complex) && defined(_Complex_I) && defined (complex)
|
89 |
+
typedef double _Complex fftw_complex;
|
90 |
+
typedef float _Complex fftwf_complex;
|
91 |
+
#else
|
92 |
+
typedef double fftw_complex[2];
|
93 |
+
typedef float fftwf_complex[2];
|
94 |
+
#endif
|
95 |
+
|
96 |
+
typedef void *fftw_plan;
|
97 |
+
|
98 |
+
typedef void *fftwf_plan;
|
99 |
+
|
100 |
+
typedef struct {
|
101 |
+
int n;
|
102 |
+
int is;
|
103 |
+
int os;
|
104 |
+
} fftw_iodim;
|
105 |
+
|
106 |
+
typedef fftw_iodim fftwf_iodim;
|
107 |
+
|
108 |
+
typedef struct {
|
109 |
+
ptrdiff_t n;
|
110 |
+
ptrdiff_t is;
|
111 |
+
ptrdiff_t os;
|
112 |
+
} fftw_iodim64;
|
113 |
+
|
114 |
+
typedef fftw_iodim64 fftwf_iodim64;
|
115 |
+
|
116 |
+
|
117 |
+
// CUFFTW defines and supports the following double precision APIs
|
118 |
+
|
119 |
+
|
120 |
+
fftw_plan CUFFTAPI fftw_plan_dft_1d(int n,
|
121 |
+
fftw_complex *in,
|
122 |
+
fftw_complex *out,
|
123 |
+
int sign,
|
124 |
+
unsigned flags);
|
125 |
+
|
126 |
+
fftw_plan CUFFTAPI fftw_plan_dft_2d(int n0,
|
127 |
+
int n1,
|
128 |
+
fftw_complex *in,
|
129 |
+
fftw_complex *out,
|
130 |
+
int sign,
|
131 |
+
unsigned flags);
|
132 |
+
|
133 |
+
fftw_plan CUFFTAPI fftw_plan_dft_3d(int n0,
|
134 |
+
int n1,
|
135 |
+
int n2,
|
136 |
+
fftw_complex *in,
|
137 |
+
fftw_complex *out,
|
138 |
+
int sign,
|
139 |
+
unsigned flags);
|
140 |
+
|
141 |
+
fftw_plan CUFFTAPI fftw_plan_dft(int rank,
|
142 |
+
const int *n,
|
143 |
+
fftw_complex *in,
|
144 |
+
fftw_complex *out,
|
145 |
+
int sign,
|
146 |
+
unsigned flags);
|
147 |
+
|
148 |
+
fftw_plan CUFFTAPI fftw_plan_dft_r2c_1d(int n,
|
149 |
+
double *in,
|
150 |
+
fftw_complex *out,
|
151 |
+
unsigned flags);
|
152 |
+
|
153 |
+
fftw_plan CUFFTAPI fftw_plan_dft_r2c_2d(int n0,
|
154 |
+
int n1,
|
155 |
+
double *in,
|
156 |
+
fftw_complex *out,
|
157 |
+
unsigned flags);
|
158 |
+
|
159 |
+
fftw_plan CUFFTAPI fftw_plan_dft_r2c_3d(int n0,
|
160 |
+
int n1,
|
161 |
+
int n2,
|
162 |
+
double *in,
|
163 |
+
fftw_complex *out,
|
164 |
+
unsigned flags);
|
165 |
+
|
166 |
+
fftw_plan CUFFTAPI fftw_plan_dft_r2c(int rank,
|
167 |
+
const int *n,
|
168 |
+
double *in,
|
169 |
+
fftw_complex *out,
|
170 |
+
unsigned flags);
|
171 |
+
|
172 |
+
fftw_plan CUFFTAPI fftw_plan_dft_c2r_1d(int n,
|
173 |
+
fftw_complex *in,
|
174 |
+
double *out,
|
175 |
+
unsigned flags);
|
176 |
+
|
177 |
+
fftw_plan CUFFTAPI fftw_plan_dft_c2r_2d(int n0,
|
178 |
+
int n1,
|
179 |
+
fftw_complex *in,
|
180 |
+
double *out,
|
181 |
+
unsigned flags);
|
182 |
+
|
183 |
+
fftw_plan CUFFTAPI fftw_plan_dft_c2r_3d(int n0,
|
184 |
+
int n1,
|
185 |
+
int n2,
|
186 |
+
fftw_complex *in,
|
187 |
+
double *out,
|
188 |
+
unsigned flags);
|
189 |
+
|
190 |
+
fftw_plan CUFFTAPI fftw_plan_dft_c2r(int rank,
|
191 |
+
const int *n,
|
192 |
+
fftw_complex *in,
|
193 |
+
double *out,
|
194 |
+
unsigned flags);
|
195 |
+
|
196 |
+
|
197 |
+
fftw_plan CUFFTAPI fftw_plan_many_dft(int rank,
|
198 |
+
const int *n,
|
199 |
+
int batch,
|
200 |
+
fftw_complex *in,
|
201 |
+
const int *inembed, int istride, int idist,
|
202 |
+
fftw_complex *out,
|
203 |
+
const int *onembed, int ostride, int odist,
|
204 |
+
int sign, unsigned flags);
|
205 |
+
|
206 |
+
fftw_plan CUFFTAPI fftw_plan_many_dft_r2c(int rank,
|
207 |
+
const int *n,
|
208 |
+
int batch,
|
209 |
+
double *in,
|
210 |
+
const int *inembed, int istride, int idist,
|
211 |
+
fftw_complex *out,
|
212 |
+
const int *onembed, int ostride, int odist,
|
213 |
+
unsigned flags);
|
214 |
+
|
215 |
+
fftw_plan CUFFTAPI fftw_plan_many_dft_c2r(int rank,
|
216 |
+
const int *n,
|
217 |
+
int batch,
|
218 |
+
fftw_complex *in,
|
219 |
+
const int *inembed, int istride, int idist,
|
220 |
+
double *out,
|
221 |
+
const int *onembed, int ostride, int odist,
|
222 |
+
unsigned flags);
|
223 |
+
|
224 |
+
fftw_plan CUFFTAPI fftw_plan_guru_dft(int rank, const fftw_iodim *dims,
|
225 |
+
int batch_rank, const fftw_iodim *batch_dims,
|
226 |
+
fftw_complex *in, fftw_complex *out,
|
227 |
+
int sign, unsigned flags);
|
228 |
+
|
229 |
+
fftw_plan CUFFTAPI fftw_plan_guru_dft_r2c(int rank, const fftw_iodim *dims,
|
230 |
+
int batch_rank, const fftw_iodim *batch_dims,
|
231 |
+
double *in, fftw_complex *out,
|
232 |
+
unsigned flags);
|
233 |
+
|
234 |
+
fftw_plan CUFFTAPI fftw_plan_guru_dft_c2r(int rank, const fftw_iodim *dims,
|
235 |
+
int batch_rank, const fftw_iodim *batch_dims,
|
236 |
+
fftw_complex *in, double *out,
|
237 |
+
unsigned flags);
|
238 |
+
|
239 |
+
void CUFFTAPI fftw_execute(const fftw_plan plan);
|
240 |
+
|
241 |
+
void CUFFTAPI fftw_execute_dft(const fftw_plan plan,
|
242 |
+
fftw_complex *idata,
|
243 |
+
fftw_complex *odata);
|
244 |
+
|
245 |
+
void CUFFTAPI fftw_execute_dft_r2c(const fftw_plan plan,
|
246 |
+
double *idata,
|
247 |
+
fftw_complex *odata);
|
248 |
+
|
249 |
+
void CUFFTAPI fftw_execute_dft_c2r(const fftw_plan plan,
|
250 |
+
fftw_complex *idata,
|
251 |
+
double *odata);
|
252 |
+
|
253 |
+
|
254 |
+
// CUFFTW defines and supports the following single precision APIs
|
255 |
+
|
256 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_1d(int n,
|
257 |
+
fftwf_complex *in,
|
258 |
+
fftwf_complex *out,
|
259 |
+
int sign,
|
260 |
+
unsigned flags);
|
261 |
+
|
262 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_2d(int n0,
|
263 |
+
int n1,
|
264 |
+
fftwf_complex *in,
|
265 |
+
fftwf_complex *out,
|
266 |
+
int sign,
|
267 |
+
unsigned flags);
|
268 |
+
|
269 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_3d(int n0,
|
270 |
+
int n1,
|
271 |
+
int n2,
|
272 |
+
fftwf_complex *in,
|
273 |
+
fftwf_complex *out,
|
274 |
+
int sign,
|
275 |
+
unsigned flags);
|
276 |
+
|
277 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft(int rank,
|
278 |
+
const int *n,
|
279 |
+
fftwf_complex *in,
|
280 |
+
fftwf_complex *out,
|
281 |
+
int sign,
|
282 |
+
unsigned flags);
|
283 |
+
|
284 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_r2c_1d(int n,
|
285 |
+
float *in,
|
286 |
+
fftwf_complex *out,
|
287 |
+
unsigned flags);
|
288 |
+
|
289 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_r2c_2d(int n0,
|
290 |
+
int n1,
|
291 |
+
float *in,
|
292 |
+
fftwf_complex *out,
|
293 |
+
unsigned flags);
|
294 |
+
|
295 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_r2c_3d(int n0,
|
296 |
+
int n1,
|
297 |
+
int n2,
|
298 |
+
float *in,
|
299 |
+
fftwf_complex *out,
|
300 |
+
unsigned flags);
|
301 |
+
|
302 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_r2c(int rank,
|
303 |
+
const int *n,
|
304 |
+
float *in,
|
305 |
+
fftwf_complex *out,
|
306 |
+
unsigned flags);
|
307 |
+
|
308 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_c2r_1d(int n,
|
309 |
+
fftwf_complex *in,
|
310 |
+
float *out,
|
311 |
+
unsigned flags);
|
312 |
+
|
313 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_c2r_2d(int n0,
|
314 |
+
int n1,
|
315 |
+
fftwf_complex *in,
|
316 |
+
float *out,
|
317 |
+
unsigned flags);
|
318 |
+
|
319 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_c2r_3d(int n0,
|
320 |
+
int n1,
|
321 |
+
int n2,
|
322 |
+
fftwf_complex *in,
|
323 |
+
float *out,
|
324 |
+
unsigned flags);
|
325 |
+
|
326 |
+
fftwf_plan CUFFTAPI fftwf_plan_dft_c2r(int rank,
|
327 |
+
const int *n,
|
328 |
+
fftwf_complex *in,
|
329 |
+
float *out,
|
330 |
+
unsigned flags);
|
331 |
+
|
332 |
+
fftwf_plan CUFFTAPI fftwf_plan_many_dft(int rank,
|
333 |
+
const int *n,
|
334 |
+
int batch,
|
335 |
+
fftwf_complex *in,
|
336 |
+
const int *inembed, int istride, int idist,
|
337 |
+
fftwf_complex *out,
|
338 |
+
const int *onembed, int ostride, int odist,
|
339 |
+
int sign, unsigned flags);
|
340 |
+
|
341 |
+
fftwf_plan CUFFTAPI fftwf_plan_many_dft_r2c(int rank,
|
342 |
+
const int *n,
|
343 |
+
int batch,
|
344 |
+
float *in,
|
345 |
+
const int *inembed, int istride, int idist,
|
346 |
+
fftwf_complex *out,
|
347 |
+
const int *onembed, int ostride, int odist,
|
348 |
+
unsigned flags);
|
349 |
+
|
350 |
+
fftwf_plan CUFFTAPI fftwf_plan_many_dft_c2r(int rank,
|
351 |
+
const int *n,
|
352 |
+
int batch,
|
353 |
+
fftwf_complex *in,
|
354 |
+
const int *inembed, int istride, int idist,
|
355 |
+
float *out,
|
356 |
+
const int *onembed, int ostride, int odist,
|
357 |
+
unsigned flags);
|
358 |
+
|
359 |
+
fftwf_plan CUFFTAPI fftwf_plan_guru_dft(int rank, const fftwf_iodim *dims,
|
360 |
+
int batch_rank, const fftwf_iodim *batch_dims,
|
361 |
+
fftwf_complex *in, fftwf_complex *out,
|
362 |
+
int sign, unsigned flags);
|
363 |
+
|
364 |
+
fftwf_plan CUFFTAPI fftwf_plan_guru_dft_r2c(int rank, const fftwf_iodim *dims,
|
365 |
+
int batch_rank, const fftwf_iodim *batch_dims,
|
366 |
+
float *in, fftwf_complex *out,
|
367 |
+
unsigned flags);
|
368 |
+
|
369 |
+
fftwf_plan CUFFTAPI fftwf_plan_guru_dft_c2r(int rank, const fftwf_iodim *dims,
|
370 |
+
int batch_rank, const fftwf_iodim *batch_dims,
|
371 |
+
fftwf_complex *in, float *out,
|
372 |
+
unsigned flags);
|
373 |
+
|
374 |
+
void CUFFTAPI fftwf_execute(const fftw_plan plan);
|
375 |
+
|
376 |
+
void CUFFTAPI fftwf_execute_dft(const fftwf_plan plan,
|
377 |
+
fftwf_complex *idata,
|
378 |
+
fftwf_complex *odata);
|
379 |
+
|
380 |
+
void CUFFTAPI fftwf_execute_dft_r2c(const fftwf_plan plan,
|
381 |
+
float *idata,
|
382 |
+
fftwf_complex *odata);
|
383 |
+
|
384 |
+
void CUFFTAPI fftwf_execute_dft_c2r(const fftwf_plan plan,
|
385 |
+
fftwf_complex *idata,
|
386 |
+
float *odata);
|
387 |
+
|
388 |
+
/// CUFFTW 64-bit Guru Interface
|
389 |
+
/// dp
|
390 |
+
fftw_plan CUFFTAPI fftw_plan_guru64_dft(int rank, const fftw_iodim64* dims, int batch_rank, const fftw_iodim64* batch_dims, fftw_complex* in, fftw_complex* out, int sign, unsigned flags);
|
391 |
+
|
392 |
+
fftw_plan CUFFTAPI fftw_plan_guru64_dft_r2c(int rank, const fftw_iodim64* dims, int batch_rank, const fftw_iodim64* batch_dims, double* in, fftw_complex* out, unsigned flags);
|
393 |
+
|
394 |
+
fftw_plan CUFFTAPI fftw_plan_guru64_dft_c2r(int rank, const fftw_iodim64* dims, int batch_rank, const fftw_iodim64* batch_dims, fftw_complex* in, double* out, unsigned flags);
|
395 |
+
|
396 |
+
/// sp
|
397 |
+
fftwf_plan CUFFTAPI fftwf_plan_guru64_dft(int rank, const fftwf_iodim64* dims, int batch_rank, const fftwf_iodim64* batch_dims, fftwf_complex* in, fftwf_complex* out, int sign, unsigned flags);
|
398 |
+
|
399 |
+
fftwf_plan CUFFTAPI fftwf_plan_guru64_dft_r2c(int rank, const fftwf_iodim64* dims, int batch_rank, const fftwf_iodim64* batch_dims, float* in, fftwf_complex* out, unsigned flags);
|
400 |
+
|
401 |
+
fftwf_plan CUFFTAPI fftwf_plan_guru64_dft_c2r(int rank, const fftwf_iodim64* dims, int batch_rank, const fftwf_iodim64* batch_dims, fftwf_complex* in, float* out, unsigned flags);
|
402 |
+
|
403 |
+
#ifdef _WIN32
|
404 |
+
#define _CUFFTAPI(T) T CUFFTAPI
|
405 |
+
#else
|
406 |
+
#define _CUFFTAPI(T) CUFFTAPI T
|
407 |
+
#endif
|
408 |
+
|
409 |
+
// CUFFTW defines and supports the following support APIs
|
410 |
+
_CUFFTAPI(void *) fftw_malloc(size_t n);
|
411 |
+
|
412 |
+
_CUFFTAPI(void *) fftwf_malloc(size_t n);
|
413 |
+
|
414 |
+
void CUFFTAPI fftw_free(void *pointer);
|
415 |
+
|
416 |
+
void CUFFTAPI fftwf_free(void *pointer);
|
417 |
+
|
418 |
+
void CUFFTAPI fftw_export_wisdom_to_file(FILE * output_file);
|
419 |
+
|
420 |
+
void CUFFTAPI fftwf_export_wisdom_to_file(FILE * output_file);
|
421 |
+
|
422 |
+
void CUFFTAPI fftw_import_wisdom_from_file(FILE * input_file);
|
423 |
+
|
424 |
+
void CUFFTAPI fftwf_import_wisdom_from_file(FILE * input_file);
|
425 |
+
|
426 |
+
void CUFFTAPI fftw_print_plan(const fftw_plan plan);
|
427 |
+
|
428 |
+
void CUFFTAPI fftwf_print_plan(const fftwf_plan plan);
|
429 |
+
|
430 |
+
void CUFFTAPI fftw_set_timelimit(double seconds);
|
431 |
+
|
432 |
+
void CUFFTAPI fftwf_set_timelimit(double seconds);
|
433 |
+
|
434 |
+
double CUFFTAPI fftw_cost(const fftw_plan plan);
|
435 |
+
|
436 |
+
double CUFFTAPI fftwf_cost(const fftw_plan plan);
|
437 |
+
|
438 |
+
void CUFFTAPI fftw_flops(const fftw_plan plan, double *add, double *mul, double *fma);
|
439 |
+
|
440 |
+
void CUFFTAPI fftwf_flops(const fftw_plan plan, double *add, double *mul, double *fma);
|
441 |
+
|
442 |
+
void CUFFTAPI fftw_destroy_plan(fftw_plan plan);
|
443 |
+
|
444 |
+
void CUFFTAPI fftwf_destroy_plan(fftwf_plan plan);
|
445 |
+
|
446 |
+
void CUFFTAPI fftw_cleanup(void);
|
447 |
+
|
448 |
+
void CUFFTAPI fftwf_cleanup(void);
|
449 |
+
|
450 |
+
#ifdef __cplusplus
|
451 |
+
}
|
452 |
+
#endif
|
453 |
+
|
454 |
+
#endif /* _CUFFTW_H_ */
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/lib/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/lib/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (189 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/nvidia/cufft/lib/libcufftw.so.11
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:ab06d9dfcfaf88ec2bcfb4c16b76ff0bf3b2728370d212e28607f53e1d40eff5
|
3 |
+
size 1614344
|
llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (189 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/include/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/include/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (197 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/include/nvJitLink.h
ADDED
@@ -0,0 +1,522 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
/*
|
2 |
+
* NVIDIA_COPYRIGHT_BEGIN
|
3 |
+
*
|
4 |
+
* Copyright (c) 2022-2023, NVIDIA CORPORATION. All rights reserved.
|
5 |
+
*
|
6 |
+
* NVIDIA CORPORATION and its licensors retain all intellectual property
|
7 |
+
* and proprietary rights in and to this software, related documentation
|
8 |
+
* and any modifications thereto. Any use, reproduction, disclosure or
|
9 |
+
* distribution of this software and related documentation without an express
|
10 |
+
* license agreement from NVIDIA CORPORATION is strictly prohibited.
|
11 |
+
*
|
12 |
+
* NVIDIA_COPYRIGHT_END
|
13 |
+
*/
|
14 |
+
|
15 |
+
#ifndef nvJitLink_INCLUDED
|
16 |
+
#define nvJitLink_INCLUDED
|
17 |
+
|
18 |
+
#ifdef __cplusplus
|
19 |
+
extern "C" {
|
20 |
+
#endif
|
21 |
+
|
22 |
+
#include <stdint.h>
|
23 |
+
#include <stdlib.h>
|
24 |
+
|
25 |
+
/**
|
26 |
+
*
|
27 |
+
* \defgroup error Error codes
|
28 |
+
*
|
29 |
+
*/
|
30 |
+
|
31 |
+
/** \ingroup error
|
32 |
+
*
|
33 |
+
* \brief The enumerated type nvJitLinkResult defines API call result codes.
|
34 |
+
* nvJitLink APIs return nvJitLinkResult codes to indicate the result.
|
35 |
+
*/
|
36 |
+
|
37 |
+
typedef enum {
|
38 |
+
NVJITLINK_SUCCESS = 0,
|
39 |
+
NVJITLINK_ERROR_UNRECOGNIZED_OPTION,
|
40 |
+
NVJITLINK_ERROR_MISSING_ARCH, // -arch=sm_NN option not specified
|
41 |
+
NVJITLINK_ERROR_INVALID_INPUT,
|
42 |
+
NVJITLINK_ERROR_PTX_COMPILE,
|
43 |
+
NVJITLINK_ERROR_NVVM_COMPILE,
|
44 |
+
NVJITLINK_ERROR_INTERNAL,
|
45 |
+
NVJITLINK_ERROR_THREADPOOL,
|
46 |
+
NVJITLINK_ERROR_UNRECOGNIZED_INPUT,
|
47 |
+
#ifdef NEW_ERROR_CODES // These error codes will appear in a future CUDA release.
|
48 |
+
NVJITLINK_ERROR_NULL_INPUT,
|
49 |
+
NVJITLINK_ERROR_INCOMPATIBLE_OPTIONS,
|
50 |
+
NVJITLINK_ERROR_INCORRECT_INPUT_TYPE,
|
51 |
+
NVJITLINK_ERROR_ARCH_MISMATCH,
|
52 |
+
NVJITLINK_ERROR_OUTDATED_LIBRARY,
|
53 |
+
NVJITLINK_ERROR_MISSING_FATBIN
|
54 |
+
#endif
|
55 |
+
} nvJitLinkResult;
|
56 |
+
|
57 |
+
#ifndef NEW_ERROR_CODES // To avoid breaking compatibility, we map them to existing error codes for now.
|
58 |
+
#define NVJITLINK_ERROR_NULL_INPUT NVJITLINK_ERROR_INVALID_INPUT
|
59 |
+
#define NVJITLINK_ERROR_INCOMPATIBLE_OPTIONS NVJITLINK_ERROR_INVALID_INPUT
|
60 |
+
#define NVJITLINK_ERROR_INCORRECT_INPUT_TYPE NVJITLINK_ERROR_INVALID_INPUT
|
61 |
+
#define NVJITLINK_ERROR_ARCH_MISMATCH NVJITLINK_ERROR_INTERNAL
|
62 |
+
#define NVJITLINK_ERROR_OUTDATED_LIBRARY NVJITLINK_ERROR_INTERNAL
|
63 |
+
#define NVJITLINK_ERROR_MISSING_FATBIN NVJITLINK_ERROR_INVALID_INPUT
|
64 |
+
#endif
|
65 |
+
|
66 |
+
/**
|
67 |
+
*
|
68 |
+
* \defgroup linking Linking
|
69 |
+
*
|
70 |
+
*/
|
71 |
+
|
72 |
+
/** \ingroup linking
|
73 |
+
*
|
74 |
+
* \brief The enumerated type nvJitLinkInputType defines the kind of inputs
|
75 |
+
* that can be passed to nvJitLinkAdd* APIs.
|
76 |
+
*/
|
77 |
+
|
78 |
+
typedef enum {
  // Kind of input accepted by the nvJitLinkAdd* entry points.
  NVJITLINK_INPUT_NONE = 0, // error
  NVJITLINK_INPUT_CUBIN = 1,
  NVJITLINK_INPUT_PTX,
  NVJITLINK_INPUT_LTOIR,
  NVJITLINK_INPUT_FATBIN,
  NVJITLINK_INPUT_OBJECT,
  NVJITLINK_INPUT_LIBRARY,
  // Gap (7..9) left so new concrete types can be added before ANY.
  NVJITLINK_INPUT_ANY = 10 // will dynamically determine one of above types
} nvJitLinkInputType;
|
88 |
+
|
89 |
+
/**
|
90 |
+
* \defgroup options Supported Link Options
|
91 |
+
*
|
92 |
+
* nvJitLink supports the link options below.
|
93 |
+
* Option names are prefixed with a single dash (\c -).
|
94 |
+
* Options that take a value have an assignment operator (\c =)
|
95 |
+
* followed by the option value, with no spaces, e.g. \c "-arch=sm_90".
|
96 |
+
*
|
97 |
+
* The supported options are:
|
98 |
+
* - \c -arch=sm_<N\> \n
|
99 |
+
* Pass SM architecture value. See nvcc for valid values of <N\>.
|
100 |
+
* Can use compute_<N\> value instead if only generating PTX.
|
101 |
+
* This is a required option.
|
102 |
+
* - \c -maxrregcount=<N\> \n
|
103 |
+
* Maximum register count.
|
104 |
+
* - \c -time \n
|
105 |
+
* Print timing information to InfoLog.
|
106 |
+
* - \c -verbose \n
|
107 |
+
* Print verbose messages to InfoLog.
|
108 |
+
* - \c -lto \n
|
109 |
+
* Do link time optimization.
|
110 |
+
* - \c -ptx \n
|
111 |
+
* Emit ptx after linking instead of cubin; only supported with \c -lto
|
112 |
+
* - \c -O<N\> \n
|
113 |
+
* Optimization level. Only 0 and 3 are accepted.
|
114 |
+
* - \c -g \n
|
115 |
+
* Generate debug information.
|
116 |
+
* - \c -lineinfo \n
|
117 |
+
* Generate line information.
|
118 |
+
* - \c -ftz=<n\> \n
|
119 |
+
* Flush to zero.
|
120 |
+
* - \c -prec-div=<n\> \n
|
121 |
+
* Precise divide.
|
122 |
+
* - \c -prec-sqrt=<n\> \n
|
123 |
+
* Precise square root.
|
124 |
+
* - \c -fma=<n\> \n
|
125 |
+
* Fast multiply add.
|
126 |
+
* - \c -kernels-used=<name\> \n
|
127 |
+
* Pass list of kernels that are used; any not in the list can be removed.
|
128 |
+
* This option can be specified multiple times.
|
129 |
+
* - \c -variables-used=<name\> \n
|
130 |
+
* Pass list of variables that are used; any not in the list can be removed.
|
131 |
+
* This option can be specified multiple times.
|
132 |
+
* - \c -optimize-unused-variables \n
|
133 |
+
* Normally device code optimization is limited by not knowing what the
|
134 |
+
* host code references. With this option it can assume that if a variable
|
135 |
+
* is not referenced in device code then it can be removed.
|
136 |
+
* - \c -Xptxas=<opt\> \n
|
137 |
+
* Pass <opt\> to ptxas. This option can be called multiple times.
|
138 |
+
* - \c -split-compile=<N\> \n
|
139 |
+
* Split compilation maximum thread count. Use 0 to use all available processors.
|
140 |
+
* Value of 1 disables split compilation (default).
|
141 |
+
* - \c -split-compile-extended=<N\> \n
|
142 |
+
* [Experimental] A more aggressive form of split compilation.
|
143 |
+
* Accepts a maximum thread count value. Use 0 to use all available processors.
|
144 |
+
* Value of 1 disables extended split compilation (default).
|
145 |
+
* - \c -jump-table-density=<N\> \n
|
146 |
+
* When doing LTO, specify the case density percentage in switch statements,
|
147 |
+
* and use it as a minimal threshold to determine whether jump table(brx.idx
|
148 |
+
* instruction) will be used to implement a switch statement. Default
|
149 |
+
* value is 101. The percentage ranges from 0 to 101 inclusively.
|
150 |
+
*/
|
151 |
+
|
152 |
+
/**
|
153 |
+
* \ingroup linking
|
154 |
+
* \brief nvJitLinkHandle is the unit of linking, and an opaque handle for
|
155 |
+
* a program.
|
156 |
+
*
|
157 |
+
* To link inputs, an instance of nvJitLinkHandle must be created first with
|
158 |
+
* nvJitLinkCreate().
|
159 |
+
*/
|
160 |
+
|
161 |
+
typedef struct nvJitLink* nvJitLinkHandle; // opaque handle
|
162 |
+
|
163 |
+
// For versioning we will have separate API version for each library version
|
164 |
+
|
165 |
+
extern nvJitLinkResult __nvJitLinkCreate_12_4(
|
166 |
+
nvJitLinkHandle *handle,
|
167 |
+
uint32_t numOptions,
|
168 |
+
const char **options);
|
169 |
+
/**
|
170 |
+
* \ingroup linking
|
171 |
+
* \brief nvJitLinkCreate creates an instance of nvJitLinkHandle with the
|
172 |
+
* given input options, and sets the output parameter \p handle.
|
173 |
+
*
|
174 |
+
* \param [out] handle Address of nvJitLink handle.
|
175 |
+
* \param [in] numOptions Number of options passed.
|
176 |
+
* \param [in] options Array of size \p numOptions of option strings.
|
177 |
+
* \return
|
178 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
179 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_UNRECOGNIZED_OPTION\endlink
|
180 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_MISSING_ARCH\endlink
|
181 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
182 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
183 |
+
*
|
184 |
+
* It supports options listed in \ref options.
|
185 |
+
*
|
186 |
+
* \see nvJitLinkDestroy
|
187 |
+
*/
|
188 |
+
#ifndef NVJITLINK_NO_INLINE
|
189 |
+
static inline nvJitLinkResult nvJitLinkCreate(
    nvJitLinkHandle *handle,
    uint32_t numOptions,
    const char **options)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkCreate_12_4 (handle, numOptions, options);
}
|
196 |
+
#endif
|
197 |
+
|
198 |
+
extern nvJitLinkResult __nvJitLinkDestroy_12_4 (nvJitLinkHandle *handle);
|
199 |
+
/**
|
200 |
+
* \ingroup linking
|
201 |
+
* \brief nvJitLinkDestroy frees the memory associated with the given handle
|
202 |
+
* and sets it to NULL.
|
203 |
+
*
|
204 |
+
* \param [in] handle Address of nvJitLink handle.
|
205 |
+
* \return
|
206 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
207 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
208 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
209 |
+
*
|
210 |
+
* \see nvJitLinkCreate
|
211 |
+
*/
|
212 |
+
#ifndef NVJITLINK_NO_INLINE
|
213 |
+
static inline nvJitLinkResult nvJitLinkDestroy (nvJitLinkHandle *handle)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkDestroy_12_4 (handle);
}
|
217 |
+
#endif
|
218 |
+
|
219 |
+
extern nvJitLinkResult __nvJitLinkAddData_12_4(
|
220 |
+
nvJitLinkHandle handle,
|
221 |
+
nvJitLinkInputType inputType,
|
222 |
+
const void *data,
|
223 |
+
size_t size,
|
224 |
+
const char *name); // name can be null
|
225 |
+
/**
|
226 |
+
* \ingroup linking
|
227 |
+
* \brief nvJitLinkAddData adds data image to the link.
|
228 |
+
*
|
229 |
+
* \param [in] handle nvJitLink handle.
|
230 |
+
* \param [in] inputType kind of input.
|
231 |
+
* \param [in] data pointer to data image in memory.
|
232 |
+
* \param [in] size size of the data.
|
233 |
+
* \param [in] name name of input object.
|
234 |
+
* \return
|
235 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
236 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
237 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
238 |
+
*/
|
239 |
+
#ifndef NVJITLINK_NO_INLINE
|
240 |
+
static inline nvJitLinkResult nvJitLinkAddData(
    nvJitLinkHandle handle,
    nvJitLinkInputType inputType,
    const void *data,
    size_t size,
    const char *name) // name can be null
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkAddData_12_4 (handle, inputType, data, size, name);
}
|
249 |
+
#endif
|
250 |
+
|
251 |
+
extern nvJitLinkResult __nvJitLinkAddFile_12_4(
|
252 |
+
nvJitLinkHandle handle,
|
253 |
+
nvJitLinkInputType inputType,
|
254 |
+
const char *fileName); // includes path to file
|
255 |
+
/**
|
256 |
+
* \ingroup linking
|
257 |
+
* \brief nvJitLinkAddFile reads data from file and links it in.
|
258 |
+
*
|
259 |
+
* \param [in] handle nvJitLink handle.
|
260 |
+
* \param [in] inputType kind of input.
|
261 |
+
* \param [in] fileName name of file.
|
262 |
+
* \return
|
263 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
264 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
265 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
266 |
+
*/
|
267 |
+
#ifndef NVJITLINK_NO_INLINE
|
268 |
+
static inline nvJitLinkResult nvJitLinkAddFile(
    nvJitLinkHandle handle,
    nvJitLinkInputType inputType,
    const char *fileName) // includes path to file
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkAddFile_12_4 (handle, inputType, fileName);
}
|
275 |
+
#endif
|
276 |
+
|
277 |
+
extern nvJitLinkResult __nvJitLinkComplete_12_4 (nvJitLinkHandle handle);
|
278 |
+
/**
|
279 |
+
* \ingroup linking
|
280 |
+
* \brief nvJitLinkComplete does the actual link.
|
281 |
+
*
|
282 |
+
* \param [in] handle nvJitLink handle.
|
283 |
+
* \return
|
284 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
285 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
286 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
287 |
+
*/
|
288 |
+
#ifndef NVJITLINK_NO_INLINE
|
289 |
+
static inline nvJitLinkResult nvJitLinkComplete (nvJitLinkHandle handle)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkComplete_12_4 (handle);
}
|
293 |
+
#endif
|
294 |
+
|
295 |
+
extern nvJitLinkResult __nvJitLinkGetLinkedCubinSize_12_4(
|
296 |
+
nvJitLinkHandle handle,
|
297 |
+
size_t *size);
|
298 |
+
/**
|
299 |
+
* \ingroup linking
|
300 |
+
* \brief nvJitLinkGetLinkedCubinSize gets the size of the linked cubin.
|
301 |
+
*
|
302 |
+
* \param [in] handle nvJitLink handle.
|
303 |
+
* \param [out] size Size of the linked cubin.
|
304 |
+
* \return
|
305 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
306 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
307 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
308 |
+
*
|
309 |
+
* \see nvJitLinkGetLinkedCubin
|
310 |
+
*/
|
311 |
+
#ifndef NVJITLINK_NO_INLINE
|
312 |
+
static inline nvJitLinkResult nvJitLinkGetLinkedCubinSize(
    nvJitLinkHandle handle,
    size_t *size)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkGetLinkedCubinSize_12_4 (handle, size);
}
|
318 |
+
#endif
|
319 |
+
|
320 |
+
extern nvJitLinkResult __nvJitLinkGetLinkedCubin_12_4(
|
321 |
+
nvJitLinkHandle handle,
|
322 |
+
void *cubin);
|
323 |
+
/**
|
324 |
+
* \ingroup linking
|
325 |
+
* \brief nvJitLinkGetLinkedCubin gets the linked cubin.
|
326 |
+
*
|
327 |
+
* \param [in] handle nvJitLink handle.
|
328 |
+
* \param [out] cubin The linked cubin.
|
329 |
+
* \return
|
330 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
331 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
332 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
333 |
+
*
|
334 |
+
* User is responsible for allocating enough space to hold the \p cubin.
|
335 |
+
* \see nvJitLinkGetLinkedCubinSize
|
336 |
+
*/
|
337 |
+
#ifndef NVJITLINK_NO_INLINE
|
338 |
+
static inline nvJitLinkResult nvJitLinkGetLinkedCubin(
    nvJitLinkHandle handle,
    void *cubin)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol.
     Caller must have allocated enough space (see ...GetLinkedCubinSize). */
  return __nvJitLinkGetLinkedCubin_12_4 (handle, cubin);
}
|
344 |
+
#endif
|
345 |
+
|
346 |
+
extern nvJitLinkResult __nvJitLinkGetLinkedPtxSize_12_4(
|
347 |
+
nvJitLinkHandle handle,
|
348 |
+
size_t *size);
|
349 |
+
/**
|
350 |
+
* \ingroup linking
|
351 |
+
* \brief nvJitLinkGetLinkedPtxSize gets the size of the linked ptx.
|
352 |
+
*
|
353 |
+
* \param [in] handle nvJitLink handle.
|
354 |
+
* \param [out] size Size of the linked PTX.
|
355 |
+
* \return
|
356 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
357 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
358 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
359 |
+
*
|
360 |
+
* Linked PTX is only available when using the \c -lto option.
|
361 |
+
* \see nvJitLinkGetLinkedPtx
|
362 |
+
*/
|
363 |
+
#ifndef NVJITLINK_NO_INLINE
|
364 |
+
static inline nvJitLinkResult nvJitLinkGetLinkedPtxSize(
    nvJitLinkHandle handle,
    size_t *size)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkGetLinkedPtxSize_12_4 (handle, size);
}
|
370 |
+
#endif
|
371 |
+
|
372 |
+
extern nvJitLinkResult __nvJitLinkGetLinkedPtx_12_4(
|
373 |
+
nvJitLinkHandle handle,
|
374 |
+
char *ptx);
|
375 |
+
/**
|
376 |
+
* \ingroup linking
|
377 |
+
* \brief nvJitLinkGetLinkedPtx gets the linked ptx.
|
378 |
+
*
|
379 |
+
* \param [in] handle nvJitLink handle.
|
380 |
+
* \param [out] ptx The linked PTX.
|
381 |
+
* \return
|
382 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
383 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
384 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
385 |
+
*
|
386 |
+
* Linked PTX is only available when using the \c -lto option.
|
387 |
+
* User is responsible for allocating enough space to hold the \p ptx.
|
388 |
+
* \see nvJitLinkGetLinkedPtxSize
|
389 |
+
*/
|
390 |
+
#ifndef NVJITLINK_NO_INLINE
|
391 |
+
static inline nvJitLinkResult nvJitLinkGetLinkedPtx(
    nvJitLinkHandle handle,
    char *ptx)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol.
     Caller must have allocated enough space (see ...GetLinkedPtxSize). */
  return __nvJitLinkGetLinkedPtx_12_4 (handle, ptx);
}
|
397 |
+
#endif
|
398 |
+
|
399 |
+
extern nvJitLinkResult __nvJitLinkGetErrorLogSize_12_4(
|
400 |
+
nvJitLinkHandle handle,
|
401 |
+
size_t *size);
|
402 |
+
/**
|
403 |
+
* \ingroup linking
|
404 |
+
* \brief nvJitLinkGetErrorLogSize gets the size of the error log.
|
405 |
+
*
|
406 |
+
* \param [in] handle nvJitLink handle.
|
407 |
+
* \param [out] size Size of the error log.
|
408 |
+
* \return
|
409 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
410 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
411 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
412 |
+
*
|
413 |
+
* \see nvJitLinkGetErrorLog
|
414 |
+
*/
|
415 |
+
#ifndef NVJITLINK_NO_INLINE
|
416 |
+
static inline nvJitLinkResult nvJitLinkGetErrorLogSize(
    nvJitLinkHandle handle,
    size_t *size)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkGetErrorLogSize_12_4 (handle, size);
}
|
422 |
+
#endif
|
423 |
+
|
424 |
+
extern nvJitLinkResult __nvJitLinkGetErrorLog_12_4(
|
425 |
+
nvJitLinkHandle handle,
|
426 |
+
char *log);
|
427 |
+
/**
|
428 |
+
* \ingroup linking
|
429 |
+
* \brief nvJitLinkGetErrorLog puts any error messages in the log.
|
430 |
+
*
|
431 |
+
* \param [in] handle nvJitLink handle.
|
432 |
+
* \param [out] log The error log.
|
433 |
+
* \return
|
434 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
435 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
436 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
437 |
+
*
|
438 |
+
* User is responsible for allocating enough space to hold the \p log.
|
439 |
+
* \see nvJitLinkGetErrorLogSize
|
440 |
+
*/
|
441 |
+
#ifndef NVJITLINK_NO_INLINE
|
442 |
+
static inline nvJitLinkResult nvJitLinkGetErrorLog(
    nvJitLinkHandle handle,
    char *log)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol.
     Caller must have allocated enough space (see ...GetErrorLogSize). */
  return __nvJitLinkGetErrorLog_12_4 (handle, log);
}
|
448 |
+
#endif
|
449 |
+
|
450 |
+
extern nvJitLinkResult __nvJitLinkGetInfoLogSize_12_4(
|
451 |
+
nvJitLinkHandle handle,
|
452 |
+
size_t *size);
|
453 |
+
/**
|
454 |
+
* \ingroup linking
|
455 |
+
* \brief nvJitLinkGetInfoLogSize gets the size of the info log.
|
456 |
+
*
|
457 |
+
* \param [in] handle nvJitLink handle.
|
458 |
+
* \param [out] size Size of the info log.
|
459 |
+
* \return
|
460 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
461 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
462 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
463 |
+
*
|
464 |
+
* \see nvJitLinkGetInfoLog
|
465 |
+
*/
|
466 |
+
#ifndef NVJITLINK_NO_INLINE
|
467 |
+
static inline nvJitLinkResult nvJitLinkGetInfoLogSize(
    nvJitLinkHandle handle,
    size_t *size)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol. */
  return __nvJitLinkGetInfoLogSize_12_4 (handle, size);
}
|
473 |
+
#endif
|
474 |
+
|
475 |
+
extern nvJitLinkResult __nvJitLinkGetInfoLog_12_4(
|
476 |
+
nvJitLinkHandle handle,
|
477 |
+
char *log);
|
478 |
+
/**
|
479 |
+
* \ingroup linking
|
480 |
+
* \brief nvJitLinkGetInfoLog puts any info messages in the log.
|
481 |
+
*
|
482 |
+
* \param [in] handle nvJitLink handle.
|
483 |
+
* \param [out] log The info log.
|
484 |
+
* \return
|
485 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
486 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
487 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
488 |
+
*
|
489 |
+
* User is responsible for allocating enough space to hold the \p log.
|
490 |
+
* \see nvJitLinkGetInfoLogSize
|
491 |
+
*/
|
492 |
+
#ifndef NVJITLINK_NO_INLINE
|
493 |
+
static inline nvJitLinkResult nvJitLinkGetInfoLog(
    nvJitLinkHandle handle,
    char *log)
{
  /* Inline shim: bind this call to the CUDA 12.4 versioned ABI symbol.
     Caller must have allocated enough space (see ...GetInfoLogSize). */
  return __nvJitLinkGetInfoLog_12_4 (handle, log);
}
|
499 |
+
#endif
|
500 |
+
|
501 |
+
/**
|
502 |
+
* \ingroup linking
|
503 |
+
* \brief nvJitLinkVersion returns the current version of nvJitLink.
|
504 |
+
*
|
505 |
+
* \param [out] major The major version.
|
506 |
+
* \param [out] minor The minor version.
|
507 |
+
* \return
|
508 |
+
* - \link #nvJitLinkResult NVJITLINK_SUCCESS \endlink
|
509 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INVALID_INPUT\endlink
|
510 |
+
* - \link #nvJitLinkResult NVJITLINK_ERROR_INTERNAL\endlink
|
511 |
+
*
|
512 |
+
*/
|
513 |
+
extern nvJitLinkResult nvJitLinkVersion(
|
514 |
+
unsigned int *major,
|
515 |
+
unsigned int *minor);
|
516 |
+
|
517 |
+
#ifdef __cplusplus
|
518 |
+
}
|
519 |
+
#endif
|
520 |
+
|
521 |
+
#endif // nvJitLink_INCLUDED
|
522 |
+
|
llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/lib/__init__.py
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/nvidia/nvjitlink/lib/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (193 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/__init__.py
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "24.0"

__author__ = "Donald Stufft and individual contributors"
__email__ = "[email protected]"

__license__ = "BSD-2-Clause or Apache-2.0"
# f-string instead of dated %-formatting; the resulting value is unchanged.
__copyright__ = f"2014 {__author__}"
|
llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/_manylinux.cpython-310.pyc
ADDED
Binary file (6.42 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/_parser.cpython-310.pyc
ADDED
Binary file (8.96 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/_structures.cpython-310.pyc
ADDED
Binary file (2.69 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/markers.cpython-310.pyc
ADDED
Binary file (6.89 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/requirements.cpython-310.pyc
ADDED
Binary file (2.83 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/tags.cpython-310.pyc
ADDED
Binary file (13.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/__pycache__/version.cpython-310.pyc
ADDED
Binary file (14.2 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/packaging/_elffile.py
ADDED
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
ELF file parser.
|
3 |
+
|
4 |
+
This provides a class ``ELFFile`` that parses an ELF executable in a similar
|
5 |
+
interface to ``ZipFile``. Only the read interface is implemented.
|
6 |
+
|
7 |
+
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
8 |
+
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
9 |
+
"""
|
10 |
+
|
11 |
+
import enum
|
12 |
+
import os
|
13 |
+
import struct
|
14 |
+
from typing import IO, Optional, Tuple
|
15 |
+
|
16 |
+
|
17 |
+
class ELFInvalid(ValueError):
    """Raised when a file cannot be parsed as an ELF executable."""
    pass
|
19 |
+
|
20 |
+
|
21 |
+
class EIClass(enum.IntEnum):
    # ELF file class (EI_CLASS identification byte): 32- vs 64-bit objects.
    C32 = 1
    C64 = 2
|
24 |
+
|
25 |
+
|
26 |
+
class EIData(enum.IntEnum):
    # ELF data encoding (EI_DATA identification byte): endianness.
    Lsb = 1  # Little-endian.
    Msb = 2  # Big-endian.
|
29 |
+
|
30 |
+
|
31 |
+
class EMachine(enum.IntEnum):
    # Architecture values (ELF e_machine field) this code cares about.
    I386 = 3
    S390 = 22
    Arm = 40
    X8664 = 62
    # 183 corresponds to AArch64; NOTE(review): member-name spelling
    # ("AArc64") looks like a typo but may be relied on by callers — keep.
    AArc64 = 183
|
37 |
+
|
38 |
+
|
39 |
+
class ELFFile:
    """
    Representation of an ELF executable.

    Parses just enough of the ELF header (identification bytes plus the
    machine/program-header fields) to answer platform-compatibility
    questions; raises ELFInvalid on anything unparseable.
    """

    def __init__(self, f: IO[bytes]) -> None:
        self._f = f

        try:
            # The 16 e_ident bytes at the start of every ELF file.
            ident = self._read("16B")
        except struct.error:
            raise ELFInvalid("unable to parse identification")
        magic = bytes(ident[:4])
        if magic != b"\x7fELF":
            raise ELFInvalid(f"invalid magic: {magic!r}")

        self.capacity = ident[4]  # Format for program header (bitness).
        self.encoding = ident[5]  # Data structure encoding (endianness).

        try:
            # Pick struct formats keyed on (bitness, endianness):
            # e_fmt: Format for program header.
            # p_fmt: Format for section header.
            # p_idx: Indexes to find p_type, p_offset, and p_filesz.
            e_fmt, self._p_fmt, self._p_idx = {
                (1, 1): ("<HHIIIIIHHH", "<IIIIIIII", (0, 1, 4)),  # 32-bit LSB.
                (1, 2): (">HHIIIIIHHH", ">IIIIIIII", (0, 1, 4)),  # 32-bit MSB.
                (2, 1): ("<HHIQQQIHHH", "<IIQQQQQQ", (0, 2, 5)),  # 64-bit LSB.
                (2, 2): (">HHIQQQIHHH", ">IIQQQQQQ", (0, 2, 5)),  # 64-bit MSB.
            }[(self.capacity, self.encoding)]
        except KeyError:
            raise ELFInvalid(
                f"unrecognized capacity ({self.capacity}) or "
                f"encoding ({self.encoding})"
            )

        try:
            # Unpack the rest of the ELF header; unused fields are discarded.
            (
                _,
                self.machine,  # Architecture type.
                _,
                _,
                self._e_phoff,  # Offset of program header.
                _,
                self.flags,  # Processor-specific flags.
                _,
                self._e_phentsize,  # Size of section.
                self._e_phnum,  # Number of sections.
            ) = self._read(e_fmt)
        except struct.error as e:
            raise ELFInvalid("unable to parse machine and section information") from e

    def _read(self, fmt: str) -> Tuple[int, ...]:
        # Read exactly as many bytes as *fmt* describes from the current
        # file position and unpack them.
        return struct.unpack(fmt, self._f.read(struct.calcsize(fmt)))

    @property
    def interpreter(self) -> Optional[str]:
        """
        The path recorded in the ``PT_INTERP`` section header.

        Returns None when no PT_INTERP entry is found (e.g. statically
        linked executables).
        """
        for index in range(self._e_phnum):
            # Seek to the index-th program header entry.
            self._f.seek(self._e_phoff + self._e_phentsize * index)
            try:
                data = self._read(self._p_fmt)
            except struct.error:
                continue
            if data[self._p_idx[0]] != 3:  # Not PT_INTERP.
                continue
            # Read p_filesz bytes at p_offset: the NUL-terminated interpreter path.
            self._f.seek(data[self._p_idx[1]])
            return os.fsdecode(self._f.read(data[self._p_idx[2]])).strip("\0")
        return None
|
llmeval-env/lib/python3.10/site-packages/packaging/_manylinux.py
ADDED
@@ -0,0 +1,260 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import collections
|
2 |
+
import contextlib
|
3 |
+
import functools
|
4 |
+
import os
|
5 |
+
import re
|
6 |
+
import sys
|
7 |
+
import warnings
|
8 |
+
from typing import Dict, Generator, Iterator, NamedTuple, Optional, Sequence, Tuple
|
9 |
+
|
10 |
+
from ._elffile import EIClass, EIData, ELFFile, EMachine
|
11 |
+
|
12 |
+
EF_ARM_ABIMASK = 0xFF000000
|
13 |
+
EF_ARM_ABI_VER5 = 0x05000000
|
14 |
+
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
15 |
+
|
16 |
+
|
17 |
+
# `os.PathLike` not a generic type until Python 3.9, so sticking with `str`
|
18 |
+
# as the type for `path` until then.
|
19 |
+
@contextlib.contextmanager
def _parse_elf(path: str) -> Generator[Optional[ELFFile], None, None]:
    """Context manager yielding an ``ELFFile`` for *path*, or None on failure.

    Any error opening or parsing the file (OSError, TypeError, ValueError —
    note ELFInvalid is a ValueError) yields None instead of propagating.
    """
    try:
        with open(path, "rb") as f:
            yield ELFFile(f)
    except (OSError, TypeError, ValueError):
        yield None
|
26 |
+
|
27 |
+
|
28 |
+
def _is_linux_armhf(executable: str) -> bool:
    """Return True if *executable* is a 32-bit LSB ARM hard-float ELF binary.

    The hard-float ABI can be detected from the ELF header of the running
    process; see https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    """
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        if elf.capacity != EIClass.C32 or elf.encoding != EIData.Lsb:
            return False
        if elf.machine != EMachine.Arm:
            return False
        if elf.flags & EF_ARM_ABIMASK != EF_ARM_ABI_VER5:
            return False
        return elf.flags & EF_ARM_ABI_FLOAT_HARD == EF_ARM_ABI_FLOAT_HARD
|
41 |
+
|
42 |
+
|
43 |
+
def _is_linux_i686(executable: str) -> bool:
    """Return True if *executable* is a 32-bit little-endian x86 ELF binary."""
    with _parse_elf(executable) as elf:
        if elf is None:
            return False
        return (
            elf.capacity == EIClass.C32
            and elf.encoding == EIData.Lsb
            and elf.machine == EMachine.I386
        )
|
51 |
+
|
52 |
+
|
53 |
+
def _have_compatible_abi(executable: str, archs: Sequence[str]) -> bool:
|
54 |
+
if "armv7l" in archs:
|
55 |
+
return _is_linux_armhf(executable)
|
56 |
+
if "i686" in archs:
|
57 |
+
return _is_linux_i686(executable)
|
58 |
+
allowed_archs = {
|
59 |
+
"x86_64",
|
60 |
+
"aarch64",
|
61 |
+
"ppc64",
|
62 |
+
"ppc64le",
|
63 |
+
"s390x",
|
64 |
+
"loongarch64",
|
65 |
+
"riscv64",
|
66 |
+
}
|
67 |
+
return any(arch in allowed_archs for arch in archs)
|
68 |
+
|
69 |
+
|
70 |
+
# If glibc ever changes its major version, we need to know what the last
|
71 |
+
# minor version was, so we can build the complete list of all versions.
|
72 |
+
# For now, guess what the highest minor version might be, assume it will
|
73 |
+
# be 50 for testing. Once this actually happens, update the dictionary
|
74 |
+
# with the actual value.
|
75 |
+
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
|
76 |
+
|
77 |
+
|
78 |
+
class _GLibCVersion(NamedTuple):
    # (major, minor) glibc version; tuple ordering gives natural comparison.
    major: int
    minor: int
|
81 |
+
|
82 |
+
|
83 |
+
def _glibc_version_string_confstr() -> Optional[str]:
    """
    Primary implementation of glibc_version_string using os.confstr.

    Returns a version string such as "2.17", or None when it cannot be
    determined this way.
    """
    # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
    # to be broken or missing. This strategy is used in the standard library
    # platform module.
    # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
    try:
        # Should be a string like "glibc 2.17".
        version_string: Optional[str] = os.confstr("CS_GNU_LIBC_VERSION")
        assert version_string is not None
        # Drop the "glibc" prefix, keeping only the version component.
        _, version = version_string.rsplit()
    except (AssertionError, AttributeError, OSError, ValueError):
        # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
        return None
    return version
|
100 |
+
|
101 |
+
|
102 |
+
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.

    Returns a version string such as "2.5", or None when the process is not
    linked against glibc (or ctypes/dlopen are unavailable).
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
|
144 |
+
|
145 |
+
|
146 |
+
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc.

    Tries the fast os.confstr path first, then falls back to ctypes.
    """
    version = _glibc_version_string_confstr()
    if version:
        return version
    return _glibc_version_string_ctypes()
|
149 |
+
|
150 |
+
|
151 |
+
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
|
152 |
+
"""Parse glibc version.
|
153 |
+
|
154 |
+
We use a regexp instead of str.split because we want to discard any
|
155 |
+
random junk that might come after the minor version -- this might happen
|
156 |
+
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
157 |
+
uses version strings like "2.20-2014.11"). See gh-3588.
|
158 |
+
"""
|
159 |
+
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
160 |
+
if not m:
|
161 |
+
warnings.warn(
|
162 |
+
f"Expected glibc version with 2 components major.minor,"
|
163 |
+
f" got: {version_str}",
|
164 |
+
RuntimeWarning,
|
165 |
+
)
|
166 |
+
return -1, -1
|
167 |
+
return int(m.group("major")), int(m.group("minor"))
|
168 |
+
|
169 |
+
|
170 |
+
@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
    # Cached probe of the runtime glibc version.  (-1, -1) means "no glibc
    # detected" (or an unparsable version string) and compares less than
    # any real version tuple.
    version_str = _glibc_version_string()
    if version_str is None:
        return (-1, -1)
    return _parse_glibc_version(version_str)
|
176 |
+
|
177 |
+
|
178 |
+
# From PEP 513, PEP 600
def _is_compatible(arch: str, version: _GLibCVersion) -> bool:
    """Return True if the ``manylinux_{version}_{arch}`` tag is usable here.

    The system glibc must be at least ``version``; an installed
    ``_manylinux`` override module (the PEP 513/600 hook) may then veto or
    force compatibility.
    """
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux
    except ImportError:
        # No override module installed: the glibc check above decides.
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        # PEP 600 hook: returns True/False, or None for "no opinion".
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    # Legacy per-tag boolean attributes (PEP 513 / 571 / 599).
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True
|
203 |
+
|
204 |
+
|
205 |
+
# Maps a (glibc major, minor) pair to the pre-PEP 600 manylinux tag name,
# so the legacy aliases can be yielded alongside manylinux_X_Y tags.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
|
213 |
+
|
214 |
+
|
215 |
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate manylinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be manylinux-compatible.

    :returns: An iterator of compatible manylinux tags.
    """
    if not _have_compatible_abi(sys.executable, archs):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    # The stored minor is one below the minimum because the range() calls
    # below treat it as an exclusive lower bound.
    too_old_glibc2 = _GLibCVersion(2, 16)
    if set(archs) & {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for arch in archs:
        for glibc_max in glibc_max_list:
            if glibc_max.major == too_old_glibc2.major:
                min_minor = too_old_glibc2.minor
            else:
                # For other glibc major versions oldest supported is (x, 0).
                min_minor = -1
            for glibc_minor in range(glibc_max.minor, min_minor, -1):
                glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
                tag = "manylinux_{}_{}".format(*glibc_version)
                if _is_compatible(arch, glibc_version):
                    yield f"{tag}_{arch}"
                # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
                if glibc_version in _LEGACY_MANYLINUX_MAP:
                    legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                    if _is_compatible(arch, glibc_version):
                        yield f"{legacy_tag}_{arch}"
|
llmeval-env/lib/python3.10/site-packages/packaging/_musllinux.py
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""PEP 656 support.
|
2 |
+
|
3 |
+
This module implements logic to detect if the currently running Python is
|
4 |
+
linked against musl, and what musl version is used.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import functools
|
8 |
+
import re
|
9 |
+
import subprocess
|
10 |
+
import sys
|
11 |
+
from typing import Iterator, NamedTuple, Optional, Sequence
|
12 |
+
|
13 |
+
from ._elffile import ELFFile
|
14 |
+
|
15 |
+
|
16 |
+
class _MuslVersion(NamedTuple):
    # (major, minor) parsed from the musl loader's "Version X.Y" banner line.
    major: int
    minor: int
|
19 |
+
|
20 |
+
|
21 |
+
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
    """Extract the musl version from the loader's banner text.

    Returns None unless the text has at least two non-blank lines, the
    first beginning with "musl" and the second matching "Version X.Y".
    """
    stripped = (line.strip() for line in output.splitlines())
    lines = [line for line in stripped if line]
    if len(lines) < 2 or not lines[0].startswith("musl"):
        return None
    match = re.match(r"Version (\d+)\.(\d+)", lines[1])
    if match is None:
        return None
    return _MuslVersion(major=int(match.group(1)), minor=int(match.group(2)))
|
29 |
+
|
30 |
+
|
31 |
+
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader
    """
    try:
        with open(executable, "rb") as f:
            ld = ELFFile(f).interpreter
    except (OSError, TypeError, ValueError):
        # Unreadable file, or not a valid/parsable ELF binary.
        return None
    if ld is None or "musl" not in ld:
        # Statically linked, or the interpreter path is not a musl loader.
        return None
    # Running the loader with no arguments makes it print its banner on
    # stderr; the exit status is irrelevant, hence no check=True.
    proc = subprocess.run([ld], stderr=subprocess.PIPE, text=True)
    return _parse_musl_version(proc.stderr)
|
52 |
+
|
53 |
+
|
54 |
+
def platform_tags(archs: Sequence[str]) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param archs: Sequence of compatible architectures.
        The first one shall be the closest to the actual architecture and be the part of
        platform tag after the ``linux_`` prefix, e.g. ``x86_64``.
        The ``linux_`` prefix is assumed as a prerequisite for the current platform to
        be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags.
    """
    sys_musl = _get_musl_version(sys.executable)
    if sys_musl is None:
        # Python is not dynamically linked against musl.
        return
    # Every minor release from the detected one down to 0 gets a tag.
    for arch in archs:
        minor = sys_musl.minor
        while minor >= 0:
            yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
            minor -= 1
|
71 |
+
|
72 |
+
|
73 |
+
if __name__ == "__main__":  # pragma: no cover
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # platform_tags() takes a *sequence* of architecture strings.  Passing
    # the bare str would iterate over its characters and yield one bogus
    # tag per character, so wrap the single normalized arch in a list.
    for t in platform_tags([re.sub(r"[.-]", "_", plat.split("-", 1)[-1])]):
        print(t, end="\n ")
|
llmeval-env/lib/python3.10/site-packages/packaging/_parser.py
ADDED
@@ -0,0 +1,356 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Handwritten parser of dependency specifiers.
|
2 |
+
|
3 |
+
The docstring for each __parse_* function contains ENBF-inspired grammar representing
|
4 |
+
the implementation.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import ast
|
8 |
+
from typing import Any, List, NamedTuple, Optional, Tuple, Union
|
9 |
+
|
10 |
+
from ._tokenizer import DEFAULT_RULES, Tokenizer
|
11 |
+
|
12 |
+
|
13 |
+
class Node:
    """Base AST node for parsed dependency specifiers; wraps one raw string."""

    def __init__(self, value: str) -> None:
        self.value = value

    def __str__(self) -> str:
        return self.value

    def __repr__(self) -> str:
        # e.g. <Variable('python_version')>
        return f"<{type(self).__name__}('{self}')>"

    def serialize(self) -> str:
        """Render this node back into marker syntax (subclass hook)."""
        raise NotImplementedError
|
25 |
+
|
26 |
+
|
27 |
+
class Variable(Node):
    """An environment marker variable, e.g. ``python_version``."""

    def serialize(self) -> str:
        return str(self)
|
30 |
+
|
31 |
+
|
32 |
+
class Value(Node):
    """A literal string; serialization re-adds the surrounding quotes."""

    def serialize(self) -> str:
        return f'"{self}"'
|
35 |
+
|
36 |
+
|
37 |
+
class Op(Node):
    """A comparison or boolean operator token, e.g. ``==`` or ``in``."""

    def serialize(self) -> str:
        return str(self)
|
40 |
+
|
41 |
+
|
42 |
+
# Type aliases for the marker AST produced by the parser below.
MarkerVar = Union[Variable, Value]
MarkerItem = Tuple[MarkerVar, Op, MarkerVar]
# MarkerAtom = Union[MarkerItem, List["MarkerAtom"]]
# MarkerList = List[Union["MarkerList", MarkerAtom, str]]
# mypy does not support recursive type definition
# https://github.com/python/mypy/issues/731
MarkerAtom = Any
MarkerList = List[Any]
|
50 |
+
|
51 |
+
|
52 |
+
class ParsedRequirement(NamedTuple):
    # Parsed components of a PEP 508 dependency specifier.  `url` and
    # `specifier` are empty strings when absent; `marker` is None when
    # no environment marker was given.
    name: str
    url: str
    extras: List[str]
    specifier: str
    marker: Optional[MarkerList]
|
58 |
+
|
59 |
+
|
60 |
+
# --------------------------------------------------------------------------------------
# Recursive descent parser for dependency specifier
# --------------------------------------------------------------------------------------
def parse_requirement(source: str) -> ParsedRequirement:
    """Parse a PEP 508 dependency specifier string into its components."""
    return _parse_requirement(Tokenizer(source, rules=DEFAULT_RULES))
|
65 |
+
|
66 |
+
|
67 |
+
def _parse_requirement(tokenizer: Tokenizer) -> ParsedRequirement:
    """
    requirement = WS? IDENTIFIER WS? extras WS? requirement_details
    """
    tokenizer.consume("WS")

    name_token = tokenizer.expect(
        "IDENTIFIER", expected="package name at the start of dependency specifier"
    )
    name = name_token.text
    tokenizer.consume("WS")

    extras = _parse_extras(tokenizer)
    tokenizer.consume("WS")

    url, specifier, marker = _parse_requirement_details(tokenizer)
    # Any leftover input after the details is a syntax error.
    tokenizer.expect("END", expected="end of dependency specifier")

    return ParsedRequirement(name, url, extras, specifier, marker)
|
86 |
+
|
87 |
+
|
88 |
+
def _parse_requirement_details(
    tokenizer: Tokenizer,
) -> Tuple[str, str, Optional[MarkerList]]:
    """
    requirement_details = AT URL (WS requirement_marker?)?
                        | specifier WS? (requirement_marker)?
    """

    specifier = ""
    url = ""
    marker = None

    if tokenizer.check("AT"):
        # URL form: "name @ url [; marker]".
        tokenizer.read()
        tokenizer.consume("WS")

        url_start = tokenizer.position
        url = tokenizer.expect("URL", expected="URL after @").text
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        # The URL token is greedy (any non-whitespace run), so a marker
        # must be separated from it by explicit whitespace.
        tokenizer.expect("WS", expected="whitespace after URL")

        # The input might end after whitespace.
        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer, span_start=url_start, after="URL and whitespace"
        )
    else:
        # Specifier form: "name [extras] specifiers [; marker]".
        specifier_start = tokenizer.position
        specifier = _parse_specifier(tokenizer)
        tokenizer.consume("WS")

        if tokenizer.check("END", peek=True):
            return (url, specifier, marker)

        marker = _parse_requirement_marker(
            tokenizer,
            span_start=specifier_start,
            after=(
                "version specifier"
                if specifier
                else "name and no valid version specifier"
            ),
        )

    return (url, specifier, marker)
|
137 |
+
|
138 |
+
|
139 |
+
def _parse_requirement_marker(
    tokenizer: Tokenizer, *, span_start: int, after: str
) -> MarkerList:
    """
    requirement_marker = SEMICOLON marker WS?
    """

    if not tokenizer.check("SEMICOLON"):
        # Anchor the error span back at the URL/specifier that preceded
        # the unexpected trailing text.
        tokenizer.raise_syntax_error(
            f"Expected end or semicolon (after {after})",
            span_start=span_start,
        )
    tokenizer.read()

    marker = _parse_marker(tokenizer)
    tokenizer.consume("WS")

    return marker
|
157 |
+
|
158 |
+
|
159 |
+
def _parse_extras(tokenizer: Tokenizer) -> List[str]:
    """
    extras = (LEFT_BRACKET wsp* extras_list? wsp* RIGHT_BRACKET)?
    """
    # Extras are optional: no "[" means an empty list.
    if not tokenizer.check("LEFT_BRACKET", peek=True):
        return []

    with tokenizer.enclosing_tokens(
        "LEFT_BRACKET",
        "RIGHT_BRACKET",
        around="extras",
    ):
        tokenizer.consume("WS")
        extras = _parse_extras_list(tokenizer)
        tokenizer.consume("WS")

    return extras
|
176 |
+
|
177 |
+
|
178 |
+
def _parse_extras_list(tokenizer: Tokenizer) -> List[str]:
    """
    extras_list = identifier (wsp* ',' wsp* identifier)*
    """
    extras: List[str] = []

    # An empty "[]" is allowed.
    if not tokenizer.check("IDENTIFIER"):
        return extras

    extras.append(tokenizer.read().text)

    while True:
        tokenizer.consume("WS")
        if tokenizer.check("IDENTIFIER", peek=True):
            # Two names in a row ("[a b]") get a dedicated error message.
            tokenizer.raise_syntax_error("Expected comma between extra names")
        elif not tokenizer.check("COMMA"):
            break

        tokenizer.read()
        tokenizer.consume("WS")

        extra_token = tokenizer.expect("IDENTIFIER", expected="extra name after comma")
        extras.append(extra_token.text)

    return extras
|
203 |
+
|
204 |
+
|
205 |
+
def _parse_specifier(tokenizer: Tokenizer) -> str:
    """
    specifier = LEFT_PARENTHESIS WS? version_many WS? RIGHT_PARENTHESIS
              | WS? version_many WS?
    """
    # Parentheses are optional: enclosing_tokens() only demands the closer
    # if the opener was actually consumed.
    with tokenizer.enclosing_tokens(
        "LEFT_PARENTHESIS",
        "RIGHT_PARENTHESIS",
        around="version specifier",
    ):
        tokenizer.consume("WS")
        parsed_specifiers = _parse_version_many(tokenizer)
        tokenizer.consume("WS")

    return parsed_specifiers
|
220 |
+
|
221 |
+
|
222 |
+
def _parse_version_many(tokenizer: Tokenizer) -> str:
    """
    version_many = (SPECIFIER (WS? COMMA WS? SPECIFIER)*)?
    """
    # The individual clauses are re-concatenated (commas included) into a
    # single specifier string for the caller.
    parsed_specifiers = ""
    while tokenizer.check("SPECIFIER"):
        span_start = tokenizer.position
        parsed_specifiers += tokenizer.read().text
        # A ".*" or local-version ("+label") trail that the SPECIFIER rule
        # did not absorb signals use with a disallowed operator: emit a
        # targeted error instead of a generic one.
        if tokenizer.check("VERSION_PREFIX_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                ".* suffix can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position + 1,
            )
        if tokenizer.check("VERSION_LOCAL_LABEL_TRAIL", peek=True):
            tokenizer.raise_syntax_error(
                "Local version label can only be used with `==` or `!=` operators",
                span_start=span_start,
                span_end=tokenizer.position,
            )
        tokenizer.consume("WS")
        if not tokenizer.check("COMMA"):
            break
        parsed_specifiers += tokenizer.read().text
        tokenizer.consume("WS")

    return parsed_specifiers
|
249 |
+
|
250 |
+
|
251 |
+
# --------------------------------------------------------------------------------------
# Recursive descent parser for marker expression
# --------------------------------------------------------------------------------------
def parse_marker(source: str) -> MarkerList:
    """Parse an environment marker expression into its flat AST list form."""
    return _parse_full_marker(Tokenizer(source, rules=DEFAULT_RULES))
|
256 |
+
|
257 |
+
|
258 |
+
def _parse_full_marker(tokenizer: Tokenizer) -> MarkerList:
    # A stand-alone marker must consume the entire input.
    retval = _parse_marker(tokenizer)
    tokenizer.expect("END", expected="end of marker expression")
    return retval
|
262 |
+
|
263 |
+
|
264 |
+
def _parse_marker(tokenizer: Tokenizer) -> MarkerList:
    """
    marker = marker_atom (BOOLOP marker_atom)+
    """
    # Flat list form: [atom, "and"/"or", atom, ...]; operator precedence is
    # left to the evaluator, not resolved here.
    expression = [_parse_marker_atom(tokenizer)]
    while tokenizer.check("BOOLOP"):
        token = tokenizer.read()
        expr_right = _parse_marker_atom(tokenizer)
        expression.extend((token.text, expr_right))
    return expression
|
274 |
+
|
275 |
+
|
276 |
+
def _parse_marker_atom(tokenizer: Tokenizer) -> MarkerAtom:
    """
    marker_atom = WS? LEFT_PARENTHESIS WS? marker WS? RIGHT_PARENTHESIS WS?
                | WS? marker_item WS?
    """

    tokenizer.consume("WS")
    if tokenizer.check("LEFT_PARENTHESIS", peek=True):
        # Parenthesized sub-expression: recurse into the full marker rule.
        with tokenizer.enclosing_tokens(
            "LEFT_PARENTHESIS",
            "RIGHT_PARENTHESIS",
            around="marker expression",
        ):
            tokenizer.consume("WS")
            marker: MarkerAtom = _parse_marker(tokenizer)
            tokenizer.consume("WS")
    else:
        marker = _parse_marker_item(tokenizer)
    tokenizer.consume("WS")
    return marker
|
296 |
+
|
297 |
+
|
298 |
+
def _parse_marker_item(tokenizer: Tokenizer) -> MarkerItem:
    """
    marker_item = WS? marker_var WS? marker_op WS? marker_var WS?
    """
    tokenizer.consume("WS")
    marker_var_left = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    marker_op = _parse_marker_op(tokenizer)
    tokenizer.consume("WS")
    marker_var_right = _parse_marker_var(tokenizer)
    tokenizer.consume("WS")
    return (marker_var_left, marker_op, marker_var_right)
|
310 |
+
|
311 |
+
|
312 |
+
def _parse_marker_var(tokenizer: Tokenizer) -> MarkerVar:
    """
    marker_var = VARIABLE | QUOTED_STRING
    """
    if tokenizer.check("VARIABLE"):
        # Dotted spellings such as "os.name" are normalized to "os_name".
        return process_env_var(tokenizer.read().text.replace(".", "_"))
    elif tokenizer.check("QUOTED_STRING"):
        return process_python_str(tokenizer.read().text)
    else:
        tokenizer.raise_syntax_error(
            message="Expected a marker variable or quoted string"
        )
|
324 |
+
|
325 |
+
|
326 |
+
def process_env_var(env_var: str) -> Variable:
    """Wrap a marker variable name, canonicalizing implementation aliases."""
    # "python_implementation" is accepted as an alias for
    # "platform_python_implementation"; normalize to the canonical name.
    if env_var in ("platform_python_implementation", "python_implementation"):
        env_var = "platform_python_implementation"
    return Variable(env_var)
|
331 |
+
|
332 |
+
|
333 |
+
def process_python_str(python_str: str) -> Value:
    # literal_eval safely evaluates the quoted-string token (handling both
    # quote styles) before it is wrapped in a Value node.
    value = ast.literal_eval(python_str)
    return Value(str(value))
|
336 |
+
|
337 |
+
|
338 |
+
def _parse_marker_op(tokenizer: Tokenizer) -> Op:
    """
    marker_op = IN | NOT IN | OP
    """
    if tokenizer.check("IN"):
        tokenizer.read()
        return Op("in")
    elif tokenizer.check("NOT"):
        # "not" must be followed by whitespace and then "in".
        tokenizer.read()
        tokenizer.expect("WS", expected="whitespace after 'not'")
        tokenizer.expect("IN", expected="'in' after 'not'")
        return Op("not in")
    elif tokenizer.check("OP"):
        return Op(tokenizer.read().text)
    else:
        return tokenizer.raise_syntax_error(
            "Expected marker operator, one of "
            "<=, <, !=, ==, >=, >, ~=, ===, in, not in"
        )
|
llmeval-env/lib/python3.10/site-packages/packaging/_structures.py
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
|
6 |
+
class InfinityType:
    """Sentinel that compares strictly greater than any other object.

    All instances compare equal to (and hash the same as) one another.
    """

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Module-level singleton; code should compare against this instance.
Infinity = InfinityType()
|
33 |
+
|
34 |
+
|
35 |
+
class NegativeInfinityType:
|
36 |
+
def __repr__(self) -> str:
|
37 |
+
return "-Infinity"
|
38 |
+
|
39 |
+
def __hash__(self) -> int:
|
40 |
+
return hash(repr(self))
|
41 |
+
|
42 |
+
def __lt__(self, other: object) -> bool:
|
43 |
+
return True
|
44 |
+
|
45 |
+
def __le__(self, other: object) -> bool:
|
46 |
+
return True
|
47 |
+
|
48 |
+
def __eq__(self, other: object) -> bool:
|
49 |
+
return isinstance(other, self.__class__)
|
50 |
+
|
51 |
+
def __gt__(self, other: object) -> bool:
|
52 |
+
return False
|
53 |
+
|
54 |
+
def __ge__(self, other: object) -> bool:
|
55 |
+
return False
|
56 |
+
|
57 |
+
def __neg__(self: object) -> InfinityType:
|
58 |
+
return Infinity
|
59 |
+
|
60 |
+
|
61 |
+
NegativeInfinity = NegativeInfinityType()
|
llmeval-env/lib/python3.10/site-packages/packaging/_tokenizer.py
ADDED
@@ -0,0 +1,192 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import contextlib
|
2 |
+
import re
|
3 |
+
from dataclasses import dataclass
|
4 |
+
from typing import Dict, Iterator, NoReturn, Optional, Tuple, Union
|
5 |
+
|
6 |
+
from .specifiers import Specifier
|
7 |
+
|
8 |
+
|
9 |
+
@dataclass
class Token:
    # One lexed unit: the matching rule's name, the matched text, and the
    # start offset of that text within the tokenizer's source string.
    name: str
    text: str
    position: int
|
14 |
+
|
15 |
+
|
16 |
+
class ParserSyntaxError(Exception):
    """The provided source text could not be parsed correctly."""

    def __init__(
        self,
        message: str,
        *,
        source: str,
        span: Tuple[int, int],
    ) -> None:
        # `span` is the (start, end) offset pair that the caret/tilde
        # marker underlines in __str__.
        self.span = span
        self.message = message
        self.source = source

        super().__init__()

    def __str__(self) -> str:
        start, end = self.span
        marker = " " * start + "~" * (end - start) + "^"
        return "\n ".join([self.message, self.source, marker])
|
35 |
+
|
36 |
+
|
37 |
+
# Lexer rules for the dependency-specifier/marker tokenizer.  Values may be
# plain pattern strings or precompiled patterns; Tokenizer.__init__ compiles
# them all.  Rules are matched at a single position on demand (context
# sensitive), not in a fixed priority order.
DEFAULT_RULES: "Dict[str, Union[str, re.Pattern[str]]]" = {
    "LEFT_PARENTHESIS": r"\(",
    "RIGHT_PARENTHESIS": r"\)",
    "LEFT_BRACKET": r"\[",
    "RIGHT_BRACKET": r"\]",
    "SEMICOLON": r";",
    "COMMA": r",",
    "QUOTED_STRING": re.compile(
        r"""
            (
                ('[^']*')
                |
                ("[^"]*")
            )
        """,
        re.VERBOSE,
    ),
    "OP": r"(===|==|~=|!=|<=|>=|<|>)",
    "BOOLOP": r"\b(or|and)\b",
    "IN": r"\bin\b",
    "NOT": r"\bnot\b",
    "VARIABLE": re.compile(
        r"""
            \b(
                python_version
                |python_full_version
                |os[._]name
                |sys[._]platform
                |platform_(release|system)
                |platform[._](version|machine|python_implementation)
                |python_implementation
                |implementation_(name|version)
                |extra
            )\b
        """,
        re.VERBOSE,
    ),
    # Version clause pattern is borrowed from Specifier to stay in sync.
    "SPECIFIER": re.compile(
        Specifier._operator_regex_str + Specifier._version_regex_str,
        re.VERBOSE | re.IGNORECASE,
    ),
    "AT": r"\@",
    "URL": r"[^ \t]+",
    "IDENTIFIER": r"\b[a-zA-Z0-9][a-zA-Z0-9._-]*\b",
    "VERSION_PREFIX_TRAIL": r"\.\*",
    "VERSION_LOCAL_LABEL_TRAIL": r"\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*",
    "WS": r"[ \t]+",
    "END": r"$",
}
|
86 |
+
|
87 |
+
|
88 |
+
class Tokenizer:
    """Context-sensitive token parsing.

    Provides methods to examine the input stream to check whether the next token
    matches.
    """

    def __init__(
        self,
        source: str,
        *,
        rules: "Dict[str, Union[str, re.Pattern[str]]]",
    ) -> None:
        self.source = source
        # re.compile() accepts already-compiled patterns, so mixed
        # string/pattern rule values are both handled here.
        self.rules: Dict[str, re.Pattern[str]] = {
            name: re.compile(pattern) for name, pattern in rules.items()
        }
        # Token found by the last non-peek check(); cleared by read().
        self.next_token: Optional[Token] = None
        # Current offset into `source`.
        self.position = 0

    def consume(self, name: str) -> None:
        """Move beyond provided token name, if at current position."""
        if self.check(name):
            self.read()

    def check(self, name: str, *, peek: bool = False) -> bool:
        """Check whether the next token has the provided name.

        By default, if the check succeeds, the token *must* be read before
        another check. If `peek` is set to `True`, the token is not loaded and
        would need to be checked again.
        """
        assert (
            self.next_token is None
        ), f"Cannot check for {name!r}, already have {self.next_token!r}"
        assert name in self.rules, f"Unknown token name: {name!r}"

        expression = self.rules[name]

        match = expression.match(self.source, self.position)
        if match is None:
            return False
        if not peek:
            self.next_token = Token(name, match[0], self.position)
        return True

    def expect(self, name: str, *, expected: str) -> Token:
        """Expect a certain token name next, failing with a syntax error otherwise.

        On success the token *is* read (consumed) and returned.
        """
        if not self.check(name):
            raise self.raise_syntax_error(f"Expected {expected}")
        return self.read()

    def read(self) -> Token:
        """Consume the next token and return it."""
        token = self.next_token
        assert token is not None

        self.position += len(token.text)
        self.next_token = None

        return token

    def raise_syntax_error(
        self,
        message: str,
        *,
        span_start: Optional[int] = None,
        span_end: Optional[int] = None,
    ) -> NoReturn:
        """Raise ParserSyntaxError at the given position."""
        # Both span edges default to the current position when unspecified.
        span = (
            self.position if span_start is None else span_start,
            self.position if span_end is None else span_end,
        )
        raise ParserSyntaxError(
            message,
            source=self.source,
            span=span,
        )

    @contextlib.contextmanager
    def enclosing_tokens(
        self, open_token: str, close_token: str, *, around: str
    ) -> Iterator[None]:
        # Consume an optional opening delimiter; after the enclosed body has
        # been parsed (the `yield`), require the matching closing delimiter
        # only if the opening one was actually present.
        if self.check(open_token):
            open_position = self.position
            self.read()
        else:
            open_position = None

        yield

        if open_position is None:
            return

        if not self.check(close_token):
            self.raise_syntax_error(
                f"Expected matching {close_token} for {open_token}, after {around}",
                span_start=open_position,
            )

        self.read()
|
llmeval-env/lib/python3.10/site-packages/packaging/markers.py
ADDED
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
import operator
|
6 |
+
import os
|
7 |
+
import platform
|
8 |
+
import sys
|
9 |
+
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
|
10 |
+
|
11 |
+
from ._parser import (
|
12 |
+
MarkerAtom,
|
13 |
+
MarkerList,
|
14 |
+
Op,
|
15 |
+
Value,
|
16 |
+
Variable,
|
17 |
+
parse_marker as _parse_marker,
|
18 |
+
)
|
19 |
+
from ._tokenizer import ParserSyntaxError
|
20 |
+
from .specifiers import InvalidSpecifier, Specifier
|
21 |
+
from .utils import canonicalize_name
|
22 |
+
|
23 |
+
# Public API of this module.
__all__ = [
    "InvalidMarker",
    "UndefinedComparison",
    "UndefinedEnvironmentName",
    "Marker",
    "default_environment",
]

# Signature shared by the comparison callables in the _operators table below.
Operator = Callable[[str, str], bool]
|
32 |
+
|
33 |
+
|
34 |
+
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.

    Raised by :class:`Marker` when the marker string cannot be parsed.
    """
|
38 |
+
|
39 |
+
|
40 |
+
class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.

    Raised during marker evaluation when an operator is not a valid PEP 440
    specifier and has no string-comparison fallback (see ``_eval_op``).
    """
|
44 |
+
|
45 |
+
|
46 |
+
class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.

    .. note:: Not raised anywhere in this module itself — presumably kept
       for backwards compatibility with external callers; confirm before
       removing.
    """
|
51 |
+
|
52 |
+
|
53 |
+
def _normalize_extra_values(results: Any) -> Any:
|
54 |
+
"""
|
55 |
+
Normalize extra values.
|
56 |
+
"""
|
57 |
+
if isinstance(results[0], tuple):
|
58 |
+
lhs, op, rhs = results[0]
|
59 |
+
if isinstance(lhs, Variable) and lhs.value == "extra":
|
60 |
+
normalized_extra = canonicalize_name(rhs.value)
|
61 |
+
rhs = Value(normalized_extra)
|
62 |
+
elif isinstance(rhs, Variable) and rhs.value == "extra":
|
63 |
+
normalized_extra = canonicalize_name(lhs.value)
|
64 |
+
lhs = Value(normalized_extra)
|
65 |
+
results[0] = lhs, op, rhs
|
66 |
+
return results
|
67 |
+
|
68 |
+
|
69 |
+
def _format_marker(
|
70 |
+
marker: Union[List[str], MarkerAtom, str], first: Optional[bool] = True
|
71 |
+
) -> str:
|
72 |
+
|
73 |
+
assert isinstance(marker, (list, tuple, str))
|
74 |
+
|
75 |
+
# Sometimes we have a structure like [[...]] which is a single item list
|
76 |
+
# where the single item is itself it's own list. In that case we want skip
|
77 |
+
# the rest of this function so that we don't get extraneous () on the
|
78 |
+
# outside.
|
79 |
+
if (
|
80 |
+
isinstance(marker, list)
|
81 |
+
and len(marker) == 1
|
82 |
+
and isinstance(marker[0], (list, tuple))
|
83 |
+
):
|
84 |
+
return _format_marker(marker[0])
|
85 |
+
|
86 |
+
if isinstance(marker, list):
|
87 |
+
inner = (_format_marker(m, first=False) for m in marker)
|
88 |
+
if first:
|
89 |
+
return " ".join(inner)
|
90 |
+
else:
|
91 |
+
return "(" + " ".join(inner) + ")"
|
92 |
+
elif isinstance(marker, tuple):
|
93 |
+
return " ".join([m.serialize() for m in marker])
|
94 |
+
else:
|
95 |
+
return marker
|
96 |
+
|
97 |
+
|
98 |
+
# Lookup table mapping a serialized marker operator to the callable that
# implements it for plain string comparison.  _eval_op consults this only
# after PEP 440 specifier matching has been ruled out.
_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
|
108 |
+
|
109 |
+
|
110 |
+
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate one ``lhs <op> rhs`` marker comparison.

    PEP 440 semantics are preferred: when ``<op><rhs>`` parses as a version
    specifier, ``lhs`` is matched against it (prereleases allowed).
    Otherwise the operator falls back to plain string comparison via the
    ``_operators`` table; an operator found in neither place raises
    :exc:`UndefinedComparison`.
    """
    serialized_op = op.serialize()
    try:
        spec = Specifier(serialized_op + rhs)
    except InvalidSpecifier:
        # Not a valid version specifier; fall through to string comparison.
        pass
    else:
        return spec.contains(lhs, prereleases=True)

    comparator: Optional[Operator] = _operators.get(serialized_op)
    if comparator is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")
    return comparator(lhs, rhs)
|
123 |
+
|
124 |
+
|
125 |
+
def _normalize(*values: str, key: str) -> Tuple[str, ...]:
|
126 |
+
# PEP 685 – Comparison of extra names for optional distribution dependencies
|
127 |
+
# https://peps.python.org/pep-0685/
|
128 |
+
# > When comparing extra names, tools MUST normalize the names being
|
129 |
+
# > compared using the semantics outlined in PEP 503 for names
|
130 |
+
if key == "extra":
|
131 |
+
return tuple(canonicalize_name(v) for v in values)
|
132 |
+
|
133 |
+
# other environment markers don't have such standards
|
134 |
+
return values
|
135 |
+
|
136 |
+
|
137 |
+
def _evaluate_markers(markers: MarkerList, environment: Dict[str, str]) -> bool:
|
138 |
+
groups: List[List[bool]] = [[]]
|
139 |
+
|
140 |
+
for marker in markers:
|
141 |
+
assert isinstance(marker, (list, tuple, str))
|
142 |
+
|
143 |
+
if isinstance(marker, list):
|
144 |
+
groups[-1].append(_evaluate_markers(marker, environment))
|
145 |
+
elif isinstance(marker, tuple):
|
146 |
+
lhs, op, rhs = marker
|
147 |
+
|
148 |
+
if isinstance(lhs, Variable):
|
149 |
+
environment_key = lhs.value
|
150 |
+
lhs_value = environment[environment_key]
|
151 |
+
rhs_value = rhs.value
|
152 |
+
else:
|
153 |
+
lhs_value = lhs.value
|
154 |
+
environment_key = rhs.value
|
155 |
+
rhs_value = environment[environment_key]
|
156 |
+
|
157 |
+
lhs_value, rhs_value = _normalize(lhs_value, rhs_value, key=environment_key)
|
158 |
+
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
|
159 |
+
else:
|
160 |
+
assert marker in ["and", "or"]
|
161 |
+
if marker == "or":
|
162 |
+
groups.append([])
|
163 |
+
|
164 |
+
return any(all(item) for item in groups)
|
165 |
+
|
166 |
+
|
167 |
+
def format_full_version(info: "sys._version_info") -> str:
    """Render a ``sys.version_info``-like struct as ``X.Y.Z[{a|b|c|r}N]``.

    A non-"final" release level is abbreviated to its first letter and
    suffixed with the serial number (e.g. ``3.12.0c1``), mirroring how
    CPython formats pre-releases.
    """
    rendered = f"{info.major}.{info.minor}.{info.micro}"
    release_level = info.releaselevel
    if release_level != "final":
        rendered = f"{rendered}{release_level[0]}{info.serial}"
    return rendered
|
173 |
+
|
174 |
+
|
175 |
+
def default_environment() -> Dict[str, str]:
    """Return the PEP 508 marker environment of the running interpreter.

    Keys are the marker variable names from the dependency-specifier
    specification; values are sampled from ``sys``, ``os`` and ``platform``
    at call time.
    """
    impl = sys.implementation
    info = impl.version

    # Inline rendering of the implementation version: X.Y.Z plus e.g. "b3"
    # when the release level is not "final" (same output as
    # format_full_version()).
    impl_version = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel != "final":
        impl_version += info.releaselevel[0] + str(info.serial)

    return {
        "implementation_name": impl.name,
        "implementation_version": impl_version,
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
|
191 |
+
|
192 |
+
|
193 |
+
class Marker:
    """A parsed PEP 508 environment marker (e.g. ``python_version >= "3.8"``).

    Instances serialize back to a canonical string via ``str()``, compare
    and hash by that string form, and can be evaluated against an
    environment mapping with :meth:`evaluate`.
    """

    def __init__(self, marker: str) -> None:
        """Parse ``marker``; raises :exc:`InvalidMarker` on a syntax error."""
        # Note: We create a Marker object without calling this constructor in
        # packaging.requirements.Requirement. If any additional logic is
        # added here, make sure to mirror/adapt Requirement.
        try:
            self._markers = _normalize_extra_values(_parse_marker(marker))
            # The attribute `_markers` can be described in terms of a recursive type:
            # MarkerList = List[Union[Tuple[Node, ...], str, MarkerList]]
            #
            # For example, the following expression:
            # python_version > "3.6" or (python_version == "3.6" and os_name == "unix")
            #
            # is parsed into:
            # [
            #     (<Variable('python_version')>, <Op('>')>, <Value('3.6')>),
            #     'or',
            #     [
            #         (<Variable('python_version')>, <Op('==')>, <Value('3.6')>),
            #         'and',
            #         (<Variable('os_name')>, <Op('==')>, <Value('unix')>)
            #     ]
            # ]
        except ParserSyntaxError as e:
            # Re-raise under this module's public exception type, keeping the
            # parser error as the cause.
            raise InvalidMarker(str(e)) from e

    def __str__(self) -> str:
        # Serialize the parsed structure back to a canonical marker string.
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def __hash__(self) -> int:
        # Hash on the serialized form so equal markers hash equally,
        # consistent with __eq__ below.
        return hash((self.__class__.__name__, str(self)))

    def __eq__(self, other: Any) -> bool:
        if not isinstance(other, Marker):
            return NotImplemented

        return str(self) == str(other)

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        # "extra" is always defined (empty by default) so markers that test
        # it can evaluate outside of an extras context.
        current_environment["extra"] = ""
        if environment is not None:
            current_environment.update(environment)
            # The API used to allow setting extra to None. We need to handle this
            # case for backwards compatibility.
            if current_environment["extra"] is None:
                current_environment["extra"] = ""

        return _evaluate_markers(self._markers, current_environment)
|
llmeval-env/lib/python3.10/site-packages/packaging/metadata.py
ADDED
@@ -0,0 +1,825 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import email.feedparser
|
2 |
+
import email.header
|
3 |
+
import email.message
|
4 |
+
import email.parser
|
5 |
+
import email.policy
|
6 |
+
import sys
|
7 |
+
import typing
|
8 |
+
from typing import (
|
9 |
+
Any,
|
10 |
+
Callable,
|
11 |
+
Dict,
|
12 |
+
Generic,
|
13 |
+
List,
|
14 |
+
Optional,
|
15 |
+
Tuple,
|
16 |
+
Type,
|
17 |
+
Union,
|
18 |
+
cast,
|
19 |
+
)
|
20 |
+
|
21 |
+
from . import requirements, specifiers, utils, version as version_module
|
22 |
+
|
23 |
+
T = typing.TypeVar("T")

# typing.Literal and typing.TypedDict landed in the stdlib in Python 3.8.
# On older interpreters fall back to typing_extensions; if that isn't
# installed either, substitute inert placeholder classes so this module
# stays importable (the placeholders only need to tolerate subclassing).
if sys.version_info[:2] >= (3, 8):  # pragma: no cover
    from typing import Literal, TypedDict
else:  # pragma: no cover
    if typing.TYPE_CHECKING:
        from typing_extensions import Literal, TypedDict
    else:
        try:
            from typing_extensions import Literal, TypedDict
        except ImportError:

            class Literal:
                # Accept subclass definitions silently.
                def __init_subclass__(*_args, **_kwargs):
                    pass

            class TypedDict:
                # Placeholder: RawMetadata degrades to a plain class.
                def __init_subclass__(*_args, **_kwargs):
                    pass
|
41 |
+
|
42 |
+
|
43 |
+
# ExceptionGroup is a builtin from Python 3.11 onwards; provide a minimal
# stand-in on older interpreters so callers can raise/catch it uniformly.
try:
    ExceptionGroup
except NameError:  # pragma: no cover

    class ExceptionGroup(Exception):  # noqa: N818
        """A minimal implementation of :external:exc:`ExceptionGroup` from Python 3.11.

        If :external:exc:`ExceptionGroup` is already defined by Python itself,
        that version is used instead.
        """

        message: str
        exceptions: List[Exception]

        def __init__(self, message: str, exceptions: List[Exception]) -> None:
            self.message = message
            self.exceptions = exceptions

        def __repr__(self) -> str:
            return f"{self.__class__.__name__}({self.message!r}, {self.exceptions!r})"

else:  # pragma: no cover
    # Re-bind the builtin so the name exists at module scope either way.
    ExceptionGroup = ExceptionGroup
|
66 |
+
|
67 |
+
|
68 |
+
class InvalidMetadata(ValueError):
    """A metadata field contains invalid data."""

    field: str
    """The name of the field that contains invalid data."""

    def __init__(self, field: str, message: str) -> None:
        """Record the offending *field* name and initialize as a ValueError."""
        super().__init__(message)
        self.field = field
|
77 |
+
|
78 |
+
|
79 |
+
# The RawMetadata class attempts to make as few assumptions about the underlying
|
80 |
+
# serialization formats as possible. The idea is that as long as a serialization
|
81 |
+
# formats offer some very basic primitives in *some* way then we can support
|
82 |
+
# serializing to and from that format.
|
83 |
+
# The RawMetadata class attempts to make as few assumptions about the underlying
# serialization formats as possible. The idea is that as long as a serialization
# formats offer some very basic primitives in *some* way then we can support
# serializing to and from that format.
class RawMetadata(TypedDict, total=False):
    """A dictionary of raw core metadata.

    Each field in core metadata maps to a key of this dictionary (when data is
    provided). The key is lower-case and underscores are used instead of dashes
    compared to the equivalent core metadata field. Any core metadata field that
    can be specified multiple times or can hold multiple values in a single
    field have a key with a plural name. See :class:`Metadata` whose attributes
    match the keys of this dictionary.

    Core metadata fields that can be specified multiple times are stored as a
    list or dict depending on which is appropriate for the field. Any fields
    which hold multiple values in a single field are stored as a list.

    """

    # Metadata 1.0 - PEP 241
    metadata_version: str
    name: str
    version: str
    platforms: List[str]
    summary: str
    description: str
    keywords: List[str]
    home_page: str
    author: str
    author_email: str
    license: str

    # Metadata 1.1 - PEP 314
    supported_platforms: List[str]
    download_url: str
    classifiers: List[str]
    requires: List[str]
    provides: List[str]
    obsoletes: List[str]

    # Metadata 1.2 - PEP 345
    maintainer: str
    maintainer_email: str
    requires_dist: List[str]
    provides_dist: List[str]
    obsoletes_dist: List[str]
    requires_python: str
    requires_external: List[str]
    project_urls: Dict[str, str]

    # Metadata 2.0
    # PEP 426 attempted to completely revamp the metadata format
    # but got stuck without ever being able to build consensus on
    # it and ultimately ended up withdrawn.
    #
    # However, a number of tools had started emitting METADATA with
    # `2.0` Metadata-Version, so for historical reasons, this version
    # was skipped.

    # Metadata 2.1 - PEP 566
    description_content_type: str
    provides_extra: List[str]

    # Metadata 2.2 - PEP 643
    dynamic: List[str]

    # Metadata 2.3 - PEP 685
    # No new fields were added in PEP 685, just some edge cases were
    # tightened up to provide better interoperability.
|
149 |
+
|
150 |
+
|
151 |
+
# RawMetadata keys whose core-metadata field holds a single string value.
_STRING_FIELDS = {
    "author",
    "author_email",
    "description",
    "description_content_type",
    "download_url",
    "home_page",
    "license",
    "maintainer",
    "maintainer_email",
    "metadata_version",
    "name",
    "requires_python",
    "summary",
    "version",
}

# RawMetadata keys whose field may appear multiple times; stored as a list.
_LIST_FIELDS = {
    "classifiers",
    "dynamic",
    "obsoletes",
    "obsoletes_dist",
    "platforms",
    "provides",
    "provides_dist",
    "provides_extra",
    "requires",
    "requires_dist",
    "requires_external",
    "supported_platforms",
}

# RawMetadata keys stored as a mapping (Project-URL: label -> URL).
_DICT_FIELDS = {
    "project_urls",
}
|
186 |
+
|
187 |
+
|
188 |
+
def _parse_keywords(data: str) -> List[str]:
|
189 |
+
"""Split a string of comma-separate keyboards into a list of keywords."""
|
190 |
+
return [k.strip() for k in data.split(",")]
|
191 |
+
|
192 |
+
|
193 |
+
def _parse_project_urls(data: List[str]) -> Dict[str, str]:
|
194 |
+
"""Parse a list of label/URL string pairings separated by a comma."""
|
195 |
+
urls = {}
|
196 |
+
for pair in data:
|
197 |
+
# Our logic is slightly tricky here as we want to try and do
|
198 |
+
# *something* reasonable with malformed data.
|
199 |
+
#
|
200 |
+
# The main thing that we have to worry about, is data that does
|
201 |
+
# not have a ',' at all to split the label from the Value. There
|
202 |
+
# isn't a singular right answer here, and we will fail validation
|
203 |
+
# later on (if the caller is validating) so it doesn't *really*
|
204 |
+
# matter, but since the missing value has to be an empty str
|
205 |
+
# and our return value is dict[str, str], if we let the key
|
206 |
+
# be the missing value, then they'd have multiple '' values that
|
207 |
+
# overwrite each other in a accumulating dict.
|
208 |
+
#
|
209 |
+
# The other potentional issue is that it's possible to have the
|
210 |
+
# same label multiple times in the metadata, with no solid "right"
|
211 |
+
# answer with what to do in that case. As such, we'll do the only
|
212 |
+
# thing we can, which is treat the field as unparseable and add it
|
213 |
+
# to our list of unparsed fields.
|
214 |
+
parts = [p.strip() for p in pair.split(",", 1)]
|
215 |
+
parts.extend([""] * (max(0, 2 - len(parts)))) # Ensure 2 items
|
216 |
+
|
217 |
+
# TODO: The spec doesn't say anything about if the keys should be
|
218 |
+
# considered case sensitive or not... logically they should
|
219 |
+
# be case-preserving and case-insensitive, but doing that
|
220 |
+
# would open up more cases where we might have duplicate
|
221 |
+
# entries.
|
222 |
+
label, url = parts
|
223 |
+
if label in urls:
|
224 |
+
# The label already exists in our set of urls, so this field
|
225 |
+
# is unparseable, and we can just add the whole thing to our
|
226 |
+
# unparseable data and stop processing it.
|
227 |
+
raise KeyError("duplicate labels in project urls")
|
228 |
+
urls[label] = url
|
229 |
+
|
230 |
+
return urls
|
231 |
+
|
232 |
+
|
233 |
+
def _get_payload(msg: email.message.Message, source: Union[bytes, str]) -> str:
|
234 |
+
"""Get the body of the message."""
|
235 |
+
# If our source is a str, then our caller has managed encodings for us,
|
236 |
+
# and we don't need to deal with it.
|
237 |
+
if isinstance(source, str):
|
238 |
+
payload: str = msg.get_payload()
|
239 |
+
return payload
|
240 |
+
# If our source is a bytes, then we're managing the encoding and we need
|
241 |
+
# to deal with it.
|
242 |
+
else:
|
243 |
+
bpayload: bytes = msg.get_payload(decode=True)
|
244 |
+
try:
|
245 |
+
return bpayload.decode("utf8", "strict")
|
246 |
+
except UnicodeDecodeError:
|
247 |
+
raise ValueError("payload in an invalid encoding")
|
248 |
+
|
249 |
+
|
250 |
+
# The various parse_FORMAT functions here are intended to be as lenient as
|
251 |
+
# possible in their parsing, while still returning a correctly typed
|
252 |
+
# RawMetadata.
|
253 |
+
#
|
254 |
+
# To aid in this, we also generally want to do as little touching of the
|
255 |
+
# data as possible, except where there are possibly some historic holdovers
|
256 |
+
# that make valid data awkward to work with.
|
257 |
+
#
|
258 |
+
# While this is a lower level, intermediate format than our ``Metadata``
|
259 |
+
# class, some light touch ups can make a massive difference in usability.
|
260 |
+
|
261 |
+
# Map METADATA header names (lower-cased) to RawMetadata keys.
_EMAIL_TO_RAW_MAPPING = {
    "author": "author",
    "author-email": "author_email",
    "classifier": "classifiers",
    "description": "description",
    "description-content-type": "description_content_type",
    "download-url": "download_url",
    "dynamic": "dynamic",
    "home-page": "home_page",
    "keywords": "keywords",
    "license": "license",
    "maintainer": "maintainer",
    "maintainer-email": "maintainer_email",
    "metadata-version": "metadata_version",
    "name": "name",
    "obsoletes": "obsoletes",
    "obsoletes-dist": "obsoletes_dist",
    "platform": "platforms",
    "project-url": "project_urls",
    "provides": "provides",
    "provides-dist": "provides_dist",
    "provides-extra": "provides_extra",
    "requires": "requires",
    "requires-dist": "requires_dist",
    "requires-external": "requires_external",
    "requires-python": "requires_python",
    "summary": "summary",
    "supported-platform": "supported_platforms",
    "version": "version",
}
# Inverse mapping: RawMetadata key -> canonical METADATA header name.
_RAW_TO_EMAIL_MAPPING = {raw: email for email, raw in _EMAIL_TO_RAW_MAPPING.items()}
|
293 |
+
|
294 |
+
|
295 |
+
def parse_email(data: Union[bytes, str]) -> Tuple[RawMetadata, Dict[str, List[str]]]:
    """Parse a distribution's metadata stored as email headers (e.g. from ``METADATA``).

    This function returns a two-item tuple of dicts. The first dict is of
    recognized fields from the core metadata specification. Fields that can be
    parsed and translated into Python's built-in types are converted
    appropriately. All other fields are left as-is. Fields that are allowed to
    appear multiple times are stored as lists.

    The second dict contains all other fields from the metadata. This includes
    any unrecognized fields. It also includes any fields which are expected to
    be parsed into a built-in type but were not formatted appropriately. Finally,
    any fields that are expected to appear only once but are repeated are
    included in this dict.

    """
    raw: Dict[str, Union[str, List[str], Dict[str, str]]] = {}
    unparsed: Dict[str, List[str]] = {}

    # compat32 keeps header values raw (Header objects / surrogateescapes)
    # so the encoding handling below stays in our control.
    if isinstance(data, str):
        parsed = email.parser.Parser(policy=email.policy.compat32).parsestr(data)
    else:
        parsed = email.parser.BytesParser(policy=email.policy.compat32).parsebytes(data)

    # We have to wrap parsed.keys() in a set, because in the case of multiple
    # values for a key (a list), the key will appear multiple times in the
    # list of keys, but we're avoiding that by using get_all().
    for name in frozenset(parsed.keys()):
        # Header names in RFC are case insensitive, so we'll normalize to all
        # lower case to make comparisons easier.
        name = name.lower()

        # We use get_all() here, even for fields that aren't multiple use,
        # because otherwise someone could have e.g. two Name fields, and we
        # would just silently ignore it rather than doing something about it.
        headers = parsed.get_all(name) or []

        # The way the email module works when parsing bytes is that it
        # unconditionally decodes the bytes as ascii using the surrogateescape
        # handler. When you pull that data back out (such as with get_all()),
        # it looks to see if the str has any surrogate escapes, and if it does
        # it wraps it in a Header object instead of returning the string.
        #
        # As such, we'll look for those Header objects, and fix up the encoding.
        value = []
        # Flag if we have run into any issues processing the headers, thus
        # signalling that the data belongs in 'unparsed'.
        valid_encoding = True
        for h in headers:
            # It's unclear if this can return more types than just a Header or
            # a str, so we'll just assert here to make sure.
            assert isinstance(h, (email.header.Header, str))

            # If it's a header object, we need to do our little dance to get
            # the real data out of it. In cases where there is invalid data
            # we're going to end up with mojibake, but there's no obvious, good
            # way around that without reimplementing parts of the Header object
            # ourselves.
            #
            # That should be fine since, if mojibake happens, this key is
            # going into the unparsed dict anyways.
            if isinstance(h, email.header.Header):
                # The Header object stores its data as chunks, and each chunk
                # can be independently encoded, so we'll need to check each
                # of them.
                chunks: List[Tuple[bytes, Optional[str]]] = []
                for bin, encoding in email.header.decode_header(h):
                    try:
                        bin.decode("utf8", "strict")
                    except UnicodeDecodeError:
                        # Enable mojibake.
                        encoding = "latin1"
                        valid_encoding = False
                    else:
                        encoding = "utf8"
                    chunks.append((bin, encoding))

                # Turn our chunks back into a Header object, then let that
                # Header object do the right thing to turn them into a
                # string for us.
                value.append(str(email.header.make_header(chunks)))
            # This is already a string, so just add it.
            else:
                value.append(h)

        # We've processed all of our values to get them into a list of str,
        # but we may have mojibake data, in which case this is an unparsed
        # field.
        if not valid_encoding:
            unparsed[name] = value
            continue

        raw_name = _EMAIL_TO_RAW_MAPPING.get(name)
        if raw_name is None:
            # This is a bit of a weird situation, we've encountered a key that
            # we don't know what it means, so we don't know whether it's meant
            # to be a list or not.
            #
            # Since we can't really tell one way or another, we'll just leave it
            # as a list, even though it may be a single item list, because that's
            # what makes the most sense for email headers.
            unparsed[name] = value
            continue

        # If this is one of our string fields, then we'll check to see if our
        # value is a list of a single item. If it is then we'll assume that
        # it was emitted as a single string, and unwrap the str from inside
        # the list.
        #
        # If it's any other kind of data, then we haven't the faintest clue
        # what we should parse it as, and we have to just add it to our list
        # of unparsed stuff.
        if raw_name in _STRING_FIELDS and len(value) == 1:
            raw[raw_name] = value[0]
        # If this is one of our list of string fields, then we can just assign
        # the value, since email *only* has strings, and our get_all() call
        # above ensures that this is a list.
        elif raw_name in _LIST_FIELDS:
            raw[raw_name] = value
        # Special Case: Keywords
        # The keywords field is implemented in the metadata spec as a str,
        # but it conceptually is a list of strings, and is serialized using
        # ", ".join(keywords), so we'll do some light data massaging to turn
        # this into what it logically is.
        elif raw_name == "keywords" and len(value) == 1:
            raw[raw_name] = _parse_keywords(value[0])
        # Special Case: Project-URL
        # The project urls is implemented in the metadata spec as a list of
        # specially-formatted strings that represent a key and a value, which
        # is fundamentally a mapping, however the email format doesn't support
        # mappings in a sane way, so it was crammed into a list of strings
        # instead.
        #
        # We will do a little light data massaging to turn this into a map as
        # it logically should be.
        elif raw_name == "project_urls":
            try:
                raw[raw_name] = _parse_project_urls(value)
            except KeyError:
                unparsed[name] = value
        # Nothing that we've done has managed to parse this, so it'll just
        # throw it in our unparseable data and move on.
        else:
            unparsed[name] = value

    # We need to support getting the Description from the message payload in
    # addition to getting it from the headers. This does mean, though, there
    # is the possibility of it being set both ways, in which case we put both
    # in 'unparsed' since we don't know which is right.
    try:
        payload = _get_payload(parsed, data)
    except ValueError:
        unparsed.setdefault("description", []).append(
            parsed.get_payload(decode=isinstance(data, bytes))
        )
    else:
        if payload:
            # Check to see if we've already got a description, if so then both
            # it, and this body move to unparseable.
            if "description" in raw:
                description_header = cast(str, raw.pop("description"))
                unparsed.setdefault("description", []).extend(
                    [description_header, payload]
                )
            elif "description" in unparsed:
                unparsed["description"].append(payload)
            else:
                raw["description"] = payload

    # We need to cast our `raw` to a metadata, because a TypedDict only supports
    # literal key names, but we're computing our key names on purpose, but the
    # way this function is implemented, our `TypedDict` can only have valid key
    # names.
    return cast(RawMetadata, raw), unparsed
|
469 |
+
|
470 |
+
|
471 |
+
# Unique sentinel object, compared by identity — presumably marks "value not
# found" where None would be ambiguous; its usage lies outside this excerpt.
_NOT_FOUND = object()


# Keep the two values in sync.
_VALID_METADATA_VERSIONS = ["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]
_MetadataVersion = Literal["1.0", "1.1", "1.2", "2.1", "2.2", "2.3"]

# Attributes that are always run through their converter even when absent
# (see _Validator.__get__), i.e. the required metadata fields.
_REQUIRED_ATTRS = frozenset(["metadata_version", "name", "version"])
|
479 |
+
|
480 |
+
|
481 |
+
class _Validator(Generic[T]):
|
482 |
+
"""Validate a metadata field.
|
483 |
+
|
484 |
+
All _process_*() methods correspond to a core metadata field. The method is
|
485 |
+
called with the field's raw value. If the raw value is valid it is returned
|
486 |
+
in its "enriched" form (e.g. ``version.Version`` for the ``Version`` field).
|
487 |
+
If the raw value is invalid, :exc:`InvalidMetadata` is raised (with a cause
|
488 |
+
as appropriate).
|
489 |
+
"""
|
490 |
+
|
491 |
+
name: str
|
492 |
+
raw_name: str
|
493 |
+
added: _MetadataVersion
|
494 |
+
|
495 |
+
def __init__(
|
496 |
+
self,
|
497 |
+
*,
|
498 |
+
added: _MetadataVersion = "1.0",
|
499 |
+
) -> None:
|
500 |
+
self.added = added
|
501 |
+
|
502 |
+
def __set_name__(self, _owner: "Metadata", name: str) -> None:
|
503 |
+
self.name = name
|
504 |
+
self.raw_name = _RAW_TO_EMAIL_MAPPING[name]
|
505 |
+
|
506 |
+
def __get__(self, instance: "Metadata", _owner: Type["Metadata"]) -> T:
|
507 |
+
# With Python 3.8, the caching can be replaced with functools.cached_property().
|
508 |
+
# No need to check the cache as attribute lookup will resolve into the
|
509 |
+
# instance's __dict__ before __get__ is called.
|
510 |
+
cache = instance.__dict__
|
511 |
+
value = instance._raw.get(self.name)
|
512 |
+
|
513 |
+
# To make the _process_* methods easier, we'll check if the value is None
|
514 |
+
# and if this field is NOT a required attribute, and if both of those
|
515 |
+
# things are true, we'll skip the the converter. This will mean that the
|
516 |
+
# converters never have to deal with the None union.
|
517 |
+
if self.name in _REQUIRED_ATTRS or value is not None:
|
518 |
+
try:
|
519 |
+
converter: Callable[[Any], T] = getattr(self, f"_process_{self.name}")
|
520 |
+
except AttributeError:
|
521 |
+
pass
|
522 |
+
else:
|
523 |
+
value = converter(value)
|
524 |
+
|
525 |
+
cache[self.name] = value
|
526 |
+
try:
|
527 |
+
del instance._raw[self.name] # type: ignore[misc]
|
528 |
+
except KeyError:
|
529 |
+
pass
|
530 |
+
|
531 |
+
return cast(T, value)
|
532 |
+
|
533 |
+
def _invalid_metadata(
|
534 |
+
self, msg: str, cause: Optional[Exception] = None
|
535 |
+
) -> InvalidMetadata:
|
536 |
+
exc = InvalidMetadata(
|
537 |
+
self.raw_name, msg.format_map({"field": repr(self.raw_name)})
|
538 |
+
)
|
539 |
+
exc.__cause__ = cause
|
540 |
+
return exc
|
541 |
+
|
542 |
+
def _process_metadata_version(self, value: str) -> _MetadataVersion:
|
543 |
+
# Implicitly makes Metadata-Version required.
|
544 |
+
if value not in _VALID_METADATA_VERSIONS:
|
545 |
+
raise self._invalid_metadata(f"{value!r} is not a valid metadata version")
|
546 |
+
return cast(_MetadataVersion, value)
|
547 |
+
|
548 |
+
def _process_name(self, value: str) -> str:
|
549 |
+
if not value:
|
550 |
+
raise self._invalid_metadata("{field} is a required field")
|
551 |
+
# Validate the name as a side-effect.
|
552 |
+
try:
|
553 |
+
utils.canonicalize_name(value, validate=True)
|
554 |
+
except utils.InvalidName as exc:
|
555 |
+
raise self._invalid_metadata(
|
556 |
+
f"{value!r} is invalid for {{field}}", cause=exc
|
557 |
+
)
|
558 |
+
else:
|
559 |
+
return value
|
560 |
+
|
561 |
+
def _process_version(self, value: str) -> version_module.Version:
|
562 |
+
if not value:
|
563 |
+
raise self._invalid_metadata("{field} is a required field")
|
564 |
+
try:
|
565 |
+
return version_module.parse(value)
|
566 |
+
except version_module.InvalidVersion as exc:
|
567 |
+
raise self._invalid_metadata(
|
568 |
+
f"{value!r} is invalid for {{field}}", cause=exc
|
569 |
+
)
|
570 |
+
|
571 |
+
def _process_summary(self, value: str) -> str:
|
572 |
+
"""Check the field contains no newlines."""
|
573 |
+
if "\n" in value:
|
574 |
+
raise self._invalid_metadata("{field} must be a single line")
|
575 |
+
return value
|
576 |
+
|
577 |
+
def _process_description_content_type(self, value: str) -> str:
|
578 |
+
content_types = {"text/plain", "text/x-rst", "text/markdown"}
|
579 |
+
message = email.message.EmailMessage()
|
580 |
+
message["content-type"] = value
|
581 |
+
|
582 |
+
content_type, parameters = (
|
583 |
+
# Defaults to `text/plain` if parsing failed.
|
584 |
+
message.get_content_type().lower(),
|
585 |
+
message["content-type"].params,
|
586 |
+
)
|
587 |
+
# Check if content-type is valid or defaulted to `text/plain` and thus was
|
588 |
+
# not parseable.
|
589 |
+
if content_type not in content_types or content_type not in value.lower():
|
590 |
+
raise self._invalid_metadata(
|
591 |
+
f"{{field}} must be one of {list(content_types)}, not {value!r}"
|
592 |
+
)
|
593 |
+
|
594 |
+
charset = parameters.get("charset", "UTF-8")
|
595 |
+
if charset != "UTF-8":
|
596 |
+
raise self._invalid_metadata(
|
597 |
+
f"{{field}} can only specify the UTF-8 charset, not {list(charset)}"
|
598 |
+
)
|
599 |
+
|
600 |
+
markdown_variants = {"GFM", "CommonMark"}
|
601 |
+
variant = parameters.get("variant", "GFM") # Use an acceptable default.
|
602 |
+
if content_type == "text/markdown" and variant not in markdown_variants:
|
603 |
+
raise self._invalid_metadata(
|
604 |
+
f"valid Markdown variants for {{field}} are {list(markdown_variants)}, "
|
605 |
+
f"not {variant!r}",
|
606 |
+
)
|
607 |
+
return value
|
608 |
+
|
609 |
+
def _process_dynamic(self, value: List[str]) -> List[str]:
|
610 |
+
for dynamic_field in map(str.lower, value):
|
611 |
+
if dynamic_field in {"name", "version", "metadata-version"}:
|
612 |
+
raise self._invalid_metadata(
|
613 |
+
f"{value!r} is not allowed as a dynamic field"
|
614 |
+
)
|
615 |
+
elif dynamic_field not in _EMAIL_TO_RAW_MAPPING:
|
616 |
+
raise self._invalid_metadata(f"{value!r} is not a valid dynamic field")
|
617 |
+
return list(map(str.lower, value))
|
618 |
+
|
619 |
+
def _process_provides_extra(
|
620 |
+
self,
|
621 |
+
value: List[str],
|
622 |
+
) -> List[utils.NormalizedName]:
|
623 |
+
normalized_names = []
|
624 |
+
try:
|
625 |
+
for name in value:
|
626 |
+
normalized_names.append(utils.canonicalize_name(name, validate=True))
|
627 |
+
except utils.InvalidName as exc:
|
628 |
+
raise self._invalid_metadata(
|
629 |
+
f"{name!r} is invalid for {{field}}", cause=exc
|
630 |
+
)
|
631 |
+
else:
|
632 |
+
return normalized_names
|
633 |
+
|
634 |
+
def _process_requires_python(self, value: str) -> specifiers.SpecifierSet:
|
635 |
+
try:
|
636 |
+
return specifiers.SpecifierSet(value)
|
637 |
+
except specifiers.InvalidSpecifier as exc:
|
638 |
+
raise self._invalid_metadata(
|
639 |
+
f"{value!r} is invalid for {{field}}", cause=exc
|
640 |
+
)
|
641 |
+
|
642 |
+
def _process_requires_dist(
|
643 |
+
self,
|
644 |
+
value: List[str],
|
645 |
+
) -> List[requirements.Requirement]:
|
646 |
+
reqs = []
|
647 |
+
try:
|
648 |
+
for req in value:
|
649 |
+
reqs.append(requirements.Requirement(req))
|
650 |
+
except requirements.InvalidRequirement as exc:
|
651 |
+
raise self._invalid_metadata(f"{req!r} is invalid for {{field}}", cause=exc)
|
652 |
+
else:
|
653 |
+
return reqs
|
654 |
+
|
655 |
+
|
656 |
+
class Metadata:
|
657 |
+
"""Representation of distribution metadata.
|
658 |
+
|
659 |
+
Compared to :class:`RawMetadata`, this class provides objects representing
|
660 |
+
metadata fields instead of only using built-in types. Any invalid metadata
|
661 |
+
will cause :exc:`InvalidMetadata` to be raised (with a
|
662 |
+
:py:attr:`~BaseException.__cause__` attribute as appropriate).
|
663 |
+
"""
|
664 |
+
|
665 |
+
_raw: RawMetadata
|
666 |
+
|
667 |
+
@classmethod
|
668 |
+
def from_raw(cls, data: RawMetadata, *, validate: bool = True) -> "Metadata":
|
669 |
+
"""Create an instance from :class:`RawMetadata`.
|
670 |
+
|
671 |
+
If *validate* is true, all metadata will be validated. All exceptions
|
672 |
+
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
673 |
+
"""
|
674 |
+
ins = cls()
|
675 |
+
ins._raw = data.copy() # Mutations occur due to caching enriched values.
|
676 |
+
|
677 |
+
if validate:
|
678 |
+
exceptions: List[Exception] = []
|
679 |
+
try:
|
680 |
+
metadata_version = ins.metadata_version
|
681 |
+
metadata_age = _VALID_METADATA_VERSIONS.index(metadata_version)
|
682 |
+
except InvalidMetadata as metadata_version_exc:
|
683 |
+
exceptions.append(metadata_version_exc)
|
684 |
+
metadata_version = None
|
685 |
+
|
686 |
+
# Make sure to check for the fields that are present, the required
|
687 |
+
# fields (so their absence can be reported).
|
688 |
+
fields_to_check = frozenset(ins._raw) | _REQUIRED_ATTRS
|
689 |
+
# Remove fields that have already been checked.
|
690 |
+
fields_to_check -= {"metadata_version"}
|
691 |
+
|
692 |
+
for key in fields_to_check:
|
693 |
+
try:
|
694 |
+
if metadata_version:
|
695 |
+
# Can't use getattr() as that triggers descriptor protocol which
|
696 |
+
# will fail due to no value for the instance argument.
|
697 |
+
try:
|
698 |
+
field_metadata_version = cls.__dict__[key].added
|
699 |
+
except KeyError:
|
700 |
+
exc = InvalidMetadata(key, f"unrecognized field: {key!r}")
|
701 |
+
exceptions.append(exc)
|
702 |
+
continue
|
703 |
+
field_age = _VALID_METADATA_VERSIONS.index(
|
704 |
+
field_metadata_version
|
705 |
+
)
|
706 |
+
if field_age > metadata_age:
|
707 |
+
field = _RAW_TO_EMAIL_MAPPING[key]
|
708 |
+
exc = InvalidMetadata(
|
709 |
+
field,
|
710 |
+
"{field} introduced in metadata version "
|
711 |
+
"{field_metadata_version}, not {metadata_version}",
|
712 |
+
)
|
713 |
+
exceptions.append(exc)
|
714 |
+
continue
|
715 |
+
getattr(ins, key)
|
716 |
+
except InvalidMetadata as exc:
|
717 |
+
exceptions.append(exc)
|
718 |
+
|
719 |
+
if exceptions:
|
720 |
+
raise ExceptionGroup("invalid metadata", exceptions)
|
721 |
+
|
722 |
+
return ins
|
723 |
+
|
724 |
+
@classmethod
|
725 |
+
def from_email(
|
726 |
+
cls, data: Union[bytes, str], *, validate: bool = True
|
727 |
+
) -> "Metadata":
|
728 |
+
"""Parse metadata from email headers.
|
729 |
+
|
730 |
+
If *validate* is true, the metadata will be validated. All exceptions
|
731 |
+
related to validation will be gathered and raised as an :class:`ExceptionGroup`.
|
732 |
+
"""
|
733 |
+
raw, unparsed = parse_email(data)
|
734 |
+
|
735 |
+
if validate:
|
736 |
+
exceptions: list[Exception] = []
|
737 |
+
for unparsed_key in unparsed:
|
738 |
+
if unparsed_key in _EMAIL_TO_RAW_MAPPING:
|
739 |
+
message = f"{unparsed_key!r} has invalid data"
|
740 |
+
else:
|
741 |
+
message = f"unrecognized field: {unparsed_key!r}"
|
742 |
+
exceptions.append(InvalidMetadata(unparsed_key, message))
|
743 |
+
|
744 |
+
if exceptions:
|
745 |
+
raise ExceptionGroup("unparsed", exceptions)
|
746 |
+
|
747 |
+
try:
|
748 |
+
return cls.from_raw(raw, validate=validate)
|
749 |
+
except ExceptionGroup as exc_group:
|
750 |
+
raise ExceptionGroup(
|
751 |
+
"invalid or unparsed metadata", exc_group.exceptions
|
752 |
+
) from None
|
753 |
+
|
754 |
+
metadata_version: _Validator[_MetadataVersion] = _Validator()
|
755 |
+
""":external:ref:`core-metadata-metadata-version`
|
756 |
+
(required; validated to be a valid metadata version)"""
|
757 |
+
name: _Validator[str] = _Validator()
|
758 |
+
""":external:ref:`core-metadata-name`
|
759 |
+
(required; validated using :func:`~packaging.utils.canonicalize_name` and its
|
760 |
+
*validate* parameter)"""
|
761 |
+
version: _Validator[version_module.Version] = _Validator()
|
762 |
+
""":external:ref:`core-metadata-version` (required)"""
|
763 |
+
dynamic: _Validator[Optional[List[str]]] = _Validator(
|
764 |
+
added="2.2",
|
765 |
+
)
|
766 |
+
""":external:ref:`core-metadata-dynamic`
|
767 |
+
(validated against core metadata field names and lowercased)"""
|
768 |
+
platforms: _Validator[Optional[List[str]]] = _Validator()
|
769 |
+
""":external:ref:`core-metadata-platform`"""
|
770 |
+
supported_platforms: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
771 |
+
""":external:ref:`core-metadata-supported-platform`"""
|
772 |
+
summary: _Validator[Optional[str]] = _Validator()
|
773 |
+
""":external:ref:`core-metadata-summary` (validated to contain no newlines)"""
|
774 |
+
description: _Validator[Optional[str]] = _Validator() # TODO 2.1: can be in body
|
775 |
+
""":external:ref:`core-metadata-description`"""
|
776 |
+
description_content_type: _Validator[Optional[str]] = _Validator(added="2.1")
|
777 |
+
""":external:ref:`core-metadata-description-content-type` (validated)"""
|
778 |
+
keywords: _Validator[Optional[List[str]]] = _Validator()
|
779 |
+
""":external:ref:`core-metadata-keywords`"""
|
780 |
+
home_page: _Validator[Optional[str]] = _Validator()
|
781 |
+
""":external:ref:`core-metadata-home-page`"""
|
782 |
+
download_url: _Validator[Optional[str]] = _Validator(added="1.1")
|
783 |
+
""":external:ref:`core-metadata-download-url`"""
|
784 |
+
author: _Validator[Optional[str]] = _Validator()
|
785 |
+
""":external:ref:`core-metadata-author`"""
|
786 |
+
author_email: _Validator[Optional[str]] = _Validator()
|
787 |
+
""":external:ref:`core-metadata-author-email`"""
|
788 |
+
maintainer: _Validator[Optional[str]] = _Validator(added="1.2")
|
789 |
+
""":external:ref:`core-metadata-maintainer`"""
|
790 |
+
maintainer_email: _Validator[Optional[str]] = _Validator(added="1.2")
|
791 |
+
""":external:ref:`core-metadata-maintainer-email`"""
|
792 |
+
license: _Validator[Optional[str]] = _Validator()
|
793 |
+
""":external:ref:`core-metadata-license`"""
|
794 |
+
classifiers: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
795 |
+
""":external:ref:`core-metadata-classifier`"""
|
796 |
+
requires_dist: _Validator[Optional[List[requirements.Requirement]]] = _Validator(
|
797 |
+
added="1.2"
|
798 |
+
)
|
799 |
+
""":external:ref:`core-metadata-requires-dist`"""
|
800 |
+
requires_python: _Validator[Optional[specifiers.SpecifierSet]] = _Validator(
|
801 |
+
added="1.2"
|
802 |
+
)
|
803 |
+
""":external:ref:`core-metadata-requires-python`"""
|
804 |
+
# Because `Requires-External` allows for non-PEP 440 version specifiers, we
|
805 |
+
# don't do any processing on the values.
|
806 |
+
requires_external: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
807 |
+
""":external:ref:`core-metadata-requires-external`"""
|
808 |
+
project_urls: _Validator[Optional[Dict[str, str]]] = _Validator(added="1.2")
|
809 |
+
""":external:ref:`core-metadata-project-url`"""
|
810 |
+
# PEP 685 lets us raise an error if an extra doesn't pass `Name` validation
|
811 |
+
# regardless of metadata version.
|
812 |
+
provides_extra: _Validator[Optional[List[utils.NormalizedName]]] = _Validator(
|
813 |
+
added="2.1",
|
814 |
+
)
|
815 |
+
""":external:ref:`core-metadata-provides-extra`"""
|
816 |
+
provides_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
817 |
+
""":external:ref:`core-metadata-provides-dist`"""
|
818 |
+
obsoletes_dist: _Validator[Optional[List[str]]] = _Validator(added="1.2")
|
819 |
+
""":external:ref:`core-metadata-obsoletes-dist`"""
|
820 |
+
requires: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
821 |
+
"""``Requires`` (deprecated)"""
|
822 |
+
provides: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
823 |
+
"""``Provides`` (deprecated)"""
|
824 |
+
obsoletes: _Validator[Optional[List[str]]] = _Validator(added="1.1")
|
825 |
+
"""``Obsoletes`` (deprecated)"""
|
llmeval-env/lib/python3.10/site-packages/packaging/py.typed
ADDED
File without changes
|
llmeval-env/lib/python3.10/site-packages/packaging/requirements.py
ADDED
@@ -0,0 +1,90 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
from typing import Any, Iterator, Optional, Set
|
6 |
+
|
7 |
+
from ._parser import parse_requirement as _parse_requirement
|
8 |
+
from ._tokenizer import ParserSyntaxError
|
9 |
+
from .markers import Marker, _normalize_extra_values
|
10 |
+
from .specifiers import SpecifierSet
|
11 |
+
from .utils import canonicalize_name
|
12 |
+
|
13 |
+
|
14 |
+
class InvalidRequirement(ValueError):
|
15 |
+
"""
|
16 |
+
An invalid requirement was found, users should refer to PEP 508.
|
17 |
+
"""
|
18 |
+
|
19 |
+
|
20 |
+
class Requirement:
|
21 |
+
"""Parse a requirement.
|
22 |
+
|
23 |
+
Parse a given requirement string into its parts, such as name, specifier,
|
24 |
+
URL, and extras. Raises InvalidRequirement on a badly-formed requirement
|
25 |
+
string.
|
26 |
+
"""
|
27 |
+
|
28 |
+
# TODO: Can we test whether something is contained within a requirement?
|
29 |
+
# If so how do we do that? Do we need to test against the _name_ of
|
30 |
+
# the thing as well as the version? What about the markers?
|
31 |
+
# TODO: Can we normalize the name and extra name?
|
32 |
+
|
33 |
+
def __init__(self, requirement_string: str) -> None:
|
34 |
+
try:
|
35 |
+
parsed = _parse_requirement(requirement_string)
|
36 |
+
except ParserSyntaxError as e:
|
37 |
+
raise InvalidRequirement(str(e)) from e
|
38 |
+
|
39 |
+
self.name: str = parsed.name
|
40 |
+
self.url: Optional[str] = parsed.url or None
|
41 |
+
self.extras: Set[str] = set(parsed.extras or [])
|
42 |
+
self.specifier: SpecifierSet = SpecifierSet(parsed.specifier)
|
43 |
+
self.marker: Optional[Marker] = None
|
44 |
+
if parsed.marker is not None:
|
45 |
+
self.marker = Marker.__new__(Marker)
|
46 |
+
self.marker._markers = _normalize_extra_values(parsed.marker)
|
47 |
+
|
48 |
+
def _iter_parts(self, name: str) -> Iterator[str]:
|
49 |
+
yield name
|
50 |
+
|
51 |
+
if self.extras:
|
52 |
+
formatted_extras = ",".join(sorted(self.extras))
|
53 |
+
yield f"[{formatted_extras}]"
|
54 |
+
|
55 |
+
if self.specifier:
|
56 |
+
yield str(self.specifier)
|
57 |
+
|
58 |
+
if self.url:
|
59 |
+
yield f"@ {self.url}"
|
60 |
+
if self.marker:
|
61 |
+
yield " "
|
62 |
+
|
63 |
+
if self.marker:
|
64 |
+
yield f"; {self.marker}"
|
65 |
+
|
66 |
+
def __str__(self) -> str:
|
67 |
+
return "".join(self._iter_parts(self.name))
|
68 |
+
|
69 |
+
def __repr__(self) -> str:
|
70 |
+
return f"<Requirement('{self}')>"
|
71 |
+
|
72 |
+
def __hash__(self) -> int:
|
73 |
+
return hash(
|
74 |
+
(
|
75 |
+
self.__class__.__name__,
|
76 |
+
*self._iter_parts(canonicalize_name(self.name)),
|
77 |
+
)
|
78 |
+
)
|
79 |
+
|
80 |
+
def __eq__(self, other: Any) -> bool:
|
81 |
+
if not isinstance(other, Requirement):
|
82 |
+
return NotImplemented
|
83 |
+
|
84 |
+
return (
|
85 |
+
canonicalize_name(self.name) == canonicalize_name(other.name)
|
86 |
+
and self.extras == other.extras
|
87 |
+
and self.specifier == other.specifier
|
88 |
+
and self.url == other.url
|
89 |
+
and self.marker == other.marker
|
90 |
+
)
|
llmeval-env/lib/python3.10/site-packages/packaging/specifiers.py
ADDED
@@ -0,0 +1,1017 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
"""
|
5 |
+
.. testsetup::
|
6 |
+
|
7 |
+
from packaging.specifiers import Specifier, SpecifierSet, InvalidSpecifier
|
8 |
+
from packaging.version import Version
|
9 |
+
"""
|
10 |
+
|
11 |
+
import abc
|
12 |
+
import itertools
|
13 |
+
import re
|
14 |
+
from typing import Callable, Iterable, Iterator, List, Optional, Tuple, TypeVar, Union
|
15 |
+
|
16 |
+
from .utils import canonicalize_version
|
17 |
+
from .version import Version
|
18 |
+
|
19 |
+
UnparsedVersion = Union[Version, str]
|
20 |
+
UnparsedVersionVar = TypeVar("UnparsedVersionVar", bound=UnparsedVersion)
|
21 |
+
CallableOperator = Callable[[Version, str], bool]
|
22 |
+
|
23 |
+
|
24 |
+
def _coerce_version(version: UnparsedVersion) -> Version:
|
25 |
+
if not isinstance(version, Version):
|
26 |
+
version = Version(version)
|
27 |
+
return version
|
28 |
+
|
29 |
+
|
30 |
+
class InvalidSpecifier(ValueError):
|
31 |
+
"""
|
32 |
+
Raised when attempting to create a :class:`Specifier` with a specifier
|
33 |
+
string that is invalid.
|
34 |
+
|
35 |
+
>>> Specifier("lolwat")
|
36 |
+
Traceback (most recent call last):
|
37 |
+
...
|
38 |
+
packaging.specifiers.InvalidSpecifier: Invalid specifier: 'lolwat'
|
39 |
+
"""
|
40 |
+
|
41 |
+
|
42 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
|
43 |
+
@abc.abstractmethod
|
44 |
+
def __str__(self) -> str:
|
45 |
+
"""
|
46 |
+
Returns the str representation of this Specifier-like object. This
|
47 |
+
should be representative of the Specifier itself.
|
48 |
+
"""
|
49 |
+
|
50 |
+
@abc.abstractmethod
|
51 |
+
def __hash__(self) -> int:
|
52 |
+
"""
|
53 |
+
Returns a hash value for this Specifier-like object.
|
54 |
+
"""
|
55 |
+
|
56 |
+
@abc.abstractmethod
|
57 |
+
def __eq__(self, other: object) -> bool:
|
58 |
+
"""
|
59 |
+
Returns a boolean representing whether or not the two Specifier-like
|
60 |
+
objects are equal.
|
61 |
+
|
62 |
+
:param other: The other object to check against.
|
63 |
+
"""
|
64 |
+
|
65 |
+
@property
|
66 |
+
@abc.abstractmethod
|
67 |
+
def prereleases(self) -> Optional[bool]:
|
68 |
+
"""Whether or not pre-releases as a whole are allowed.
|
69 |
+
|
70 |
+
This can be set to either ``True`` or ``False`` to explicitly enable or disable
|
71 |
+
prereleases or it can be set to ``None`` (the default) to use default semantics.
|
72 |
+
"""
|
73 |
+
|
74 |
+
@prereleases.setter
|
75 |
+
def prereleases(self, value: bool) -> None:
|
76 |
+
"""Setter for :attr:`prereleases`.
|
77 |
+
|
78 |
+
:param value: The value to set.
|
79 |
+
"""
|
80 |
+
|
81 |
+
@abc.abstractmethod
|
82 |
+
def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
|
83 |
+
"""
|
84 |
+
Determines if the given item is contained within this specifier.
|
85 |
+
"""
|
86 |
+
|
87 |
+
@abc.abstractmethod
|
88 |
+
def filter(
|
89 |
+
self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
|
90 |
+
) -> Iterator[UnparsedVersionVar]:
|
91 |
+
"""
|
92 |
+
Takes an iterable of items and filters them so that only items which
|
93 |
+
are contained within this specifier are allowed in it.
|
94 |
+
"""
|
95 |
+
|
96 |
+
|
97 |
+
class Specifier(BaseSpecifier):
|
98 |
+
"""This class abstracts handling of version specifiers.
|
99 |
+
|
100 |
+
.. tip::
|
101 |
+
|
102 |
+
It is generally not required to instantiate this manually. You should instead
|
103 |
+
prefer to work with :class:`SpecifierSet` instead, which can parse
|
104 |
+
comma-separated version specifiers (which is what package metadata contains).
|
105 |
+
"""
|
106 |
+
|
107 |
+
_operator_regex_str = r"""
|
108 |
+
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
|
109 |
+
"""
|
110 |
+
_version_regex_str = r"""
|
111 |
+
(?P<version>
|
112 |
+
(?:
|
113 |
+
# The identity operators allow for an escape hatch that will
|
114 |
+
# do an exact string match of the version you wish to install.
|
115 |
+
# This will not be parsed by PEP 440 and we cannot determine
|
116 |
+
# any semantic meaning from it. This operator is discouraged
|
117 |
+
# but included entirely as an escape hatch.
|
118 |
+
(?<====) # Only match for the identity operator
|
119 |
+
\s*
|
120 |
+
[^\s;)]* # The arbitrary version can be just about anything,
|
121 |
+
# we match everything except for whitespace, a
|
122 |
+
# semi-colon for marker support, and a closing paren
|
123 |
+
# since versions can be enclosed in them.
|
124 |
+
)
|
125 |
+
|
|
126 |
+
(?:
|
127 |
+
# The (non)equality operators allow for wild card and local
|
128 |
+
# versions to be specified so we have to define these two
|
129 |
+
# operators separately to enable that.
|
130 |
+
(?<===|!=) # Only match for equals and not equals
|
131 |
+
|
132 |
+
\s*
|
133 |
+
v?
|
134 |
+
(?:[0-9]+!)? # epoch
|
135 |
+
[0-9]+(?:\.[0-9]+)* # release
|
136 |
+
|
137 |
+
# You cannot use a wild card and a pre-release, post-release, a dev or
|
138 |
+
# local version together so group them with a | and make them optional.
|
139 |
+
(?:
|
140 |
+
\.\* # Wild card syntax of .*
|
141 |
+
|
|
142 |
+
(?: # pre release
|
143 |
+
[-_\.]?
|
144 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
145 |
+
[-_\.]?
|
146 |
+
[0-9]*
|
147 |
+
)?
|
148 |
+
(?: # post release
|
149 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
150 |
+
)?
|
151 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
152 |
+
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
153 |
+
)?
|
154 |
+
)
|
155 |
+
|
|
156 |
+
(?:
|
157 |
+
# The compatible operator requires at least two digits in the
|
158 |
+
# release segment.
|
159 |
+
(?<=~=) # Only match for the compatible operator
|
160 |
+
|
161 |
+
\s*
|
162 |
+
v?
|
163 |
+
(?:[0-9]+!)? # epoch
|
164 |
+
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
|
165 |
+
(?: # pre release
|
166 |
+
[-_\.]?
|
167 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
168 |
+
[-_\.]?
|
169 |
+
[0-9]*
|
170 |
+
)?
|
171 |
+
(?: # post release
|
172 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
173 |
+
)?
|
174 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
175 |
+
)
|
176 |
+
|
|
177 |
+
(?:
|
178 |
+
# All other operators only allow a sub set of what the
|
179 |
+
# (non)equality operators do. Specifically they do not allow
|
180 |
+
# local versions to be specified nor do they allow the prefix
|
181 |
+
# matching wild cards.
|
182 |
+
(?<!==|!=|~=) # We have special cases for these
|
183 |
+
# operators so we want to make sure they
|
184 |
+
# don't match here.
|
185 |
+
|
186 |
+
\s*
|
187 |
+
v?
|
188 |
+
(?:[0-9]+!)? # epoch
|
189 |
+
[0-9]+(?:\.[0-9]+)* # release
|
190 |
+
(?: # pre release
|
191 |
+
[-_\.]?
|
192 |
+
(alpha|beta|preview|pre|a|b|c|rc)
|
193 |
+
[-_\.]?
|
194 |
+
[0-9]*
|
195 |
+
)?
|
196 |
+
(?: # post release
|
197 |
+
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
|
198 |
+
)?
|
199 |
+
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
|
200 |
+
)
|
201 |
+
)
|
202 |
+
"""
|
203 |
+
|
204 |
+
_regex = re.compile(
|
205 |
+
r"^\s*" + _operator_regex_str + _version_regex_str + r"\s*$",
|
206 |
+
re.VERBOSE | re.IGNORECASE,
|
207 |
+
)
|
208 |
+
|
209 |
+
_operators = {
|
210 |
+
"~=": "compatible",
|
211 |
+
"==": "equal",
|
212 |
+
"!=": "not_equal",
|
213 |
+
"<=": "less_than_equal",
|
214 |
+
">=": "greater_than_equal",
|
215 |
+
"<": "less_than",
|
216 |
+
">": "greater_than",
|
217 |
+
"===": "arbitrary",
|
218 |
+
}
|
219 |
+
|
220 |
+
def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
|
221 |
+
"""Initialize a Specifier instance.
|
222 |
+
|
223 |
+
:param spec:
|
224 |
+
The string representation of a specifier which will be parsed and
|
225 |
+
normalized before use.
|
226 |
+
:param prereleases:
|
227 |
+
This tells the specifier if it should accept prerelease versions if
|
228 |
+
applicable or not. The default of ``None`` will autodetect it from the
|
229 |
+
given specifiers.
|
230 |
+
:raises InvalidSpecifier:
|
231 |
+
If the given specifier is invalid (i.e. bad syntax).
|
232 |
+
"""
|
233 |
+
match = self._regex.search(spec)
|
234 |
+
if not match:
|
235 |
+
raise InvalidSpecifier(f"Invalid specifier: '{spec}'")
|
236 |
+
|
237 |
+
self._spec: Tuple[str, str] = (
|
238 |
+
match.group("operator").strip(),
|
239 |
+
match.group("version").strip(),
|
240 |
+
)
|
241 |
+
|
242 |
+
# Store whether or not this Specifier should accept prereleases
|
243 |
+
self._prereleases = prereleases
|
244 |
+
|
245 |
+
# https://github.com/python/mypy/pull/13475#pullrequestreview-1079784515
|
246 |
+
@property # type: ignore[override]
|
247 |
+
def prereleases(self) -> bool:
|
248 |
+
# If there is an explicit prereleases set for this, then we'll just
|
249 |
+
# blindly use that.
|
250 |
+
if self._prereleases is not None:
|
251 |
+
return self._prereleases
|
252 |
+
|
253 |
+
# Look at all of our specifiers and determine if they are inclusive
|
254 |
+
# operators, and if they are if they are including an explicit
|
255 |
+
# prerelease.
|
256 |
+
operator, version = self._spec
|
257 |
+
if operator in ["==", ">=", "<=", "~=", "==="]:
|
258 |
+
# The == specifier can include a trailing .*, if it does we
|
259 |
+
# want to remove before parsing.
|
260 |
+
if operator == "==" and version.endswith(".*"):
|
261 |
+
version = version[:-2]
|
262 |
+
|
263 |
+
# Parse the version, and if it is a pre-release than this
|
264 |
+
# specifier allows pre-releases.
|
265 |
+
if Version(version).is_prerelease:
|
266 |
+
return True
|
267 |
+
|
268 |
+
return False
|
269 |
+
|
270 |
+
@prereleases.setter
|
271 |
+
def prereleases(self, value: bool) -> None:
|
272 |
+
self._prereleases = value
|
273 |
+
|
274 |
+
@property
|
275 |
+
def operator(self) -> str:
|
276 |
+
"""The operator of this specifier.
|
277 |
+
|
278 |
+
>>> Specifier("==1.2.3").operator
|
279 |
+
'=='
|
280 |
+
"""
|
281 |
+
return self._spec[0]
|
282 |
+
|
283 |
+
@property
|
284 |
+
def version(self) -> str:
|
285 |
+
"""The version of this specifier.
|
286 |
+
|
287 |
+
>>> Specifier("==1.2.3").version
|
288 |
+
'1.2.3'
|
289 |
+
"""
|
290 |
+
return self._spec[1]
|
291 |
+
|
292 |
+
def __repr__(self) -> str:
|
293 |
+
"""A representation of the Specifier that shows all internal state.
|
294 |
+
|
295 |
+
>>> Specifier('>=1.0.0')
|
296 |
+
<Specifier('>=1.0.0')>
|
297 |
+
>>> Specifier('>=1.0.0', prereleases=False)
|
298 |
+
<Specifier('>=1.0.0', prereleases=False)>
|
299 |
+
>>> Specifier('>=1.0.0', prereleases=True)
|
300 |
+
<Specifier('>=1.0.0', prereleases=True)>
|
301 |
+
"""
|
302 |
+
pre = (
|
303 |
+
f", prereleases={self.prereleases!r}"
|
304 |
+
if self._prereleases is not None
|
305 |
+
else ""
|
306 |
+
)
|
307 |
+
|
308 |
+
return f"<{self.__class__.__name__}({str(self)!r}{pre})>"
|
309 |
+
|
310 |
+
def __str__(self) -> str:
|
311 |
+
"""A string representation of the Specifier that can be round-tripped.
|
312 |
+
|
313 |
+
>>> str(Specifier('>=1.0.0'))
|
314 |
+
'>=1.0.0'
|
315 |
+
>>> str(Specifier('>=1.0.0', prereleases=False))
|
316 |
+
'>=1.0.0'
|
317 |
+
"""
|
318 |
+
return "{}{}".format(*self._spec)
|
319 |
+
|
320 |
+
@property
|
321 |
+
def _canonical_spec(self) -> Tuple[str, str]:
|
322 |
+
canonical_version = canonicalize_version(
|
323 |
+
self._spec[1],
|
324 |
+
strip_trailing_zero=(self._spec[0] != "~="),
|
325 |
+
)
|
326 |
+
return self._spec[0], canonical_version
|
327 |
+
|
328 |
+
def __hash__(self) -> int:
|
329 |
+
return hash(self._canonical_spec)
|
330 |
+
|
331 |
+
def __eq__(self, other: object) -> bool:
|
332 |
+
"""Whether or not the two Specifier-like objects are equal.
|
333 |
+
|
334 |
+
:param other: The other object to check against.
|
335 |
+
|
336 |
+
The value of :attr:`prereleases` is ignored.
|
337 |
+
|
338 |
+
>>> Specifier("==1.2.3") == Specifier("== 1.2.3.0")
|
339 |
+
True
|
340 |
+
>>> (Specifier("==1.2.3", prereleases=False) ==
|
341 |
+
... Specifier("==1.2.3", prereleases=True))
|
342 |
+
True
|
343 |
+
>>> Specifier("==1.2.3") == "==1.2.3"
|
344 |
+
True
|
345 |
+
>>> Specifier("==1.2.3") == Specifier("==1.2.4")
|
346 |
+
False
|
347 |
+
>>> Specifier("==1.2.3") == Specifier("~=1.2.3")
|
348 |
+
False
|
349 |
+
"""
|
350 |
+
if isinstance(other, str):
|
351 |
+
try:
|
352 |
+
other = self.__class__(str(other))
|
353 |
+
except InvalidSpecifier:
|
354 |
+
return NotImplemented
|
355 |
+
elif not isinstance(other, self.__class__):
|
356 |
+
return NotImplemented
|
357 |
+
|
358 |
+
return self._canonical_spec == other._canonical_spec
|
359 |
+
|
360 |
+
def _get_operator(self, op: str) -> CallableOperator:
|
361 |
+
operator_callable: CallableOperator = getattr(
|
362 |
+
self, f"_compare_{self._operators[op]}"
|
363 |
+
)
|
364 |
+
return operator_callable
|
365 |
+
|
366 |
+
def _compare_compatible(self, prospective: Version, spec: str) -> bool:
|
367 |
+
|
368 |
+
# Compatible releases have an equivalent combination of >= and ==. That
|
369 |
+
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
|
370 |
+
# implement this in terms of the other specifiers instead of
|
371 |
+
# implementing it ourselves. The only thing we need to do is construct
|
372 |
+
# the other specifiers.
|
373 |
+
|
374 |
+
# We want everything but the last item in the version, but we want to
|
375 |
+
# ignore suffix segments.
|
376 |
+
prefix = _version_join(
|
377 |
+
list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
|
378 |
+
)
|
379 |
+
|
380 |
+
# Add the prefix notation to the end of our string
|
381 |
+
prefix += ".*"
|
382 |
+
|
383 |
+
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
|
384 |
+
prospective, prefix
|
385 |
+
)
|
386 |
+
|
387 |
+
def _compare_equal(self, prospective: Version, spec: str) -> bool:
|
388 |
+
|
389 |
+
# We need special logic to handle prefix matching
|
390 |
+
if spec.endswith(".*"):
|
391 |
+
# In the case of prefix matching we want to ignore local segment.
|
392 |
+
normalized_prospective = canonicalize_version(
|
393 |
+
prospective.public, strip_trailing_zero=False
|
394 |
+
)
|
395 |
+
# Get the normalized version string ignoring the trailing .*
|
396 |
+
normalized_spec = canonicalize_version(spec[:-2], strip_trailing_zero=False)
|
397 |
+
# Split the spec out by bangs and dots, and pretend that there is
|
398 |
+
# an implicit dot in between a release segment and a pre-release segment.
|
399 |
+
split_spec = _version_split(normalized_spec)
|
400 |
+
|
401 |
+
# Split the prospective version out by bangs and dots, and pretend
|
402 |
+
# that there is an implicit dot in between a release segment and
|
403 |
+
# a pre-release segment.
|
404 |
+
split_prospective = _version_split(normalized_prospective)
|
405 |
+
|
406 |
+
# 0-pad the prospective version before shortening it to get the correct
|
407 |
+
# shortened version.
|
408 |
+
padded_prospective, _ = _pad_version(split_prospective, split_spec)
|
409 |
+
|
410 |
+
# Shorten the prospective version to be the same length as the spec
|
411 |
+
# so that we can determine if the specifier is a prefix of the
|
412 |
+
# prospective version or not.
|
413 |
+
shortened_prospective = padded_prospective[: len(split_spec)]
|
414 |
+
|
415 |
+
return shortened_prospective == split_spec
|
416 |
+
else:
|
417 |
+
# Convert our spec string into a Version
|
418 |
+
spec_version = Version(spec)
|
419 |
+
|
420 |
+
# If the specifier does not have a local segment, then we want to
|
421 |
+
# act as if the prospective version also does not have a local
|
422 |
+
# segment.
|
423 |
+
if not spec_version.local:
|
424 |
+
prospective = Version(prospective.public)
|
425 |
+
|
426 |
+
return prospective == spec_version
|
427 |
+
|
428 |
+
def _compare_not_equal(self, prospective: Version, spec: str) -> bool:
|
429 |
+
return not self._compare_equal(prospective, spec)
|
430 |
+
|
431 |
+
def _compare_less_than_equal(self, prospective: Version, spec: str) -> bool:
|
432 |
+
|
433 |
+
# NB: Local version identifiers are NOT permitted in the version
|
434 |
+
# specifier, so local version labels can be universally removed from
|
435 |
+
# the prospective version.
|
436 |
+
return Version(prospective.public) <= Version(spec)
|
437 |
+
|
438 |
+
def _compare_greater_than_equal(self, prospective: Version, spec: str) -> bool:
|
439 |
+
|
440 |
+
# NB: Local version identifiers are NOT permitted in the version
|
441 |
+
# specifier, so local version labels can be universally removed from
|
442 |
+
# the prospective version.
|
443 |
+
return Version(prospective.public) >= Version(spec)
|
444 |
+
|
445 |
+
def _compare_less_than(self, prospective: Version, spec_str: str) -> bool:
|
446 |
+
|
447 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
448 |
+
# it as a version.
|
449 |
+
spec = Version(spec_str)
|
450 |
+
|
451 |
+
# Check to see if the prospective version is less than the spec
|
452 |
+
# version. If it's not we can short circuit and just return False now
|
453 |
+
# instead of doing extra unneeded work.
|
454 |
+
if not prospective < spec:
|
455 |
+
return False
|
456 |
+
|
457 |
+
# This special case is here so that, unless the specifier itself
|
458 |
+
# includes is a pre-release version, that we do not accept pre-release
|
459 |
+
# versions for the version mentioned in the specifier (e.g. <3.1 should
|
460 |
+
# not match 3.1.dev0, but should match 3.0.dev0).
|
461 |
+
if not spec.is_prerelease and prospective.is_prerelease:
|
462 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
463 |
+
return False
|
464 |
+
|
465 |
+
# If we've gotten to here, it means that prospective version is both
|
466 |
+
# less than the spec version *and* it's not a pre-release of the same
|
467 |
+
# version in the spec.
|
468 |
+
return True
|
469 |
+
|
470 |
+
def _compare_greater_than(self, prospective: Version, spec_str: str) -> bool:
|
471 |
+
|
472 |
+
# Convert our spec to a Version instance, since we'll want to work with
|
473 |
+
# it as a version.
|
474 |
+
spec = Version(spec_str)
|
475 |
+
|
476 |
+
# Check to see if the prospective version is greater than the spec
|
477 |
+
# version. If it's not we can short circuit and just return False now
|
478 |
+
# instead of doing extra unneeded work.
|
479 |
+
if not prospective > spec:
|
480 |
+
return False
|
481 |
+
|
482 |
+
# This special case is here so that, unless the specifier itself
|
483 |
+
# includes is a post-release version, that we do not accept
|
484 |
+
# post-release versions for the version mentioned in the specifier
|
485 |
+
# (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
|
486 |
+
if not spec.is_postrelease and prospective.is_postrelease:
|
487 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
488 |
+
return False
|
489 |
+
|
490 |
+
# Ensure that we do not allow a local version of the version mentioned
|
491 |
+
# in the specifier, which is technically greater than, to match.
|
492 |
+
if prospective.local is not None:
|
493 |
+
if Version(prospective.base_version) == Version(spec.base_version):
|
494 |
+
return False
|
495 |
+
|
496 |
+
# If we've gotten to here, it means that prospective version is both
|
497 |
+
# greater than the spec version *and* it's not a pre-release of the
|
498 |
+
# same version in the spec.
|
499 |
+
return True
|
500 |
+
|
501 |
+
def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
|
502 |
+
return str(prospective).lower() == str(spec).lower()
|
503 |
+
|
504 |
+
def __contains__(self, item: Union[str, Version]) -> bool:
|
505 |
+
"""Return whether or not the item is contained in this specifier.
|
506 |
+
|
507 |
+
:param item: The item to check for.
|
508 |
+
|
509 |
+
This is used for the ``in`` operator and behaves the same as
|
510 |
+
:meth:`contains` with no ``prereleases`` argument passed.
|
511 |
+
|
512 |
+
>>> "1.2.3" in Specifier(">=1.2.3")
|
513 |
+
True
|
514 |
+
>>> Version("1.2.3") in Specifier(">=1.2.3")
|
515 |
+
True
|
516 |
+
>>> "1.0.0" in Specifier(">=1.2.3")
|
517 |
+
False
|
518 |
+
>>> "1.3.0a1" in Specifier(">=1.2.3")
|
519 |
+
False
|
520 |
+
>>> "1.3.0a1" in Specifier(">=1.2.3", prereleases=True)
|
521 |
+
True
|
522 |
+
"""
|
523 |
+
return self.contains(item)
|
524 |
+
|
525 |
+
def contains(
|
526 |
+
self, item: UnparsedVersion, prereleases: Optional[bool] = None
|
527 |
+
) -> bool:
|
528 |
+
"""Return whether or not the item is contained in this specifier.
|
529 |
+
|
530 |
+
:param item:
|
531 |
+
The item to check for, which can be a version string or a
|
532 |
+
:class:`Version` instance.
|
533 |
+
:param prereleases:
|
534 |
+
Whether or not to match prereleases with this Specifier. If set to
|
535 |
+
``None`` (the default), it uses :attr:`prereleases` to determine
|
536 |
+
whether or not prereleases are allowed.
|
537 |
+
|
538 |
+
>>> Specifier(">=1.2.3").contains("1.2.3")
|
539 |
+
True
|
540 |
+
>>> Specifier(">=1.2.3").contains(Version("1.2.3"))
|
541 |
+
True
|
542 |
+
>>> Specifier(">=1.2.3").contains("1.0.0")
|
543 |
+
False
|
544 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1")
|
545 |
+
False
|
546 |
+
>>> Specifier(">=1.2.3", prereleases=True).contains("1.3.0a1")
|
547 |
+
True
|
548 |
+
>>> Specifier(">=1.2.3").contains("1.3.0a1", prereleases=True)
|
549 |
+
True
|
550 |
+
"""
|
551 |
+
|
552 |
+
# Determine if prereleases are to be allowed or not.
|
553 |
+
if prereleases is None:
|
554 |
+
prereleases = self.prereleases
|
555 |
+
|
556 |
+
# Normalize item to a Version, this allows us to have a shortcut for
|
557 |
+
# "2.0" in Specifier(">=2")
|
558 |
+
normalized_item = _coerce_version(item)
|
559 |
+
|
560 |
+
# Determine if we should be supporting prereleases in this specifier
|
561 |
+
# or not, if we do not support prereleases than we can short circuit
|
562 |
+
# logic if this version is a prereleases.
|
563 |
+
if normalized_item.is_prerelease and not prereleases:
|
564 |
+
return False
|
565 |
+
|
566 |
+
# Actually do the comparison to determine if this item is contained
|
567 |
+
# within this Specifier or not.
|
568 |
+
operator_callable: CallableOperator = self._get_operator(self.operator)
|
569 |
+
return operator_callable(normalized_item, self.version)
|
570 |
+
|
571 |
+
def filter(
|
572 |
+
self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
|
573 |
+
) -> Iterator[UnparsedVersionVar]:
|
574 |
+
"""Filter items in the given iterable, that match the specifier.
|
575 |
+
|
576 |
+
:param iterable:
|
577 |
+
An iterable that can contain version strings and :class:`Version` instances.
|
578 |
+
The items in the iterable will be filtered according to the specifier.
|
579 |
+
:param prereleases:
|
580 |
+
Whether or not to allow prereleases in the returned iterator. If set to
|
581 |
+
``None`` (the default), it will be intelligently decide whether to allow
|
582 |
+
prereleases or not (based on the :attr:`prereleases` attribute, and
|
583 |
+
whether the only versions matching are prereleases).
|
584 |
+
|
585 |
+
This method is smarter than just ``filter(Specifier().contains, [...])``
|
586 |
+
because it implements the rule from :pep:`440` that a prerelease item
|
587 |
+
SHOULD be accepted if no other versions match the given specifier.
|
588 |
+
|
589 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
|
590 |
+
['1.3']
|
591 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.2.3", "1.3", Version("1.4")]))
|
592 |
+
['1.2.3', '1.3', <Version('1.4')>]
|
593 |
+
>>> list(Specifier(">=1.2.3").filter(["1.2", "1.5a1"]))
|
594 |
+
['1.5a1']
|
595 |
+
>>> list(Specifier(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
|
596 |
+
['1.3', '1.5a1']
|
597 |
+
>>> list(Specifier(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
|
598 |
+
['1.3', '1.5a1']
|
599 |
+
"""
|
600 |
+
|
601 |
+
yielded = False
|
602 |
+
found_prereleases = []
|
603 |
+
|
604 |
+
kw = {"prereleases": prereleases if prereleases is not None else True}
|
605 |
+
|
606 |
+
# Attempt to iterate over all the values in the iterable and if any of
|
607 |
+
# them match, yield them.
|
608 |
+
for version in iterable:
|
609 |
+
parsed_version = _coerce_version(version)
|
610 |
+
|
611 |
+
if self.contains(parsed_version, **kw):
|
612 |
+
# If our version is a prerelease, and we were not set to allow
|
613 |
+
# prereleases, then we'll store it for later in case nothing
|
614 |
+
# else matches this specifier.
|
615 |
+
if parsed_version.is_prerelease and not (
|
616 |
+
prereleases or self.prereleases
|
617 |
+
):
|
618 |
+
found_prereleases.append(version)
|
619 |
+
# Either this is not a prerelease, or we should have been
|
620 |
+
# accepting prereleases from the beginning.
|
621 |
+
else:
|
622 |
+
yielded = True
|
623 |
+
yield version
|
624 |
+
|
625 |
+
# Now that we've iterated over everything, determine if we've yielded
|
626 |
+
# any values, and if we have not and we have any prereleases stored up
|
627 |
+
# then we will go ahead and yield the prereleases.
|
628 |
+
if not yielded and found_prereleases:
|
629 |
+
for version in found_prereleases:
|
630 |
+
yield version
|
631 |
+
|
632 |
+
|
633 |
+
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
634 |
+
|
635 |
+
|
636 |
+
def _version_split(version: str) -> List[str]:
|
637 |
+
"""Split version into components.
|
638 |
+
|
639 |
+
The split components are intended for version comparison. The logic does
|
640 |
+
not attempt to retain the original version string, so joining the
|
641 |
+
components back with :func:`_version_join` may not produce the original
|
642 |
+
version string.
|
643 |
+
"""
|
644 |
+
result: List[str] = []
|
645 |
+
|
646 |
+
epoch, _, rest = version.rpartition("!")
|
647 |
+
result.append(epoch or "0")
|
648 |
+
|
649 |
+
for item in rest.split("."):
|
650 |
+
match = _prefix_regex.search(item)
|
651 |
+
if match:
|
652 |
+
result.extend(match.groups())
|
653 |
+
else:
|
654 |
+
result.append(item)
|
655 |
+
return result
|
656 |
+
|
657 |
+
|
658 |
+
def _version_join(components: List[str]) -> str:
|
659 |
+
"""Join split version components into a version string.
|
660 |
+
|
661 |
+
This function assumes the input came from :func:`_version_split`, where the
|
662 |
+
first component must be the epoch (either empty or numeric), and all other
|
663 |
+
components numeric.
|
664 |
+
"""
|
665 |
+
epoch, *rest = components
|
666 |
+
return f"{epoch}!{'.'.join(rest)}"
|
667 |
+
|
668 |
+
|
669 |
+
def _is_not_suffix(segment: str) -> bool:
|
670 |
+
return not any(
|
671 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
672 |
+
)
|
673 |
+
|
674 |
+
|
675 |
+
def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
|
676 |
+
left_split, right_split = [], []
|
677 |
+
|
678 |
+
# Get the release segment of our versions
|
679 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
680 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
681 |
+
|
682 |
+
# Get the rest of our versions
|
683 |
+
left_split.append(left[len(left_split[0]) :])
|
684 |
+
right_split.append(right[len(right_split[0]) :])
|
685 |
+
|
686 |
+
# Insert our padding
|
687 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
688 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
689 |
+
|
690 |
+
return (
|
691 |
+
list(itertools.chain.from_iterable(left_split)),
|
692 |
+
list(itertools.chain.from_iterable(right_split)),
|
693 |
+
)
|
694 |
+
|
695 |
+
|
696 |
+
class SpecifierSet(BaseSpecifier):
    """This class abstracts handling of a set of version specifiers.

    It can be passed a single specifier (``>=3.0``), a comma-separated list of
    specifiers (``>=3.0,!=3.1``), or no specifier at all.
    """

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Initialize a SpecifierSet instance.

        :param specifiers:
            The string representation of a specifier or a comma-separated list of
            specifiers which will be parsed and normalized before use.
        :param prereleases:
            This tells the SpecifierSet if it should accept prerelease versions if
            applicable or not. The default of ``None`` will autodetect it from the
            given specifiers.

        :raises InvalidSpecifier:
            If the given ``specifiers`` are not parseable than this exception will be
            raised.
        """
        # Each comma-separated chunk becomes one Specifier; surrounding
        # whitespace and empty chunks are discarded.
        chunks = (chunk.strip() for chunk in specifiers.split(","))
        self._specs = frozenset(Specifier(chunk) for chunk in chunks if chunk)

        # Explicit prerelease preference (None = autodetect from specs).
        self._prereleases = prereleases

    @property
    def prereleases(self) -> Optional[bool]:
        # An explicit override always wins.
        if self._prereleases is not None:
            return self._prereleases

        # With no specifiers and no override there is nothing to infer.
        if not self._specs:
            return None

        # Otherwise prereleases are allowed iff any member allows them.
        return any(spec.prereleases for spec in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        # An explicit assignment overrides autodetection.
        self._prereleases = value

    def __repr__(self) -> str:
        """A representation of the specifier set that shows all internal state.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> SpecifierSet('>=1.0.0,!=2.0.0')
        <SpecifierSet('!=2.0.0,>=1.0.0')>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=False)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=False)>
        >>> SpecifierSet('>=1.0.0,!=2.0.0', prereleases=True)
        <SpecifierSet('!=2.0.0,>=1.0.0', prereleases=True)>
        """
        # Only surface the prereleases flag when it was set explicitly.
        if self._prereleases is None:
            pre = ""
        else:
            pre = f", prereleases={self.prereleases!r}"

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        """A string representation of the specifier set that can be round-tripped.

        Note that the ordering of the individual specifiers within the set may not
        match the input string.

        >>> str(SpecifierSet(">=1.0.0,!=1.0.1"))
        '!=1.0.1,>=1.0.0'
        >>> str(SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False))
        '!=1.0.1,>=1.0.0'
        """
        # Sorted so the output is deterministic despite the frozenset.
        return ",".join(sorted(str(spec) for spec in self._specs))

    def __hash__(self) -> int:
        # The frozenset of member specifiers fully determines identity.
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Return a SpecifierSet which is a combination of the two sets.

        :param other: The other object to combine with.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") & '<=2.0.0,!=2.0.1'
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        >>> SpecifierSet(">=1.0.0,!=1.0.1") & SpecifierSet('<=2.0.0,!=2.0.1')
        <SpecifierSet('!=1.0.1,!=2.0.1,<=2.0.0,>=1.0.0')>
        """
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        combined = SpecifierSet()
        combined._specs = frozenset(self._specs | other._specs)

        # Merge the prerelease overrides: an unset side defers to the
        # other; matching explicit overrides carry through; conflicting
        # explicit overrides cannot be reconciled.
        if self._prereleases is None:
            combined._prereleases = other._prereleases
        elif other._prereleases is None or self._prereleases == other._prereleases:
            combined._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return combined

    def __eq__(self, other: object) -> bool:
        """Whether or not the two SpecifierSet-like objects are equal.

        :param other: The other object to check against.

        The value of :attr:`prereleases` is ignored.

        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> (SpecifierSet(">=1.0.0,!=1.0.1", prereleases=False) ==
        ...  SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == ">=1.0.0,!=1.0.1"
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1") == SpecifierSet(">=1.0.0,!=1.0.2")
        False
        """
        # Strings and single Specifiers are parsed into sets first.
        if isinstance(other, (str, Specifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        """Returns the number of specifiers in this specifier set."""
        return len(self._specs)

    def __iter__(self) -> Iterator[Specifier]:
        """
        Returns an iterator over all the underlying :class:`Specifier` instances
        in this specifier set.

        >>> sorted(SpecifierSet(">=1.0.0,!=1.0.1"), key=str)
        [<Specifier('!=1.0.1')>, <Specifier('>=1.0.0')>]
        """
        return iter(self._specs)

    def __contains__(self, item: UnparsedVersion) -> bool:
        """Return whether or not the item is contained in this specifier.

        :param item: The item to check for.

        This is used for the ``in`` operator and behaves the same as
        :meth:`contains` with no ``prereleases`` argument passed.

        >>> "1.2.3" in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> Version("1.2.3") in SpecifierSet(">=1.0.0,!=1.0.1")
        True
        >>> "1.0.1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1")
        False
        >>> "1.3.0a1" in SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True)
        True
        """
        # Thin delegation; ``contains`` holds the actual logic.
        return self.contains(item)

    def contains(
        self,
        item: UnparsedVersion,
        prereleases: Optional[bool] = None,
        installed: Optional[bool] = None,
    ) -> bool:
        """Return whether or not the item is contained in this SpecifierSet.

        :param item:
            The item to check for, which can be a version string or a
            :class:`Version` instance.
        :param prereleases:
            Whether or not to match prereleases with this SpecifierSet. If set to
            ``None`` (the default), it uses :attr:`prereleases` to determine
            whether or not prereleases are allowed.

        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.2.3")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains(Version("1.2.3"))
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.0.1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1")
        False
        >>> SpecifierSet(">=1.0.0,!=1.0.1", prereleases=True).contains("1.3.0a1")
        True
        >>> SpecifierSet(">=1.0.0,!=1.0.1").contains("1.3.0a1", prereleases=True)
        True
        """
        # Normalize to a Version instance up front.
        if not isinstance(item, Version):
            item = Version(item)

        # Without an explicit per-call preference, defer to the set-level
        # prerelease policy.
        if prereleases is None:
            prereleases = self.prereleases

        # Prereleases are rejected outright unless some specifier (or the
        # caller) allows them; short-circuit before any real comparison.
        # Note: This means that 1.0.dev1 would not be contained in
        # something like >=1.0.devabc however it would be in
        # >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # For installed-candidate checks a prerelease is compared by its
        # base version.
        if installed and item.is_prerelease:
            item = Version(item.base_version)

        # Every member specifier must accept the version. ``all`` over an
        # empty set is True: an empty SpecifierSet accepts everything,
        # which is an explicit design decision.
        return all(
            spec.contains(item, prereleases=prereleases) for spec in self._specs
        )

    def filter(
        self, iterable: Iterable[UnparsedVersionVar], prereleases: Optional[bool] = None
    ) -> Iterator[UnparsedVersionVar]:
        """Filter items in the given iterable, that match the specifiers in this set.

        :param iterable:
            An iterable that can contain version strings and :class:`Version` instances.
            The items in the iterable will be filtered according to the specifier.
        :param prereleases:
            Whether or not to allow prereleases in the returned iterator. If set to
            ``None`` (the default), it will be intelligently decide whether to allow
            prereleases or not (based on the :attr:`prereleases` attribute, and
            whether the only versions matching are prereleases).

        This method is smarter than just ``filter(SpecifierSet(...).contains, [...])``
        because it implements the rule from :pep:`440` that a prerelease item
        SHOULD be accepted if no other versions match the given specifier.

        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.3", Version("1.4")]))
        ['1.3', <Version('1.4')>]
        >>> list(SpecifierSet(">=1.2.3").filter(["1.2", "1.5a1"]))
        []
        >>> list(SpecifierSet(">=1.2.3").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet(">=1.2.3", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']

        An "empty" SpecifierSet will filter items based on the presence of prerelease
        versions in the set.

        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"]))
        ['1.3']
        >>> list(SpecifierSet("").filter(["1.5a1"]))
        ['1.5a1']
        >>> list(SpecifierSet("", prereleases=True).filter(["1.3", "1.5a1"]))
        ['1.3', '1.5a1']
        >>> list(SpecifierSet("").filter(["1.3", "1.5a1"], prereleases=True))
        ['1.3', '1.5a1']
        """
        # Without an explicit per-call preference, defer to the set-level
        # prerelease policy.
        if prereleases is None:
            prereleases = self.prereleases

        # With member specifiers, chain each one's filter over the
        # iterable: the result is the logical AND of all of them.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iter(iterable)

        # With no specifiers, drop prereleases unless they are explicitly
        # allowed or they are the only candidates available.
        finals: List[UnparsedVersionVar] = []
        held_prereleases: List[UnparsedVersionVar] = []

        for item in iterable:
            parsed_version = _coerce_version(item)

            # Hold a prerelease for later unless a final release has
            # already been seen.
            if parsed_version.is_prerelease and not prereleases:
                if not finals:
                    held_prereleases.append(item)
            else:
                finals.append(item)

        # Only prereleases matched (and no explicit refusal): use them.
        if not finals and held_prereleases and prereleases is None:
            return iter(held_prereleases)

        return iter(finals)
|
llmeval-env/lib/python3.10/site-packages/packaging/tags.py
ADDED
@@ -0,0 +1,571 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
import logging
|
6 |
+
import platform
|
7 |
+
import re
|
8 |
+
import struct
|
9 |
+
import subprocess
|
10 |
+
import sys
|
11 |
+
import sysconfig
|
12 |
+
from importlib.machinery import EXTENSION_SUFFIXES
|
13 |
+
from typing import (
|
14 |
+
Dict,
|
15 |
+
FrozenSet,
|
16 |
+
Iterable,
|
17 |
+
Iterator,
|
18 |
+
List,
|
19 |
+
Optional,
|
20 |
+
Sequence,
|
21 |
+
Tuple,
|
22 |
+
Union,
|
23 |
+
cast,
|
24 |
+
)
|
25 |
+
|
26 |
+
from . import _manylinux, _musllinux
|
27 |
+
|
28 |
+
# Module-level logger; used to note unset sysconfig variables.
logger = logging.getLogger(__name__)

# A (major, minor, ...) interpreter version, e.g. (3, 10).
PythonVersion = Sequence[int]
# A (major, minor) macOS release, e.g. (12, 0).
MacVersion = Tuple[int, int]

# Maps sys.implementation.name to the short interpreter abbreviation
# used in wheel tags.
INTERPRETER_SHORT_NAMES: Dict[str, str] = {
    "python": "py",  # Generic.
    "cpython": "cp",
    "pypy": "pp",
    "ironpython": "ip",
    "jython": "jy",
}


# True when the running interpreter uses 32-bit pointers (a 32-bit build,
# regardless of the host OS word size).
_32_BIT_INTERPRETER = struct.calcsize("P") == 4
|
43 |
+
|
44 |
+
|
45 |
+
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags compare case-insensitively, so normalize once up front.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # Pre-compute the hash: sets of tags evaluate __hash__ of every
        # element very frequently (e.g. via set disjointness checks while
        # scanning pages of package links), so caching it is a significant
        # speedup for downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented
        # Hash mismatch short-circuits the more expensive string compares.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return "-".join((self._interpreter, self._abi, self._platform))

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
|
97 |
+
|
98 |
+
|
99 |
+
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set: each of the three components may itself be a
    '.'-separated list.
    """
    interpreters, abis, platforms = tag.split("-")
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
|
113 |
+
|
114 |
+
|
115 |
+
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    """Look up *name* via sysconfig.get_config_var.

    Returns None when the variable is unset; optionally logs a debug note in
    that case, since a missing variable can make the computed ABI tag wrong.
    """
    value: Union[int, str, None] = sysconfig.get_config_var(name)
    if warn and value is None:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value
|
122 |
+
|
123 |
+
|
124 |
+
def _normalize_string(string: str) -> str:
    """Replace '.', '-', and ' ' with '_' (wheel platform-tag normalization)."""
    return string.translate(str.maketrans({".": "_", "-": "_", " ": "_"}))
|
126 |
+
|
127 |
+
|
128 |
+
def _is_threaded_cpython(abis: List[str]) -> bool:
    """
    Determine if the ABI corresponds to a threaded (`--disable-gil`) build.

    The threaded builds are indicated by a "t" in the abiflags.
    """
    if not abis:
        return False
    # Only the first ABI entry is inspected; expected shape e.g. "cp313t".
    match = re.match(r"cp\d+(.*)", abis[0])
    return bool(match) and "t" in match.group(1)
|
142 |
+
|
143 |
+
|
144 |
+
def _abi3_applies(python_version: PythonVersion, threading: bool) -> bool:
    """
    Determine if the Python version supports abi3.

    PEP 384 was first implemented in Python 3.2. The threaded (`--disable-gil`)
    builds do not support abi3.
    """
    if threading:
        return False
    # Requires at least a (major, minor) pair, and at least Python 3.2.
    return len(python_version) > 1 and tuple(python_version) >= (3, 2)
|
152 |
+
|
153 |
+
|
154 |
+
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """Compute the CPython ABI tags for *py_version*, most specific first.

    Build flags are probed via sysconfig (debug, free-threading, and — on
    very old versions — pymalloc and UCS-4); *warn* is forwarded to those
    probes so unset variables can be logged.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    # ABI flag suffixes; each stays "" unless the matching build option is
    # detected below.
    threading = debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    # The "t" flag marks --disable-gil builds (3.13+).
    if py_version >= (3, 13) and _get_config_var("Py_GIL_DISABLED", warn):
        threading = "t"
    if py_version < (3, 8):
        # The "m" (pymalloc) and "u" (UCS-4) flags only exist on old versions.
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}{threading}")
    # The fully-flagged ABI goes first so it takes priority.
    abis.insert(0, f"cp{version}{threading}{debug}{pymalloc}{ucs4}")
    return abis
|
185 |
+
|
186 |
+
|
187 |
+
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter, in priority order.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            # Major-only version: cannot compute flagged ABIs.
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    # Materialize platforms once so the generator can be iterated repeatedly.
    platforms = list(platforms or platform_tags())
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)

    # abi3 never applies to threaded (--disable-gil) builds.
    threading = _is_threaded_cpython(abis)
    use_abi3 = _abi3_applies(python_version, threading)
    if use_abi3:
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if use_abi3:
        # abi3 wheels built against older minor versions (down to 3.2) also work.
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
|
245 |
+
|
246 |
+
|
247 |
+
def _generic_abi() -> List[str]:
    """
    Return the ABI tag based on EXT_SUFFIX.

    The suffix is split on '.'; the second component is the SOABI string,
    from which the platform-related trailing parts are dropped and the
    remainder normalized into a single tag.
    """
    # The following are examples of `EXT_SUFFIX`.
    # We want to keep the parts which are related to the ABI and remove the
    # parts which are related to the platform:
    # - linux: '.cpython-310-x86_64-linux-gnu.so' => cp310
    # - mac: '.cpython-310-darwin.so' => cp310
    # - win: '.cp310-win_amd64.pyd' => cp310
    # - win: '.pyd' => cp37 (uses _cpython_abis())
    # - pypy: '.pypy38-pp73-x86_64-linux-gnu.so' => pypy38_pp73
    # - graalpy: '.graalpy-38-native-x86_64-darwin.dylib'
    # => graalpy_38_native

    ext_suffix = _get_config_var("EXT_SUFFIX", warn=True)
    if not isinstance(ext_suffix, str) or ext_suffix[0] != ".":
        # A missing/malformed EXT_SUFFIX is an interpreter bug, not user error.
        raise SystemError("invalid sysconfig.get_config_var('EXT_SUFFIX')")
    parts = ext_suffix.split(".")
    if len(parts) < 3:
        # CPython3.7 and earlier uses ".pyd" on Windows.
        return _cpython_abis(sys.version_info[:2])
    soabi = parts[1]
    if soabi.startswith("cpython"):
        # non-windows
        abi = "cp" + soabi.split("-")[1]
    elif soabi.startswith("cp"):
        # windows
        abi = soabi.split("-")[0]
    elif soabi.startswith("pypy"):
        abi = "-".join(soabi.split("-")[:2])
    elif soabi.startswith("graalpy"):
        abi = "-".join(soabi.split("-")[:3])
    elif soabi:
        # pyston, ironpython, others?
        abi = soabi
    else:
        return []
    return [_normalize_string(abi)]
|
286 |
+
|
287 |
+
|
288 |
+
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        # Derive e.g. "pp39" from the running interpreter.
        interpreter = interpreter_name() + interpreter_version(warn=warn)
    all_abis = _generic_abi() if abis is None else list(abis)
    if "none" not in all_abis:
        all_abis.append("none")
    all_platforms = list(platforms or platform_tags())
    for abi in all_abis:
        for platform_ in all_platforms:
            yield Tag(interpreter, abi, platform_)
|
317 |
+
|
318 |
+
|
319 |
+
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{py_version[0]}"
    if has_minor:
        for minor in range(py_version[1] - 1, -1, -1):
            yield f"py{_version_nodot((py_version[0], minor))}"
|
332 |
+
|
333 |
+
|
334 |
+
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    version = python_version or sys.version_info[:2]
    platform_list = list(platforms or platform_tags())
    # Platform-specific pure-Python tags first, most specific version first.
    for py in _py_interpreter_range(version):
        for platform_ in platform_list:
            yield Tag(py, "none", platform_)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    # Fully generic ("any" platform) tags come last.
    for py in _py_interpreter_range(version):
        yield Tag(py, "none", "any")
|
357 |
+
|
358 |
+
|
359 |
+
def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
    """Map the reported CPU arch to the arch a 32-bit interpreter can run."""
    if is_32bit:
        # 32-bit interpreters run "ppc" on PowerPC, "i386" everywhere else.
        return "ppc" if arch.startswith("ppc") else "i386"
    return arch
|
367 |
+
|
368 |
+
|
369 |
+
def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
    """Return the binary-format tags for *cpu_arch* on macOS *version*.

    The plain arch comes first, then the fat/universal formats it can
    participate in; an empty list means the arch is not usable on that
    macOS release at all.
    """
    if cpu_arch == "x86_64":
        if version < (10, 4):
            return []
        extra = ["intel", "fat64", "fat32"]
    elif cpu_arch == "i386":
        if version < (10, 4):
            return []
        extra = ["intel", "fat32", "fat"]
    elif cpu_arch == "ppc64":
        # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
        if version > (10, 5) or version < (10, 4):
            return []
        extra = ["fat64"]
    elif cpu_arch == "ppc":
        if version > (10, 6):
            return []
        extra = ["fat32", "fat"]
    else:
        extra = []

    formats = [cpu_arch, *extra]
    if cpu_arch in {"arm64", "x86_64"}:
        formats.append("universal2")
    if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
        formats.append("universal")
    return formats
|
399 |
+
|
400 |
+
|
401 |
+
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system, newest/most specific first.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
        if version == (10, 16):
            # When built against an older macOS SDK, Python will report macOS 10.16
            # instead of the real version.
            # Ask a fresh interpreter with SYSTEM_VERSION_COMPAT=0 for the
            # real value ("-sS" skips site/user customization).
            version_str = subprocess.run(
                [
                    sys.executable,
                    "-sS",
                    "-c",
                    "import platform; print(platform.mac_ver()[0])",
                ],
                check=True,
                env={"SYSTEM_VERSION_COMPAT": "0"},
                stdout=subprocess.PIPE,
                text=True,
            ).stdout
            version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
|
487 |
+
|
488 |
+
|
489 |
+
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux, then musllinux, then plain linux."""
    linux = _normalize_string(sysconfig.get_platform())
    if not linux.startswith("linux_"):
        # we should never be here, just yield the sysconfig one and return
        yield linux
        return
    if is_32bit:
        # A 32-bit interpreter on a 64-bit platform: tag with the 32-bit arch
        # the interpreter can actually load.
        if linux == "linux_x86_64":
            linux = "linux_i686"
        elif linux == "linux_aarch64":
            linux = "linux_armv8l"
    _, arch = linux.split("_", 1)
    # armv8l is given both armv8l and armv7l tags; other arches map to
    # themselves.
    archs = {"armv8l": ["armv8l", "armv7l"]}.get(arch, [arch])
    yield from _manylinux.platform_tags(archs)
    yield from _musllinux.platform_tags(archs)
    for arch in archs:
        yield f"linux_{arch}"
|
506 |
+
|
507 |
+
|
508 |
+
def _generic_platforms() -> Iterator[str]:
    """Yield the single sysconfig platform string, normalized for wheel tags."""
    # Inlined '.', '-', ' ' -> '_' normalization (same rule as elsewhere
    # in this module).
    yield sysconfig.get_platform().replace(".", "_").replace("-", "_").replace(
        " ", "_"
    )
|
510 |
+
|
511 |
+
|
512 |
+
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
|
522 |
+
|
523 |
+
|
524 |
+
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter.

    Some implementations have a reserved, two-letter abbreviation which will
    be returned when appropriate.
    """
    name = sys.implementation.name
    # Fall back to the full implementation name when no short form is mapped.
    short = INTERPRETER_SHORT_NAMES.get(name)
    return short if short else name
|
533 |
+
|
534 |
+
|
535 |
+
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter (no-dot form, e.g. "310").
    """
    configured = _get_config_var("py_version_nodot", warn=warn)
    if configured:
        return str(configured)
    # Fall back to sys.version_info when sysconfig does not provide it.
    return _version_nodot(sys.version_info[:2])
|
545 |
+
|
546 |
+
|
547 |
+
def _version_nodot(version: PythonVersion) -> str:
    """Join version components without separators: (3, 10) -> "310"."""
    return "".join(str(part) for part in version)
|
549 |
+
|
550 |
+
|
551 |
+
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    interp_name = interpreter_name()
    is_cpython = interp_name == "cp"
    if is_cpython:
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # Compatible (pure-Python) tags come last. CPython and PyPy also get a
    # versioned interpreter for the "<interp>-none-any" entry.
    if interp_name == "pp":
        interp = "pp3"
    elif is_cpython:
        interp = "cp" + interpreter_version(warn=warn)
    else:
        interp = None
    yield from compatible_tags(interpreter=interp)
|
llmeval-env/lib/python3.10/site-packages/packaging/utils.py
ADDED
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
import re
|
6 |
+
from typing import FrozenSet, NewType, Tuple, Union, cast
|
7 |
+
|
8 |
+
from .tags import Tag, parse_tag
|
9 |
+
from .version import InvalidVersion, Version
|
10 |
+
|
11 |
+
BuildTag = Union[Tuple[()], Tuple[int, str]]
|
12 |
+
NormalizedName = NewType("NormalizedName", str)
|
13 |
+
|
14 |
+
|
15 |
+
# Raised by canonicalize_name(..., validate=True) below.
class InvalidName(ValueError):
    """
    An invalid distribution name; users should refer to the packaging user guide.
    """


# Raised by parse_wheel_filename below.
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.
    """


# Raised by parse_sdist_filename below.
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.
    """
|
31 |
+
|
32 |
+
|
33 |
+
# Core metadata spec for `Name`
_validate_regex = re.compile(
    r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", re.IGNORECASE
)
# PEP 503 normalization: any run of '-', '_', '.' collapses to a single '-'.
_canonicalize_regex = re.compile(r"[-_.]+")
# An already-normalized name: lowercase alphanumerics, single-'-' separators.
_normalized_regex = re.compile(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
41 |
+
|
42 |
+
|
43 |
+
def canonicalize_name(name: str, *, validate: bool = False) -> NormalizedName:
    """Normalize a project name per PEP 503 (lowercase, '-'-separated).

    When *validate* is true, the raw name is first checked against the core
    metadata `Name` rules and InvalidName is raised if it does not conform.
    """
    if validate and _validate_regex.match(name) is None:
        raise InvalidName(f"name is invalid: {name!r}")
    # This is taken from PEP 503.
    canonical = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, canonical)
|
49 |
+
|
50 |
+
|
51 |
+
def is_normalized_name(name: str) -> bool:
    """Return True if *name* is already in PEP 503 normalized form."""
    # Inlined module pattern: lowercase alphanumerics separated by single
    # hyphens (no leading/trailing or doubled '-').
    return re.match(r"^([a-z0-9]|[a-z0-9]([a-z0-9-](?!--))*[a-z0-9])$", name) is not None
|
53 |
+
|
54 |
+
|
55 |
+
def canonicalize_version(
    version: Union[Version, str], *, strip_trailing_zero: bool = True
) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment: trailing '.0's may be
    stripped for normalization.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    # Build each PEP 440 segment independently, empty when absent.
    epoch = f"{parsed.epoch}!" if parsed.epoch != 0 else ""
    release = ".".join(str(x) for x in parsed.release)
    if strip_trailing_zero:
        # NB: This strips trailing '.0's to normalize
        release = re.sub(r"(\.0)+$", "", release)
    pre = "".join(str(x) for x in parsed.pre) if parsed.pre is not None else ""
    post = f".post{parsed.post}" if parsed.post is not None else ""
    dev = f".dev{parsed.dev}" if parsed.dev is not None else ""
    local = f"+{parsed.local}" if parsed.local is not None else ""

    return f"{epoch}{release}{pre}{post}{dev}{local}"
|
101 |
+
|
102 |
+
|
103 |
+
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """Parse a PEP 427 wheel filename into (name, version, build tag, tags).

    :raises InvalidWheelFilename: when any component of *filename* is
        malformed. Error messages include the offending filename (the
        previous f-strings contained a literal "(unknown)" placeholder and
        never reported which file failed).
    """
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    # Split from the left, keeping the compressed tag set intact in the tail.
    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name.
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)

    try:
        version = Version(parts[1])
    except InvalidVersion as e:
        raise InvalidWheelFilename(
            f"Invalid wheel filename (invalid version): {filename}"
        ) from e

    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
|
144 |
+
|
145 |
+
|
146 |
+
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """Parse an sdist filename into (normalized name, version).

    :raises InvalidSdistFilename: for unknown extensions or unparsable
        name/version parts. Error messages include the offending filename
        (the previous f-strings contained a literal "(unknown)" placeholder
        and never reported which file failed).
    """
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)

    try:
        version = Version(version_part)
    except InvalidVersion as e:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (invalid version): {filename}"
        ) from e

    return (name, version)
|
llmeval-env/lib/python3.10/site-packages/packaging/version.py
ADDED
@@ -0,0 +1,563 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
"""
|
5 |
+
.. testsetup::
|
6 |
+
|
7 |
+
from packaging.version import parse, Version
|
8 |
+
"""
|
9 |
+
|
10 |
+
import itertools
|
11 |
+
import re
|
12 |
+
from typing import Any, Callable, NamedTuple, Optional, SupportsInt, Tuple, Union
|
13 |
+
|
14 |
+
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
15 |
+
|
16 |
+
__all__ = ["VERSION_PATTERN", "parse", "Version", "InvalidVersion"]
|
17 |
+
|
18 |
+
LocalType = Tuple[Union[int, str], ...]
|
19 |
+
|
20 |
+
CmpPrePostDevType = Union[InfinityType, NegativeInfinityType, Tuple[str, int]]
|
21 |
+
CmpLocalType = Union[
|
22 |
+
NegativeInfinityType,
|
23 |
+
Tuple[Union[Tuple[int, str], Tuple[NegativeInfinityType, Union[int, str]]], ...],
|
24 |
+
]
|
25 |
+
CmpKey = Tuple[
|
26 |
+
int,
|
27 |
+
Tuple[int, ...],
|
28 |
+
CmpPrePostDevType,
|
29 |
+
CmpPrePostDevType,
|
30 |
+
CmpPrePostDevType,
|
31 |
+
CmpLocalType,
|
32 |
+
]
|
33 |
+
VersionComparisonMethod = Callable[[CmpKey, CmpKey], bool]
|
34 |
+
|
35 |
+
|
36 |
+
class _Version(NamedTuple):
    # Structured form of a parsed PEP 440 version string, as built by
    # Version.__init__ from the regex match groups.  Field order is part of
    # the NamedTuple interface -- do not reorder.
    epoch: int  # the "N!" prefix; 0 when absent
    release: Tuple[int, ...]  # the dotted numeric release segment, e.g. (1, 2, 3)
    dev: Optional[Tuple[str, int]]  # ("dev", N) or None
    pre: Optional[Tuple[str, int]]  # normalized letter/number pair, e.g. ("a", 1), ("rc", 2), or None
    post: Optional[Tuple[str, int]]  # ("post", N) or None
    local: Optional[LocalType]  # parsed local segment, e.g. ("abc", 1), or None
|
43 |
+
|
44 |
+
|
45 |
+
def parse(version: str) -> "Version":
    """Parse *version* into a :class:`Version` object.

    >>> parse('1.0.dev1')
    <Version('1.0.dev1')>

    :param version: The version string to parse.
    :raises InvalidVersion: When the version string is not a valid version.
    """
    return Version(version)
|
55 |
+
|
56 |
+
|
57 |
+
class InvalidVersion(ValueError):
    """Raised when a version string does not conform to PEP 440.

    >>> Version("invalid")
    Traceback (most recent call last):
        ...
    packaging.version.InvalidVersion: Invalid version: 'invalid'
    """
|
65 |
+
|
66 |
+
|
67 |
+
class _BaseVersion:
|
68 |
+
_key: Tuple[Any, ...]
|
69 |
+
|
70 |
+
def __hash__(self) -> int:
|
71 |
+
return hash(self._key)
|
72 |
+
|
73 |
+
# Please keep the duplicated `isinstance` check
|
74 |
+
# in the six comparisons hereunder
|
75 |
+
# unless you find a way to avoid adding overhead function calls.
|
76 |
+
def __lt__(self, other: "_BaseVersion") -> bool:
|
77 |
+
if not isinstance(other, _BaseVersion):
|
78 |
+
return NotImplemented
|
79 |
+
|
80 |
+
return self._key < other._key
|
81 |
+
|
82 |
+
def __le__(self, other: "_BaseVersion") -> bool:
|
83 |
+
if not isinstance(other, _BaseVersion):
|
84 |
+
return NotImplemented
|
85 |
+
|
86 |
+
return self._key <= other._key
|
87 |
+
|
88 |
+
def __eq__(self, other: object) -> bool:
|
89 |
+
if not isinstance(other, _BaseVersion):
|
90 |
+
return NotImplemented
|
91 |
+
|
92 |
+
return self._key == other._key
|
93 |
+
|
94 |
+
def __ge__(self, other: "_BaseVersion") -> bool:
|
95 |
+
if not isinstance(other, _BaseVersion):
|
96 |
+
return NotImplemented
|
97 |
+
|
98 |
+
return self._key >= other._key
|
99 |
+
|
100 |
+
def __gt__(self, other: "_BaseVersion") -> bool:
|
101 |
+
if not isinstance(other, _BaseVersion):
|
102 |
+
return NotImplemented
|
103 |
+
|
104 |
+
return self._key > other._key
|
105 |
+
|
106 |
+
def __ne__(self, other: object) -> bool:
|
107 |
+
if not isinstance(other, _BaseVersion):
|
108 |
+
return NotImplemented
|
109 |
+
|
110 |
+
return self._key != other._key
|
111 |
+
|
112 |
+
|
113 |
+
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
# NOTE: this is a re.VERBOSE pattern -- whitespace and "#" comments inside the
# string are ignored by the regex engine.
_VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>alpha|a|beta|b|preview|pre|c|rc)
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""

VERSION_PATTERN = _VERSION_PATTERN
"""
A string containing the regular expression used to match a valid version.

The pattern is not anchored at either end, and is intended for embedding in larger
expressions (for example, matching a version number as part of a file name). The
regular expression should be compiled with the ``re.VERBOSE`` and ``re.IGNORECASE``
flags set.

:meta hide-value:
"""
|
157 |
+
|
158 |
+
|
159 |
+
class Version(_BaseVersion):
    """This class abstracts handling of a project's versions.

    A :class:`Version` instance is comparison aware and can be compared and
    sorted using the standard Python interfaces.

    >>> v1 = Version("1.0a5")
    >>> v2 = Version("1.0")
    >>> v1
    <Version('1.0a5')>
    >>> v2
    <Version('1.0')>
    >>> v1 < v2
    True
    >>> v1 == v2
    False
    >>> v1 > v2
    False
    >>> v1 >= v2
    False
    >>> v1 <= v2
    True
    """

    # Anchored variant of the shared pattern, allowing surrounding whitespace.
    # re.VERBOSE is required because VERSION_PATTERN contains comments and
    # whitespace; re.IGNORECASE accepts e.g. "1.0RC1".
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)
    # Sort key consumed by the _BaseVersion comparison methods.
    _key: CmpKey

    def __init__(self, version: str) -> None:
        """Initialize a Version object.

        :param version:
            The string representation of a version which will be parsed and normalized
            before use.
        :raises InvalidVersion:
            If the ``version`` does not conform to PEP 440 in any way then this
            exception will be raised.
        """

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                # post_n1 matches the implicit "-N" form, post_n2 the
                # explicit ".postN"/"revN"/"rN" forms; at most one is set.
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        """A representation of the Version that shows all internal state.

        >>> Version('1.0.0')
        <Version('1.0.0')>
        """
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """A string representation of the version that can be rounded-tripped.

        >>> str(Version("1.0a5"))
        '1.0a5'
        """
        # Assemble the canonical (PEP 440 normalized) form segment by segment.
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        """The epoch of the version.

        >>> Version("2.0.0").epoch
        0
        >>> Version("1!2.0.0").epoch
        1
        """
        return self._version.epoch

    @property
    def release(self) -> Tuple[int, ...]:
        """The components of the "release" segment of the version.

        >>> Version("1.2.3").release
        (1, 2, 3)
        >>> Version("2.0.0").release
        (2, 0, 0)
        >>> Version("1!2.0.0.post0").release
        (2, 0, 0)

        Includes trailing zeroes but not the epoch or any pre-release / development /
        post-release suffixes.
        """
        return self._version.release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        """The pre-release segment of the version.

        >>> print(Version("1.2.3").pre)
        None
        >>> Version("1.2.3a1").pre
        ('a', 1)
        >>> Version("1.2.3b1").pre
        ('b', 1)
        >>> Version("1.2.3rc1").pre
        ('rc', 1)
        """
        return self._version.pre

    @property
    def post(self) -> Optional[int]:
        """The post-release number of the version.

        >>> print(Version("1.2.3").post)
        None
        >>> Version("1.2.3.post1").post
        1
        """
        # Stored internally as ("post", N); expose just N.
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        """The development number of the version.

        >>> print(Version("1.2.3").dev)
        None
        >>> Version("1.2.3.dev1").dev
        1
        """
        # Stored internally as ("dev", N); expose just N.
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        """The local version segment of the version.

        >>> print(Version("1.2.3").local)
        None
        >>> Version("1.2.3+abc").local
        'abc'
        """
        if self._version.local:
            # Re-join the parsed (str | int) segments with the canonical ".".
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        """The public portion of the version.

        >>> Version("1.2.3").public
        '1.2.3'
        >>> Version("1.2.3+abc").public
        '1.2.3'
        >>> Version("1.2.3+abc.dev1").public
        '1.2.3'
        """
        # Everything before the "+" local-segment separator.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        """The "base version" of the version.

        >>> Version("1.2.3").base_version
        '1.2.3'
        >>> Version("1.2.3+abc").base_version
        '1.2.3'
        >>> Version("1!1.2.3+abc.dev1").base_version
        '1!1.2.3'

        The "base version" is the public version of the project without any pre or post
        release markers.
        """
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        """Whether this version is a pre-release.

        >>> Version("1.2.3").is_prerelease
        False
        >>> Version("1.2.3a1").is_prerelease
        True
        >>> Version("1.2.3b1").is_prerelease
        True
        >>> Version("1.2.3rc1").is_prerelease
        True
        >>> Version("1.2.3dev1").is_prerelease
        True
        """
        # Dev releases count as pre-releases for installer purposes.
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        """Whether this version is a post-release.

        >>> Version("1.2.3").is_postrelease
        False
        >>> Version("1.2.3.post1").is_postrelease
        True
        """
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        """Whether this version is a development release.

        >>> Version("1.2.3").is_devrelease
        False
        >>> Version("1.2.3.dev1").is_devrelease
        True
        """
        return self.dev is not None

    @property
    def major(self) -> int:
        """The first item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").major
        1
        """
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        """The second item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").minor
        2
        >>> Version("1").minor
        0
        """
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        """The third item of :attr:`release` or ``0`` if unavailable.

        >>> Version("1.2.3").micro
        3
        >>> Version("1").micro
        0
        """
        return self.release[2] if len(self.release) >= 3 else 0
|
450 |
+
|
451 |
+
|
452 |
+
def _parse_letter_version(
|
453 |
+
letter: Optional[str], number: Union[str, bytes, SupportsInt, None]
|
454 |
+
) -> Optional[Tuple[str, int]]:
|
455 |
+
|
456 |
+
if letter:
|
457 |
+
# We consider there to be an implicit 0 in a pre-release if there is
|
458 |
+
# not a numeral associated with it.
|
459 |
+
if number is None:
|
460 |
+
number = 0
|
461 |
+
|
462 |
+
# We normalize any letters to their lower case form
|
463 |
+
letter = letter.lower()
|
464 |
+
|
465 |
+
# We consider some words to be alternate spellings of other words and
|
466 |
+
# in those cases we want to normalize the spellings to our preferred
|
467 |
+
# spelling.
|
468 |
+
if letter == "alpha":
|
469 |
+
letter = "a"
|
470 |
+
elif letter == "beta":
|
471 |
+
letter = "b"
|
472 |
+
elif letter in ["c", "pre", "preview"]:
|
473 |
+
letter = "rc"
|
474 |
+
elif letter in ["rev", "r"]:
|
475 |
+
letter = "post"
|
476 |
+
|
477 |
+
return letter, int(number)
|
478 |
+
if not letter and number:
|
479 |
+
# We assume if we are given a number, but we are not given a letter
|
480 |
+
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
481 |
+
letter = "post"
|
482 |
+
|
483 |
+
return letter, int(number)
|
484 |
+
|
485 |
+
return None
|
486 |
+
|
487 |
+
|
488 |
+
_local_version_separators = re.compile(r"[\._-]")
|
489 |
+
|
490 |
+
|
491 |
+
def _parse_local_version(local: Optional[str]) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").

    Numeric segments become ints, alphanumeric segments are lower-cased, and
    ``None`` passes through unchanged.
    """
    if local is None:
        return None
    segments = _local_version_separators.split(local)
    return tuple(int(seg) if seg.isdigit() else seg.lower() for seg in segments)
|
501 |
+
|
502 |
+
|
503 |
+
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[LocalType],
) -> CmpKey:
    """Build the tuple that Version uses as its PEP 440 ordering key."""
    # Release segments compare with trailing zeros removed, so that for
    # example 1.0 and 1.0.0 sort (and hash) identically.
    trimmed = list(release)
    while trimmed and trimmed[-1] == 0:
        trimmed.pop()
    _release = tuple(trimmed)

    if pre is None:
        # Trick the sort so 1.0.dev0 lands before 1.0a0: when only a dev
        # segment exists, treat the (absent) pre segment as smaller than
        # anything; otherwise an absent pre segment sorts after any real one.
        _pre: CmpPrePostDevType = (
            NegativeInfinity if post is None and dev is not None else Infinity
        )
    else:
        _pre = pre

    # An absent post segment sorts before any present one.
    _post: CmpPrePostDevType = NegativeInfinity if post is None else post

    # An absent dev segment sorts after any present one.
    _dev: CmpPrePostDevType = Infinity if dev is None else dev

    if local is None:
        # An absent local segment sorts before any present one.
        _local: CmpLocalType = NegativeInfinity
    else:
        # PEP 440 local-segment ordering: numeric parts compare numerically
        # and after alphanumeric parts; alphanumeric parts compare
        # lexicographically; shorter tuples sort first on a shared prefix.
        _local = tuple(
            (seg, "") if isinstance(seg, int) else (NegativeInfinity, seg)
            for seg in local
        )

    return epoch, _release, _pre, _post, _dev, _local
|