Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llmeval-env/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/__pycache__/__main__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__init__.py +25 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/_manylinux.py +301 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/_musllinux.py +136 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/_structures.py +61 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/specifiers.py +802 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/utils.py +136 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/version.py +504 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__init__.py +242 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/_imp.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/depends.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/dist.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/errors.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/extension.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/glob.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/monkey.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/msvc.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/namespaces.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/package_index.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/py34compat.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/sandbox.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/unicode_utils.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/wheel.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_deprecation_warning.py +7 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/_imp.py +82 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/archive_util.py +205 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/build_meta.py +290 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/cli-32.exe +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/cli-64.exe +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/cli-arm64.exe +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/cli.exe +0 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/command/__init__.py +8 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/command/egg_info.py +755 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/command/install_egg_info.py +82 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/command/install_lib.py +148 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/command/saveopts.py +22 -0
- llmeval-env/lib/python3.10/site-packages/setuptools/command/test.py +252 -0
llmeval-env/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (632 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/__pycache__/__main__.cpython-310.pyc
ADDED
Binary file (594 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__init__.py
ADDED
@@ -0,0 +1,25 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
from .__about__ import (
|
6 |
+
__author__,
|
7 |
+
__copyright__,
|
8 |
+
__email__,
|
9 |
+
__license__,
|
10 |
+
__summary__,
|
11 |
+
__title__,
|
12 |
+
__uri__,
|
13 |
+
__version__,
|
14 |
+
)
|
15 |
+
|
16 |
+
__all__ = [
|
17 |
+
"__title__",
|
18 |
+
"__summary__",
|
19 |
+
"__uri__",
|
20 |
+
"__version__",
|
21 |
+
"__author__",
|
22 |
+
"__email__",
|
23 |
+
"__license__",
|
24 |
+
"__copyright__",
|
25 |
+
]
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-310.pyc
ADDED
Binary file (591 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (447 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-310.pyc
ADDED
Binary file (7.3 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-310.pyc
ADDED
Binary file (4.61 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-310.pyc
ADDED
Binary file (2.71 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-310.pyc
ADDED
Binary file (9.29 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-310.pyc
ADDED
Binary file (3.98 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc
ADDED
Binary file (21.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-310.pyc
ADDED
Binary file (12.2 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-310.pyc
ADDED
Binary file (3.58 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-310.pyc
ADDED
Binary file (12.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/_manylinux.py
ADDED
@@ -0,0 +1,301 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import collections
|
2 |
+
import functools
|
3 |
+
import os
|
4 |
+
import re
|
5 |
+
import struct
|
6 |
+
import sys
|
7 |
+
import warnings
|
8 |
+
from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
|
9 |
+
|
10 |
+
|
11 |
+
# Python does not provide platform information at sufficient granularity to
|
12 |
+
# identify the architecture of the running executable in some cases, so we
|
13 |
+
# determine it dynamically by reading the information from the running
|
14 |
+
# process. This only applies on Linux, which uses the ELF format.
|
15 |
+
class _ELFFileHeader:
|
16 |
+
# https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
|
17 |
+
class _InvalidELFFileHeader(ValueError):
|
18 |
+
"""
|
19 |
+
An invalid ELF file header was found.
|
20 |
+
"""
|
21 |
+
|
22 |
+
ELF_MAGIC_NUMBER = 0x7F454C46
|
23 |
+
ELFCLASS32 = 1
|
24 |
+
ELFCLASS64 = 2
|
25 |
+
ELFDATA2LSB = 1
|
26 |
+
ELFDATA2MSB = 2
|
27 |
+
EM_386 = 3
|
28 |
+
EM_S390 = 22
|
29 |
+
EM_ARM = 40
|
30 |
+
EM_X86_64 = 62
|
31 |
+
EF_ARM_ABIMASK = 0xFF000000
|
32 |
+
EF_ARM_ABI_VER5 = 0x05000000
|
33 |
+
EF_ARM_ABI_FLOAT_HARD = 0x00000400
|
34 |
+
|
35 |
+
def __init__(self, file: IO[bytes]) -> None:
|
36 |
+
def unpack(fmt: str) -> int:
|
37 |
+
try:
|
38 |
+
data = file.read(struct.calcsize(fmt))
|
39 |
+
result: Tuple[int, ...] = struct.unpack(fmt, data)
|
40 |
+
except struct.error:
|
41 |
+
raise _ELFFileHeader._InvalidELFFileHeader()
|
42 |
+
return result[0]
|
43 |
+
|
44 |
+
self.e_ident_magic = unpack(">I")
|
45 |
+
if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
|
46 |
+
raise _ELFFileHeader._InvalidELFFileHeader()
|
47 |
+
self.e_ident_class = unpack("B")
|
48 |
+
if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
|
49 |
+
raise _ELFFileHeader._InvalidELFFileHeader()
|
50 |
+
self.e_ident_data = unpack("B")
|
51 |
+
if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
|
52 |
+
raise _ELFFileHeader._InvalidELFFileHeader()
|
53 |
+
self.e_ident_version = unpack("B")
|
54 |
+
self.e_ident_osabi = unpack("B")
|
55 |
+
self.e_ident_abiversion = unpack("B")
|
56 |
+
self.e_ident_pad = file.read(7)
|
57 |
+
format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
|
58 |
+
format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
|
59 |
+
format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
|
60 |
+
format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
|
61 |
+
self.e_type = unpack(format_h)
|
62 |
+
self.e_machine = unpack(format_h)
|
63 |
+
self.e_version = unpack(format_i)
|
64 |
+
self.e_entry = unpack(format_p)
|
65 |
+
self.e_phoff = unpack(format_p)
|
66 |
+
self.e_shoff = unpack(format_p)
|
67 |
+
self.e_flags = unpack(format_i)
|
68 |
+
self.e_ehsize = unpack(format_h)
|
69 |
+
self.e_phentsize = unpack(format_h)
|
70 |
+
self.e_phnum = unpack(format_h)
|
71 |
+
self.e_shentsize = unpack(format_h)
|
72 |
+
self.e_shnum = unpack(format_h)
|
73 |
+
self.e_shstrndx = unpack(format_h)
|
74 |
+
|
75 |
+
|
76 |
+
def _get_elf_header() -> Optional[_ELFFileHeader]:
|
77 |
+
try:
|
78 |
+
with open(sys.executable, "rb") as f:
|
79 |
+
elf_header = _ELFFileHeader(f)
|
80 |
+
except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
|
81 |
+
return None
|
82 |
+
return elf_header
|
83 |
+
|
84 |
+
|
85 |
+
def _is_linux_armhf() -> bool:
|
86 |
+
# hard-float ABI can be detected from the ELF header of the running
|
87 |
+
# process
|
88 |
+
# https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
|
89 |
+
elf_header = _get_elf_header()
|
90 |
+
if elf_header is None:
|
91 |
+
return False
|
92 |
+
result = elf_header.e_ident_class == elf_header.ELFCLASS32
|
93 |
+
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
|
94 |
+
result &= elf_header.e_machine == elf_header.EM_ARM
|
95 |
+
result &= (
|
96 |
+
elf_header.e_flags & elf_header.EF_ARM_ABIMASK
|
97 |
+
) == elf_header.EF_ARM_ABI_VER5
|
98 |
+
result &= (
|
99 |
+
elf_header.e_flags & elf_header.EF_ARM_ABI_FLOAT_HARD
|
100 |
+
) == elf_header.EF_ARM_ABI_FLOAT_HARD
|
101 |
+
return result
|
102 |
+
|
103 |
+
|
104 |
+
def _is_linux_i686() -> bool:
|
105 |
+
elf_header = _get_elf_header()
|
106 |
+
if elf_header is None:
|
107 |
+
return False
|
108 |
+
result = elf_header.e_ident_class == elf_header.ELFCLASS32
|
109 |
+
result &= elf_header.e_ident_data == elf_header.ELFDATA2LSB
|
110 |
+
result &= elf_header.e_machine == elf_header.EM_386
|
111 |
+
return result
|
112 |
+
|
113 |
+
|
114 |
+
def _have_compatible_abi(arch: str) -> bool:
|
115 |
+
if arch == "armv7l":
|
116 |
+
return _is_linux_armhf()
|
117 |
+
if arch == "i686":
|
118 |
+
return _is_linux_i686()
|
119 |
+
return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
|
120 |
+
|
121 |
+
|
122 |
+
# If glibc ever changes its major version, we need to know what the last
|
123 |
+
# minor version was, so we can build the complete list of all versions.
|
124 |
+
# For now, guess what the highest minor version might be, assume it will
|
125 |
+
# be 50 for testing. Once this actually happens, update the dictionary
|
126 |
+
# with the actual value.
|
127 |
+
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)
|
128 |
+
|
129 |
+
|
130 |
+
class _GLibCVersion(NamedTuple):
|
131 |
+
major: int
|
132 |
+
minor: int
|
133 |
+
|
134 |
+
|
135 |
+
def _glibc_version_string_confstr() -> Optional[str]:
|
136 |
+
"""
|
137 |
+
Primary implementation of glibc_version_string using os.confstr.
|
138 |
+
"""
|
139 |
+
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
|
140 |
+
# to be broken or missing. This strategy is used in the standard library
|
141 |
+
# platform module.
|
142 |
+
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
|
143 |
+
try:
|
144 |
+
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
|
145 |
+
version_string = os.confstr("CS_GNU_LIBC_VERSION")
|
146 |
+
assert version_string is not None
|
147 |
+
_, version = version_string.split()
|
148 |
+
except (AssertionError, AttributeError, OSError, ValueError):
|
149 |
+
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
|
150 |
+
return None
|
151 |
+
return version
|
152 |
+
|
153 |
+
|
154 |
+
def _glibc_version_string_ctypes() -> Optional[str]:
|
155 |
+
"""
|
156 |
+
Fallback implementation of glibc_version_string using ctypes.
|
157 |
+
"""
|
158 |
+
try:
|
159 |
+
import ctypes
|
160 |
+
except ImportError:
|
161 |
+
return None
|
162 |
+
|
163 |
+
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
|
164 |
+
# manpage says, "If filename is NULL, then the returned handle is for the
|
165 |
+
# main program". This way we can let the linker do the work to figure out
|
166 |
+
# which libc our process is actually using.
|
167 |
+
#
|
168 |
+
# We must also handle the special case where the executable is not a
|
169 |
+
# dynamically linked executable. This can occur when using musl libc,
|
170 |
+
# for example. In this situation, dlopen() will error, leading to an
|
171 |
+
# OSError. Interestingly, at least in the case of musl, there is no
|
172 |
+
# errno set on the OSError. The single string argument used to construct
|
173 |
+
# OSError comes from libc itself and is therefore not portable to
|
174 |
+
# hard code here. In any case, failure to call dlopen() means we
|
175 |
+
# can proceed, so we bail on our attempt.
|
176 |
+
try:
|
177 |
+
process_namespace = ctypes.CDLL(None)
|
178 |
+
except OSError:
|
179 |
+
return None
|
180 |
+
|
181 |
+
try:
|
182 |
+
gnu_get_libc_version = process_namespace.gnu_get_libc_version
|
183 |
+
except AttributeError:
|
184 |
+
# Symbol doesn't exist -> therefore, we are not linked to
|
185 |
+
# glibc.
|
186 |
+
return None
|
187 |
+
|
188 |
+
# Call gnu_get_libc_version, which returns a string like "2.5"
|
189 |
+
gnu_get_libc_version.restype = ctypes.c_char_p
|
190 |
+
version_str: str = gnu_get_libc_version()
|
191 |
+
# py2 / py3 compatibility:
|
192 |
+
if not isinstance(version_str, str):
|
193 |
+
version_str = version_str.decode("ascii")
|
194 |
+
|
195 |
+
return version_str
|
196 |
+
|
197 |
+
|
198 |
+
def _glibc_version_string() -> Optional[str]:
|
199 |
+
"""Returns glibc version string, or None if not using glibc."""
|
200 |
+
return _glibc_version_string_confstr() or _glibc_version_string_ctypes()
|
201 |
+
|
202 |
+
|
203 |
+
def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
|
204 |
+
"""Parse glibc version.
|
205 |
+
|
206 |
+
We use a regexp instead of str.split because we want to discard any
|
207 |
+
random junk that might come after the minor version -- this might happen
|
208 |
+
in patched/forked versions of glibc (e.g. Linaro's version of glibc
|
209 |
+
uses version strings like "2.20-2014.11"). See gh-3588.
|
210 |
+
"""
|
211 |
+
m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
|
212 |
+
if not m:
|
213 |
+
warnings.warn(
|
214 |
+
"Expected glibc version with 2 components major.minor,"
|
215 |
+
" got: %s" % version_str,
|
216 |
+
RuntimeWarning,
|
217 |
+
)
|
218 |
+
return -1, -1
|
219 |
+
return int(m.group("major")), int(m.group("minor"))
|
220 |
+
|
221 |
+
|
222 |
+
@functools.lru_cache()
|
223 |
+
def _get_glibc_version() -> Tuple[int, int]:
|
224 |
+
version_str = _glibc_version_string()
|
225 |
+
if version_str is None:
|
226 |
+
return (-1, -1)
|
227 |
+
return _parse_glibc_version(version_str)
|
228 |
+
|
229 |
+
|
230 |
+
# From PEP 513, PEP 600
|
231 |
+
def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
|
232 |
+
sys_glibc = _get_glibc_version()
|
233 |
+
if sys_glibc < version:
|
234 |
+
return False
|
235 |
+
# Check for presence of _manylinux module.
|
236 |
+
try:
|
237 |
+
import _manylinux # noqa
|
238 |
+
except ImportError:
|
239 |
+
return True
|
240 |
+
if hasattr(_manylinux, "manylinux_compatible"):
|
241 |
+
result = _manylinux.manylinux_compatible(version[0], version[1], arch)
|
242 |
+
if result is not None:
|
243 |
+
return bool(result)
|
244 |
+
return True
|
245 |
+
if version == _GLibCVersion(2, 5):
|
246 |
+
if hasattr(_manylinux, "manylinux1_compatible"):
|
247 |
+
return bool(_manylinux.manylinux1_compatible)
|
248 |
+
if version == _GLibCVersion(2, 12):
|
249 |
+
if hasattr(_manylinux, "manylinux2010_compatible"):
|
250 |
+
return bool(_manylinux.manylinux2010_compatible)
|
251 |
+
if version == _GLibCVersion(2, 17):
|
252 |
+
if hasattr(_manylinux, "manylinux2014_compatible"):
|
253 |
+
return bool(_manylinux.manylinux2014_compatible)
|
254 |
+
return True
|
255 |
+
|
256 |
+
|
257 |
+
_LEGACY_MANYLINUX_MAP = {
|
258 |
+
# CentOS 7 w/ glibc 2.17 (PEP 599)
|
259 |
+
(2, 17): "manylinux2014",
|
260 |
+
# CentOS 6 w/ glibc 2.12 (PEP 571)
|
261 |
+
(2, 12): "manylinux2010",
|
262 |
+
# CentOS 5 w/ glibc 2.5 (PEP 513)
|
263 |
+
(2, 5): "manylinux1",
|
264 |
+
}
|
265 |
+
|
266 |
+
|
267 |
+
def platform_tags(linux: str, arch: str) -> Iterator[str]:
|
268 |
+
if not _have_compatible_abi(arch):
|
269 |
+
return
|
270 |
+
# Oldest glibc to be supported regardless of architecture is (2, 17).
|
271 |
+
too_old_glibc2 = _GLibCVersion(2, 16)
|
272 |
+
if arch in {"x86_64", "i686"}:
|
273 |
+
# On x86/i686 also oldest glibc to be supported is (2, 5).
|
274 |
+
too_old_glibc2 = _GLibCVersion(2, 4)
|
275 |
+
current_glibc = _GLibCVersion(*_get_glibc_version())
|
276 |
+
glibc_max_list = [current_glibc]
|
277 |
+
# We can assume compatibility across glibc major versions.
|
278 |
+
# https://sourceware.org/bugzilla/show_bug.cgi?id=24636
|
279 |
+
#
|
280 |
+
# Build a list of maximum glibc versions so that we can
|
281 |
+
# output the canonical list of all glibc from current_glibc
|
282 |
+
# down to too_old_glibc2, including all intermediary versions.
|
283 |
+
for glibc_major in range(current_glibc.major - 1, 1, -1):
|
284 |
+
glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
|
285 |
+
glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
|
286 |
+
for glibc_max in glibc_max_list:
|
287 |
+
if glibc_max.major == too_old_glibc2.major:
|
288 |
+
min_minor = too_old_glibc2.minor
|
289 |
+
else:
|
290 |
+
# For other glibc major versions oldest supported is (x, 0).
|
291 |
+
min_minor = -1
|
292 |
+
for glibc_minor in range(glibc_max.minor, min_minor, -1):
|
293 |
+
glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
|
294 |
+
tag = "manylinux_{}_{}".format(*glibc_version)
|
295 |
+
if _is_compatible(tag, arch, glibc_version):
|
296 |
+
yield linux.replace("linux", tag)
|
297 |
+
# Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
|
298 |
+
if glibc_version in _LEGACY_MANYLINUX_MAP:
|
299 |
+
legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
|
300 |
+
if _is_compatible(legacy_tag, arch, glibc_version):
|
301 |
+
yield linux.replace("linux", legacy_tag)
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/_musllinux.py
ADDED
@@ -0,0 +1,136 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""PEP 656 support.
|
2 |
+
|
3 |
+
This module implements logic to detect if the currently running Python is
|
4 |
+
linked against musl, and what musl version is used.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import contextlib
|
8 |
+
import functools
|
9 |
+
import operator
|
10 |
+
import os
|
11 |
+
import re
|
12 |
+
import struct
|
13 |
+
import subprocess
|
14 |
+
import sys
|
15 |
+
from typing import IO, Iterator, NamedTuple, Optional, Tuple
|
16 |
+
|
17 |
+
|
18 |
+
def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
|
19 |
+
return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
|
20 |
+
|
21 |
+
|
22 |
+
def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
|
23 |
+
"""Detect musl libc location by parsing the Python executable.
|
24 |
+
|
25 |
+
Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
|
26 |
+
ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
|
27 |
+
"""
|
28 |
+
f.seek(0)
|
29 |
+
try:
|
30 |
+
ident = _read_unpacked(f, "16B")
|
31 |
+
except struct.error:
|
32 |
+
return None
|
33 |
+
if ident[:4] != tuple(b"\x7fELF"): # Invalid magic, not ELF.
|
34 |
+
return None
|
35 |
+
f.seek(struct.calcsize("HHI"), 1) # Skip file type, machine, and version.
|
36 |
+
|
37 |
+
try:
|
38 |
+
# e_fmt: Format for program header.
|
39 |
+
# p_fmt: Format for section header.
|
40 |
+
# p_idx: Indexes to find p_type, p_offset, and p_filesz.
|
41 |
+
e_fmt, p_fmt, p_idx = {
|
42 |
+
1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)), # 32-bit.
|
43 |
+
2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)), # 64-bit.
|
44 |
+
}[ident[4]]
|
45 |
+
except KeyError:
|
46 |
+
return None
|
47 |
+
else:
|
48 |
+
p_get = operator.itemgetter(*p_idx)
|
49 |
+
|
50 |
+
# Find the interpreter section and return its content.
|
51 |
+
try:
|
52 |
+
_, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
|
53 |
+
except struct.error:
|
54 |
+
return None
|
55 |
+
for i in range(e_phnum + 1):
|
56 |
+
f.seek(e_phoff + e_phentsize * i)
|
57 |
+
try:
|
58 |
+
p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
|
59 |
+
except struct.error:
|
60 |
+
return None
|
61 |
+
if p_type != 3: # Not PT_INTERP.
|
62 |
+
continue
|
63 |
+
f.seek(p_offset)
|
64 |
+
interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
|
65 |
+
if "musl" not in interpreter:
|
66 |
+
return None
|
67 |
+
return interpreter
|
68 |
+
return None
|
69 |
+
|
70 |
+
|
71 |
+
class _MuslVersion(NamedTuple):
|
72 |
+
major: int
|
73 |
+
minor: int
|
74 |
+
|
75 |
+
|
76 |
+
def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
|
77 |
+
lines = [n for n in (n.strip() for n in output.splitlines()) if n]
|
78 |
+
if len(lines) < 2 or lines[0][:4] != "musl":
|
79 |
+
return None
|
80 |
+
m = re.match(r"Version (\d+)\.(\d+)", lines[1])
|
81 |
+
if not m:
|
82 |
+
return None
|
83 |
+
return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
|
84 |
+
|
85 |
+
|
86 |
+
@functools.lru_cache()
|
87 |
+
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
|
88 |
+
"""Detect currently-running musl runtime version.
|
89 |
+
|
90 |
+
This is done by checking the specified executable's dynamic linking
|
91 |
+
information, and invoking the loader to parse its output for a version
|
92 |
+
string. If the loader is musl, the output would be something like::
|
93 |
+
|
94 |
+
musl libc (x86_64)
|
95 |
+
Version 1.2.2
|
96 |
+
Dynamic Program Loader
|
97 |
+
"""
|
98 |
+
with contextlib.ExitStack() as stack:
|
99 |
+
try:
|
100 |
+
f = stack.enter_context(open(executable, "rb"))
|
101 |
+
except OSError:
|
102 |
+
return None
|
103 |
+
ld = _parse_ld_musl_from_elf(f)
|
104 |
+
if not ld:
|
105 |
+
return None
|
106 |
+
proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
|
107 |
+
return _parse_musl_version(proc.stderr)
|
108 |
+
|
109 |
+
|
110 |
+
def platform_tags(arch: str) -> Iterator[str]:
|
111 |
+
"""Generate musllinux tags compatible to the current platform.
|
112 |
+
|
113 |
+
:param arch: Should be the part of platform tag after the ``linux_``
|
114 |
+
prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
|
115 |
+
prerequisite for the current platform to be musllinux-compatible.
|
116 |
+
|
117 |
+
:returns: An iterator of compatible musllinux tags.
|
118 |
+
"""
|
119 |
+
sys_musl = _get_musl_version(sys.executable)
|
120 |
+
if sys_musl is None: # Python not dynamically linked against musl.
|
121 |
+
return
|
122 |
+
for minor in range(sys_musl.minor, -1, -1):
|
123 |
+
yield f"musllinux_{sys_musl.major}_{minor}_{arch}"
|
124 |
+
|
125 |
+
|
126 |
+
if __name__ == "__main__": # pragma: no cover
|
127 |
+
import sysconfig
|
128 |
+
|
129 |
+
plat = sysconfig.get_platform()
|
130 |
+
assert plat.startswith("linux-"), "not linux"
|
131 |
+
|
132 |
+
print("plat:", plat)
|
133 |
+
print("musl:", _get_musl_version(sys.executable))
|
134 |
+
print("tags:", end=" ")
|
135 |
+
for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
|
136 |
+
print(t, end="\n ")
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/_structures.py
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
|
6 |
+
class InfinityType:
|
7 |
+
def __repr__(self) -> str:
|
8 |
+
return "Infinity"
|
9 |
+
|
10 |
+
def __hash__(self) -> int:
|
11 |
+
return hash(repr(self))
|
12 |
+
|
13 |
+
def __lt__(self, other: object) -> bool:
|
14 |
+
return False
|
15 |
+
|
16 |
+
def __le__(self, other: object) -> bool:
|
17 |
+
return False
|
18 |
+
|
19 |
+
def __eq__(self, other: object) -> bool:
|
20 |
+
return isinstance(other, self.__class__)
|
21 |
+
|
22 |
+
def __gt__(self, other: object) -> bool:
|
23 |
+
return True
|
24 |
+
|
25 |
+
def __ge__(self, other: object) -> bool:
|
26 |
+
return True
|
27 |
+
|
28 |
+
def __neg__(self: object) -> "NegativeInfinityType":
|
29 |
+
return NegativeInfinity
|
30 |
+
|
31 |
+
|
32 |
+
Infinity = InfinityType()
|
33 |
+
|
34 |
+
|
35 |
+
class NegativeInfinityType:
|
36 |
+
def __repr__(self) -> str:
|
37 |
+
return "-Infinity"
|
38 |
+
|
39 |
+
def __hash__(self) -> int:
|
40 |
+
return hash(repr(self))
|
41 |
+
|
42 |
+
def __lt__(self, other: object) -> bool:
|
43 |
+
return True
|
44 |
+
|
45 |
+
def __le__(self, other: object) -> bool:
|
46 |
+
return True
|
47 |
+
|
48 |
+
def __eq__(self, other: object) -> bool:
|
49 |
+
return isinstance(other, self.__class__)
|
50 |
+
|
51 |
+
def __gt__(self, other: object) -> bool:
|
52 |
+
return False
|
53 |
+
|
54 |
+
def __ge__(self, other: object) -> bool:
|
55 |
+
return False
|
56 |
+
|
57 |
+
def __neg__(self: object) -> InfinityType:
|
58 |
+
return Infinity
|
59 |
+
|
60 |
+
|
61 |
+
NegativeInfinity = NegativeInfinityType()
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/specifiers.py
ADDED
@@ -0,0 +1,802 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
import abc
|
6 |
+
import functools
|
7 |
+
import itertools
|
8 |
+
import re
|
9 |
+
import warnings
|
10 |
+
from typing import (
|
11 |
+
Callable,
|
12 |
+
Dict,
|
13 |
+
Iterable,
|
14 |
+
Iterator,
|
15 |
+
List,
|
16 |
+
Optional,
|
17 |
+
Pattern,
|
18 |
+
Set,
|
19 |
+
Tuple,
|
20 |
+
TypeVar,
|
21 |
+
Union,
|
22 |
+
)
|
23 |
+
|
24 |
+
from .utils import canonicalize_version
|
25 |
+
from .version import LegacyVersion, Version, parse
|
26 |
+
|
27 |
+
# Type aliases used throughout this module.
# ParsedVersion: a version object produced by ``parse`` (PEP 440 or legacy).
ParsedVersion = Union[Version, LegacyVersion]
# UnparsedVersion: accepted anywhere a version may still be a raw string.
UnparsedVersion = Union[Version, LegacyVersion, str]
# VersionTypeVar: preserves the caller's element type through ``filter``.
VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
# CallableOperator: signature shared by every ``_compare_*`` method.
CallableOperator = Callable[[ParsedVersion, str], bool]
|
31 |
+
|
32 |
+
|
33 |
+
class InvalidSpecifier(ValueError):
    """
    An invalid specifier was found, users should refer to PEP 440.

    Raised by ``_IndividualSpecifier.__init__`` when the specifier string
    does not match the expected ``<operator><version>`` grammar.
    """
|
37 |
+
|
38 |
+
|
39 |
+
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface implemented by both a single specifier clause
    (``_IndividualSpecifier``) and a set of clauses (``SpecifierSet``)."""

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    # NOTE: ``abc.abstractproperty`` has been deprecated since Python 3.3;
    # stacking ``@property`` with ``@abc.abstractmethod`` is the modern,
    # behaviorally-equivalent spelling.
    @property
    @abc.abstractmethod
    def prereleases(self) -> Optional[bool]:
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
|
88 |
+
|
89 |
+
|
90 |
+
class _IndividualSpecifier(BaseSpecifier):
    """Base class for a single specifier clause such as ``">=1.0"``.

    Concrete subclasses (``Specifier``, ``LegacySpecifier``) supply the
    ``_regex`` used to parse the clause and the ``_operators`` table that
    maps operator text to a ``_compare_*`` method-name suffix.
    """

    # Operator text -> method-name suffix; consumed by _get_operator().
    _operators: Dict[str, str] = {}
    # Compiled pattern with "operator" and "version" named groups.
    _regex: Pattern[str]

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        """Parse *spec*; raise ``InvalidSpecifier`` when it does not match."""
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

        self._spec: Tuple[str, str] = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self) -> str:
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"

    def __str__(self) -> str:
        return "{}{}".format(*self._spec)

    @property
    def _canonical_spec(self) -> Tuple[str, str]:
        # Canonical form used for hashing/equality so that e.g. "==1.0"
        # and "==1.0.0" compare equal.
        return self._spec[0], canonicalize_version(self._spec[1])

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        # A plain string is parsed with this same class before comparing.
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def _get_operator(self, op: str) -> CallableOperator:
        """Resolve an operator string (e.g. ``">="``) to its compare method."""
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable

    def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
        # Accept raw strings anywhere a version object is expected.
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self) -> str:
        # The operator half of the clause, e.g. ">=".
        return self._spec[0]

    @property
    def version(self) -> str:
        # The version half of the clause, e.g. "1.0".
        return self._spec[1]

    @property
    def prereleases(self) -> Optional[bool]:
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: str) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return True when *item* satisfies this clause."""

        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")
        normalized_item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if normalized_item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        operator_callable: CallableOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """Yield members of *iterable* that satisfy this clause.

        Pre-releases that matched are yielded only as a fallback when no
        final release matched at all (and prereleases were not enabled).
        """

        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
|
225 |
+
|
226 |
+
|
227 |
+
class LegacySpecifier(_IndividualSpecifier):
    """A single clause evaluated with pre-PEP-440 ("legacy") ordering.

    Deprecated: constructing one emits a ``DeprecationWarning``; every
    prospective version is coerced to ``LegacyVersion`` before comparing.
    """

    _regex_str = r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        """Parse the clause, then warn that legacy specifiers are deprecated."""
        super().__init__(spec, prereleases)

        warnings.warn(
            "Creating a LegacyVersion has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
        )

    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
        # Force everything (including valid PEP 440 versions) into the
        # legacy total ordering so comparisons below stay consistent.
        if isinstance(version, LegacyVersion):
            return version
        return LegacyVersion(str(version))

    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        benchmark = self._coerce_version(spec)
        return prospective == benchmark

    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        benchmark = self._coerce_version(spec)
        return prospective != benchmark

    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        benchmark = self._coerce_version(spec)
        return prospective <= benchmark

    def _compare_greater_than_equal(
        self, prospective: LegacyVersion, spec: str
    ) -> bool:
        benchmark = self._coerce_version(spec)
        return prospective >= benchmark

    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
        benchmark = self._coerce_version(spec)
        return prospective < benchmark

    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
        benchmark = self._coerce_version(spec)
        return prospective > benchmark
|
285 |
+
|
286 |
+
|
287 |
+
def _require_version_compare(
    fn: Callable[["Specifier", ParsedVersion, str], bool]
) -> Callable[["Specifier", ParsedVersion, str], bool]:
    """Decorator for ``Specifier._compare_*`` methods.

    The wrapped comparison is only invoked for true PEP 440 ``Version``
    instances; any other prospective version (e.g. a ``LegacyVersion``)
    simply compares False.
    """

    @functools.wraps(fn)
    def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
        # Short-circuit: non-Version prospectives never satisfy the clause.
        return isinstance(prospective, Version) and fn(self, prospective, spec)

    return wrapped
|
297 |
+
|
298 |
+
|
299 |
+
class Specifier(_IndividualSpecifier):
    """A single PEP 440 specifier clause, e.g. ``"~=1.4.2"``.

    Supports the operators ``~= == != <= >= < > ===`` with the comparison
    semantics defined by PEP 440.
    """

    _regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)          # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:
        """``~=``: equivalent to ``>= spec`` combined with ``== prefix.*``."""

        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments.
        prefix = ".".join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )

    @_require_version_compare
    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        """``==``: exact match, with ``.*`` prefix-matching support."""

        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            split_spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            split_prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = split_prospective[: len(split_spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            padded_spec, padded_prospective = _pad_version(
                split_spec, shortened_prospective
            )

            return padded_prospective == padded_spec
        else:
            # Convert our spec string into a Version
            spec_version = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = Version(prospective.public)

            return prospective == spec_version

    @_require_version_compare
    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        """``!=``: the negation of ``==``."""
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        """``<=``: compare ignoring the prospective version's local segment."""

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(
        self, prospective: ParsedVersion, spec: str
    ) -> bool:
        """``>=``: compare ignoring the prospective version's local segment."""

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
        """``<``: exclusive; a pre-release of the spec's own version never
        matches unless the spec itself is a pre-release."""

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:
        """``>``: exclusive; post-releases and local versions of the spec's
        own version never match."""

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        """``===``: case-insensitive string identity, no PEP 440 semantics."""
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self) -> bool:
        """Whether this clause admits pre-release versions.

        True when explicitly overridden, or when an inclusive operator pins
        a version that is itself a pre-release.
        """

        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value
|
580 |
+
|
581 |
+
|
582 |
+
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
|
583 |
+
|
584 |
+
|
585 |
+
def _version_split(version: str) -> List[str]:
|
586 |
+
result: List[str] = []
|
587 |
+
for item in version.split("."):
|
588 |
+
match = _prefix_regex.search(item)
|
589 |
+
if match:
|
590 |
+
result.extend(match.groups())
|
591 |
+
else:
|
592 |
+
result.append(item)
|
593 |
+
return result
|
594 |
+
|
595 |
+
|
596 |
+
def _is_not_suffix(segment: str) -> bool:
|
597 |
+
return not any(
|
598 |
+
segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
|
599 |
+
)
|
600 |
+
|
601 |
+
|
602 |
+
def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
|
603 |
+
left_split, right_split = [], []
|
604 |
+
|
605 |
+
# Get the release segment of our versions
|
606 |
+
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
|
607 |
+
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
|
608 |
+
|
609 |
+
# Get the rest of our versions
|
610 |
+
left_split.append(left[len(left_split[0]) :])
|
611 |
+
right_split.append(right[len(right_split[0]) :])
|
612 |
+
|
613 |
+
# Insert our padding
|
614 |
+
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
|
615 |
+
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
|
616 |
+
|
617 |
+
return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
|
618 |
+
|
619 |
+
|
620 |
+
class SpecifierSet(BaseSpecifier):
    """A comma-separated set of specifier clauses, combined with logical AND."""

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Parse *specifiers* (e.g. ``">=1.0,!=1.3"``) into individual clauses."""

        # Split on , to break each individual specifier into it's own item, and
        # strip each item to remove leading/trailing whitespace.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parsed each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed: Set[_IndividualSpecifier] = set()
        for specifier in split_specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self) -> str:
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        # Sorted so the string form is deterministic.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Combine two sets: the union of clauses (intersection of versions)."""
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # A prereleases override survives the combination only when the two
        # sides do not contradict each other.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        # NOTE: equality deliberately ignores the prereleases override.
        if isinstance(other, (str, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        return len(self._specs)

    def __iter__(self) -> Iterator[_IndividualSpecifier]:
        return iter(self._specs)

    @property
    def prereleases(self) -> Optional[bool]:
        """Whether this set admits pre-releases (None when undecidable)."""

        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: UnparsedVersion) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return True when *item* satisfies every clause in this set."""

        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """Filter *iterable* down to the versions satisfying every clause."""

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered: List[VersionTypeVar] = []
            found_prereleases: List[VersionTypeVar] = []

            item: UnparsedVersion
            parsed_version: Union[Version, LegacyVersion]

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/utils.py
ADDED
@@ -0,0 +1,136 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
import re
|
6 |
+
from typing import FrozenSet, NewType, Tuple, Union, cast
|
7 |
+
|
8 |
+
from .tags import Tag, parse_tag
|
9 |
+
from .version import InvalidVersion, Version
|
10 |
+
|
11 |
+
BuildTag = Union[Tuple[()], Tuple[int, str]]
|
12 |
+
NormalizedName = NewType("NormalizedName", str)
|
13 |
+
|
14 |
+
|
15 |
+
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found, users should refer to PEP 427.

    Raised by :func:`parse_wheel_filename` when a filename does not follow
    the wheel (binary distribution) naming convention.
    """
|
19 |
+
|
20 |
+
|
21 |
+
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found, users should refer to the packaging user guide.

    Raised by :func:`parse_sdist_filename` when a filename does not look
    like ``<name>-<version>.tar.gz`` or ``<name>-<version>.zip``.
    """
|
25 |
+
|
26 |
+
|
27 |
+
_canonicalize_regex = re.compile(r"[-_.]+")
|
28 |
+
# PEP 427: The build number must start with a digit.
|
29 |
+
_build_tag_regex = re.compile(r"(\d+)(.*)")
|
30 |
+
|
31 |
+
|
32 |
+
def canonicalize_name(name: str) -> NormalizedName:
    """Normalize a project name per PEP 503.

    Runs of ``-``, ``_`` and ``.`` collapse into a single ``-`` and the
    result is lower-cased, e.g. ``Foo_Bar.baz`` -> ``foo-bar-baz``.
    """
    collapsed = _canonicalize_regex.sub("-", name)
    return cast(NormalizedName, collapsed.lower())
|
36 |
+
|
37 |
+
|
38 |
+
def canonicalize_version(version: Union[Version, str]) -> str:
    """
    Return the normalized string form of *version*.

    Mirrors ``Version.__str__`` except that trailing ``.0`` components of
    the release segment are stripped (so ``1.0.0`` renders as ``1``).
    Strings that do not parse as PEP 440 versions are returned unchanged.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    pieces = []

    # Epoch
    if parsed.epoch != 0:
        pieces.append(f"{parsed.epoch}!")

    # Release segment, with trailing '.0's stripped to normalize
    release = ".".join(str(segment) for segment in parsed.release)
    pieces.append(re.sub(r"(\.0)+$", "", release))

    # Pre-release
    if parsed.pre is not None:
        pieces.append("".join(str(part) for part in parsed.pre))

    # Post-release
    if parsed.post is not None:
        pieces.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        pieces.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        pieces.append(f"+{parsed.local}")

    return "".join(pieces)
|
79 |
+
|
80 |
+
|
81 |
+
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """Parse a PEP 427 wheel filename.

    Returns ``(name, version, build_tag, tags)`` where ``name`` is the
    canonicalized project name, ``build_tag`` is ``()`` when absent, and
    ``tags`` is the frozenset of compatibility tags.

    Raises InvalidWheelFilename when the filename violates the wheel
    naming convention.
    """
    if not filename.endswith(".whl"):
        # Include the offending filename in the message (the f-strings here
        # had lost their placeholders, making the errors undiagnosable).
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    # Split only enough times to leave the compressed tag triple joined in
    # the final field (and the optional build tag in its own field).
    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)
    version = Version(parts[1])
    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
|
115 |
+
|
116 |
+
|
117 |
+
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """Parse an sdist filename ``<name>-<version>.tar.gz`` (or ``.zip``).

    Returns the canonicalized project name and its parsed ``Version``.
    Raises InvalidSdistFilename when the extension or structure is wrong.
    """
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        # Include the offending filename in the message (the f-strings here
        # had lost their placeholders, making the errors undiagnosable).
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)
    version = Version(version_part)
    return (name, version)
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/packaging/version.py
ADDED
@@ -0,0 +1,504 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# This file is dual licensed under the terms of the Apache License, Version
|
2 |
+
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
|
3 |
+
# for complete details.
|
4 |
+
|
5 |
+
import collections
|
6 |
+
import itertools
|
7 |
+
import re
|
8 |
+
import warnings
|
9 |
+
from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
|
10 |
+
|
11 |
+
from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
|
12 |
+
|
13 |
+
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
|
14 |
+
|
15 |
+
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
|
16 |
+
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
|
17 |
+
SubLocalType = Union[InfiniteTypes, int, str]
|
18 |
+
LocalType = Union[
|
19 |
+
NegativeInfinityType,
|
20 |
+
Tuple[
|
21 |
+
Union[
|
22 |
+
SubLocalType,
|
23 |
+
Tuple[SubLocalType, str],
|
24 |
+
Tuple[NegativeInfinityType, SubLocalType],
|
25 |
+
],
|
26 |
+
...,
|
27 |
+
],
|
28 |
+
]
|
29 |
+
CmpKey = Tuple[
|
30 |
+
int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
|
31 |
+
]
|
32 |
+
LegacyCmpKey = Tuple[int, Tuple[str, ...]]
|
33 |
+
VersionComparisonMethod = Callable[
|
34 |
+
[Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
|
35 |
+
]
|
36 |
+
|
37 |
+
_Version = collections.namedtuple(
|
38 |
+
"_Version", ["epoch", "release", "dev", "pre", "post", "local"]
|
39 |
+
)
|
40 |
+
|
41 |
+
|
42 |
+
def parse(version: str) -> Union["LegacyVersion", "Version"]:
    """
    Parse *version* into a :class:`Version` when it is a valid PEP 440
    version, falling back to a :class:`LegacyVersion` otherwise.
    """
    try:
        parsed: Union["LegacyVersion", "Version"] = Version(version)
    except InvalidVersion:
        parsed = LegacyVersion(version)
    return parsed
|
52 |
+
|
53 |
+
|
54 |
+
class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.

    Raised by :class:`Version` when a string does not match
    ``VERSION_PATTERN``; :func:`parse` catches it to fall back to
    :class:`LegacyVersion`.
    """
|
58 |
+
|
59 |
+
|
60 |
+
class _BaseVersion:
    """Shared ordering/hash implementation for Version and LegacyVersion.

    Subclasses populate ``_key`` with a tuple encoding their ordering;
    every comparison and ``hash`` delegates to it.
    """

    _key: Union[CmpKey, LegacyCmpKey]

    def __hash__(self) -> int:
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key < other._key
        return NotImplemented

    def __le__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key <= other._key
        return NotImplemented

    def __eq__(self, other: object) -> bool:
        if isinstance(other, _BaseVersion):
            return self._key == other._key
        return NotImplemented

    def __ge__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key >= other._key
        return NotImplemented

    def __gt__(self, other: "_BaseVersion") -> bool:
        if isinstance(other, _BaseVersion):
            return self._key > other._key
        return NotImplemented

    def __ne__(self, other: object) -> bool:
        if isinstance(other, _BaseVersion):
            return self._key != other._key
        return NotImplemented
|
104 |
+
|
105 |
+
|
106 |
+
class LegacyVersion(_BaseVersion):
    """A pre-PEP 440 version, ordered by setuptools' legacy scheme.

    Deprecated: constructing one emits a DeprecationWarning.
    """

    def __init__(self, version: str) -> None:
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

        warnings.warn(
            "Creating a LegacyVersion has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
        )

    def __str__(self) -> str:
        return self._version

    def __repr__(self) -> str:
        return f"<LegacyVersion('{self}')>"

    @property
    def public(self) -> str:
        # Legacy versions have no local segment, so the whole string is public.
        return self._version

    @property
    def base_version(self) -> str:
        return self._version

    @property
    def epoch(self) -> int:
        # -1 sorts every legacy version before any PEP 440 version,
        # whose epoch is always >= 0 (see _legacy_cmpkey).
        return -1

    # Legacy versions expose no parsed components: each of the following
    # is None / False regardless of the input string.
    @property
    def release(self) -> None:
        return None

    @property
    def pre(self) -> None:
        return None

    @property
    def post(self) -> None:
        return None

    @property
    def dev(self) -> None:
        return None

    @property
    def local(self) -> None:
        return None

    @property
    def is_prerelease(self) -> bool:
        return False

    @property
    def is_postrelease(self) -> bool:
        return False

    @property
    def is_devrelease(self) -> bool:
        return False
|
166 |
+
|
167 |
+
|
168 |
+
_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
|
169 |
+
|
170 |
+
_legacy_version_replacement_map = {
|
171 |
+
"pre": "c",
|
172 |
+
"preview": "c",
|
173 |
+
"-": "final-",
|
174 |
+
"rc": "c",
|
175 |
+
"dev": "@",
|
176 |
+
}
|
177 |
+
|
178 |
+
|
179 |
+
def _parse_version_parts(s: str) -> Iterator[str]:
    """Yield string-comparable chunks of a legacy version string.

    Numeric chunks are zero-padded to 8 digits so they compare correctly
    as strings; other chunks are prefixed with ``*`` so they sort before
    numbers.  A trailing ``*final`` marker is always emitted.
    """
    for raw in _legacy_version_component_re.split(s):
        token = _legacy_version_replacement_map.get(raw, raw)

        # Drop empty split results and plain separators.
        if not token or token == ".":
            continue

        if token[:1] in "0123456789":
            # pad for numeric comparison
            yield token.zfill(8)
        else:
            yield "*" + token

    # ensure that alpha/beta/candidate are before final
    yield "*final"
|
194 |
+
|
195 |
+
|
196 |
+
def _legacy_cmpkey(version: str) -> LegacyCmpKey:
    """Build the sort key for a legacy (non PEP 440) version string."""

    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts: List[str] = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)

    return epoch, tuple(parts)
|
221 |
+
|
222 |
+
|
223 |
+
# Deliberately not anchored to the start and end of the string, to make it
|
224 |
+
# easier for 3rd party code to reuse
|
225 |
+
VERSION_PATTERN = r"""
|
226 |
+
v?
|
227 |
+
(?:
|
228 |
+
(?:(?P<epoch>[0-9]+)!)? # epoch
|
229 |
+
(?P<release>[0-9]+(?:\.[0-9]+)*) # release segment
|
230 |
+
(?P<pre> # pre-release
|
231 |
+
[-_\.]?
|
232 |
+
(?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
|
233 |
+
[-_\.]?
|
234 |
+
(?P<pre_n>[0-9]+)?
|
235 |
+
)?
|
236 |
+
(?P<post> # post release
|
237 |
+
(?:-(?P<post_n1>[0-9]+))
|
238 |
+
|
|
239 |
+
(?:
|
240 |
+
[-_\.]?
|
241 |
+
(?P<post_l>post|rev|r)
|
242 |
+
[-_\.]?
|
243 |
+
(?P<post_n2>[0-9]+)?
|
244 |
+
)
|
245 |
+
)?
|
246 |
+
(?P<dev> # dev release
|
247 |
+
[-_\.]?
|
248 |
+
(?P<dev_l>dev)
|
249 |
+
[-_\.]?
|
250 |
+
(?P<dev_n>[0-9]+)?
|
251 |
+
)?
|
252 |
+
)
|
253 |
+
(?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
|
254 |
+
"""
|
255 |
+
|
256 |
+
|
257 |
+
class Version(_BaseVersion):
    """A PEP 440 compliant version, parsed and normalized from a string."""

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version: str) -> None:
        """Parse *version*; raises :class:`InvalidVersion` if not PEP 440."""

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        """Render the canonical (normalized) string form."""
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        _epoch: int = self._version.epoch
        return _epoch

    @property
    def release(self) -> Tuple[int, ...]:
        _release: Tuple[int, ...] = self._version.release
        return _release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        _pre: Optional[Tuple[str, int]] = self._version.pre
        return _pre

    @property
    def post(self) -> Optional[int]:
        # Only the numeral is exposed; the stored value is (letter, number).
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        # Only the numeral is exposed; the stored value is (letter, number).
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        # Everything before the '+' local-version separator.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        # Epoch and release only -- no pre/post/dev/local markers.
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        return self.dev is not None

    @property
    def major(self) -> int:
        # First release component, defaulting to 0 when absent.
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        return self.release[2] if len(self.release) >= 3 else 0
|
391 |
+
|
392 |
+
|
393 |
+
def _parse_letter_version(
|
394 |
+
letter: str, number: Union[str, bytes, SupportsInt]
|
395 |
+
) -> Optional[Tuple[str, int]]:
|
396 |
+
|
397 |
+
if letter:
|
398 |
+
# We consider there to be an implicit 0 in a pre-release if there is
|
399 |
+
# not a numeral associated with it.
|
400 |
+
if number is None:
|
401 |
+
number = 0
|
402 |
+
|
403 |
+
# We normalize any letters to their lower case form
|
404 |
+
letter = letter.lower()
|
405 |
+
|
406 |
+
# We consider some words to be alternate spellings of other words and
|
407 |
+
# in those cases we want to normalize the spellings to our preferred
|
408 |
+
# spelling.
|
409 |
+
if letter == "alpha":
|
410 |
+
letter = "a"
|
411 |
+
elif letter == "beta":
|
412 |
+
letter = "b"
|
413 |
+
elif letter in ["c", "pre", "preview"]:
|
414 |
+
letter = "rc"
|
415 |
+
elif letter in ["rev", "r"]:
|
416 |
+
letter = "post"
|
417 |
+
|
418 |
+
return letter, int(number)
|
419 |
+
if not letter and number:
|
420 |
+
# We assume if we are given a number, but we are not given a letter
|
421 |
+
# then this is using the implicit post release syntax (e.g. 1.0-1)
|
422 |
+
letter = "post"
|
423 |
+
|
424 |
+
return letter, int(number)
|
425 |
+
|
426 |
+
return None
|
427 |
+
|
428 |
+
|
429 |
+
_local_version_separators = re.compile(r"[\._-]")
|
430 |
+
|
431 |
+
|
432 |
+
def _parse_local_version(local: str) -> Optional[LocalType]:
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").

    Separators may be any of ``.``, ``_`` or ``-``; purely numeric
    components become ints so they compare numerically.
    """
    if local is None:
        return None
    components = _local_version_separators.split(local)
    return tuple(
        int(part) if part.isdigit() else part.lower() for part in components
    )
|
442 |
+
|
443 |
+
|
444 |
+
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[Tuple[SubLocalType]],
) -> CmpKey:
    """Build the tuple used to totally order PEP 440 versions.

    Absent segments are replaced with Infinity/NegativeInfinity sentinels
    so that plain tuple comparison implements the PEP 440 ordering rules.
    """

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: PrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: PrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: PrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: LocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-310.pyc
ADDED
Binary file (296 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__init__.py
ADDED
@@ -0,0 +1,242 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Extensions to the 'distutils' for large or complex distributions"""
|
2 |
+
|
3 |
+
from fnmatch import fnmatchcase
|
4 |
+
import functools
|
5 |
+
import os
|
6 |
+
import re
|
7 |
+
|
8 |
+
import _distutils_hack.override # noqa: F401
|
9 |
+
|
10 |
+
import distutils.core
|
11 |
+
from distutils.errors import DistutilsOptionError
|
12 |
+
from distutils.util import convert_path
|
13 |
+
|
14 |
+
from ._deprecation_warning import SetuptoolsDeprecationWarning
|
15 |
+
|
16 |
+
import setuptools.version
|
17 |
+
from setuptools.extension import Extension
|
18 |
+
from setuptools.dist import Distribution
|
19 |
+
from setuptools.depends import Require
|
20 |
+
from . import monkey
|
21 |
+
|
22 |
+
|
23 |
+
__all__ = [
|
24 |
+
'setup',
|
25 |
+
'Distribution',
|
26 |
+
'Command',
|
27 |
+
'Extension',
|
28 |
+
'Require',
|
29 |
+
'SetuptoolsDeprecationWarning',
|
30 |
+
'find_packages',
|
31 |
+
'find_namespace_packages',
|
32 |
+
]
|
33 |
+
|
34 |
+
__version__ = setuptools.version.__version__
|
35 |
+
|
36 |
+
bootstrap_install_from = None
|
37 |
+
|
38 |
+
|
39 |
+
class PackageFinder:
    """
    Generate a list of all Python packages found within a directory
    """

    @classmethod
    def find(cls, where='.', exclude=(), include=('*',)):
        """Return a list all Python packages found within directory 'where'

        'where' is the root directory which will be searched for packages. It
        should be supplied as a "cross-platform" (i.e. URL-style) path; it will
        be converted to the appropriate local path syntax.

        'exclude' is a sequence of package names to exclude; '*' can be used
        as a wildcard in the names, such that 'foo.*' will exclude all
        subpackages of 'foo' (but not 'foo' itself).

        'include' is a sequence of package names to include. If it's
        specified, only the named packages will be included. If it's not
        specified, all found packages will be included. 'include' can contain
        shell style wildcard patterns just like 'exclude'.
        """

        return list(
            cls._find_packages_iter(
                convert_path(where),
                # 'ez_setup' and '__pycache__' trees are never packages.
                cls._build_filter('ez_setup', '*__pycache__', *exclude),
                cls._build_filter(*include),
            )
        )

    @classmethod
    def _find_packages_iter(cls, where, exclude, include):
        """
        All the packages found in 'where' that pass the 'include' filter, but
        not the 'exclude' filter.
        """
        for root, dirs, files in os.walk(where, followlinks=True):
            # Copy dirs to iterate over it, then empty dirs.
            all_dirs = dirs[:]
            dirs[:] = []

            for dir in all_dirs:
                full_path = os.path.join(root, dir)
                rel_path = os.path.relpath(full_path, where)
                package = rel_path.replace(os.path.sep, '.')

                # Skip directory trees that are not valid packages
                if '.' in dir or not cls._looks_like_package(full_path):
                    continue

                # Should this package be included?
                if include(package) and not exclude(package):
                    yield package

                # Keep searching subdirectories, as there may be more packages
                # down there, even if the parent was excluded.
                dirs.append(dir)

    @staticmethod
    def _looks_like_package(path):
        """Does a directory look like a package?"""
        return os.path.isfile(os.path.join(path, '__init__.py'))

    @staticmethod
    def _build_filter(*patterns):
        """
        Given a list of patterns, return a callable that will be true only if
        the input matches at least one of the patterns.
        """
        return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns)
|
110 |
+
|
111 |
+
|
112 |
+
class PEP420PackageFinder(PackageFinder):
    """PackageFinder variant that also accepts PEP 420 namespace packages."""

    @staticmethod
    def _looks_like_package(path):
        # Namespace packages need no __init__.py marker, so every
        # directory qualifies.
        return True
|
116 |
+
|
117 |
+
|
118 |
+
find_packages = PackageFinder.find
|
119 |
+
find_namespace_packages = PEP420PackageFinder.find
|
120 |
+
|
121 |
+
|
122 |
+
def _install_setup_requires(attrs):
    """Fetch any eggs named in ``attrs['setup_requires']`` (and setup.cfg)
    before the full setup() arguments are interpreted."""
    # Note: do not use `setuptools.Distribution` directly, as
    # our PEP 517 backend patch `distutils.core.Distribution`.
    class MinimalDistribution(distutils.core.Distribution):
        """
        A minimal version of a distribution for supporting the
        fetch_build_eggs interface.
        """

        def __init__(self, attrs):
            # Keep only the keys needed for fetching build eggs.
            _incl = 'dependency_links', 'setup_requires'
            filtered = {k: attrs[k] for k in set(_incl) & set(attrs)}
            distutils.core.Distribution.__init__(self, filtered)

        def finalize_options(self):
            """
            Disable finalize_options to avoid building the working set.
            Ref #2158.
            """

    dist = MinimalDistribution(attrs)

    # Honor setup.cfg's options.
    dist.parse_config_files(ignore_option_errors=True)
    if dist.setup_requires:
        dist.fetch_build_eggs(dist.setup_requires)
|
148 |
+
|
149 |
+
|
150 |
+
def setup(**attrs):
    # Drop-in replacement for distutils.core.setup; its __doc__ is copied
    # over just below, so no docstring here.
    # Make sure we have any requirements needed to interpret 'attrs'.
    _install_setup_requires(attrs)
    return distutils.core.setup(**attrs)
|
154 |
+
|
155 |
+
|
156 |
+
setup.__doc__ = distutils.core.setup.__doc__
|
157 |
+
|
158 |
+
|
159 |
+
_Command = monkey.get_unpatched(distutils.core.Command)
|
160 |
+
|
161 |
+
|
162 |
+
class Command(_Command):
    # Inherit distutils' documentation verbatim.
    __doc__ = _Command.__doc__

    # Subclasses set this True to receive leftover command-line arguments.
    command_consumes_arguments = False

    def __init__(self, dist, **kw):
        """
        Construct the command for dist, updating
        vars(self) with any keyword parameters.
        """
        _Command.__init__(self, dist)
        vars(self).update(kw)

    def _ensure_stringlike(self, option, what, default=None):
        """Coerce a missing option to *default*; reject non-string values."""
        val = getattr(self, option)
        if val is None:
            setattr(self, option, default)
            return default
        elif not isinstance(val, str):
            raise DistutilsOptionError(
                "'%s' must be a %s (got `%s`)" % (option, what, val)
            )
        return val

    def ensure_string_list(self, option):
        r"""Ensure that 'option' is a list of strings. If 'option' is
        currently a string, we split it either on /,\s*/ or /\s+/, so
        "foo bar baz", "foo,bar,baz", and "foo, bar baz" all become
        ["foo", "bar", "baz"].
        """
        val = getattr(self, option)
        if val is None:
            return
        elif isinstance(val, str):
            setattr(self, option, re.split(r',\s*|\s+', val))
        else:
            if isinstance(val, list):
                ok = all(isinstance(v, str) for v in val)
            else:
                ok = False
            if not ok:
                raise DistutilsOptionError(
                    "'%s' must be a list of strings (got %r)" % (option, val)
                )

    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
        """Reinitialize *command* and apply keyword overrides to it."""
        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
        vars(cmd).update(kw)
        return cmd
|
211 |
+
|
212 |
+
|
213 |
+
def _find_all_simple(path):
|
214 |
+
"""
|
215 |
+
Find all files under 'path'
|
216 |
+
"""
|
217 |
+
results = (
|
218 |
+
os.path.join(base, file)
|
219 |
+
for base, dirs, files in os.walk(path, followlinks=True)
|
220 |
+
for file in files
|
221 |
+
)
|
222 |
+
return filter(os.path.isfile, results)
|
223 |
+
|
224 |
+
|
225 |
+
def findall(dir=os.curdir):
    """Return a list of all filenames found under *dir*.

    For the current directory ('.') the names are returned relative to
    it; for any other *dir* the names keep *dir* as a prefix.
    """
    found = _find_all_simple(dir)
    if dir == os.curdir:
        found = (os.path.relpath(name, start=dir) for name in found)
    return list(found)
|
235 |
+
|
236 |
+
|
237 |
+
class sic(str):
    """A str subclass marking its value as intentionally verbatim
    (https://en.wikipedia.org/wiki/Sic)."""
|
239 |
+
|
240 |
+
|
241 |
+
# Apply monkey patches
|
242 |
+
monkey.patch_all()
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/_imp.cpython-310.pyc
ADDED
Binary file (2.08 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/depends.cpython-310.pyc
ADDED
Binary file (5.3 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/dist.cpython-310.pyc
ADDED
Binary file (36.3 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/errors.cpython-310.pyc
ADDED
Binary file (1.51 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/extension.cpython-310.pyc
ADDED
Binary file (1.95 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/glob.cpython-310.pyc
ADDED
Binary file (3.74 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/monkey.cpython-310.pyc
ADDED
Binary file (4.64 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/msvc.cpython-310.pyc
ADDED
Binary file (42.6 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/namespaces.cpython-310.pyc
ADDED
Binary file (3.62 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/package_index.cpython-310.pyc
ADDED
Binary file (32.7 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/py34compat.cpython-310.pyc
ADDED
Binary file (487 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/sandbox.cpython-310.pyc
ADDED
Binary file (15.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/unicode_utils.cpython-310.pyc
ADDED
Binary file (1.12 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/__pycache__/wheel.cpython-310.pyc
ADDED
Binary file (7.36 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/_deprecation_warning.py
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
class SetuptoolsDeprecationWarning(Warning):
    """Root warning category for deprecations inside ``setuptools``.

    Deliberately derived from ``Warning`` rather than
    ``DeprecationWarning``, so it is shown by default.
    """
|
llmeval-env/lib/python3.10/site-packages/setuptools/_imp.py
ADDED
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Re-implementation of find_module and get_frozen_object
|
3 |
+
from the deprecated imp module.
|
4 |
+
"""
|
5 |
+
|
6 |
+
import os
|
7 |
+
import importlib.util
|
8 |
+
import importlib.machinery
|
9 |
+
|
10 |
+
from .py34compat import module_from_spec
|
11 |
+
|
12 |
+
|
13 |
+
PY_SOURCE = 1
|
14 |
+
PY_COMPILED = 2
|
15 |
+
C_EXTENSION = 3
|
16 |
+
C_BUILTIN = 6
|
17 |
+
PY_FROZEN = 7
|
18 |
+
|
19 |
+
|
20 |
+
def find_spec(module, paths):
    """Locate the import spec for *module*.

    When *paths* is a list, only those filesystem paths are searched
    (via PathFinder); otherwise the full import machinery is consulted.
    """
    if isinstance(paths, list):
        return importlib.machinery.PathFinder().find_spec(module, paths)
    return importlib.util.find_spec(module, paths)
|
27 |
+
|
28 |
+
|
29 |
+
def find_module(module, paths=None):
    """Just like 'imp.find_module()', but with package support

    Returns the classic imp triple ``(file, path, (suffix, mode, kind))``;
    ``file`` is an open file object only for source/bytecode modules.
    """
    spec = find_spec(module, paths)
    if spec is None:
        raise ImportError("Can't find %s" % module)
    if not spec.has_location and hasattr(spec, 'submodule_search_locations'):
        # A package without a concrete location: fabricate a spec that
        # points at its loader under the conventional '__init__.py' name.
        spec = importlib.util.spec_from_loader('__init__.py', spec.loader)

    kind = -1
    file = None
    # A loader that is a class (not an instance) indicates one of the
    # static built-in/frozen importers.
    static = isinstance(spec.loader, type)
    if spec.origin == 'frozen' or static and issubclass(
            spec.loader, importlib.machinery.FrozenImporter):
        kind = PY_FROZEN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.origin == 'built-in' or static and issubclass(
            spec.loader, importlib.machinery.BuiltinImporter):
        kind = C_BUILTIN
        path = None  # imp compatibility
        suffix = mode = ''  # imp compatibility
    elif spec.has_location:
        path = spec.origin
        suffix = os.path.splitext(path)[1]
        # Source files are opened in text mode, everything else binary.
        mode = 'r' if suffix in importlib.machinery.SOURCE_SUFFIXES else 'rb'

        if suffix in importlib.machinery.SOURCE_SUFFIXES:
            kind = PY_SOURCE
        elif suffix in importlib.machinery.BYTECODE_SUFFIXES:
            kind = PY_COMPILED
        elif suffix in importlib.machinery.EXTENSION_SUFFIXES:
            kind = C_EXTENSION

    if kind in {PY_SOURCE, PY_COMPILED}:
        # Only source/bytecode modules expose an open file handle,
        # mirroring imp.find_module's behavior.
        file = open(path, mode)
    else:
        path = None
        suffix = mode = ''

    return file, path, (suffix, mode, kind)
|
69 |
+
|
70 |
+
|
71 |
+
def get_frozen_object(module, paths=None):
    """Return the code object for *module*, obtained from its loader."""
    spec = find_spec(module, paths)
    if spec:
        return spec.loader.get_code(module)
    raise ImportError("Can't find %s" % module)
|
76 |
+
|
77 |
+
|
78 |
+
def get_module(module, paths, info):
    """Load and return *module*; *info* is accepted only for imp-API parity."""
    spec = find_spec(module, paths)
    if spec is None:
        raise ImportError("Can't find %s" % module)
    return module_from_spec(spec)
|
llmeval-env/lib/python3.10/site-packages/setuptools/archive_util.py
ADDED
@@ -0,0 +1,205 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Utilities for extracting common archive formats"""
|
2 |
+
|
3 |
+
import zipfile
|
4 |
+
import tarfile
|
5 |
+
import os
|
6 |
+
import shutil
|
7 |
+
import posixpath
|
8 |
+
import contextlib
|
9 |
+
from distutils.errors import DistutilsError
|
10 |
+
|
11 |
+
from pkg_resources import ensure_directory
|
12 |
+
|
13 |
+
__all__ = [
|
14 |
+
"unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
|
15 |
+
"UnrecognizedFormat", "extraction_drivers", "unpack_directory",
|
16 |
+
]
|
17 |
+
|
18 |
+
|
19 |
+
class UnrecognizedFormat(DistutilsError):
    """Raised when an archive's type could not be recognized."""
|
21 |
+
|
22 |
+
|
23 |
+
def default_filter(src, dst):
    """The default progress/filter callback; returns the destination path
    unchanged, so every entry is extracted to its computed location."""
    return dst
|
26 |
+
|
27 |
+
|
28 |
+
def unpack_archive(
        filename, extract_dir, progress_filter=default_filter,
        drivers=None):
    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``.

    `progress_filter` is a callback taking two arguments: a
    '/'-separated source path internal to the archive, and the
    filesystem path it is about to be extracted to.  It must return the
    desired extraction path (possibly the one passed in) or ``None`` to
    skip that entry; it can therefore report progress, filter entries,
    or redirect them.

    `drivers`, if given, must be a non-empty sequence of callables with
    this function's signature (minus `drivers`) that raise
    ``UnrecognizedFormat`` for archive types they do not handle.  Each
    driver is tried in turn until one succeeds; when all fail,
    ``UnrecognizedFormat`` is raised.  Without `drivers`, the module's
    ``extraction_drivers`` constant is used, i.e. ``unpack_directory``,
    ``unpack_zipfile`` then ``unpack_tarfile``.
    """
    candidates = drivers or extraction_drivers
    for driver in candidates:
        try:
            driver(filename, extract_dir, progress_filter)
            return
        except UnrecognizedFormat:
            continue
    raise UnrecognizedFormat(
        "Not a recognized archive type: %s" % filename
    )
|
62 |
+
|
63 |
+
|
64 |
+
def unpack_directory(filename, extract_dir, progress_filter=default_filter):
    """"Unpack" a directory, using the same interface as for archives

    Raises ``UnrecognizedFormat`` if `filename` is not a directory
    """
    if not os.path.isdir(filename):
        raise UnrecognizedFormat("%s is not a directory" % filename)

    # Map each walked directory to its ('/'-separated prefix, output dir).
    mapping = {filename: ('', extract_dir)}
    for walk_base, subdirs, filenames in os.walk(filename):
        rel_prefix, out_base = mapping[walk_base]
        for sub in subdirs:
            mapping[os.path.join(walk_base, sub)] = (
                rel_prefix + sub + '/',
                os.path.join(out_base, sub),
            )
        for name in filenames:
            dest = progress_filter(
                rel_prefix + name, os.path.join(out_base, name))
            if not dest:
                # the callback vetoed this entry
                continue
            ensure_directory(dest)
            source = os.path.join(walk_base, name)
            shutil.copyfile(source, dest)
            shutil.copystat(source, dest)
|
89 |
+
|
90 |
+
|
91 |
+
def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack zip `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
    by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """

    if not zipfile.is_zipfile(filename):
        raise UnrecognizedFormat("%s is not a zip file" % (filename,))

    with zipfile.ZipFile(filename) as z:
        for info in z.infolist():
            name = info.filename

            # don't extract absolute paths or ones with .. in them
            # (guards against zip path-traversal entries)
            if name.startswith('/') or '..' in name.split('/'):
                continue

            target = os.path.join(extract_dir, *name.split('/'))
            # the filter may rename the target or return None to skip it
            target = progress_filter(name, target)
            if not target:
                continue
            if name.endswith('/'):
                # directory
                ensure_directory(target)
            else:
                # file
                ensure_directory(target)
                data = z.read(info.filename)
                with open(target, 'wb') as f:
                    f.write(data)
            # high 16 bits of external_attr carry the Unix permission
            # bits when present; re-apply them to the extracted path
            unix_attributes = info.external_attr >> 16
            if unix_attributes:
                os.chmod(target, unix_attributes)
|
126 |
+
|
127 |
+
|
128 |
+
def _resolve_tar_file_or_dir(tar_obj, tar_member_obj):
|
129 |
+
"""Resolve any links and extract link targets as normal files."""
|
130 |
+
while tar_member_obj is not None and (
|
131 |
+
tar_member_obj.islnk() or tar_member_obj.issym()):
|
132 |
+
linkpath = tar_member_obj.linkname
|
133 |
+
if tar_member_obj.issym():
|
134 |
+
base = posixpath.dirname(tar_member_obj.name)
|
135 |
+
linkpath = posixpath.join(base, linkpath)
|
136 |
+
linkpath = posixpath.normpath(linkpath)
|
137 |
+
tar_member_obj = tar_obj._getmember(linkpath)
|
138 |
+
|
139 |
+
is_file_or_dir = (
|
140 |
+
tar_member_obj is not None and
|
141 |
+
(tar_member_obj.isfile() or tar_member_obj.isdir())
|
142 |
+
)
|
143 |
+
if is_file_or_dir:
|
144 |
+
return tar_member_obj
|
145 |
+
|
146 |
+
raise LookupError('Got unknown file type')
|
147 |
+
|
148 |
+
|
149 |
+
def _iter_open_tar(tar_obj, extract_dir, progress_filter):
    """Yield ``(member, destination)`` pairs for the safe entries of an
    open tar archive; the archive is closed when iteration ends."""
    # disable ownership changes entirely
    tar_obj.chown = lambda *args: None

    with contextlib.closing(tar_obj):
        for entry in tar_obj:
            name = entry.name
            # refuse absolute paths and anything containing '..'
            if name.startswith('/') or '..' in name.split('/'):
                continue

            candidate = os.path.join(extract_dir, *name.split('/'))

            try:
                resolved = _resolve_tar_file_or_dir(tar_obj, entry)
            except LookupError:
                continue

            destination = progress_filter(name, candidate)
            if not destination:
                continue

            if destination.endswith(os.sep):
                destination = destination[:-1]

            yield resolved, destination
|
176 |
+
|
177 |
+
|
178 |
+
def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`

    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
    by ``tarfile.open()``). See ``unpack_archive()`` for an explanation
    of the `progress_filter` argument.
    """
    try:
        tarobj = tarfile.open(filename)
    except tarfile.TarError as e:
        raise UnrecognizedFormat(
            "%s is not a compressed or uncompressed tar file" % (filename,)
        ) from e

    # _iter_open_tar filters out unsafe/unsupported members and closes
    # the archive when the iteration finishes.
    for member, final_dst in _iter_open_tar(
            tarobj, extract_dir, progress_filter,
    ):
        try:
            # XXX Ugh — relies on tarfile's private extraction helper
            tarobj._extract_member(member, final_dst)
        except tarfile.ExtractError:
            # chown/chmod/mkfifo/mknode/makedev failed; best-effort only
            pass

    return True
|
203 |
+
|
204 |
+
|
205 |
+
extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
|
llmeval-env/lib/python3.10/site-packages/setuptools/build_meta.py
ADDED
@@ -0,0 +1,290 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""A PEP 517 interface to setuptools
|
2 |
+
|
3 |
+
Previously, when a user or a command line tool (let's call it a "frontend")
|
4 |
+
needed to make a request of setuptools to take a certain action, for
|
5 |
+
example, generating a list of installation requirements, the frontend would
|
6 |
+
would call "setup.py egg_info" or "setup.py bdist_wheel" on the command line.
|
7 |
+
|
8 |
+
PEP 517 defines a different method of interfacing with setuptools. Rather
|
9 |
+
than calling "setup.py" directly, the frontend should:
|
10 |
+
|
11 |
+
1. Set the current directory to the directory with a setup.py file
|
12 |
+
2. Import this module into a safe python interpreter (one in which
|
13 |
+
setuptools can potentially set global variables or crash hard).
|
14 |
+
3. Call one of the functions defined in PEP 517.
|
15 |
+
|
16 |
+
What each function does is defined in PEP 517. However, here is a "casual"
|
17 |
+
definition of the functions (this definition should not be relied on for
|
18 |
+
bug reports or API stability):
|
19 |
+
|
20 |
+
- `build_wheel`: build a wheel in the folder and return the basename
|
21 |
+
- `get_requires_for_build_wheel`: get the `setup_requires` to build
|
22 |
+
- `prepare_metadata_for_build_wheel`: get the `install_requires`
|
23 |
+
- `build_sdist`: build an sdist in the folder and return the basename
|
24 |
+
- `get_requires_for_build_sdist`: get the `setup_requires` to build
|
25 |
+
|
26 |
+
Again, this is not a formal definition! Just a "taste" of the module.
|
27 |
+
"""
|
28 |
+
|
29 |
+
import io
|
30 |
+
import os
|
31 |
+
import sys
|
32 |
+
import tokenize
|
33 |
+
import shutil
|
34 |
+
import contextlib
|
35 |
+
import tempfile
|
36 |
+
import warnings
|
37 |
+
|
38 |
+
import setuptools
|
39 |
+
import distutils
|
40 |
+
|
41 |
+
from pkg_resources import parse_requirements
|
42 |
+
|
43 |
+
__all__ = ['get_requires_for_build_sdist',
|
44 |
+
'get_requires_for_build_wheel',
|
45 |
+
'prepare_metadata_for_build_wheel',
|
46 |
+
'build_wheel',
|
47 |
+
'build_sdist',
|
48 |
+
'__legacy__',
|
49 |
+
'SetupRequirementsError']
|
50 |
+
|
51 |
+
|
52 |
+
class SetupRequirementsError(BaseException):
    """Control-flow exception carrying setup_requires specifiers back to
    the PEP 517 backend (presumably a BaseException so ordinary
    ``except Exception`` blocks in setup.py do not swallow it — confirm).
    """

    def __init__(self, specifiers):
        # the requirement specifier strings requested by the project
        self.specifiers = specifiers
|
55 |
+
|
56 |
+
|
57 |
+
class Distribution(setuptools.dist.Distribution):
    def fetch_build_eggs(self, specifiers):
        """Instead of installing anything, report the requested
        ``setup_requires`` back to the caller via an exception."""
        # normalize to plain specifier strings before reporting
        specifier_list = list(map(str, parse_requirements(specifiers)))

        raise SetupRequirementsError(specifier_list)

    @classmethod
    @contextlib.contextmanager
    def patch(cls):
        """
        Replace
        distutils.core.Distribution with this class
        for the duration of this context.
        """
        orig = distutils.core.Distribution
        distutils.core.Distribution = cls
        try:
            yield
        finally:
            # always restore the original class, even on error
            distutils.core.Distribution = orig
|
77 |
+
|
78 |
+
|
79 |
+
@contextlib.contextmanager
def no_install_setup_requires():
    """Temporarily disable installing setup_requires

    Under PEP 517, the backend reports build dependencies to the frontend,
    and the frontend is responsible for ensuring they're installed.
    So setuptools (acting as a backend) should not try to install them.
    """
    saved = setuptools._install_setup_requires
    # no-op replacement for the duration of the context
    setuptools._install_setup_requires = lambda attrs: None
    try:
        yield
    finally:
        setuptools._install_setup_requires = saved
|
93 |
+
|
94 |
+
|
95 |
+
def _get_immediate_subdirectories(a_dir):
|
96 |
+
return [name for name in os.listdir(a_dir)
|
97 |
+
if os.path.isdir(os.path.join(a_dir, name))]
|
98 |
+
|
99 |
+
|
100 |
+
def _file_with_extension(directory, extension):
|
101 |
+
matching = (
|
102 |
+
f for f in os.listdir(directory)
|
103 |
+
if f.endswith(extension)
|
104 |
+
)
|
105 |
+
try:
|
106 |
+
file, = matching
|
107 |
+
except ValueError:
|
108 |
+
raise ValueError(
|
109 |
+
'No distribution was found. Ensure that `setup.py` '
|
110 |
+
'is not empty and that it calls `setup()`.')
|
111 |
+
return file
|
112 |
+
|
113 |
+
|
114 |
+
def _open_setup_script(setup_script):
|
115 |
+
if not os.path.exists(setup_script):
|
116 |
+
# Supply a default setup.py
|
117 |
+
return io.StringIO(u"from setuptools import setup; setup()")
|
118 |
+
|
119 |
+
return getattr(tokenize, 'open', open)(setup_script)
|
120 |
+
|
121 |
+
|
122 |
+
@contextlib.contextmanager
def suppress_known_deprecation():
    """Silence the known 'setup.py install is deprecated' warning for the
    duration of the block, restoring the filters afterwards."""
    with warnings.catch_warnings():
        warnings.filterwarnings(
            'ignore', 'setup.py install is deprecated')
        yield
|
127 |
+
|
128 |
+
|
129 |
+
class _BuildMetaBackend(object):
    """Implementation of the PEP 517 hooks, bound to module-level names
    below.  NOTE(review): several methods mutate ``sys.argv`` in place to
    drive setup.py — the hooks are expected to run in their own process,
    per PEP 517; confirm before reusing in-process."""

    def _fix_config(self, config_settings):
        # Normalize config_settings to a dict that always has a
        # '--global-option' list, so callers can concatenate it blindly.
        config_settings = config_settings or {}
        config_settings.setdefault('--global-option', [])
        return config_settings

    def _get_build_requires(self, config_settings, requirements):
        # Run 'setup.py egg_info' under the patched Distribution; the
        # patched fetch_build_eggs raises SetupRequirementsError instead
        # of installing, and we collect the specifiers it reports.
        config_settings = self._fix_config(config_settings)

        sys.argv = sys.argv[:1] + ['egg_info'] + \
            config_settings["--global-option"]
        try:
            with Distribution.patch():
                self.run_setup()
        except SetupRequirementsError as e:
            requirements += e.specifiers

        return requirements

    def run_setup(self, setup_script='setup.py'):
        # Note that we can reuse our build directory between calls
        # Correctness comes first, then optimization later
        __file__ = setup_script
        __name__ = '__main__'

        with _open_setup_script(__file__) as f:
            code = f.read().replace(r'\r\n', r'\n')

        # Execute setup.py with __file__/__name__ bound as if run directly.
        exec(compile(code, __file__, 'exec'), locals())

    def get_requires_for_build_wheel(self, config_settings=None):
        """PEP 517 hook: build requirements for a wheel ('wheel' plus
        whatever setup_requires declares)."""
        config_settings = self._fix_config(config_settings)
        return self._get_build_requires(
            config_settings, requirements=['wheel'])

    def get_requires_for_build_sdist(self, config_settings=None):
        """PEP 517 hook: build requirements for an sdist (setup_requires
        only)."""
        config_settings = self._fix_config(config_settings)
        return self._get_build_requires(config_settings, requirements=[])

    def prepare_metadata_for_build_wheel(self, metadata_directory,
                                         config_settings=None):
        """PEP 517 hook: generate the .dist-info directory inside
        metadata_directory and return its basename."""
        sys.argv = sys.argv[:1] + [
            'dist_info', '--egg-base', metadata_directory]
        with no_install_setup_requires():
            self.run_setup()

        # dist_info may have nested the .dist-info inside a single
        # subdirectory; walk down until it is found.
        dist_info_directory = metadata_directory
        while True:
            dist_infos = [f for f in os.listdir(dist_info_directory)
                          if f.endswith('.dist-info')]

            if (
                len(dist_infos) == 0 and
                len(_get_immediate_subdirectories(dist_info_directory)) == 1
            ):

                dist_info_directory = os.path.join(
                    dist_info_directory, os.listdir(dist_info_directory)[0])
                continue

            assert len(dist_infos) == 1
            break

        # PEP 517 requires that the .dist-info directory be placed in the
        # metadata_directory. To comply, we MUST copy the directory to the root
        if dist_info_directory != metadata_directory:
            shutil.move(
                os.path.join(dist_info_directory, dist_infos[0]),
                metadata_directory)
            shutil.rmtree(dist_info_directory, ignore_errors=True)

        return dist_infos[0]

    def _build_with_temp_dir(self, setup_command, result_extension,
                             result_directory, config_settings):
        # Shared implementation for build_wheel/build_sdist: run the
        # given setup.py command into a temp dir, then move the single
        # produced artifact into result_directory.
        config_settings = self._fix_config(config_settings)
        result_directory = os.path.abspath(result_directory)

        # Build in a temporary directory, then copy to the target.
        os.makedirs(result_directory, exist_ok=True)
        with tempfile.TemporaryDirectory(dir=result_directory) as tmp_dist_dir:
            sys.argv = (sys.argv[:1] + setup_command +
                        ['--dist-dir', tmp_dist_dir] +
                        config_settings["--global-option"])
            with no_install_setup_requires():
                self.run_setup()

            result_basename = _file_with_extension(
                tmp_dist_dir, result_extension)
            result_path = os.path.join(result_directory, result_basename)
            if os.path.exists(result_path):
                # os.rename will fail overwriting on non-Unix.
                os.remove(result_path)
            os.rename(os.path.join(tmp_dist_dir, result_basename), result_path)

        return result_basename

    def build_wheel(self, wheel_directory, config_settings=None,
                    metadata_directory=None):
        """PEP 517 hook: build a wheel and return its basename."""
        with suppress_known_deprecation():
            return self._build_with_temp_dir(['bdist_wheel'], '.whl',
                                             wheel_directory, config_settings)

    def build_sdist(self, sdist_directory, config_settings=None):
        """PEP 517 hook: build a gzipped-tar sdist and return its basename."""
        return self._build_with_temp_dir(['sdist', '--formats', 'gztar'],
                                         '.tar.gz', sdist_directory,
                                         config_settings)
|
237 |
+
|
238 |
+
|
239 |
+
class _BuildMetaLegacyBackend(_BuildMetaBackend):
    """Compatibility backend for setuptools

    This is a version of setuptools.build_meta that endeavors to maintain
    backwards compatibility with pre-PEP 517 modes of invocation.  It
    exists as a temporary bridge between the old packaging mechanism and
    the new packaging mechanism, and will eventually be removed.
    """
    def run_setup(self, setup_script='setup.py'):
        # In order to maintain compatibility with scripts assuming that
        # the setup.py script is in a directory on the PYTHONPATH, inject
        # '' into sys.path. (pypa/setuptools#1642)
        sys_path = list(sys.path)  # Save the original path

        script_dir = os.path.dirname(os.path.abspath(setup_script))
        if script_dir not in sys.path:
            sys.path.insert(0, script_dir)

        # Some setup.py scripts (e.g. in pygame and numpy) use sys.argv[0] to
        # get the directory of the source code. They expect it to refer to the
        # setup.py script.
        sys_argv_0 = sys.argv[0]
        sys.argv[0] = setup_script

        try:
            super(_BuildMetaLegacyBackend,
                  self).run_setup(setup_script=setup_script)
        finally:
            # While PEP 517 frontends should be calling each hook in a fresh
            # subprocess according to the standard (and thus it should not be
            # strictly necessary to restore the old sys.path), we'll restore
            # the original path so that the path manipulation does not persist
            # within the hook after run_setup is called.
            sys.path[:] = sys_path
            sys.argv[0] = sys_argv_0
|
277 |
+
|
278 |
+
|
279 |
+
# The primary backend
|
280 |
+
_BACKEND = _BuildMetaBackend()
|
281 |
+
|
282 |
+
get_requires_for_build_wheel = _BACKEND.get_requires_for_build_wheel
|
283 |
+
get_requires_for_build_sdist = _BACKEND.get_requires_for_build_sdist
|
284 |
+
prepare_metadata_for_build_wheel = _BACKEND.prepare_metadata_for_build_wheel
|
285 |
+
build_wheel = _BACKEND.build_wheel
|
286 |
+
build_sdist = _BACKEND.build_sdist
|
287 |
+
|
288 |
+
|
289 |
+
# The legacy backend
|
290 |
+
__legacy__ = _BuildMetaLegacyBackend()
|
llmeval-env/lib/python3.10/site-packages/setuptools/cli-32.exe
ADDED
Binary file (65.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/cli-64.exe
ADDED
Binary file (74.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/cli-arm64.exe
ADDED
Binary file (137 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/cli.exe
ADDED
Binary file (65.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/setuptools/command/__init__.py
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from distutils.command.bdist import bdist
import sys

# Register the 'egg' format with distutils' bdist command at import time,
# so that `bdist --formats=egg` dispatches to setuptools' bdist_egg.
if 'egg' not in bdist.format_commands:
    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
    bdist.format_commands.append('egg')

# keep the package namespace clean of these implementation details
del bdist, sys
|
llmeval-env/lib/python3.10/site-packages/setuptools/command/egg_info.py
ADDED
@@ -0,0 +1,755 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""setuptools.command.egg_info
|
2 |
+
|
3 |
+
Create a distribution's .egg-info directory and contents"""
|
4 |
+
|
5 |
+
from distutils.filelist import FileList as _FileList
|
6 |
+
from distutils.errors import DistutilsInternalError
|
7 |
+
from distutils.util import convert_path
|
8 |
+
from distutils import log
|
9 |
+
import distutils.errors
|
10 |
+
import distutils.filelist
|
11 |
+
import functools
|
12 |
+
import os
|
13 |
+
import re
|
14 |
+
import sys
|
15 |
+
import io
|
16 |
+
import warnings
|
17 |
+
import time
|
18 |
+
import collections
|
19 |
+
|
20 |
+
from setuptools import Command
|
21 |
+
from setuptools.command.sdist import sdist
|
22 |
+
from setuptools.command.sdist import walk_revctrl
|
23 |
+
from setuptools.command.setopt import edit_config
|
24 |
+
from setuptools.command import bdist_egg
|
25 |
+
from pkg_resources import (
|
26 |
+
parse_requirements, safe_name, parse_version,
|
27 |
+
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
|
28 |
+
import setuptools.unicode_utils as unicode_utils
|
29 |
+
from setuptools.glob import glob
|
30 |
+
|
31 |
+
from setuptools.extern import packaging
|
32 |
+
from setuptools import SetuptoolsDeprecationWarning
|
33 |
+
|
34 |
+
|
35 |
+
def translate_pattern(glob):  # noqa: C901  # is too complex (14)  # FIXME
    """
    Translate a file path glob like '*.txt' in to a regular expression.
    This differs from fnmatch.translate which allows wildcards to match
    directory separators. It also knows about '**/' which matches any number of
    directories.

    Returns a compiled ``re.Pattern`` that matches whole paths (it is
    anchored at the end with ``\\Z``; callers use ``.match`` so it is
    implicitly anchored at the start as well).
    """
    pat = ''

    # This will split on '/' within [character classes]. This is deliberate.
    chunks = glob.split(os.path.sep)

    sep = re.escape(os.sep)
    # A "name character" is anything except the directory separator.
    valid_char = '[^%s]' % (sep,)

    for c, chunk in enumerate(chunks):
        last_chunk = c == len(chunks) - 1

        # Chunks that are a literal ** are globstars. They match anything.
        if chunk == '**':
            if last_chunk:
                # Match anything if this is the last component
                pat += '.*'
            else:
                # Match '(name/)*'
                pat += '(?:%s+%s)*' % (valid_char, sep)
            continue  # Break here as the whole path component has been handled

        # Find any special characters in the remainder
        i = 0
        chunk_len = len(chunk)
        while i < chunk_len:
            char = chunk[i]
            if char == '*':
                # Match any number of name characters
                pat += valid_char + '*'
            elif char == '?':
                # Match a name character
                pat += valid_char
            elif char == '[':
                # Character class
                inner_i = i + 1
                # Skip initial !/] chars
                # (a leading '!' is negation; a ']' right after the opening
                # bracket is a literal ']', per fnmatch semantics)
                if inner_i < chunk_len and chunk[inner_i] == '!':
                    inner_i = inner_i + 1
                if inner_i < chunk_len and chunk[inner_i] == ']':
                    inner_i = inner_i + 1

                # Loop till the closing ] is found
                while inner_i < chunk_len and chunk[inner_i] != ']':
                    inner_i = inner_i + 1

                if inner_i >= chunk_len:
                    # Got to the end of the string without finding a closing ]
                    # Do not treat this as a matching group, but as a literal [
                    pat += re.escape(char)
                else:
                    # Grab the insides of the [brackets]
                    inner = chunk[i + 1:inner_i]
                    char_class = ''

                    # Class negation
                    if inner[0] == '!':
                        char_class = '^'
                        inner = inner[1:]

                    char_class += re.escape(inner)
                    pat += '[%s]' % (char_class,)

                    # Skip to the end ]
                    i = inner_i
            else:
                pat += re.escape(char)
            i += 1

        # Join each chunk with the dir separator
        if not last_chunk:
            pat += sep

    pat += r'\Z'
    return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
|
116 |
+
|
117 |
+
|
118 |
+
class InfoCommon:
    """Mixin with name/version helpers shared by egg-info-style commands."""

    # Option values; concrete commands may overwrite these per-instance.
    tag_build = None
    tag_date = None

    @property
    def name(self):
        """The distribution name, sanitized via ``safe_name``."""
        return safe_name(self.distribution.get_name())

    def tagged_version(self):
        """Return the distribution version with build tags applied, sanitized."""
        return safe_version(self._maybe_tag(self.distribution.get_version()))

    def _maybe_tag(self, version):
        """
        egg_info may be called more than once for a distribution,
        in which case the version string already contains all tags.
        """
        suffix = self.vtags
        if suffix and version.endswith(suffix):
            return version
        return version + suffix

    def tags(self):
        """Build the tag suffix from ``tag_build`` and ``tag_date``."""
        parts = []
        if self.tag_build:
            parts.append(self.tag_build)
        if self.tag_date:
            parts.append(time.strftime("-%Y%m%d"))
        return ''.join(parts)
    vtags = property(tags)
|
147 |
+
|
148 |
+
|
149 |
+
class egg_info(InfoCommon, Command):
    """Create a distribution's .egg-info directory and its metadata files."""

    description = "create a distribution's .egg-info directory"

    user_options = [
        ('egg-base=', 'e', "directory containing .egg-info directories"
                           " (default: top of the source tree)"),
        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
        ('no-date', 'D', "Don't include date stamp [default]"),
    ]

    boolean_options = ['tag-date']
    negative_opt = {
        'no-date': 'tag-date',
    }

    def initialize_options(self):
        # All computed in finalize_options / check_broken_egg_info.
        self.egg_base = None
        self.egg_name = None
        self.egg_info = None
        self.egg_version = None
        self.broken_egg_info = False

    ####################################
    # allow the 'tag_svn_revision' to be detected and
    # set, supporting sdists built on older Setuptools.
    @property
    def tag_svn_revision(self):
        # Intentionally a no-op: the option is accepted but ignored.
        pass

    @tag_svn_revision.setter
    def tag_svn_revision(self, value):
        # Intentionally a no-op: the option is accepted but ignored.
        pass
    ####################################

    def save_version_info(self, filename):
        """
        Materialize the value of date into the
        build tag. Install build keys in a deterministic order
        to avoid arbitrary reordering on subsequent builds.
        """
        egg_info = collections.OrderedDict()
        # follow the order these keys would have been added
        # when PYTHONHASHSEED=0
        egg_info['tag_build'] = self.tags()
        egg_info['tag_date'] = 0
        edit_config(filename, dict(egg_info=egg_info))

    def finalize_options(self):
        # Note: we need to capture the current value returned
        # by `self.tagged_version()`, so we can later update
        # `self.distribution.metadata.version` without
        # repercussions.
        self.egg_name = self.name
        self.egg_version = self.tagged_version()
        parsed_version = parse_version(self.egg_version)

        try:
            # '===' (arbitrary equality) is required for non-PEP-440 versions;
            # '==' suffices for proper ones. Parsing the synthesized
            # requirement validates both name and version syntax.
            is_version = isinstance(parsed_version, packaging.version.Version)
            spec = (
                "%s==%s" if is_version else "%s===%s"
            )
            list(
                parse_requirements(spec % (self.egg_name, self.egg_version))
            )
        except ValueError as e:
            raise distutils.errors.DistutilsOptionError(
                "Invalid distribution name or version syntax: %s-%s" %
                (self.egg_name, self.egg_version)
            ) from e

        if self.egg_base is None:
            # Default egg-base to the root package dir (or the cwd).
            dirs = self.distribution.package_dir
            self.egg_base = (dirs or {}).get('', os.curdir)

        self.ensure_dirname('egg_base')
        self.egg_info = to_filename(self.egg_name) + '.egg-info'
        if self.egg_base != os.curdir:
            self.egg_info = os.path.join(self.egg_base, self.egg_info)
        if '-' in self.egg_name:
            self.check_broken_egg_info()

        # Set package version for the benefit of dumber commands
        # (e.g. sdist, bdist_wininst, etc.)
        #
        self.distribution.metadata.version = self.egg_version

        # If we bootstrapped around the lack of a PKG-INFO, as might be the
        # case in a fresh checkout, make sure that any special tags get added
        # to the version info
        #
        pd = self.distribution._patched_dist
        if pd is not None and pd.key == self.egg_name.lower():
            pd._version = self.egg_version
            pd._parsed_version = parse_version(self.egg_version)
            self.distribution._patched_dist = None

    def write_or_delete_file(self, what, filename, data, force=False):
        """Write `data` to `filename` or delete if empty

        If `data` is non-empty, this routine is the same as ``write_file()``.
        If `data` is empty but not ``None``, this is the same as calling
        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
        unless `filename` exists, in which case a warning is issued about the
        orphaned file (if `force` is false), or deleted (if `force` is true).
        """
        if data:
            self.write_file(what, filename, data)
        elif os.path.exists(filename):
            if data is None and not force:
                log.warn(
                    "%s not set in setup(), but %s exists", what, filename
                )
                return
            else:
                self.delete_file(filename)

    def write_file(self, what, filename, data):
        """Write `data` to `filename` (if not a dry run) after announcing it

        `what` is used in a log message to identify what is being written
        to the file.
        """
        log.info("writing %s to %s", what, filename)
        # Encode explicitly so the file is UTF-8 regardless of locale.
        data = data.encode("utf-8")
        if not self.dry_run:
            f = open(filename, 'wb')
            f.write(data)
            f.close()

    def delete_file(self, filename):
        """Delete `filename` (if not a dry run) after announcing it"""
        log.info("deleting %s", filename)
        if not self.dry_run:
            os.unlink(filename)

    def run(self):
        self.mkpath(self.egg_info)
        # Touch the directory so downstream timestamp checks see activity.
        os.utime(self.egg_info, None)
        installer = self.distribution.fetch_build_egg
        # Each 'egg_info.writers' entry point emits one metadata file.
        for ep in iter_entry_points('egg_info.writers'):
            ep.require(installer=installer)
            writer = ep.resolve()
            writer(self, ep.name, os.path.join(self.egg_info, ep.name))

        # Get rid of native_libs.txt if it was put there by older bdist_egg
        nl = os.path.join(self.egg_info, "native_libs.txt")
        if os.path.exists(nl):
            self.delete_file(nl)

        self.find_sources()

    def find_sources(self):
        """Generate SOURCES.txt manifest file"""
        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
        mm = manifest_maker(self.distribution)
        mm.manifest = manifest_filename
        mm.run()
        self.filelist = mm.filelist

    def check_broken_egg_info(self):
        # A '-' in the project name produces an .egg-info directory that
        # 'setup.py develop' cannot handle; warn and fall back to it anyway.
        bei = self.egg_name + '.egg-info'
        if self.egg_base != os.curdir:
            bei = os.path.join(self.egg_base, bei)
        if os.path.exists(bei):
            log.warn(
                "-" * 78 + '\n'
                "Note: Your current .egg-info directory has a '-' in its name;"
                '\nthis will not work correctly with "setup.py develop".\n\n'
                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
                bei, self.egg_info
            )
            self.broken_egg_info = self.egg_info
            self.egg_info = bei  # make it work for now
|
323 |
+
|
324 |
+
|
325 |
+
class FileList(_FileList):
    # Implementations of the various MANIFEST.in commands

    def process_template_line(self, line):
        """Execute one MANIFEST.in line, logging a warning when it matched
        nothing."""
        # Parse the line: split it up, make sure the right number of words
        # is there, and return the relevant words. 'action' is always
        # defined: it's the first word of the line. Which of the other
        # three are defined depends on the action; it'll be either
        # patterns, (dir and patterns), or (dir_pattern).
        (action, patterns, dir, dir_pattern) = self._parse_template_line(line)

        # Dispatch table: action name -> bound handler. The recursive
        # variants are pre-bound to their directory argument so every
        # handler below takes a single pattern.
        action_map = {
            'include': self.include,
            'exclude': self.exclude,
            'global-include': self.global_include,
            'global-exclude': self.global_exclude,
            'recursive-include': functools.partial(
                self.recursive_include, dir,
            ),
            'recursive-exclude': functools.partial(
                self.recursive_exclude, dir,
            ),
            'graft': self.graft,
            'prune': self.prune,
        }
        # Warning template used when an action matches no files.
        log_map = {
            'include': "warning: no files found matching '%s'",
            'exclude': (
                "warning: no previously-included files found "
                "matching '%s'"
            ),
            'global-include': (
                "warning: no files found matching '%s' "
                "anywhere in distribution"
            ),
            'global-exclude': (
                "warning: no previously-included files matching "
                "'%s' found anywhere in distribution"
            ),
            'recursive-include': (
                "warning: no files found matching '%s' "
                "under directory '%s'"
            ),
            'recursive-exclude': (
                "warning: no previously-included files matching "
                "'%s' found under directory '%s'"
            ),
            'graft': "warning: no directories found matching '%s'",
            'prune': "no previously-included directories found matching '%s'",
        }

        try:
            process_action = action_map[action]
        except KeyError:
            # _parse_template_line already validated the action name.
            raise DistutilsInternalError(
                "this cannot happen: invalid action '{action!s}'".
                format(action=action),
            )

        # OK, now we know that the action is valid and we have the
        # right number of words on the line for that action -- so we
        # can proceed with minimal error-checking.

        action_is_recursive = action.startswith('recursive-')
        if action in {'graft', 'prune'}:
            # graft/prune take a single directory pattern, not a list.
            patterns = [dir_pattern]
        extra_log_args = (dir, ) if action_is_recursive else ()
        log_tmpl = log_map[action]

        self.debug_print(
            ' '.join(
                [action] +
                ([dir] if action_is_recursive else []) +
                patterns,
            )
        )
        for pattern in patterns:
            # Each handler returns truthy iff it matched at least one file.
            if not process_action(pattern):
                log.warn(log_tmpl, pattern, *extra_log_args)

    def _remove_files(self, predicate):
        """
        Remove all files from the file list that match the predicate.
        Return True if any matching files were removed
        """
        found = False
        # Iterate backwards so deletions don't shift pending indices.
        for i in range(len(self.files) - 1, -1, -1):
            if predicate(self.files[i]):
                self.debug_print(" removing " + self.files[i])
                del self.files[i]
                found = True
        return found

    def include(self, pattern):
        """Include files that match 'pattern'."""
        found = [f for f in glob(pattern) if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def exclude(self, pattern):
        """Exclude files that match 'pattern'."""
        match = translate_pattern(pattern)
        return self._remove_files(match.match)

    def recursive_include(self, dir, pattern):
        """
        Include all files anywhere in 'dir/' that match the pattern.
        """
        full_pattern = os.path.join(dir, '**', pattern)
        found = [f for f in glob(full_pattern, recursive=True)
                 if not os.path.isdir(f)]
        self.extend(found)
        return bool(found)

    def recursive_exclude(self, dir, pattern):
        """
        Exclude any file anywhere in 'dir/' that match the pattern.
        """
        match = translate_pattern(os.path.join(dir, '**', pattern))
        return self._remove_files(match.match)

    def graft(self, dir):
        """Include all files from 'dir/'."""
        found = [
            item
            for match_dir in glob(dir)
            for item in distutils.filelist.findall(match_dir)
        ]
        self.extend(found)
        return bool(found)

    def prune(self, dir):
        """Filter out files from 'dir/'."""
        match = translate_pattern(os.path.join(dir, '**'))
        return self._remove_files(match.match)

    def global_include(self, pattern):
        """
        Include all files anywhere in the current directory that match the
        pattern. This is very inefficient on large file trees.
        """
        if self.allfiles is None:
            self.findall()
        match = translate_pattern(os.path.join('**', pattern))
        found = [f for f in self.allfiles if match.match(f)]
        self.extend(found)
        return bool(found)

    def global_exclude(self, pattern):
        """
        Exclude all files anywhere that match the pattern.
        """
        match = translate_pattern(os.path.join('**', pattern))
        return self._remove_files(match.match)

    def append(self, item):
        """Append a single path, normalizing it and skipping unsafe ones."""
        if item.endswith('\r'):  # Fix older sdists built on Windows
            item = item[:-1]
        path = convert_path(item)

        if self._safe_path(path):
            self.files.append(path)

    def extend(self, paths):
        """Append every safe path from `paths` (unsafe ones are dropped)."""
        self.files.extend(filter(self._safe_path, paths))

    def _repair(self):
        """
        Replace self.files with only safe paths

        Because some owners of FileList manipulate the underlying
        ``files`` attribute directly, this method must be called to
        repair those paths.
        """
        self.files = list(filter(self._safe_path, self.files))

    def _safe_path(self, path):
        """Return True if `path` is decodable, UTF-8 encodable, and exists."""
        enc_warn = "'%s' not %s encodable -- skipping"

        # To avoid accidental trans-codings errors, first to unicode
        u_path = unicode_utils.filesys_decode(path)
        if u_path is None:
            log.warn("'%s' in unexpected encoding -- skipping" % path)
            return False

        # Must ensure utf-8 encodability
        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
        if utf8_path is None:
            log.warn(enc_warn, path, 'utf-8')
            return False

        try:
            # accept is either way checks out
            # NOTE: falls through (returning None, i.e. falsy) when neither
            # form of the path exists on disk.
            if os.path.exists(u_path) or os.path.exists(utf8_path):
                return True
        # this will catch any encode errors decoding u_path
        except UnicodeEncodeError:
            log.warn(enc_warn, path, sys.getfilesystemencoding())
|
523 |
+
|
524 |
+
|
525 |
+
class manifest_maker(sdist):
    """Build SOURCES.txt by combining sdist defaults, MANIFEST.in, license
    files, and version-control listings, then pruning build artifacts."""

    template = "MANIFEST.in"

    def initialize_options(self):
        # Always regenerate the manifest from defaults + template; never
        # actually build an sdist archive (manifest_only).
        self.use_defaults = 1
        self.prune = 1
        self.manifest_only = 1
        self.force_manifest = 1

    def finalize_options(self):
        # Nothing to finalize; options are fixed in initialize_options.
        pass

    def run(self):
        """Collect all source files and (re)write the manifest."""
        self.filelist = FileList()
        if not os.path.exists(self.manifest):
            self.write_manifest()  # it must exist so it'll get in the list
        self.add_defaults()
        if os.path.exists(self.template):
            self.read_template()
        self.add_license_files()
        self.prune_file_list()
        self.filelist.sort()
        self.filelist.remove_duplicates()
        self.write_manifest()

    def _manifest_normalize(self, path):
        # Manifest entries are always written with '/' separators.
        path = unicode_utils.filesys_decode(path)
        return path.replace(os.sep, '/')

    def write_manifest(self):
        """
        Write the file list in 'self.filelist' to the manifest file
        named by 'self.manifest'.
        """
        self.filelist._repair()

        # _repair ensured encodability; normalize separators for the manifest.
        files = [self._manifest_normalize(f) for f in self.filelist.files]
        msg = "writing manifest file '%s'" % self.manifest
        self.execute(write_file, (self.manifest, files), msg)

    def warn(self, msg):
        """Forward warnings to sdist, suppressing known-noise messages."""
        if not self._should_suppress_warning(msg):
            sdist.warn(self, msg)

    @staticmethod
    def _should_suppress_warning(msg):
        """
        suppress missing-file warnings from sdist
        """
        return re.match(r"standard file .*not found", msg)

    def add_defaults(self):
        """Add sdist defaults plus template, manifest, VCS files, setup.py,
        and the egg-info directory itself."""
        sdist.add_defaults(self)
        self.filelist.append(self.template)
        self.filelist.append(self.manifest)
        rcfiles = list(walk_revctrl())
        if rcfiles:
            self.filelist.extend(rcfiles)
        elif os.path.exists(self.manifest):
            # No version control available: trust the existing manifest.
            self.read_manifest()

        if os.path.exists("setup.py"):
            # setup.py should be included by default, even if it's not
            # the script called to create the sdist
            self.filelist.append("setup.py")

        ei_cmd = self.get_finalized_command('egg_info')
        self.filelist.graft(ei_cmd.egg_info)

    def add_license_files(self):
        """Add the distribution's declared license files to the manifest."""
        license_files = self.distribution.metadata.license_files or []
        for lf in license_files:
            log.info("adding license file '%s'", lf)
        self.filelist.extend(license_files)

    def prune_file_list(self):
        """Drop build output, the dist staging dir, and VCS bookkeeping."""
        build = self.get_finalized_command('build')
        base_dir = self.distribution.get_fullname()
        self.filelist.prune(build.build_base)
        self.filelist.prune(base_dir)
        sep = re.escape(os.sep)
        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
                                      is_regex=1)

    def _safe_data_files(self, build_py):
        """
        The parent class implementation of this method
        (``sdist``) will try to include data files, which
        might cause recursion problems when
        ``include_package_data=True``.

        Therefore, avoid triggering any attempt of
        analyzing/building the manifest again.
        """
        if hasattr(build_py, 'get_data_files_without_manifest'):
            return build_py.get_data_files_without_manifest()

        warnings.warn(
            "Custom 'build_py' does not implement "
            "'get_data_files_without_manifest'.\nPlease extend command classes"
            " from setuptools instead of distutils.",
            SetuptoolsDeprecationWarning
        )
        return build_py.get_data_files()
|
631 |
+
|
632 |
+
|
633 |
+
def write_file(filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    # Join with '\n' (never os.linesep) and write bytes so the manifest is
    # POSIX-style on every platform; contents were vetted as UTF-8 upstream.
    payload = "\n".join(contents).encode("utf-8")
    with open(filename, "wb") as f:
        f.write(payload)
|
644 |
+
|
645 |
+
|
646 |
+
def write_pkg_info(cmd, basename, filename):
    """Write PKG-INFO into the egg-info dir, plus the zip-safety flag.

    Temporarily swaps the (possibly tagged) egg name/version into the
    metadata so the written file reflects them, then restores the originals.
    """
    log.info("writing %s", filename)
    if cmd.dry_run:
        return

    metadata = cmd.distribution.metadata
    saved_version = metadata.version
    saved_name = metadata.name
    metadata.version = cmd.egg_version
    metadata.name = cmd.egg_name

    try:
        # write unescaped data to PKG-INFO, so older pkg_resources
        # can still parse it
        metadata.write_pkg_info(cmd.egg_info)
    finally:
        metadata.name = saved_name
        metadata.version = saved_version

    safe = getattr(cmd.distribution, 'zip_safe', None)

    bdist_egg.write_safety_flag(cmd.egg_info, safe)
|
663 |
+
|
664 |
+
|
665 |
+
def warn_depends_obsolete(cmd, basename, filename):
    """Warn when a legacy depends.txt file is present; it is ignored."""
    if not os.path.exists(filename):
        return
    log.warn(
        "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
        "Use the install_requires/extras_require setup() args instead."
    )
|
671 |
+
|
672 |
+
|
673 |
+
def _write_requirements(stream, reqs):
    """Write requirement strings from `reqs` to `stream`, sorted, one per
    line with a trailing newline each."""
    raw_lines = yield_lines(reqs or ())
    stream.writelines(line + '\n' for line in sorted(raw_lines))
|
680 |
+
|
681 |
+
|
682 |
+
def write_requirements(cmd, basename, filename):
    """Write requires.txt: install_requires first, then one [extra]
    section per extras_require key, in sorted order."""
    dist = cmd.distribution
    buf = io.StringIO()
    _write_requirements(buf, dist.install_requires)
    extras = dist.extras_require or {}
    for extra in sorted(extras):
        buf.write('\n[{extra}]\n'.format(extra=extra))
        _write_requirements(buf, extras[extra])
    cmd.write_or_delete_file("requirements", filename, buf.getvalue())
|
691 |
+
|
692 |
+
|
693 |
+
def write_setup_requirements(cmd, basename, filename):
    """Write setup_requires.txt from the distribution's setup requirements."""
    buf = io.StringIO()
    _write_requirements(buf, cmd.distribution.setup_requires)
    cmd.write_or_delete_file("setup-requirements", filename, buf.getvalue())
|
697 |
+
|
698 |
+
|
699 |
+
def write_toplevel_names(cmd, basename, filename):
    """Write top_level.txt: the unique top-level package/module names,
    sorted, one per line."""
    top_level = {
        name.split('.', 1)[0]
        for name in cmd.distribution.iter_distribution_names()
    }
    cmd.write_file("top-level names", filename, '\n'.join(sorted(top_level)) + '\n')
|
707 |
+
|
708 |
+
|
709 |
+
def overwrite_arg(cmd, basename, filename):
    """Like ``write_arg`` but with force=True: an existing file is deleted
    even when the corresponding distribution attribute is unset."""
    write_arg(cmd, basename, filename, True)
|
711 |
+
|
712 |
+
|
713 |
+
def write_arg(cmd, basename, filename, force=False):
    """Write (or delete) a metadata file mirroring a distribution attribute.

    The attribute name is `basename` with its extension stripped; its value
    (a sequence of lines) is joined with newlines, or passed through as None
    when the attribute is unset.
    """
    argname, _ = os.path.splitext(basename)
    value = getattr(cmd.distribution, argname, None)
    if value is None:
        cmd.write_or_delete_file(argname, filename, None, force)
    else:
        cmd.write_or_delete_file(argname, filename, '\n'.join(value) + '\n', force)
|
719 |
+
|
720 |
+
|
721 |
+
def write_entries(cmd, basename, filename):
    """Write entry_points.txt from the distribution's entry_points option.

    Accepts None, a ready-made ini string, or a mapping of section name to
    either an ini fragment or an iterable of entry-point specs.
    """
    ep = cmd.distribution.entry_points

    if ep is None or isinstance(ep, str):
        # Absent or already a literal ini string: pass through untouched.
        data = ep
    else:
        sections = []
        for section, contents in sorted(ep.items()):
            if not isinstance(contents, str):
                parsed = EntryPoint.parse_group(section, contents)
                contents = '\n'.join(sorted(map(str, parsed.values())))
            sections.append('[%s]\n%s\n\n' % (section, contents))
        data = ''.join(sections)

    cmd.write_or_delete_file('entry points', filename, data, True)
|
736 |
+
|
737 |
+
|
738 |
+
def get_pkg_info_revision():
    """
    Get a -r### off of PKG-INFO Version in case this is an sdist of
    a subversion revision.
    """
    warnings.warn(
        "get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
    if not os.path.exists('PKG-INFO'):
        return 0
    version_rev = re.compile(r"Version:.*-r(\d+)\s*$")
    with io.open('PKG-INFO') as f:
        for line in f:
            hit = version_rev.match(line)
            if hit:
                return int(hit.group(1))
    return 0
|
752 |
+
|
753 |
+
|
754 |
+
# Subclassing SetuptoolsDeprecationWarning keeps these warnings visible even
# when generic DeprecationWarnings are filtered out.
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
    """Deprecated behavior warning for EggInfo, bypassing suppression."""
|
llmeval-env/lib/python3.10/site-packages/setuptools/command/install_egg_info.py
ADDED
@@ -0,0 +1,82 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os, sys

from distutils import log, dir_util
from distutils.errors import DistutilsOptionError

from setuptools import Command
from setuptools import namespaces
from setuptools.archive_util import unpack_archive
import pkg_resources
|
8 |
+
|
9 |
+
|
10 |
+
class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package"""

    description = "Install an .egg-info directory for the package"

    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]

    def initialize_options(self):
        self.install_dir = None
        self.install_layout = None
        self.prefix_option = None

    def finalize_options(self):
        self.set_undefined_options('install_lib',
                                   ('install_dir', 'install_dir'))
        self.set_undefined_options('install',
                                   ('install_layout', 'install_layout'))
        if sys.hexversion > 0x2060000:
            self.set_undefined_options('install',
                                       ('prefix_option', 'prefix_option'))
        ei_cmd = self.get_finalized_command("egg_info")
        basename = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        ).egg_name() + '.egg-info'

        if self.install_layout:
            # Fixed: DistutilsOptionError was previously unimported here,
            # so an invalid --install-layout raised NameError instead.
            if not self.install_layout.lower() in ['deb']:
                raise DistutilsOptionError(
                    "unknown value for --install-layout")
            self.install_layout = self.install_layout.lower()
            # Debian layout omits the '-pyX.Y' tag from the directory name.
            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
        elif self.prefix_option or 'real_prefix' in sys.__dict__:
            # don't modify for virtualenv
            pass
        else:
            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')

        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        # Populated by copytree() with every installed path.
        self.outputs = []

    def run(self):
        self.run_command('egg_info')
        # Remove any stale target first (directory tree or plain file/link).
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            pkg_resources.ensure_directory(self.target)
        self.execute(
            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()

    def get_outputs(self):
        return self.outputs

    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform. 'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            if self.install_layout and self.install_layout in ['deb'] and src.startswith('SOURCES.txt'):
                log.info("Skipping SOURCES.txt")
                return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst

        unpack_archive(self.source, self.target, skimmer)
|
llmeval-env/lib/python3.10/site-packages/setuptools/command/install_lib.py
ADDED
@@ -0,0 +1,148 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import sys
|
3 |
+
from itertools import product, starmap
|
4 |
+
import distutils.command.install_lib as orig
|
5 |
+
|
6 |
+
|
7 |
+
class install_lib(orig.install_lib):
    """Don't add compiled flags to filenames of non-Python files"""

    def initialize_options(self):
        """Initialize distutils options plus Debian-layout extras."""
        orig.install_lib.initialize_options(self)
        # Multiarch triplet (e.g. 'x86_64-linux-gnu'); set only for 'deb'
        # install layouts in finalize_options.
        self.multiarch = None
        # Install layout name inherited from the 'install' command.
        self.install_layout = None

    def finalize_options(self):
        """Inherit install_layout from 'install'; for Debian layouts,
        record the multiarch triplet used to rename extension modules."""
        orig.install_lib.finalize_options(self)
        self.set_undefined_options('install',('install_layout','install_layout'))
        # NOTE(review): the version check is always true on any supported
        # Python 3; presumably kept for historical 3.2 compatibility.
        if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3):
            import sysconfig
            self.multiarch = sysconfig.get_config_var('MULTIARCH')

    def run(self):
        """Build, install, and byte-compile the library tree."""
        self.build()
        outfiles = self.install()
        if outfiles is not None:
            # always compile, in case we have any extension stubs to deal with
            self.byte_compile(outfiles)

    def get_exclusions(self):
        """
        Return a collections.Sized collections.Container of paths to be
        excluded for single_version_externally_managed installations.
        """
        # For each namespace package, exclude paths in it and in every
        # ancestor package (e.g. 'foo.bar' also excludes paths under 'foo').
        all_packages = (
            pkg
            for ns_pkg in self._get_SVEM_NSPs()
            for pkg in self._all_packages(ns_pkg)
        )

        excl_specs = product(all_packages, self._gen_exclusion_paths())
        return set(starmap(self._exclude_pkg_path, excl_specs))

    def _exclude_pkg_path(self, pkg, exclusion_path):
        """
        Given a package name and exclusion path within that package,
        compute the full exclusion path.
        """
        parts = pkg.split('.') + [exclusion_path]
        return os.path.join(self.install_dir, *parts)

    @staticmethod
    def _all_packages(pkg_name):
        """
        Yield the package and each of its ancestor packages.

        >>> list(install_lib._all_packages('foo.bar.baz'))
        ['foo.bar.baz', 'foo.bar', 'foo']
        """
        while pkg_name:
            yield pkg_name
            # Strip the last dotted component; rpartition yields '' when
            # no dot remains, terminating the loop.
            pkg_name, sep, child = pkg_name.rpartition('.')

    def _get_SVEM_NSPs(self):
        """
        Get namespace packages (list) but only for
        single_version_externally_managed installations and empty otherwise.
        """
        # TODO: is it necessary to short-circuit here? i.e. what's the cost
        # if get_finalized_command is called even when namespace_packages is
        # False?
        if not self.distribution.namespace_packages:
            return []

        install_cmd = self.get_finalized_command('install')
        svem = install_cmd.single_version_externally_managed

        return self.distribution.namespace_packages if svem else []

    @staticmethod
    def _gen_exclusion_paths():
        """
        Generate file paths to be excluded for namespace packages (bytecode
        cache files).
        """
        # always exclude the package module itself
        yield '__init__.py'

        # Legacy (pre-PEP 3147) bytecode locations.
        yield '__init__.pyc'
        yield '__init__.pyo'

        if not hasattr(sys, 'implementation'):
            return

        # PEP 3147 __pycache__ variants, tagged with the implementation's
        # cache tag (e.g. 'cpython-310').
        base = os.path.join(
            '__pycache__', '__init__.' + sys.implementation.cache_tag)
        yield base + '.pyc'
        yield base + '.pyo'
        yield base + '.opt-1.pyc'
        yield base + '.opt-2.pyc'

    def copy_tree(
            self, infile, outfile,
            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
    ):
        """Copy the built tree to the install dir, skipping excluded
        namespace-package files and renaming extensions for multiarch.

        Returns the list of destination paths that were copied.
        """
        assert preserve_mode and preserve_times and not preserve_symlinks
        exclude = self.get_exclusions()

        if not exclude:
            # NOTE(review): monkey-patches a private attribute on
            # distutils.dir_util so a (presumably Debian-patched) distutils
            # can rename extensions itself — confirm against local distutils.
            import distutils.dir_util
            distutils.dir_util._multiarch = self.multiarch
            return orig.install_lib.copy_tree(self, infile, outfile)

        # Exclude namespace package __init__.py* files from the output

        from setuptools.archive_util import unpack_directory
        from distutils import log

        outfiles = []

        if self.multiarch:
            import sysconfig
            ext_suffix = sysconfig.get_config_var ('EXT_SUFFIX')
            # NOTE(review): the [-3:] slicing assumes the suffix ends in a
            # 3-char extension (presumably '.so') — confirm on this platform.
            if ext_suffix.endswith(self.multiarch + ext_suffix[-3:]):
                # Suffix already carries the multiarch triplet; no rename.
                new_suffix = None
            else:
                new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:])

        def pf(src, dst):
            # Filter callback: False skips the file; otherwise the returned
            # (possibly renamed) destination is used.
            if dst in exclude:
                log.warn("Skipping installation of %s (namespace package)",
                         dst)
                return False

            if self.multiarch and new_suffix and dst.endswith(ext_suffix) and not dst.endswith(new_suffix):
                dst = dst.replace(ext_suffix, new_suffix)
                log.info("renaming extension to %s", os.path.basename(dst))

            log.info("copying %s -> %s", src, os.path.dirname(dst))
            outfiles.append(dst)
            return dst

        unpack_directory(infile, outfile, pf)
        return outfiles

    def get_outputs(self):
        """Return installed files, minus any excluded namespace-package
        bytecode/__init__ files."""
        outputs = orig.install_lib.get_outputs(self)
        exclude = self.get_exclusions()
        if exclude:
            return [f for f in outputs if f not in exclude]
        return outputs
|
llmeval-env/lib/python3.10/site-packages/setuptools/command/saveopts.py
ADDED
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from setuptools.command.setopt import edit_config, option_base
|
2 |
+
|
3 |
+
|
4 |
+
class saveopts(option_base):
    """Save command-line options to a file"""

    description = "save supplied options to setup.cfg or other config file"

    def run(self):
        """Collect options that came from the command line and persist them."""
        dist = self.distribution
        settings = {}

        for command in dist.command_options:
            if command == 'saveopts':
                # don't save our own options!
                continue
            option_dict = dist.get_option_dict(command)
            for option, (source, value) in option_dict.items():
                # Only persist values actually supplied on the command line,
                # not ones already read from config files.
                if source == "command line":
                    settings.setdefault(command, {})[option] = value

        edit_config(self.filename, settings, self.dry_run)
|
llmeval-env/lib/python3.10/site-packages/setuptools/command/test.py
ADDED
@@ -0,0 +1,252 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import operator
|
3 |
+
import sys
|
4 |
+
import contextlib
|
5 |
+
import itertools
|
6 |
+
import unittest
|
7 |
+
from distutils.errors import DistutilsError, DistutilsOptionError
|
8 |
+
from distutils import log
|
9 |
+
from unittest import TestLoader
|
10 |
+
|
11 |
+
from pkg_resources import (
|
12 |
+
resource_listdir,
|
13 |
+
resource_exists,
|
14 |
+
normalize_path,
|
15 |
+
working_set,
|
16 |
+
evaluate_marker,
|
17 |
+
add_activation_listener,
|
18 |
+
require,
|
19 |
+
EntryPoint,
|
20 |
+
)
|
21 |
+
from setuptools import Command
|
22 |
+
from setuptools.extern.more_itertools import unique_everseen
|
23 |
+
|
24 |
+
|
25 |
+
class ScanningLoader(TestLoader):
    """Test loader that also scans packages for submodule tests."""

    def __init__(self):
        TestLoader.__init__(self)
        # Modules already processed, so re-entrant loads return None
        # instead of duplicating tests.
        self._visited = set()

    def loadTestsFromModule(self, module, pattern=None):
        """Return a suite of all tests cases contained in the given module

        If the module is a package, load tests from all the modules in it.
        If the module has an ``additional_tests`` function, call it and add
        the return value to the tests.
        """
        if module in self._visited:
            return None
        self._visited.add(module)

        collected = [TestLoader.loadTestsFromModule(self, module)]

        if hasattr(module, "additional_tests"):
            collected.append(module.additional_tests())

        if hasattr(module, '__path__'):
            # The module is a package: recurse into plain modules and
            # subpackages discovered among its resources.
            for entry in resource_listdir(module.__name__, ''):
                if entry.endswith('.py') and entry != '__init__.py':
                    submodule = module.__name__ + '.' + entry[:-3]
                elif resource_exists(module.__name__, entry + '/__init__.py'):
                    submodule = module.__name__ + '.' + entry
                else:
                    continue
                collected.append(self.loadTestsFromName(submodule))

        if len(collected) == 1:
            # don't create a nested suite for only one return
            return collected[0]
        return self.suiteClass(collected)
|
62 |
+
|
63 |
+
|
64 |
+
# adapted from jaraco.classes.properties:NonDataProperty
|
65 |
+
class NonDataProperty:
    """Non-data descriptor version of ``property``.

    Because it defines no ``__set__``, an instance attribute of the same
    name shadows the computed value.  Adapted from
    jaraco.classes.properties:NonDataProperty.
    """

    def __init__(self, fget):
        self.fget = fget

    def __get__(self, obj, objtype=None):
        # Class-level access returns the descriptor itself.
        return self if obj is None else self.fget(obj)
|
73 |
+
|
74 |
+
|
75 |
+
class test(Command):
    """Command to run unit tests after in-place build"""

    description = "run unit tests after in-place build (deprecated)"

    user_options = [
        ('test-module=', 'm', "Run 'test_suite' in specified module"),
        (
            'test-suite=',
            's',
            "Run single test, case or suite (e.g. 'module.test_suite')",
        ),
        ('test-runner=', 'r', "Test runner to use"),
    ]

    def initialize_options(self):
        """Set default (unset) values for all test options."""
        self.test_suite = None
        self.test_module = None
        self.test_loader = None
        self.test_runner = None

    def finalize_options(self):
        """Resolve test suite/loader/runner from options or the distribution.

        Raises DistutilsOptionError if both a module and a suite were given.
        """

        if self.test_suite and self.test_module:
            msg = "You may specify a module or a suite, but not both"
            raise DistutilsOptionError(msg)

        if self.test_suite is None:
            if self.test_module is None:
                self.test_suite = self.distribution.test_suite
            else:
                self.test_suite = self.test_module + ".test_suite"

        if self.test_loader is None:
            self.test_loader = getattr(self.distribution, 'test_loader', None)
        if self.test_loader is None:
            # Default to the package-scanning loader defined in this module.
            self.test_loader = "setuptools.command.test:ScanningLoader"
        if self.test_runner is None:
            self.test_runner = getattr(self.distribution, 'test_runner', None)

    @NonDataProperty
    def test_args(self):
        # Computed lazily; being a non-data property, an instance attribute
        # of the same name can override it.
        return list(self._test_args())

    def _test_args(self):
        """Yield the unittest CLI arguments implied by the current options."""
        # NOTE(review): the (2, 7) check is always true on Python 3;
        # presumably a Python 2 leftover.
        if not self.test_suite and sys.version_info >= (2, 7):
            yield 'discover'
        if self.verbose:
            yield '--verbose'
        if self.test_suite:
            yield self.test_suite

    def with_project_on_sys_path(self, func):
        """
        Backward compatibility for project_on_sys_path context.
        """
        with self.project_on_sys_path():
            func()

    @contextlib.contextmanager
    def project_on_sys_path(self, include_dists=[]):
        # NOTE(review): include_dists is unused in this body and has a
        # mutable default; kept only for API compatibility.
        self.run_command('egg_info')

        # Build extensions in-place
        self.reinitialize_command('build_ext', inplace=1)
        self.run_command('build_ext')

        ei_cmd = self.get_finalized_command("egg_info")

        # Snapshot interpreter state so it can be restored on exit.
        old_path = sys.path[:]
        old_modules = sys.modules.copy()

        try:
            project_path = normalize_path(ei_cmd.egg_base)
            sys.path.insert(0, project_path)
            # Re-initialize the global working set so it picks up the
            # project path just inserted.
            working_set.__init__()
            add_activation_listener(lambda dist: dist.activate())
            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
            with self.paths_on_pythonpath([project_path]):
                yield
        finally:
            # Restore sys.path, sys.modules, and the working set exactly.
            sys.path[:] = old_path
            sys.modules.clear()
            sys.modules.update(old_modules)
            working_set.__init__()

    @staticmethod
    @contextlib.contextmanager
    def paths_on_pythonpath(paths):
        """
        Add the indicated paths to the head of the PYTHONPATH environment
        variable so that subprocesses will also see the packages at
        these paths.

        Do this in a context that restores the value on exit.
        """
        # Sentinel distinguishes "unset" from "set to empty string".
        nothing = object()
        orig_pythonpath = os.environ.get('PYTHONPATH', nothing)
        current_pythonpath = os.environ.get('PYTHONPATH', '')
        try:
            prefix = os.pathsep.join(unique_everseen(paths))
            # Drop empty components before joining.
            to_join = filter(None, [prefix, current_pythonpath])
            new_path = os.pathsep.join(to_join)
            if new_path:
                os.environ['PYTHONPATH'] = new_path
            yield
        finally:
            if orig_pythonpath is nothing:
                os.environ.pop('PYTHONPATH', None)
            else:
                os.environ['PYTHONPATH'] = orig_pythonpath

    @staticmethod
    def install_dists(dist):
        """
        Install the requirements indicated by self.distribution and
        return an iterable of the dists that were built.
        """
        ir_d = dist.fetch_build_eggs(dist.install_requires)
        tr_d = dist.fetch_build_eggs(dist.tests_require or [])
        # Extras whose name is a leading-colon environment marker
        # (e.g. ':python_version<"3"') that evaluates true.
        er_d = dist.fetch_build_eggs(
            v
            for k, v in dist.extras_require.items()
            if k.startswith(':') and evaluate_marker(k[1:])
        )
        return itertools.chain(ir_d, tr_d, er_d)

    def run(self):
        """Install requirements, then run the tests (skipped on dry-run)."""
        self.announce(
            "WARNING: Testing via this command is deprecated and will be "
            "removed in a future version. Users looking for a generic test "
            "entry point independent of test runner are encouraged to use "
            "tox.",
            log.WARN,
        )

        installed_dists = self.install_dists(self.distribution)

        cmd = ' '.join(self._argv)
        if self.dry_run:
            self.announce('skipping "%s" (dry run)' % cmd)
            return

        self.announce('running "%s"' % cmd)

        paths = map(operator.attrgetter('location'), installed_dists)
        with self.paths_on_pythonpath(paths):
            with self.project_on_sys_path():
                self.run_tests()

    def run_tests(self):
        """Invoke unittest with the resolved loader/runner; raise on failure."""
        # NOTE: local name shadows the enclosing class name.
        test = unittest.main(
            None,
            None,
            self._argv,
            testLoader=self._resolve_as_ep(self.test_loader),
            testRunner=self._resolve_as_ep(self.test_runner),
            exit=False,
        )
        if not test.result.wasSuccessful():
            msg = 'Test failed: %s' % test.result
            self.announce(msg, log.ERROR)
            raise DistutilsError(msg)

    @property
    def _argv(self):
        # argv[0] placeholder followed by the computed unittest arguments.
        return ['unittest'] + self.test_args

    @staticmethod
    def _resolve_as_ep(val):
        """
        Load the indicated attribute value and call it, as if it were
        specified as an entry point (e.g. ``"module:attr"``).
        Returns None when *val* is None.
        """
        if val is None:
            return
        parsed = EntryPoint.parse("x=" + val)
        return parsed.resolve()()
|