applied-ai-018 committed on
Commit
1ccce62
·
verified ·
1 Parent(s): 4fb4f4f

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__about__.py +26 -0
  2. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__init__.py +25 -0
  3. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-310.pyc +0 -0
  12. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-310.pyc +0 -0
  13. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-310.pyc +0 -0
  14. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/_manylinux.py +301 -0
  15. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/_musllinux.py +136 -0
  16. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/_structures.py +61 -0
  17. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/markers.py +304 -0
  18. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/requirements.py +146 -0
  19. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/specifiers.py +802 -0
  20. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/tags.py +487 -0
  21. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/utils.py +136 -0
  22. env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/version.py +504 -0
  23. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__init__.py +6 -0
  24. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-310.pyc +0 -0
  25. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-310.pyc +0 -0
  26. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/dirtools.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-310.pyc +0 -0
  31. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/meta.cpython-310.pyc +0 -0
  32. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-310.pyc +0 -0
  33. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/build.py +127 -0
  34. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/check.py +207 -0
  35. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/colorlog.py +115 -0
  36. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/compat.py +51 -0
  37. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/dirtools.py +44 -0
  38. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/envbuild.py +171 -0
  39. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__init__.py +17 -0
  40. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-310.pyc +0 -0
  41. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-310.pyc +0 -0
  42. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/_in_process.py +363 -0
  43. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/meta.py +92 -0
  44. env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/wrappers.py +375 -0
  45. env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/__init__.py +189 -0
  46. env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/bar.py +93 -0
  47. env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/colors.py +79 -0
  48. env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/spinner.py +45 -0
  49. env-llmeval/lib/python3.10/site-packages/pip/_vendor/rich/__init__.py +172 -0
  50. env-llmeval/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__about__.py ADDED
@@ -0,0 +1,26 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.

# Names re-exported by packaging/__init__.py as the package's public metadata.
__all__ = [
    "__title__",
    "__summary__",
    "__uri__",
    "__version__",
    "__author__",
    "__email__",
    "__license__",
    "__copyright__",
]

# Distribution metadata for the vendored ``packaging`` project.
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"

__version__ = "21.3"

__author__ = "Donald Stufft and individual contributors"
__email__ = "[email protected]"

__license__ = "BSD-2-Clause or Apache-2.0"
__copyright__ = "2014-2019 %s" % __author__
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__init__.py ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ from .__about__ import (
6
+ __author__,
7
+ __copyright__,
8
+ __email__,
9
+ __license__,
10
+ __summary__,
11
+ __title__,
12
+ __uri__,
13
+ __version__,
14
+ )
15
+
16
+ __all__ = [
17
+ "__title__",
18
+ "__summary__",
19
+ "__uri__",
20
+ "__version__",
21
+ "__author__",
22
+ "__email__",
23
+ "__license__",
24
+ "__copyright__",
25
+ ]
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__about__.cpython-310.pyc ADDED
Binary file (583 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (439 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_manylinux.cpython-310.pyc ADDED
Binary file (7.29 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_musllinux.cpython-310.pyc ADDED
Binary file (4.61 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/_structures.cpython-310.pyc ADDED
Binary file (2.7 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/markers.cpython-310.pyc ADDED
Binary file (9.28 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/requirements.cpython-310.pyc ADDED
Binary file (3.97 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/specifiers.cpython-310.pyc ADDED
Binary file (21.5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/tags.cpython-310.pyc ADDED
Binary file (12.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/utils.cpython-310.pyc ADDED
Binary file (3.57 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/__pycache__/version.cpython-310.pyc ADDED
Binary file (12.9 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/_manylinux.py ADDED
@@ -0,0 +1,301 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import functools
3
+ import os
4
+ import re
5
+ import struct
6
+ import sys
7
+ import warnings
8
+ from typing import IO, Dict, Iterator, NamedTuple, Optional, Tuple
9
+
10
+
11
# Python does not provide platform information at sufficient granularity to
# identify the architecture of the running executable in some cases, so we
# determine it dynamically by reading the information from the running
# process. This only applies on Linux, which uses the ELF format.
class _ELFFileHeader:
    # https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
    class _InvalidELFFileHeader(ValueError):
        """
        An invalid ELF file header was found.
        """

    # b"\x7fELF" read as a big-endian 32-bit integer.
    ELF_MAGIC_NUMBER = 0x7F454C46
    ELFCLASS32 = 1
    ELFCLASS64 = 2
    ELFDATA2LSB = 1  # little-endian data encoding
    ELFDATA2MSB = 2  # big-endian data encoding
    EM_386 = 3
    EM_S390 = 22
    EM_ARM = 40
    EM_X86_64 = 62
    EF_ARM_ABIMASK = 0xFF000000
    EF_ARM_ABI_VER5 = 0x05000000
    EF_ARM_ABI_FLOAT_HARD = 0x00000400

    def __init__(self, file: IO[bytes]) -> None:
        """Parse the ELF file header from *file* (positioned at offset 0).

        Raises ``_InvalidELFFileHeader`` if the magic number, class, or data
        encoding is unrecognized, or if the stream is too short.
        """

        def unpack(fmt: str) -> int:
            # Read exactly one field; a short read surfaces as struct.error.
            try:
                data = file.read(struct.calcsize(fmt))
                result: Tuple[int, ...] = struct.unpack(fmt, data)
            except struct.error:
                raise _ELFFileHeader._InvalidELFFileHeader()
            return result[0]

        self.e_ident_magic = unpack(">I")
        if self.e_ident_magic != self.ELF_MAGIC_NUMBER:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_class = unpack("B")
        if self.e_ident_class not in {self.ELFCLASS32, self.ELFCLASS64}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_data = unpack("B")
        if self.e_ident_data not in {self.ELFDATA2LSB, self.ELFDATA2MSB}:
            raise _ELFFileHeader._InvalidELFFileHeader()
        self.e_ident_version = unpack("B")
        self.e_ident_osabi = unpack("B")
        self.e_ident_abiversion = unpack("B")
        self.e_ident_pad = file.read(7)
        # Field width and byte order of the remaining fields depend on the
        # class (32/64-bit) and data encoding read above.
        format_h = "<H" if self.e_ident_data == self.ELFDATA2LSB else ">H"
        format_i = "<I" if self.e_ident_data == self.ELFDATA2LSB else ">I"
        format_q = "<Q" if self.e_ident_data == self.ELFDATA2LSB else ">Q"
        format_p = format_i if self.e_ident_class == self.ELFCLASS32 else format_q
        self.e_type = unpack(format_h)
        self.e_machine = unpack(format_h)
        self.e_version = unpack(format_i)
        self.e_entry = unpack(format_p)
        self.e_phoff = unpack(format_p)
        self.e_shoff = unpack(format_p)
        self.e_flags = unpack(format_i)
        self.e_ehsize = unpack(format_h)
        self.e_phentsize = unpack(format_h)
        self.e_phnum = unpack(format_h)
        self.e_shentsize = unpack(format_h)
        self.e_shnum = unpack(format_h)
        self.e_shstrndx = unpack(format_h)
74
+
75
+
76
def _get_elf_header() -> Optional[_ELFFileHeader]:
    """Parse the ELF header of the running interpreter binary.

    Returns None when ``sys.executable`` is unset, unreadable, or not a
    valid ELF file.
    """
    try:
        with open(sys.executable, "rb") as f:
            return _ELFFileHeader(f)
    except (OSError, TypeError, _ELFFileHeader._InvalidELFFileHeader):
        return None
83
+
84
+
85
def _is_linux_armhf() -> bool:
    """Return True if the interpreter is a 32-bit hard-float ARM build.

    The hard-float ABI can be detected from the ELF header of the running
    process: https://static.docs.arm.com/ihi0044/g/aaelf32.pdf
    """
    header = _get_elf_header()
    if header is None:
        return False
    if header.e_ident_class != header.ELFCLASS32:
        return False
    if header.e_ident_data != header.ELFDATA2LSB:
        return False
    if header.e_machine != header.EM_ARM:
        return False
    if header.e_flags & header.EF_ARM_ABIMASK != header.EF_ARM_ABI_VER5:
        return False
    return (
        header.e_flags & header.EF_ARM_ABI_FLOAT_HARD
        == header.EF_ARM_ABI_FLOAT_HARD
    )
102
+
103
+
104
def _is_linux_i686() -> bool:
    """Return True if the interpreter is a 32-bit little-endian x86 build."""
    header = _get_elf_header()
    if header is None:
        return False
    return (
        header.e_ident_class == header.ELFCLASS32
        and header.e_ident_data == header.ELFDATA2LSB
        and header.e_machine == header.EM_386
    )
112
+
113
+
114
def _have_compatible_abi(arch: str) -> bool:
    """Return True if *arch* can have manylinux-compatible wheels here.

    armv7l and i686 need an ELF-header inspection of the interpreter; the
    remaining 64-bit architectures are accepted by name alone.
    """
    special_checks = {
        "armv7l": _is_linux_armhf,
        "i686": _is_linux_i686,
    }
    check = special_checks.get(arch)
    if check is not None:
        return check()
    return arch in {"x86_64", "aarch64", "ppc64", "ppc64le", "s390x"}
120
+
121
+
122
# If glibc ever changes its major version, we need to know what the last
# minor version was, so we can build the complete list of all versions.
# For now, guess what the highest minor version might be, assume it will
# be 50 for testing. Once this actually happens, update the dictionary
# with the actual value.
_LAST_GLIBC_MINOR: Dict[int, int] = collections.defaultdict(lambda: 50)


class _GLibCVersion(NamedTuple):
    # (major, minor) glibc version; tuple comparison gives version ordering,
    # which _is_compatible relies on (``sys_glibc < version``).
    major: int
    minor: int
133
+
134
+
135
+ def _glibc_version_string_confstr() -> Optional[str]:
136
+ """
137
+ Primary implementation of glibc_version_string using os.confstr.
138
+ """
139
+ # os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
140
+ # to be broken or missing. This strategy is used in the standard library
141
+ # platform module.
142
+ # https://github.com/python/cpython/blob/fcf1d003bf4f0100c/Lib/platform.py#L175-L183
143
+ try:
144
+ # os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17".
145
+ version_string = os.confstr("CS_GNU_LIBC_VERSION")
146
+ assert version_string is not None
147
+ _, version = version_string.split()
148
+ except (AssertionError, AttributeError, OSError, ValueError):
149
+ # os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
150
+ return None
151
+ return version
152
+
153
+
154
def _glibc_version_string_ctypes() -> Optional[str]:
    """
    Fallback implementation of glibc_version_string using ctypes.

    Returns a string like "2.17", or None when ctypes is unavailable,
    dlopen fails, or the process is not linked against glibc.
    """
    try:
        import ctypes
    except ImportError:
        return None

    # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
    # manpage says, "If filename is NULL, then the returned handle is for the
    # main program". This way we can let the linker do the work to figure out
    # which libc our process is actually using.
    #
    # We must also handle the special case where the executable is not a
    # dynamically linked executable. This can occur when using musl libc,
    # for example. In this situation, dlopen() will error, leading to an
    # OSError. Interestingly, at least in the case of musl, there is no
    # errno set on the OSError. The single string argument used to construct
    # OSError comes from libc itself and is therefore not portable to
    # hard code here. In any case, failure to call dlopen() means we
    # can proceed, so we bail on our attempt.
    try:
        process_namespace = ctypes.CDLL(None)
    except OSError:
        return None

    try:
        gnu_get_libc_version = process_namespace.gnu_get_libc_version
    except AttributeError:
        # Symbol doesn't exist -> therefore, we are not linked to
        # glibc.
        return None

    # Call gnu_get_libc_version, which returns a string like "2.5"
    gnu_get_libc_version.restype = ctypes.c_char_p
    version_str: str = gnu_get_libc_version()
    # py2 / py3 compatibility:
    if not isinstance(version_str, str):
        version_str = version_str.decode("ascii")

    return version_str
196
+
197
+
198
def _glibc_version_string() -> Optional[str]:
    """Returns glibc version string, or None if not using glibc."""
    # Prefer the cheap os.confstr probe; fall back to ctypes when it
    # yields nothing.
    version = _glibc_version_string_confstr()
    if not version:
        version = _glibc_version_string_ctypes()
    return version
201
+
202
+
203
+ def _parse_glibc_version(version_str: str) -> Tuple[int, int]:
204
+ """Parse glibc version.
205
+
206
+ We use a regexp instead of str.split because we want to discard any
207
+ random junk that might come after the minor version -- this might happen
208
+ in patched/forked versions of glibc (e.g. Linaro's version of glibc
209
+ uses version strings like "2.20-2014.11"). See gh-3588.
210
+ """
211
+ m = re.match(r"(?P<major>[0-9]+)\.(?P<minor>[0-9]+)", version_str)
212
+ if not m:
213
+ warnings.warn(
214
+ "Expected glibc version with 2 components major.minor,"
215
+ " got: %s" % version_str,
216
+ RuntimeWarning,
217
+ )
218
+ return -1, -1
219
+ return int(m.group("major")), int(m.group("minor"))
220
+
221
+
222
@functools.lru_cache()
def _get_glibc_version() -> Tuple[int, int]:
    """Return the running glibc's (major, minor), or (-1, -1) when no
    glibc version string can be obtained. Cached for the process lifetime."""
    raw = _glibc_version_string()
    return (-1, -1) if raw is None else _parse_glibc_version(raw)
228
+
229
+
230
# From PEP 513, PEP 600
def _is_compatible(name: str, arch: str, version: _GLibCVersion) -> bool:
    # Decide whether a manylinux tag requiring *version* of glibc is usable.
    # ``name`` is the tag name; it is accepted for interface parity but the
    # decision here is driven by *version* and the optional _manylinux module.
    sys_glibc = _get_glibc_version()
    if sys_glibc < version:
        return False
    # Check for presence of _manylinux module.
    try:
        import _manylinux  # noqa
    except ImportError:
        # No override module installed: glibc version check alone decides.
        return True
    if hasattr(_manylinux, "manylinux_compatible"):
        # PEP 600 hook: may approve/veto any manylinux_x_y tag;
        # None means "no opinion", falling through to compatible.
        result = _manylinux.manylinux_compatible(version[0], version[1], arch)
        if result is not None:
            return bool(result)
        return True
    # Legacy per-era boolean attributes (PEP 513 / 571 / 599); only
    # consulted when the PEP 600 hook is absent.
    if version == _GLibCVersion(2, 5):
        if hasattr(_manylinux, "manylinux1_compatible"):
            return bool(_manylinux.manylinux1_compatible)
    if version == _GLibCVersion(2, 12):
        if hasattr(_manylinux, "manylinux2010_compatible"):
            return bool(_manylinux.manylinux2010_compatible)
    if version == _GLibCVersion(2, 17):
        if hasattr(_manylinux, "manylinux2014_compatible"):
            return bool(_manylinux.manylinux2014_compatible)
    return True
255
+
256
+
257
# Maps a (glibc major, minor) pair to the legacy manylinux tag alias that
# pinned that glibc version; used by platform_tags to also emit the old
# tag names alongside the PEP 600 manylinux_x_y form.
_LEGACY_MANYLINUX_MAP = {
    # CentOS 7 w/ glibc 2.17 (PEP 599)
    (2, 17): "manylinux2014",
    # CentOS 6 w/ glibc 2.12 (PEP 571)
    (2, 12): "manylinux2010",
    # CentOS 5 w/ glibc 2.5 (PEP 513)
    (2, 5): "manylinux1",
}
265
+
266
+
267
def platform_tags(linux: str, arch: str) -> Iterator[str]:
    """Yield the manylinux platform tags usable on this system.

    :param linux: Base platform tag, e.g. ``linux_x86_64``; the ``linux``
        substring is replaced with each applicable manylinux tag.
    :param arch: Architecture part of the tag, e.g. ``x86_64``.

    Tags are produced from the newest supported glibc version downward,
    including legacy manylinux1/2010/2014 aliases where they apply.
    """
    if not _have_compatible_abi(arch):
        return
    # Oldest glibc to be supported regardless of architecture is (2, 17).
    # (2, 16) here is an exclusive lower bound for the range() below.
    too_old_glibc2 = _GLibCVersion(2, 16)
    if arch in {"x86_64", "i686"}:
        # On x86/i686 also oldest glibc to be supported is (2, 5).
        too_old_glibc2 = _GLibCVersion(2, 4)
    current_glibc = _GLibCVersion(*_get_glibc_version())
    glibc_max_list = [current_glibc]
    # We can assume compatibility across glibc major versions.
    # https://sourceware.org/bugzilla/show_bug.cgi?id=24636
    #
    # Build a list of maximum glibc versions so that we can
    # output the canonical list of all glibc from current_glibc
    # down to too_old_glibc2, including all intermediary versions.
    for glibc_major in range(current_glibc.major - 1, 1, -1):
        glibc_minor = _LAST_GLIBC_MINOR[glibc_major]
        glibc_max_list.append(_GLibCVersion(glibc_major, glibc_minor))
    for glibc_max in glibc_max_list:
        if glibc_max.major == too_old_glibc2.major:
            min_minor = too_old_glibc2.minor
        else:
            # For other glibc major versions oldest supported is (x, 0).
            min_minor = -1
        for glibc_minor in range(glibc_max.minor, min_minor, -1):
            glibc_version = _GLibCVersion(glibc_max.major, glibc_minor)
            tag = "manylinux_{}_{}".format(*glibc_version)
            if _is_compatible(tag, arch, glibc_version):
                yield linux.replace("linux", tag)
            # Handle the legacy manylinux1, manylinux2010, manylinux2014 tags.
            if glibc_version in _LEGACY_MANYLINUX_MAP:
                legacy_tag = _LEGACY_MANYLINUX_MAP[glibc_version]
                if _is_compatible(legacy_tag, arch, glibc_version):
                    yield linux.replace("linux", legacy_tag)
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/_musllinux.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """PEP 656 support.
2
+
3
+ This module implements logic to detect if the currently running Python is
4
+ linked against musl, and what musl version is used.
5
+ """
6
+
7
+ import contextlib
8
+ import functools
9
+ import operator
10
+ import os
11
+ import re
12
+ import struct
13
+ import subprocess
14
+ import sys
15
+ from typing import IO, Iterator, NamedTuple, Optional, Tuple
16
+
17
+
18
+ def _read_unpacked(f: IO[bytes], fmt: str) -> Tuple[int, ...]:
19
+ return struct.unpack(fmt, f.read(struct.calcsize(fmt)))
20
+
21
+
22
def _parse_ld_musl_from_elf(f: IO[bytes]) -> Optional[str]:
    """Detect musl libc location by parsing the Python executable.

    Walks the ELF program headers looking for the PT_INTERP entry and
    returns its path when it mentions musl; otherwise returns None.

    Based on: https://gist.github.com/lyssdod/f51579ae8d93c8657a5564aefc2ffbca
    ELF header: https://refspecs.linuxfoundation.org/elf/gabi4+/ch4.eheader.html
    """
    f.seek(0)
    try:
        ident = _read_unpacked(f, "16B")
    except struct.error:
        return None
    if ident[:4] != tuple(b"\x7fELF"):  # Invalid magic, not ELF.
        return None
    f.seek(struct.calcsize("HHI"), 1)  # Skip file type, machine, and version.

    try:
        # e_fmt: Format for program header.
        # p_fmt: Format for section header.
        # p_idx: Indexes to find p_type, p_offset, and p_filesz.
        # Keyed by ident[4] (EI_CLASS): 1 = 32-bit ELF, 2 = 64-bit ELF.
        e_fmt, p_fmt, p_idx = {
            1: ("IIIIHHH", "IIIIIIII", (0, 1, 4)),  # 32-bit.
            2: ("QQQIHHH", "IIQQQQQQ", (0, 2, 5)),  # 64-bit.
        }[ident[4]]
    except KeyError:
        return None
    else:
        p_get = operator.itemgetter(*p_idx)

    # Find the interpreter section and return its content.
    try:
        _, e_phoff, _, _, _, e_phentsize, e_phnum = _read_unpacked(f, e_fmt)
    except struct.error:
        return None
    for i in range(e_phnum + 1):
        # Seek to the i-th program header entry.
        f.seek(e_phoff + e_phentsize * i)
        try:
            p_type, p_offset, p_filesz = p_get(_read_unpacked(f, p_fmt))
        except struct.error:
            return None
        if p_type != 3:  # Not PT_INTERP.
            continue
        f.seek(p_offset)
        # The interpreter path is NUL-terminated inside the segment.
        interpreter = os.fsdecode(f.read(p_filesz)).strip("\0")
        if "musl" not in interpreter:
            return None
        return interpreter
    return None
69
+
70
+
71
+ class _MuslVersion(NamedTuple):
72
+ major: int
73
+ minor: int
74
+
75
+
76
+ def _parse_musl_version(output: str) -> Optional[_MuslVersion]:
77
+ lines = [n for n in (n.strip() for n in output.splitlines()) if n]
78
+ if len(lines) < 2 or lines[0][:4] != "musl":
79
+ return None
80
+ m = re.match(r"Version (\d+)\.(\d+)", lines[1])
81
+ if not m:
82
+ return None
83
+ return _MuslVersion(major=int(m.group(1)), minor=int(m.group(2)))
84
+
85
+
86
@functools.lru_cache()
def _get_musl_version(executable: str) -> Optional[_MuslVersion]:
    """Detect currently-running musl runtime version.

    This is done by checking the specified executable's dynamic linking
    information, and invoking the loader to parse its output for a version
    string. If the loader is musl, the output would be something like::

        musl libc (x86_64)
        Version 1.2.2
        Dynamic Program Loader

    Returns None when *executable* is unreadable, not musl-linked, or the
    loader's banner cannot be parsed. Cached per *executable*.
    """
    with contextlib.ExitStack() as stack:
        try:
            f = stack.enter_context(open(executable, "rb"))
        except OSError:
            return None
        ld = _parse_ld_musl_from_elf(f)
        if not ld:
            return None
        # Running the loader with no arguments prints its version banner
        # to stderr, which is captured and parsed here.
        proc = subprocess.run([ld], stderr=subprocess.PIPE, universal_newlines=True)
        return _parse_musl_version(proc.stderr)
108
+
109
+
110
def platform_tags(arch: str) -> Iterator[str]:
    """Generate musllinux tags compatible to the current platform.

    :param arch: Should be the part of platform tag after the ``linux_``
        prefix, e.g. ``x86_64``. The ``linux_`` prefix is assumed as a
        prerequisite for the current platform to be musllinux-compatible.

    :returns: An iterator of compatible musllinux tags, newest minor first.
    """
    version = _get_musl_version(sys.executable)
    if version is None:  # Python not dynamically linked against musl.
        return
    minor = version.minor
    while minor >= 0:
        yield f"musllinux_{version.major}_{minor}_{arch}"
        minor -= 1
124
+
125
+
126
if __name__ == "__main__":  # pragma: no cover
    # Ad-hoc diagnostic entry point: print the sysconfig platform, the
    # detected musl version, and every generated musllinux tag.
    import sysconfig

    plat = sysconfig.get_platform()
    assert plat.startswith("linux-"), "not linux"

    print("plat:", plat)
    print("musl:", _get_musl_version(sys.executable))
    print("tags:", end=" ")
    # Normalize e.g. "linux-x86-64" -> "x86_64" before generating tags.
    for t in platform_tags(re.sub(r"[.-]", "_", plat.split("-", 1)[-1])):
        print(t, end="\n ")
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/_structures.py ADDED
@@ -0,0 +1,61 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+
6
class InfinityType:
    """Sentinel that compares greater than every other object.

    All instances compare equal to each other, and ``-Infinity`` yields the
    NegativeInfinity sentinel.
    """

    def __repr__(self) -> str:
        return "Infinity"

    def __hash__(self) -> int:
        return hash(repr(self))

    def __eq__(self, other: object) -> bool:
        return isinstance(other, self.__class__)

    def __lt__(self, other: object) -> bool:
        return False

    def __le__(self, other: object) -> bool:
        return False

    def __gt__(self, other: object) -> bool:
        return True

    def __ge__(self, other: object) -> bool:
        return True

    def __neg__(self: object) -> "NegativeInfinityType":
        return NegativeInfinity


# Shared singleton used as the "larger than anything" sentinel.
Infinity = InfinityType()
33
+
34
+
35
+ class NegativeInfinityType:
36
+ def __repr__(self) -> str:
37
+ return "-Infinity"
38
+
39
+ def __hash__(self) -> int:
40
+ return hash(repr(self))
41
+
42
+ def __lt__(self, other: object) -> bool:
43
+ return True
44
+
45
+ def __le__(self, other: object) -> bool:
46
+ return True
47
+
48
+ def __eq__(self, other: object) -> bool:
49
+ return isinstance(other, self.__class__)
50
+
51
+ def __gt__(self, other: object) -> bool:
52
+ return False
53
+
54
+ def __ge__(self, other: object) -> bool:
55
+ return False
56
+
57
+ def __neg__(self: object) -> InfinityType:
58
+ return Infinity
59
+
60
+
61
+ NegativeInfinity = NegativeInfinityType()
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/markers.py ADDED
@@ -0,0 +1,304 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import operator
6
+ import os
7
+ import platform
8
+ import sys
9
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
10
+
11
+ from pip._vendor.pyparsing import ( # noqa: N817
12
+ Forward,
13
+ Group,
14
+ Literal as L,
15
+ ParseException,
16
+ ParseResults,
17
+ QuotedString,
18
+ ZeroOrMore,
19
+ stringEnd,
20
+ stringStart,
21
+ )
22
+
23
+ from .specifiers import InvalidSpecifier, Specifier
24
+
25
+ __all__ = [
26
+ "InvalidMarker",
27
+ "UndefinedComparison",
28
+ "UndefinedEnvironmentName",
29
+ "Marker",
30
+ "default_environment",
31
+ ]
32
+
33
+ Operator = Callable[[str, str], bool]
34
+
35
+
36
# Exception types raised while parsing and evaluating PEP 508 markers.
class InvalidMarker(ValueError):
    """
    An invalid marker was found, users should refer to PEP 508.
    """


class UndefinedComparison(ValueError):
    """
    An invalid operation was attempted on a value that doesn't support it.
    """


class UndefinedEnvironmentName(ValueError):
    """
    A name was attempted to be used that does not exist inside of the
    environment.
    """
53
+
54
+
55
class Node:
    """Base class for a single token in a parsed marker expression.

    Wraps an arbitrary value; subclasses define how the token is rendered
    back into marker syntax via :meth:`serialize`.
    """

    def __init__(self, value: Any) -> None:
        self.value = value

    def __str__(self) -> str:
        return str(self.value)

    def __repr__(self) -> str:
        return "<{}('{}')>".format(self.__class__.__name__, self)

    def serialize(self) -> str:
        # Subclasses must provide the marker-syntax rendering.
        raise NotImplementedError
67
+
68
+
69
class Variable(Node):
    # A marker variable name (e.g. "python_version"); serialized bare.
    def serialize(self) -> str:
        return str(self)


class Value(Node):
    # A literal string operand; serialized wrapped in double quotes.
    def serialize(self) -> str:
        return f'"{self}"'


class Op(Node):
    # A comparison/membership operator token; serialized bare.
    def serialize(self) -> str:
        return str(self)
82
+
83
+
84
# pyparsing grammar for PEP 508 environment markers.

# Recognized marker variable names (canonical, PEP-345 dotted, and legacy
# setuptools spellings).
VARIABLE = (
    L("implementation_version")
    | L("platform_python_implementation")
    | L("implementation_name")
    | L("python_full_version")
    | L("platform_release")
    | L("platform_version")
    | L("platform_machine")
    | L("platform_system")
    | L("python_version")
    | L("sys_platform")
    | L("os_name")
    | L("os.name")  # PEP-345
    | L("sys.platform")  # PEP-345
    | L("platform.version")  # PEP-345
    | L("platform.machine")  # PEP-345
    | L("platform.python_implementation")  # PEP-345
    | L("python_implementation")  # undocumented setuptools legacy
    | L("extra")  # PEP-508
)
# Normalize the dotted PEP-345 spellings (and the setuptools legacy name)
# to their canonical underscore forms when building Variable nodes.
ALIASES = {
    "os.name": "os_name",
    "sys.platform": "sys_platform",
    "platform.version": "platform_version",
    "platform.machine": "platform_machine",
    "platform.python_implementation": "platform_python_implementation",
    "python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))

# Version comparison operators; "===" is listed before "==" so the longer
# literal is tried first.
VERSION_CMP = (
    L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)

MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))

# String literals may use either quote style.
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))

BOOLOP = L("and") | L("or")

MARKER_VAR = VARIABLE | MARKER_VALUE

# One comparison: <var-or-value> <op> <var-or-value>, reduced to a tuple
# of (Node, Node, Node).
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))

LPAREN = L("(").suppress()
RPAREN = L(")").suppress()

# Expressions combine items with and/or; parentheses group recursively.
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)

# A complete marker must span the whole input string.
MARKER = stringStart + MARKER_EXPR + stringEnd
139
+
140
+
141
def _coerce_parse_result(results: Union[ParseResults, List[Any]]) -> List[Any]:
    """Recursively convert pyparsing ParseResults into plain nested lists.

    Non-ParseResults values (strings, tuples, lists) are returned unchanged.
    """
    if not isinstance(results, ParseResults):
        return results
    return [_coerce_parse_result(item) for item in results]
146
+
147
+
148
def _format_marker(
    marker: Union[List[str], Tuple[Node, ...], str], first: Optional[bool] = True
) -> str:
    """Render a parsed marker structure back into its string form.

    ``first`` is True only at the outermost level; nested boolean groups are
    wrapped in parentheses.
    """
    assert isinstance(marker, (list, tuple, str))

    # A one-element list wrapping another list/tuple is an artifact of the
    # parse; unwrap it so we do not emit redundant parentheses around the
    # whole expression.
    if (
        isinstance(marker, list)
        and len(marker) == 1
        and isinstance(marker[0], (list, tuple))
    ):
        return _format_marker(marker[0])

    if isinstance(marker, str):
        return marker
    if isinstance(marker, tuple):
        # A single comparison: (Variable, Op, Value) nodes.
        return " ".join(node.serialize() for node in marker)

    # Remaining case: a list of sub-markers joined by boolean operators.
    joined = " ".join(_format_marker(sub, first=False) for sub in marker)
    return joined if first else f"({joined})"
176
+
177
# Fallback comparison callables for marker operators, used by _eval_op when
# the right-hand side does not parse as a PEP 440 specifier.  "in"/"not in"
# are plain substring/membership tests; the rest defer to the operator module.
_operators: Dict[str, Operator] = {
    "in": lambda lhs, rhs: lhs in rhs,
    "not in": lambda lhs, rhs: lhs not in rhs,
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "!=": operator.ne,
    ">=": operator.ge,
    ">": operator.gt,
}
188
+
189
def _eval_op(lhs: str, op: Op, rhs: str) -> bool:
    """Evaluate one marker comparison, preferring PEP 440 semantics.

    Raises UndefinedComparison if the operator has no fallback meaning.
    """
    # First try to interpret "<op><rhs>" as a PEP 440 version specifier; if it
    # parses, use the specifier's containment logic for the comparison.
    try:
        spec = Specifier(op.serialize() + rhs)
    except InvalidSpecifier:
        pass
    else:
        return spec.contains(lhs)

    # Fall back to plain string comparison / membership.
    oper: Optional[Operator] = _operators.get(op.serialize())
    if oper is None:
        raise UndefinedComparison(f"Undefined {op!r} on {lhs!r} and {rhs!r}.")

    return oper(lhs, rhs)
203
+
204
+ class Undefined:
205
+ pass
206
+
207
+
208
+ _undefined = Undefined()
209
+
210
+
211
+ def _get_env(environment: Dict[str, str], name: str) -> str:
212
+ value: Union[str, Undefined] = environment.get(name, _undefined)
213
+
214
+ if isinstance(value, Undefined):
215
+ raise UndefinedEnvironmentName(
216
+ f"{name!r} does not exist in evaluation environment."
217
+ )
218
+
219
+ return value
220
+
221
+
222
+ def _evaluate_markers(markers: List[Any], environment: Dict[str, str]) -> bool:
223
+ groups: List[List[bool]] = [[]]
224
+
225
+ for marker in markers:
226
+ assert isinstance(marker, (list, tuple, str))
227
+
228
+ if isinstance(marker, list):
229
+ groups[-1].append(_evaluate_markers(marker, environment))
230
+ elif isinstance(marker, tuple):
231
+ lhs, op, rhs = marker
232
+
233
+ if isinstance(lhs, Variable):
234
+ lhs_value = _get_env(environment, lhs.value)
235
+ rhs_value = rhs.value
236
+ else:
237
+ lhs_value = lhs.value
238
+ rhs_value = _get_env(environment, rhs.value)
239
+
240
+ groups[-1].append(_eval_op(lhs_value, op, rhs_value))
241
+ else:
242
+ assert marker in ["and", "or"]
243
+ if marker == "or":
244
+ groups.append([])
245
+
246
+ return any(all(item) for item in groups)
247
+
248
+
249
def format_full_version(info: "sys._version_info") -> str:
    """Render a sys.implementation-style version_info as a version string.

    Non-final release levels append the level's first letter plus the serial,
    e.g. (3, 11, 0, "beta", 2) -> "3.11.0b2".
    """
    version = f"{info.major}.{info.minor}.{info.micro}"
    if info.releaselevel != "final":
        version += info.releaselevel[0] + str(info.serial)
    return version


def default_environment() -> Dict[str, str]:
    """Build the PEP 508 marker environment for the running interpreter."""
    return {
        "implementation_name": sys.implementation.name,
        "implementation_version": format_full_version(sys.implementation.version),
        "os_name": os.name,
        "platform_machine": platform.machine(),
        "platform_release": platform.release(),
        "platform_system": platform.system(),
        "platform_version": platform.version(),
        "python_full_version": platform.python_version(),
        "platform_python_implementation": platform.python_implementation(),
        # python_version is only major.minor per PEP 508.
        "python_version": ".".join(platform.python_version_tuple()[:2]),
        "sys_platform": sys.platform,
    }
274
+
275
class Marker:
    """A parsed PEP 508 environment marker.

    Raises InvalidMarker from the constructor when the marker string does not
    parse.
    """

    def __init__(self, marker: str) -> None:
        try:
            self._markers = _coerce_parse_result(MARKER.parseString(marker))
        except ParseException as e:
            # Chain the pyparsing error explicitly: without "from e" Python
            # shows the noisy implicit "During handling of the above
            # exception..." traceback and loses the deliberate link to the
            # underlying parse failure.
            raise InvalidMarker(
                f"Invalid marker: {marker!r}, parse error at "
                f"{marker[e.loc : e.loc + 8]!r}"
            ) from e

    def __str__(self) -> str:
        # Serialize the parsed tree back to marker syntax.
        return _format_marker(self._markers)

    def __repr__(self) -> str:
        return f"<Marker('{self}')>"

    def evaluate(self, environment: Optional[Dict[str, str]] = None) -> bool:
        """Evaluate a marker.

        Return the boolean from evaluating the given marker against the
        environment. environment is an optional argument to override all or
        part of the determined environment.

        The environment is determined from the current Python process.
        """
        current_environment = default_environment()
        if environment is not None:
            current_environment.update(environment)

        return _evaluate_markers(self._markers, current_environment)
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/requirements.py ADDED
@@ -0,0 +1,146 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import re
6
+ import string
7
+ import urllib.parse
8
+ from typing import List, Optional as TOptional, Set
9
+
10
+ from pip._vendor.pyparsing import ( # noqa
11
+ Combine,
12
+ Literal as L,
13
+ Optional,
14
+ ParseException,
15
+ Regex,
16
+ Word,
17
+ ZeroOrMore,
18
+ originalTextFor,
19
+ stringEnd,
20
+ stringStart,
21
+ )
22
+
23
+ from .markers import MARKER_EXPR, Marker
24
+ from .specifiers import LegacySpecifier, Specifier, SpecifierSet
25
+
26
+
27
class InvalidRequirement(ValueError):
    """An invalid requirement was found, users should refer to PEP 508."""
33
# --- PEP 508 requirement grammar (pyparsing) ---

ALPHANUM = Word(string.ascii_letters + string.digits)

# Punctuation tokens; .suppress() keeps them out of the parse results.
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()

PUNCTUATION = Word("-_.")
# Identifiers may contain runs of -_. but must start and end alphanumeric.
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))

NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER

# A direct-reference URL: everything after "@" up to the next space.
URI = Regex(r"[^ ]+")("url")
URL = AT + URI

EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")

# Version clauses: prefer the PEP 440 form, fall back to the legacy form.
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)

VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(
    VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
# Version specs may optionally be parenthesized: "foo (>=1.0)".
_VERSION_SPEC = Optional((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")

VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])

# Re-wrap the marker grammar so the original marker text is recoverable and
# can be handed to Marker() for parsing.
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
    lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR

VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)

NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)

REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")
85
+
86
+
87
class Requirement:
    """Parse a requirement.

    Parse a given requirement string into its parts, such as name, specifier,
    URL, and extras. Raises InvalidRequirement on a badly-formed requirement
    string.
    """

    # TODO: Can we test whether something is contained within a requirement?
    #       If so how do we do that? Do we need to test against the _name_ of
    #       the thing as well as the version? What about the markers?
    # TODO: Can we normalize the name and extra name?

    def __init__(self, requirement_string: str) -> None:
        """Parse *requirement_string*; raises InvalidRequirement on error."""
        try:
            req = REQUIREMENT.parseString(requirement_string)
        except ParseException as e:
            raise InvalidRequirement(
                f'Parse error at "{ requirement_string[e.loc : e.loc + 8]!r}": {e.msg}'
            )

        self.name: str = req.name
        if req.url:
            parsed_url = urllib.parse.urlparse(req.url)
            if parsed_url.scheme == "file":
                # file: URLs must round-trip through urlunparse unchanged.
                if urllib.parse.urlunparse(parsed_url) != req.url:
                    raise InvalidRequirement("Invalid URL given")
            # NOTE(review): the second clause below is logically redundant
            # (it is implied by the first); kept byte-identical to preserve
            # upstream behavior.
            elif not (parsed_url.scheme and parsed_url.netloc) or (
                not parsed_url.scheme and not parsed_url.netloc
            ):
                raise InvalidRequirement(f"Invalid URL: {req.url}")
            self.url: TOptional[str] = req.url
        else:
            self.url = None
        self.extras: Set[str] = set(req.extras.asList() if req.extras else [])
        self.specifier: SpecifierSet = SpecifierSet(req.specifier)
        self.marker: TOptional[Marker] = req.marker if req.marker else None

    def __str__(self) -> str:
        """Re-serialize the requirement in canonical PEP 508 form."""
        parts: List[str] = [self.name]

        if self.extras:
            formatted_extras = ",".join(sorted(self.extras))
            parts.append(f"[{formatted_extras}]")

        if self.specifier:
            parts.append(str(self.specifier))

        if self.url:
            parts.append(f"@ {self.url}")
            # A URL followed by a marker needs a separating space before ";".
            if self.marker:
                parts.append(" ")

        if self.marker:
            parts.append(f"; {self.marker}")

        return "".join(parts)

    def __repr__(self) -> str:
        return f"<Requirement('{self}')>"
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/specifiers.py ADDED
@@ -0,0 +1,802 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import abc
6
+ import functools
7
+ import itertools
8
+ import re
9
+ import warnings
10
+ from typing import (
11
+ Callable,
12
+ Dict,
13
+ Iterable,
14
+ Iterator,
15
+ List,
16
+ Optional,
17
+ Pattern,
18
+ Set,
19
+ Tuple,
20
+ TypeVar,
21
+ Union,
22
+ )
23
+
24
+ from .utils import canonicalize_version
25
+ from .version import LegacyVersion, Version, parse
26
+
27
# Type aliases shared across this module: a "parsed" version is either a
# PEP 440 Version or a LegacyVersion; an "unparsed" one may still be a str.
ParsedVersion = Union[Version, LegacyVersion]
UnparsedVersion = Union[Version, LegacyVersion, str]
VersionTypeVar = TypeVar("VersionTypeVar", bound=UnparsedVersion)
# Signature of the bound _compare_* methods used by _get_operator().
CallableOperator = Callable[[ParsedVersion, str], bool]
31
+
32
+
33
class InvalidSpecifier(ValueError):
    """An invalid specifier was found, users should refer to PEP 440."""
37
+
38
+
39
class BaseSpecifier(metaclass=abc.ABCMeta):
    """Abstract interface implemented by individual specifiers and sets."""

    @abc.abstractmethod
    def __str__(self) -> str:
        """
        Returns the str representation of this Specifier like object. This
        should be representative of the Specifier itself.
        """

    @abc.abstractmethod
    def __hash__(self) -> int:
        """
        Returns a hash value for this Specifier like object.
        """

    @abc.abstractmethod
    def __eq__(self, other: object) -> bool:
        """
        Returns a boolean representing whether or not the two Specifier like
        objects are equal.
        """

    @abc.abstractproperty
    def prereleases(self) -> Optional[bool]:
        """
        Returns whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        """
        Sets whether or not pre-releases as a whole are allowed by this
        specifier.
        """

    @abc.abstractmethod
    def contains(self, item: str, prereleases: Optional[bool] = None) -> bool:
        """
        Determines if the given item is contained within this specifier.
        """

    @abc.abstractmethod
    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """
        Takes an iterable of items and filters them so that only items which
        are contained within this specifier are allowed in it.
        """
88
+
89
+
90
class _IndividualSpecifier(BaseSpecifier):
    """Common machinery for one (operator, version) specifier clause."""

    # Maps operator text ("==", ">=", ...) to a _compare_* method suffix;
    # populated by subclasses.
    _operators: Dict[str, str] = {}
    _regex: Pattern[str]

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        """Parse *spec* with the subclass regex; raises InvalidSpecifier."""
        match = self._regex.search(spec)
        if not match:
            raise InvalidSpecifier(f"Invalid specifier: '{spec}'")

        self._spec: Tuple[str, str] = (
            match.group("operator").strip(),
            match.group("version").strip(),
        )

        # Store whether or not this Specifier should accept prereleases
        self._prereleases = prereleases

    def __repr__(self) -> str:
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<{self.__class__.__name__}({str(self)!r}{pre})>"

    def __str__(self) -> str:
        return "{}{}".format(*self._spec)

    @property
    def _canonical_spec(self) -> Tuple[str, str]:
        # Canonicalized form so equivalent spellings hash/compare equal.
        return self._spec[0], canonicalize_version(self._spec[1])

    def __hash__(self) -> int:
        return hash(self._canonical_spec)

    def __eq__(self, other: object) -> bool:
        """Compare against another specifier, or a parseable string."""
        if isinstance(other, str):
            try:
                other = self.__class__(str(other))
            except InvalidSpecifier:
                return NotImplemented
        elif not isinstance(other, self.__class__):
            return NotImplemented

        return self._canonical_spec == other._canonical_spec

    def _get_operator(self, op: str) -> CallableOperator:
        # Dispatch the operator string to the bound _compare_* method.
        operator_callable: CallableOperator = getattr(
            self, f"_compare_{self._operators[op]}"
        )
        return operator_callable

    def _coerce_version(self, version: UnparsedVersion) -> ParsedVersion:
        # Parse raw strings; already-parsed versions pass straight through.
        if not isinstance(version, (LegacyVersion, Version)):
            version = parse(version)
        return version

    @property
    def operator(self) -> str:
        # The operator half of the clause, e.g. ">=".
        return self._spec[0]

    @property
    def version(self) -> str:
        # The version half of the clause, e.g. "1.0".
        return self._spec[1]

    @property
    def prereleases(self) -> Optional[bool]:
        return self._prereleases

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: str) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return True if *item* satisfies this specifier clause."""

        # Determine if prereleases are to be allowed or not.
        if prereleases is None:
            prereleases = self.prereleases

        # Normalize item to a Version or LegacyVersion, this allows us to have
        # a shortcut for ``"2.0" in Specifier(">=2")``
        normalized_item = self._coerce_version(item)

        # Determine if we should be supporting prereleases in this specifier
        # or not, if we do not support prereleases than we can short circuit
        # logic if this version is a prereleases.
        if normalized_item.is_prerelease and not prereleases:
            return False

        # Actually do the comparison to determine if this item is contained
        # within this Specifier or not.
        operator_callable: CallableOperator = self._get_operator(self.operator)
        return operator_callable(normalized_item, self.version)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """Yield the items of *iterable* that satisfy this clause.

        Pre-releases are withheld unless allowed, but are yielded as a last
        resort if nothing else matched.
        """

        yielded = False
        found_prereleases = []

        kw = {"prereleases": prereleases if prereleases is not None else True}

        # Attempt to iterate over all the values in the iterable and if any of
        # them match, yield them.
        for version in iterable:
            parsed_version = self._coerce_version(version)

            if self.contains(parsed_version, **kw):
                # If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
                # else matches this specifier.
                if parsed_version.is_prerelease and not (
                    prereleases or self.prereleases
                ):
                    found_prereleases.append(version)
                # Either this is not a prerelease, or we should have been
                # accepting prereleases from the beginning.
                else:
                    yielded = True
                    yield version

        # Now that we've iterated over everything, determine if we've yielded
        # any values, and if we have not and we have any prereleases stored up
        # then we will go ahead and yield the prereleases.
        if not yielded and found_prereleases:
            for version in found_prereleases:
                yield version
225
+
226
+
227
class LegacySpecifier(_IndividualSpecifier):
    """A deprecated, pre-PEP 440 ("legacy") version specifier clause.

    Comparisons are performed on LegacyVersion objects (string-ish ordering),
    not PEP 440 semantics.  Deprecated; construction emits a warning.
    """

    _regex_str = r"""
        (?P<operator>(==|!=|<=|>=|<|>))
        \s*
        (?P<version>
            [^,;\s)]* # Since this is a "legacy" specifier, and the version
                      # string can be just about anything, we match everything
                      # except for whitespace, a semi-colon for marker support,
                      # a closing paren since versions can be enclosed in
                      # them, and a comma since it's a version separator.
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
    }

    def __init__(self, spec: str = "", prereleases: Optional[bool] = None) -> None:
        super().__init__(spec, prereleases)

        warnings.warn(
            "Creating a LegacyVersion has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
        )

    def _coerce_version(self, version: UnparsedVersion) -> LegacyVersion:
        # Everything, including PEP 440 versions, is coerced to LegacyVersion
        # so comparisons use legacy ordering.
        if not isinstance(version, LegacyVersion):
            version = LegacyVersion(str(version))
        return version

    def _compare_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective == self._coerce_version(spec)

    def _compare_not_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective != self._coerce_version(spec)

    def _compare_less_than_equal(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective <= self._coerce_version(spec)

    def _compare_greater_than_equal(
        self, prospective: LegacyVersion, spec: str
    ) -> bool:
        return prospective >= self._coerce_version(spec)

    def _compare_less_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective < self._coerce_version(spec)

    def _compare_greater_than(self, prospective: LegacyVersion, spec: str) -> bool:
        return prospective > self._coerce_version(spec)
285
+
286
+
287
def _require_version_compare(
    fn: Callable[["Specifier", ParsedVersion, str], bool]
) -> Callable[["Specifier", ParsedVersion, str], bool]:
    """Decorator: PEP 440 comparisons only apply to real Version objects.

    The wrapped comparison returns False outright for LegacyVersion inputs.
    """

    @functools.wraps(fn)
    def wrapped(self: "Specifier", prospective: ParsedVersion, spec: str) -> bool:
        if isinstance(prospective, Version):
            return fn(self, prospective, spec)
        return False

    return wrapped
297
+
298
+
299
class Specifier(_IndividualSpecifier):
    """A single PEP 440 version specifier clause, e.g. ``>=1.0`` or ``~=2.2``."""

    _regex_str = r"""
        (?P<operator>(~=|==|!=|<=|>=|<|>|===))
        (?P<version>
            (?:
                # The identity operators allow for an escape hatch that will
                # do an exact string match of the version you wish to install.
                # This will not be parsed by PEP 440 and we cannot determine
                # any semantic meaning from it. This operator is discouraged
                # but included entirely as an escape hatch.
                (?<====)  # Only match for the identity operator
                \s*
                [^\s]*    # We just match everything, except for whitespace
                          # since we are only testing for strict identity.
            )
            |
            (?:
                # The (non)equality operators allow for wild card and local
                # versions to be specified so we have to define these two
                # operators separately to enable that.
                (?<===|!=)            # Only match for equals and not equals

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?

                # You cannot use a wild card and a dev or local version
                # together so group them with a | and make them optional.
                (?:
                    (?:[-_\.]?dev[-_\.]?[0-9]*)?         # dev release
                    (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
                    |
                    \.\*  # Wild card syntax of .*
                )?
            )
            |
            (?:
                # The compatible operator requires at least two digits in the
                # release segment.
                (?<=~=)               # Only match for the compatible operator

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)+   # release  (We have a + instead of a *)
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
            |
            (?:
                # All other operators only allow a sub set of what the
                # (non)equality operators do. Specifically they do not allow
                # local versions to be specified nor do they allow the prefix
                # matching wild cards.
                (?<!==|!=|~=)         # We have special cases for these
                                      # operators so we want to make sure they
                                      # don't match here.

                \s*
                v?
                (?:[0-9]+!)?          # epoch
                [0-9]+(?:\.[0-9]+)*   # release
                (?:                   # pre release
                    [-_\.]?
                    (a|b|c|rc|alpha|beta|pre|preview)
                    [-_\.]?
                    [0-9]*
                )?
                (?:                                   # post release
                    (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
                )?
                (?:[-_\.]?dev[-_\.]?[0-9]*)?          # dev release
            )
        )
        """

    _regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)

    _operators = {
        "~=": "compatible",
        "==": "equal",
        "!=": "not_equal",
        "<=": "less_than_equal",
        ">=": "greater_than_equal",
        "<": "less_than",
        ">": "greater_than",
        "===": "arbitrary",
    }

    @_require_version_compare
    def _compare_compatible(self, prospective: ParsedVersion, spec: str) -> bool:

        # Compatible releases have an equivalent combination of >= and ==. That
        # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
        # implement this in terms of the other specifiers instead of
        # implementing it ourselves. The only thing we need to do is construct
        # the other specifiers.

        # We want everything but the last item in the version, but we want to
        # ignore suffix segments.
        prefix = ".".join(
            list(itertools.takewhile(_is_not_suffix, _version_split(spec)))[:-1]
        )

        # Add the prefix notation to the end of our string
        prefix += ".*"

        return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
            prospective, prefix
        )

    @_require_version_compare
    def _compare_equal(self, prospective: ParsedVersion, spec: str) -> bool:

        # We need special logic to handle prefix matching
        if spec.endswith(".*"):
            # In the case of prefix matching we want to ignore local segment.
            prospective = Version(prospective.public)
            # Split the spec out by dots, and pretend that there is an implicit
            # dot in between a release segment and a pre-release segment.
            split_spec = _version_split(spec[:-2])  # Remove the trailing .*

            # Split the prospective version out by dots, and pretend that there
            # is an implicit dot in between a release segment and a pre-release
            # segment.
            split_prospective = _version_split(str(prospective))

            # Shorten the prospective version to be the same length as the spec
            # so that we can determine if the specifier is a prefix of the
            # prospective version or not.
            shortened_prospective = split_prospective[: len(split_spec)]

            # Pad out our two sides with zeros so that they both equal the same
            # length.
            padded_spec, padded_prospective = _pad_version(
                split_spec, shortened_prospective
            )

            return padded_prospective == padded_spec
        else:
            # Convert our spec string into a Version
            spec_version = Version(spec)

            # If the specifier does not have a local segment, then we want to
            # act as if the prospective version also does not have a local
            # segment.
            if not spec_version.local:
                prospective = Version(prospective.public)

            return prospective == spec_version

    @_require_version_compare
    def _compare_not_equal(self, prospective: ParsedVersion, spec: str) -> bool:
        return not self._compare_equal(prospective, spec)

    @_require_version_compare
    def _compare_less_than_equal(self, prospective: ParsedVersion, spec: str) -> bool:

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) <= Version(spec)

    @_require_version_compare
    def _compare_greater_than_equal(
        self, prospective: ParsedVersion, spec: str
    ) -> bool:

        # NB: Local version identifiers are NOT permitted in the version
        # specifier, so local version labels can be universally removed from
        # the prospective version.
        return Version(prospective.public) >= Version(spec)

    @_require_version_compare
    def _compare_less_than(self, prospective: ParsedVersion, spec_str: str) -> bool:

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is less than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective < spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a pre-release version, that we do not accept pre-release
        # versions for the version mentioned in the specifier (e.g. <3.1 should
        # not match 3.1.dev0, but should match 3.0.dev0).
        if not spec.is_prerelease and prospective.is_prerelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # less than the spec version *and* it's not a pre-release of the same
        # version in the spec.
        return True

    @_require_version_compare
    def _compare_greater_than(self, prospective: ParsedVersion, spec_str: str) -> bool:

        # Convert our spec to a Version instance, since we'll want to work with
        # it as a version.
        spec = Version(spec_str)

        # Check to see if the prospective version is greater than the spec
        # version. If it's not we can short circuit and just return False now
        # instead of doing extra unneeded work.
        if not prospective > spec:
            return False

        # This special case is here so that, unless the specifier itself
        # includes is a post-release version, that we do not accept
        # post-release versions for the version mentioned in the specifier
        # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
        if not spec.is_postrelease and prospective.is_postrelease:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # Ensure that we do not allow a local version of the version mentioned
        # in the specifier, which is technically greater than, to match.
        if prospective.local is not None:
            if Version(prospective.base_version) == Version(spec.base_version):
                return False

        # If we've gotten to here, it means that prospective version is both
        # greater than the spec version *and* it's not a pre-release of the
        # same version in the spec.
        return True

    def _compare_arbitrary(self, prospective: Version, spec: str) -> bool:
        # "===" is a plain case-insensitive string comparison.
        return str(prospective).lower() == str(spec).lower()

    @property
    def prereleases(self) -> bool:

        # If there is an explicit prereleases set for this, then we'll just
        # blindly use that.
        if self._prereleases is not None:
            return self._prereleases

        # Look at all of our specifiers and determine if they are inclusive
        # operators, and if they are if they are including an explicit
        # prerelease.
        operator, version = self._spec
        if operator in ["==", ">=", "<=", "~=", "==="]:
            # The == specifier can include a trailing .*, if it does we
            # want to remove before parsing.
            if operator == "==" and version.endswith(".*"):
                version = version[:-2]

            # Parse the version, and if it is a pre-release than this
            # specifier allows pre-releases.
            if parse(version).is_prerelease:
                return True

        return False

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value
580
+
581
+
582
+ _prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
583
+
584
+
585
+ def _version_split(version: str) -> List[str]:
586
+ result: List[str] = []
587
+ for item in version.split("."):
588
+ match = _prefix_regex.search(item)
589
+ if match:
590
+ result.extend(match.groups())
591
+ else:
592
+ result.append(item)
593
+ return result
594
+
595
+
596
+ def _is_not_suffix(segment: str) -> bool:
597
+ return not any(
598
+ segment.startswith(prefix) for prefix in ("dev", "a", "b", "rc", "post")
599
+ )
600
+
601
+
602
+ def _pad_version(left: List[str], right: List[str]) -> Tuple[List[str], List[str]]:
603
+ left_split, right_split = [], []
604
+
605
+ # Get the release segment of our versions
606
+ left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
607
+ right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
608
+
609
+ # Get the rest of our versions
610
+ left_split.append(left[len(left_split[0]) :])
611
+ right_split.append(right[len(right_split[0]) :])
612
+
613
+ # Insert our padding
614
+ left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
615
+ right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
616
+
617
+ return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
618
+
619
+
620
class SpecifierSet(BaseSpecifier):
    """A collection of individual specifiers combined with a logical AND.

    The set is parsed from a comma-separated specifier string; each piece
    becomes a ``Specifier`` when it is PEP 440 compliant, otherwise a
    ``LegacySpecifier``.
    """

    def __init__(
        self, specifiers: str = "", prereleases: Optional[bool] = None
    ) -> None:
        """Parse *specifiers* and store an optional pre-release override."""

        # Split on , to break each individual specifier into its own item, and
        # strip each item to remove leading/trailing whitespace.
        split_specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]

        # Parse each individual specifier, attempting first to make it a
        # Specifier and falling back to a LegacySpecifier.
        parsed: Set[_IndividualSpecifier] = set()
        for specifier in split_specifiers:
            try:
                parsed.add(Specifier(specifier))
            except InvalidSpecifier:
                parsed.add(LegacySpecifier(specifier))

        # Turn our parsed specifiers into a frozen set and save them for later.
        self._specs = frozenset(parsed)

        # Store our prereleases value so we can use it later to determine if
        # we accept prereleases or not.
        self._prereleases = prereleases

    def __repr__(self) -> str:
        # Only mention prereleases in the repr when it was explicitly set.
        pre = (
            f", prereleases={self.prereleases!r}"
            if self._prereleases is not None
            else ""
        )

        return f"<SpecifierSet({str(self)!r}{pre})>"

    def __str__(self) -> str:
        # Sorted rendering so equal sets always stringify identically.
        return ",".join(sorted(str(s) for s in self._specs))

    def __hash__(self) -> int:
        return hash(self._specs)

    def __and__(self, other: Union["SpecifierSet", str]) -> "SpecifierSet":
        """Return the combination (logical AND) of two specifier sets."""
        if isinstance(other, str):
            other = SpecifierSet(other)
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        specifier = SpecifierSet()
        specifier._specs = frozenset(self._specs | other._specs)

        # Merge the pre-release overrides: an unset side defers to the other;
        # conflicting explicit overrides cannot be reconciled.
        if self._prereleases is None and other._prereleases is not None:
            specifier._prereleases = other._prereleases
        elif self._prereleases is not None and other._prereleases is None:
            specifier._prereleases = self._prereleases
        elif self._prereleases == other._prereleases:
            specifier._prereleases = self._prereleases
        else:
            raise ValueError(
                "Cannot combine SpecifierSets with True and False prerelease "
                "overrides."
            )

        return specifier

    def __eq__(self, other: object) -> bool:
        # Strings and individual specifiers are coerced before comparing;
        # the prereleases override is deliberately NOT part of equality.
        if isinstance(other, (str, _IndividualSpecifier)):
            other = SpecifierSet(str(other))
        elif not isinstance(other, SpecifierSet):
            return NotImplemented

        return self._specs == other._specs

    def __len__(self) -> int:
        return len(self._specs)

    def __iter__(self) -> Iterator[_IndividualSpecifier]:
        return iter(self._specs)

    @property
    def prereleases(self) -> Optional[bool]:
        """Whether this set should be considered to allow pre-releases.

        Returns None when there is neither an explicit override nor any
        specifier to infer the answer from.
        """

        # If we have been given an explicit prerelease modifier, then we'll
        # pass that through here.
        if self._prereleases is not None:
            return self._prereleases

        # If we don't have any specifiers, and we don't have a forced value,
        # then we'll just return None since we don't know if this should have
        # pre-releases or not.
        if not self._specs:
            return None

        # Otherwise we'll see if any of the given specifiers accept
        # prereleases, if any of them do we'll return True, otherwise False.
        return any(s.prereleases for s in self._specs)

    @prereleases.setter
    def prereleases(self, value: bool) -> None:
        self._prereleases = value

    def __contains__(self, item: UnparsedVersion) -> bool:
        return self.contains(item)

    def contains(
        self, item: UnparsedVersion, prereleases: Optional[bool] = None
    ) -> bool:
        """Return whether *item* satisfies every specifier in this set."""

        # Ensure that our item is a Version or LegacyVersion instance.
        if not isinstance(item, (LegacyVersion, Version)):
            item = parse(item)

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # We can determine if we're going to allow pre-releases by looking to
        # see if any of the underlying items supports them. If none of them do
        # and this item is a pre-release then we do not allow it and we can
        # short circuit that here.
        # Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0
        if not prereleases and item.is_prerelease:
            return False

        # We simply dispatch to the underlying specs here to make sure that the
        # given version is contained within all of them.
        # Note: This use of all() here means that an empty set of specifiers
        # will always return True, this is an explicit design decision.
        return all(s.contains(item, prereleases=prereleases) for s in self._specs)

    def filter(
        self, iterable: Iterable[VersionTypeVar], prereleases: Optional[bool] = None
    ) -> Iterable[VersionTypeVar]:
        """Filter *iterable* down to the items that satisfy every specifier."""

        # Determine if we're forcing a prerelease or not, if we're not forcing
        # one for this particular filter call, then we'll use whatever the
        # SpecifierSet thinks for whether or not we should support prereleases.
        if prereleases is None:
            prereleases = self.prereleases

        # If we have any specifiers, then we want to wrap our iterable in the
        # filter method for each one, this will act as a logical AND amongst
        # each specifier.
        if self._specs:
            for spec in self._specs:
                iterable = spec.filter(iterable, prereleases=bool(prereleases))
            return iterable
        # If we do not have any specifiers, then we need to have a rough filter
        # which will filter out any pre-releases, unless there are no final
        # releases, and which will filter out LegacyVersion in general.
        else:
            filtered: List[VersionTypeVar] = []
            found_prereleases: List[VersionTypeVar] = []

            item: UnparsedVersion
            parsed_version: Union[Version, LegacyVersion]

            for item in iterable:
                # Ensure that we have some kind of Version class for this item.
                if not isinstance(item, (LegacyVersion, Version)):
                    parsed_version = parse(item)
                else:
                    parsed_version = item

                # Filter out any item which is parsed as a LegacyVersion
                if isinstance(parsed_version, LegacyVersion):
                    continue

                # Store any item which is a pre-release for later unless we've
                # already found a final version or we are accepting prereleases
                if parsed_version.is_prerelease and not prereleases:
                    if not filtered:
                        found_prereleases.append(item)
                else:
                    filtered.append(item)

            # If we've found no items except for pre-releases, then we'll go
            # ahead and use the pre-releases
            if not filtered and found_prereleases and prereleases is None:
                return found_prereleases

            return filtered
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/tags.py ADDED
@@ -0,0 +1,487 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import logging
6
+ import platform
7
+ import sys
8
+ import sysconfig
9
+ from importlib.machinery import EXTENSION_SUFFIXES
10
+ from typing import (
11
+ Dict,
12
+ FrozenSet,
13
+ Iterable,
14
+ Iterator,
15
+ List,
16
+ Optional,
17
+ Sequence,
18
+ Tuple,
19
+ Union,
20
+ cast,
21
+ )
22
+
23
+ from . import _manylinux, _musllinux
24
+
25
+ logger = logging.getLogger(__name__)
26
+
27
+ PythonVersion = Sequence[int]
28
+ MacVersion = Tuple[int, int]
29
+
30
+ INTERPRETER_SHORT_NAMES: Dict[str, str] = {
31
+ "python": "py", # Generic.
32
+ "cpython": "cp",
33
+ "pypy": "pp",
34
+ "ironpython": "ip",
35
+ "jython": "jy",
36
+ }
37
+
38
+
39
+ _32_BIT_INTERPRETER = sys.maxsize <= 2 ** 32
40
+
41
+
42
class Tag:
    """
    A representation of the tag triple for a wheel.

    Instances are considered immutable and thus are hashable. Equality checking
    is also supported.
    """

    __slots__ = ["_interpreter", "_abi", "_platform", "_hash"]

    def __init__(self, interpreter: str, abi: str, platform: str) -> None:
        # Tags compare case-insensitively, so canonicalize to lowercase once.
        self._interpreter = interpreter.lower()
        self._abi = abi.lower()
        self._platform = platform.lower()
        # The __hash__ of every single element in a Set[Tag] will be evaluated
        # each time that a set calls its `.disjoint()` method, which may happen
        # hundreds of times when scanning a page of links for packages with
        # tags matching that Set[Tag]. Pre-computing the value here produces
        # significant speedups for downstream consumers.
        self._hash = hash((self._interpreter, self._abi, self._platform))

    @property
    def interpreter(self) -> str:
        return self._interpreter

    @property
    def abi(self) -> str:
        return self._abi

    @property
    def platform(self) -> str:
        return self._platform

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, Tag):
            return NotImplemented

        # Compare the precomputed hashes first to short-circuit ASAP for
        # performance; fall through to the component fields on a hash match.
        if self._hash != other._hash:
            return False
        return (
            self._platform == other._platform
            and self._abi == other._abi
            and self._interpreter == other._interpreter
        )

    def __hash__(self) -> int:
        return self._hash

    def __str__(self) -> str:
        return f"{self._interpreter}-{self._abi}-{self._platform}"

    def __repr__(self) -> str:
        return f"<{self} @ {id(self)}>"
94
+
95
+
96
def parse_tag(tag: str) -> FrozenSet[Tag]:
    """
    Parses the provided tag (e.g. `py3-none-any`) into a frozenset of Tag instances.

    Returning a set is required due to the possibility that the tag is a
    compressed tag set (each component may itself be '.'-separated).
    """
    interpreters, abis, platforms = tag.split("-")
    return frozenset(
        Tag(interpreter, abi, platform_)
        for interpreter in interpreters.split(".")
        for abi in abis.split(".")
        for platform_ in platforms.split(".")
    )
110
+
111
+
112
def _get_config_var(name: str, warn: bool = False) -> Union[int, str, None]:
    """Read a sysconfig variable, optionally logging when it is unset."""
    value = sysconfig.get_config_var(name)
    if warn and value is None:
        logger.debug(
            "Config variable '%s' is unset, Python ABI tag may be incorrect", name
        )
    return value
119
+
120
+
121
+ def _normalize_string(string: str) -> str:
122
+ return string.replace(".", "_").replace("-", "_")
123
+
124
+
125
+ def _abi3_applies(python_version: PythonVersion) -> bool:
126
+ """
127
+ Determine if the Python version supports abi3.
128
+
129
+ PEP 384 was first implemented in Python 3.2.
130
+ """
131
+ return len(python_version) > 1 and tuple(python_version) >= (3, 2)
132
+
133
+
134
def _cpython_abis(py_version: PythonVersion, warn: bool = False) -> List[str]:
    """Compute the ABI tags for a CPython of *py_version*.

    The most specific tag (e.g. ``cp38d``) is returned first; for debug
    builds on 3.8+ the plain ``cp<ver>`` tag is appended as a fallback.
    """
    py_version = tuple(py_version)  # To allow for version comparison.
    abis = []
    version = _version_nodot(py_version[:2])
    # Build-flag suffixes; each stays "" unless the build enables it.
    debug = pymalloc = ucs4 = ""
    with_debug = _get_config_var("Py_DEBUG", warn)
    has_refcount = hasattr(sys, "gettotalrefcount")
    # Windows doesn't set Py_DEBUG, so checking for support of debug-compiled
    # extension modules is the best option.
    # https://github.com/pypa/pip/issues/3383#issuecomment-173267692
    has_ext = "_d.pyd" in EXTENSION_SUFFIXES
    if with_debug or (with_debug is None and (has_refcount or has_ext)):
        debug = "d"
    # The "m" (pymalloc) and "u" (UCS-4) ABI flags only exist on older
    # CPython versions; 3.8 dropped "m" and 3.3 dropped "u".
    if py_version < (3, 8):
        with_pymalloc = _get_config_var("WITH_PYMALLOC", warn)
        if with_pymalloc or with_pymalloc is None:
            pymalloc = "m"
        if py_version < (3, 3):
            unicode_size = _get_config_var("Py_UNICODE_SIZE", warn)
            if unicode_size == 4 or (
                unicode_size is None and sys.maxunicode == 0x10FFFF
            ):
                ucs4 = "u"
    elif debug:
        # Debug builds can also load "normal" extension modules.
        # We can also assume no UCS-4 or pymalloc requirement.
        abis.append(f"cp{version}")
    # The fully-flagged ABI tag always goes first (highest priority).
    abis.insert(
        0,
        "cp{version}{debug}{pymalloc}{ucs4}".format(
            version=version, debug=debug, pymalloc=pymalloc, ucs4=ucs4
        ),
    )
    return abis
168
+
169
+
170
def cpython_tags(
    python_version: Optional[PythonVersion] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a CPython interpreter.

    The tags consist of:
    - cp<python_version>-<abi>-<platform>
    - cp<python_version>-abi3-<platform>
    - cp<python_version>-none-<platform>
    - cp<less than python_version>-abi3-<platform>  # Older Python versions down to 3.2.

    If python_version only specifies a major version then user-provided ABIs and
    the 'none' ABI tag will be used.

    If 'abi3' or 'none' are specified in 'abis' then they will be yielded at
    their normal position and not at the beginning.
    """
    if not python_version:
        python_version = sys.version_info[:2]

    interpreter = f"cp{_version_nodot(python_version[:2])}"

    if abis is None:
        if len(python_version) > 1:
            abis = _cpython_abis(python_version, warn)
        else:
            abis = []
    abis = list(abis)
    # 'abi3' and 'none' are explicitly handled later.
    for explicit_abi in ("abi3", "none"):
        try:
            abis.remove(explicit_abi)
        except ValueError:
            pass

    platforms = list(platforms or platform_tags())
    # Specific ABIs first, then abi3 (if applicable), then the "none" ABI:
    # yield order defines tag priority for consumers.
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
    if _abi3_applies(python_version):
        yield from (Tag(interpreter, "abi3", platform_) for platform_ in platforms)
    yield from (Tag(interpreter, "none", platform_) for platform_ in platforms)

    if _abi3_applies(python_version):
        # abi3 wheels built against older CPython minors (down to 3.2) are
        # still loadable here, so advertise them last (lowest priority).
        for minor_version in range(python_version[1] - 1, 1, -1):
            for platform_ in platforms:
                interpreter = "cp{version}".format(
                    version=_version_nodot((python_version[0], minor_version))
                )
                yield Tag(interpreter, "abi3", platform_)
225
+
226
+
227
def _generic_abi() -> Iterator[str]:
    """Yield the ABI tag derived from the build's SOABI, when one is defined."""
    soabi = sysconfig.get_config_var("SOABI")
    if soabi:
        yield _normalize_string(soabi)
231
+
232
+
233
def generic_tags(
    interpreter: Optional[str] = None,
    abis: Optional[Iterable[str]] = None,
    platforms: Optional[Iterable[str]] = None,
    *,
    warn: bool = False,
) -> Iterator[Tag]:
    """
    Yields the tags for a generic interpreter.

    The tags consist of:
    - <interpreter>-<abi>-<platform>

    The "none" ABI will be added if it was not explicitly provided.
    """
    if not interpreter:
        # Default interpreter tag: short implementation name + nodot version.
        interp_name = interpreter_name()
        interp_version = interpreter_version(warn=warn)
        interpreter = "".join([interp_name, interp_version])
    if abis is None:
        abis = _generic_abi()
    platforms = list(platforms or platform_tags())
    abis = list(abis)
    # "none" is always usable, so append it as the lowest-priority ABI.
    if "none" not in abis:
        abis.append("none")
    for abi in abis:
        for platform_ in platforms:
            yield Tag(interpreter, abi, platform_)
261
+
262
+
263
def _py_interpreter_range(py_version: PythonVersion) -> Iterator[str]:
    """
    Yields Python versions in descending order.

    After the latest version, the major-only version will be yielded, and then
    all previous versions of that major version.
    """
    has_minor = len(py_version) > 1
    if has_minor:
        yield f"py{_version_nodot(py_version[:2])}"
    yield f"py{py_version[0]}"
    if has_minor:
        for minor in reversed(range(py_version[1])):
            yield f"py{_version_nodot((py_version[0], minor))}"
276
+
277
+
278
def compatible_tags(
    python_version: Optional[PythonVersion] = None,
    interpreter: Optional[str] = None,
    platforms: Optional[Iterable[str]] = None,
) -> Iterator[Tag]:
    """
    Yields the sequence of tags that are compatible with a specific version of Python.

    The tags consist of:
    - py*-none-<platform>
    - <interpreter>-none-any  # ... if `interpreter` is provided.
    - py*-none-any
    """
    if not python_version:
        python_version = sys.version_info[:2]
    platforms = list(platforms or platform_tags())
    # Materialize the version range once; it is iterated twice below.
    versions = list(_py_interpreter_range(python_version))
    yield from (Tag(version, "none", platform_)
                for version in versions
                for platform_ in platforms)
    if interpreter:
        yield Tag(interpreter, "none", "any")
    yield from (Tag(version, "none", "any") for version in versions)
301
+
302
+
303
+ def _mac_arch(arch: str, is_32bit: bool = _32_BIT_INTERPRETER) -> str:
304
+ if not is_32bit:
305
+ return arch
306
+
307
+ if arch.startswith("ppc"):
308
+ return "ppc"
309
+
310
+ return "i386"
311
+
312
+
313
+ def _mac_binary_formats(version: MacVersion, cpu_arch: str) -> List[str]:
314
+ formats = [cpu_arch]
315
+ if cpu_arch == "x86_64":
316
+ if version < (10, 4):
317
+ return []
318
+ formats.extend(["intel", "fat64", "fat32"])
319
+
320
+ elif cpu_arch == "i386":
321
+ if version < (10, 4):
322
+ return []
323
+ formats.extend(["intel", "fat32", "fat"])
324
+
325
+ elif cpu_arch == "ppc64":
326
+ # TODO: Need to care about 32-bit PPC for ppc64 through 10.2?
327
+ if version > (10, 5) or version < (10, 4):
328
+ return []
329
+ formats.append("fat64")
330
+
331
+ elif cpu_arch == "ppc":
332
+ if version > (10, 6):
333
+ return []
334
+ formats.extend(["fat32", "fat"])
335
+
336
+ if cpu_arch in {"arm64", "x86_64"}:
337
+ formats.append("universal2")
338
+
339
+ if cpu_arch in {"x86_64", "i386", "ppc64", "ppc", "intel"}:
340
+ formats.append("universal")
341
+
342
+ return formats
343
+
344
+
345
def mac_platforms(
    version: Optional[MacVersion] = None, arch: Optional[str] = None
) -> Iterator[str]:
    """
    Yields the platform tags for a macOS system.

    The `version` parameter is a two-item tuple specifying the macOS version to
    generate platform tags for. The `arch` parameter is the CPU architecture to
    generate platform tags for. Both parameters default to the appropriate value
    for the current system.
    """
    version_str, _, cpu_arch = platform.mac_ver()
    if version is None:
        version = cast("MacVersion", tuple(map(int, version_str.split(".")[:2])))
    else:
        version = version  # no-op; keeps the two branches symmetrical
    if arch is None:
        arch = _mac_arch(cpu_arch)
    else:
        arch = arch  # no-op; keeps the two branches symmetrical

    if (10, 0) <= version and version < (11, 0):
        # Prior to Mac OS 11, each yearly release of Mac OS bumped the
        # "minor" version number. The major version was always 10.
        for minor_version in range(version[1], -1, -1):
            compat_version = 10, minor_version
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=10, minor=minor_version, binary_format=binary_format
                )

    if version >= (11, 0):
        # Starting with Mac OS 11, each yearly release bumps the major version
        # number. The minor versions are now the midyear updates.
        for major_version in range(version[0], 10, -1):
            compat_version = major_version, 0
            binary_formats = _mac_binary_formats(compat_version, arch)
            for binary_format in binary_formats:
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=major_version, minor=0, binary_format=binary_format
                )

    if version >= (11, 0):
        # Mac OS 11 on x86_64 is compatible with binaries from previous releases.
        # Arm64 support was introduced in 11.0, so no Arm binaries from previous
        # releases exist.
        #
        # However, the "universal2" binary format can have a
        # macOS version earlier than 11.0 when the x86_64 part of the binary supports
        # that version of macOS.
        if arch == "x86_64":
            # 10.16..10.4, i.e. every pre-11 release an x86_64 binary may target.
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_formats = _mac_binary_formats(compat_version, arch)
                for binary_format in binary_formats:
                    yield "macosx_{major}_{minor}_{binary_format}".format(
                        major=compat_version[0],
                        minor=compat_version[1],
                        binary_format=binary_format,
                    )
        else:
            # Non-x86_64 (i.e. arm64): only universal2 wheels tagged for
            # pre-11 macOS can contain a usable slice.
            for minor_version in range(16, 3, -1):
                compat_version = 10, minor_version
                binary_format = "universal2"
                yield "macosx_{major}_{minor}_{binary_format}".format(
                    major=compat_version[0],
                    minor=compat_version[1],
                    binary_format=binary_format,
                )
415
+
416
+
417
def _linux_platforms(is_32bit: bool = _32_BIT_INTERPRETER) -> Iterator[str]:
    """Yield Linux platform tags: manylinux, then musllinux, then plain linux."""
    linux = _normalize_string(sysconfig.get_platform())
    if is_32bit:
        # A 32-bit interpreter on a 64-bit kernel runs 32-bit wheels.
        downgrades = {"linux_x86_64": "linux_i686", "linux_aarch64": "linux_armv7l"}
        linux = downgrades.get(linux, linux)
    _, arch = linux.split("_", 1)
    yield from _manylinux.platform_tags(linux, arch)
    yield from _musllinux.platform_tags(arch)
    yield linux
428
+
429
+
430
def _generic_platforms() -> Iterator[str]:
    """Yield the single platform tag sysconfig reports for this build."""
    platform_tag = _normalize_string(sysconfig.get_platform())
    yield platform_tag
432
+
433
+
434
def platform_tags() -> Iterator[str]:
    """
    Provides the platform tags for this installation.
    """
    system = platform.system()
    if system == "Darwin":
        return mac_platforms()
    if system == "Linux":
        return _linux_platforms()
    return _generic_platforms()
444
+
445
+
446
def interpreter_name() -> str:
    """
    Returns the name of the running interpreter, shortened when a short
    form (e.g. "cp" for CPython) is known.
    """
    name = sys.implementation.name
    short = INTERPRETER_SHORT_NAMES.get(name)
    return short if short else name
452
+
453
+
454
def interpreter_version(*, warn: bool = False) -> str:
    """
    Returns the version of the running interpreter, preferring the build's
    own "py_version_nodot" config value when available.
    """
    nodot = _get_config_var("py_version_nodot", warn=warn)
    if nodot:
        return str(nodot)
    return _version_nodot(sys.version_info[:2])
464
+
465
+
466
+ def _version_nodot(version: PythonVersion) -> str:
467
+ return "".join(map(str, version))
468
+
469
+
470
def sys_tags(*, warn: bool = False) -> Iterator[Tag]:
    """
    Returns the sequence of tag triples for the running interpreter.

    The order of the sequence corresponds to priority order for the
    interpreter, from most to least important.
    """
    name = interpreter_name()

    # Interpreter-specific tags first (highest priority).
    if name == "cp":
        yield from cpython_tags(warn=warn)
    else:
        yield from generic_tags()

    # Then the generic "compatible" tags; PyPy additionally claims pp3.
    if name == "pp":
        yield from compatible_tags(interpreter="pp3")
    else:
        yield from compatible_tags()
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/utils.py ADDED
@@ -0,0 +1,136 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import re
6
+ from typing import FrozenSet, NewType, Tuple, Union, cast
7
+
8
+ from .tags import Tag, parse_tag
9
+ from .version import InvalidVersion, Version
10
+
11
+ BuildTag = Union[Tuple[()], Tuple[int, str]]
12
+ NormalizedName = NewType("NormalizedName", str)
13
+
14
+
15
class InvalidWheelFilename(ValueError):
    """
    An invalid wheel filename was found; users should refer to PEP 427
    for the wheel (binary distribution) filename format.
    """
19
+
20
+
21
class InvalidSdistFilename(ValueError):
    """
    An invalid sdist filename was found; users should refer to the packaging
    user guide for the source distribution filename format.
    """
25
+
26
+
27
# PEP 503: runs of '-', '_' and '.' are interchangeable in project names.
_canonicalize_regex = re.compile(r"[-_.]+")
# PEP 427: The build number must start with a digit.
_build_tag_regex = re.compile(r"(\d+)(.*)")


def canonicalize_name(name: str) -> NormalizedName:
    """Normalize a project name per PEP 503: collapse [-_.]+ to '-', lowercase."""
    normalized = _canonicalize_regex.sub("-", name).lower()
    return cast(NormalizedName, normalized)
36
+
37
+
38
def canonicalize_version(version: Union[Version, str]) -> str:
    """
    This is very similar to Version.__str__, but has one subtle difference
    with the way it handles the release segment: trailing '.0' components
    are stripped.
    """
    if isinstance(version, str):
        try:
            parsed = Version(version)
        except InvalidVersion:
            # Legacy versions cannot be normalized
            return version
    else:
        parsed = version

    rendered = []

    # Epoch
    if parsed.epoch != 0:
        rendered.append(f"{parsed.epoch}!")

    # Release segment, with trailing '.0' components removed to normalize.
    release = ".".join(str(part) for part in parsed.release)
    rendered.append(re.sub(r"(\.0)+$", "", release))

    # Pre-release
    if parsed.pre is not None:
        rendered.append("".join(str(part) for part in parsed.pre))

    # Post-release
    if parsed.post is not None:
        rendered.append(f".post{parsed.post}")

    # Development release
    if parsed.dev is not None:
        rendered.append(f".dev{parsed.dev}")

    # Local version segment
    if parsed.local is not None:
        rendered.append(f"+{parsed.local}")

    return "".join(rendered)
79
+
80
+
81
def parse_wheel_filename(
    filename: str,
) -> Tuple[NormalizedName, Version, BuildTag, FrozenSet[Tag]]:
    """Parse a PEP 427 wheel filename into (name, version, build tag, tags).

    Raises InvalidWheelFilename when the filename does not follow the
    ``{name}-{version}(-{build})?-{interp}-{abi}-{platform}.whl`` layout.
    """
    # Fix: the error messages below previously contained the literal
    # placeholder "(unknown)" and used f-strings with no interpolation;
    # they now include the offending filename.
    if not filename.endswith(".whl"):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (extension must be '.whl'): {filename}"
        )

    filename = filename[:-4]
    dashes = filename.count("-")
    if dashes not in (4, 5):
        raise InvalidWheelFilename(
            f"Invalid wheel filename (wrong number of parts): {filename}"
        )

    # Split off name/version(/build); the compressed tag triple keeps its dashes.
    parts = filename.split("-", dashes - 2)
    name_part = parts[0]
    # See PEP 427 for the rules on escaping the project name
    if "__" in name_part or re.match(r"^[\w\d._]*$", name_part, re.UNICODE) is None:
        raise InvalidWheelFilename(f"Invalid project name: {filename}")
    name = canonicalize_name(name_part)
    version = Version(parts[1])
    if dashes == 5:
        build_part = parts[2]
        build_match = _build_tag_regex.match(build_part)
        if build_match is None:
            raise InvalidWheelFilename(
                f"Invalid build number: {build_part} in '{filename}'"
            )
        build = cast(BuildTag, (int(build_match.group(1)), build_match.group(2)))
    else:
        build = ()
    tags = parse_tag(parts[-1])
    return (name, version, build, tags)
115
+
116
+
117
def parse_sdist_filename(filename: str) -> Tuple[NormalizedName, Version]:
    """Parse an sdist filename (``{name}-{version}.tar.gz`` or ``.zip``)
    into (normalized name, version).

    Raises InvalidSdistFilename on an unrecognized extension or a filename
    with no name/version separator.
    """
    # Fix: the error messages below previously contained the literal
    # placeholder "(unknown)" and used f-strings with no interpolation;
    # they now include the offending filename.
    if filename.endswith(".tar.gz"):
        file_stem = filename[: -len(".tar.gz")]
    elif filename.endswith(".zip"):
        file_stem = filename[: -len(".zip")]
    else:
        raise InvalidSdistFilename(
            f"Invalid sdist filename (extension must be '.tar.gz' or '.zip'):"
            f" {filename}"
        )

    # We are requiring a PEP 440 version, which cannot contain dashes,
    # so we split on the last dash.
    name_part, sep, version_part = file_stem.rpartition("-")
    if not sep:
        raise InvalidSdistFilename(f"Invalid sdist filename: {filename}")

    name = canonicalize_name(name_part)
    version = Version(version_part)
    return (name, version)
env-llmeval/lib/python3.10/site-packages/pip/_vendor/packaging/version.py ADDED
@@ -0,0 +1,504 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file is dual licensed under the terms of the Apache License, Version
2
+ # 2.0, and the BSD License. See the LICENSE file in the root of this repository
3
+ # for complete details.
4
+
5
+ import collections
6
+ import itertools
7
+ import re
8
+ import warnings
9
+ from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
10
+
11
+ from ._structures import Infinity, InfinityType, NegativeInfinity, NegativeInfinityType
12
+
13
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]

# Type aliases for the comparison keys used to order versions. The
# Infinity / NegativeInfinity sentinels sort after / before every concrete
# segment value (see _structures).
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
SubLocalType = Union[InfiniteTypes, int, str]
LocalType = Union[
    NegativeInfinityType,
    Tuple[
        Union[
            SubLocalType,
            Tuple[SubLocalType, str],
            Tuple[NegativeInfinityType, SubLocalType],
        ],
        ...,
    ],
]
# Comparison key for PEP 440 versions — presumably
# (epoch, release, pre, post, dev, local); confirm against Version's key
# construction (not visible in this chunk).
CmpKey = Tuple[
    int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
]
# Comparison key for non-PEP 440 "legacy" versions.
LegacyCmpKey = Tuple[int, Tuple[str, ...]]
VersionComparisonMethod = Callable[
    [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
]

# Internal record holding a parsed version's components.
_Version = collections.namedtuple(
    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)
40
+
41
+
42
def parse(version: str) -> Union["LegacyVersion", "Version"]:
    """
    Parse *version* into a :class:`Version` when it is PEP 440 compliant,
    otherwise fall back to a (deprecated) :class:`LegacyVersion`.
    """
    try:
        parsed: Union["LegacyVersion", "Version"] = Version(version)
    except InvalidVersion:
        parsed = LegacyVersion(version)
    return parsed
52
+
53
+
54
class InvalidVersion(ValueError):
    """Raised when a version string does not comply with PEP 440."""
58
+
59
+
60
class _BaseVersion:
    """Shared total-ordering machinery for Version and LegacyVersion.

    Subclasses populate ``_key`` with a comparable tuple; every comparison
    and the hash delegate to it.
    """

    _key: Union[CmpKey, LegacyCmpKey]

    def __hash__(self) -> int:
        return hash(self._key)

    # The isinstance guard is intentionally repeated in each method: routing
    # through a shared helper would add a function call to every comparison.
    def __eq__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key == other._key

    def __ne__(self, other: object) -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key != other._key

    def __lt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key < other._key

    def __le__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key <= other._key

    def __gt__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key > other._key

    def __ge__(self, other: "_BaseVersion") -> bool:
        if not isinstance(other, _BaseVersion):
            return NotImplemented
        return self._key >= other._key
104
+
105
+
106
class LegacyVersion(_BaseVersion):
    """A non-PEP 440 version string, ordered by setuptools' legacy scheme.

    Deprecated: constructing one emits a DeprecationWarning.
    """

    def __init__(self, version: str) -> None:
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

        warnings.warn(
            "Creating a LegacyVersion has been deprecated and will be "
            "removed in the next major release",
            DeprecationWarning,
        )

    def __str__(self) -> str:
        return self._version

    def __repr__(self) -> str:
        return f"<LegacyVersion('{self}')>"

    @property
    def public(self) -> str:
        # Legacy versions have no local segment, so the public form is the
        # whole string.
        return self._version

    @property
    def base_version(self) -> str:
        return self._version

    @property
    def epoch(self) -> int:
        # -1 sorts before every PEP 440 version (whose epochs are >= 0).
        return -1

    # PEP 440 structural segments do not exist for legacy versions.
    @property
    def release(self) -> None:
        return None

    @property
    def pre(self) -> None:
        return None

    @property
    def post(self) -> None:
        return None

    @property
    def dev(self) -> None:
        return None

    @property
    def local(self) -> None:
        return None

    @property
    def is_prerelease(self) -> bool:
        return False

    @property
    def is_postrelease(self) -> bool:
        return False

    @property
    def is_devrelease(self) -> bool:
        return False
166
+
167
+
168
+ _legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)
169
+
170
+ _legacy_version_replacement_map = {
171
+ "pre": "c",
172
+ "preview": "c",
173
+ "-": "final-",
174
+ "rc": "c",
175
+ "dev": "@",
176
+ }
177
+
178
+
179
def _parse_version_parts(s: str) -> Iterator[str]:
    """Yield normalized, comparable components of a legacy version string."""
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # Zero-pad so numeric parts compare correctly as strings.
            yield part.zfill(8)
        else:
            yield "*" + part

    # Trailing marker so alpha/beta/candidate tags sort before the release.
    yield "*final"
194
+
195
+
196
def _legacy_cmpkey(version: str) -> LegacyCmpKey:
    """Build the comparison key used by LegacyVersion.

    Reproduces the de-facto ordering from old setuptools
    (pkg_resources.parse_version).
    """

    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts: List[str] = []
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)

    return epoch, tuple(parts)
221
+
222
+
223
# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse.  Must be compiled with re.VERBOSE
# (and, for case-insensitive matching, re.IGNORECASE) — see Version._regex.
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""
255
+
256
+
257
class Version(_BaseVersion):
    """A PEP 440 compliant version, parsed and normalized on construction.

    ``str(Version(...))`` renders the canonical normalized form; comparison
    uses the structured key produced by :func:`_cmpkey`.
    """

    # Anchored variant of VERSION_PATTERN: the whole string (modulo
    # surrounding whitespace) must be a valid version.
    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version: str) -> None:

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion(f"Invalid version: '{version}'")

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self) -> str:
        return f"<Version('{self}')>"

    def __str__(self) -> str:
        # Render the canonical normalized form, segment by segment.
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(f".post{self.post}")

        # Development release
        if self.dev is not None:
            parts.append(f".dev{self.dev}")

        # Local version segment
        if self.local is not None:
            parts.append(f"+{self.local}")

        return "".join(parts)

    @property
    def epoch(self) -> int:
        _epoch: int = self._version.epoch
        return _epoch

    @property
    def release(self) -> Tuple[int, ...]:
        _release: Tuple[int, ...] = self._version.release
        return _release

    @property
    def pre(self) -> Optional[Tuple[str, int]]:
        # Normalized (letter, number) pair, e.g. ("rc", 1), or None.
        _pre: Optional[Tuple[str, int]] = self._version.pre
        return _pre

    @property
    def post(self) -> Optional[int]:
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self) -> Optional[int]:
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self) -> Optional[str]:
        # Re-join the parsed local segments with "." (the normalized form).
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self) -> str:
        # Everything before the "+" local-segment separator.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self) -> str:
        # Epoch + release only: no pre/post/dev/local segments.
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append(f"{self.epoch}!")

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self) -> bool:
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self) -> bool:
        return self.post is not None

    @property
    def is_devrelease(self) -> bool:
        return self.dev is not None

    @property
    def major(self) -> int:
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self) -> int:
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self) -> int:
        return self.release[2] if len(self.release) >= 3 else 0
392
+
393
+ def _parse_letter_version(
394
+ letter: str, number: Union[str, bytes, SupportsInt]
395
+ ) -> Optional[Tuple[str, int]]:
396
+
397
+ if letter:
398
+ # We consider there to be an implicit 0 in a pre-release if there is
399
+ # not a numeral associated with it.
400
+ if number is None:
401
+ number = 0
402
+
403
+ # We normalize any letters to their lower case form
404
+ letter = letter.lower()
405
+
406
+ # We consider some words to be alternate spellings of other words and
407
+ # in those cases we want to normalize the spellings to our preferred
408
+ # spelling.
409
+ if letter == "alpha":
410
+ letter = "a"
411
+ elif letter == "beta":
412
+ letter = "b"
413
+ elif letter in ["c", "pre", "preview"]:
414
+ letter = "rc"
415
+ elif letter in ["rev", "r"]:
416
+ letter = "post"
417
+
418
+ return letter, int(number)
419
+ if not letter and number:
420
+ # We assume if we are given a number, but we are not given a letter
421
+ # then this is using the implicit post release syntax (e.g. 1.0-1)
422
+ letter = "post"
423
+
424
+ return letter, int(number)
425
+
426
+ return None
427
+
428
+
429
+ _local_version_separators = re.compile(r"[\._-]")
430
+
431
+
432
+ def _parse_local_version(local: str) -> Optional[LocalType]:
433
+ """
434
+ Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
435
+ """
436
+ if local is not None:
437
+ return tuple(
438
+ part.lower() if not part.isdigit() else int(part)
439
+ for part in _local_version_separators.split(local)
440
+ )
441
+ return None
442
+
443
+
444
def _cmpkey(
    epoch: int,
    release: Tuple[int, ...],
    pre: Optional[Tuple[str, int]],
    post: Optional[Tuple[str, int]],
    dev: Optional[Tuple[str, int]],
    local: Optional[Tuple[SubLocalType]],
) -> CmpKey:
    """Build the tuple Version uses for ordering, per PEP 440 rules.

    Infinity/NegativeInfinity sentinels replace absent segments so that
    plain tuple comparison yields the specified ordering.
    """

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre: PrePostDevType = NegativeInfinity
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post: PrePostDevType = NegativeInfinity

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev: PrePostDevType = Infinity

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local: LocalType = NegativeInfinity
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__init__.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ """Wrappers to build Python packages using PEP 517 hooks
2
+ """
3
+
4
+ __version__ = '0.12.0'
5
+
6
+ from .wrappers import * # noqa: F401, F403
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (306 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-310.pyc ADDED
Binary file (3.59 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-310.pyc ADDED
Binary file (4.55 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-310.pyc ADDED
Binary file (2.96 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-310.pyc ADDED
Binary file (1.53 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/dirtools.cpython-310.pyc ADDED
Binary file (1.35 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-310.pyc ADDED
Binary file (4.37 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/meta.cpython-310.pyc ADDED
Binary file (2.95 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-310.pyc ADDED
Binary file (12.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/build.py ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Build a project using PEP 517 hooks.
2
+ """
3
+ import argparse
4
+ import io
5
+ import logging
6
+ import os
7
+ import shutil
8
+
9
+ from .envbuild import BuildEnvironment
10
+ from .wrappers import Pep517HookCaller
11
+ from .dirtools import tempdir, mkdir_p
12
+ from .compat import FileNotFoundError, toml_load
13
+
14
+ log = logging.getLogger(__name__)
15
+
16
+
17
def validate_system(system):
    """
    Raise ValueError if *system* lacks a mandatory build-system field
    ('requires' or 'build-backend').
    """
    required = {'requires', 'build-backend'}
    missing = required - set(system)
    if missing:
        raise ValueError(
            "Missing required fields: {missing}".format(missing=missing)
        )
27
+
28
+
29
def load_system(source_dir):
    """
    Return the [build-system] table from *source_dir*'s pyproject.toml.

    Raises FileNotFoundError when the file is absent and KeyError when the
    table is missing.
    """
    pyproject = os.path.join(source_dir, 'pyproject.toml')
    with io.open(pyproject, 'rb') as f:
        pyproject_data = toml_load(f)
    return pyproject_data['build-system']
37
+
38
+
39
def compat_system(source_dir):
    """
    Given a source dir, attempt to get a build system backend and
    requirements from pyproject.toml.  Falls back to the setuptools legacy
    backend when the file is missing or declares no build system.
    """
    try:
        system = load_system(source_dir)
    except (FileNotFoundError, KeyError):
        system = {}

    fallbacks = {
        'build-backend': 'setuptools.build_meta:__legacy__',
        'requires': ['setuptools', 'wheel'],
    }
    for key, value in fallbacks.items():
        system.setdefault(key, value)
    return system
56
+
57
+
58
def _do_build(hooks, env, dist, dest):
    """Build one distribution (*dist* is 'sdist' or 'wheel') into *dest*.

    Installs the backend's dynamic build requirements into *env* first.
    """
    get_requires = getattr(hooks, 'get_requires_for_build_' + dist)
    reqs = get_requires({})
    log.info('Got build requires: %s', reqs)

    env.pip_install(reqs)
    log.info('Installed dynamic build dependencies')

    with tempdir() as td:
        log.info('Trying to build %s in %s', dist, td)
        build_hook = getattr(hooks, 'build_' + dist)
        filename = build_hook(td, {})
        source = os.path.join(td, filename)
        shutil.move(source, os.path.join(dest, os.path.basename(filename)))
74
+
75
+
76
def build(source_dir, dist, dest=None, system=None):
    """Build *dist* ('sdist' or 'wheel') from *source_dir*.

    *dest* is resolved relative to *source_dir* (default: 'dist').  When
    *system* is not supplied, it is loaded from pyproject.toml.  Static
    build requirements are installed into a fresh BuildEnvironment.
    """
    system = system or load_system(source_dir)
    dest = os.path.join(source_dir, dest or 'dist')
    mkdir_p(dest)

    validate_system(system)
    hooks = Pep517HookCaller(
        source_dir, system['build-backend'], system.get('backend-path')
    )

    with BuildEnvironment() as env:
        env.pip_install(system['requires'])
        _do_build(hooks, env, dist, dest)
89
+
90
+
91
# Command-line interface for ``python -m pep517.build``.
parser = argparse.ArgumentParser()
parser.add_argument(
    'source_dir',
    help="A directory containing pyproject.toml",
)
# -b / -s select which distributions to build; when neither is given,
# main() builds both (see the filter there).
parser.add_argument(
    '--binary', '-b',
    action='store_true',
    default=False,
)
parser.add_argument(
    '--source', '-s',
    action='store_true',
    default=False,
)
parser.add_argument(
    '--out-dir', '-o',
    help="Destination in which to save the builds relative to source dir",
)
110
+
111
+
112
def main(args):
    """CLI entry point: build the requested distribution(s)."""
    log.warning('pep517.build is deprecated. '
                'Consider switching to https://pypi.org/project/build/')

    # Determine which dists to build: each flag selects one kind, and
    # giving neither flag builds both.
    dists = []
    if args.source or not args.binary:
        dists.append('sdist')
    if args.binary or not args.source:
        dists.append('wheel')

    for dist in dists:
        build(args.source_dir, dist, args.out_dir)


if __name__ == '__main__':
    main(parser.parse_args())
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/check.py ADDED
@@ -0,0 +1,207 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Check a project and backend by attempting to build using PEP 517 hooks.
2
+ """
3
+ import argparse
4
+ import io
5
+ import logging
6
+ import os
7
+ from os.path import isfile, join as pjoin
8
+ import shutil
9
+ from subprocess import CalledProcessError
10
+ import sys
11
+ import tarfile
12
+ from tempfile import mkdtemp
13
+ import zipfile
14
+
15
+ from .colorlog import enable_colourful_output
16
+ from .compat import TOMLDecodeError, toml_load
17
+ from .envbuild import BuildEnvironment
18
+ from .wrappers import Pep517HookCaller
19
+
20
+ log = logging.getLogger(__name__)
21
+
22
+
23
def check_build_sdist(hooks, build_sys_requires):
    """Attempt a complete sdist build in an isolated environment.

    Returns True when the backend produced a valid ``.tar.gz`` in the build
    directory; logs the reason and returns False on any failure.
    """
    with BuildEnvironment() as env:
        try:
            env.pip_install(build_sys_requires)
            log.info('Installed static build dependencies')
        except CalledProcessError:
            log.error('Failed to install static build dependencies')
            return False

        try:
            reqs = hooks.get_requires_for_build_sdist({})
            log.info('Got build requires: %s', reqs)
        except Exception:
            log.error('Failure in get_requires_for_build_sdist', exc_info=True)
            return False

        try:
            env.pip_install(reqs)
            log.info('Installed dynamic build dependencies')
        except CalledProcessError:
            log.error('Failed to install dynamic build dependencies')
            return False

        td = mkdtemp()
        log.info('Trying to build sdist in %s', td)
        try:
            try:
                filename = hooks.build_sdist(td, {})
                log.info('build_sdist returned %r', filename)
            except Exception:
                log.info('Failure in build_sdist', exc_info=True)
                return False

            # Validate the hook's output: name, existence, and format.
            if not filename.endswith('.tar.gz'):
                log.error(
                    "Filename %s doesn't have .tar.gz extension", filename)
                return False

            path = pjoin(td, filename)
            if isfile(path):
                log.info("Output file %s exists", path)
            else:
                log.error("Output file %s does not exist", path)
                return False

            if tarfile.is_tarfile(path):
                log.info("Output file is a tar file")
            else:
                log.error("Output file is not a tar file")
                return False

        finally:
            shutil.rmtree(td)

        return True
78
+
79
+
80
def check_build_wheel(hooks, build_sys_requires):
    """Attempt a complete wheel build in an isolated environment.

    Returns True when the backend produced a valid ``.whl`` in the build
    directory; logs the reason and returns False on any failure.
    """
    with BuildEnvironment() as env:
        try:
            env.pip_install(build_sys_requires)
            log.info('Installed static build dependencies')
        except CalledProcessError:
            log.error('Failed to install static build dependencies')
            return False

        try:
            reqs = hooks.get_requires_for_build_wheel({})
            log.info('Got build requires: %s', reqs)
        except Exception:
            # Fixed copy-paste error: this path previously logged
            # "get_requires_for_build_sdist", which misattributed wheel
            # failures to the sdist hook.
            log.error('Failure in get_requires_for_build_wheel', exc_info=True)
            return False

        try:
            env.pip_install(reqs)
            log.info('Installed dynamic build dependencies')
        except CalledProcessError:
            log.error('Failed to install dynamic build dependencies')
            return False

        td = mkdtemp()
        log.info('Trying to build wheel in %s', td)
        try:
            try:
                filename = hooks.build_wheel(td, {})
                log.info('build_wheel returned %r', filename)
            except Exception:
                log.info('Failure in build_wheel', exc_info=True)
                return False

            # Validate the hook's output: name, existence, and format.
            if not filename.endswith('.whl'):
                log.error("Filename %s doesn't have .whl extension", filename)
                return False

            path = pjoin(td, filename)
            if isfile(path):
                log.info("Output file %s exists", path)
            else:
                log.error("Output file %s does not exist", path)
                return False

            if zipfile.is_zipfile(path):
                log.info("Output file is a zip file")
            else:
                log.error("Output file is not a zip file")
                return False

        finally:
            shutil.rmtree(td)

        return True
134
+
135
+
136
def check(source_dir):
    """Run the sdist and wheel build checks for *source_dir*.

    Returns True when pyproject.toml is valid and the sdist build check
    passes.  Wheel failures are only warned about — the return value (and
    hence the CLI exit status) reflects the sdist result alone.
    """
    pyproject = pjoin(source_dir, 'pyproject.toml')
    if isfile(pyproject):
        log.info('Found pyproject.toml')
    else:
        log.error('Missing pyproject.toml')
        return False

    try:
        with io.open(pyproject, 'rb') as f:
            pyproject_data = toml_load(f)
        # Ensure the mandatory data can be loaded
        buildsys = pyproject_data['build-system']
        requires = buildsys['requires']
        backend = buildsys['build-backend']
        backend_path = buildsys.get('backend-path')
        log.info('Loaded pyproject.toml')
    except (TOMLDecodeError, KeyError):
        log.error("Invalid pyproject.toml", exc_info=True)
        return False

    hooks = Pep517HookCaller(source_dir, backend, backend_path)

    sdist_ok = check_build_sdist(hooks, requires)
    wheel_ok = check_build_wheel(hooks, requires)

    if not sdist_ok:
        log.warning('Sdist checks failed; scroll up to see')
    if not wheel_ok:
        log.warning('Wheel checks failed')

    return sdist_ok
168
+
169
+
170
def main(argv=None):
    """CLI entry point for ``python -m pep517.check``."""
    log.warning('pep517.check is deprecated. '
                'Consider switching to https://pypi.org/project/build/')

    ap = argparse.ArgumentParser()
    ap.add_argument(
        'source_dir',
        help="A directory containing pyproject.toml")
    args = ap.parse_args(argv)

    enable_colourful_output()

    ok = check(args.source_dir)

    # Exit non-zero (after a red banner) when the checks did not pass.
    if not ok:
        print(ansi('Checks failed', 'red'))
        sys.exit(1)
    print(ansi('Checks passed', 'green'))
189
+
190
+
191
# Minimal ANSI escape sequences for terminal colouring.
ansi_codes = {
    'reset': '\x1b[0m',
    'bold': '\x1b[1m',
    'red': '\x1b[31m',
    'green': '\x1b[32m',
}


def ansi(s, attr):
    """Wrap *s* in the colour code *attr* when stdout is a POSIX tty."""
    if os.name == 'nt' or not sys.stdout.isatty():
        return str(s)
    return ansi_codes[attr] + str(s) + ansi_codes['reset']


if __name__ == '__main__':
    main()
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/colorlog.py ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Nicer log formatting with colours.
2
+
3
+ Code copied from Tornado, Apache licensed.
4
+ """
5
+ # Copyright 2012 Facebook
6
+ #
7
+ # Licensed under the Apache License, Version 2.0 (the "License"); you may
8
+ # not use this file except in compliance with the License. You may obtain
9
+ # a copy of the License at
10
+ #
11
+ # http://www.apache.org/licenses/LICENSE-2.0
12
+ #
13
+ # Unless required by applicable law or agreed to in writing, software
14
+ # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
15
+ # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
16
+ # License for the specific language governing permissions and limitations
17
+ # under the License.
18
+
19
+ import logging
20
+ import sys
21
+
22
+ try:
23
+ import curses
24
+ except ImportError:
25
+ curses = None
26
+
27
+
28
+ def _stderr_supports_color():
29
+ color = False
30
+ if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
31
+ try:
32
+ curses.setupterm()
33
+ if curses.tigetnum("colors") > 0:
34
+ color = True
35
+ except Exception:
36
+ pass
37
+ return color
38
+
39
+
40
class LogFormatter(logging.Formatter):
    """Log formatter with colour support
    """
    # Terminal colour numbers (setaf arguments) per log level.
    DEFAULT_COLORS = {
        logging.INFO: 2,  # Green
        logging.WARNING: 3,  # Yellow
        logging.ERROR: 1,  # Red
        logging.CRITICAL: 1,
    }

    def __init__(self, color=True, datefmt=None):
        r"""
        :arg bool color: Enables color support.
        :arg string fmt: Log message format.
        It will be applied to the attributes dict of log records. The
        text between ``%(color)s`` and ``%(end_color)s`` will be colored
        depending on the level if color support is on.
        :arg dict colors: color mappings from logging level to terminal color
        code
        :arg string datefmt: Datetime format.
        Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
        .. versionchanged:: 3.2
        Added ``fmt`` and ``datefmt`` arguments.
        """
        logging.Formatter.__init__(self, datefmt=datefmt)
        self._colors = {}
        if color and _stderr_supports_color():
            # The curses module has some str/bytes confusion in
            # python3. Until version 3.2.3, most methods return
            # bytes, but only accept strings. In addition, we want to
            # output these strings with the logging module, which
            # works with unicode strings. The explicit calls to
            # unicode() below are harmless in python2 but will do the
            # right conversion in python 3.
            fg_color = (curses.tigetstr("setaf") or
                        curses.tigetstr("setf") or "")
            if (3, 0) < sys.version_info < (3, 2, 3):
                fg_color = str(fg_color, "ascii")

            for levelno, code in self.DEFAULT_COLORS.items():
                self._colors[levelno] = str(
                    curses.tparm(fg_color, code), "ascii")
            self._normal = str(curses.tigetstr("sgr0"), "ascii")

            # Probe the real terminal width so the right-aligned
            # "<level initial>-<logger name>" suffix lines up.
            scr = curses.initscr()
            self.termwidth = scr.getmaxyx()[1]
            curses.endwin()
        else:
            self._normal = ''
            # Default width is usually 80, but too wide is
            # worse than too narrow
            self.termwidth = 70

    def formatMessage(self, record):
        # Pad between the message and the right-aligned level/name tag.
        mlen = len(record.message)
        right_text = '{initial}-{name}'.format(initial=record.levelname[0],
                                               name=record.name)
        if mlen + len(right_text) < self.termwidth:
            space = ' ' * (self.termwidth - (mlen + len(right_text)))
        else:
            space = ' '

        if record.levelno in self._colors:
            start_color = self._colors[record.levelno]
            end_color = self._normal
        else:
            start_color = end_color = ''

        return record.message + space + start_color + right_text + end_color
109
+
110
+
111
def enable_colourful_output(level=logging.INFO):
    """Install a colourised StreamHandler on the root logger at *level*."""
    handler = logging.StreamHandler()
    handler.setFormatter(LogFormatter())
    root = logging.root
    root.addHandler(handler)
    root.setLevel(level)
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/compat.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Python 2/3 compatibility"""
2
+ import io
3
+ import json
4
+ import sys
5
+
6
+
7
# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
if sys.version_info[0] < 3:
    # Python 2: binary-mode files; the json module handles the encoding.
    def write_json(obj, path, **kwargs):
        with open(path, 'wb') as f:
            json.dump(obj, f, encoding='utf-8', **kwargs)

    def read_json(path):
        with open(path, 'rb') as f:
            return json.load(f)

else:
    # Python 3: text-mode files with an explicit UTF-8 encoding.
    def write_json(obj, path, **kwargs):
        with open(path, 'w', encoding='utf-8') as f:
            json.dump(obj, f, **kwargs)

    def read_json(path):
        with open(path, 'r', encoding='utf-8') as f:
            return json.load(f)
28
+
29
+
30
# FileNotFoundError exists only on Python 3; fall back to IOError on
# Python 2 so callers can catch one name on both.
try:
    FileNotFoundError = FileNotFoundError
except NameError:
    FileNotFoundError = IOError
36
+
37
+
38
# The vendored tomli needs Python 3.6+; on older interpreters fall back to
# the pure-Python 'toml' package, wrapping the binary file handle in a text
# layer (toml expects text, tomli expects bytes).
if sys.version_info < (3, 6):
    from toml import load as _toml_load  # noqa: F401

    def toml_load(f):
        # Wrap without closing the caller's binary handle (detach, not close).
        w = io.TextIOWrapper(f, encoding="utf8", newline="")
        try:
            return _toml_load(w)
        finally:
            w.detach()

    from toml import TomlDecodeError as TOMLDecodeError  # noqa: F401
else:
    from pip._vendor.tomli import load as toml_load  # noqa: F401
    from pip._vendor.tomli import TOMLDecodeError  # noqa: F401
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/dirtools.py ADDED
@@ -0,0 +1,44 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import io
3
+ import contextlib
4
+ import tempfile
5
+ import shutil
6
+ import errno
7
+ import zipfile
8
+
9
+
10
@contextlib.contextmanager
def tempdir():
    """Yield a fresh temporary directory, removing it on exit."""
    path = tempfile.mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
18
+
19
+
20
def mkdir_p(*args, **kwargs):
    """Like `mkdir`, but do not raise when the directory already exists."""
    try:
        return os.mkdir(*args, **kwargs)
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            return None
        raise
29
+
30
+
31
def dir_to_zipfile(root):
    """Construct an in-memory zip file mirroring the directory *root*.

    Returns an open :class:`zipfile.ZipFile` backed by a BytesIO buffer.
    Entry names are relative to *root*, so nested files keep their paths.
    """
    buffer = io.BytesIO()
    zip_file = zipfile.ZipFile(buffer, 'w')
    # Bug fix: the walk variable used to shadow *root*, which made
    # os.path.relpath(fs_path, root) collapse every entry to its bare
    # basename — nested entries were flattened and could overwrite each
    # other.  Keep the original *root* for relpath and use *dirpath* for
    # the directory currently being walked.
    for dirpath, dirs, files in os.walk(root):
        for path in dirs:
            fs_path = os.path.join(dirpath, path)
            rel_path = os.path.relpath(fs_path, root)
            zip_file.writestr(rel_path + '/', '')
        for path in files:
            fs_path = os.path.join(dirpath, path)
            rel_path = os.path.relpath(fs_path, root)
            zip_file.write(fs_path, rel_path)
    return zip_file
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/envbuild.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Build wheels/sdists by installing build deps to a temporary environment.
2
+ """
3
+
4
+ import io
5
+ import os
6
+ import logging
7
+ import shutil
8
+ from subprocess import check_call
9
+ import sys
10
+ from sysconfig import get_paths
11
+ from tempfile import mkdtemp
12
+
13
+ from .compat import toml_load
14
+ from .wrappers import Pep517HookCaller, LoggerWrapper
15
+
16
+ log = logging.getLogger(__name__)
17
+
18
+
19
def _load_pyproject(source_dir):
    """Return (requires, build-backend, backend-path) from pyproject.toml."""
    pyproject = os.path.join(source_dir, 'pyproject.toml')
    # toml_load expects a binary file object (see compat module).
    with io.open(pyproject, 'rb') as f:
        buildsys = toml_load(f)['build-system']
    return (
        buildsys['requires'],
        buildsys['build-backend'],
        buildsys.get('backend-path'),
    )
31
+
32
+
33
class BuildEnvironment(object):
    """Context manager to install build deps in a simple temporary environment

    Based on code I wrote for pip, which is MIT licensed.
    """
    # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
    #
    # Permission is hereby granted, free of charge, to any person obtaining
    # a copy of this software and associated documentation files (the
    # "Software"), to deal in the Software without restriction, including
    # without limitation the rights to use, copy, modify, merge, publish,
    # distribute, sublicense, and/or sell copies of the Software, and to
    # permit persons to whom the Software is furnished to do so, subject to
    # the following conditions:
    #
    # The above copyright notice and this permission notice shall be
    # included in all copies or substantial portions of the Software.
    #
    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

    # Temporary prefix directory; set by __enter__, None until then.
    path = None

    def __init__(self, cleanup=True):
        """:param bool cleanup: remove the temporary prefix on exit."""
        self._cleanup = cleanup

    def __enter__(self):
        """Create the temp prefix and point PATH / PYTHONPATH into it."""
        self.path = mkdtemp(prefix='pep517-build-env-')
        log.info('Temporary build environment: %s', self.path)

        # Remember the original values so __exit__ can restore them exactly
        # (including the distinction between unset and empty).
        self.save_path = os.environ.get('PATH', None)
        self.save_pythonpath = os.environ.get('PYTHONPATH', None)

        # Compute the scripts/purelib/platlib layout for a prefix rooted at
        # the temporary directory.
        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
        install_dirs = get_paths(install_scheme, vars={
            'base': self.path,
            'platbase': self.path,
        })

        scripts = install_dirs['scripts']
        if self.save_path:
            os.environ['PATH'] = scripts + os.pathsep + self.save_path
        else:
            os.environ['PATH'] = scripts + os.pathsep + os.defpath

        # Avoid a duplicated entry when purelib and platlib coincide.
        if install_dirs['purelib'] == install_dirs['platlib']:
            lib_dirs = install_dirs['purelib']
        else:
            lib_dirs = install_dirs['purelib'] + os.pathsep + \
                install_dirs['platlib']
        if self.save_pythonpath:
            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
                self.save_pythonpath
        else:
            os.environ['PYTHONPATH'] = lib_dirs

        return self

    def pip_install(self, reqs):
        """Install dependencies into this env by calling pip in a subprocess"""
        if not reqs:
            return
        log.info('Calling pip to install %s', reqs)
        cmd = [
            sys.executable, '-m', 'pip', 'install', '--ignore-installed',
            '--prefix', self.path] + list(reqs)
        # Pipe pip's output through the module logger.
        check_call(
            cmd,
            stdout=LoggerWrapper(log, logging.INFO),
            stderr=LoggerWrapper(log, logging.ERROR),
        )

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Delete the temp prefix (if requested) and restore the env vars."""
        needs_cleanup = (
            self._cleanup and
            self.path is not None and
            os.path.isdir(self.path)
        )
        if needs_cleanup:
            shutil.rmtree(self.path)

        # Restore PATH/PYTHONPATH; pop entirely if they were unset before.
        if self.save_path is None:
            os.environ.pop('PATH', None)
        else:
            os.environ['PATH'] = self.save_path

        if self.save_pythonpath is None:
            os.environ.pop('PYTHONPATH', None)
        else:
            os.environ['PYTHONPATH'] = self.save_pythonpath
128
+
129
+
130
def build_wheel(source_dir, wheel_dir, config_settings=None):
    """Build a wheel from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str wheel_dir: Target directory to create wheel in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    config_settings = {} if config_settings is None else config_settings
    requires, backend, backend_path = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend, backend_path)

    with BuildEnvironment() as env:
        # Static requirements first, then whatever the backend asks for.
        env.pip_install(requires)
        env.pip_install(hooks.get_requires_for_build_wheel(config_settings))
        return hooks.build_wheel(wheel_dir, config_settings)
150
+
151
+
152
def build_sdist(source_dir, sdist_dir, config_settings=None):
    """Build an sdist from a source directory using PEP 517 hooks.

    :param str source_dir: Source directory containing pyproject.toml
    :param str sdist_dir: Target directory to place sdist in
    :param dict config_settings: Options to pass to build backend

    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    config_settings = {} if config_settings is None else config_settings
    requires, backend, backend_path = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend, backend_path)

    with BuildEnvironment() as env:
        # Static requirements first, then whatever the backend asks for.
        env.pip_install(requires)
        env.pip_install(hooks.get_requires_for_build_sdist(config_settings))
        return hooks.build_sdist(sdist_dir, config_settings)
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__init__.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""This is a subpackage because the directory is on sys.path for _in_process.py

The subpackage should stay as empty as possible to avoid shadowing modules that
the backend might import.
"""
from os.path import dirname, abspath, join as pjoin
from contextlib import contextmanager

try:
    import importlib.resources as resources

    def _in_proc_script_path():
        # resources.path returns a context manager yielding a real
        # filesystem path for _in_process.py, even if the package is
        # inside a zip archive.
        return resources.path(__package__, '_in_process.py')
except ImportError:
    # Older Pythons without importlib.resources: assume the package lives
    # on the filesystem next to this file.
    @contextmanager
    def _in_proc_script_path():
        yield pjoin(dirname(abspath(__file__)), '_in_process.py')
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (914 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-310.pyc ADDED
Binary file (10.1 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/_in_process.py ADDED
@@ -0,0 +1,363 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """This is invoked in a subprocess to call the build backend hooks.
2
+
3
+ It expects:
4
+ - Command line args: hook_name, control_dir
5
+ - Environment variables:
6
+ PEP517_BUILD_BACKEND=entry.point:spec
7
+ PEP517_BACKEND_PATH=paths (separated with os.pathsep)
8
+ - control_dir/input.json:
9
+ - {"kwargs": {...}}
10
+
11
+ Results:
12
+ - control_dir/output.json
13
+ - {"return_val": ...}
14
+ """
15
+ from glob import glob
16
+ from importlib import import_module
17
+ import json
18
+ import os
19
+ import os.path
20
+ from os.path import join as pjoin
21
+ import re
22
+ import shutil
23
+ import sys
24
+ import traceback
25
+
26
+ # This file is run as a script, and `import compat` is not zip-safe, so we
27
+ # include write_json() and read_json() from compat.py.
28
+ #
29
+ # Handle reading and writing JSON in UTF-8, on Python 3 and 2.
30
+
31
if sys.version_info[0] >= 3:
    # Python 3
    def write_json(obj, path, **kwargs):
        """Serialise *obj* to *path* as UTF-8 JSON."""
        with open(path, 'w', encoding='utf-8') as f:
            json.dump(obj, f, **kwargs)

    def read_json(path):
        """Load and return the JSON document stored at *path* (UTF-8)."""
        with open(path, 'r', encoding='utf-8') as f:
            return json.load(f)

else:
    # Python 2
    def write_json(obj, path, **kwargs):
        # On Python 2 json.dump accepts the target encoding directly, so
        # the file is opened in binary mode.
        with open(path, 'wb') as f:
            json.dump(obj, f, encoding='utf-8', **kwargs)

    def read_json(path):
        with open(path, 'rb') as f:
            return json.load(f)
50
+
51
+
52
class BackendUnavailable(Exception):
    """Raised if we cannot import the backend"""
    # Note: Exception.__init__ is deliberately not chained; args are set
    # by Exception.__new__ from the constructor argument.
    def __init__(self, traceback):
        # Formatted traceback of the failed import, for reporting upstream.
        self.traceback = traceback
56
+
57
+
58
class BackendInvalid(Exception):
    """Raised if the backend is invalid"""
    def __init__(self, message):
        # Human-readable reason the backend failed validation.
        self.message = message
62
+
63
+
64
class HookMissing(Exception):
    """Raised if a hook is missing and we are not executing the fallback"""
    def __init__(self, hook_name=None):
        # Pass hook_name to Exception so it appears in args/str().
        super(HookMissing, self).__init__(hook_name)
        self.hook_name = hook_name
69
+
70
+
71
def contained_in(filename, directory):
    """Test if a file is located within the given directory."""
    # Compare absolute, case-normalised paths; commonprefix is a
    # character-based comparison.
    target, base = (
        os.path.normcase(os.path.abspath(p)) for p in (filename, directory)
    )
    return os.path.commonprefix([target, base]) == base
76
+
77
+
78
def _build_backend():
    """Find and load the build backend

    Reads PEP517_BUILD_BACKEND ('module:dotted.object' entry point) and
    optionally PEP517_BACKEND_PATH (os.pathsep-separated in-tree dirs).
    Raises BackendUnavailable if the module cannot be imported, and
    BackendInvalid if an in-tree backend resolves outside backend-path.
    """
    # Add in-tree backend directories to the front of sys.path.
    backend_path = os.environ.get('PEP517_BACKEND_PATH')
    if backend_path:
        extra_pathitems = backend_path.split(os.pathsep)
        sys.path[:0] = extra_pathitems

    ep = os.environ['PEP517_BUILD_BACKEND']
    mod_path, _, obj_path = ep.partition(':')
    try:
        obj = import_module(mod_path)
    except ImportError:
        raise BackendUnavailable(traceback.format_exc())

    if backend_path:
        # Per PEP 517, a backend named with backend-path must actually be
        # loaded from one of the listed directories.
        if not any(
            contained_in(obj.__file__, path)
            for path in extra_pathitems
        ):
            raise BackendInvalid("Backend was not loaded from backend-path")

    if obj_path:
        # Walk the dotted attribute path after ':' to the backend object.
        for path_part in obj_path.split('.'):
            obj = getattr(obj, path_part)
    return obj
104
+
105
+
106
def _supported_features():
    """Return the list of options features supported by the backend.

    Returns a list of strings.
    The only possible value is 'build_editable'.
    """
    backend = _build_backend()
    return ["build_editable"] if hasattr(backend, "build_editable") else []
117
+
118
+
119
def get_requires_for_build_wheel(config_settings):
    """Invoke the optional get_requires_for_build_wheel hook

    Returns [] if the hook is not defined.
    """
    backend = _build_backend()
    hook = getattr(backend, 'get_requires_for_build_wheel', None)
    if hook is None:
        return []
    return hook(config_settings)
131
+
132
+
133
def get_requires_for_build_editable(config_settings):
    """Invoke the optional get_requires_for_build_editable hook

    Returns [] if the hook is not defined.
    """
    backend = _build_backend()
    hook = getattr(backend, 'get_requires_for_build_editable', None)
    if hook is None:
        return []
    return hook(config_settings)
145
+
146
+
147
def prepare_metadata_for_build_wheel(
        metadata_directory, config_settings, _allow_fallback):
    """Invoke optional prepare_metadata_for_build_wheel

    Implements a fallback by building a wheel if the hook isn't defined,
    unless _allow_fallback is False in which case HookMissing is raised.
    """
    backend = _build_backend()
    hook = getattr(backend, 'prepare_metadata_for_build_wheel', None)
    if hook is not None:
        return hook(metadata_directory, config_settings)
    if not _allow_fallback:
        raise HookMissing()
    # Fallback: build a full wheel and extract the metadata from it.
    whl_basename = backend.build_wheel(metadata_directory, config_settings)
    return _get_wheel_metadata_from_wheel(
        whl_basename, metadata_directory, config_settings)
165
+
166
+
167
def prepare_metadata_for_build_editable(
        metadata_directory, config_settings, _allow_fallback):
    """Invoke optional prepare_metadata_for_build_editable

    Implements a fallback by building an editable wheel if the hook isn't
    defined, unless _allow_fallback is False in which case HookMissing is
    raised.
    """
    backend = _build_backend()
    hook = getattr(backend, 'prepare_metadata_for_build_editable', None)
    if hook is not None:
        return hook(metadata_directory, config_settings)
    if not _allow_fallback:
        raise HookMissing()
    # Fallback: build an editable wheel and extract the metadata from it.
    build_hook = getattr(backend, 'build_editable', None)
    if build_hook is None:
        raise HookMissing(hook_name='build_editable')
    whl_basename = build_hook(metadata_directory, config_settings)
    return _get_wheel_metadata_from_wheel(
        whl_basename, metadata_directory, config_settings)
192
+
193
+
194
# Marker file left beside a wheel built by the prepare-metadata fallback,
# so a later build_wheel call can reuse it instead of rebuilding.
WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL'
195
+
196
+
197
+ def _dist_info_files(whl_zip):
198
+ """Identify the .dist-info folder inside a wheel ZipFile."""
199
+ res = []
200
+ for path in whl_zip.namelist():
201
+ m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
202
+ if m:
203
+ res.append(path)
204
+ if res:
205
+ return res
206
+ raise Exception("No .dist-info folder found in wheel")
207
+
208
+
209
def _get_wheel_metadata_from_wheel(
        whl_basename, metadata_directory, config_settings):
    """Extract the metadata from a wheel.

    Fallback for when the build backend does not
    define the 'get_wheel_metadata' hook.
    """
    from zipfile import ZipFile

    # Leave a marker so a later build_wheel call can reuse this wheel.
    with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'):
        pass  # Touch marker file

    wheel_path = os.path.join(metadata_directory, whl_basename)
    with ZipFile(wheel_path) as archive:
        members = _dist_info_files(archive)
        archive.extractall(path=metadata_directory, members=members)
    # The dist-info folder name is everything before the first '/'.
    return members[0].split('/')[0]
225
+
226
+
227
def _find_already_built_wheel(metadata_directory):
    """Check for a wheel already built during the get_wheel_metadata hook.
    """
    if not metadata_directory:
        return None
    parent = os.path.dirname(metadata_directory)
    if not os.path.isfile(pjoin(parent, WHEEL_BUILT_MARKER)):
        return None

    candidates = glob(os.path.join(parent, '*.whl'))
    if not candidates:
        print('Found wheel built marker, but no .whl files')
        return None
    if len(candidates) > 1:
        print('Found multiple .whl files; unspecified behaviour. '
              'Will call build_wheel.')
        return None

    # Exactly one .whl file
    return candidates[0]
247
+
248
+
249
def build_wheel(wheel_directory, config_settings, metadata_directory=None):
    """Invoke the mandatory build_wheel hook.

    If a wheel was already built in the
    prepare_metadata_for_build_wheel fallback, this
    will copy it rather than rebuilding the wheel.
    """
    prebuilt_whl = _find_already_built_wheel(metadata_directory)
    if prebuilt_whl:
        shutil.copy2(prebuilt_whl, wheel_directory)
        return os.path.basename(prebuilt_whl)
    return _build_backend().build_wheel(
        wheel_directory, config_settings, metadata_directory)
263
+
264
+
265
def build_editable(wheel_directory, config_settings, metadata_directory=None):
    """Invoke the optional build_editable hook.

    If a wheel was already built in the
    prepare_metadata_for_build_editable fallback, this
    will copy it rather than rebuilding the wheel.
    """
    backend = _build_backend()
    hook = getattr(backend, 'build_editable', None)
    if hook is None:
        raise HookMissing()
    prebuilt_whl = _find_already_built_wheel(metadata_directory)
    if prebuilt_whl:
        shutil.copy2(prebuilt_whl, wheel_directory)
        return os.path.basename(prebuilt_whl)
    return hook(wheel_directory, config_settings, metadata_directory)
284
+
285
+
286
def get_requires_for_build_sdist(config_settings):
    """Invoke the optional get_requires_for_build_sdist hook

    Returns [] if the hook is not defined.
    """
    # Docstring fixed: it previously named get_requires_for_build_wheel
    # (copy-paste error).
    backend = _build_backend()
    try:
        hook = backend.get_requires_for_build_sdist
    except AttributeError:
        return []
    else:
        return hook(config_settings)
298
+
299
+
300
class _DummyException(Exception):
    """Nothing should ever raise this exception"""
    # Used as a never-matching fallback in build_sdist's except clause.
302
+
303
+
304
class GotUnsupportedOperation(Exception):
    """For internal use when backend raises UnsupportedOperation"""
    def __init__(self, traceback):
        # Formatted traceback from the backend, for reporting upstream.
        self.traceback = traceback
308
+
309
+
310
def build_sdist(sdist_directory, config_settings):
    """Invoke the mandatory build_sdist hook."""
    backend = _build_backend()
    try:
        return backend.build_sdist(sdist_directory, config_settings)
    # The except expression is evaluated only when an exception is raised,
    # so a backend without UnsupportedOperation falls back to the dummy
    # class that nothing ever raises.
    except getattr(backend, 'UnsupportedOperation', _DummyException):
        raise GotUnsupportedOperation(traceback.format_exc())
317
+
318
+
319
# Whitelist of hooks main() may dispatch to by name from sys.argv.
HOOK_NAMES = {
    'get_requires_for_build_wheel',
    'prepare_metadata_for_build_wheel',
    'build_wheel',
    'get_requires_for_build_editable',
    'prepare_metadata_for_build_editable',
    'build_editable',
    'get_requires_for_build_sdist',
    'build_sdist',
    '_supported_features',
}
330
+
331
+
332
def main():
    """Run the hook named on the command line and record its outcome.

    Reads kwargs from <control_dir>/input.json and writes the return
    value, or a structured error description, to <control_dir>/output.json.
    """
    if len(sys.argv) < 3:
        sys.exit("Needs args: hook_name, control_dir")
    hook_name, control_dir = sys.argv[1], sys.argv[2]
    if hook_name not in HOOK_NAMES:
        sys.exit("Unknown hook: %s" % hook_name)
    hook = globals()[hook_name]

    request = read_json(pjoin(control_dir, 'input.json'))

    result = {'unsupported': False, 'return_val': None}
    try:
        result['return_val'] = hook(**request['kwargs'])
    except BackendUnavailable as e:
        result['no_backend'] = True
        result['traceback'] = e.traceback
    except BackendInvalid as e:
        result['backend_invalid'] = True
        result['backend_error'] = e.message
    except GotUnsupportedOperation as e:
        result['unsupported'] = True
        result['traceback'] = e.traceback
    except HookMissing as e:
        result['hook_missing'] = True
        result['missing_hook_name'] = e.hook_name or hook_name

    write_json(result, pjoin(control_dir, 'output.json'), indent=2)


if __name__ == '__main__':
    main()
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/meta.py ADDED
@@ -0,0 +1,92 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Build metadata for a project using PEP 517 hooks.
2
+ """
3
+ import argparse
4
+ import logging
5
+ import os
6
+ import shutil
7
+ import functools
8
+
9
+ try:
10
+ import importlib.metadata as imp_meta
11
+ except ImportError:
12
+ import importlib_metadata as imp_meta
13
+
14
+ try:
15
+ from zipfile import Path
16
+ except ImportError:
17
+ from zipp import Path
18
+
19
+ from .envbuild import BuildEnvironment
20
+ from .wrappers import Pep517HookCaller, quiet_subprocess_runner
21
+ from .dirtools import tempdir, mkdir_p, dir_to_zipfile
22
+ from .build import validate_system, load_system, compat_system
23
+
24
+ log = logging.getLogger(__name__)
25
+
26
+
27
def _prep_meta(hooks, env, dest):
    """Install dynamic build deps, then build metadata into *dest*."""
    reqs = hooks.get_requires_for_build_wheel({})
    log.info('Got build requires: %s', reqs)

    env.pip_install(reqs)
    log.info('Installed dynamic build dependencies')

    with tempdir() as workdir:
        log.info('Trying to build metadata in %s', workdir)
        dist_info = hooks.prepare_metadata_for_build_wheel(workdir, {})
        built = os.path.join(workdir, dist_info)
        shutil.move(built, os.path.join(dest, os.path.basename(dist_info)))
39
+
40
+
41
def build(source_dir='.', dest=None, system=None):
    """Build project metadata from *source_dir* into *dest* ('dist' by default)."""
    system = system or load_system(source_dir)
    dest = os.path.join(source_dir, dest or 'dist')
    mkdir_p(dest)
    validate_system(system)
    hooks = Pep517HookCaller(
        source_dir, system['build-backend'], system.get('backend-path')
    )

    # Run the backend quietly, inside an isolated build environment.
    with hooks.subprocess_runner(quiet_subprocess_runner), \
            BuildEnvironment() as env:
        env.pip_install(system['requires'])
        _prep_meta(hooks, env, dest)
54
+
55
+
56
def build_as_zip(builder=build):
    """Run *builder* into a temp dir and return its contents as a ZipFile."""
    with tempdir() as staging:
        builder(dest=staging)
        # Zip before the context manager deletes the directory.
        return dir_to_zipfile(staging)
60
+
61
+
62
def load(root):
    """
    Given a source directory (root) of a package,
    return an importlib.metadata.Distribution object
    with metadata built from that package.
    """
    root = os.path.expanduser(root)
    system = compat_system(root)
    make_meta = functools.partial(build, source_dir=root, system=system)
    zip_path = Path(build_as_zip(make_meta))
    return imp_meta.PathDistribution(zip_path)
73
+
74
+
75
# Command-line interface: `python -m pep517.meta <source_dir> [-o OUT]`.
parser = argparse.ArgumentParser()
parser.add_argument(
    'source_dir',
    help="A directory containing pyproject.toml",
)
parser.add_argument(
    '--out-dir', '-o',
    help="Destination in which to save the builds relative to source dir",
)
84
+
85
+
86
def main():
    """CLI entry point: build metadata for the given source directory."""
    options = parser.parse_args()
    build(options.source_dir, options.out_dir)


if __name__ == '__main__':
    main()
env-llmeval/lib/python3.10/site-packages/pip/_vendor/pep517/wrappers.py ADDED
@@ -0,0 +1,375 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import threading
2
+ from contextlib import contextmanager
3
+ import os
4
+ from os.path import abspath, join as pjoin
5
+ import shutil
6
+ from subprocess import check_call, check_output, STDOUT
7
+ import sys
8
+ from tempfile import mkdtemp
9
+
10
+ from . import compat
11
+ from .in_process import _in_proc_script_path
12
+
13
# Public API of the pep517 wrapper module.
__all__ = [
    'BackendUnavailable',
    'BackendInvalid',
    'HookMissing',
    'UnsupportedOperation',
    'default_subprocess_runner',
    'quiet_subprocess_runner',
    'Pep517HookCaller',
]
22
+
23
+
24
@contextmanager
def tempdir():
    """Yield a fresh temporary directory, removing it on exit."""
    path = mkdtemp()
    try:
        yield path
    finally:
        shutil.rmtree(path)
31
+
32
+
33
class BackendUnavailable(Exception):
    """Will be raised if the backend cannot be imported in the hook process."""
    def __init__(self, traceback):
        # Formatted traceback from the subprocess, for error reporting.
        self.traceback = traceback
37
+
38
+
39
class BackendInvalid(Exception):
    """Will be raised if the backend is invalid."""
    def __init__(self, backend_name, backend_path, message):
        # Backend spec and backend-path from pyproject.toml, plus the
        # subprocess's description of what went wrong.
        self.backend_name = backend_name
        self.backend_path = backend_path
        self.message = message
45
+
46
+
47
class HookMissing(Exception):
    """Will be raised on missing hooks."""
    def __init__(self, hook_name):
        # Pass hook_name to Exception so it appears in args/str().
        super(HookMissing, self).__init__(hook_name)
        self.hook_name = hook_name
52
+
53
+
54
class UnsupportedOperation(Exception):
    """May be raised by build_sdist if the backend indicates that it can't."""
    def __init__(self, traceback):
        # Formatted traceback from the subprocess, for error reporting.
        self.traceback = traceback
58
+
59
+
60
def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
    """The default method of calling the wrapper subprocess."""
    # Start from the current environment and layer on any extras.
    env = dict(os.environ)
    if extra_environ:
        env.update(extra_environ)
    check_call(cmd, cwd=cwd, env=env)
67
+
68
+
69
def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None):
    """A method of calling the wrapper subprocess while suppressing output."""
    # Start from the current environment and layer on any extras.
    env = dict(os.environ)
    if extra_environ:
        env.update(extra_environ)
    # check_output captures stdout (and stderr via redirection).
    check_output(cmd, cwd=cwd, env=env, stderr=STDOUT)
76
+
77
+
78
def norm_and_check(source_tree, requested):
    """Normalise and check a backend path.

    Ensure that the requested backend path is specified as a relative path,
    and resolves to a location under the given source tree.

    Return an absolute version of the requested path.
    """
    if os.path.isabs(requested):
        raise ValueError("paths must be relative")

    base = os.path.abspath(source_tree)
    resolved = os.path.normpath(os.path.join(base, requested))
    # commonprefix is a character-based comparison (kept for Python 2.7
    # compatibility), so compare case-normalised paths.
    norm_base, norm_resolved = (
        os.path.normcase(p) for p in (base, resolved)
    )
    if os.path.commonprefix([norm_base, norm_resolved]) != norm_base:
        raise ValueError("paths must be inside source tree")

    return resolved
100
+
101
+
102
+ class Pep517HookCaller(object):
103
+ """A wrapper around a source directory to be built with a PEP 517 backend.
104
+
105
+ :param source_dir: The path to the source directory, containing
106
+ pyproject.toml.
107
+ :param build_backend: The build backend spec, as per PEP 517, from
108
+ pyproject.toml.
109
+ :param backend_path: The backend path, as per PEP 517, from pyproject.toml.
110
+ :param runner: A callable that invokes the wrapper subprocess.
111
+ :param python_executable: The Python executable used to invoke the backend
112
+
113
+ The 'runner', if provided, must expect the following:
114
+
115
+ - cmd: a list of strings representing the command and arguments to
116
+ execute, as would be passed to e.g. 'subprocess.check_call'.
117
+ - cwd: a string representing the working directory that must be
118
+ used for the subprocess. Corresponds to the provided source_dir.
119
+ - extra_environ: a dict mapping environment variable names to values
120
+ which must be set for the subprocess execution.
121
+ """
122
+ def __init__(
123
+ self,
124
+ source_dir,
125
+ build_backend,
126
+ backend_path=None,
127
+ runner=None,
128
+ python_executable=None,
129
+ ):
130
+ if runner is None:
131
+ runner = default_subprocess_runner
132
+
133
+ self.source_dir = abspath(source_dir)
134
+ self.build_backend = build_backend
135
+ if backend_path:
136
+ backend_path = [
137
+ norm_and_check(self.source_dir, p) for p in backend_path
138
+ ]
139
+ self.backend_path = backend_path
140
+ self._subprocess_runner = runner
141
+ if not python_executable:
142
+ python_executable = sys.executable
143
+ self.python_executable = python_executable
144
+
145
+ @contextmanager
146
+ def subprocess_runner(self, runner):
147
+ """A context manager for temporarily overriding the default subprocess
148
+ runner.
149
+ """
150
+ prev = self._subprocess_runner
151
+ self._subprocess_runner = runner
152
+ try:
153
+ yield
154
+ finally:
155
+ self._subprocess_runner = prev
156
+
157
+ def _supported_features(self):
158
+ """Return the list of optional features supported by the backend."""
159
+ return self._call_hook('_supported_features', {})
160
+
161
+ def get_requires_for_build_wheel(self, config_settings=None):
162
+ """Identify packages required for building a wheel
163
+
164
+ Returns a list of dependency specifications, e.g.::
165
+
166
+ ["wheel >= 0.25", "setuptools"]
167
+
168
+ This does not include requirements specified in pyproject.toml.
169
+ It returns the result of calling the equivalently named hook in a
170
+ subprocess.
171
+ """
172
+ return self._call_hook('get_requires_for_build_wheel', {
173
+ 'config_settings': config_settings
174
+ })
175
+
176
+ def prepare_metadata_for_build_wheel(
177
+ self, metadata_directory, config_settings=None,
178
+ _allow_fallback=True):
179
+ """Prepare a ``*.dist-info`` folder with metadata for this project.
180
+
181
+ Returns the name of the newly created folder.
182
+
183
+ If the build backend defines a hook with this name, it will be called
184
+ in a subprocess. If not, the backend will be asked to build a wheel,
185
+ and the dist-info extracted from that (unless _allow_fallback is
186
+ False).
187
+ """
188
+ return self._call_hook('prepare_metadata_for_build_wheel', {
189
+ 'metadata_directory': abspath(metadata_directory),
190
+ 'config_settings': config_settings,
191
+ '_allow_fallback': _allow_fallback,
192
+ })
193
+
194
+ def build_wheel(
195
+ self, wheel_directory, config_settings=None,
196
+ metadata_directory=None):
197
+ """Build a wheel from this project.
198
+
199
+ Returns the name of the newly created file.
200
+
201
+ In general, this will call the 'build_wheel' hook in the backend.
202
+ However, if that was previously called by
203
+ 'prepare_metadata_for_build_wheel', and the same metadata_directory is
204
+ used, the previously built wheel will be copied to wheel_directory.
205
+ """
206
+ if metadata_directory is not None:
207
+ metadata_directory = abspath(metadata_directory)
208
+ return self._call_hook('build_wheel', {
209
+ 'wheel_directory': abspath(wheel_directory),
210
+ 'config_settings': config_settings,
211
+ 'metadata_directory': metadata_directory,
212
+ })
213
+
214
+ def get_requires_for_build_editable(self, config_settings=None):
215
+ """Identify packages required for building an editable wheel
216
+
217
+ Returns a list of dependency specifications, e.g.::
218
+
219
+ ["wheel >= 0.25", "setuptools"]
220
+
221
+ This does not include requirements specified in pyproject.toml.
222
+ It returns the result of calling the equivalently named hook in a
223
+ subprocess.
224
+ """
225
+ return self._call_hook('get_requires_for_build_editable', {
226
+ 'config_settings': config_settings
227
+ })
228
+
229
+ def prepare_metadata_for_build_editable(
230
+ self, metadata_directory, config_settings=None,
231
+ _allow_fallback=True):
232
+ """Prepare a ``*.dist-info`` folder with metadata for this project.
233
+
234
+ Returns the name of the newly created folder.
235
+
236
+ If the build backend defines a hook with this name, it will be called
237
+ in a subprocess. If not, the backend will be asked to build an editable
238
+ wheel, and the dist-info extracted from that (unless _allow_fallback is
239
+ False).
240
+ """
241
+ return self._call_hook('prepare_metadata_for_build_editable', {
242
+ 'metadata_directory': abspath(metadata_directory),
243
+ 'config_settings': config_settings,
244
+ '_allow_fallback': _allow_fallback,
245
+ })
246
+
247
+ def build_editable(
248
+ self, wheel_directory, config_settings=None,
249
+ metadata_directory=None):
250
+ """Build an editable wheel from this project.
251
+
252
+ Returns the name of the newly created file.
253
+
254
+ In general, this will call the 'build_editable' hook in the backend.
255
+ However, if that was previously called by
256
+ 'prepare_metadata_for_build_editable', and the same metadata_directory
257
+ is used, the previously built wheel will be copied to wheel_directory.
258
+ """
259
+ if metadata_directory is not None:
260
+ metadata_directory = abspath(metadata_directory)
261
+ return self._call_hook('build_editable', {
262
+ 'wheel_directory': abspath(wheel_directory),
263
+ 'config_settings': config_settings,
264
+ 'metadata_directory': metadata_directory,
265
+ })
266
+
267
+ def get_requires_for_build_sdist(self, config_settings=None):
268
+ """Identify packages required for building a wheel
269
+
270
+ Returns a list of dependency specifications, e.g.::
271
+
272
+ ["setuptools >= 26"]
273
+
274
+ This does not include requirements specified in pyproject.toml.
275
+ It returns the result of calling the equivalently named hook in a
276
+ subprocess.
277
+ """
278
+ return self._call_hook('get_requires_for_build_sdist', {
279
+ 'config_settings': config_settings
280
+ })
281
+
282
+ def build_sdist(self, sdist_directory, config_settings=None):
283
+ """Build an sdist from this project.
284
+
285
+ Returns the name of the newly created file.
286
+
287
+ This calls the 'build_sdist' backend hook in a subprocess.
288
+ """
289
+ return self._call_hook('build_sdist', {
290
+ 'sdist_directory': abspath(sdist_directory),
291
+ 'config_settings': config_settings,
292
+ })
293
+
294
+ def _call_hook(self, hook_name, kwargs):
295
+ # On Python 2, pytoml returns Unicode values (which is correct) but the
296
+ # environment passed to check_call needs to contain string values. We
297
+ # convert here by encoding using ASCII (the backend can only contain
298
+ # letters, digits and _, . and : characters, and will be used as a
299
+ # Python identifier, so non-ASCII content is wrong on Python 2 in
300
+ # any case).
301
+ # For backend_path, we use sys.getfilesystemencoding.
302
+ if sys.version_info[0] == 2:
303
+ build_backend = self.build_backend.encode('ASCII')
304
+ else:
305
+ build_backend = self.build_backend
306
+ extra_environ = {'PEP517_BUILD_BACKEND': build_backend}
307
+
308
+ if self.backend_path:
309
+ backend_path = os.pathsep.join(self.backend_path)
310
+ if sys.version_info[0] == 2:
311
+ backend_path = backend_path.encode(sys.getfilesystemencoding())
312
+ extra_environ['PEP517_BACKEND_PATH'] = backend_path
313
+
314
+ with tempdir() as td:
315
+ hook_input = {'kwargs': kwargs}
316
+ compat.write_json(hook_input, pjoin(td, 'input.json'),
317
+ indent=2)
318
+
319
+ # Run the hook in a subprocess
320
+ with _in_proc_script_path() as script:
321
+ python = self.python_executable
322
+ self._subprocess_runner(
323
+ [python, abspath(str(script)), hook_name, td],
324
+ cwd=self.source_dir,
325
+ extra_environ=extra_environ
326
+ )
327
+
328
+ data = compat.read_json(pjoin(td, 'output.json'))
329
+ if data.get('unsupported'):
330
+ raise UnsupportedOperation(data.get('traceback', ''))
331
+ if data.get('no_backend'):
332
+ raise BackendUnavailable(data.get('traceback', ''))
333
+ if data.get('backend_invalid'):
334
+ raise BackendInvalid(
335
+ backend_name=self.build_backend,
336
+ backend_path=self.backend_path,
337
+ message=data.get('backend_error', '')
338
+ )
339
+ if data.get('hook_missing'):
340
+ raise HookMissing(data.get('missing_hook_name') or hook_name)
341
+ return data['return_val']
342
+
343
+
344
+ class LoggerWrapper(threading.Thread):
345
+ """
346
+ Read messages from a pipe and redirect them
347
+ to a logger (see python's logging module).
348
+ """
349
+
350
+ def __init__(self, logger, level):
351
+ threading.Thread.__init__(self)
352
+ self.daemon = True
353
+
354
+ self.logger = logger
355
+ self.level = level
356
+
357
+ # create the pipe and reader
358
+ self.fd_read, self.fd_write = os.pipe()
359
+ self.reader = os.fdopen(self.fd_read)
360
+
361
+ self.start()
362
+
363
+ def fileno(self):
364
+ return self.fd_write
365
+
366
+ @staticmethod
367
+ def remove_newline(msg):
368
+ return msg[:-1] if msg.endswith(os.linesep) else msg
369
+
370
+ def run(self):
371
+ for line in self.reader:
372
+ self._write(self.remove_newline(line))
373
+
374
+ def _write(self, message):
375
+ self.logger.log(self.level, message)
env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/__init__.py ADDED
@@ -0,0 +1,189 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright (c) 2012 Georgios Verigakis <[email protected]>
2
+ #
3
+ # Permission to use, copy, modify, and distribute this software for any
4
+ # purpose with or without fee is hereby granted, provided that the above
5
+ # copyright notice and this permission notice appear in all copies.
6
+ #
7
+ # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
8
+ # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
9
+ # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
10
+ # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
11
+ # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
12
+ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
13
+ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
14
+
15
+ from __future__ import division, print_function
16
+
17
+ from collections import deque
18
+ from datetime import timedelta
19
+ from math import ceil
20
+ from sys import stderr
21
+ try:
22
+ from time import monotonic
23
+ except ImportError:
24
+ from time import time as monotonic
25
+
26
+
27
+ __version__ = '1.6'
28
+
29
+ HIDE_CURSOR = '\x1b[?25l'
30
+ SHOW_CURSOR = '\x1b[?25h'
31
+
32
+
33
+ class Infinite(object):
34
+ file = stderr
35
+ sma_window = 10 # Simple Moving Average window
36
+ check_tty = True
37
+ hide_cursor = True
38
+
39
+ def __init__(self, message='', **kwargs):
40
+ self.index = 0
41
+ self.start_ts = monotonic()
42
+ self.avg = 0
43
+ self._avg_update_ts = self.start_ts
44
+ self._ts = self.start_ts
45
+ self._xput = deque(maxlen=self.sma_window)
46
+ for key, val in kwargs.items():
47
+ setattr(self, key, val)
48
+
49
+ self._max_width = 0
50
+ self._hidden_cursor = False
51
+ self.message = message
52
+
53
+ if self.file and self.is_tty():
54
+ if self.hide_cursor:
55
+ print(HIDE_CURSOR, end='', file=self.file)
56
+ self._hidden_cursor = True
57
+ self.writeln('')
58
+
59
+ def __del__(self):
60
+ if self._hidden_cursor:
61
+ print(SHOW_CURSOR, end='', file=self.file)
62
+
63
+ def __getitem__(self, key):
64
+ if key.startswith('_'):
65
+ return None
66
+ return getattr(self, key, None)
67
+
68
+ @property
69
+ def elapsed(self):
70
+ return int(monotonic() - self.start_ts)
71
+
72
+ @property
73
+ def elapsed_td(self):
74
+ return timedelta(seconds=self.elapsed)
75
+
76
+ def update_avg(self, n, dt):
77
+ if n > 0:
78
+ xput_len = len(self._xput)
79
+ self._xput.append(dt / n)
80
+ now = monotonic()
81
+ # update when we're still filling _xput, then after every second
82
+ if (xput_len < self.sma_window or
83
+ now - self._avg_update_ts > 1):
84
+ self.avg = sum(self._xput) / len(self._xput)
85
+ self._avg_update_ts = now
86
+
87
+ def update(self):
88
+ pass
89
+
90
+ def start(self):
91
+ pass
92
+
93
+ def writeln(self, line):
94
+ if self.file and self.is_tty():
95
+ width = len(line)
96
+ if width < self._max_width:
97
+ # Add padding to cover previous contents
98
+ line += ' ' * (self._max_width - width)
99
+ else:
100
+ self._max_width = width
101
+ print('\r' + line, end='', file=self.file)
102
+ self.file.flush()
103
+
104
+ def finish(self):
105
+ if self.file and self.is_tty():
106
+ print(file=self.file)
107
+ if self._hidden_cursor:
108
+ print(SHOW_CURSOR, end='', file=self.file)
109
+ self._hidden_cursor = False
110
+
111
+ def is_tty(self):
112
+ try:
113
+ return self.file.isatty() if self.check_tty else True
114
+ except AttributeError:
115
+ msg = "%s has no attribute 'isatty'. Try setting check_tty=False." % self
116
+ raise AttributeError(msg)
117
+
118
+ def next(self, n=1):
119
+ now = monotonic()
120
+ dt = now - self._ts
121
+ self.update_avg(n, dt)
122
+ self._ts = now
123
+ self.index = self.index + n
124
+ self.update()
125
+
126
+ def iter(self, it):
127
+ self.iter_value = None
128
+ with self:
129
+ for x in it:
130
+ self.iter_value = x
131
+ yield x
132
+ self.next()
133
+ del self.iter_value
134
+
135
+ def __enter__(self):
136
+ self.start()
137
+ return self
138
+
139
+ def __exit__(self, exc_type, exc_val, exc_tb):
140
+ self.finish()
141
+
142
+
143
+ class Progress(Infinite):
144
+ def __init__(self, *args, **kwargs):
145
+ super(Progress, self).__init__(*args, **kwargs)
146
+ self.max = kwargs.get('max', 100)
147
+
148
+ @property
149
+ def eta(self):
150
+ return int(ceil(self.avg * self.remaining))
151
+
152
+ @property
153
+ def eta_td(self):
154
+ return timedelta(seconds=self.eta)
155
+
156
+ @property
157
+ def percent(self):
158
+ return self.progress * 100
159
+
160
+ @property
161
+ def progress(self):
162
+ if self.max == 0:
163
+ return 0
164
+ return min(1, self.index / self.max)
165
+
166
+ @property
167
+ def remaining(self):
168
+ return max(self.max - self.index, 0)
169
+
170
+ def start(self):
171
+ self.update()
172
+
173
+ def goto(self, index):
174
+ incr = index - self.index
175
+ self.next(incr)
176
+
177
+ def iter(self, it):
178
+ try:
179
+ self.max = len(it)
180
+ except TypeError:
181
+ pass
182
+
183
+ self.iter_value = None
184
+ with self:
185
+ for x in it:
186
+ self.iter_value = x
187
+ yield x
188
+ self.next()
189
+ del self.iter_value
env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/bar.py ADDED
@@ -0,0 +1,93 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+
3
+ # Copyright (c) 2012 Georgios Verigakis <[email protected]>
4
+ #
5
+ # Permission to use, copy, modify, and distribute this software for any
6
+ # purpose with or without fee is hereby granted, provided that the above
7
+ # copyright notice and this permission notice appear in all copies.
8
+ #
9
+ # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10
+ # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11
+ # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12
+ # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13
+ # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14
+ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15
+ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16
+
17
+ from __future__ import unicode_literals
18
+
19
+ import sys
20
+
21
+ from . import Progress
22
+ from .colors import color
23
+
24
+
25
+ class Bar(Progress):
26
+ width = 32
27
+ suffix = '%(index)d/%(max)d'
28
+ bar_prefix = ' |'
29
+ bar_suffix = '| '
30
+ empty_fill = ' '
31
+ fill = '#'
32
+ color = None
33
+
34
+ def update(self):
35
+ filled_length = int(self.width * self.progress)
36
+ empty_length = self.width - filled_length
37
+
38
+ message = self.message % self
39
+ bar = color(self.fill * filled_length, fg=self.color)
40
+ empty = self.empty_fill * empty_length
41
+ suffix = self.suffix % self
42
+ line = ''.join([message, self.bar_prefix, bar, empty, self.bar_suffix,
43
+ suffix])
44
+ self.writeln(line)
45
+
46
+
47
+ class ChargingBar(Bar):
48
+ suffix = '%(percent)d%%'
49
+ bar_prefix = ' '
50
+ bar_suffix = ' '
51
+ empty_fill = '∙'
52
+ fill = '█'
53
+
54
+
55
+ class FillingSquaresBar(ChargingBar):
56
+ empty_fill = '▢'
57
+ fill = '▣'
58
+
59
+
60
+ class FillingCirclesBar(ChargingBar):
61
+ empty_fill = '◯'
62
+ fill = '◉'
63
+
64
+
65
+ class IncrementalBar(Bar):
66
+ if sys.platform.startswith('win'):
67
+ phases = (u' ', u'▌', u'█')
68
+ else:
69
+ phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')
70
+
71
+ def update(self):
72
+ nphases = len(self.phases)
73
+ filled_len = self.width * self.progress
74
+ nfull = int(filled_len) # Number of full chars
75
+ phase = int((filled_len - nfull) * nphases) # Phase of last char
76
+ nempty = self.width - nfull # Number of empty chars
77
+
78
+ message = self.message % self
79
+ bar = color(self.phases[-1] * nfull, fg=self.color)
80
+ current = self.phases[phase] if phase > 0 else ''
81
+ empty = self.empty_fill * max(0, nempty - len(current))
82
+ suffix = self.suffix % self
83
+ line = ''.join([message, self.bar_prefix, bar, current, empty,
84
+ self.bar_suffix, suffix])
85
+ self.writeln(line)
86
+
87
+
88
+ class PixelBar(IncrementalBar):
89
+ phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿')
90
+
91
+
92
+ class ShadyBar(IncrementalBar):
93
+ phases = (' ', '░', '▒', '▓', '█')
env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/colors.py ADDED
@@ -0,0 +1,79 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+
3
+ # Copyright (c) 2020 Georgios Verigakis <[email protected]>
4
+ #
5
+ # Permission to use, copy, modify, and distribute this software for any
6
+ # purpose with or without fee is hereby granted, provided that the above
7
+ # copyright notice and this permission notice appear in all copies.
8
+ #
9
+ # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10
+ # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11
+ # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12
+ # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13
+ # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14
+ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15
+ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16
+
17
+ from functools import partial
18
+
19
+
20
+ COLORS = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan',
21
+ 'white')
22
+ STYLES = ('bold', 'faint', 'italic', 'underline', 'blink', 'blink2',
23
+ 'negative', 'concealed', 'crossed')
24
+
25
+
26
+ def color(s, fg=None, bg=None, style=None):
27
+ sgr = []
28
+
29
+ if fg:
30
+ if fg in COLORS:
31
+ sgr.append(str(30 + COLORS.index(fg)))
32
+ elif isinstance(fg, int) and 0 <= fg <= 255:
33
+ sgr.append('38;5;%d' % int(fg))
34
+ else:
35
+ raise Exception('Invalid color "%s"' % fg)
36
+
37
+ if bg:
38
+ if bg in COLORS:
39
+ sgr.append(str(40 + COLORS.index(bg)))
40
+ elif isinstance(bg, int) and 0 <= bg <= 255:
41
+ sgr.append('48;5;%d' % bg)
42
+ else:
43
+ raise Exception('Invalid color "%s"' % bg)
44
+
45
+ if style:
46
+ for st in style.split('+'):
47
+ if st in STYLES:
48
+ sgr.append(str(1 + STYLES.index(st)))
49
+ else:
50
+ raise Exception('Invalid style "%s"' % st)
51
+
52
+ if sgr:
53
+ prefix = '\x1b[' + ';'.join(sgr) + 'm'
54
+ suffix = '\x1b[0m'
55
+ return prefix + s + suffix
56
+ else:
57
+ return s
58
+
59
+
60
+ # Foreground shortcuts
61
+ black = partial(color, fg='black')
62
+ red = partial(color, fg='red')
63
+ green = partial(color, fg='green')
64
+ yellow = partial(color, fg='yellow')
65
+ blue = partial(color, fg='blue')
66
+ magenta = partial(color, fg='magenta')
67
+ cyan = partial(color, fg='cyan')
68
+ white = partial(color, fg='white')
69
+
70
+ # Style shortcuts
71
+ bold = partial(color, style='bold')
72
+ faint = partial(color, style='faint')
73
+ italic = partial(color, style='italic')
74
+ underline = partial(color, style='underline')
75
+ blink = partial(color, style='blink')
76
+ blink2 = partial(color, style='blink2')
77
+ negative = partial(color, style='negative')
78
+ concealed = partial(color, style='concealed')
79
+ crossed = partial(color, style='crossed')
env-llmeval/lib/python3.10/site-packages/pip/_vendor/progress/spinner.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+
3
+ # Copyright (c) 2012 Georgios Verigakis <[email protected]>
4
+ #
5
+ # Permission to use, copy, modify, and distribute this software for any
6
+ # purpose with or without fee is hereby granted, provided that the above
7
+ # copyright notice and this permission notice appear in all copies.
8
+ #
9
+ # THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
10
+ # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
11
+ # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
12
+ # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
13
+ # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
14
+ # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
15
+ # OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
16
+
17
+ from __future__ import unicode_literals
18
+ from . import Infinite
19
+
20
+
21
+ class Spinner(Infinite):
22
+ phases = ('-', '\\', '|', '/')
23
+ hide_cursor = True
24
+
25
+ def update(self):
26
+ i = self.index % len(self.phases)
27
+ message = self.message % self
28
+ line = ''.join([message, self.phases[i]])
29
+ self.writeln(line)
30
+
31
+
32
+ class PieSpinner(Spinner):
33
+ phases = ['◷', '◶', '◵', '◴']
34
+
35
+
36
+ class MoonSpinner(Spinner):
37
+ phases = ['◑', '◒', '◐', '◓']
38
+
39
+
40
+ class LineSpinner(Spinner):
41
+ phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻']
42
+
43
+
44
+ class PixelSpinner(Spinner):
45
+ phases = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽']
env-llmeval/lib/python3.10/site-packages/pip/_vendor/rich/__init__.py ADDED
@@ -0,0 +1,172 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Rich text and beautiful formatting in the terminal."""
2
+
3
+ import os
4
+ from typing import Callable, IO, TYPE_CHECKING, Any, Optional
5
+
6
+ from ._extension import load_ipython_extension
7
+
8
+ __all__ = ["get_console", "reconfigure", "print", "inspect"]
9
+
10
+ if TYPE_CHECKING:
11
+ from .console import Console
12
+
13
+ # Global console used by alternative print
14
+ _console: Optional["Console"] = None
15
+
16
+ _IMPORT_CWD = os.path.abspath(os.getcwd())
17
+
18
+
19
+ def get_console() -> "Console":
20
+ """Get a global :class:`~rich.console.Console` instance. This function is used when Rich requires a Console,
21
+ and hasn't been explicitly given one.
22
+
23
+ Returns:
24
+ Console: A console instance.
25
+ """
26
+ global _console
27
+ if _console is None:
28
+ from .console import Console
29
+
30
+ _console = Console()
31
+
32
+ return _console
33
+
34
+
35
+ def reconfigure(*args: Any, **kwargs: Any) -> None:
36
+ """Reconfigures the global console by replacing it with another.
37
+
38
+ Args:
39
+ console (Console): Replacement console instance.
40
+ """
41
+ from pip._vendor.rich.console import Console
42
+
43
+ new_console = Console(*args, **kwargs)
44
+ _console = get_console()
45
+ _console.__dict__ = new_console.__dict__
46
+
47
+
48
+ def print(
49
+ *objects: Any,
50
+ sep: str = " ",
51
+ end: str = "\n",
52
+ file: Optional[IO[str]] = None,
53
+ flush: bool = False,
54
+ ) -> None:
55
+ r"""Print object(s) supplied via positional arguments.
56
+ This function has an identical signature to the built-in print.
57
+ For more advanced features, see the :class:`~rich.console.Console` class.
58
+
59
+ Args:
60
+ sep (str, optional): Separator between printed objects. Defaults to " ".
61
+ end (str, optional): Character to write at end of output. Defaults to "\\n".
62
+ file (IO[str], optional): File to write to, or None for stdout. Defaults to None.
63
+ flush (bool, optional): Has no effect as Rich always flushes output. Defaults to False.
64
+
65
+ """
66
+ from .console import Console
67
+
68
+ write_console = get_console() if file is None else Console(file=file)
69
+ return write_console.print(*objects, sep=sep, end=end)
70
+
71
+
72
+ def print_json(
73
+ json: Optional[str] = None,
74
+ *,
75
+ data: Any = None,
76
+ indent: int = 2,
77
+ highlight: bool = True,
78
+ skip_keys: bool = False,
79
+ ensure_ascii: bool = True,
80
+ check_circular: bool = True,
81
+ allow_nan: bool = True,
82
+ default: Optional[Callable[[Any], Any]] = None,
83
+ sort_keys: bool = False,
84
+ ) -> None:
85
+ """Pretty prints JSON. Output will be valid JSON.
86
+
87
+ Args:
88
+ json (str): A string containing JSON.
89
+ data (Any): If json is not supplied, then encode this data.
90
+ indent (int, optional): Number of spaces to indent. Defaults to 2.
91
+ highlight (bool, optional): Enable highlighting of output: Defaults to True.
92
+ skip_keys (bool, optional): Skip keys not of a basic type. Defaults to False.
93
+ ensure_ascii (bool, optional): Escape all non-ascii characters. Defaults to False.
94
+ check_circular (bool, optional): Check for circular references. Defaults to True.
95
+ allow_nan (bool, optional): Allow NaN and Infinity values. Defaults to True.
96
+ default (Callable, optional): A callable that converts values that can not be encoded
97
+ in to something that can be JSON encoded. Defaults to None.
98
+ sort_keys (bool, optional): Sort dictionary keys. Defaults to False.
99
+ """
100
+
101
+ get_console().print_json(
102
+ json,
103
+ data=data,
104
+ indent=indent,
105
+ highlight=highlight,
106
+ skip_keys=skip_keys,
107
+ ensure_ascii=ensure_ascii,
108
+ check_circular=check_circular,
109
+ allow_nan=allow_nan,
110
+ default=default,
111
+ sort_keys=sort_keys,
112
+ )
113
+
114
+
115
+ def inspect(
116
+ obj: Any,
117
+ *,
118
+ console: Optional["Console"] = None,
119
+ title: Optional[str] = None,
120
+ help: bool = False,
121
+ methods: bool = False,
122
+ docs: bool = True,
123
+ private: bool = False,
124
+ dunder: bool = False,
125
+ sort: bool = True,
126
+ all: bool = False,
127
+ value: bool = True,
128
+ ) -> None:
129
+ """Inspect any Python object.
130
+
131
+ * inspect(<OBJECT>) to see summarized info.
132
+ * inspect(<OBJECT>, methods=True) to see methods.
133
+ * inspect(<OBJECT>, help=True) to see full (non-abbreviated) help.
134
+ * inspect(<OBJECT>, private=True) to see private attributes (single underscore).
135
+ * inspect(<OBJECT>, dunder=True) to see attributes beginning with double underscore.
136
+ * inspect(<OBJECT>, all=True) to see all attributes.
137
+
138
+ Args:
139
+ obj (Any): An object to inspect.
140
+ title (str, optional): Title to display over inspect result, or None use type. Defaults to None.
141
+ help (bool, optional): Show full help text rather than just first paragraph. Defaults to False.
142
+ methods (bool, optional): Enable inspection of callables. Defaults to False.
143
+ docs (bool, optional): Also render doc strings. Defaults to True.
144
+ private (bool, optional): Show private attributes (beginning with underscore). Defaults to False.
145
+ dunder (bool, optional): Show attributes starting with double underscore. Defaults to False.
146
+ sort (bool, optional): Sort attributes alphabetically. Defaults to True.
147
+ all (bool, optional): Show all attributes. Defaults to False.
148
+ value (bool, optional): Pretty print value. Defaults to True.
149
+ """
150
+ _console = console or get_console()
151
+ from pip._vendor.rich._inspect import Inspect
152
+
153
+ # Special case for inspect(inspect)
154
+ is_inspect = obj is inspect
155
+
156
+ _inspect = Inspect(
157
+ obj,
158
+ title=title,
159
+ help=is_inspect or help,
160
+ methods=is_inspect or methods,
161
+ docs=is_inspect or docs,
162
+ private=private,
163
+ dunder=dunder,
164
+ sort=sort,
165
+ all=all,
166
+ value=value,
167
+ )
168
+ _console.print(_inspect)
169
+
170
+
171
+ if __name__ == "__main__": # pragma: no cover
172
+ print("Hello, **World**")
env-llmeval/lib/python3.10/site-packages/pip/_vendor/rich/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (5.88 kB). View file