applied-ai-018 committed on
Commit 297b47b · verified · 1 Parent(s): 78d580c

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff.
Files changed (50)
  1. .gitattributes +1 -0
  2. env-llmeval/lib/python3.10/site-packages/numpy.libs/libgfortran-040039e1.so.5.0.0 +3 -0
  3. env-llmeval/lib/python3.10/site-packages/pip/__main__.py +31 -0
  4. env-llmeval/lib/python3.10/site-packages/pip/_internal/__init__.py +19 -0
  5. env-llmeval/lib/python3.10/site-packages/pip/_internal/build_env.py +296 -0
  6. env-llmeval/lib/python3.10/site-packages/pip/_internal/cache.py +264 -0
  7. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__init__.py +4 -0
  8. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc +0 -0
  11. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc +0 -0
  12. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc +0 -0
  13. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py +171 -0
  14. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/base_command.py +220 -0
  15. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/command_context.py +27 -0
  16. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/main.py +70 -0
  17. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/parser.py +292 -0
  18. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/req_command.py +506 -0
  19. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/spinners.py +157 -0
  20. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py +6 -0
  21. env-llmeval/lib/python3.10/site-packages/pip/_internal/configuration.py +366 -0
  22. env-llmeval/lib/python3.10/site-packages/pip/_internal/exceptions.py +658 -0
  23. env-llmeval/lib/python3.10/site-packages/pip/_internal/main.py +12 -0
  24. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__init__.py +2 -0
  25. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc +0 -0
  26. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc +0 -0
  31. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc +0 -0
  32. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc +0 -0
  33. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc +0 -0
  34. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc +0 -0
  35. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc +0 -0
  36. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/candidate.py +34 -0
  37. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/direct_url.py +220 -0
  38. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/format_control.py +80 -0
  39. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/index.py +28 -0
  40. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/link.py +288 -0
  41. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/scheme.py +31 -0
  42. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/search_scope.py +129 -0
  43. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py +51 -0
  44. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/target_python.py +110 -0
  45. env-llmeval/lib/python3.10/site-packages/pip/_internal/models/wheel.py +89 -0
  46. env-llmeval/lib/python3.10/site-packages/pip/_internal/pyproject.py +168 -0
  47. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/constructors.py +490 -0
  48. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_set.py +189 -0
  49. env-llmeval/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py +0 -0
  50. env-llmeval/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -216,3 +216,4 @@ llmeval-env/lib/python3.10/site-packages/scipy/io/_fast_matrix_market/_fmm_core.
  env-llmeval/lib/python3.10/site-packages/torch/lib/libtorch_cuda.so filter=lfs diff=lfs merge=lfs -text
  env-llmeval/lib/python3.10/site-packages/sklearn/_loss/_loss.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
  env-llmeval/lib/python3.10/site-packages/aiohttp/_http_parser.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
+ env-llmeval/lib/python3.10/site-packages/numpy.libs/libgfortran-040039e1.so.5.0.0 filter=lfs diff=lfs merge=lfs -text
env-llmeval/lib/python3.10/site-packages/numpy.libs/libgfortran-040039e1.so.5.0.0 ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:14afb3129b1a8b50bc40a3b0820c7f1152ea9bc10121aab152943f7057472886
+ size 2686065
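The three lines added above are a standard Git LFS pointer file (spec v1): the real shared object is stored out-of-band and only this stub is committed. As a minimal illustrative sketch only (not part of this commit; the helper name and sample strings are made up), such a pointer can be split into its fields like this:

# Minimal sketch: parse a Git LFS pointer file into a dict of its fields.
def parse_lfs_pointer(text: str) -> dict:
    fields = {}
    for line in text.strip().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    return fields

pointer = """version https://git-lfs.github.com/spec/v1
oid sha256:14afb3129b1a8b50bc40a3b0820c7f1152ea9bc10121aab152943f7057472886
size 2686065"""

info = parse_lfs_pointer(pointer)
print(info["oid"], info["size"])  # content hash and byte size of the real object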
env-llmeval/lib/python3.10/site-packages/pip/__main__.py ADDED
@@ -0,0 +1,31 @@
+ import os
+ import sys
+ import warnings
+
+ # Remove '' and current working directory from the first entry
+ # of sys.path, if present to avoid using current directory
+ # in pip commands check, freeze, install, list and show,
+ # when invoked as python -m pip <command>
+ if sys.path[0] in ("", os.getcwd()):
+     sys.path.pop(0)
+
+ # If we are running from a wheel, add the wheel to sys.path
+ # This allows the usage python pip-*.whl/pip install pip-*.whl
+ if __package__ == "":
+     # __file__ is pip-*.whl/pip/__main__.py
+     # first dirname call strips of '/__main__.py', second strips off '/pip'
+     # Resulting path is the name of the wheel itself
+     # Add that to sys.path so we can import pip
+     path = os.path.dirname(os.path.dirname(__file__))
+     sys.path.insert(0, path)
+
+ if __name__ == "__main__":
+     # Work around the error reported in #9540, pending a proper fix.
+     # Note: It is essential the warning filter is set *before* importing
+     #       pip, as the deprecation happens at import time, not runtime.
+     warnings.filterwarnings(
+         "ignore", category=DeprecationWarning, module=".*packaging\\.version"
+     )
+     from pip._internal.cli.main import main as _main
+
+     sys.exit(_main())
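The `__package__ == ""` branch above exists so that a pip wheel can be run directly by putting the wheel archive itself on sys.path. As a hedged, self-contained sketch of that zipimport mechanism (the archive and module names here are throwaway examples, not pip's):

# Sketch: importing a module from inside a zip archive by adding the archive
# to sys.path, the same mechanism the wheel trick above relies on.
import os
import sys
import tempfile
import zipfile

tmp = tempfile.mkdtemp()
archive = os.path.join(tmp, "demo.zip")
with zipfile.ZipFile(archive, "w") as zf:
    # A hypothetical single module stored at the root of the archive.
    zf.writestr("hello_from_zip.py", "GREETING = 'hi from inside the zip'\n")

sys.path.insert(0, archive)   # analogous to sys.path.insert(0, path) above
import hello_from_zip         # resolved via zipimport

print(hello_from_zip.GREETING)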
env-llmeval/lib/python3.10/site-packages/pip/_internal/__init__.py ADDED
@@ -0,0 +1,19 @@
+ from typing import List, Optional
+
+ import pip._internal.utils.inject_securetransport  # noqa
+ from pip._internal.utils import _log
+
+ # init_logging() must be called before any call to logging.getLogger()
+ # which happens at import of most modules.
+ _log.init_logging()
+
+
+ def main(args: (Optional[List[str]]) = None) -> int:
+     """This is preserved for old console scripts that may still be referencing
+     it.
+
+     For additional details, see https://github.com/pypa/pip/issues/7498.
+     """
+     from pip._internal.utils.entrypoints import _wrapper
+
+     return _wrapper(args)
env-llmeval/lib/python3.10/site-packages/pip/_internal/build_env.py ADDED
@@ -0,0 +1,296 @@
1
+ """Build Environment used for isolation during sdist building
2
+ """
3
+
4
+ import contextlib
5
+ import logging
6
+ import os
7
+ import pathlib
8
+ import sys
9
+ import textwrap
10
+ import zipfile
11
+ from collections import OrderedDict
12
+ from sysconfig import get_paths
13
+ from types import TracebackType
14
+ from typing import TYPE_CHECKING, Iterable, Iterator, List, Optional, Set, Tuple, Type
15
+
16
+ from pip._vendor.certifi import where
17
+ from pip._vendor.packaging.requirements import Requirement
18
+ from pip._vendor.packaging.version import Version
19
+
20
+ from pip import __file__ as pip_location
21
+ from pip._internal.cli.spinners import open_spinner
22
+ from pip._internal.locations import get_platlib, get_prefixed_libs, get_purelib
23
+ from pip._internal.metadata import get_environment
24
+ from pip._internal.utils.subprocess import call_subprocess
25
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
26
+
27
+ if TYPE_CHECKING:
28
+ from pip._internal.index.package_finder import PackageFinder
29
+
30
+ logger = logging.getLogger(__name__)
31
+
32
+
33
+ class _Prefix:
34
+ def __init__(self, path: str) -> None:
35
+ self.path = path
36
+ self.setup = False
37
+ self.bin_dir = get_paths(
38
+ "nt" if os.name == "nt" else "posix_prefix",
39
+ vars={"base": path, "platbase": path},
40
+ )["scripts"]
41
+ self.lib_dirs = get_prefixed_libs(path)
42
+
43
+
44
+ @contextlib.contextmanager
45
+ def _create_standalone_pip() -> Iterator[str]:
46
+ """Create a "standalone pip" zip file.
47
+
48
+ The zip file's content is identical to the currently-running pip.
49
+ It will be used to install requirements into the build environment.
50
+ """
51
+ source = pathlib.Path(pip_location).resolve().parent
52
+
53
+ # Return the current instance if `source` is not a directory. We can't build
54
+ # a zip from this, and it likely means the instance is already standalone.
55
+ if not source.is_dir():
56
+ yield str(source)
57
+ return
58
+
59
+ with TempDirectory(kind="standalone-pip") as tmp_dir:
60
+ pip_zip = os.path.join(tmp_dir.path, "__env_pip__.zip")
61
+ kwargs = {}
62
+ if sys.version_info >= (3, 8):
63
+ kwargs["strict_timestamps"] = False
64
+ with zipfile.ZipFile(pip_zip, "w", **kwargs) as zf:
65
+ for child in source.rglob("*"):
66
+ zf.write(child, child.relative_to(source.parent).as_posix())
67
+ yield os.path.join(pip_zip, "pip")
68
+
69
+
70
+ class BuildEnvironment:
71
+ """Creates and manages an isolated environment to install build deps"""
72
+
73
+ def __init__(self) -> None:
74
+ temp_dir = TempDirectory(kind=tempdir_kinds.BUILD_ENV, globally_managed=True)
75
+
76
+ self._prefixes = OrderedDict(
77
+ (name, _Prefix(os.path.join(temp_dir.path, name)))
78
+ for name in ("normal", "overlay")
79
+ )
80
+
81
+ self._bin_dirs: List[str] = []
82
+ self._lib_dirs: List[str] = []
83
+ for prefix in reversed(list(self._prefixes.values())):
84
+ self._bin_dirs.append(prefix.bin_dir)
85
+ self._lib_dirs.extend(prefix.lib_dirs)
86
+
87
+ # Customize site to:
88
+ # - ensure .pth files are honored
89
+ # - prevent access to system site packages
90
+ system_sites = {
91
+ os.path.normcase(site) for site in (get_purelib(), get_platlib())
92
+ }
93
+ self._site_dir = os.path.join(temp_dir.path, "site")
94
+ if not os.path.exists(self._site_dir):
95
+ os.mkdir(self._site_dir)
96
+ with open(
97
+ os.path.join(self._site_dir, "sitecustomize.py"), "w", encoding="utf-8"
98
+ ) as fp:
99
+ fp.write(
100
+ textwrap.dedent(
101
+ """
102
+ import os, site, sys
103
+
104
+ # First, drop system-sites related paths.
105
+ original_sys_path = sys.path[:]
106
+ known_paths = set()
107
+ for path in {system_sites!r}:
108
+ site.addsitedir(path, known_paths=known_paths)
109
+ system_paths = set(
110
+ os.path.normcase(path)
111
+ for path in sys.path[len(original_sys_path):]
112
+ )
113
+ original_sys_path = [
114
+ path for path in original_sys_path
115
+ if os.path.normcase(path) not in system_paths
116
+ ]
117
+ sys.path = original_sys_path
118
+
119
+ # Second, add lib directories.
120
+ # ensuring .pth file are processed.
121
+ for path in {lib_dirs!r}:
122
+ assert not path in sys.path
123
+ site.addsitedir(path)
124
+ """
125
+ ).format(system_sites=system_sites, lib_dirs=self._lib_dirs)
126
+ )
127
+
128
+ def __enter__(self) -> None:
129
+ self._save_env = {
130
+ name: os.environ.get(name, None)
131
+ for name in ("PATH", "PYTHONNOUSERSITE", "PYTHONPATH")
132
+ }
133
+
134
+ path = self._bin_dirs[:]
135
+ old_path = self._save_env["PATH"]
136
+ if old_path:
137
+ path.extend(old_path.split(os.pathsep))
138
+
139
+ pythonpath = [self._site_dir]
140
+
141
+ os.environ.update(
142
+ {
143
+ "PATH": os.pathsep.join(path),
144
+ "PYTHONNOUSERSITE": "1",
145
+ "PYTHONPATH": os.pathsep.join(pythonpath),
146
+ }
147
+ )
148
+
149
+ def __exit__(
150
+ self,
151
+ exc_type: Optional[Type[BaseException]],
152
+ exc_val: Optional[BaseException],
153
+ exc_tb: Optional[TracebackType],
154
+ ) -> None:
155
+ for varname, old_value in self._save_env.items():
156
+ if old_value is None:
157
+ os.environ.pop(varname, None)
158
+ else:
159
+ os.environ[varname] = old_value
160
+
161
+ def check_requirements(
162
+ self, reqs: Iterable[str]
163
+ ) -> Tuple[Set[Tuple[str, str]], Set[str]]:
164
+ """Return 2 sets:
165
+ - conflicting requirements: set of (installed, wanted) reqs tuples
166
+ - missing requirements: set of reqs
167
+ """
168
+ missing = set()
169
+ conflicting = set()
170
+ if reqs:
171
+ env = get_environment(self._lib_dirs)
172
+ for req_str in reqs:
173
+ req = Requirement(req_str)
174
+ dist = env.get_distribution(req.name)
175
+ if not dist:
176
+ missing.add(req_str)
177
+ continue
178
+ if isinstance(dist.version, Version):
179
+ installed_req_str = f"{req.name}=={dist.version}"
180
+ else:
181
+ installed_req_str = f"{req.name}==={dist.version}"
182
+ if dist.version not in req.specifier:
183
+ conflicting.add((installed_req_str, req_str))
184
+ # FIXME: Consider direct URL?
185
+ return conflicting, missing
186
+
187
+ def install_requirements(
188
+ self,
189
+ finder: "PackageFinder",
190
+ requirements: Iterable[str],
191
+ prefix_as_string: str,
192
+ *,
193
+ kind: str,
194
+ ) -> None:
195
+ prefix = self._prefixes[prefix_as_string]
196
+ assert not prefix.setup
197
+ prefix.setup = True
198
+ if not requirements:
199
+ return
200
+ with contextlib.ExitStack() as ctx:
201
+ pip_runnable = ctx.enter_context(_create_standalone_pip())
202
+ self._install_requirements(
203
+ pip_runnable,
204
+ finder,
205
+ requirements,
206
+ prefix,
207
+ kind=kind,
208
+ )
209
+
210
+ @staticmethod
211
+ def _install_requirements(
212
+ pip_runnable: str,
213
+ finder: "PackageFinder",
214
+ requirements: Iterable[str],
215
+ prefix: _Prefix,
216
+ *,
217
+ kind: str,
218
+ ) -> None:
219
+ args: List[str] = [
220
+ sys.executable,
221
+ pip_runnable,
222
+ "install",
223
+ "--ignore-installed",
224
+ "--no-user",
225
+ "--prefix",
226
+ prefix.path,
227
+ "--no-warn-script-location",
228
+ ]
229
+ if logger.getEffectiveLevel() <= logging.DEBUG:
230
+ args.append("-v")
231
+ for format_control in ("no_binary", "only_binary"):
232
+ formats = getattr(finder.format_control, format_control)
233
+ args.extend(
234
+ (
235
+ "--" + format_control.replace("_", "-"),
236
+ ",".join(sorted(formats or {":none:"})),
237
+ )
238
+ )
239
+
240
+ index_urls = finder.index_urls
241
+ if index_urls:
242
+ args.extend(["-i", index_urls[0]])
243
+ for extra_index in index_urls[1:]:
244
+ args.extend(["--extra-index-url", extra_index])
245
+ else:
246
+ args.append("--no-index")
247
+ for link in finder.find_links:
248
+ args.extend(["--find-links", link])
249
+
250
+ for host in finder.trusted_hosts:
251
+ args.extend(["--trusted-host", host])
252
+ if finder.allow_all_prereleases:
253
+ args.append("--pre")
254
+ if finder.prefer_binary:
255
+ args.append("--prefer-binary")
256
+ args.append("--")
257
+ args.extend(requirements)
258
+ extra_environ = {"_PIP_STANDALONE_CERT": where()}
259
+ with open_spinner(f"Installing {kind}") as spinner:
260
+ call_subprocess(
261
+ args,
262
+ command_desc=f"pip subprocess to install {kind}",
263
+ spinner=spinner,
264
+ extra_environ=extra_environ,
265
+ )
266
+
267
+
268
+ class NoOpBuildEnvironment(BuildEnvironment):
269
+ """A no-op drop-in replacement for BuildEnvironment"""
270
+
271
+ def __init__(self) -> None:
272
+ pass
273
+
274
+ def __enter__(self) -> None:
275
+ pass
276
+
277
+ def __exit__(
278
+ self,
279
+ exc_type: Optional[Type[BaseException]],
280
+ exc_val: Optional[BaseException],
281
+ exc_tb: Optional[TracebackType],
282
+ ) -> None:
283
+ pass
284
+
285
+ def cleanup(self) -> None:
286
+ pass
287
+
288
+ def install_requirements(
289
+ self,
290
+ finder: "PackageFinder",
291
+ requirements: Iterable[str],
292
+ prefix_as_string: str,
293
+ *,
294
+ kind: str,
295
+ ) -> None:
296
+ raise NotImplementedError()
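BuildEnvironment above isolates builds by temporarily rewriting PATH, PYTHONNOUSERSITE and PYTHONPATH in __enter__ and restoring the previous values in __exit__. A minimal, self-contained sketch of that save/restore pattern (the TempEnviron class name and the variables used are illustrative, not pip's API):

# Sketch of the environment save/restore pattern used by
# BuildEnvironment.__enter__/__exit__: remember old values, overwrite them
# for the duration of the block, then put them back exactly as they were.
import os
from typing import Dict, Optional


class TempEnviron:
    def __init__(self, **overrides: str) -> None:
        self._overrides = overrides
        self._saved: Dict[str, Optional[str]] = {}

    def __enter__(self) -> None:
        self._saved = {name: os.environ.get(name) for name in self._overrides}
        os.environ.update(self._overrides)

    def __exit__(self, exc_type, exc_val, exc_tb) -> None:
        for name, old_value in self._saved.items():
            if old_value is None:
                os.environ.pop(name, None)  # variable did not exist before
            else:
                os.environ[name] = old_value


with TempEnviron(PYTHONNOUSERSITE="1"):
    print(os.environ["PYTHONNOUSERSITE"])  # "1" only inside the block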
env-llmeval/lib/python3.10/site-packages/pip/_internal/cache.py ADDED
@@ -0,0 +1,264 @@
1
+ """Cache Management
2
+ """
3
+
4
+ import hashlib
5
+ import json
6
+ import logging
7
+ import os
8
+ from typing import Any, Dict, List, Optional, Set
9
+
10
+ from pip._vendor.packaging.tags import Tag, interpreter_name, interpreter_version
11
+ from pip._vendor.packaging.utils import canonicalize_name
12
+
13
+ from pip._internal.exceptions import InvalidWheelFilename
14
+ from pip._internal.models.format_control import FormatControl
15
+ from pip._internal.models.link import Link
16
+ from pip._internal.models.wheel import Wheel
17
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
18
+ from pip._internal.utils.urls import path_to_url
19
+
20
+ logger = logging.getLogger(__name__)
21
+
22
+
23
+ def _hash_dict(d: Dict[str, str]) -> str:
24
+ """Return a stable sha224 of a dictionary."""
25
+ s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
26
+ return hashlib.sha224(s.encode("ascii")).hexdigest()
27
+
28
+
29
+ class Cache:
30
+ """An abstract class - provides cache directories for data from links
31
+
32
+
33
+ :param cache_dir: The root of the cache.
34
+ :param format_control: An object of FormatControl class to limit
35
+ binaries being read from the cache.
36
+ :param allowed_formats: which formats of files the cache should store.
37
+ ('binary' and 'source' are the only allowed values)
38
+ """
39
+
40
+ def __init__(
41
+ self, cache_dir: str, format_control: FormatControl, allowed_formats: Set[str]
42
+ ) -> None:
43
+ super().__init__()
44
+ assert not cache_dir or os.path.isabs(cache_dir)
45
+ self.cache_dir = cache_dir or None
46
+ self.format_control = format_control
47
+ self.allowed_formats = allowed_formats
48
+
49
+ _valid_formats = {"source", "binary"}
50
+ assert self.allowed_formats.union(_valid_formats) == _valid_formats
51
+
52
+ def _get_cache_path_parts(self, link: Link) -> List[str]:
53
+ """Get parts of part that must be os.path.joined with cache_dir"""
54
+
55
+ # We want to generate an url to use as our cache key, we don't want to
56
+ # just re-use the URL because it might have other items in the fragment
57
+ # and we don't care about those.
58
+ key_parts = {"url": link.url_without_fragment}
59
+ if link.hash_name is not None and link.hash is not None:
60
+ key_parts[link.hash_name] = link.hash
61
+ if link.subdirectory_fragment:
62
+ key_parts["subdirectory"] = link.subdirectory_fragment
63
+
64
+ # Include interpreter name, major and minor version in cache key
65
+ # to cope with ill-behaved sdists that build a different wheel
66
+ # depending on the python version their setup.py is being run on,
67
+ # and don't encode the difference in compatibility tags.
68
+ # https://github.com/pypa/pip/issues/7296
69
+ key_parts["interpreter_name"] = interpreter_name()
70
+ key_parts["interpreter_version"] = interpreter_version()
71
+
72
+ # Encode our key url with sha224, we'll use this because it has similar
73
+ # security properties to sha256, but with a shorter total output (and
74
+ # thus less secure). However the differences don't make a lot of
75
+ # difference for our use case here.
76
+ hashed = _hash_dict(key_parts)
77
+
78
+ # We want to nest the directories some to prevent having a ton of top
79
+ # level directories where we might run out of sub directories on some
80
+ # FS.
81
+ parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
82
+
83
+ return parts
84
+
85
+ def _get_candidates(self, link: Link, canonical_package_name: str) -> List[Any]:
86
+ can_not_cache = not self.cache_dir or not canonical_package_name or not link
87
+ if can_not_cache:
88
+ return []
89
+
90
+ formats = self.format_control.get_allowed_formats(canonical_package_name)
91
+ if not self.allowed_formats.intersection(formats):
92
+ return []
93
+
94
+ candidates = []
95
+ path = self.get_path_for_link(link)
96
+ if os.path.isdir(path):
97
+ for candidate in os.listdir(path):
98
+ candidates.append((candidate, path))
99
+ return candidates
100
+
101
+ def get_path_for_link(self, link: Link) -> str:
102
+ """Return a directory to store cached items in for link."""
103
+ raise NotImplementedError()
104
+
105
+ def get(
106
+ self,
107
+ link: Link,
108
+ package_name: Optional[str],
109
+ supported_tags: List[Tag],
110
+ ) -> Link:
111
+ """Returns a link to a cached item if it exists, otherwise returns the
112
+ passed link.
113
+ """
114
+ raise NotImplementedError()
115
+
116
+
117
+ class SimpleWheelCache(Cache):
118
+ """A cache of wheels for future installs."""
119
+
120
+ def __init__(self, cache_dir: str, format_control: FormatControl) -> None:
121
+ super().__init__(cache_dir, format_control, {"binary"})
122
+
123
+ def get_path_for_link(self, link: Link) -> str:
124
+ """Return a directory to store cached wheels for link
125
+
126
+ Because there are M wheels for any one sdist, we provide a directory
127
+ to cache them in, and then consult that directory when looking up
128
+ cache hits.
129
+
130
+ We only insert things into the cache if they have plausible version
131
+ numbers, so that we don't contaminate the cache with things that were
132
+ not unique. E.g. ./package might have dozens of installs done for it
133
+ and build a version of 0.0...and if we built and cached a wheel, we'd
134
+ end up using the same wheel even if the source has been edited.
135
+
136
+ :param link: The link of the sdist for which this will cache wheels.
137
+ """
138
+ parts = self._get_cache_path_parts(link)
139
+ assert self.cache_dir
140
+ # Store wheels within the root cache_dir
141
+ return os.path.join(self.cache_dir, "wheels", *parts)
142
+
143
+ def get(
144
+ self,
145
+ link: Link,
146
+ package_name: Optional[str],
147
+ supported_tags: List[Tag],
148
+ ) -> Link:
149
+ candidates = []
150
+
151
+ if not package_name:
152
+ return link
153
+
154
+ canonical_package_name = canonicalize_name(package_name)
155
+ for wheel_name, wheel_dir in self._get_candidates(link, canonical_package_name):
156
+ try:
157
+ wheel = Wheel(wheel_name)
158
+ except InvalidWheelFilename:
159
+ continue
160
+ if canonicalize_name(wheel.name) != canonical_package_name:
161
+ logger.debug(
162
+ "Ignoring cached wheel %s for %s as it "
163
+ "does not match the expected distribution name %s.",
164
+ wheel_name,
165
+ link,
166
+ package_name,
167
+ )
168
+ continue
169
+ if not wheel.supported(supported_tags):
170
+ # Built for a different python/arch/etc
171
+ continue
172
+ candidates.append(
173
+ (
174
+ wheel.support_index_min(supported_tags),
175
+ wheel_name,
176
+ wheel_dir,
177
+ )
178
+ )
179
+
180
+ if not candidates:
181
+ return link
182
+
183
+ _, wheel_name, wheel_dir = min(candidates)
184
+ return Link(path_to_url(os.path.join(wheel_dir, wheel_name)))
185
+
186
+
187
+ class EphemWheelCache(SimpleWheelCache):
188
+ """A SimpleWheelCache that creates it's own temporary cache directory"""
189
+
190
+ def __init__(self, format_control: FormatControl) -> None:
191
+ self._temp_dir = TempDirectory(
192
+ kind=tempdir_kinds.EPHEM_WHEEL_CACHE,
193
+ globally_managed=True,
194
+ )
195
+
196
+ super().__init__(self._temp_dir.path, format_control)
197
+
198
+
199
+ class CacheEntry:
200
+ def __init__(
201
+ self,
202
+ link: Link,
203
+ persistent: bool,
204
+ ):
205
+ self.link = link
206
+ self.persistent = persistent
207
+
208
+
209
+ class WheelCache(Cache):
210
+ """Wraps EphemWheelCache and SimpleWheelCache into a single Cache
211
+
212
+ This Cache allows for gracefully degradation, using the ephem wheel cache
213
+ when a certain link is not found in the simple wheel cache first.
214
+ """
215
+
216
+ def __init__(self, cache_dir: str, format_control: FormatControl) -> None:
217
+ super().__init__(cache_dir, format_control, {"binary"})
218
+ self._wheel_cache = SimpleWheelCache(cache_dir, format_control)
219
+ self._ephem_cache = EphemWheelCache(format_control)
220
+
221
+ def get_path_for_link(self, link: Link) -> str:
222
+ return self._wheel_cache.get_path_for_link(link)
223
+
224
+ def get_ephem_path_for_link(self, link: Link) -> str:
225
+ return self._ephem_cache.get_path_for_link(link)
226
+
227
+ def get(
228
+ self,
229
+ link: Link,
230
+ package_name: Optional[str],
231
+ supported_tags: List[Tag],
232
+ ) -> Link:
233
+ cache_entry = self.get_cache_entry(link, package_name, supported_tags)
234
+ if cache_entry is None:
235
+ return link
236
+ return cache_entry.link
237
+
238
+ def get_cache_entry(
239
+ self,
240
+ link: Link,
241
+ package_name: Optional[str],
242
+ supported_tags: List[Tag],
243
+ ) -> Optional[CacheEntry]:
244
+ """Returns a CacheEntry with a link to a cached item if it exists or
245
+ None. The cache entry indicates if the item was found in the persistent
246
+ or ephemeral cache.
247
+ """
248
+ retval = self._wheel_cache.get(
249
+ link=link,
250
+ package_name=package_name,
251
+ supported_tags=supported_tags,
252
+ )
253
+ if retval is not link:
254
+ return CacheEntry(retval, persistent=True)
255
+
256
+ retval = self._ephem_cache.get(
257
+ link=link,
258
+ package_name=package_name,
259
+ supported_tags=supported_tags,
260
+ )
261
+ if retval is not link:
262
+ return CacheEntry(retval, persistent=False)
263
+
264
+ return None
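The wheel cache above keys each entry by a stable sha224 digest of a canonically serialised dict of link metadata (_hash_dict), then splits the digest into nested directory parts to avoid one huge flat directory (_get_cache_path_parts). A small stand-alone sketch of that keying scheme, with made-up metadata values:

# Sketch of the cache-key scheme from _hash_dict / _get_cache_path_parts:
# stable JSON serialisation -> sha224 -> nested path parts.
import hashlib
import json
import os


def hash_dict(d: dict) -> str:
    # sort_keys + fixed separators make the digest independent of dict order
    s = json.dumps(d, sort_keys=True, separators=(",", ":"), ensure_ascii=True)
    return hashlib.sha224(s.encode("ascii")).hexdigest()


key_parts = {
    "url": "https://example.invalid/pkg-1.0.tar.gz",  # hypothetical link
    "interpreter_name": "cp",
    "interpreter_version": "310",
}
hashed = hash_dict(key_parts)
# Nest the first three byte-pairs as directories, keep the rest as the leaf.
parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]]
print(os.path.join("wheels", *parts))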
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__init__.py ADDED
@@ -0,0 +1,4 @@
+ """Subpackage containing all of pip's command line interface related code
+ """
+
+ # This file intentionally does not import submodules
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (265 Bytes).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc ADDED
Binary file (6.24 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc ADDED
Binary file (22.5 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc ADDED
Binary file (1.3 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc ADDED
Binary file (2.15 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py ADDED
@@ -0,0 +1,171 @@
1
+ """Logic that powers autocompletion installed by ``pip completion``.
2
+ """
3
+
4
+ import optparse
5
+ import os
6
+ import sys
7
+ from itertools import chain
8
+ from typing import Any, Iterable, List, Optional
9
+
10
+ from pip._internal.cli.main_parser import create_main_parser
11
+ from pip._internal.commands import commands_dict, create_command
12
+ from pip._internal.metadata import get_default_environment
13
+
14
+
15
+ def autocomplete() -> None:
16
+ """Entry Point for completion of main and subcommand options."""
17
+ # Don't complete if user hasn't sourced bash_completion file.
18
+ if "PIP_AUTO_COMPLETE" not in os.environ:
19
+ return
20
+ cwords = os.environ["COMP_WORDS"].split()[1:]
21
+ cword = int(os.environ["COMP_CWORD"])
22
+ try:
23
+ current = cwords[cword - 1]
24
+ except IndexError:
25
+ current = ""
26
+
27
+ parser = create_main_parser()
28
+ subcommands = list(commands_dict)
29
+ options = []
30
+
31
+ # subcommand
32
+ subcommand_name: Optional[str] = None
33
+ for word in cwords:
34
+ if word in subcommands:
35
+ subcommand_name = word
36
+ break
37
+ # subcommand options
38
+ if subcommand_name is not None:
39
+ # special case: 'help' subcommand has no options
40
+ if subcommand_name == "help":
41
+ sys.exit(1)
42
+ # special case: list locally installed dists for show and uninstall
43
+ should_list_installed = not current.startswith("-") and subcommand_name in [
44
+ "show",
45
+ "uninstall",
46
+ ]
47
+ if should_list_installed:
48
+ env = get_default_environment()
49
+ lc = current.lower()
50
+ installed = [
51
+ dist.canonical_name
52
+ for dist in env.iter_installed_distributions(local_only=True)
53
+ if dist.canonical_name.startswith(lc)
54
+ and dist.canonical_name not in cwords[1:]
55
+ ]
56
+ # if there are no dists installed, fall back to option completion
57
+ if installed:
58
+ for dist in installed:
59
+ print(dist)
60
+ sys.exit(1)
61
+
62
+ should_list_installables = (
63
+ not current.startswith("-") and subcommand_name == "install"
64
+ )
65
+ if should_list_installables:
66
+ for path in auto_complete_paths(current, "path"):
67
+ print(path)
68
+ sys.exit(1)
69
+
70
+ subcommand = create_command(subcommand_name)
71
+
72
+ for opt in subcommand.parser.option_list_all:
73
+ if opt.help != optparse.SUPPRESS_HELP:
74
+ for opt_str in opt._long_opts + opt._short_opts:
75
+ options.append((opt_str, opt.nargs))
76
+
77
+ # filter out previously specified options from available options
78
+ prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
79
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
80
+ # filter options by current input
81
+ options = [(k, v) for k, v in options if k.startswith(current)]
82
+ # get completion type given cwords and available subcommand options
83
+ completion_type = get_path_completion_type(
84
+ cwords,
85
+ cword,
86
+ subcommand.parser.option_list_all,
87
+ )
88
+ # get completion files and directories if ``completion_type`` is
89
+ # ``<file>``, ``<dir>`` or ``<path>``
90
+ if completion_type:
91
+ paths = auto_complete_paths(current, completion_type)
92
+ options = [(path, 0) for path in paths]
93
+ for option in options:
94
+ opt_label = option[0]
95
+ # append '=' to options which require args
96
+ if option[1] and option[0][:2] == "--":
97
+ opt_label += "="
98
+ print(opt_label)
99
+ else:
100
+ # show main parser options only when necessary
101
+
102
+ opts = [i.option_list for i in parser.option_groups]
103
+ opts.append(parser.option_list)
104
+ flattened_opts = chain.from_iterable(opts)
105
+ if current.startswith("-"):
106
+ for opt in flattened_opts:
107
+ if opt.help != optparse.SUPPRESS_HELP:
108
+ subcommands += opt._long_opts + opt._short_opts
109
+ else:
110
+ # get completion type given cwords and all available options
111
+ completion_type = get_path_completion_type(cwords, cword, flattened_opts)
112
+ if completion_type:
113
+ subcommands = list(auto_complete_paths(current, completion_type))
114
+
115
+ print(" ".join([x for x in subcommands if x.startswith(current)]))
116
+ sys.exit(1)
117
+
118
+
119
+ def get_path_completion_type(
120
+ cwords: List[str], cword: int, opts: Iterable[Any]
121
+ ) -> Optional[str]:
122
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
123
+
124
+ :param cwords: same as the environmental variable ``COMP_WORDS``
125
+ :param cword: same as the environmental variable ``COMP_CWORD``
126
+ :param opts: The available options to check
127
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
128
+ """
129
+ if cword < 2 or not cwords[cword - 2].startswith("-"):
130
+ return None
131
+ for opt in opts:
132
+ if opt.help == optparse.SUPPRESS_HELP:
133
+ continue
134
+ for o in str(opt).split("/"):
135
+ if cwords[cword - 2].split("=")[0] == o:
136
+ if not opt.metavar or any(
137
+ x in ("path", "file", "dir") for x in opt.metavar.split("/")
138
+ ):
139
+ return opt.metavar
140
+ return None
141
+
142
+
143
+ def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
144
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
145
+ and directories starting with ``current``; otherwise only list directories
146
+ starting with ``current``.
147
+
148
+ :param current: The word to be completed
149
+ :param completion_type: path completion type(``file``, ``path`` or ``dir``)
150
+ :return: A generator of regular files and/or directories
151
+ """
152
+ directory, filename = os.path.split(current)
153
+ current_path = os.path.abspath(directory)
154
+ # Don't complete paths if they can't be accessed
155
+ if not os.access(current_path, os.R_OK):
156
+ return
157
+ filename = os.path.normcase(filename)
158
+ # list all files that start with ``filename``
159
+ file_list = (
160
+ x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
161
+ )
162
+ for f in file_list:
163
+ opt = os.path.join(current_path, f)
164
+ comp_file = os.path.normcase(os.path.join(directory, f))
165
+ # complete regular files when there is not ``<dir>`` after option
166
+ # complete directories when there is ``<file>``, ``<path>`` or
167
+ # ``<dir>``after option
168
+ if completion_type != "dir" and os.path.isfile(opt):
169
+ yield comp_file
170
+ elif os.path.isdir(opt):
171
+ yield os.path.join(comp_file, "")
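autocomplete() above only runs when the shell completion script has exported PIP_AUTO_COMPLETE, and it reads the partially typed command line from the COMP_WORDS and COMP_CWORD environment variables. A toy sketch of that hand-off (the values and the subcommand list are made up for illustration):

# Sketch of the COMP_WORDS / COMP_CWORD hand-off used by autocomplete() above.
# A shell completion function exports these variables before re-invoking pip.
import os

os.environ["COMP_WORDS"] = "pip insta"  # whole line typed so far (made up)
os.environ["COMP_CWORD"] = "1"          # index of the word being completed

cwords = os.environ["COMP_WORDS"].split()[1:]  # -> ["insta"]
cword = int(os.environ["COMP_CWORD"])
try:
    current = cwords[cword - 1]
except IndexError:
    current = ""

subcommands = ["install", "download", "uninstall", "list", "show"]
print([c for c in subcommands if c.startswith(current)])  # -> ['install']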
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/base_command.py ADDED
@@ -0,0 +1,220 @@
1
+ """Base Command class, and related routines"""
2
+
3
+ import functools
4
+ import logging
5
+ import logging.config
6
+ import optparse
7
+ import os
8
+ import sys
9
+ import traceback
10
+ from optparse import Values
11
+ from typing import Any, Callable, List, Optional, Tuple
12
+
13
+ from pip._internal.cli import cmdoptions
14
+ from pip._internal.cli.command_context import CommandContextMixIn
15
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
16
+ from pip._internal.cli.status_codes import (
17
+ ERROR,
18
+ PREVIOUS_BUILD_DIR_ERROR,
19
+ UNKNOWN_ERROR,
20
+ VIRTUALENV_NOT_FOUND,
21
+ )
22
+ from pip._internal.exceptions import (
23
+ BadCommand,
24
+ CommandError,
25
+ DiagnosticPipError,
26
+ InstallationError,
27
+ NetworkConnectionError,
28
+ PreviousBuildDirError,
29
+ UninstallationError,
30
+ )
31
+ from pip._internal.utils.filesystem import check_path_owner
32
+ from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
33
+ from pip._internal.utils.misc import get_prog, normalize_path
34
+ from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
35
+ from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
36
+ from pip._internal.utils.virtualenv import running_under_virtualenv
37
+
38
+ __all__ = ["Command"]
39
+
40
+ logger = logging.getLogger(__name__)
41
+
42
+
43
+ class Command(CommandContextMixIn):
44
+ usage: str = ""
45
+ ignore_require_venv: bool = False
46
+
47
+ def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
48
+ super().__init__()
49
+
50
+ self.name = name
51
+ self.summary = summary
52
+ self.parser = ConfigOptionParser(
53
+ usage=self.usage,
54
+ prog=f"{get_prog()} {name}",
55
+ formatter=UpdatingDefaultsHelpFormatter(),
56
+ add_help_option=False,
57
+ name=name,
58
+ description=self.__doc__,
59
+ isolated=isolated,
60
+ )
61
+
62
+ self.tempdir_registry: Optional[TempDirRegistry] = None
63
+
64
+ # Commands should add options to this option group
65
+ optgroup_name = f"{self.name.capitalize()} Options"
66
+ self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
67
+
68
+ # Add the general options
69
+ gen_opts = cmdoptions.make_option_group(
70
+ cmdoptions.general_group,
71
+ self.parser,
72
+ )
73
+ self.parser.add_option_group(gen_opts)
74
+
75
+ self.add_options()
76
+
77
+ def add_options(self) -> None:
78
+ pass
79
+
80
+ def handle_pip_version_check(self, options: Values) -> None:
81
+ """
82
+ This is a no-op so that commands by default do not do the pip version
83
+ check.
84
+ """
85
+ # Make sure we do the pip version check if the index_group options
86
+ # are present.
87
+ assert not hasattr(options, "no_index")
88
+
89
+ def run(self, options: Values, args: List[str]) -> int:
90
+ raise NotImplementedError
91
+
92
+ def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
93
+ # factored out for testability
94
+ return self.parser.parse_args(args)
95
+
96
+ def main(self, args: List[str]) -> int:
97
+ try:
98
+ with self.main_context():
99
+ return self._main(args)
100
+ finally:
101
+ logging.shutdown()
102
+
103
+ def _main(self, args: List[str]) -> int:
104
+ # We must initialize this before the tempdir manager, otherwise the
105
+ # configuration would not be accessible by the time we clean up the
106
+ # tempdir manager.
107
+ self.tempdir_registry = self.enter_context(tempdir_registry())
108
+ # Intentionally set as early as possible so globally-managed temporary
109
+ # directories are available to the rest of the code.
110
+ self.enter_context(global_tempdir_manager())
111
+
112
+ options, args = self.parse_args(args)
113
+
114
+ # Set verbosity so that it can be used elsewhere.
115
+ self.verbosity = options.verbose - options.quiet
116
+
117
+ level_number = setup_logging(
118
+ verbosity=self.verbosity,
119
+ no_color=options.no_color,
120
+ user_log_file=options.log,
121
+ )
122
+
123
+ # TODO: Try to get these passing down from the command?
124
+ # without resorting to os.environ to hold these.
125
+ # This also affects isolated builds and it should.
126
+
127
+ if options.no_input:
128
+ os.environ["PIP_NO_INPUT"] = "1"
129
+
130
+ if options.exists_action:
131
+ os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
132
+
133
+ if options.require_venv and not self.ignore_require_venv:
134
+ # If a venv is required check if it can really be found
135
+ if not running_under_virtualenv():
136
+ logger.critical("Could not find an activated virtualenv (required).")
137
+ sys.exit(VIRTUALENV_NOT_FOUND)
138
+
139
+ if options.cache_dir:
140
+ options.cache_dir = normalize_path(options.cache_dir)
141
+ if not check_path_owner(options.cache_dir):
142
+ logger.warning(
143
+ "The directory '%s' or its parent directory is not owned "
144
+ "or is not writable by the current user. The cache "
145
+ "has been disabled. Check the permissions and owner of "
146
+ "that directory. If executing pip with sudo, you should "
147
+ "use sudo's -H flag.",
148
+ options.cache_dir,
149
+ )
150
+ options.cache_dir = None
151
+
152
+ if "2020-resolver" in options.features_enabled:
153
+ logger.warning(
154
+ "--use-feature=2020-resolver no longer has any effect, "
155
+ "since it is now the default dependency resolver in pip. "
156
+ "This will become an error in pip 21.0."
157
+ )
158
+
159
+ def intercepts_unhandled_exc(
160
+ run_func: Callable[..., int]
161
+ ) -> Callable[..., int]:
162
+ @functools.wraps(run_func)
163
+ def exc_logging_wrapper(*args: Any) -> int:
164
+ try:
165
+ status = run_func(*args)
166
+ assert isinstance(status, int)
167
+ return status
168
+ except DiagnosticPipError as exc:
169
+ logger.error("[present-diagnostic] %s", exc)
170
+ logger.debug("Exception information:", exc_info=True)
171
+
172
+ return ERROR
173
+ except PreviousBuildDirError as exc:
174
+ logger.critical(str(exc))
175
+ logger.debug("Exception information:", exc_info=True)
176
+
177
+ return PREVIOUS_BUILD_DIR_ERROR
178
+ except (
179
+ InstallationError,
180
+ UninstallationError,
181
+ BadCommand,
182
+ NetworkConnectionError,
183
+ ) as exc:
184
+ logger.critical(str(exc))
185
+ logger.debug("Exception information:", exc_info=True)
186
+
187
+ return ERROR
188
+ except CommandError as exc:
189
+ logger.critical("%s", exc)
190
+ logger.debug("Exception information:", exc_info=True)
191
+
192
+ return ERROR
193
+ except BrokenStdoutLoggingError:
194
+ # Bypass our logger and write any remaining messages to
195
+ # stderr because stdout no longer works.
196
+ print("ERROR: Pipe to stdout was broken", file=sys.stderr)
197
+ if level_number <= logging.DEBUG:
198
+ traceback.print_exc(file=sys.stderr)
199
+
200
+ return ERROR
201
+ except KeyboardInterrupt:
202
+ logger.critical("Operation cancelled by user")
203
+ logger.debug("Exception information:", exc_info=True)
204
+
205
+ return ERROR
206
+ except BaseException:
207
+ logger.critical("Exception:", exc_info=True)
208
+
209
+ return UNKNOWN_ERROR
210
+
211
+ return exc_logging_wrapper
212
+
213
+ try:
214
+ if not options.debug_mode:
215
+ run = intercepts_unhandled_exc(self.run)
216
+ else:
217
+ run = self.run
218
+ return run(options, args)
219
+ finally:
220
+ self.handle_pip_version_check(options)
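intercepts_unhandled_exc above wraps Command.run so that known exception types are logged and translated into process exit codes instead of raw tracebacks. A stripped-down sketch of that decorator pattern, with made-up status constants and a stand-in exception class rather than pip's:

# Sketch of the exception-to-exit-code wrapper pattern used by
# intercepts_unhandled_exc above (constants and exception are illustrative).
import functools
import logging
from typing import Any, Callable

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger("demo")

SUCCESS, ERROR, UNKNOWN_ERROR = 0, 1, 2


class CommandError(Exception):
    """Stand-in for pip's CommandError."""


def intercepts_unhandled_exc(run_func: Callable[..., int]) -> Callable[..., int]:
    @functools.wraps(run_func)
    def wrapper(*args: Any) -> int:
        try:
            return run_func(*args)
        except CommandError as exc:
            logger.critical("%s", exc)  # known failure: log it, return ERROR
            return ERROR
        except BaseException:
            logger.critical("Exception:", exc_info=True)
            return UNKNOWN_ERROR        # anything else: generic failure code
    return wrapper


@intercepts_unhandled_exc
def run() -> int:
    raise CommandError("something the user can fix")


print(run())  # -> 1, instead of an unhandled traceback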
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/command_context.py ADDED
@@ -0,0 +1,27 @@
+ from contextlib import ExitStack, contextmanager
+ from typing import ContextManager, Iterator, TypeVar
+
+ _T = TypeVar("_T", covariant=True)
+
+
+ class CommandContextMixIn:
+     def __init__(self) -> None:
+         super().__init__()
+         self._in_main_context = False
+         self._main_context = ExitStack()
+
+     @contextmanager
+     def main_context(self) -> Iterator[None]:
+         assert not self._in_main_context
+
+         self._in_main_context = True
+         try:
+             with self._main_context:
+                 yield
+         finally:
+             self._in_main_context = False
+
+     def enter_context(self, context_provider: ContextManager[_T]) -> _T:
+         assert self._in_main_context
+
+         return self._main_context.enter_context(context_provider)
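CommandContextMixIn above funnels every enter_context() call into a single contextlib.ExitStack so that all acquired resources are torn down together when main_context() ends. A minimal sketch of that idea using throwaway context managers (the resource names are illustrative only):

# Sketch of the single-ExitStack pattern used by CommandContextMixIn above.
from contextlib import ExitStack, contextmanager
from typing import Iterator


@contextmanager
def resource(name: str) -> Iterator[str]:
    print("acquire", name)
    try:
        yield name
    finally:
        print("release", name)


with ExitStack() as stack:
    a = stack.enter_context(resource("tempdir registry"))  # names are made up
    b = stack.enter_context(resource("tempdir manager"))
    print("working with", a, "and", b)
# Both resources are released here, in reverse order of acquisition.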
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/main.py ADDED
@@ -0,0 +1,70 @@
+ """Primary application entrypoint.
+ """
+ import locale
+ import logging
+ import os
+ import sys
+ from typing import List, Optional
+
+ from pip._internal.cli.autocompletion import autocomplete
+ from pip._internal.cli.main_parser import parse_command
+ from pip._internal.commands import create_command
+ from pip._internal.exceptions import PipError
+ from pip._internal.utils import deprecation
+
+ logger = logging.getLogger(__name__)
+
+
+ # Do not import and use main() directly! Using it directly is actively
+ # discouraged by pip's maintainers. The name, location and behavior of
+ # this function is subject to change, so calling it directly is not
+ # portable across different pip versions.
+
+ # In addition, running pip in-process is unsupported and unsafe. This is
+ # elaborated in detail at
+ # https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
+ # That document also provides suggestions that should work for nearly
+ # all users that are considering importing and using main() directly.
+
+ # However, we know that certain users will still want to invoke pip
+ # in-process. If you understand and accept the implications of using pip
+ # in an unsupported manner, the best approach is to use runpy to avoid
+ # depending on the exact location of this entry point.
+
+ # The following example shows how to use runpy to invoke pip in that
+ # case:
+ #
+ # sys.argv = ["pip", your, args, here]
+ # runpy.run_module("pip", run_name="__main__")
+ #
+ # Note that this will exit the process after running, unlike a direct
+ # call to main. As it is not safe to do any processing after calling
+ # main, this should not be an issue in practice.
+
+
+ def main(args: Optional[List[str]] = None) -> int:
+     if args is None:
+         args = sys.argv[1:]
+
+     # Configure our deprecation warnings to be sent through loggers
+     deprecation.install_warning_logger()
+
+     autocomplete()
+
+     try:
+         cmd_name, cmd_args = parse_command(args)
+     except PipError as exc:
+         sys.stderr.write(f"ERROR: {exc}")
+         sys.stderr.write(os.linesep)
+         sys.exit(1)
+
+     # Needed for locale.getpreferredencoding(False) to work
+     # in pip._internal.utils.encoding.auto_decode
+     try:
+         locale.setlocale(locale.LC_ALL, "")
+     except locale.Error as e:
+         # setlocale can apparently crash if locale are uninitialized
+         logger.debug("Ignoring error %s when setting locale", e)
+     command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
+
+     return command.main(cmd_args)
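The module comment above recommends runpy for callers who insist on invoking pip in-process, and notes that run_module exits the process when pip finishes. A hedged, runnable transcription of that documented pattern (it assumes pip is importable in the current environment; the SystemExit handling is added here only to keep the example from terminating the interpreter):

# runpy invocation pattern described in the comment block above.
import runpy
import sys

sys.argv = ["pip", "--version"]  # ["pip", your, args, here]
try:
    runpy.run_module("pip", run_name="__main__")
except SystemExit as exc:
    print("pip exited with status", exc.code)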
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/parser.py ADDED
@@ -0,0 +1,292 @@
1
+ """Base option parser setup"""
2
+
3
+ import logging
4
+ import optparse
5
+ import shutil
6
+ import sys
7
+ import textwrap
8
+ from contextlib import suppress
9
+ from typing import Any, Dict, Iterator, List, Tuple
10
+
11
+ from pip._internal.cli.status_codes import UNKNOWN_ERROR
12
+ from pip._internal.configuration import Configuration, ConfigurationError
13
+ from pip._internal.utils.misc import redact_auth_from_url, strtobool
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
19
+ """A prettier/less verbose help formatter for optparse."""
20
+
21
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
22
+ # help position must be aligned with __init__.parseopts.description
23
+ kwargs["max_help_position"] = 30
24
+ kwargs["indent_increment"] = 1
25
+ kwargs["width"] = shutil.get_terminal_size()[0] - 2
26
+ super().__init__(*args, **kwargs)
27
+
28
+ def format_option_strings(self, option: optparse.Option) -> str:
29
+ return self._format_option_strings(option)
30
+
31
+ def _format_option_strings(
32
+ self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
33
+ ) -> str:
34
+ """
35
+ Return a comma-separated list of option strings and metavars.
36
+
37
+ :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
38
+ :param mvarfmt: metavar format string
39
+ :param optsep: separator
40
+ """
41
+ opts = []
42
+
43
+ if option._short_opts:
44
+ opts.append(option._short_opts[0])
45
+ if option._long_opts:
46
+ opts.append(option._long_opts[0])
47
+ if len(opts) > 1:
48
+ opts.insert(1, optsep)
49
+
50
+ if option.takes_value():
51
+ assert option.dest is not None
52
+ metavar = option.metavar or option.dest.lower()
53
+ opts.append(mvarfmt.format(metavar.lower()))
54
+
55
+ return "".join(opts)
56
+
57
+ def format_heading(self, heading: str) -> str:
58
+ if heading == "Options":
59
+ return ""
60
+ return heading + ":\n"
61
+
62
+ def format_usage(self, usage: str) -> str:
63
+ """
64
+ Ensure there is only one newline between usage and the first heading
65
+ if there is no description.
66
+ """
67
+ msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
68
+ return msg
69
+
70
+ def format_description(self, description: str) -> str:
71
+ # leave full control over description to us
72
+ if description:
73
+ if hasattr(self.parser, "main"):
74
+ label = "Commands"
75
+ else:
76
+ label = "Description"
77
+ # some doc strings have initial newlines, some don't
78
+ description = description.lstrip("\n")
79
+ # some doc strings have final newlines and spaces, some don't
80
+ description = description.rstrip()
81
+ # dedent, then reindent
82
+ description = self.indent_lines(textwrap.dedent(description), " ")
83
+ description = f"{label}:\n{description}\n"
84
+ return description
85
+ else:
86
+ return ""
87
+
88
+ def format_epilog(self, epilog: str) -> str:
89
+ # leave full control over epilog to us
90
+ if epilog:
91
+ return epilog
92
+ else:
93
+ return ""
94
+
95
+ def indent_lines(self, text: str, indent: str) -> str:
96
+ new_lines = [indent + line for line in text.split("\n")]
97
+ return "\n".join(new_lines)
98
+
99
+
100
+ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
101
+ """Custom help formatter for use in ConfigOptionParser.
102
+
103
+ This is updates the defaults before expanding them, allowing
104
+ them to show up correctly in the help listing.
105
+
106
+ Also redact auth from url type options
107
+ """
108
+
109
+ def expand_default(self, option: optparse.Option) -> str:
110
+ default_values = None
111
+ if self.parser is not None:
112
+ assert isinstance(self.parser, ConfigOptionParser)
113
+ self.parser._update_defaults(self.parser.defaults)
114
+ assert option.dest is not None
115
+ default_values = self.parser.defaults.get(option.dest)
116
+ help_text = super().expand_default(option)
117
+
118
+ if default_values and option.metavar == "URL":
119
+ if isinstance(default_values, str):
120
+ default_values = [default_values]
121
+
122
+ # If its not a list, we should abort and just return the help text
123
+ if not isinstance(default_values, list):
124
+ default_values = []
125
+
126
+ for val in default_values:
127
+ help_text = help_text.replace(val, redact_auth_from_url(val))
128
+
129
+ return help_text
130
+
131
+
132
+ class CustomOptionParser(optparse.OptionParser):
133
+ def insert_option_group(
134
+ self, idx: int, *args: Any, **kwargs: Any
135
+ ) -> optparse.OptionGroup:
136
+ """Insert an OptionGroup at a given position."""
137
+ group = self.add_option_group(*args, **kwargs)
138
+
139
+ self.option_groups.pop()
140
+ self.option_groups.insert(idx, group)
141
+
142
+ return group
143
+
144
+ @property
145
+ def option_list_all(self) -> List[optparse.Option]:
146
+ """Get a list of all options, including those in option groups."""
147
+ res = self.option_list[:]
148
+ for i in self.option_groups:
149
+ res.extend(i.option_list)
150
+
151
+ return res
152
+
153
+
154
+ class ConfigOptionParser(CustomOptionParser):
155
+ """Custom option parser which updates its defaults by checking the
156
+ configuration files and environmental variables"""
157
+
158
+ def __init__(
159
+ self,
160
+ *args: Any,
161
+ name: str,
162
+ isolated: bool = False,
163
+ **kwargs: Any,
164
+ ) -> None:
165
+ self.name = name
166
+ self.config = Configuration(isolated)
167
+
168
+ assert self.name
169
+ super().__init__(*args, **kwargs)
170
+
171
+ def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
172
+ try:
173
+ return option.check_value(key, val)
174
+ except optparse.OptionValueError as exc:
175
+ print(f"An error occurred during configuration: {exc}")
176
+ sys.exit(3)
177
+
178
+ def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]:
179
+ # Configuration gives keys in an unordered manner. Order them.
180
+ override_order = ["global", self.name, ":env:"]
181
+
182
+ # Pool the options into different groups
183
+ section_items: Dict[str, List[Tuple[str, Any]]] = {
184
+ name: [] for name in override_order
185
+ }
186
+ for section_key, val in self.config.items():
187
+ # ignore empty values
188
+ if not val:
189
+ logger.debug(
190
+ "Ignoring configuration key '%s' as it's value is empty.",
191
+ section_key,
192
+ )
193
+ continue
194
+
195
+ section, key = section_key.split(".", 1)
196
+ if section in override_order:
197
+ section_items[section].append((key, val))
198
+
199
+ # Yield each group in their override order
200
+ for section in override_order:
201
+ for key, val in section_items[section]:
202
+ yield key, val
203
+
204
+ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
205
+ """Updates the given defaults with values from the config files and
206
+ the environ. Does a little special handling for certain types of
207
+ options (lists)."""
208
+
209
+ # Accumulate complex default state.
210
+ self.values = optparse.Values(self.defaults)
211
+ late_eval = set()
212
+ # Then set the options with those values
213
+ for key, val in self._get_ordered_configuration_items():
214
+ # '--' because configuration supports only long names
215
+ option = self.get_option("--" + key)
216
+
217
+ # Ignore options not present in this parser. E.g. non-globals put
218
+ # in [global] by users that want them to apply to all applicable
219
+ # commands.
220
+ if option is None:
221
+ continue
222
+
223
+ assert option.dest is not None
224
+
225
+ if option.action in ("store_true", "store_false"):
226
+ try:
227
+ val = strtobool(val)
228
+ except ValueError:
229
+ self.error(
230
+ "{} is not a valid value for {} option, " # noqa
231
+ "please specify a boolean value like yes/no, "
232
+ "true/false or 1/0 instead.".format(val, key)
233
+ )
234
+ elif option.action == "count":
235
+ with suppress(ValueError):
236
+ val = strtobool(val)
237
+ with suppress(ValueError):
238
+ val = int(val)
239
+ if not isinstance(val, int) or val < 0:
240
+ self.error(
241
+ "{} is not a valid value for {} option, " # noqa
242
+ "please instead specify either a non-negative integer "
243
+ "or a boolean value like yes/no or false/true "
244
+ "which is equivalent to 1/0.".format(val, key)
245
+ )
246
+ elif option.action == "append":
247
+ val = val.split()
248
+ val = [self.check_default(option, key, v) for v in val]
249
+ elif option.action == "callback":
250
+ assert option.callback is not None
251
+ late_eval.add(option.dest)
252
+ opt_str = option.get_opt_string()
253
+ val = option.convert_value(opt_str, val)
254
+ # From take_action
255
+ args = option.callback_args or ()
256
+ kwargs = option.callback_kwargs or {}
257
+ option.callback(option, opt_str, val, self, *args, **kwargs)
258
+ else:
259
+ val = self.check_default(option, key, val)
260
+
261
+ defaults[option.dest] = val
262
+
263
+ for key in late_eval:
264
+ defaults[key] = getattr(self.values, key)
265
+ self.values = None
266
+ return defaults
267
+
268
+ def get_default_values(self) -> optparse.Values:
269
+ """Overriding to make updating the defaults after instantiation of
270
+ the option parser possible, _update_defaults() does the dirty work."""
271
+ if not self.process_default_values:
272
+ # Old, pre-Optik 1.5 behaviour.
273
+ return optparse.Values(self.defaults)
274
+
275
+ # Load the configuration, or error out in case of an error
276
+ try:
277
+ self.config.load()
278
+ except ConfigurationError as err:
279
+ self.exit(UNKNOWN_ERROR, str(err))
280
+
281
+ defaults = self._update_defaults(self.defaults.copy()) # ours
282
+ for option in self._get_all_options():
283
+ assert option.dest is not None
284
+ default = defaults.get(option.dest)
285
+ if isinstance(default, str):
286
+ opt_str = option.get_opt_string()
287
+ defaults[option.dest] = option.check_value(opt_str, default)
288
+ return optparse.Values(defaults)
289
+
290
+ def error(self, msg: str) -> None:
291
+ self.print_usage(sys.stderr)
292
+ self.exit(UNKNOWN_ERROR, f"{msg}\n")
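The override ordering implemented in _get_ordered_configuration_items above can be restated as a small, self-contained sketch: [global] values are yielded first, then the command's own section, then ":env:", so later sources win whenever a key repeats. This is plain Python for illustration only; the sample keys and values are made up and none of it is pip's actual code.

from typing import Any, Dict, Iterator, List, Tuple

def ordered_items(config: Dict[str, Any], command_name: str) -> Iterator[Tuple[str, Any]]:
    # Later sections win: [global] first, then the command's section, then ":env:".
    override_order = ["global", command_name, ":env:"]
    grouped: Dict[str, List[Tuple[str, Any]]] = {name: [] for name in override_order}
    for section_key, val in config.items():
        if not val:
            continue  # empty values are skipped, as in the parser above
        section, key = section_key.split(".", 1)
        if section in override_order:
            grouped[section].append((key, val))
    for section in override_order:
        yield from grouped[section]

# Hypothetical sample: ":env:" (PIP_* variables) overrides the "install" section,
# which overrides [global], because its items are yielded last.
sample = {"global.timeout": "15", "install.timeout": "30", ":env:.timeout": "60"}
print(list(ordered_items(sample, "install")))
# [('timeout', '15'), ('timeout', '30'), ('timeout', '60')]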
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/req_command.py ADDED
@@ -0,0 +1,506 @@
1
+ """Contains the Command base classes that depend on PipSession.
2
+
3
+ The classes in this module are in a separate module so the commands not
4
+ needing download / PackageFinder capability don't unnecessarily import the
5
+ PackageFinder machinery and all its vendored dependencies, etc.
6
+ """
7
+
8
+ import logging
9
+ import os
10
+ import sys
11
+ from functools import partial
12
+ from optparse import Values
13
+ from typing import Any, List, Optional, Tuple
14
+
15
+ from pip._internal.cache import WheelCache
16
+ from pip._internal.cli import cmdoptions
17
+ from pip._internal.cli.base_command import Command
18
+ from pip._internal.cli.command_context import CommandContextMixIn
19
+ from pip._internal.exceptions import CommandError, PreviousBuildDirError
20
+ from pip._internal.index.collector import LinkCollector
21
+ from pip._internal.index.package_finder import PackageFinder
22
+ from pip._internal.models.selection_prefs import SelectionPreferences
23
+ from pip._internal.models.target_python import TargetPython
24
+ from pip._internal.network.session import PipSession
25
+ from pip._internal.operations.prepare import RequirementPreparer
26
+ from pip._internal.req.constructors import (
27
+ install_req_from_editable,
28
+ install_req_from_line,
29
+ install_req_from_parsed_requirement,
30
+ install_req_from_req_string,
31
+ )
32
+ from pip._internal.req.req_file import parse_requirements
33
+ from pip._internal.req.req_install import InstallRequirement
34
+ from pip._internal.req.req_tracker import RequirementTracker
35
+ from pip._internal.resolution.base import BaseResolver
36
+ from pip._internal.self_outdated_check import pip_self_version_check
37
+ from pip._internal.utils.deprecation import deprecated
38
+ from pip._internal.utils.temp_dir import (
39
+ TempDirectory,
40
+ TempDirectoryTypeRegistry,
41
+ tempdir_kinds,
42
+ )
43
+ from pip._internal.utils.virtualenv import running_under_virtualenv
44
+
45
+ logger = logging.getLogger(__name__)
46
+
47
+
48
+ class SessionCommandMixin(CommandContextMixIn):
49
+
50
+ """
51
+ A class mixin for command classes needing _build_session().
52
+ """
53
+
54
+ def __init__(self) -> None:
55
+ super().__init__()
56
+ self._session: Optional[PipSession] = None
57
+
58
+ @classmethod
59
+ def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
60
+ """Return a list of index urls from user-provided options."""
61
+ index_urls = []
62
+ if not getattr(options, "no_index", False):
63
+ url = getattr(options, "index_url", None)
64
+ if url:
65
+ index_urls.append(url)
66
+ urls = getattr(options, "extra_index_urls", None)
67
+ if urls:
68
+ index_urls.extend(urls)
69
+ # Return None rather than an empty list
70
+ return index_urls or None
71
+
72
+ def get_default_session(self, options: Values) -> PipSession:
73
+ """Get a default-managed session."""
74
+ if self._session is None:
75
+ self._session = self.enter_context(self._build_session(options))
76
+ # there's no type annotation on requests.Session, so it's
77
+ # automatically ContextManager[Any] and self._session becomes Any,
78
+ # then https://github.com/python/mypy/issues/7696 kicks in
79
+ assert self._session is not None
80
+ return self._session
81
+
82
+ def _build_session(
83
+ self,
84
+ options: Values,
85
+ retries: Optional[int] = None,
86
+ timeout: Optional[int] = None,
87
+ ) -> PipSession:
88
+ assert not options.cache_dir or os.path.isabs(options.cache_dir)
89
+ session = PipSession(
90
+ cache=(
91
+ os.path.join(options.cache_dir, "http") if options.cache_dir else None
92
+ ),
93
+ retries=retries if retries is not None else options.retries,
94
+ trusted_hosts=options.trusted_hosts,
95
+ index_urls=self._get_index_urls(options),
96
+ )
97
+
98
+ # Handle custom ca-bundles from the user
99
+ if options.cert:
100
+ session.verify = options.cert
101
+
102
+ # Handle SSL client certificate
103
+ if options.client_cert:
104
+ session.cert = options.client_cert
105
+
106
+ # Handle timeouts
107
+ if options.timeout or timeout:
108
+ session.timeout = timeout if timeout is not None else options.timeout
109
+
110
+ # Handle configured proxies
111
+ if options.proxy:
112
+ session.proxies = {
113
+ "http": options.proxy,
114
+ "https": options.proxy,
115
+ }
116
+
117
+ # Determine if we can prompt the user for authentication or not
118
+ session.auth.prompting = not options.no_input
119
+
120
+ return session
121
+
122
+
123
+ class IndexGroupCommand(Command, SessionCommandMixin):
124
+
125
+ """
126
+ Abstract base class for commands with the index_group options.
127
+
128
+ This also corresponds to the commands that permit the pip version check.
129
+ """
130
+
131
+ def handle_pip_version_check(self, options: Values) -> None:
132
+ """
133
+ Do the pip version check if not disabled.
134
+
135
+ This overrides the default behavior of not doing the check.
136
+ """
137
+ # Make sure the index_group options are present.
138
+ assert hasattr(options, "no_index")
139
+
140
+ if options.disable_pip_version_check or options.no_index:
141
+ return
142
+
143
+ # Otherwise, check if we're using the latest version of pip available.
144
+ session = self._build_session(
145
+ options, retries=0, timeout=min(5, options.timeout)
146
+ )
147
+ with session:
148
+ pip_self_version_check(session, options)
149
+
150
+
151
+ KEEPABLE_TEMPDIR_TYPES = [
152
+ tempdir_kinds.BUILD_ENV,
153
+ tempdir_kinds.EPHEM_WHEEL_CACHE,
154
+ tempdir_kinds.REQ_BUILD,
155
+ ]
156
+
157
+
158
+ def warn_if_run_as_root() -> None:
159
+ """Output a warning for sudo users on Unix.
160
+
161
+ In a virtual environment, sudo pip still writes to virtualenv.
162
+ On Windows, users may run pip as Administrator without issues.
163
+ This warning only applies to Unix root users outside of virtualenv.
164
+ """
165
+ if running_under_virtualenv():
166
+ return
167
+ if not hasattr(os, "getuid"):
168
+ return
169
+ # On Windows, there are no "system managed" Python packages. Installing as
170
+ # Administrator via pip is the correct way of updating system environments.
171
+ #
172
+ # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
173
+ # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
174
+ if sys.platform == "win32" or sys.platform == "cygwin":
175
+ return
176
+
177
+ if os.getuid() != 0:
178
+ return
179
+
180
+ logger.warning(
181
+ "Running pip as the 'root' user can result in broken permissions and "
182
+ "conflicting behaviour with the system package manager. "
183
+ "It is recommended to use a virtual environment instead: "
184
+ "https://pip.pypa.io/warnings/venv"
185
+ )
186
+
187
+
188
+ def with_cleanup(func: Any) -> Any:
189
+ """Decorator for common logic related to managing temporary
190
+ directories.
191
+ """
192
+
193
+ def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
194
+ for t in KEEPABLE_TEMPDIR_TYPES:
195
+ registry.set_delete(t, False)
196
+
197
+ def wrapper(
198
+ self: RequirementCommand, options: Values, args: List[Any]
199
+ ) -> Optional[int]:
200
+ assert self.tempdir_registry is not None
201
+ if options.no_clean:
202
+ configure_tempdir_registry(self.tempdir_registry)
203
+
204
+ try:
205
+ return func(self, options, args)
206
+ except PreviousBuildDirError:
207
+ # This kind of conflict can occur when the user passes an explicit
208
+ # build directory with a pre-existing folder. In that case we do
209
+ # not want to accidentally remove it.
210
+ configure_tempdir_registry(self.tempdir_registry)
211
+ raise
212
+
213
+ return wrapper
214
+
215
+
216
+ class RequirementCommand(IndexGroupCommand):
217
+ def __init__(self, *args: Any, **kw: Any) -> None:
218
+ super().__init__(*args, **kw)
219
+
220
+ self.cmd_opts.add_option(cmdoptions.no_clean())
221
+
222
+ @staticmethod
223
+ def determine_resolver_variant(options: Values) -> str:
224
+ """Determines which resolver should be used, based on the given options."""
225
+ if "legacy-resolver" in options.deprecated_features_enabled:
226
+ return "legacy"
227
+
228
+ return "2020-resolver"
229
+
230
+ @staticmethod
231
+ def determine_build_failure_suppression(options: Values) -> bool:
232
+ """Determines whether build failures should be suppressed and backtracked on."""
233
+ if "backtrack-on-build-failures" not in options.deprecated_features_enabled:
234
+ return False
235
+
236
+ if "legacy-resolver" in options.deprecated_features_enabled:
237
+ raise CommandError("Cannot backtrack with legacy resolver.")
238
+
239
+ deprecated(
240
+ reason=(
241
+ "Backtracking on build failures can mask issues related to how "
242
+ "a package generates metadata or builds a wheel. This flag will "
243
+ "be removed in pip 22.2."
244
+ ),
245
+ gone_in=None,
246
+ replacement=(
247
+ "avoiding known-bad versions by explicitly telling pip to ignore them "
248
+ "(either directly as requirements, or via a constraints file)"
249
+ ),
250
+ feature_flag=None,
251
+ issue=10655,
252
+ )
253
+ return True
254
+
255
+ @classmethod
256
+ def make_requirement_preparer(
257
+ cls,
258
+ temp_build_dir: TempDirectory,
259
+ options: Values,
260
+ req_tracker: RequirementTracker,
261
+ session: PipSession,
262
+ finder: PackageFinder,
263
+ use_user_site: bool,
264
+ download_dir: Optional[str] = None,
265
+ verbosity: int = 0,
266
+ ) -> RequirementPreparer:
267
+ """
268
+ Create a RequirementPreparer instance for the given parameters.
269
+ """
270
+ temp_build_dir_path = temp_build_dir.path
271
+ assert temp_build_dir_path is not None
272
+
273
+ resolver_variant = cls.determine_resolver_variant(options)
274
+ if resolver_variant == "2020-resolver":
275
+ lazy_wheel = "fast-deps" in options.features_enabled
276
+ if lazy_wheel:
277
+ logger.warning(
278
+ "pip is using lazily downloaded wheels using HTTP "
279
+ "range requests to obtain dependency information. "
280
+ "This experimental feature is enabled through "
281
+ "--use-feature=fast-deps and it is not ready for "
282
+ "production."
283
+ )
284
+ else:
285
+ lazy_wheel = False
286
+ if "fast-deps" in options.features_enabled:
287
+ logger.warning(
288
+ "fast-deps has no effect when used with the legacy resolver."
289
+ )
290
+
291
+ in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled
292
+ if "in-tree-build" in options.features_enabled:
293
+ deprecated(
294
+ reason="In-tree builds are now the default.",
295
+ replacement="to remove the --use-feature=in-tree-build flag",
296
+ gone_in="22.1",
297
+ )
298
+ if "out-of-tree-build" in options.deprecated_features_enabled:
299
+ deprecated(
300
+ reason="Out-of-tree builds are deprecated.",
301
+ replacement=None,
302
+ gone_in="22.1",
303
+ )
304
+
305
+ if options.progress_bar not in {"on", "off"}:
306
+ deprecated(
307
+ reason="Custom progress bar styles are deprecated",
308
+ replacement="to use the default progress bar style.",
309
+ gone_in="22.1",
310
+ )
311
+
312
+ return RequirementPreparer(
313
+ build_dir=temp_build_dir_path,
314
+ src_dir=options.src_dir,
315
+ download_dir=download_dir,
316
+ build_isolation=options.build_isolation,
317
+ req_tracker=req_tracker,
318
+ session=session,
319
+ progress_bar=options.progress_bar,
320
+ finder=finder,
321
+ require_hashes=options.require_hashes,
322
+ use_user_site=use_user_site,
323
+ lazy_wheel=lazy_wheel,
324
+ verbosity=verbosity,
325
+ in_tree_build=in_tree_build,
326
+ )
327
+
328
+ @classmethod
329
+ def make_resolver(
330
+ cls,
331
+ preparer: RequirementPreparer,
332
+ finder: PackageFinder,
333
+ options: Values,
334
+ wheel_cache: Optional[WheelCache] = None,
335
+ use_user_site: bool = False,
336
+ ignore_installed: bool = True,
337
+ ignore_requires_python: bool = False,
338
+ force_reinstall: bool = False,
339
+ upgrade_strategy: str = "to-satisfy-only",
340
+ use_pep517: Optional[bool] = None,
341
+ py_version_info: Optional[Tuple[int, ...]] = None,
342
+ ) -> BaseResolver:
343
+ """
344
+ Create a Resolver instance for the given parameters.
345
+ """
346
+ make_install_req = partial(
347
+ install_req_from_req_string,
348
+ isolated=options.isolated_mode,
349
+ use_pep517=use_pep517,
350
+ )
351
+ suppress_build_failures = cls.determine_build_failure_suppression(options)
352
+ resolver_variant = cls.determine_resolver_variant(options)
353
+ # The long import name and duplicated invocation is needed to convince
354
+ # Mypy into correctly typechecking. Otherwise it would complain the
355
+ # "Resolver" class being redefined.
356
+ if resolver_variant == "2020-resolver":
357
+ import pip._internal.resolution.resolvelib.resolver
358
+
359
+ return pip._internal.resolution.resolvelib.resolver.Resolver(
360
+ preparer=preparer,
361
+ finder=finder,
362
+ wheel_cache=wheel_cache,
363
+ make_install_req=make_install_req,
364
+ use_user_site=use_user_site,
365
+ ignore_dependencies=options.ignore_dependencies,
366
+ ignore_installed=ignore_installed,
367
+ ignore_requires_python=ignore_requires_python,
368
+ force_reinstall=force_reinstall,
369
+ upgrade_strategy=upgrade_strategy,
370
+ py_version_info=py_version_info,
371
+ suppress_build_failures=suppress_build_failures,
372
+ )
373
+ import pip._internal.resolution.legacy.resolver
374
+
375
+ return pip._internal.resolution.legacy.resolver.Resolver(
376
+ preparer=preparer,
377
+ finder=finder,
378
+ wheel_cache=wheel_cache,
379
+ make_install_req=make_install_req,
380
+ use_user_site=use_user_site,
381
+ ignore_dependencies=options.ignore_dependencies,
382
+ ignore_installed=ignore_installed,
383
+ ignore_requires_python=ignore_requires_python,
384
+ force_reinstall=force_reinstall,
385
+ upgrade_strategy=upgrade_strategy,
386
+ py_version_info=py_version_info,
387
+ )
388
+
389
+ def get_requirements(
390
+ self,
391
+ args: List[str],
392
+ options: Values,
393
+ finder: PackageFinder,
394
+ session: PipSession,
395
+ ) -> List[InstallRequirement]:
396
+ """
397
+ Parse command-line arguments into the corresponding requirements.
398
+ """
399
+ requirements: List[InstallRequirement] = []
400
+ for filename in options.constraints:
401
+ for parsed_req in parse_requirements(
402
+ filename,
403
+ constraint=True,
404
+ finder=finder,
405
+ options=options,
406
+ session=session,
407
+ ):
408
+ req_to_add = install_req_from_parsed_requirement(
409
+ parsed_req,
410
+ isolated=options.isolated_mode,
411
+ user_supplied=False,
412
+ )
413
+ requirements.append(req_to_add)
414
+
415
+ for req in args:
416
+ req_to_add = install_req_from_line(
417
+ req,
418
+ None,
419
+ isolated=options.isolated_mode,
420
+ use_pep517=options.use_pep517,
421
+ user_supplied=True,
422
+ )
423
+ requirements.append(req_to_add)
424
+
425
+ for req in options.editables:
426
+ req_to_add = install_req_from_editable(
427
+ req,
428
+ user_supplied=True,
429
+ isolated=options.isolated_mode,
430
+ use_pep517=options.use_pep517,
431
+ )
432
+ requirements.append(req_to_add)
433
+
434
+ # NOTE: options.require_hashes may be set if --require-hashes is True
435
+ for filename in options.requirements:
436
+ for parsed_req in parse_requirements(
437
+ filename, finder=finder, options=options, session=session
438
+ ):
439
+ req_to_add = install_req_from_parsed_requirement(
440
+ parsed_req,
441
+ isolated=options.isolated_mode,
442
+ use_pep517=options.use_pep517,
443
+ user_supplied=True,
444
+ )
445
+ requirements.append(req_to_add)
446
+
447
+ # If any requirement has hash options, enable hash checking.
448
+ if any(req.has_hash_options for req in requirements):
449
+ options.require_hashes = True
450
+
451
+ if not (args or options.editables or options.requirements):
452
+ opts = {"name": self.name}
453
+ if options.find_links:
454
+ raise CommandError(
455
+ "You must give at least one requirement to {name} "
456
+ '(maybe you meant "pip {name} {links}"?)'.format(
457
+ **dict(opts, links=" ".join(options.find_links))
458
+ )
459
+ )
460
+ else:
461
+ raise CommandError(
462
+ "You must give at least one requirement to {name} "
463
+ '(see "pip help {name}")'.format(**opts)
464
+ )
465
+
466
+ return requirements
467
+
468
+ @staticmethod
469
+ def trace_basic_info(finder: PackageFinder) -> None:
470
+ """
471
+ Trace basic information about the provided objects.
472
+ """
473
+ # Display where finder is looking for packages
474
+ search_scope = finder.search_scope
475
+ locations = search_scope.get_formatted_locations()
476
+ if locations:
477
+ logger.info(locations)
478
+
479
+ def _build_package_finder(
480
+ self,
481
+ options: Values,
482
+ session: PipSession,
483
+ target_python: Optional[TargetPython] = None,
484
+ ignore_requires_python: Optional[bool] = None,
485
+ ) -> PackageFinder:
486
+ """
487
+ Create a package finder appropriate to this requirement command.
488
+
489
+ :param ignore_requires_python: Whether to ignore incompatible
490
+ "Requires-Python" values in links. Defaults to False.
491
+ """
492
+ link_collector = LinkCollector.create(session, options=options)
493
+ selection_prefs = SelectionPreferences(
494
+ allow_yanked=True,
495
+ format_control=options.format_control,
496
+ allow_all_prereleases=options.pre,
497
+ prefer_binary=options.prefer_binary,
498
+ ignore_requires_python=ignore_requires_python,
499
+ )
500
+
501
+ return PackageFinder.create(
502
+ link_collector=link_collector,
503
+ selection_prefs=selection_prefs,
504
+ target_python=target_python,
505
+ use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
506
+ )
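The _get_index_urls classmethod above collapses the index options into either a list or None. A minimal standalone restatement of that logic, assuming nothing beyond the standard library; the URLs are placeholders, not recommendations.

from optparse import Values
from typing import List, Optional

def get_index_urls(options: Values) -> Optional[List[str]]:
    # --no-index suppresses every index; otherwise collect index-url plus any
    # extra-index-urls. An empty result is reported as None, not [].
    index_urls: List[str] = []
    if not getattr(options, "no_index", False):
        url = getattr(options, "index_url", None)
        if url:
            index_urls.append(url)
        extra = getattr(options, "extra_index_urls", None)
        if extra:
            index_urls.extend(extra)
    return index_urls or None

opts = Values({"no_index": False,
               "index_url": "https://pypi.org/simple",
               "extra_index_urls": ["https://mirror.example/simple"]})
print(get_index_urls(opts))                        # both URLs, primary first
print(get_index_urls(Values({"no_index": True})))  # None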
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/spinners.py ADDED
@@ -0,0 +1,157 @@
1
+ import contextlib
2
+ import itertools
3
+ import logging
4
+ import sys
5
+ import time
6
+ from typing import IO, Iterator
7
+
8
+ from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
9
+
10
+ from pip._internal.utils.compat import WINDOWS
11
+ from pip._internal.utils.logging import get_indentation
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
+ class SpinnerInterface:
17
+ def spin(self) -> None:
18
+ raise NotImplementedError()
19
+
20
+ def finish(self, final_status: str) -> None:
21
+ raise NotImplementedError()
22
+
23
+
24
+ class InteractiveSpinner(SpinnerInterface):
25
+ def __init__(
26
+ self,
27
+ message: str,
28
+ file: IO[str] = None,
29
+ spin_chars: str = "-\\|/",
30
+ # Empirically, 8 updates/second looks nice
31
+ min_update_interval_seconds: float = 0.125,
32
+ ):
33
+ self._message = message
34
+ if file is None:
35
+ file = sys.stdout
36
+ self._file = file
37
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
38
+ self._finished = False
39
+
40
+ self._spin_cycle = itertools.cycle(spin_chars)
41
+
42
+ self._file.write(" " * get_indentation() + self._message + " ... ")
43
+ self._width = 0
44
+
45
+ def _write(self, status: str) -> None:
46
+ assert not self._finished
47
+ # Erase what we wrote before by backspacing to the beginning, writing
48
+ # spaces to overwrite the old text, and then backspacing again
49
+ backup = "\b" * self._width
50
+ self._file.write(backup + " " * self._width + backup)
51
+ # Now we have a blank slate to add our status
52
+ self._file.write(status)
53
+ self._width = len(status)
54
+ self._file.flush()
55
+ self._rate_limiter.reset()
56
+
57
+ def spin(self) -> None:
58
+ if self._finished:
59
+ return
60
+ if not self._rate_limiter.ready():
61
+ return
62
+ self._write(next(self._spin_cycle))
63
+
64
+ def finish(self, final_status: str) -> None:
65
+ if self._finished:
66
+ return
67
+ self._write(final_status)
68
+ self._file.write("\n")
69
+ self._file.flush()
70
+ self._finished = True
71
+
72
+
73
+ # Used for dumb terminals, non-interactive installs (no tty), etc.
74
+ # We still print updates occasionally (once every 60 seconds by default) to
75
+ # act as a keep-alive for systems like Travis-CI that take lack-of-output as
76
+ # an indication that a task has frozen.
77
+ class NonInteractiveSpinner(SpinnerInterface):
78
+ def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
79
+ self._message = message
80
+ self._finished = False
81
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
82
+ self._update("started")
83
+
84
+ def _update(self, status: str) -> None:
85
+ assert not self._finished
86
+ self._rate_limiter.reset()
87
+ logger.info("%s: %s", self._message, status)
88
+
89
+ def spin(self) -> None:
90
+ if self._finished:
91
+ return
92
+ if not self._rate_limiter.ready():
93
+ return
94
+ self._update("still running...")
95
+
96
+ def finish(self, final_status: str) -> None:
97
+ if self._finished:
98
+ return
99
+ self._update(f"finished with status '{final_status}'")
100
+ self._finished = True
101
+
102
+
103
+ class RateLimiter:
104
+ def __init__(self, min_update_interval_seconds: float) -> None:
105
+ self._min_update_interval_seconds = min_update_interval_seconds
106
+ self._last_update: float = 0
107
+
108
+ def ready(self) -> bool:
109
+ now = time.time()
110
+ delta = now - self._last_update
111
+ return delta >= self._min_update_interval_seconds
112
+
113
+ def reset(self) -> None:
114
+ self._last_update = time.time()
115
+
116
+
117
+ @contextlib.contextmanager
118
+ def open_spinner(message: str) -> Iterator[SpinnerInterface]:
119
+ # Interactive spinner goes directly to sys.stdout rather than being routed
120
+ # through the logging system, but it acts like it has level INFO,
121
+ # i.e. it's only displayed if we're at level INFO or better.
122
+ # Non-interactive spinner goes through the logging system, so it is always
123
+ # in sync with logging configuration.
124
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
125
+ spinner: SpinnerInterface = InteractiveSpinner(message)
126
+ else:
127
+ spinner = NonInteractiveSpinner(message)
128
+ try:
129
+ with hidden_cursor(sys.stdout):
130
+ yield spinner
131
+ except KeyboardInterrupt:
132
+ spinner.finish("canceled")
133
+ raise
134
+ except Exception:
135
+ spinner.finish("error")
136
+ raise
137
+ else:
138
+ spinner.finish("done")
139
+
140
+
141
+ @contextlib.contextmanager
142
+ def hidden_cursor(file: IO[str]) -> Iterator[None]:
143
+ # The Windows terminal does not support the hide/show cursor ANSI codes,
144
+ # even via colorama. So don't even try.
145
+ if WINDOWS:
146
+ yield
147
+ # We don't want to clutter the output with control characters if we're
148
+ # writing to a file, or if the user is running with --quiet.
149
+ # See https://github.com/pypa/pip/issues/3418
150
+ elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
151
+ yield
152
+ else:
153
+ file.write(HIDE_CURSOR)
154
+ try:
155
+ yield
156
+ finally:
157
+ file.write(SHOW_CURSOR)
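open_spinner above is the context manager used around either spinner implementation; a hedged usage sketch follows. pip._internal is not a public API, so this only illustrates the call pattern, not a supported integration point.

import time
from pip._internal.cli.spinners import open_spinner

with open_spinner("Doing slow work") as spinner:
    for _ in range(5):
        time.sleep(0.5)   # stand-in for real work
        spinner.spin()    # rate-limited internally, so frequent calls are cheap
# Leaving the block normally reports "done"; an exception reports "error",
# and Ctrl-C reports "canceled" before re-raising.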
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py ADDED
@@ -0,0 +1,6 @@
1
+ SUCCESS = 0
2
+ ERROR = 1
3
+ UNKNOWN_ERROR = 2
4
+ VIRTUALENV_NOT_FOUND = 3
5
+ PREVIOUS_BUILD_DIR_ERROR = 4
6
+ NO_MATCHES_FOUND = 23
env-llmeval/lib/python3.10/site-packages/pip/_internal/configuration.py ADDED
@@ -0,0 +1,366 @@
1
+ """Configuration management setup
2
+
3
+ Some terminology:
4
+ - name
5
+ As written in config files.
6
+ - value
7
+ Value associated with a name
8
+ - key
9
+ Name combined with its section (section.name)
10
+ - variant
11
+ A single word describing where the configuration key-value pair came from
12
+ """
13
+
14
+ import configparser
15
+ import locale
16
+ import os
17
+ import sys
18
+ from typing import Any, Dict, Iterable, List, NewType, Optional, Tuple
19
+
20
+ from pip._internal.exceptions import (
21
+ ConfigurationError,
22
+ ConfigurationFileCouldNotBeLoaded,
23
+ )
24
+ from pip._internal.utils import appdirs
25
+ from pip._internal.utils.compat import WINDOWS
26
+ from pip._internal.utils.logging import getLogger
27
+ from pip._internal.utils.misc import ensure_dir, enum
28
+
29
+ RawConfigParser = configparser.RawConfigParser # Shorthand
30
+ Kind = NewType("Kind", str)
31
+
32
+ CONFIG_BASENAME = "pip.ini" if WINDOWS else "pip.conf"
33
+ ENV_NAMES_IGNORED = "version", "help"
34
+
35
+ # The kinds of configurations there are.
36
+ kinds = enum(
37
+ USER="user", # User Specific
38
+ GLOBAL="global", # System Wide
39
+ SITE="site", # [Virtual] Environment Specific
40
+ ENV="env", # from PIP_CONFIG_FILE
41
+ ENV_VAR="env-var", # from Environment Variables
42
+ )
43
+ OVERRIDE_ORDER = kinds.GLOBAL, kinds.USER, kinds.SITE, kinds.ENV, kinds.ENV_VAR
44
+ VALID_LOAD_ONLY = kinds.USER, kinds.GLOBAL, kinds.SITE
45
+
46
+ logger = getLogger(__name__)
47
+
48
+
49
+ # NOTE: Maybe use the optionx attribute to normalize keynames.
50
+ def _normalize_name(name: str) -> str:
51
+ """Make a name consistent regardless of source (environment or file)"""
52
+ name = name.lower().replace("_", "-")
53
+ if name.startswith("--"):
54
+ name = name[2:] # only prefer long opts
55
+ return name
56
+
57
+
58
+ def _disassemble_key(name: str) -> List[str]:
59
+ if "." not in name:
60
+ error_message = (
61
+ "Key does not contain dot separated section and key. "
62
+ "Perhaps you wanted to use 'global.{}' instead?"
63
+ ).format(name)
64
+ raise ConfigurationError(error_message)
65
+ return name.split(".", 1)
66
+
67
+
68
+ def get_configuration_files() -> Dict[Kind, List[str]]:
69
+ global_config_files = [
70
+ os.path.join(path, CONFIG_BASENAME) for path in appdirs.site_config_dirs("pip")
71
+ ]
72
+
73
+ site_config_file = os.path.join(sys.prefix, CONFIG_BASENAME)
74
+ legacy_config_file = os.path.join(
75
+ os.path.expanduser("~"),
76
+ "pip" if WINDOWS else ".pip",
77
+ CONFIG_BASENAME,
78
+ )
79
+ new_config_file = os.path.join(appdirs.user_config_dir("pip"), CONFIG_BASENAME)
80
+ return {
81
+ kinds.GLOBAL: global_config_files,
82
+ kinds.SITE: [site_config_file],
83
+ kinds.USER: [legacy_config_file, new_config_file],
84
+ }
85
+
86
+
87
+ class Configuration:
88
+ """Handles management of configuration.
89
+
90
+ Provides an interface to accessing and managing configuration files.
91
+
92
+ This class provides an API that takes "section.key-name" style
93
+ keys and stores the value associated with it as "key-name" under the
94
+ section "section".
95
+
96
+ This allows for a clean interface wherein both the section and the
97
+ key-name are preserved in an easy to manage form in the configuration files
98
+ and the data stored is also nice.
99
+ """
100
+
101
+ def __init__(self, isolated: bool, load_only: Optional[Kind] = None) -> None:
102
+ super().__init__()
103
+
104
+ if load_only is not None and load_only not in VALID_LOAD_ONLY:
105
+ raise ConfigurationError(
106
+ "Got invalid value for load_only - should be one of {}".format(
107
+ ", ".join(map(repr, VALID_LOAD_ONLY))
108
+ )
109
+ )
110
+ self.isolated = isolated
111
+ self.load_only = load_only
112
+
113
+ # Because we keep track of where we got the data from
114
+ self._parsers: Dict[Kind, List[Tuple[str, RawConfigParser]]] = {
115
+ variant: [] for variant in OVERRIDE_ORDER
116
+ }
117
+ self._config: Dict[Kind, Dict[str, Any]] = {
118
+ variant: {} for variant in OVERRIDE_ORDER
119
+ }
120
+ self._modified_parsers: List[Tuple[str, RawConfigParser]] = []
121
+
122
+ def load(self) -> None:
123
+ """Loads configuration from configuration files and environment"""
124
+ self._load_config_files()
125
+ if not self.isolated:
126
+ self._load_environment_vars()
127
+
128
+ def get_file_to_edit(self) -> Optional[str]:
129
+ """Returns the file with highest priority in configuration"""
130
+ assert self.load_only is not None, "A file to edit needs to be specified"
131
+
132
+ try:
133
+ return self._get_parser_to_modify()[0]
134
+ except IndexError:
135
+ return None
136
+
137
+ def items(self) -> Iterable[Tuple[str, Any]]:
138
+ """Returns key-value pairs like dict.items() representing the loaded
139
+ configuration
140
+ """
141
+ return self._dictionary.items()
142
+
143
+ def get_value(self, key: str) -> Any:
144
+ """Get a value from the configuration."""
145
+ try:
146
+ return self._dictionary[key]
147
+ except KeyError:
148
+ raise ConfigurationError(f"No such key - {key}")
149
+
150
+ def set_value(self, key: str, value: Any) -> None:
151
+ """Modify a value in the configuration."""
152
+ self._ensure_have_load_only()
153
+
154
+ assert self.load_only
155
+ fname, parser = self._get_parser_to_modify()
156
+
157
+ if parser is not None:
158
+ section, name = _disassemble_key(key)
159
+
160
+ # Modify the parser and the configuration
161
+ if not parser.has_section(section):
162
+ parser.add_section(section)
163
+ parser.set(section, name, value)
164
+
165
+ self._config[self.load_only][key] = value
166
+ self._mark_as_modified(fname, parser)
167
+
168
+ def unset_value(self, key: str) -> None:
169
+ """Unset a value in the configuration."""
170
+ self._ensure_have_load_only()
171
+
172
+ assert self.load_only
173
+ if key not in self._config[self.load_only]:
174
+ raise ConfigurationError(f"No such key - {key}")
175
+
176
+ fname, parser = self._get_parser_to_modify()
177
+
178
+ if parser is not None:
179
+ section, name = _disassemble_key(key)
180
+ if not (
181
+ parser.has_section(section) and parser.remove_option(section, name)
182
+ ):
183
+ # The option was not removed.
184
+ raise ConfigurationError(
185
+ "Fatal Internal error [id=1]. Please report as a bug."
186
+ )
187
+
188
+ # The section may be empty after the option was removed.
189
+ if not parser.items(section):
190
+ parser.remove_section(section)
191
+ self._mark_as_modified(fname, parser)
192
+
193
+ del self._config[self.load_only][key]
194
+
195
+ def save(self) -> None:
196
+ """Save the current in-memory state."""
197
+ self._ensure_have_load_only()
198
+
199
+ for fname, parser in self._modified_parsers:
200
+ logger.info("Writing to %s", fname)
201
+
202
+ # Ensure directory exists.
203
+ ensure_dir(os.path.dirname(fname))
204
+
205
+ with open(fname, "w") as f:
206
+ parser.write(f)
207
+
208
+ #
209
+ # Private routines
210
+ #
211
+
212
+ def _ensure_have_load_only(self) -> None:
213
+ if self.load_only is None:
214
+ raise ConfigurationError("Needed a specific file to be modifying.")
215
+ logger.debug("Will be working with %s variant only", self.load_only)
216
+
217
+ @property
218
+ def _dictionary(self) -> Dict[str, Any]:
219
+ """A dictionary representing the loaded configuration."""
220
+ # NOTE: Dictionaries are not populated if not loaded. So, conditionals
221
+ # are not needed here.
222
+ retval = {}
223
+
224
+ for variant in OVERRIDE_ORDER:
225
+ retval.update(self._config[variant])
226
+
227
+ return retval
228
+
229
+ def _load_config_files(self) -> None:
230
+ """Loads configuration from configuration files"""
231
+ config_files = dict(self.iter_config_files())
232
+ if config_files[kinds.ENV][0:1] == [os.devnull]:
233
+ logger.debug(
234
+ "Skipping loading configuration files due to "
235
+ "environment's PIP_CONFIG_FILE being os.devnull"
236
+ )
237
+ return
238
+
239
+ for variant, files in config_files.items():
240
+ for fname in files:
241
+ # If there's specific variant set in `load_only`, load only
242
+ # that variant, not the others.
243
+ if self.load_only is not None and variant != self.load_only:
244
+ logger.debug("Skipping file '%s' (variant: %s)", fname, variant)
245
+ continue
246
+
247
+ parser = self._load_file(variant, fname)
248
+
249
+ # Keeping track of the parsers used
250
+ self._parsers[variant].append((fname, parser))
251
+
252
+ def _load_file(self, variant: Kind, fname: str) -> RawConfigParser:
253
+ logger.verbose("For variant '%s', will try loading '%s'", variant, fname)
254
+ parser = self._construct_parser(fname)
255
+
256
+ for section in parser.sections():
257
+ items = parser.items(section)
258
+ self._config[variant].update(self._normalized_keys(section, items))
259
+
260
+ return parser
261
+
262
+ def _construct_parser(self, fname: str) -> RawConfigParser:
263
+ parser = configparser.RawConfigParser()
264
+ # If there is no such file, don't bother reading it but create the
265
+ # parser anyway, to hold the data.
266
+ # Doing this is useful when modifying and saving files, where we don't
267
+ # need to construct a parser.
268
+ if os.path.exists(fname):
269
+ locale_encoding = locale.getpreferredencoding(False)
270
+ try:
271
+ parser.read(fname, encoding=locale_encoding)
272
+ except UnicodeDecodeError:
273
+ # See https://github.com/pypa/pip/issues/4963
274
+ raise ConfigurationFileCouldNotBeLoaded(
275
+ reason=f"contains invalid {locale_encoding} characters",
276
+ fname=fname,
277
+ )
278
+ except configparser.Error as error:
279
+ # See https://github.com/pypa/pip/issues/4893
280
+ raise ConfigurationFileCouldNotBeLoaded(error=error)
281
+ return parser
282
+
283
+ def _load_environment_vars(self) -> None:
284
+ """Loads configuration from environment variables"""
285
+ self._config[kinds.ENV_VAR].update(
286
+ self._normalized_keys(":env:", self.get_environ_vars())
287
+ )
288
+
289
+ def _normalized_keys(
290
+ self, section: str, items: Iterable[Tuple[str, Any]]
291
+ ) -> Dict[str, Any]:
292
+ """Normalizes items to construct a dictionary with normalized keys.
293
+
294
+ This routine is where the names become keys and are made the same
295
+ regardless of source - configuration files or environment.
296
+ """
297
+ normalized = {}
298
+ for name, val in items:
299
+ key = section + "." + _normalize_name(name)
300
+ normalized[key] = val
301
+ return normalized
302
+
303
+ def get_environ_vars(self) -> Iterable[Tuple[str, str]]:
304
+ """Returns a generator with all environmental vars with prefix PIP_"""
305
+ for key, val in os.environ.items():
306
+ if key.startswith("PIP_"):
307
+ name = key[4:].lower()
308
+ if name not in ENV_NAMES_IGNORED:
309
+ yield name, val
310
+
311
+ # XXX: This is patched in the tests.
312
+ def iter_config_files(self) -> Iterable[Tuple[Kind, List[str]]]:
313
+ """Yields variant and configuration files associated with it.
314
+
315
+ This should be treated like items of a dictionary.
316
+ """
317
+ # SMELL: Move the conditions out of this function
318
+
319
+ # environment variables have the lowest priority
320
+ config_file = os.environ.get("PIP_CONFIG_FILE", None)
321
+ if config_file is not None:
322
+ yield kinds.ENV, [config_file]
323
+ else:
324
+ yield kinds.ENV, []
325
+
326
+ config_files = get_configuration_files()
327
+
328
+ # at the base we have any global configuration
329
+ yield kinds.GLOBAL, config_files[kinds.GLOBAL]
330
+
331
+ # per-user configuration next
332
+ should_load_user_config = not self.isolated and not (
333
+ config_file and os.path.exists(config_file)
334
+ )
335
+ if should_load_user_config:
336
+ # The legacy config file is overridden by the new config file
337
+ yield kinds.USER, config_files[kinds.USER]
338
+
339
+ # finally virtualenv configuration first trumping others
340
+ yield kinds.SITE, config_files[kinds.SITE]
341
+
342
+ def get_values_in_config(self, variant: Kind) -> Dict[str, Any]:
343
+ """Get values present in a config file"""
344
+ return self._config[variant]
345
+
346
+ def _get_parser_to_modify(self) -> Tuple[str, RawConfigParser]:
347
+ # Determine which parser to modify
348
+ assert self.load_only
349
+ parsers = self._parsers[self.load_only]
350
+ if not parsers:
351
+ # This should not happen if everything works correctly.
352
+ raise ConfigurationError(
353
+ "Fatal Internal error [id=2]. Please report as a bug."
354
+ )
355
+
356
+ # Use the highest priority parser.
357
+ return parsers[-1]
358
+
359
+ # XXX: This is patched in the tests.
360
+ def _mark_as_modified(self, fname: str, parser: RawConfigParser) -> None:
361
+ file_parser_tuple = (fname, parser)
362
+ if file_parser_tuple not in self._modified_parsers:
363
+ self._modified_parsers.append(file_parser_tuple)
364
+
365
+ def __repr__(self) -> str:
366
+ return f"{self.__class__.__name__}({self._dictionary!r})"
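The Configuration class above addresses every setting as "section.key-name" and merges the variants in OVERRIDE_ORDER. A hedged usage sketch, again relying on pip._internal (not a stable public API), purely to illustrate the key format and lookup behaviour.

from pip._internal.configuration import Configuration
from pip._internal.exceptions import ConfigurationError

config = Configuration(isolated=False)
config.load()  # reads global/user/site files plus PIP_* environment variables

try:
    print(config.get_value("global.index-url"))
except ConfigurationError:
    print("index-url is not set in any loaded source")

# items() flattens all variants; higher-priority variants override lower ones.
for key, value in config.items():
    print(f"{key} = {value}")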
env-llmeval/lib/python3.10/site-packages/pip/_internal/exceptions.py ADDED
@@ -0,0 +1,658 @@
1
+ """Exceptions used throughout package.
2
+
3
+ This module MUST NOT try to import from anything within `pip._internal` to
4
+ operate. This is expected to be importable from any/all files within the
5
+ subpackage and, thus, should not depend on them.
6
+ """
7
+
8
+ import configparser
9
+ import re
10
+ from itertools import chain, groupby, repeat
11
+ from typing import TYPE_CHECKING, Dict, List, Optional, Union
12
+
13
+ from pip._vendor.requests.models import Request, Response
14
+ from pip._vendor.rich.console import Console, ConsoleOptions, RenderResult
15
+ from pip._vendor.rich.markup import escape
16
+ from pip._vendor.rich.text import Text
17
+
18
+ if TYPE_CHECKING:
19
+ from hashlib import _Hash
20
+ from typing import Literal
21
+
22
+ from pip._internal.metadata import BaseDistribution
23
+ from pip._internal.req.req_install import InstallRequirement
24
+
25
+
26
+ #
27
+ # Scaffolding
28
+ #
29
+ def _is_kebab_case(s: str) -> bool:
30
+ return re.match(r"^[a-z]+(-[a-z]+)*$", s) is not None
31
+
32
+
33
+ def _prefix_with_indent(
34
+ s: Union[Text, str],
35
+ console: Console,
36
+ *,
37
+ prefix: str,
38
+ indent: str,
39
+ ) -> Text:
40
+ if isinstance(s, Text):
41
+ text = s
42
+ else:
43
+ text = console.render_str(s)
44
+
45
+ return console.render_str(prefix, overflow="ignore") + console.render_str(
46
+ f"\n{indent}", overflow="ignore"
47
+ ).join(text.split(allow_blank=True))
48
+
49
+
50
+ class PipError(Exception):
51
+ """The base pip error."""
52
+
53
+
54
+ class DiagnosticPipError(PipError):
55
+ """An error that presents diagnostic information to the user.
56
+
57
+ This contains a bunch of logic, to enable pretty presentation of our error
58
+ messages. Each error gets a unique reference. Each error can also include
59
+ additional context, a hint and/or a note -- which are presented with the
60
+ main error message in a consistent style.
61
+
62
+ This is adapted from the error output styling in `sphinx-theme-builder`.
63
+ """
64
+
65
+ reference: str
66
+
67
+ def __init__(
68
+ self,
69
+ *,
70
+ kind: 'Literal["error", "warning"]' = "error",
71
+ reference: Optional[str] = None,
72
+ message: Union[str, Text],
73
+ context: Optional[Union[str, Text]],
74
+ hint_stmt: Optional[Union[str, Text]],
75
+ note_stmt: Optional[Union[str, Text]] = None,
76
+ link: Optional[str] = None,
77
+ ) -> None:
78
+ # Ensure a proper reference is provided.
79
+ if reference is None:
80
+ assert hasattr(self, "reference"), "error reference not provided!"
81
+ reference = self.reference
82
+ assert _is_kebab_case(reference), "error reference must be kebab-case!"
83
+
84
+ self.kind = kind
85
+ self.reference = reference
86
+
87
+ self.message = message
88
+ self.context = context
89
+
90
+ self.note_stmt = note_stmt
91
+ self.hint_stmt = hint_stmt
92
+
93
+ self.link = link
94
+
95
+ super().__init__(f"<{self.__class__.__name__}: {self.reference}>")
96
+
97
+ def __repr__(self) -> str:
98
+ return (
99
+ f"<{self.__class__.__name__}("
100
+ f"reference={self.reference!r}, "
101
+ f"message={self.message!r}, "
102
+ f"context={self.context!r}, "
103
+ f"note_stmt={self.note_stmt!r}, "
104
+ f"hint_stmt={self.hint_stmt!r}"
105
+ ")>"
106
+ )
107
+
108
+ def __rich_console__(
109
+ self,
110
+ console: Console,
111
+ options: ConsoleOptions,
112
+ ) -> RenderResult:
113
+ colour = "red" if self.kind == "error" else "yellow"
114
+
115
+ yield f"[{colour} bold]{self.kind}[/]: [bold]{self.reference}[/]"
116
+ yield ""
117
+
118
+ if not options.ascii_only:
119
+ # Present the main message, with relevant context indented.
120
+ if self.context is not None:
121
+ yield _prefix_with_indent(
122
+ self.message,
123
+ console,
124
+ prefix=f"[{colour}]×[/] ",
125
+ indent=f"[{colour}]│[/] ",
126
+ )
127
+ yield _prefix_with_indent(
128
+ self.context,
129
+ console,
130
+ prefix=f"[{colour}]╰─>[/] ",
131
+ indent=f"[{colour}] [/] ",
132
+ )
133
+ else:
134
+ yield _prefix_with_indent(
135
+ self.message,
136
+ console,
137
+ prefix="[red]×[/] ",
138
+ indent=" ",
139
+ )
140
+ else:
141
+ yield self.message
142
+ if self.context is not None:
143
+ yield ""
144
+ yield self.context
145
+
146
+ if self.note_stmt is not None or self.hint_stmt is not None:
147
+ yield ""
148
+
149
+ if self.note_stmt is not None:
150
+ yield _prefix_with_indent(
151
+ self.note_stmt,
152
+ console,
153
+ prefix="[magenta bold]note[/]: ",
154
+ indent=" ",
155
+ )
156
+ if self.hint_stmt is not None:
157
+ yield _prefix_with_indent(
158
+ self.hint_stmt,
159
+ console,
160
+ prefix="[cyan bold]hint[/]: ",
161
+ indent=" ",
162
+ )
163
+
164
+ if self.link is not None:
165
+ yield ""
166
+ yield f"Link: {self.link}"
167
+
168
+
169
+ #
170
+ # Actual Errors
171
+ #
172
+ class ConfigurationError(PipError):
173
+ """General exception in configuration"""
174
+
175
+
176
+ class InstallationError(PipError):
177
+ """General exception during installation"""
178
+
179
+
180
+ class UninstallationError(PipError):
181
+ """General exception during uninstallation"""
182
+
183
+
184
+ class MissingPyProjectBuildRequires(DiagnosticPipError):
185
+ """Raised when pyproject.toml has `build-system`, but no `build-system.requires`."""
186
+
187
+ reference = "missing-pyproject-build-system-requires"
188
+
189
+ def __init__(self, *, package: str) -> None:
190
+ super().__init__(
191
+ message=f"Can not process {escape(package)}",
192
+ context=Text(
193
+ "This package has an invalid pyproject.toml file.\n"
194
+ "The [build-system] table is missing the mandatory `requires` key."
195
+ ),
196
+ note_stmt="This is an issue with the package mentioned above, not pip.",
197
+ hint_stmt=Text("See PEP 518 for the detailed specification."),
198
+ )
199
+
200
+
201
+ class InvalidPyProjectBuildRequires(DiagnosticPipError):
202
+ """Raised when pyproject.toml has an invalid `build-system.requires`."""
203
+
204
+ reference = "invalid-pyproject-build-system-requires"
205
+
206
+ def __init__(self, *, package: str, reason: str) -> None:
207
+ super().__init__(
208
+ message=f"Can not process {escape(package)}",
209
+ context=Text(
210
+ "This package has an invalid `build-system.requires` key in "
211
+ f"pyproject.toml.\n{reason}"
212
+ ),
213
+ note_stmt="This is an issue with the package mentioned above, not pip.",
214
+ hint_stmt=Text("See PEP 518 for the detailed specification."),
215
+ )
216
+
217
+
218
+ class NoneMetadataError(PipError):
219
+ """Raised when accessing a Distribution's "METADATA" or "PKG-INFO".
220
+
221
+ This signifies an inconsistency, when the Distribution claims to have
222
+ the metadata file (if not, raise ``FileNotFoundError`` instead), but is
223
+ not actually able to produce its content. This may be due to permission
224
+ errors.
225
+ """
226
+
227
+ def __init__(
228
+ self,
229
+ dist: "BaseDistribution",
230
+ metadata_name: str,
231
+ ) -> None:
232
+ """
233
+ :param dist: A Distribution object.
234
+ :param metadata_name: The name of the metadata being accessed
235
+ (can be "METADATA" or "PKG-INFO").
236
+ """
237
+ self.dist = dist
238
+ self.metadata_name = metadata_name
239
+
240
+ def __str__(self) -> str:
241
+ # Use `dist` in the error message because its stringification
242
+ # includes more information, like the version and location.
243
+ return "None {} metadata found for distribution: {}".format(
244
+ self.metadata_name,
245
+ self.dist,
246
+ )
247
+
248
+
249
+ class UserInstallationInvalid(InstallationError):
250
+ """A --user install is requested on an environment without user site."""
251
+
252
+ def __str__(self) -> str:
253
+ return "User base directory is not specified"
254
+
255
+
256
+ class InvalidSchemeCombination(InstallationError):
257
+ def __str__(self) -> str:
258
+ before = ", ".join(str(a) for a in self.args[:-1])
259
+ return f"Cannot set {before} and {self.args[-1]} together"
260
+
261
+
262
+ class DistributionNotFound(InstallationError):
263
+ """Raised when a distribution cannot be found to satisfy a requirement"""
264
+
265
+
266
+ class RequirementsFileParseError(InstallationError):
267
+ """Raised when a general error occurs parsing a requirements file line."""
268
+
269
+
270
+ class BestVersionAlreadyInstalled(PipError):
271
+ """Raised when the most up-to-date version of a package is already
272
+ installed."""
273
+
274
+
275
+ class BadCommand(PipError):
276
+ """Raised when virtualenv or a command is not found"""
277
+
278
+
279
+ class CommandError(PipError):
280
+ """Raised when there is an error in command-line arguments"""
281
+
282
+
283
+ class PreviousBuildDirError(PipError):
284
+ """Raised when there's a previous conflicting build directory"""
285
+
286
+
287
+ class NetworkConnectionError(PipError):
288
+ """HTTP connection error"""
289
+
290
+ def __init__(
291
+ self, error_msg: str, response: Response = None, request: Request = None
292
+ ) -> None:
293
+ """
294
+ Initialize NetworkConnectionError with `request` and `response`
295
+ objects.
296
+ """
297
+ self.response = response
298
+ self.request = request
299
+ self.error_msg = error_msg
300
+ if (
301
+ self.response is not None
302
+ and not self.request
303
+ and hasattr(response, "request")
304
+ ):
305
+ self.request = self.response.request
306
+ super().__init__(error_msg, response, request)
307
+
308
+ def __str__(self) -> str:
309
+ return str(self.error_msg)
310
+
311
+
312
+ class InvalidWheelFilename(InstallationError):
313
+ """Invalid wheel filename."""
314
+
315
+
316
+ class UnsupportedWheel(InstallationError):
317
+ """Unsupported wheel."""
318
+
319
+
320
+ class InvalidWheel(InstallationError):
321
+ """Invalid (e.g. corrupt) wheel."""
322
+
323
+ def __init__(self, location: str, name: str):
324
+ self.location = location
325
+ self.name = name
326
+
327
+ def __str__(self) -> str:
328
+ return f"Wheel '{self.name}' located at {self.location} is invalid."
329
+
330
+
331
+ class MetadataInconsistent(InstallationError):
332
+ """Built metadata contains inconsistent information.
333
+
334
+ This is raised when the metadata contains values (e.g. name and version)
335
+ that do not match the information previously obtained from sdist filename
336
+ or user-supplied ``#egg=`` value.
337
+ """
338
+
339
+ def __init__(
340
+ self, ireq: "InstallRequirement", field: str, f_val: str, m_val: str
341
+ ) -> None:
342
+ self.ireq = ireq
343
+ self.field = field
344
+ self.f_val = f_val
345
+ self.m_val = m_val
346
+
347
+ def __str__(self) -> str:
348
+ template = (
349
+ "Requested {} has inconsistent {}: "
350
+ "filename has {!r}, but metadata has {!r}"
351
+ )
352
+ return template.format(self.ireq, self.field, self.f_val, self.m_val)
353
+
354
+
355
+ class LegacyInstallFailure(DiagnosticPipError):
356
+ """Error occurred while executing `setup.py install`"""
357
+
358
+ reference = "legacy-install-failure"
359
+
360
+ def __init__(self, package_details: str) -> None:
361
+ super().__init__(
362
+ message="Encountered error while trying to install package.",
363
+ context=package_details,
364
+ hint_stmt="See above for output from the failure.",
365
+ note_stmt="This is an issue with the package mentioned above, not pip.",
366
+ )
367
+
368
+
369
+ class InstallationSubprocessError(DiagnosticPipError, InstallationError):
370
+ """A subprocess call failed."""
371
+
372
+ reference = "subprocess-exited-with-error"
373
+
374
+ def __init__(
375
+ self,
376
+ *,
377
+ command_description: str,
378
+ exit_code: int,
379
+ output_lines: Optional[List[str]],
380
+ ) -> None:
381
+ if output_lines is None:
382
+ output_prompt = Text("See above for output.")
383
+ else:
384
+ output_prompt = (
385
+ Text.from_markup(f"[red][{len(output_lines)} lines of output][/]\n")
386
+ + Text("".join(output_lines))
387
+ + Text.from_markup(R"[red]\[end of output][/]")
388
+ )
389
+
390
+ super().__init__(
391
+ message=(
392
+ f"[green]{escape(command_description)}[/] did not run successfully.\n"
393
+ f"exit code: {exit_code}"
394
+ ),
395
+ context=output_prompt,
396
+ hint_stmt=None,
397
+ note_stmt=(
398
+ "This error originates from a subprocess, and is likely not a "
399
+ "problem with pip."
400
+ ),
401
+ )
402
+
403
+ self.command_description = command_description
404
+ self.exit_code = exit_code
405
+
406
+ def __str__(self) -> str:
407
+ return f"{self.command_description} exited with {self.exit_code}"
408
+
409
+
410
+ class MetadataGenerationFailed(InstallationSubprocessError, InstallationError):
411
+ reference = "metadata-generation-failed"
412
+
413
+ def __init__(
414
+ self,
415
+ *,
416
+ package_details: str,
417
+ ) -> None:
418
+ super(InstallationSubprocessError, self).__init__(
419
+ message="Encountered error while generating package metadata.",
420
+ context=escape(package_details),
421
+ hint_stmt="See above for details.",
422
+ note_stmt="This is an issue with the package mentioned above, not pip.",
423
+ )
424
+
425
+ def __str__(self) -> str:
426
+ return "metadata generation failed"
427
+
428
+
429
+ class HashErrors(InstallationError):
430
+ """Multiple HashError instances rolled into one for reporting"""
431
+
432
+ def __init__(self) -> None:
433
+ self.errors: List["HashError"] = []
434
+
435
+ def append(self, error: "HashError") -> None:
436
+ self.errors.append(error)
437
+
438
+ def __str__(self) -> str:
439
+ lines = []
440
+ self.errors.sort(key=lambda e: e.order)
441
+ for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__):
442
+ lines.append(cls.head)
443
+ lines.extend(e.body() for e in errors_of_cls)
444
+ if lines:
445
+ return "\n".join(lines)
446
+ return ""
447
+
448
+ def __bool__(self) -> bool:
449
+ return bool(self.errors)
450
+
451
+
452
+ class HashError(InstallationError):
453
+ """
454
+ A failure to verify a package against known-good hashes
455
+
456
+ :cvar order: An int sorting hash exception classes by difficulty of
457
+ recovery (lower being harder), so the user doesn't bother fretting
458
+ about unpinned packages when he has deeper issues, like VCS
459
+ dependencies, to deal with. Also keeps error reports in a
460
+ deterministic order.
461
+ :cvar head: A section heading for display above potentially many
462
+ exceptions of this kind
463
+ :ivar req: The InstallRequirement that triggered this error. This is
464
+ pasted on after the exception is instantiated, because it's not
465
+ typically available earlier.
466
+
467
+ """
468
+
469
+ req: Optional["InstallRequirement"] = None
470
+ head = ""
471
+ order: int = -1
472
+
473
+ def body(self) -> str:
474
+ """Return a summary of me for display under the heading.
475
+
476
+ This default implementation simply prints a description of the
477
+ triggering requirement.
478
+
479
+ :param req: The InstallRequirement that provoked this error, with
480
+ its link already populated by the resolver's _populate_link().
481
+
482
+ """
483
+ return f" {self._requirement_name()}"
484
+
485
+ def __str__(self) -> str:
486
+ return f"{self.head}\n{self.body()}"
487
+
488
+ def _requirement_name(self) -> str:
489
+ """Return a description of the requirement that triggered me.
490
+
491
+ This default implementation returns long description of the req, with
492
+ line numbers
493
+
494
+ """
495
+ return str(self.req) if self.req else "unknown package"
496
+
497
+
498
+ class VcsHashUnsupported(HashError):
499
+ """A hash was provided for a version-control-system-based requirement, but
500
+ we don't have a method for hashing those."""
501
+
502
+ order = 0
503
+ head = (
504
+ "Can't verify hashes for these requirements because we don't "
505
+ "have a way to hash version control repositories:"
506
+ )
507
+
508
+
509
+ class DirectoryUrlHashUnsupported(HashError):
510
+ """A hash was provided for a file:// requirement that points to a
511
+ directory, and we don't have a method for hashing directories."""
512
+
513
+ order = 1
514
+ head = (
515
+ "Can't verify hashes for these file:// requirements because they "
516
+ "point to directories:"
517
+ )
518
+
519
+
520
+ class HashMissing(HashError):
521
+ """A hash was needed for a requirement but is absent."""
522
+
523
+ order = 2
524
+ head = (
525
+ "Hashes are required in --require-hashes mode, but they are "
526
+ "missing from some requirements. Here is a list of those "
527
+ "requirements along with the hashes their downloaded archives "
528
+ "actually had. Add lines like these to your requirements files to "
529
+ "prevent tampering. (If you did not enable --require-hashes "
530
+ "manually, note that it turns on automatically when any package "
531
+ "has a hash.)"
532
+ )
533
+
534
+ def __init__(self, gotten_hash: str) -> None:
535
+ """
536
+ :param gotten_hash: The hash of the (possibly malicious) archive we
537
+ just downloaded
538
+ """
539
+ self.gotten_hash = gotten_hash
540
+
541
+ def body(self) -> str:
542
+ # Dodge circular import.
543
+ from pip._internal.utils.hashes import FAVORITE_HASH
544
+
545
+ package = None
546
+ if self.req:
547
+ # In the case of URL-based requirements, display the original URL
548
+ # seen in the requirements file rather than the package name,
549
+ # so the output can be directly copied into the requirements file.
550
+ package = (
551
+ self.req.original_link
552
+ if self.req.original_link
553
+ # In case someone feeds something downright stupid
554
+ # to InstallRequirement's constructor.
555
+ else getattr(self.req, "req", None)
556
+ )
557
+ return " {} --hash={}:{}".format(
558
+ package or "unknown package", FAVORITE_HASH, self.gotten_hash
559
+ )
560
+
561
+
562
+ class HashUnpinned(HashError):
563
+ """A requirement had a hash specified but was not pinned to a specific
564
+ version."""
565
+
566
+ order = 3
567
+ head = (
568
+ "In --require-hashes mode, all requirements must have their "
569
+ "versions pinned with ==. These do not:"
570
+ )
571
+
572
+
573
+ class HashMismatch(HashError):
574
+ """
575
+ Distribution file hash values don't match.
576
+
577
+ :ivar package_name: The name of the package that triggered the hash
578
+ mismatch. Feel free to write to this after the exception is raised to
579
+ improve its error message.
580
+
581
+ """
582
+
583
+ order = 4
584
+ head = (
585
+ "THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS "
586
+ "FILE. If you have updated the package versions, please update "
587
+ "the hashes. Otherwise, examine the package contents carefully; "
588
+ "someone may have tampered with them."
589
+ )
590
+
591
+ def __init__(self, allowed: Dict[str, List[str]], gots: Dict[str, "_Hash"]) -> None:
592
+ """
593
+ :param allowed: A dict of algorithm names pointing to lists of allowed
594
+ hex digests
595
+ :param gots: A dict of algorithm names pointing to hashes we
596
+ actually got from the files under suspicion
597
+ """
598
+ self.allowed = allowed
599
+ self.gots = gots
600
+
601
+ def body(self) -> str:
602
+ return " {}:\n{}".format(self._requirement_name(), self._hash_comparison())
603
+
604
+ def _hash_comparison(self) -> str:
605
+ """
606
+ Return a comparison of actual and expected hash values.
607
+
608
+ Example::
609
+
610
+ Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde
611
+ or 123451234512345123451234512345123451234512345
612
+ Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef
613
+
614
+ """
615
+
616
+ def hash_then_or(hash_name: str) -> "chain[str]":
617
+ # For now, all the decent hashes have 6-char names, so we can get
618
+ # away with hard-coding space literals.
619
+ return chain([hash_name], repeat(" or"))
620
+
621
+ lines: List[str] = []
622
+ for hash_name, expecteds in self.allowed.items():
623
+ prefix = hash_then_or(hash_name)
624
+ lines.extend(
625
+ (" Expected {} {}".format(next(prefix), e)) for e in expecteds
626
+ )
627
+ lines.append(
628
+ " Got {}\n".format(self.gots[hash_name].hexdigest())
629
+ )
630
+ return "\n".join(lines)
631
+
632
+
633
+ class UnsupportedPythonVersion(InstallationError):
634
+ """Unsupported python version according to Requires-Python package
635
+ metadata."""
636
+
637
+
638
+ class ConfigurationFileCouldNotBeLoaded(ConfigurationError):
639
+ """When there are errors while loading a configuration file"""
640
+
641
+ def __init__(
642
+ self,
643
+ reason: str = "could not be loaded",
644
+ fname: Optional[str] = None,
645
+ error: Optional[configparser.Error] = None,
646
+ ) -> None:
647
+ super().__init__(error)
648
+ self.reason = reason
649
+ self.fname = fname
650
+ self.error = error
651
+
652
+ def __str__(self) -> str:
653
+ if self.fname is not None:
654
+ message_part = f" in {self.fname}."
655
+ else:
656
+ assert self.error is not None
657
+ message_part = f".\n{self.error}\n"
658
+ return f"Configuration file {self.reason}{message_part}"
env-llmeval/lib/python3.10/site-packages/pip/_internal/main.py ADDED
@@ -0,0 +1,12 @@
1
+ from typing import List, Optional
2
+
3
+
4
+ def main(args: Optional[List[str]] = None) -> int:
5
+ """This is preserved for old console scripts that may still be referencing
6
+ it.
7
+
8
+ For additional details, see https://github.com/pypa/pip/issues/7498.
9
+ """
10
+ from pip._internal.utils.entrypoints import _wrapper
11
+
12
+ return _wrapper(args)
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__init__.py ADDED
@@ -0,0 +1,2 @@
1
+ """A package that contains models that represent entities.
2
+ """
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (253 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc ADDED
Binary file (1.41 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc ADDED
Binary file (7.28 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc ADDED
Binary file (2.73 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc ADDED
Binary file (1.22 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc ADDED
Binary file (10.2 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc ADDED
Binary file (1.02 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc ADDED
Binary file (3.48 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc ADDED
Binary file (1.68 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc ADDED
Binary file (3.43 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (4.35 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/candidate.py ADDED
@@ -0,0 +1,34 @@
1
+ from pip._vendor.packaging.version import parse as parse_version
2
+
3
+ from pip._internal.models.link import Link
4
+ from pip._internal.utils.models import KeyBasedCompareMixin
5
+
6
+
7
+ class InstallationCandidate(KeyBasedCompareMixin):
8
+ """Represents a potential "candidate" for installation."""
9
+
10
+ __slots__ = ["name", "version", "link"]
11
+
12
+ def __init__(self, name: str, version: str, link: Link) -> None:
13
+ self.name = name
14
+ self.version = parse_version(version)
15
+ self.link = link
16
+
17
+ super().__init__(
18
+ key=(self.name, self.version, self.link),
19
+ defining_class=InstallationCandidate,
20
+ )
21
+
22
+ def __repr__(self) -> str:
23
+ return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
24
+ self.name,
25
+ self.version,
26
+ self.link,
27
+ )
28
+
29
+ def __str__(self) -> str:
30
+ return "{!r} candidate (version {} at {})".format(
31
+ self.name,
32
+ self.version,
33
+ self.link,
34
+ )
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/direct_url.py ADDED
@@ -0,0 +1,220 @@
1
+ """ PEP 610 """
2
+ import json
3
+ import re
4
+ import urllib.parse
5
+ from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
6
+
7
+ __all__ = [
8
+ "DirectUrl",
9
+ "DirectUrlValidationError",
10
+ "DirInfo",
11
+ "ArchiveInfo",
12
+ "VcsInfo",
13
+ ]
14
+
15
+ T = TypeVar("T")
16
+
17
+ DIRECT_URL_METADATA_NAME = "direct_url.json"
18
+ ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
19
+
20
+
21
+ class DirectUrlValidationError(Exception):
22
+ pass
23
+
24
+
25
+ def _get(
26
+ d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
27
+ ) -> Optional[T]:
28
+ """Get value from dictionary and verify expected type."""
29
+ if key not in d:
30
+ return default
31
+ value = d[key]
32
+ if not isinstance(value, expected_type):
33
+ raise DirectUrlValidationError(
34
+ "{!r} has unexpected type for {} (expected {})".format(
35
+ value, key, expected_type
36
+ )
37
+ )
38
+ return value
39
+
40
+
41
+ def _get_required(
42
+ d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
43
+ ) -> T:
44
+ value = _get(d, expected_type, key, default)
45
+ if value is None:
46
+ raise DirectUrlValidationError(f"{key} must have a value")
47
+ return value
48
+
49
+
50
+ def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
51
+ infos = [info for info in infos if info is not None]
52
+ if not infos:
53
+ raise DirectUrlValidationError(
54
+ "missing one of archive_info, dir_info, vcs_info"
55
+ )
56
+ if len(infos) > 1:
57
+ raise DirectUrlValidationError(
58
+ "more than one of archive_info, dir_info, vcs_info"
59
+ )
60
+ assert infos[0] is not None
61
+ return infos[0]
62
+
63
+
64
+ def _filter_none(**kwargs: Any) -> Dict[str, Any]:
65
+ """Make dict excluding None values."""
66
+ return {k: v for k, v in kwargs.items() if v is not None}
67
+
68
+
69
+ class VcsInfo:
70
+ name = "vcs_info"
71
+
72
+ def __init__(
73
+ self,
74
+ vcs: str,
75
+ commit_id: str,
76
+ requested_revision: Optional[str] = None,
77
+ resolved_revision: Optional[str] = None,
78
+ resolved_revision_type: Optional[str] = None,
79
+ ) -> None:
80
+ self.vcs = vcs
81
+ self.requested_revision = requested_revision
82
+ self.commit_id = commit_id
83
+ self.resolved_revision = resolved_revision
84
+ self.resolved_revision_type = resolved_revision_type
85
+
86
+ @classmethod
87
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
88
+ if d is None:
89
+ return None
90
+ return cls(
91
+ vcs=_get_required(d, str, "vcs"),
92
+ commit_id=_get_required(d, str, "commit_id"),
93
+ requested_revision=_get(d, str, "requested_revision"),
94
+ resolved_revision=_get(d, str, "resolved_revision"),
95
+ resolved_revision_type=_get(d, str, "resolved_revision_type"),
96
+ )
97
+
98
+ def _to_dict(self) -> Dict[str, Any]:
99
+ return _filter_none(
100
+ vcs=self.vcs,
101
+ requested_revision=self.requested_revision,
102
+ commit_id=self.commit_id,
103
+ resolved_revision=self.resolved_revision,
104
+ resolved_revision_type=self.resolved_revision_type,
105
+ )
106
+
107
+
108
+ class ArchiveInfo:
109
+ name = "archive_info"
110
+
111
+ def __init__(
112
+ self,
113
+ hash: Optional[str] = None,
114
+ ) -> None:
115
+ self.hash = hash
116
+
117
+ @classmethod
118
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
119
+ if d is None:
120
+ return None
121
+ return cls(hash=_get(d, str, "hash"))
122
+
123
+ def _to_dict(self) -> Dict[str, Any]:
124
+ return _filter_none(hash=self.hash)
125
+
126
+
127
+ class DirInfo:
128
+ name = "dir_info"
129
+
130
+ def __init__(
131
+ self,
132
+ editable: bool = False,
133
+ ) -> None:
134
+ self.editable = editable
135
+
136
+ @classmethod
137
+ def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
138
+ if d is None:
139
+ return None
140
+ return cls(editable=_get_required(d, bool, "editable", default=False))
141
+
142
+ def _to_dict(self) -> Dict[str, Any]:
143
+ return _filter_none(editable=self.editable or None)
144
+
145
+
146
+ InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
147
+
148
+
149
+ class DirectUrl:
150
+ def __init__(
151
+ self,
152
+ url: str,
153
+ info: InfoType,
154
+ subdirectory: Optional[str] = None,
155
+ ) -> None:
156
+ self.url = url
157
+ self.info = info
158
+ self.subdirectory = subdirectory
159
+
160
+ def _remove_auth_from_netloc(self, netloc: str) -> str:
161
+ if "@" not in netloc:
162
+ return netloc
163
+ user_pass, netloc_no_user_pass = netloc.split("@", 1)
164
+ if (
165
+ isinstance(self.info, VcsInfo)
166
+ and self.info.vcs == "git"
167
+ and user_pass == "git"
168
+ ):
169
+ return netloc
170
+ if ENV_VAR_RE.match(user_pass):
171
+ return netloc
172
+ return netloc_no_user_pass
173
+
174
+ @property
175
+ def redacted_url(self) -> str:
176
+ """url with user:password part removed unless it is formed with
177
+ environment variables as specified in PEP 610, or it is ``git``
178
+ in the case of a git URL.
179
+ """
180
+ purl = urllib.parse.urlsplit(self.url)
181
+ netloc = self._remove_auth_from_netloc(purl.netloc)
182
+ surl = urllib.parse.urlunsplit(
183
+ (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
184
+ )
185
+ return surl
186
+
187
+ def validate(self) -> None:
188
+ self.from_dict(self.to_dict())
189
+
190
+ @classmethod
191
+ def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
192
+ return DirectUrl(
193
+ url=_get_required(d, str, "url"),
194
+ subdirectory=_get(d, str, "subdirectory"),
195
+ info=_exactly_one_of(
196
+ [
197
+ ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
198
+ DirInfo._from_dict(_get(d, dict, "dir_info")),
199
+ VcsInfo._from_dict(_get(d, dict, "vcs_info")),
200
+ ]
201
+ ),
202
+ )
203
+
204
+ def to_dict(self) -> Dict[str, Any]:
205
+ res = _filter_none(
206
+ url=self.redacted_url,
207
+ subdirectory=self.subdirectory,
208
+ )
209
+ res[self.info.name] = self.info._to_dict()
210
+ return res
211
+
212
+ @classmethod
213
+ def from_json(cls, s: str) -> "DirectUrl":
214
+ return cls.from_dict(json.loads(s))
215
+
216
+ def to_json(self) -> str:
217
+ return json.dumps(self.to_dict(), sort_keys=True)
218
+
219
+ def is_local_editable(self) -> bool:
220
+ return isinstance(self.info, DirInfo) and self.info.editable
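Editorial note (not part of the added file): DirectUrl above implements the PEP 610 direct_url.json round trip, and to_dict()/redacted_url strip credentials from the stored URL. A small sketch, assuming these private modules are importable; the URL and hash below are made up:

    from pip._internal.models.direct_url import ArchiveInfo, DirectUrl

    direct_url = DirectUrl(
        url="https://user:secret@example.com/pkg-1.0.tar.gz",  # hypothetical URL with auth
        info=ArchiveInfo(hash="sha256=deadbeef"),               # hypothetical archive hash
    )
    as_json = direct_url.to_json()            # serialized under the "archive_info" key
    restored = DirectUrl.from_json(as_json)   # validates and rebuilds the object
    print(restored.redacted_url)              # https://example.com/pkg-1.0.tar.gz (auth removed)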
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/format_control.py ADDED
@@ -0,0 +1,80 @@
1
+ from typing import FrozenSet, Optional, Set
2
+
3
+ from pip._vendor.packaging.utils import canonicalize_name
4
+
5
+ from pip._internal.exceptions import CommandError
6
+
7
+
8
+ class FormatControl:
9
+ """Helper for managing formats from which a package can be installed."""
10
+
11
+ __slots__ = ["no_binary", "only_binary"]
12
+
13
+ def __init__(
14
+ self,
15
+ no_binary: Optional[Set[str]] = None,
16
+ only_binary: Optional[Set[str]] = None,
17
+ ) -> None:
18
+ if no_binary is None:
19
+ no_binary = set()
20
+ if only_binary is None:
21
+ only_binary = set()
22
+
23
+ self.no_binary = no_binary
24
+ self.only_binary = only_binary
25
+
26
+ def __eq__(self, other: object) -> bool:
27
+ if not isinstance(other, self.__class__):
28
+ return NotImplemented
29
+
30
+ if self.__slots__ != other.__slots__:
31
+ return False
32
+
33
+ return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
34
+
35
+ def __repr__(self) -> str:
36
+ return "{}({}, {})".format(
37
+ self.__class__.__name__, self.no_binary, self.only_binary
38
+ )
39
+
40
+ @staticmethod
41
+ def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
42
+ if value.startswith("-"):
43
+ raise CommandError(
44
+ "--no-binary / --only-binary option requires 1 argument."
45
+ )
46
+ new = value.split(",")
47
+ while ":all:" in new:
48
+ other.clear()
49
+ target.clear()
50
+ target.add(":all:")
51
+ del new[: new.index(":all:") + 1]
52
+ # Without a :none:, we want to discard everything, as :all: covers it
53
+ if ":none:" not in new:
54
+ return
55
+ for name in new:
56
+ if name == ":none:":
57
+ target.clear()
58
+ continue
59
+ name = canonicalize_name(name)
60
+ other.discard(name)
61
+ target.add(name)
62
+
63
+ def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
64
+ result = {"binary", "source"}
65
+ if canonical_name in self.only_binary:
66
+ result.discard("source")
67
+ elif canonical_name in self.no_binary:
68
+ result.discard("binary")
69
+ elif ":all:" in self.only_binary:
70
+ result.discard("source")
71
+ elif ":all:" in self.no_binary:
72
+ result.discard("binary")
73
+ return frozenset(result)
74
+
75
+ def disallow_binaries(self) -> None:
76
+ self.handle_mutual_excludes(
77
+ ":all:",
78
+ self.no_binary,
79
+ self.only_binary,
80
+ )
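Editorial note (not part of the added file): handle_mutual_excludes() above is what backs pip's --no-binary/--only-binary options, with the special :all: and :none: values clearing and resetting the sets. A minimal sketch of that behaviour, assuming the private module is importable:

    from pip._internal.models.format_control import FormatControl

    fc = FormatControl()
    # ":all:" forbids binaries for every package...
    FormatControl.handle_mutual_excludes(":all:", fc.no_binary, fc.only_binary)
    # ...and a later ":none:,numpy" re-allows them everywhere except for numpy.
    FormatControl.handle_mutual_excludes(":none:,numpy", fc.no_binary, fc.only_binary)

    print(fc.get_allowed_formats("numpy"))     # frozenset({'source'})
    print(fc.get_allowed_formats("requests"))  # both 'binary' and 'source' allowed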
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/index.py ADDED
@@ -0,0 +1,28 @@
1
+ import urllib.parse
2
+
3
+
4
+ class PackageIndex:
5
+ """Represents a Package Index and provides easier access to endpoints"""
6
+
7
+ __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
8
+
9
+ def __init__(self, url: str, file_storage_domain: str) -> None:
10
+ super().__init__()
11
+ self.url = url
12
+ self.netloc = urllib.parse.urlsplit(url).netloc
13
+ self.simple_url = self._url_for_path("simple")
14
+ self.pypi_url = self._url_for_path("pypi")
15
+
16
+ # This is part of a temporary hack used to block installs of PyPI
17
+ # packages which depend on external urls only necessary until PyPI can
18
+ # block such packages themselves
19
+ self.file_storage_domain = file_storage_domain
20
+
21
+ def _url_for_path(self, path: str) -> str:
22
+ return urllib.parse.urljoin(self.url, path)
23
+
24
+
25
+ PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
26
+ TestPyPI = PackageIndex(
27
+ "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
28
+ )
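Editorial note (not part of the added file): PackageIndex simply pre-computes the simple and pypi endpoints from a base URL, and the module exposes ready-made PyPI and TestPyPI instances. For example, assuming the private module is importable:

    from pip._internal.models.index import PyPI

    print(PyPI.simple_url)  # https://pypi.org/simple
    print(PyPI.pypi_url)    # https://pypi.org/pypi
    print(PyPI.netloc)      # pypi.org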
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/link.py ADDED
@@ -0,0 +1,288 @@
1
+ import functools
2
+ import logging
3
+ import os
4
+ import posixpath
5
+ import re
6
+ import urllib.parse
7
+ from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union
8
+
9
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
10
+ from pip._internal.utils.hashes import Hashes
11
+ from pip._internal.utils.misc import (
12
+ redact_auth_from_url,
13
+ split_auth_from_netloc,
14
+ splitext,
15
+ )
16
+ from pip._internal.utils.models import KeyBasedCompareMixin
17
+ from pip._internal.utils.urls import path_to_url, url_to_path
18
+
19
+ if TYPE_CHECKING:
20
+ from pip._internal.index.collector import HTMLPage
21
+
22
+ logger = logging.getLogger(__name__)
23
+
24
+
25
+ _SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5")
26
+
27
+
28
+ class Link(KeyBasedCompareMixin):
29
+ """Represents a parsed link from a Package Index's simple URL"""
30
+
31
+ __slots__ = [
32
+ "_parsed_url",
33
+ "_url",
34
+ "comes_from",
35
+ "requires_python",
36
+ "yanked_reason",
37
+ "cache_link_parsing",
38
+ ]
39
+
40
+ def __init__(
41
+ self,
42
+ url: str,
43
+ comes_from: Optional[Union[str, "HTMLPage"]] = None,
44
+ requires_python: Optional[str] = None,
45
+ yanked_reason: Optional[str] = None,
46
+ cache_link_parsing: bool = True,
47
+ ) -> None:
48
+ """
49
+ :param url: url of the resource pointed to (href of the link)
50
+ :param comes_from: instance of HTMLPage where the link was found,
51
+ or string.
52
+ :param requires_python: String containing the `Requires-Python`
53
+ metadata field, specified in PEP 345. This may be specified by
54
+ a data-requires-python attribute in the HTML link tag, as
55
+ described in PEP 503.
56
+ :param yanked_reason: the reason the file has been yanked, if the
57
+ file has been yanked, or None if the file hasn't been yanked.
58
+ This is the value of the "data-yanked" attribute, if present, in
59
+ a simple repository HTML link. If the file has been yanked but
60
+ no reason was provided, this should be the empty string. See
61
+ PEP 592 for more information and the specification.
62
+ :param cache_link_parsing: A flag that is used elsewhere to determine
63
+ whether resources retrieved from this link
64
+ should be cached. PyPI index urls should
65
+ generally have this set to False, for
66
+ example.
67
+ """
68
+
69
+ # url can be a UNC windows share
70
+ if url.startswith("\\\\"):
71
+ url = path_to_url(url)
72
+
73
+ self._parsed_url = urllib.parse.urlsplit(url)
74
+ # Store the url as a private attribute to prevent accidentally
75
+ # trying to set a new value.
76
+ self._url = url
77
+
78
+ self.comes_from = comes_from
79
+ self.requires_python = requires_python if requires_python else None
80
+ self.yanked_reason = yanked_reason
81
+
82
+ super().__init__(key=url, defining_class=Link)
83
+
84
+ self.cache_link_parsing = cache_link_parsing
85
+
86
+ def __str__(self) -> str:
87
+ if self.requires_python:
88
+ rp = f" (requires-python:{self.requires_python})"
89
+ else:
90
+ rp = ""
91
+ if self.comes_from:
92
+ return "{} (from {}){}".format(
93
+ redact_auth_from_url(self._url), self.comes_from, rp
94
+ )
95
+ else:
96
+ return redact_auth_from_url(str(self._url))
97
+
98
+ def __repr__(self) -> str:
99
+ return f"<Link {self}>"
100
+
101
+ @property
102
+ def url(self) -> str:
103
+ return self._url
104
+
105
+ @property
106
+ def filename(self) -> str:
107
+ path = self.path.rstrip("/")
108
+ name = posixpath.basename(path)
109
+ if not name:
110
+ # Make sure we don't leak auth information if the netloc
111
+ # includes a username and password.
112
+ netloc, user_pass = split_auth_from_netloc(self.netloc)
113
+ return netloc
114
+
115
+ name = urllib.parse.unquote(name)
116
+ assert name, f"URL {self._url!r} produced no filename"
117
+ return name
118
+
119
+ @property
120
+ def file_path(self) -> str:
121
+ return url_to_path(self.url)
122
+
123
+ @property
124
+ def scheme(self) -> str:
125
+ return self._parsed_url.scheme
126
+
127
+ @property
128
+ def netloc(self) -> str:
129
+ """
130
+ This can contain auth information.
131
+ """
132
+ return self._parsed_url.netloc
133
+
134
+ @property
135
+ def path(self) -> str:
136
+ return urllib.parse.unquote(self._parsed_url.path)
137
+
138
+ def splitext(self) -> Tuple[str, str]:
139
+ return splitext(posixpath.basename(self.path.rstrip("/")))
140
+
141
+ @property
142
+ def ext(self) -> str:
143
+ return self.splitext()[1]
144
+
145
+ @property
146
+ def url_without_fragment(self) -> str:
147
+ scheme, netloc, path, query, fragment = self._parsed_url
148
+ return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
149
+
150
+ _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
151
+
152
+ @property
153
+ def egg_fragment(self) -> Optional[str]:
154
+ match = self._egg_fragment_re.search(self._url)
155
+ if not match:
156
+ return None
157
+ return match.group(1)
158
+
159
+ _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
160
+
161
+ @property
162
+ def subdirectory_fragment(self) -> Optional[str]:
163
+ match = self._subdirectory_fragment_re.search(self._url)
164
+ if not match:
165
+ return None
166
+ return match.group(1)
167
+
168
+ _hash_re = re.compile(
169
+ r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES))
170
+ )
171
+
172
+ @property
173
+ def hash(self) -> Optional[str]:
174
+ match = self._hash_re.search(self._url)
175
+ if match:
176
+ return match.group(2)
177
+ return None
178
+
179
+ @property
180
+ def hash_name(self) -> Optional[str]:
181
+ match = self._hash_re.search(self._url)
182
+ if match:
183
+ return match.group(1)
184
+ return None
185
+
186
+ @property
187
+ def show_url(self) -> str:
188
+ return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
189
+
190
+ @property
191
+ def is_file(self) -> bool:
192
+ return self.scheme == "file"
193
+
194
+ def is_existing_dir(self) -> bool:
195
+ return self.is_file and os.path.isdir(self.file_path)
196
+
197
+ @property
198
+ def is_wheel(self) -> bool:
199
+ return self.ext == WHEEL_EXTENSION
200
+
201
+ @property
202
+ def is_vcs(self) -> bool:
203
+ from pip._internal.vcs import vcs
204
+
205
+ return self.scheme in vcs.all_schemes
206
+
207
+ @property
208
+ def is_yanked(self) -> bool:
209
+ return self.yanked_reason is not None
210
+
211
+ @property
212
+ def has_hash(self) -> bool:
213
+ return self.hash_name is not None
214
+
215
+ def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
216
+ """
217
+ Return True if the link has a hash and it is allowed.
218
+ """
219
+ if hashes is None or not self.has_hash:
220
+ return False
221
+ # Assert non-None so mypy knows self.hash_name and self.hash are str.
222
+ assert self.hash_name is not None
223
+ assert self.hash is not None
224
+
225
+ return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
226
+
227
+
228
+ class _CleanResult(NamedTuple):
229
+ """Convert link for equivalency check.
230
+
231
+ This is used in the resolver to check whether two URL-specified requirements
232
+ likely point to the same distribution and can be considered equivalent. This
233
+ equivalency logic avoids comparing URLs literally, which can be too strict
234
+ (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.
235
+
236
+ Currently this does three things:
237
+
238
+ 1. Drop the basic auth part. This is technically wrong since a server can
239
+ serve different content based on auth, but if it does that, it is even
240
+ impossible to guarantee two URLs without auth are equivalent, since
241
+ the user can input different auth information when prompted. So the
242
+ practical solution is to assume the auth doesn't affect the response.
243
+ 2. Parse the query to avoid the ordering issue. Note that ordering under the
244
+ same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
245
+ still considered different.
246
+ 3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
247
+ hash values, since it should have no impact on the downloaded content. Note
248
+ that this drops the "egg=" part historically used to denote the requested
249
+ project (and extras), which is wrong in the strictest sense, but too many
250
+ people are supplying it inconsistently to cause superfluous resolution
251
+ conflicts, so we choose to also ignore them.
252
+ """
253
+
254
+ parsed: urllib.parse.SplitResult
255
+ query: Dict[str, List[str]]
256
+ subdirectory: str
257
+ hashes: Dict[str, str]
258
+
259
+
260
+ def _clean_link(link: Link) -> _CleanResult:
261
+ parsed = link._parsed_url
262
+ netloc = parsed.netloc.rsplit("@", 1)[-1]
263
+ # According to RFC 8089, an empty host in file: means localhost.
264
+ if parsed.scheme == "file" and not netloc:
265
+ netloc = "localhost"
266
+ fragment = urllib.parse.parse_qs(parsed.fragment)
267
+ if "egg" in fragment:
268
+ logger.debug("Ignoring egg= fragment in %s", link)
269
+ try:
270
+ # If there are multiple subdirectory values, use the first one.
271
+ # This matches the behavior of Link.subdirectory_fragment.
272
+ subdirectory = fragment["subdirectory"][0]
273
+ except (IndexError, KeyError):
274
+ subdirectory = ""
275
+ # If there are multiple hash values under the same algorithm, use the
276
+ # first one. This matches the behavior of Link.hash_value.
277
+ hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
278
+ return _CleanResult(
279
+ parsed=parsed._replace(netloc=netloc, query="", fragment=""),
280
+ query=urllib.parse.parse_qs(parsed.query),
281
+ subdirectory=subdirectory,
282
+ hashes=hashes,
283
+ )
284
+
285
+
286
+ @functools.lru_cache(maxsize=None)
287
+ def links_equivalent(link1: Link, link2: Link) -> bool:
288
+ return _clean_link(link1) == _clean_link(link2)
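Editorial note (not part of the added file): Link above parses hash, egg= and subdirectory= fragments out of an index URL, and links_equivalent() uses _clean_link() so that query ordering and the egg= fragment do not make two URLs look different. A sketch with made-up URLs, assuming the private module is importable:

    from pip._internal.models.link import Link, links_equivalent

    link = Link("https://example.com/pkg-1.0-py3-none-any.whl#sha256=abc123&subdirectory=src")
    print(link.filename)               # pkg-1.0-py3-none-any.whl
    print(link.is_wheel)               # True
    print(link.hash_name, link.hash)   # sha256 abc123
    print(link.subdirectory_fragment)  # src

    # Query ordering and the egg= fragment are ignored by the equivalency check.
    a = Link("https://example.com/pkg.tar.gz?a=1&b=2#egg=pkg")
    b = Link("https://example.com/pkg.tar.gz?b=2&a=1")
    print(links_equivalent(a, b))      # True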
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/scheme.py ADDED
@@ -0,0 +1,31 @@
1
+ """
2
+ For types associated with installation schemes.
3
+
4
+ For a general overview of available schemes and their context, see
5
+ https://docs.python.org/3/install/index.html#alternate-installation.
6
+ """
7
+
8
+
9
+ SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
10
+
11
+
12
+ class Scheme:
13
+ """A Scheme holds paths which are used as the base directories for
14
+ artifacts associated with a Python package.
15
+ """
16
+
17
+ __slots__ = SCHEME_KEYS
18
+
19
+ def __init__(
20
+ self,
21
+ platlib: str,
22
+ purelib: str,
23
+ headers: str,
24
+ scripts: str,
25
+ data: str,
26
+ ) -> None:
27
+ self.platlib = platlib
28
+ self.purelib = purelib
29
+ self.headers = headers
30
+ self.scripts = scripts
31
+ self.data = data
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/search_scope.py ADDED
@@ -0,0 +1,129 @@
1
+ import itertools
2
+ import logging
3
+ import os
4
+ import posixpath
5
+ import urllib.parse
6
+ from typing import List
7
+
8
+ from pip._vendor.packaging.utils import canonicalize_name
9
+
10
+ from pip._internal.models.index import PyPI
11
+ from pip._internal.utils.compat import has_tls
12
+ from pip._internal.utils.misc import normalize_path, redact_auth_from_url
13
+
14
+ logger = logging.getLogger(__name__)
15
+
16
+
17
+ class SearchScope:
18
+
19
+ """
20
+ Encapsulates the locations that pip is configured to search.
21
+ """
22
+
23
+ __slots__ = ["find_links", "index_urls"]
24
+
25
+ @classmethod
26
+ def create(
27
+ cls,
28
+ find_links: List[str],
29
+ index_urls: List[str],
30
+ ) -> "SearchScope":
31
+ """
32
+ Create a SearchScope object after normalizing the `find_links`.
33
+ """
34
+ # Build find_links. If an argument starts with ~, it may be
35
+ # a local file relative to a home directory. So try normalizing
36
+ # it and if it exists, use the normalized version.
37
+ # This is deliberately conservative - it might be fine just to
38
+ # blindly normalize anything starting with a ~...
39
+ built_find_links: List[str] = []
40
+ for link in find_links:
41
+ if link.startswith("~"):
42
+ new_link = normalize_path(link)
43
+ if os.path.exists(new_link):
44
+ link = new_link
45
+ built_find_links.append(link)
46
+
47
+ # If we don't have TLS enabled, then WARN if anyplace we're looking
48
+ # relies on TLS.
49
+ if not has_tls():
50
+ for link in itertools.chain(index_urls, built_find_links):
51
+ parsed = urllib.parse.urlparse(link)
52
+ if parsed.scheme == "https":
53
+ logger.warning(
54
+ "pip is configured with locations that require "
55
+ "TLS/SSL, however the ssl module in Python is not "
56
+ "available."
57
+ )
58
+ break
59
+
60
+ return cls(
61
+ find_links=built_find_links,
62
+ index_urls=index_urls,
63
+ )
64
+
65
+ def __init__(
66
+ self,
67
+ find_links: List[str],
68
+ index_urls: List[str],
69
+ ) -> None:
70
+ self.find_links = find_links
71
+ self.index_urls = index_urls
72
+
73
+ def get_formatted_locations(self) -> str:
74
+ lines = []
75
+ redacted_index_urls = []
76
+ if self.index_urls and self.index_urls != [PyPI.simple_url]:
77
+ for url in self.index_urls:
78
+
79
+ redacted_index_url = redact_auth_from_url(url)
80
+
81
+ # Parse the URL
82
+ purl = urllib.parse.urlsplit(redacted_index_url)
83
+
84
+ # URL is generally invalid if scheme and netloc are missing
85
+ # there are issues with Python and URL parsing, so this test
86
+ # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
87
+ # always parse invalid URLs correctly - it should raise
88
+ # exceptions for malformed URLs
89
+ if not purl.scheme and not purl.netloc:
90
+ logger.warning(
91
+ 'The index url "%s" seems invalid, please provide a scheme.',
92
+ redacted_index_url,
93
+ )
94
+
95
+ redacted_index_urls.append(redacted_index_url)
96
+
97
+ lines.append(
98
+ "Looking in indexes: {}".format(", ".join(redacted_index_urls))
99
+ )
100
+
101
+ if self.find_links:
102
+ lines.append(
103
+ "Looking in links: {}".format(
104
+ ", ".join(redact_auth_from_url(url) for url in self.find_links)
105
+ )
106
+ )
107
+ return "\n".join(lines)
108
+
109
+ def get_index_urls_locations(self, project_name: str) -> List[str]:
110
+ """Returns the locations found via self.index_urls
111
+
112
+ Checks the url_name on the main (first in the list) index and
113
+ use this url_name to produce all locations
114
+ """
115
+
116
+ def mkurl_pypi_url(url: str) -> str:
117
+ loc = posixpath.join(
118
+ url, urllib.parse.quote(canonicalize_name(project_name))
119
+ )
120
+ # For maximum compatibility with easy_install, ensure the path
121
+ # ends in a trailing slash. Although this isn't in the spec
122
+ # (and PyPI can handle it without the slash) some other index
123
+ # implementations might break if they relied on easy_install's
124
+ # behavior.
125
+ if not loc.endswith("/"):
126
+ loc = loc + "/"
127
+ return loc
128
+
129
+ return [mkurl_pypi_url(url) for url in self.index_urls]
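Editorial note (not part of the added file): SearchScope.create() normalizes find-links entries and warns about https indexes when TLS is unavailable, and get_index_urls_locations() builds the per-project simple-index URLs. A sketch, assuming the private module is importable:

    from pip._internal.models.search_scope import SearchScope

    scope = SearchScope.create(find_links=[], index_urls=["https://pypi.org/simple/"])
    print(scope.get_index_urls_locations("Sphinx_RTD_Theme"))
    # ['https://pypi.org/simple/sphinx-rtd-theme/']  (name canonicalized, trailing slash added)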
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py ADDED
@@ -0,0 +1,51 @@
1
+ from typing import Optional
2
+
3
+ from pip._internal.models.format_control import FormatControl
4
+
5
+
6
+ class SelectionPreferences:
7
+ """
8
+ Encapsulates the candidate selection preferences for downloading
9
+ and installing files.
10
+ """
11
+
12
+ __slots__ = [
13
+ "allow_yanked",
14
+ "allow_all_prereleases",
15
+ "format_control",
16
+ "prefer_binary",
17
+ "ignore_requires_python",
18
+ ]
19
+
20
+ # Don't include an allow_yanked default value to make sure each call
21
+ # site considers whether yanked releases are allowed. This also causes
22
+ # that decision to be made explicit in the calling code, which helps
23
+ # people when reading the code.
24
+ def __init__(
25
+ self,
26
+ allow_yanked: bool,
27
+ allow_all_prereleases: bool = False,
28
+ format_control: Optional[FormatControl] = None,
29
+ prefer_binary: bool = False,
30
+ ignore_requires_python: Optional[bool] = None,
31
+ ) -> None:
32
+ """Create a SelectionPreferences object.
33
+
34
+ :param allow_yanked: Whether files marked as yanked (in the sense
35
+ of PEP 592) are permitted to be candidates for install.
36
+ :param format_control: A FormatControl object or None. Used to control
37
+ the selection of source packages / binary packages when consulting
38
+ the index and links.
39
+ :param prefer_binary: Whether to prefer an old, but valid, binary
40
+ dist over a new source dist.
41
+ :param ignore_requires_python: Whether to ignore incompatible
42
+ "Requires-Python" values in links. Defaults to False.
43
+ """
44
+ if ignore_requires_python is None:
45
+ ignore_requires_python = False
46
+
47
+ self.allow_yanked = allow_yanked
48
+ self.allow_all_prereleases = allow_all_prereleases
49
+ self.format_control = format_control
50
+ self.prefer_binary = prefer_binary
51
+ self.ignore_requires_python = ignore_requires_python
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/target_python.py ADDED
@@ -0,0 +1,110 @@
1
+ import sys
2
+ from typing import List, Optional, Tuple
3
+
4
+ from pip._vendor.packaging.tags import Tag
5
+
6
+ from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
7
+ from pip._internal.utils.misc import normalize_version_info
8
+
9
+
10
+ class TargetPython:
11
+
12
+ """
13
+ Encapsulates the properties of a Python interpreter one is targeting
14
+ for a package install, download, etc.
15
+ """
16
+
17
+ __slots__ = [
18
+ "_given_py_version_info",
19
+ "abis",
20
+ "implementation",
21
+ "platforms",
22
+ "py_version",
23
+ "py_version_info",
24
+ "_valid_tags",
25
+ ]
26
+
27
+ def __init__(
28
+ self,
29
+ platforms: Optional[List[str]] = None,
30
+ py_version_info: Optional[Tuple[int, ...]] = None,
31
+ abis: Optional[List[str]] = None,
32
+ implementation: Optional[str] = None,
33
+ ) -> None:
34
+ """
35
+ :param platforms: A list of strings or None. If None, searches for
36
+ packages that are supported by the current system. Otherwise, will
37
+ find packages that can be built on the platforms passed in. These
38
+ packages will only be downloaded for distribution: they will
39
+ not be built locally.
40
+ :param py_version_info: An optional tuple of ints representing the
41
+ Python version information to use (e.g. `sys.version_info[:3]`).
42
+ This can have length 1, 2, or 3 when provided.
43
+ :param abis: A list of strings or None. This is passed to
44
+ compatibility_tags.py's get_supported() function as is.
45
+ :param implementation: A string or None. This is passed to
46
+ compatibility_tags.py's get_supported() function as is.
47
+ """
48
+ # Store the given py_version_info for when we call get_supported().
49
+ self._given_py_version_info = py_version_info
50
+
51
+ if py_version_info is None:
52
+ py_version_info = sys.version_info[:3]
53
+ else:
54
+ py_version_info = normalize_version_info(py_version_info)
55
+
56
+ py_version = ".".join(map(str, py_version_info[:2]))
57
+
58
+ self.abis = abis
59
+ self.implementation = implementation
60
+ self.platforms = platforms
61
+ self.py_version = py_version
62
+ self.py_version_info = py_version_info
63
+
64
+ # This is used to cache the return value of get_tags().
65
+ self._valid_tags: Optional[List[Tag]] = None
66
+
67
+ def format_given(self) -> str:
68
+ """
69
+ Format the given, non-None attributes for display.
70
+ """
71
+ display_version = None
72
+ if self._given_py_version_info is not None:
73
+ display_version = ".".join(
74
+ str(part) for part in self._given_py_version_info
75
+ )
76
+
77
+ key_values = [
78
+ ("platforms", self.platforms),
79
+ ("version_info", display_version),
80
+ ("abis", self.abis),
81
+ ("implementation", self.implementation),
82
+ ]
83
+ return " ".join(
84
+ f"{key}={value!r}" for key, value in key_values if value is not None
85
+ )
86
+
87
+ def get_tags(self) -> List[Tag]:
88
+ """
89
+ Return the supported PEP 425 tags to check wheel candidates against.
90
+
91
+ The tags are returned in order of preference (most preferred first).
92
+ """
93
+ if self._valid_tags is None:
94
+ # Pass versions=None if no py_version_info was given since
95
+ # versions=None uses special default logic.
96
+ py_version_info = self._given_py_version_info
97
+ if py_version_info is None:
98
+ version = None
99
+ else:
100
+ version = version_info_to_nodot(py_version_info)
101
+
102
+ tags = get_supported(
103
+ version=version,
104
+ platforms=self.platforms,
105
+ abis=self.abis,
106
+ impl=self.implementation,
107
+ )
108
+ self._valid_tags = tags
109
+
110
+ return self._valid_tags
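Editorial note (not part of the added file): with no arguments TargetPython describes the running interpreter, and get_tags() lazily computes and caches the supported PEP 425 tags. A sketch, assuming the private module is importable:

    from pip._internal.models.target_python import TargetPython

    target = TargetPython()       # no constraints: target the current interpreter
    print(target.py_version)      # e.g. "3.10"
    tags = target.get_tags()      # most preferred tag first
    print(tags[0])                # e.g. cp310-cp310-manylinux_... on CPython 3.10 / Linux
    print(target.format_given())  # "" because nothing was explicitly given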
env-llmeval/lib/python3.10/site-packages/pip/_internal/models/wheel.py ADDED
@@ -0,0 +1,89 @@
1
+ """Represents a wheel file and provides access to the various parts of the
2
+ name that have meaning.
3
+ """
4
+ import re
5
+ from typing import Dict, Iterable, List
6
+
7
+ from pip._vendor.packaging.tags import Tag
8
+
9
+ from pip._internal.exceptions import InvalidWheelFilename
10
+
11
+
12
+ class Wheel:
13
+ """A wheel file"""
14
+
15
+ wheel_file_re = re.compile(
16
+ r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
17
+ ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
18
+ \.whl|\.dist-info)$""",
19
+ re.VERBOSE,
20
+ )
21
+
22
+ def __init__(self, filename: str) -> None:
23
+ """
24
+ :raises InvalidWheelFilename: when the filename is invalid for a wheel
25
+ """
26
+ wheel_info = self.wheel_file_re.match(filename)
27
+ if not wheel_info:
28
+ raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
29
+ self.filename = filename
30
+ self.name = wheel_info.group("name").replace("_", "-")
31
+ # we'll assume "_" means "-" due to wheel naming scheme
32
+ # (https://github.com/pypa/pip/issues/1150)
33
+ self.version = wheel_info.group("ver").replace("_", "-")
34
+ self.build_tag = wheel_info.group("build")
35
+ self.pyversions = wheel_info.group("pyver").split(".")
36
+ self.abis = wheel_info.group("abi").split(".")
37
+ self.plats = wheel_info.group("plat").split(".")
38
+
39
+ # All the tag combinations from this file
40
+ self.file_tags = {
41
+ Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
42
+ }
43
+
44
+ def get_formatted_file_tags(self) -> List[str]:
45
+ """Return the wheel's tags as a sorted list of strings."""
46
+ return sorted(str(tag) for tag in self.file_tags)
47
+
48
+ def support_index_min(self, tags: List[Tag]) -> int:
49
+ """Return the lowest index that one of the wheel's file_tag combinations
50
+ achieves in the given list of supported tags.
51
+
52
+ For example, if there are 8 supported tags and one of the file tags
53
+ is first in the list, then return 0.
54
+
55
+ :param tags: the PEP 425 tags to check the wheel against, in order
56
+ with most preferred first.
57
+
58
+ :raises ValueError: If none of the wheel's file tags match one of
59
+ the supported tags.
60
+ """
61
+ return min(tags.index(tag) for tag in self.file_tags if tag in tags)
62
+
63
+ def find_most_preferred_tag(
64
+ self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
65
+ ) -> int:
66
+ """Return the priority of the most preferred tag that one of the wheel's file
67
+ tag combinations achieves in the given list of supported tags using the given
68
+ tag_to_priority mapping, where lower priorities are more-preferred.
69
+
70
+ This is used in place of support_index_min in some cases in order to avoid
71
+ an expensive linear scan of a large list of tags.
72
+
73
+ :param tags: the PEP 425 tags to check the wheel against.
74
+ :param tag_to_priority: a mapping from tag to priority of that tag, where
75
+ lower is more preferred.
76
+
77
+ :raises ValueError: If none of the wheel's file tags match one of
78
+ the supported tags.
79
+ """
80
+ return min(
81
+ tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
82
+ )
83
+
84
+ def supported(self, tags: Iterable[Tag]) -> bool:
85
+ """Return whether the wheel is compatible with one of the given tags.
86
+
87
+ :param tags: the PEP 425 tags to check the wheel against.
88
+ """
89
+ return not self.file_tags.isdisjoint(tags)
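Editorial note (not part of the added file): Wheel above parses the filename into name/version/tag components, and support_index_min() ranks the file's tags against a preference-ordered tag list. A sketch with an example filename, assuming the private modules are importable:

    from pip._vendor.packaging.tags import Tag
    from pip._internal.models.wheel import Wheel

    wheel = Wheel("pip-22.0.4-py3-none-any.whl")  # example filename
    print(wheel.name, wheel.version)              # pip 22.0.4
    print(wheel.get_formatted_file_tags())        # ['py3-none-any']

    supported = [Tag("cp310", "cp310", "manylinux1_x86_64"), Tag("py3", "none", "any")]
    print(wheel.supported(supported))             # True
    print(wheel.support_index_min(supported))     # 1: py3-none-any is the second-most preferred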
env-llmeval/lib/python3.10/site-packages/pip/_internal/pyproject.py ADDED
@@ -0,0 +1,168 @@
1
+ import os
2
+ from collections import namedtuple
3
+ from typing import Any, List, Optional
4
+
5
+ from pip._vendor import tomli
6
+ from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
7
+
8
+ from pip._internal.exceptions import (
9
+ InstallationError,
10
+ InvalidPyProjectBuildRequires,
11
+ MissingPyProjectBuildRequires,
12
+ )
13
+
14
+
15
+ def _is_list_of_str(obj: Any) -> bool:
16
+ return isinstance(obj, list) and all(isinstance(item, str) for item in obj)
17
+
18
+
19
+ def make_pyproject_path(unpacked_source_directory: str) -> str:
20
+ return os.path.join(unpacked_source_directory, "pyproject.toml")
21
+
22
+
23
+ BuildSystemDetails = namedtuple(
24
+ "BuildSystemDetails", ["requires", "backend", "check", "backend_path"]
25
+ )
26
+
27
+
28
+ def load_pyproject_toml(
29
+ use_pep517: Optional[bool], pyproject_toml: str, setup_py: str, req_name: str
30
+ ) -> Optional[BuildSystemDetails]:
31
+ """Load the pyproject.toml file.
32
+
33
+ Parameters:
34
+ use_pep517 - Has the user requested PEP 517 processing? None
35
+ means the user hasn't explicitly specified.
36
+ pyproject_toml - Location of the project's pyproject.toml file
37
+ setup_py - Location of the project's setup.py file
38
+ req_name - The name of the requirement we're processing (for
39
+ error reporting)
40
+
41
+ Returns:
42
+ None if we should use the legacy code path, otherwise a tuple
43
+ (
44
+ requirements from pyproject.toml,
45
+ name of PEP 517 backend,
46
+ requirements we should check are installed after setting
47
+ up the build environment,
48
+ directory paths to import the backend from (backend-path),
49
+ relative to the project root.
50
+ )
51
+ """
52
+ has_pyproject = os.path.isfile(pyproject_toml)
53
+ has_setup = os.path.isfile(setup_py)
54
+
55
+ if not has_pyproject and not has_setup:
56
+ raise InstallationError(
57
+ f"{req_name} does not appear to be a Python project: "
58
+ f"neither 'setup.py' nor 'pyproject.toml' found."
59
+ )
60
+
61
+ if has_pyproject:
62
+ with open(pyproject_toml, encoding="utf-8") as f:
63
+ pp_toml = tomli.loads(f.read())
64
+ build_system = pp_toml.get("build-system")
65
+ else:
66
+ build_system = None
67
+
68
+ # The following cases must use PEP 517
69
+ # We check for use_pep517 being non-None and falsey because that means
70
+ # the user explicitly requested --no-use-pep517. The value 0 as
71
+ # opposed to False can occur when the value is provided via an
72
+ # environment variable or config file option (due to the quirk of
73
+ # strtobool() returning an integer in pip's configuration code).
74
+ if has_pyproject and not has_setup:
75
+ if use_pep517 is not None and not use_pep517:
76
+ raise InstallationError(
77
+ "Disabling PEP 517 processing is invalid: "
78
+ "project does not have a setup.py"
79
+ )
80
+ use_pep517 = True
81
+ elif build_system and "build-backend" in build_system:
82
+ if use_pep517 is not None and not use_pep517:
83
+ raise InstallationError(
84
+ "Disabling PEP 517 processing is invalid: "
85
+ "project specifies a build backend of {} "
86
+ "in pyproject.toml".format(build_system["build-backend"])
87
+ )
88
+ use_pep517 = True
89
+
90
+ # If we haven't worked out whether to use PEP 517 yet,
91
+ # and the user hasn't explicitly stated a preference,
92
+ # we do so if the project has a pyproject.toml file.
93
+ elif use_pep517 is None:
94
+ use_pep517 = has_pyproject
95
+
96
+ # At this point, we know whether we're going to use PEP 517.
97
+ assert use_pep517 is not None
98
+
99
+ # If we're using the legacy code path, there is nothing further
100
+ # for us to do here.
101
+ if not use_pep517:
102
+ return None
103
+
104
+ if build_system is None:
105
+ # Either the user has a pyproject.toml with no build-system
106
+ # section, or the user has no pyproject.toml, but has opted in
107
+ # explicitly via --use-pep517.
108
+ # In the absence of any explicit backend specification, we
109
+ # assume the setuptools backend that most closely emulates the
110
+ # traditional direct setup.py execution, and require wheel and
111
+ # a version of setuptools that supports that backend.
112
+
113
+ build_system = {
114
+ "requires": ["setuptools>=40.8.0", "wheel"],
115
+ "build-backend": "setuptools.build_meta:__legacy__",
116
+ }
117
+
118
+ # If we're using PEP 517, we have build system information (either
119
+ # from pyproject.toml, or defaulted by the code above).
120
+ # Note that at this point, we do not know if the user has actually
121
+ # specified a backend, though.
122
+ assert build_system is not None
123
+
124
+ # Ensure that the build-system section in pyproject.toml conforms
125
+ # to PEP 518.
126
+
127
+ # Specifying the build-system table but not the requires key is invalid
128
+ if "requires" not in build_system:
129
+ raise MissingPyProjectBuildRequires(package=req_name)
130
+
131
+ # Error out if requires is not a list of strings
132
+ requires = build_system["requires"]
133
+ if not _is_list_of_str(requires):
134
+ raise InvalidPyProjectBuildRequires(
135
+ package=req_name,
136
+ reason="It is not a list of strings.",
137
+ )
138
+
139
+ # Each requirement must be valid as per PEP 508
140
+ for requirement in requires:
141
+ try:
142
+ Requirement(requirement)
143
+ except InvalidRequirement as error:
144
+ raise InvalidPyProjectBuildRequires(
145
+ package=req_name,
146
+ reason=f"It contains an invalid requirement: {requirement!r}",
147
+ ) from error
148
+
149
+ backend = build_system.get("build-backend")
150
+ backend_path = build_system.get("backend-path", [])
151
+ check: List[str] = []
152
+ if backend is None:
153
+ # If the user didn't specify a backend, we assume they want to use
154
+ # the setuptools backend. But we can't be sure they have included
155
+ # a version of setuptools which supplies the backend, or wheel
156
+ # (which is needed by the backend) in their requirements. So we
157
+ # make a note to check that those requirements are present once
158
+ # we have set up the environment.
159
+ # This is quite a lot of work to check for a very specific case. But
160
+ # the problem is, that case is potentially quite common - projects that
161
+ # adopted PEP 518 early for the ability to specify requirements to
162
+ # execute setup.py, but never considered needing to mention the build
163
+ # tools themselves. The original PEP 518 code had a similar check (but
164
+ # implemented in a different way).
165
+ backend = "setuptools.build_meta:__legacy__"
166
+ check = ["setuptools>=40.8.0", "wheel"]
167
+
168
+ return BuildSystemDetails(requires, backend, check, backend_path)
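Editorial note (not part of the added file): when a project ships a pyproject.toml without a build-backend and has no setup.py, load_pyproject_toml() above forces PEP 517, falls back to the legacy setuptools backend, and records extra requirements to verify in the build environment. A runnable sketch using a temporary, made-up project, assuming the private module is importable:

    import os
    import tempfile

    from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path

    with tempfile.TemporaryDirectory() as src:
        with open(make_pyproject_path(src), "w", encoding="utf-8") as f:
            f.write('[build-system]\nrequires = ["setuptools>=40.8.0", "wheel"]\n')

        details = load_pyproject_toml(
            use_pep517=None,                         # user expressed no preference
            pyproject_toml=make_pyproject_path(src),
            setup_py=os.path.join(src, "setup.py"),  # does not exist
            req_name="example-project",
        )
        print(details.backend)  # setuptools.build_meta:__legacy__
        print(details.check)    # ['setuptools>=40.8.0', 'wheel']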
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/constructors.py ADDED
@@ -0,0 +1,490 @@
1
+ """Backing implementation for InstallRequirement's various constructors
2
+
3
+ The idea here is that these formed a major chunk of InstallRequirement's size
4
+ so, moving them and support code dedicated to them outside of that class
5
+ helps creates for better understandability for the rest of the code.
6
+
7
+ These are meant to be used elsewhere within pip to create instances of
8
+ InstallRequirement.
9
+ """
10
+
11
+ import logging
12
+ import os
13
+ import re
14
+ from typing import Any, Dict, Optional, Set, Tuple, Union
15
+
16
+ from pip._vendor.packaging.markers import Marker
17
+ from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
18
+ from pip._vendor.packaging.specifiers import Specifier
19
+
20
+ from pip._internal.exceptions import InstallationError
21
+ from pip._internal.models.index import PyPI, TestPyPI
22
+ from pip._internal.models.link import Link
23
+ from pip._internal.models.wheel import Wheel
24
+ from pip._internal.req.req_file import ParsedRequirement
25
+ from pip._internal.req.req_install import InstallRequirement
26
+ from pip._internal.utils.filetypes import is_archive_file
27
+ from pip._internal.utils.misc import is_installable_dir
28
+ from pip._internal.utils.packaging import get_requirement
29
+ from pip._internal.utils.urls import path_to_url
30
+ from pip._internal.vcs import is_url, vcs
31
+
32
+ __all__ = [
33
+ "install_req_from_editable",
34
+ "install_req_from_line",
35
+ "parse_editable",
36
+ ]
37
+
38
+ logger = logging.getLogger(__name__)
39
+ operators = Specifier._operators.keys()
40
+
41
+
42
+ def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
43
+ m = re.match(r"^(.+)(\[[^\]]+\])$", path)
44
+ extras = None
45
+ if m:
46
+ path_no_extras = m.group(1)
47
+ extras = m.group(2)
48
+ else:
49
+ path_no_extras = path
50
+
51
+ return path_no_extras, extras
52
+
53
+
54
+ def convert_extras(extras: Optional[str]) -> Set[str]:
55
+ if not extras:
56
+ return set()
57
+ return get_requirement("placeholder" + extras.lower()).extras
58
+
59
+
60
+ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
61
+ """Parses an editable requirement into:
62
+ - a requirement name
63
+ - an URL
64
+ - extras
65
+ - editable options
66
+ Accepted requirements:
67
+ svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
68
+ .[some_extra]
69
+ """
70
+
71
+ url = editable_req
72
+
73
+ # If a file path is specified with extras, strip off the extras.
74
+ url_no_extras, extras = _strip_extras(url)
75
+
76
+ if os.path.isdir(url_no_extras):
77
+ # Treating it as code that has already been checked out
78
+ url_no_extras = path_to_url(url_no_extras)
79
+
80
+ if url_no_extras.lower().startswith("file:"):
81
+ package_name = Link(url_no_extras).egg_fragment
82
+ if extras:
83
+ return (
84
+ package_name,
85
+ url_no_extras,
86
+ get_requirement("placeholder" + extras.lower()).extras,
87
+ )
88
+ else:
89
+ return package_name, url_no_extras, set()
90
+
91
+ for version_control in vcs:
92
+ if url.lower().startswith(f"{version_control}:"):
93
+ url = f"{version_control}+{url}"
94
+ break
95
+
96
+ link = Link(url)
97
+
98
+ if not link.is_vcs:
99
+ backends = ", ".join(vcs.all_schemes)
100
+ raise InstallationError(
101
+ f"{editable_req} is not a valid editable requirement. "
102
+ f"It should either be a path to a local project or a VCS URL "
103
+ f"(beginning with {backends})."
104
+ )
105
+
106
+ package_name = link.egg_fragment
107
+ if not package_name:
108
+ raise InstallationError(
109
+ "Could not detect requirement name for '{}', please specify one "
110
+ "with #egg=your_package_name".format(editable_req)
111
+ )
112
+ return package_name, url, set()
113
+
114
+
115
+ def check_first_requirement_in_file(filename: str) -> None:
116
+ """Check if file is parsable as a requirements file.
117
+
118
+ This is heavily based on ``pkg_resources.parse_requirements``, but
119
+ simplified to just check the first meaningful line.
120
+
121
+ :raises InvalidRequirement: If the first meaningful line cannot be parsed
122
+ as an requirement.
123
+ """
124
+ with open(filename, encoding="utf-8", errors="ignore") as f:
125
+ # Create a steppable iterator, so we can handle \-continuations.
126
+ lines = (
127
+ line
128
+ for line in (line.strip() for line in f)
129
+ if line and not line.startswith("#") # Skip blank lines/comments.
130
+ )
131
+
132
+ for line in lines:
133
+ # Drop comments -- a hash without a space may be in a URL.
134
+ if " #" in line:
135
+ line = line[: line.find(" #")]
136
+ # If there is a line continuation, drop it, and append the next line.
137
+ if line.endswith("\\"):
138
+ line = line[:-2].strip() + next(lines, "")
139
+ Requirement(line)
140
+ return
141
+
142
+
143
+ def deduce_helpful_msg(req: str) -> str:
144
+ """Returns helpful msg in case requirements file does not exist,
145
+ or cannot be parsed.
146
+
147
+ :params req: Requirements file path
148
+ """
149
+ if not os.path.exists(req):
150
+ return f" File '{req}' does not exist."
151
+ msg = " The path does exist. "
152
+ # Try to parse and check if it is a requirements file.
153
+ try:
154
+ check_first_requirement_in_file(req)
155
+ except InvalidRequirement:
156
+ logger.debug("Cannot parse '%s' as requirements file", req)
157
+ else:
158
+ msg += (
159
+ f"The argument you provided "
160
+ f"({req}) appears to be a"
161
+ f" requirements file. If that is the"
162
+ f" case, use the '-r' flag to install"
163
+ f" the packages specified within it."
164
+ )
165
+ return msg
166
+
167
+
168
+ class RequirementParts:
169
+ def __init__(
170
+ self,
171
+ requirement: Optional[Requirement],
172
+ link: Optional[Link],
173
+ markers: Optional[Marker],
174
+ extras: Set[str],
175
+ ):
176
+ self.requirement = requirement
177
+ self.link = link
178
+ self.markers = markers
179
+ self.extras = extras
180
+
181
+
182
+ def parse_req_from_editable(editable_req: str) -> RequirementParts:
183
+ name, url, extras_override = parse_editable(editable_req)
184
+
185
+ if name is not None:
186
+ try:
187
+ req: Optional[Requirement] = Requirement(name)
188
+ except InvalidRequirement:
189
+ raise InstallationError(f"Invalid requirement: '{name}'")
190
+ else:
191
+ req = None
192
+
193
+ link = Link(url)
194
+
195
+ return RequirementParts(req, link, None, extras_override)
196
+
197
+
198
+ # ---- The actual constructors follow ----
199
+
200
+
201
+ def install_req_from_editable(
202
+ editable_req: str,
203
+ comes_from: Optional[Union[InstallRequirement, str]] = None,
204
+ use_pep517: Optional[bool] = None,
205
+ isolated: bool = False,
206
+ options: Optional[Dict[str, Any]] = None,
207
+ constraint: bool = False,
208
+ user_supplied: bool = False,
209
+ permit_editable_wheels: bool = False,
210
+ ) -> InstallRequirement:
211
+
212
+ parts = parse_req_from_editable(editable_req)
213
+
214
+ return InstallRequirement(
215
+ parts.requirement,
216
+ comes_from=comes_from,
217
+ user_supplied=user_supplied,
218
+ editable=True,
219
+ permit_editable_wheels=permit_editable_wheels,
220
+ link=parts.link,
221
+ constraint=constraint,
222
+ use_pep517=use_pep517,
223
+ isolated=isolated,
224
+ install_options=options.get("install_options", []) if options else [],
225
+ global_options=options.get("global_options", []) if options else [],
226
+ hash_options=options.get("hashes", {}) if options else {},
227
+ extras=parts.extras,
228
+ )
229
+
230
+
231
+ def _looks_like_path(name: str) -> bool:
232
+ """Checks whether the string "looks like" a path on the filesystem.
233
+
234
+ This does not check whether the target actually exists; it only judges from the
235
+ appearance.
236
+
237
+ Returns true if any of the following conditions is true:
238
+ * a path separator is found (either os.path.sep or os.path.altsep);
239
+ * the name starts with a dot (which represents the current directory).
240
+ """
241
+ if os.path.sep in name:
242
+ return True
243
+ if os.path.altsep is not None and os.path.altsep in name:
244
+ return True
245
+ if name.startswith("."):
246
+ return True
247
+ return False
248
+
249
+
250
+ def _get_url_from_path(path: str, name: str) -> Optional[str]:
251
+ """
252
+ First, check whether the provided path is an installable directory; if it
253
+ is, return its file: URL.
254
+
255
+ Otherwise, only archive files (such as .whl) are considered. An existing
256
+ file is returned as a URL; if the file is missing and the part before an
257
+ '@' does not look like a path, None is returned (a PEP 440 URL requirement).
258
+ """
259
+ if _looks_like_path(name) and os.path.isdir(path):
260
+ if is_installable_dir(path):
261
+ return path_to_url(path)
262
+ # TODO: The is_installable_dir test here might not be necessary
263
+ # now that it is done in load_pyproject_toml too.
264
+ raise InstallationError(
265
+ f"Directory {name!r} is not installable. Neither 'setup.py' "
266
+ "nor 'pyproject.toml' found."
267
+ )
268
+ if not is_archive_file(path):
269
+ return None
270
+ if os.path.isfile(path):
271
+ return path_to_url(path)
272
+ urlreq_parts = name.split("@", 1)
273
+ if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
274
+ # If the path contains '@' and the part before it does not look
275
+ # like a path, try to treat it as a PEP 440 URL req instead.
276
+ return None
277
+ logger.warning(
278
+ "Requirement %r looks like a filename, but the file does not exist",
279
+ name,
280
+ )
281
+ return path_to_url(path)
282
+
283
+
284
+ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
285
+ if is_url(name):
286
+ marker_sep = "; "
287
+ else:
288
+ marker_sep = ";"
289
+ if marker_sep in name:
290
+ name, markers_as_string = name.split(marker_sep, 1)
291
+ markers_as_string = markers_as_string.strip()
292
+ if not markers_as_string:
293
+ markers = None
294
+ else:
295
+ markers = Marker(markers_as_string)
296
+ else:
297
+ markers = None
298
+ name = name.strip()
299
+ req_as_string = None
300
+ path = os.path.normpath(os.path.abspath(name))
301
+ link = None
302
+ extras_as_string = None
303
+
304
+ if is_url(name):
305
+ link = Link(name)
306
+ else:
307
+ p, extras_as_string = _strip_extras(path)
308
+ url = _get_url_from_path(p, name)
309
+ if url is not None:
310
+ link = Link(url)
311
+
312
+ # it's a local file, dir, or url
313
+ if link:
314
+ # Handle relative file URLs
315
+ if link.scheme == "file" and re.search(r"\.\./", link.url):
316
+ link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
317
+ # wheel file
318
+ if link.is_wheel:
319
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
320
+ req_as_string = f"{wheel.name}=={wheel.version}"
321
+ else:
322
+ # set the req to the egg fragment. when it's not there, this
323
+ # will become an 'unnamed' requirement
324
+ req_as_string = link.egg_fragment
325
+
326
+ # a requirement specifier
327
+ else:
328
+ req_as_string = name
329
+
330
+ extras = convert_extras(extras_as_string)
331
+
332
+ def with_source(text: str) -> str:
333
+ if not line_source:
334
+ return text
335
+ return f"{text} (from {line_source})"
336
+
337
+ def _parse_req_string(req_as_string: str) -> Requirement:
338
+ try:
339
+ req = get_requirement(req_as_string)
340
+ except InvalidRequirement:
341
+ if os.path.sep in req_as_string:
342
+ add_msg = "It looks like a path."
343
+ add_msg += deduce_helpful_msg(req_as_string)
344
+ elif "=" in req_as_string and not any(
345
+ op in req_as_string for op in operators
346
+ ):
347
+ add_msg = "= is not a valid operator. Did you mean == ?"
348
+ else:
349
+ add_msg = ""
350
+ msg = with_source(f"Invalid requirement: {req_as_string!r}")
351
+ if add_msg:
352
+ msg += f"\nHint: {add_msg}"
353
+ raise InstallationError(msg)
354
+ else:
355
+ # Deprecate extras after specifiers: "name>=1.0[extras]"
356
+ # This currently works by accident because _strip_extras() parses
357
+ # any extras at the end of the string, and those are saved in
358
+ # RequirementParts
359
+ for spec in req.specifier:
360
+ spec_str = str(spec)
361
+ if spec_str.endswith("]"):
362
+ msg = f"Extras after version '{spec_str}'."
363
+ raise InstallationError(msg)
364
+ return req
365
+
366
+ if req_as_string is not None:
367
+ req: Optional[Requirement] = _parse_req_string(req_as_string)
368
+ else:
369
+ req = None
370
+
371
+ return RequirementParts(req, link, markers, extras)
372
+
373
+
374
+ def install_req_from_line(
375
+ name: str,
376
+ comes_from: Optional[Union[str, InstallRequirement]] = None,
377
+ use_pep517: Optional[bool] = None,
378
+ isolated: bool = False,
379
+ options: Optional[Dict[str, Any]] = None,
380
+ constraint: bool = False,
381
+ line_source: Optional[str] = None,
382
+ user_supplied: bool = False,
383
+ ) -> InstallRequirement:
384
+ """Creates an InstallRequirement from a name, which might be a
385
+ requirement specifier, a directory containing 'setup.py', a filename, or a URL.
386
+
387
+ :param line_source: An optional string describing where the line is from,
388
+ for logging purposes in case of an error.
389
+ """
390
+ parts = parse_req_from_line(name, line_source)
391
+
392
+ return InstallRequirement(
393
+ parts.requirement,
394
+ comes_from,
395
+ link=parts.link,
396
+ markers=parts.markers,
397
+ use_pep517=use_pep517,
398
+ isolated=isolated,
399
+ install_options=options.get("install_options", []) if options else [],
400
+ global_options=options.get("global_options", []) if options else [],
401
+ hash_options=options.get("hashes", {}) if options else {},
402
+ constraint=constraint,
403
+ extras=parts.extras,
404
+ user_supplied=user_supplied,
405
+ )
406
+
407
+
408
+ def install_req_from_req_string(
409
+ req_string: str,
410
+ comes_from: Optional[InstallRequirement] = None,
411
+ isolated: bool = False,
412
+ use_pep517: Optional[bool] = None,
413
+ user_supplied: bool = False,
414
+ ) -> InstallRequirement:
415
+ try:
416
+ req = get_requirement(req_string)
417
+ except InvalidRequirement:
418
+ raise InstallationError(f"Invalid requirement: '{req_string}'")
419
+
420
+ domains_not_allowed = [
421
+ PyPI.file_storage_domain,
422
+ TestPyPI.file_storage_domain,
423
+ ]
424
+ if (
425
+ req.url
426
+ and comes_from
427
+ and comes_from.link
428
+ and comes_from.link.netloc in domains_not_allowed
429
+ ):
430
+ # Explicitly disallow pypi packages that depend on external urls
431
+ raise InstallationError(
432
+ "Packages installed from PyPI cannot depend on packages "
433
+ "which are not also hosted on PyPI.\n"
434
+ "{} depends on {} ".format(comes_from.name, req)
435
+ )
436
+
437
+ return InstallRequirement(
438
+ req,
439
+ comes_from,
440
+ isolated=isolated,
441
+ use_pep517=use_pep517,
442
+ user_supplied=user_supplied,
443
+ )
444
+
445
+
446
+ def install_req_from_parsed_requirement(
447
+ parsed_req: ParsedRequirement,
448
+ isolated: bool = False,
449
+ use_pep517: Optional[bool] = None,
450
+ user_supplied: bool = False,
451
+ ) -> InstallRequirement:
452
+ if parsed_req.is_editable:
453
+ req = install_req_from_editable(
454
+ parsed_req.requirement,
455
+ comes_from=parsed_req.comes_from,
456
+ use_pep517=use_pep517,
457
+ constraint=parsed_req.constraint,
458
+ isolated=isolated,
459
+ user_supplied=user_supplied,
460
+ )
461
+
462
+ else:
463
+ req = install_req_from_line(
464
+ parsed_req.requirement,
465
+ comes_from=parsed_req.comes_from,
466
+ use_pep517=use_pep517,
467
+ isolated=isolated,
468
+ options=parsed_req.options,
469
+ constraint=parsed_req.constraint,
470
+ line_source=parsed_req.line_source,
471
+ user_supplied=user_supplied,
472
+ )
473
+ return req
474
+
475
+
476
+ def install_req_from_link_and_ireq(
477
+ link: Link, ireq: InstallRequirement
478
+ ) -> InstallRequirement:
479
+ return InstallRequirement(
480
+ req=ireq.req,
481
+ comes_from=ireq.comes_from,
482
+ editable=ireq.editable,
483
+ link=link,
484
+ markers=ireq.markers,
485
+ use_pep517=ireq.use_pep517,
486
+ isolated=ireq.isolated,
487
+ install_options=ireq.install_options,
488
+ global_options=ireq.global_options,
489
+ hash_options=ireq.hash_options,
490
+ )
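
Illustrative sketch, not part of the diff above: a minimal example of how the private constructors in this file might be driven, assuming this vendored pip version (these are internal, unstable APIs, and the package names and URL below are hypothetical placeholders).

# Sketch only: exercises pip-internal constructors; APIs may change between releases.
from pip._internal.req.constructors import install_req_from_line, parse_editable

# A plain specifier line becomes a named InstallRequirement.
ireq = install_req_from_line("requests>=2.28", line_source="example requirements.txt")
print(ireq.name, str(ireq.req))  # e.g. "requests", "requests>=2.28"

# An editable VCS URL is split into (name, url, extras) before construction.
parts = parse_editable("git+https://example.com/repo.git#egg=demo")
print(parts)  # -> ('demo', 'git+https://example.com/repo.git#egg=demo', set())

The name comes from the #egg= fragment; without it, parse_editable raises InstallationError, matching the error path shown above.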
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_set.py ADDED
@@ -0,0 +1,189 @@
1
+ import logging
2
+ from collections import OrderedDict
3
+ from typing import Dict, Iterable, List, Optional, Tuple
4
+
5
+ from pip._vendor.packaging.utils import canonicalize_name
6
+
7
+ from pip._internal.exceptions import InstallationError
8
+ from pip._internal.models.wheel import Wheel
9
+ from pip._internal.req.req_install import InstallRequirement
10
+ from pip._internal.utils import compatibility_tags
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
+ class RequirementSet:
16
+ def __init__(self, check_supported_wheels: bool = True) -> None:
17
+ """Create a RequirementSet."""
18
+
19
+ self.requirements: Dict[str, InstallRequirement] = OrderedDict()
20
+ self.check_supported_wheels = check_supported_wheels
21
+
22
+ self.unnamed_requirements: List[InstallRequirement] = []
23
+
24
+ def __str__(self) -> str:
25
+ requirements = sorted(
26
+ (req for req in self.requirements.values() if not req.comes_from),
27
+ key=lambda req: canonicalize_name(req.name or ""),
28
+ )
29
+ return " ".join(str(req.req) for req in requirements)
30
+
31
+ def __repr__(self) -> str:
32
+ requirements = sorted(
33
+ self.requirements.values(),
34
+ key=lambda req: canonicalize_name(req.name or ""),
35
+ )
36
+
37
+ format_string = "<{classname} object; {count} requirement(s): {reqs}>"
38
+ return format_string.format(
39
+ classname=self.__class__.__name__,
40
+ count=len(requirements),
41
+ reqs=", ".join(str(req.req) for req in requirements),
42
+ )
43
+
44
+ def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
45
+ assert not install_req.name
46
+ self.unnamed_requirements.append(install_req)
47
+
48
+ def add_named_requirement(self, install_req: InstallRequirement) -> None:
49
+ assert install_req.name
50
+
51
+ project_name = canonicalize_name(install_req.name)
52
+ self.requirements[project_name] = install_req
53
+
54
+ def add_requirement(
55
+ self,
56
+ install_req: InstallRequirement,
57
+ parent_req_name: Optional[str] = None,
58
+ extras_requested: Optional[Iterable[str]] = None,
59
+ ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
60
+ """Add install_req as a requirement to install.
61
+
62
+ :param parent_req_name: The name of the requirement that needed this
63
+ added. The name is used because when multiple unnamed requirements
64
+ resolve to the same name, we could otherwise end up with dependency
65
+ links that point outside the Requirements set. parent_req must
66
+ already be added. Note that None implies that this is a user
67
+ supplied requirement, vs an inferred one.
68
+ :param extras_requested: an iterable of extras used to evaluate the
69
+ environment markers.
70
+ :return: Additional requirements to scan. That is either [] if
71
+ the requirement is not applicable, or [install_req] if the
72
+ requirement is applicable and has just been added.
73
+ """
74
+ # If the markers do not match, ignore this requirement.
75
+ if not install_req.match_markers(extras_requested):
76
+ logger.info(
77
+ "Ignoring %s: markers '%s' don't match your environment",
78
+ install_req.name,
79
+ install_req.markers,
80
+ )
81
+ return [], None
82
+
83
+ # If the wheel is not supported, raise an error.
84
+ # Should check this after filtering out based on environment markers to
85
+ # allow specifying different wheels based on the environment/OS, in a
86
+ # single requirements file.
87
+ if install_req.link and install_req.link.is_wheel:
88
+ wheel = Wheel(install_req.link.filename)
89
+ tags = compatibility_tags.get_supported()
90
+ if self.check_supported_wheels and not wheel.supported(tags):
91
+ raise InstallationError(
92
+ "{} is not a supported wheel on this platform.".format(
93
+ wheel.filename
94
+ )
95
+ )
96
+
97
+ # This next bit is really a sanity check.
98
+ assert (
99
+ not install_req.user_supplied or parent_req_name is None
100
+ ), "a user supplied req shouldn't have a parent"
101
+
102
+ # Unnamed requirements are scanned again and the requirement won't be
103
+ # added as a dependency until after scanning.
104
+ if not install_req.name:
105
+ self.add_unnamed_requirement(install_req)
106
+ return [install_req], None
107
+
108
+ try:
109
+ existing_req: Optional[InstallRequirement] = self.get_requirement(
110
+ install_req.name
111
+ )
112
+ except KeyError:
113
+ existing_req = None
114
+
115
+ has_conflicting_requirement = (
116
+ parent_req_name is None
117
+ and existing_req
118
+ and not existing_req.constraint
119
+ and existing_req.extras == install_req.extras
120
+ and existing_req.req
121
+ and install_req.req
122
+ and existing_req.req.specifier != install_req.req.specifier
123
+ )
124
+ if has_conflicting_requirement:
125
+ raise InstallationError(
126
+ "Double requirement given: {} (already in {}, name={!r})".format(
127
+ install_req, existing_req, install_req.name
128
+ )
129
+ )
130
+
131
+ # When no existing requirement exists, add the requirement as a
132
+ # dependency and it will be scanned again after.
133
+ if not existing_req:
134
+ self.add_named_requirement(install_req)
135
+ # We'd want to rescan this requirement later
136
+ return [install_req], install_req
137
+
138
+ # Assume there's no need to scan, and that we've already
139
+ # encountered this for scanning.
140
+ if install_req.constraint or not existing_req.constraint:
141
+ return [], existing_req
142
+
143
+ does_not_satisfy_constraint = install_req.link and not (
144
+ existing_req.link and install_req.link.path == existing_req.link.path
145
+ )
146
+ if does_not_satisfy_constraint:
147
+ raise InstallationError(
148
+ "Could not satisfy constraints for '{}': "
149
+ "installation from path or url cannot be "
150
+ "constrained to a version".format(install_req.name)
151
+ )
152
+ # If we're now installing a constraint, mark the existing
153
+ # object for real installation.
154
+ existing_req.constraint = False
155
+ # If we're now installing a user supplied requirement,
156
+ # mark the existing object as such.
157
+ if install_req.user_supplied:
158
+ existing_req.user_supplied = True
159
+ existing_req.extras = tuple(
160
+ sorted(set(existing_req.extras) | set(install_req.extras))
161
+ )
162
+ logger.debug(
163
+ "Setting %s extras to: %s",
164
+ existing_req,
165
+ existing_req.extras,
166
+ )
167
+ # Return the existing requirement for addition to the parent and
168
+ # scanning again.
169
+ return [existing_req], existing_req
170
+
171
+ def has_requirement(self, name: str) -> bool:
172
+ project_name = canonicalize_name(name)
173
+
174
+ return (
175
+ project_name in self.requirements
176
+ and not self.requirements[project_name].constraint
177
+ )
178
+
179
+ def get_requirement(self, name: str) -> InstallRequirement:
180
+ project_name = canonicalize_name(name)
181
+
182
+ if project_name in self.requirements:
183
+ return self.requirements[project_name]
184
+
185
+ raise KeyError(f"No project with the name {name!r}")
186
+
187
+ @property
188
+ def all_requirements(self) -> List[InstallRequirement]:
189
+ return self.unnamed_requirements + list(self.requirements.values())
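
Illustrative sketch, not part of the diff above: how RequirementSet is typically fed by the constructors from constructors.py, assuming this vendored pip's private API (the requirement name "idna>=3.0" is just an illustrative placeholder).

# Sketch only: pip-internal, unstable APIs from this vendored version.
from pip._internal.req.constructors import install_req_from_line
from pip._internal.req.req_set import RequirementSet

req_set = RequirementSet(check_supported_wheels=True)

# A user-supplied, named requirement is added and returned for further scanning.
ireq = install_req_from_line("idna>=3.0", user_supplied=True)
to_scan, added = req_set.add_requirement(ireq)
print(added is ireq)                      # True: newly added, scan it again later
print(req_set.has_requirement("IDNA"))    # True: names are canonicalized on lookup
print(req_set.get_requirement("idna") is ireq)  # True

Unnamed requirements (e.g. a bare local path) would instead land in unnamed_requirements and be re-scanned until a name is resolved, as the add_requirement docstring above describes.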
env-llmeval/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (189 Bytes).