Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- env-llmeval/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/__pycache__/__main__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__init__.py +520 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py +169 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py +219 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/base.py +52 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__init__.py +2 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/session.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/auth.py +323 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/cache.py +69 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/download.py +185 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/lazy_wheel.py +210 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/session.py +454 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/utils.py +96 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/network/xmlrpc.py +60 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py +39 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py +41 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py +74 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py +37 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py +46 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py +102 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py +2 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc +0 -0
env-llmeval/lib/python3.10/site-packages/pip/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (624 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/__pycache__/__main__.cpython-310.pyc
ADDED
Binary file (586 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__init__.py
ADDED
@@ -0,0 +1,520 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import functools
|
2 |
+
import logging
|
3 |
+
import os
|
4 |
+
import pathlib
|
5 |
+
import sys
|
6 |
+
import sysconfig
|
7 |
+
from typing import Any, Dict, Iterator, List, Optional, Tuple
|
8 |
+
|
9 |
+
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
10 |
+
from pip._internal.utils.compat import WINDOWS
|
11 |
+
from pip._internal.utils.deprecation import deprecated
|
12 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
13 |
+
|
14 |
+
from . import _distutils, _sysconfig
|
15 |
+
from .base import (
|
16 |
+
USER_CACHE_DIR,
|
17 |
+
get_major_minor_version,
|
18 |
+
get_src_prefix,
|
19 |
+
is_osx_framework,
|
20 |
+
site_packages,
|
21 |
+
user_site,
|
22 |
+
)
|
23 |
+
|
24 |
+
__all__ = [
|
25 |
+
"USER_CACHE_DIR",
|
26 |
+
"get_bin_prefix",
|
27 |
+
"get_bin_user",
|
28 |
+
"get_major_minor_version",
|
29 |
+
"get_platlib",
|
30 |
+
"get_prefixed_libs",
|
31 |
+
"get_purelib",
|
32 |
+
"get_scheme",
|
33 |
+
"get_src_prefix",
|
34 |
+
"site_packages",
|
35 |
+
"user_site",
|
36 |
+
]
|
37 |
+
|
38 |
+
|
39 |
+
logger = logging.getLogger(__name__)
|
40 |
+
|
41 |
+
|
42 |
+
_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
|
43 |
+
|
44 |
+
_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
|
45 |
+
|
46 |
+
|
47 |
+
def _should_use_sysconfig() -> bool:
|
48 |
+
"""This function determines the value of _USE_SYSCONFIG.
|
49 |
+
|
50 |
+
By default, pip uses sysconfig on Python 3.10+.
|
51 |
+
But Python distributors can override this decision by setting:
|
52 |
+
sysconfig._PIP_USE_SYSCONFIG = True / False
|
53 |
+
Rationale in https://github.com/pypa/pip/issues/10647
|
54 |
+
|
55 |
+
This is a function for testability, but should be constant during any one
|
56 |
+
run.
|
57 |
+
"""
|
58 |
+
return bool(getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT))
|
59 |
+
|
60 |
+
|
61 |
+
_USE_SYSCONFIG = _should_use_sysconfig()
|
62 |
+
|
63 |
+
# Be noisy about incompatibilities if this platforms "should" be using
|
64 |
+
# sysconfig, but is explicitly opting out and using distutils instead.
|
65 |
+
if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
|
66 |
+
_MISMATCH_LEVEL = logging.WARNING
|
67 |
+
else:
|
68 |
+
_MISMATCH_LEVEL = logging.DEBUG
|
69 |
+
|
70 |
+
|
71 |
+
def _looks_like_bpo_44860() -> bool:
|
72 |
+
"""The resolution to bpo-44860 will change this incorrect platlib.
|
73 |
+
|
74 |
+
See <https://bugs.python.org/issue44860>.
|
75 |
+
"""
|
76 |
+
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
77 |
+
|
78 |
+
try:
|
79 |
+
unix_user_platlib = INSTALL_SCHEMES["unix_user"]["platlib"]
|
80 |
+
except KeyError:
|
81 |
+
return False
|
82 |
+
return unix_user_platlib == "$usersite"
|
83 |
+
|
84 |
+
|
85 |
+
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
|
86 |
+
platlib = scheme["platlib"]
|
87 |
+
if "/$platlibdir/" in platlib:
|
88 |
+
platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
|
89 |
+
if "/lib64/" not in platlib:
|
90 |
+
return False
|
91 |
+
unpatched = platlib.replace("/lib64/", "/lib/")
|
92 |
+
return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
|
93 |
+
|
94 |
+
|
95 |
+
@functools.lru_cache(maxsize=None)
|
96 |
+
def _looks_like_red_hat_lib() -> bool:
|
97 |
+
"""Red Hat patches platlib in unix_prefix and unix_home, but not purelib.
|
98 |
+
|
99 |
+
This is the only way I can see to tell a Red Hat-patched Python.
|
100 |
+
"""
|
101 |
+
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
102 |
+
|
103 |
+
return all(
|
104 |
+
k in INSTALL_SCHEMES
|
105 |
+
and _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[k])
|
106 |
+
for k in ("unix_prefix", "unix_home")
|
107 |
+
)
|
108 |
+
|
109 |
+
|
110 |
+
@functools.lru_cache(maxsize=None)
|
111 |
+
def _looks_like_debian_scheme() -> bool:
|
112 |
+
"""Debian adds two additional schemes."""
|
113 |
+
from distutils.command.install import INSTALL_SCHEMES # type: ignore
|
114 |
+
|
115 |
+
return "deb_system" in INSTALL_SCHEMES and "unix_local" in INSTALL_SCHEMES
|
116 |
+
|
117 |
+
|
118 |
+
@functools.lru_cache(maxsize=None)
|
119 |
+
def _looks_like_red_hat_scheme() -> bool:
|
120 |
+
"""Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.
|
121 |
+
|
122 |
+
Red Hat's ``00251-change-user-install-location.patch`` changes the install
|
123 |
+
command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
|
124 |
+
(fortunately?) done quite unconditionally, so we create a default command
|
125 |
+
object without any configuration to detect this.
|
126 |
+
"""
|
127 |
+
from distutils.command.install import install
|
128 |
+
from distutils.dist import Distribution
|
129 |
+
|
130 |
+
cmd: Any = install(Distribution())
|
131 |
+
cmd.finalize_options()
|
132 |
+
return (
|
133 |
+
cmd.exec_prefix == f"{os.path.normpath(sys.exec_prefix)}/local"
|
134 |
+
and cmd.prefix == f"{os.path.normpath(sys.prefix)}/local"
|
135 |
+
)
|
136 |
+
|
137 |
+
|
138 |
+
@functools.lru_cache(maxsize=None)
|
139 |
+
def _looks_like_slackware_scheme() -> bool:
|
140 |
+
"""Slackware patches sysconfig but fails to patch distutils and site.
|
141 |
+
|
142 |
+
Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
|
143 |
+
path, but does not do the same to the site module.
|
144 |
+
"""
|
145 |
+
if user_site is None: # User-site not available.
|
146 |
+
return False
|
147 |
+
try:
|
148 |
+
paths = sysconfig.get_paths(scheme="posix_user", expand=False)
|
149 |
+
except KeyError: # User-site not available.
|
150 |
+
return False
|
151 |
+
return "/lib64/" in paths["purelib"] and "/lib64/" not in user_site
|
152 |
+
|
153 |
+
|
154 |
+
@functools.lru_cache(maxsize=None)
|
155 |
+
def _looks_like_msys2_mingw_scheme() -> bool:
|
156 |
+
"""MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
|
157 |
+
|
158 |
+
However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
|
159 |
+
likely going to be included in their 3.10 release, so we ignore the warning.
|
160 |
+
See msys2/MINGW-packages#9319.
|
161 |
+
|
162 |
+
MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
|
163 |
+
and is missing the final ``"site-packages"``.
|
164 |
+
"""
|
165 |
+
paths = sysconfig.get_paths("nt", expand=False)
|
166 |
+
return all(
|
167 |
+
"Lib" not in p and "lib" in p and not p.endswith("site-packages")
|
168 |
+
for p in (paths[key] for key in ("platlib", "purelib"))
|
169 |
+
)
|
170 |
+
|
171 |
+
|
172 |
+
def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]:
|
173 |
+
ldversion = sysconfig.get_config_var("LDVERSION")
|
174 |
+
abiflags: str = getattr(sys, "abiflags", None)
|
175 |
+
|
176 |
+
# LDVERSION does not end with sys.abiflags. Just return the path unchanged.
|
177 |
+
if not ldversion or not abiflags or not ldversion.endswith(abiflags):
|
178 |
+
yield from parts
|
179 |
+
return
|
180 |
+
|
181 |
+
# Strip sys.abiflags from LDVERSION-based path components.
|
182 |
+
for part in parts:
|
183 |
+
if part.endswith(ldversion):
|
184 |
+
part = part[: (0 - len(abiflags))]
|
185 |
+
yield part
|
186 |
+
|
187 |
+
|
188 |
+
@functools.lru_cache(maxsize=None)
|
189 |
+
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
|
190 |
+
issue_url = "https://github.com/pypa/pip/issues/10151"
|
191 |
+
message = (
|
192 |
+
"Value for %s does not match. Please report this to <%s>"
|
193 |
+
"\ndistutils: %s"
|
194 |
+
"\nsysconfig: %s"
|
195 |
+
)
|
196 |
+
logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
|
197 |
+
|
198 |
+
|
199 |
+
def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
|
200 |
+
if old == new:
|
201 |
+
return False
|
202 |
+
_warn_mismatched(old, new, key=key)
|
203 |
+
return True
|
204 |
+
|
205 |
+
|
206 |
+
@functools.lru_cache(maxsize=None)
|
207 |
+
def _log_context(
|
208 |
+
*,
|
209 |
+
user: bool = False,
|
210 |
+
home: Optional[str] = None,
|
211 |
+
root: Optional[str] = None,
|
212 |
+
prefix: Optional[str] = None,
|
213 |
+
) -> None:
|
214 |
+
parts = [
|
215 |
+
"Additional context:",
|
216 |
+
"user = %r",
|
217 |
+
"home = %r",
|
218 |
+
"root = %r",
|
219 |
+
"prefix = %r",
|
220 |
+
]
|
221 |
+
|
222 |
+
logger.log(_MISMATCH_LEVEL, "\n".join(parts), user, home, root, prefix)
|
223 |
+
|
224 |
+
|
225 |
+
def get_scheme(
|
226 |
+
dist_name: str,
|
227 |
+
user: bool = False,
|
228 |
+
home: Optional[str] = None,
|
229 |
+
root: Optional[str] = None,
|
230 |
+
isolated: bool = False,
|
231 |
+
prefix: Optional[str] = None,
|
232 |
+
) -> Scheme:
|
233 |
+
new = _sysconfig.get_scheme(
|
234 |
+
dist_name,
|
235 |
+
user=user,
|
236 |
+
home=home,
|
237 |
+
root=root,
|
238 |
+
isolated=isolated,
|
239 |
+
prefix=prefix,
|
240 |
+
)
|
241 |
+
if _USE_SYSCONFIG:
|
242 |
+
return new
|
243 |
+
|
244 |
+
old = _distutils.get_scheme(
|
245 |
+
dist_name,
|
246 |
+
user=user,
|
247 |
+
home=home,
|
248 |
+
root=root,
|
249 |
+
isolated=isolated,
|
250 |
+
prefix=prefix,
|
251 |
+
)
|
252 |
+
|
253 |
+
warning_contexts = []
|
254 |
+
for k in SCHEME_KEYS:
|
255 |
+
old_v = pathlib.Path(getattr(old, k))
|
256 |
+
new_v = pathlib.Path(getattr(new, k))
|
257 |
+
|
258 |
+
if old_v == new_v:
|
259 |
+
continue
|
260 |
+
|
261 |
+
# distutils incorrectly put PyPy packages under ``site-packages/python``
|
262 |
+
# in the ``posix_home`` scheme, but PyPy devs said they expect the
|
263 |
+
# directory name to be ``pypy`` instead. So we treat this as a bug fix
|
264 |
+
# and not warn about it. See bpo-43307 and python/cpython#24628.
|
265 |
+
skip_pypy_special_case = (
|
266 |
+
sys.implementation.name == "pypy"
|
267 |
+
and home is not None
|
268 |
+
and k in ("platlib", "purelib")
|
269 |
+
and old_v.parent == new_v.parent
|
270 |
+
and old_v.name.startswith("python")
|
271 |
+
and new_v.name.startswith("pypy")
|
272 |
+
)
|
273 |
+
if skip_pypy_special_case:
|
274 |
+
continue
|
275 |
+
|
276 |
+
# sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
|
277 |
+
# the ``include`` value, but distutils's ``headers`` does. We'll let
|
278 |
+
# CPython decide whether this is a bug or feature. See bpo-43948.
|
279 |
+
skip_osx_framework_user_special_case = (
|
280 |
+
user
|
281 |
+
and is_osx_framework()
|
282 |
+
and k == "headers"
|
283 |
+
and old_v.parent.parent == new_v.parent
|
284 |
+
and old_v.parent.name.startswith("python")
|
285 |
+
)
|
286 |
+
if skip_osx_framework_user_special_case:
|
287 |
+
continue
|
288 |
+
|
289 |
+
# On Red Hat and derived Linux distributions, distutils is patched to
|
290 |
+
# use "lib64" instead of "lib" for platlib.
|
291 |
+
if k == "platlib" and _looks_like_red_hat_lib():
|
292 |
+
continue
|
293 |
+
|
294 |
+
# On Python 3.9+, sysconfig's posix_user scheme sets platlib against
|
295 |
+
# sys.platlibdir, but distutils's unix_user incorrectly coninutes
|
296 |
+
# using the same $usersite for both platlib and purelib. This creates a
|
297 |
+
# mismatch when sys.platlibdir is not "lib".
|
298 |
+
skip_bpo_44860 = (
|
299 |
+
user
|
300 |
+
and k == "platlib"
|
301 |
+
and not WINDOWS
|
302 |
+
and sys.version_info >= (3, 9)
|
303 |
+
and _PLATLIBDIR != "lib"
|
304 |
+
and _looks_like_bpo_44860()
|
305 |
+
)
|
306 |
+
if skip_bpo_44860:
|
307 |
+
continue
|
308 |
+
|
309 |
+
# Slackware incorrectly patches posix_user to use lib64 instead of lib,
|
310 |
+
# but not usersite to match the location.
|
311 |
+
skip_slackware_user_scheme = (
|
312 |
+
user
|
313 |
+
and k in ("platlib", "purelib")
|
314 |
+
and not WINDOWS
|
315 |
+
and _looks_like_slackware_scheme()
|
316 |
+
)
|
317 |
+
if skip_slackware_user_scheme:
|
318 |
+
continue
|
319 |
+
|
320 |
+
# Both Debian and Red Hat patch Python to place the system site under
|
321 |
+
# /usr/local instead of /usr. Debian also places lib in dist-packages
|
322 |
+
# instead of site-packages, but the /usr/local check should cover it.
|
323 |
+
skip_linux_system_special_case = (
|
324 |
+
not (user or home or prefix or running_under_virtualenv())
|
325 |
+
and old_v.parts[1:3] == ("usr", "local")
|
326 |
+
and len(new_v.parts) > 1
|
327 |
+
and new_v.parts[1] == "usr"
|
328 |
+
and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
|
329 |
+
and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
|
330 |
+
)
|
331 |
+
if skip_linux_system_special_case:
|
332 |
+
continue
|
333 |
+
|
334 |
+
# On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
|
335 |
+
# the "pythonX.Y" part of the path, but distutils does.
|
336 |
+
skip_sysconfig_abiflag_bug = (
|
337 |
+
sys.version_info < (3, 8)
|
338 |
+
and not WINDOWS
|
339 |
+
and k in ("headers", "platlib", "purelib")
|
340 |
+
and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
|
341 |
+
)
|
342 |
+
if skip_sysconfig_abiflag_bug:
|
343 |
+
continue
|
344 |
+
|
345 |
+
# MSYS2 MINGW's sysconfig patch does not include the "site-packages"
|
346 |
+
# part of the path. This is incorrect and will be fixed in MSYS.
|
347 |
+
skip_msys2_mingw_bug = (
|
348 |
+
WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
|
349 |
+
)
|
350 |
+
if skip_msys2_mingw_bug:
|
351 |
+
continue
|
352 |
+
|
353 |
+
# CPython's POSIX install script invokes pip (via ensurepip) against the
|
354 |
+
# interpreter located in the source tree, not the install site. This
|
355 |
+
# triggers special logic in sysconfig that's not present in distutils.
|
356 |
+
# https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
|
357 |
+
skip_cpython_build = (
|
358 |
+
sysconfig.is_python_build(check_home=True)
|
359 |
+
and not WINDOWS
|
360 |
+
and k in ("headers", "include", "platinclude")
|
361 |
+
)
|
362 |
+
if skip_cpython_build:
|
363 |
+
continue
|
364 |
+
|
365 |
+
warning_contexts.append((old_v, new_v, f"scheme.{k}"))
|
366 |
+
|
367 |
+
if not warning_contexts:
|
368 |
+
return old
|
369 |
+
|
370 |
+
# Check if this path mismatch is caused by distutils config files. Those
|
371 |
+
# files will no longer work once we switch to sysconfig, so this raises a
|
372 |
+
# deprecation message for them.
|
373 |
+
default_old = _distutils.distutils_scheme(
|
374 |
+
dist_name,
|
375 |
+
user,
|
376 |
+
home,
|
377 |
+
root,
|
378 |
+
isolated,
|
379 |
+
prefix,
|
380 |
+
ignore_config_files=True,
|
381 |
+
)
|
382 |
+
if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
|
383 |
+
deprecated(
|
384 |
+
reason=(
|
385 |
+
"Configuring installation scheme with distutils config files "
|
386 |
+
"is deprecated and will no longer work in the near future. If you "
|
387 |
+
"are using a Homebrew or Linuxbrew Python, please see discussion "
|
388 |
+
"at https://github.com/Homebrew/homebrew-core/issues/76621"
|
389 |
+
),
|
390 |
+
replacement=None,
|
391 |
+
gone_in=None,
|
392 |
+
)
|
393 |
+
return old
|
394 |
+
|
395 |
+
# Post warnings about this mismatch so user can report them back.
|
396 |
+
for old_v, new_v, key in warning_contexts:
|
397 |
+
_warn_mismatched(old_v, new_v, key=key)
|
398 |
+
_log_context(user=user, home=home, root=root, prefix=prefix)
|
399 |
+
|
400 |
+
return old
|
401 |
+
|
402 |
+
|
403 |
+
def get_bin_prefix() -> str:
|
404 |
+
new = _sysconfig.get_bin_prefix()
|
405 |
+
if _USE_SYSCONFIG:
|
406 |
+
return new
|
407 |
+
|
408 |
+
old = _distutils.get_bin_prefix()
|
409 |
+
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="bin_prefix"):
|
410 |
+
_log_context()
|
411 |
+
return old
|
412 |
+
|
413 |
+
|
414 |
+
def get_bin_user() -> str:
|
415 |
+
return _sysconfig.get_scheme("", user=True).scripts
|
416 |
+
|
417 |
+
|
418 |
+
def _looks_like_deb_system_dist_packages(value: str) -> bool:
|
419 |
+
"""Check if the value is Debian's APT-controlled dist-packages.
|
420 |
+
|
421 |
+
Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
|
422 |
+
default package path controlled by APT, but does not patch ``sysconfig`` to
|
423 |
+
do the same. This is similar to the bug worked around in ``get_scheme()``,
|
424 |
+
but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
|
425 |
+
we can't do anything about this Debian bug, and this detection allows us to
|
426 |
+
skip the warning when needed.
|
427 |
+
"""
|
428 |
+
if not _looks_like_debian_scheme():
|
429 |
+
return False
|
430 |
+
if value == "/usr/lib/python3/dist-packages":
|
431 |
+
return True
|
432 |
+
return False
|
433 |
+
|
434 |
+
|
435 |
+
def get_purelib() -> str:
|
436 |
+
"""Return the default pure-Python lib location."""
|
437 |
+
new = _sysconfig.get_purelib()
|
438 |
+
if _USE_SYSCONFIG:
|
439 |
+
return new
|
440 |
+
|
441 |
+
old = _distutils.get_purelib()
|
442 |
+
if _looks_like_deb_system_dist_packages(old):
|
443 |
+
return old
|
444 |
+
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="purelib"):
|
445 |
+
_log_context()
|
446 |
+
return old
|
447 |
+
|
448 |
+
|
449 |
+
def get_platlib() -> str:
|
450 |
+
"""Return the default platform-shared lib location."""
|
451 |
+
new = _sysconfig.get_platlib()
|
452 |
+
if _USE_SYSCONFIG:
|
453 |
+
return new
|
454 |
+
|
455 |
+
old = _distutils.get_platlib()
|
456 |
+
if _looks_like_deb_system_dist_packages(old):
|
457 |
+
return old
|
458 |
+
if _warn_if_mismatch(pathlib.Path(old), pathlib.Path(new), key="platlib"):
|
459 |
+
_log_context()
|
460 |
+
return old
|
461 |
+
|
462 |
+
|
463 |
+
def _deduplicated(v1: str, v2: str) -> List[str]:
|
464 |
+
"""Deduplicate values from a list."""
|
465 |
+
if v1 == v2:
|
466 |
+
return [v1]
|
467 |
+
return [v1, v2]
|
468 |
+
|
469 |
+
|
470 |
+
def _looks_like_apple_library(path: str) -> bool:
|
471 |
+
"""Apple patches sysconfig to *always* look under */Library/Python*."""
|
472 |
+
if sys.platform[:6] != "darwin":
|
473 |
+
return False
|
474 |
+
return path == f"/Library/Python/{get_major_minor_version()}/site-packages"
|
475 |
+
|
476 |
+
|
477 |
+
def get_prefixed_libs(prefix: str) -> List[str]:
|
478 |
+
"""Return the lib locations under ``prefix``."""
|
479 |
+
new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)
|
480 |
+
if _USE_SYSCONFIG:
|
481 |
+
return _deduplicated(new_pure, new_plat)
|
482 |
+
|
483 |
+
old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
|
484 |
+
old_lib_paths = _deduplicated(old_pure, old_plat)
|
485 |
+
|
486 |
+
# Apple's Python (shipped with Xcode and Command Line Tools) hard-code
|
487 |
+
# platlib and purelib to '/Library/Python/X.Y/site-packages'. This will
|
488 |
+
# cause serious build isolation bugs when Apple starts shipping 3.10 because
|
489 |
+
# pip will install build backends to the wrong location. This tells users
|
490 |
+
# who is at fault so Apple may notice it and fix the issue in time.
|
491 |
+
if all(_looks_like_apple_library(p) for p in old_lib_paths):
|
492 |
+
deprecated(
|
493 |
+
reason=(
|
494 |
+
"Python distributed by Apple's Command Line Tools incorrectly "
|
495 |
+
"patches sysconfig to always point to '/Library/Python'. This "
|
496 |
+
"will cause build isolation to operate incorrectly on Python "
|
497 |
+
"3.10 or later. Please help report this to Apple so they can "
|
498 |
+
"fix this. https://developer.apple.com/bug-reporting/"
|
499 |
+
),
|
500 |
+
replacement=None,
|
501 |
+
gone_in=None,
|
502 |
+
)
|
503 |
+
return old_lib_paths
|
504 |
+
|
505 |
+
warned = [
|
506 |
+
_warn_if_mismatch(
|
507 |
+
pathlib.Path(old_pure),
|
508 |
+
pathlib.Path(new_pure),
|
509 |
+
key="prefixed-purelib",
|
510 |
+
),
|
511 |
+
_warn_if_mismatch(
|
512 |
+
pathlib.Path(old_plat),
|
513 |
+
pathlib.Path(new_plat),
|
514 |
+
key="prefixed-platlib",
|
515 |
+
),
|
516 |
+
]
|
517 |
+
if any(warned):
|
518 |
+
_log_context(prefix=prefix)
|
519 |
+
|
520 |
+
return old_lib_paths
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (12.4 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc
ADDED
Binary file (4.65 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc
ADDED
Binary file (6.23 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc
ADDED
Binary file (1.53 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py
ADDED
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Locations where we look for configs, install stuff, etc"""
|
2 |
+
|
3 |
+
# The following comment should be removed at some point in the future.
|
4 |
+
# mypy: strict-optional=False
|
5 |
+
|
6 |
+
import logging
|
7 |
+
import os
|
8 |
+
import sys
|
9 |
+
from distutils.cmd import Command as DistutilsCommand
|
10 |
+
from distutils.command.install import SCHEME_KEYS
|
11 |
+
from distutils.command.install import install as distutils_install_command
|
12 |
+
from distutils.sysconfig import get_python_lib
|
13 |
+
from typing import Dict, List, Optional, Tuple, Union, cast
|
14 |
+
|
15 |
+
from pip._internal.models.scheme import Scheme
|
16 |
+
from pip._internal.utils.compat import WINDOWS
|
17 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
18 |
+
|
19 |
+
from .base import get_major_minor_version
|
20 |
+
|
21 |
+
logger = logging.getLogger(__name__)
|
22 |
+
|
23 |
+
|
24 |
+
def distutils_scheme(
|
25 |
+
dist_name: str,
|
26 |
+
user: bool = False,
|
27 |
+
home: str = None,
|
28 |
+
root: str = None,
|
29 |
+
isolated: bool = False,
|
30 |
+
prefix: str = None,
|
31 |
+
*,
|
32 |
+
ignore_config_files: bool = False,
|
33 |
+
) -> Dict[str, str]:
|
34 |
+
"""
|
35 |
+
Return a distutils install scheme
|
36 |
+
"""
|
37 |
+
from distutils.dist import Distribution
|
38 |
+
|
39 |
+
dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
|
40 |
+
if isolated:
|
41 |
+
dist_args["script_args"] = ["--no-user-cfg"]
|
42 |
+
|
43 |
+
d = Distribution(dist_args)
|
44 |
+
if not ignore_config_files:
|
45 |
+
try:
|
46 |
+
d.parse_config_files()
|
47 |
+
except UnicodeDecodeError:
|
48 |
+
# Typeshed does not include find_config_files() for some reason.
|
49 |
+
paths = d.find_config_files() # type: ignore
|
50 |
+
logger.warning(
|
51 |
+
"Ignore distutils configs in %s due to encoding errors.",
|
52 |
+
", ".join(os.path.basename(p) for p in paths),
|
53 |
+
)
|
54 |
+
obj: Optional[DistutilsCommand] = None
|
55 |
+
obj = d.get_command_obj("install", create=True)
|
56 |
+
assert obj is not None
|
57 |
+
i = cast(distutils_install_command, obj)
|
58 |
+
# NOTE: setting user or home has the side-effect of creating the home dir
|
59 |
+
# or user base for installations during finalize_options()
|
60 |
+
# ideally, we'd prefer a scheme class that has no side-effects.
|
61 |
+
assert not (user and prefix), f"user={user} prefix={prefix}"
|
62 |
+
assert not (home and prefix), f"home={home} prefix={prefix}"
|
63 |
+
i.user = user or i.user
|
64 |
+
if user or home:
|
65 |
+
i.prefix = ""
|
66 |
+
i.prefix = prefix or i.prefix
|
67 |
+
i.home = home or i.home
|
68 |
+
i.root = root or i.root
|
69 |
+
i.finalize_options()
|
70 |
+
|
71 |
+
scheme = {}
|
72 |
+
for key in SCHEME_KEYS:
|
73 |
+
scheme[key] = getattr(i, "install_" + key)
|
74 |
+
|
75 |
+
# install_lib specified in setup.cfg should install *everything*
|
76 |
+
# into there (i.e. it takes precedence over both purelib and
|
77 |
+
# platlib). Note, i.install_lib is *always* set after
|
78 |
+
# finalize_options(); we only want to override here if the user
|
79 |
+
# has explicitly requested it hence going back to the config
|
80 |
+
if "install_lib" in d.get_option_dict("install"):
|
81 |
+
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
|
82 |
+
|
83 |
+
if running_under_virtualenv():
|
84 |
+
if home:
|
85 |
+
prefix = home
|
86 |
+
elif user:
|
87 |
+
prefix = i.install_userbase # type: ignore
|
88 |
+
else:
|
89 |
+
prefix = i.prefix
|
90 |
+
scheme["headers"] = os.path.join(
|
91 |
+
prefix,
|
92 |
+
"include",
|
93 |
+
"site",
|
94 |
+
f"python{get_major_minor_version()}",
|
95 |
+
dist_name,
|
96 |
+
)
|
97 |
+
|
98 |
+
if root is not None:
|
99 |
+
path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
|
100 |
+
scheme["headers"] = os.path.join(root, path_no_drive[1:])
|
101 |
+
|
102 |
+
return scheme
|
103 |
+
|
104 |
+
|
105 |
+
def get_scheme(
|
106 |
+
dist_name: str,
|
107 |
+
user: bool = False,
|
108 |
+
home: Optional[str] = None,
|
109 |
+
root: Optional[str] = None,
|
110 |
+
isolated: bool = False,
|
111 |
+
prefix: Optional[str] = None,
|
112 |
+
) -> Scheme:
|
113 |
+
"""
|
114 |
+
Get the "scheme" corresponding to the input parameters. The distutils
|
115 |
+
documentation provides the context for the available schemes:
|
116 |
+
https://docs.python.org/3/install/index.html#alternate-installation
|
117 |
+
|
118 |
+
:param dist_name: the name of the package to retrieve the scheme for, used
|
119 |
+
in the headers scheme path
|
120 |
+
:param user: indicates to use the "user" scheme
|
121 |
+
:param home: indicates to use the "home" scheme and provides the base
|
122 |
+
directory for the same
|
123 |
+
:param root: root under which other directories are re-based
|
124 |
+
:param isolated: equivalent to --no-user-cfg, i.e. do not consider
|
125 |
+
~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
|
126 |
+
scheme paths
|
127 |
+
:param prefix: indicates to use the "prefix" scheme and provides the
|
128 |
+
base directory for the same
|
129 |
+
"""
|
130 |
+
scheme = distutils_scheme(dist_name, user, home, root, isolated, prefix)
|
131 |
+
return Scheme(
|
132 |
+
platlib=scheme["platlib"],
|
133 |
+
purelib=scheme["purelib"],
|
134 |
+
headers=scheme["headers"],
|
135 |
+
scripts=scheme["scripts"],
|
136 |
+
data=scheme["data"],
|
137 |
+
)
|
138 |
+
|
139 |
+
|
140 |
+
def get_bin_prefix() -> str:
|
141 |
+
# XXX: In old virtualenv versions, sys.prefix can contain '..' components,
|
142 |
+
# so we need to call normpath to eliminate them.
|
143 |
+
prefix = os.path.normpath(sys.prefix)
|
144 |
+
if WINDOWS:
|
145 |
+
bin_py = os.path.join(prefix, "Scripts")
|
146 |
+
# buildout uses 'bin' on Windows too?
|
147 |
+
if not os.path.exists(bin_py):
|
148 |
+
bin_py = os.path.join(prefix, "bin")
|
149 |
+
return bin_py
|
150 |
+
# Forcing to use /usr/local/bin for standard macOS framework installs
|
151 |
+
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
|
152 |
+
if sys.platform[:6] == "darwin" and prefix[:16] == "/System/Library/":
|
153 |
+
return "/usr/local/bin"
|
154 |
+
return os.path.join(prefix, "bin")
|
155 |
+
|
156 |
+
|
157 |
+
def get_purelib() -> str:
|
158 |
+
return get_python_lib(plat_specific=False)
|
159 |
+
|
160 |
+
|
161 |
+
def get_platlib() -> str:
|
162 |
+
return get_python_lib(plat_specific=True)
|
163 |
+
|
164 |
+
|
165 |
+
def get_prefixed_libs(prefix: str) -> Tuple[str, str]:
|
166 |
+
return (
|
167 |
+
get_python_lib(plat_specific=False, prefix=prefix),
|
168 |
+
get_python_lib(plat_specific=True, prefix=prefix),
|
169 |
+
)
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py
ADDED
@@ -0,0 +1,219 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import distutils.util # FIXME: For change_root.
|
2 |
+
import logging
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
import sysconfig
|
6 |
+
import typing
|
7 |
+
|
8 |
+
from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
|
9 |
+
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
10 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
11 |
+
|
12 |
+
from .base import get_major_minor_version, is_osx_framework
|
13 |
+
|
14 |
+
logger = logging.getLogger(__name__)
|
15 |
+
|
16 |
+
|
17 |
+
# Notes on _infer_* functions.
|
18 |
+
# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
|
19 |
+
# way to ask things like "what is the '_prefix' scheme on this platform". These
|
20 |
+
# functions try to answer that with some heuristics while accounting for ad-hoc
|
21 |
+
# platforms not covered by CPython's default sysconfig implementation. If the
|
22 |
+
# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
|
23 |
+
# a POSIX scheme.
|
24 |
+
|
25 |
+
_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
|
26 |
+
|
27 |
+
_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
|
28 |
+
|
29 |
+
|
30 |
+
def _should_use_osx_framework_prefix() -> bool:
|
31 |
+
"""Check for Apple's ``osx_framework_library`` scheme.
|
32 |
+
|
33 |
+
Python distributed by Apple's Command Line Tools has this special scheme
|
34 |
+
that's used when:
|
35 |
+
|
36 |
+
* This is a framework build.
|
37 |
+
* We are installing into the system prefix.
|
38 |
+
|
39 |
+
This does not account for ``pip install --prefix`` (also means we're not
|
40 |
+
installing to the system prefix), which should use ``posix_prefix``, but
|
41 |
+
logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
|
42 |
+
since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
|
43 |
+
which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
|
44 |
+
wouldn't be able to magically switch between ``osx_framework_library`` and
|
45 |
+
``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
|
46 |
+
means its behavior is consistent whether we use the stdlib implementation
|
47 |
+
or our own, and we deal with this special case in ``get_scheme()`` instead.
|
48 |
+
"""
|
49 |
+
return (
|
50 |
+
"osx_framework_library" in _AVAILABLE_SCHEMES
|
51 |
+
and not running_under_virtualenv()
|
52 |
+
and is_osx_framework()
|
53 |
+
)
|
54 |
+
|
55 |
+
|
56 |
+
def _infer_prefix() -> str:
|
57 |
+
"""Try to find a prefix scheme for the current platform.
|
58 |
+
|
59 |
+
This tries:
|
60 |
+
|
61 |
+
* A special ``osx_framework_library`` for Python distributed by Apple's
|
62 |
+
Command Line Tools, when not running in a virtual environment.
|
63 |
+
* Implementation + OS, used by PyPy on Windows (``pypy_nt``).
|
64 |
+
* Implementation without OS, used by PyPy on POSIX (``pypy``).
|
65 |
+
* OS + "prefix", used by CPython on POSIX (``posix_prefix``).
|
66 |
+
* Just the OS name, used by CPython on Windows (``nt``).
|
67 |
+
|
68 |
+
If none of the above works, fall back to ``posix_prefix``.
|
69 |
+
"""
|
70 |
+
if _PREFERRED_SCHEME_API:
|
71 |
+
return _PREFERRED_SCHEME_API("prefix")
|
72 |
+
if _should_use_osx_framework_prefix():
|
73 |
+
return "osx_framework_library"
|
74 |
+
implementation_suffixed = f"{sys.implementation.name}_{os.name}"
|
75 |
+
if implementation_suffixed in _AVAILABLE_SCHEMES:
|
76 |
+
return implementation_suffixed
|
77 |
+
if sys.implementation.name in _AVAILABLE_SCHEMES:
|
78 |
+
return sys.implementation.name
|
79 |
+
suffixed = f"{os.name}_prefix"
|
80 |
+
if suffixed in _AVAILABLE_SCHEMES:
|
81 |
+
return suffixed
|
82 |
+
if os.name in _AVAILABLE_SCHEMES: # On Windows, prefx is just called "nt".
|
83 |
+
return os.name
|
84 |
+
return "posix_prefix"
|
85 |
+
|
86 |
+
|
87 |
+
def _infer_user() -> str:
|
88 |
+
"""Try to find a user scheme for the current platform."""
|
89 |
+
if _PREFERRED_SCHEME_API:
|
90 |
+
return _PREFERRED_SCHEME_API("user")
|
91 |
+
if is_osx_framework() and not running_under_virtualenv():
|
92 |
+
suffixed = "osx_framework_user"
|
93 |
+
else:
|
94 |
+
suffixed = f"{os.name}_user"
|
95 |
+
if suffixed in _AVAILABLE_SCHEMES:
|
96 |
+
return suffixed
|
97 |
+
if "posix_user" not in _AVAILABLE_SCHEMES: # User scheme unavailable.
|
98 |
+
raise UserInstallationInvalid()
|
99 |
+
return "posix_user"
|
100 |
+
|
101 |
+
|
102 |
+
def _infer_home() -> str:
|
103 |
+
"""Try to find a home for the current platform."""
|
104 |
+
if _PREFERRED_SCHEME_API:
|
105 |
+
return _PREFERRED_SCHEME_API("home")
|
106 |
+
suffixed = f"{os.name}_home"
|
107 |
+
if suffixed in _AVAILABLE_SCHEMES:
|
108 |
+
return suffixed
|
109 |
+
return "posix_home"
|
110 |
+
|
111 |
+
|
112 |
+
# Update these keys if the user sets a custom home.
|
113 |
+
_HOME_KEYS = [
|
114 |
+
"installed_base",
|
115 |
+
"base",
|
116 |
+
"installed_platbase",
|
117 |
+
"platbase",
|
118 |
+
"prefix",
|
119 |
+
"exec_prefix",
|
120 |
+
]
|
121 |
+
if sysconfig.get_config_var("userbase") is not None:
|
122 |
+
_HOME_KEYS.append("userbase")
|
123 |
+
|
124 |
+
|
125 |
+
def get_scheme(
|
126 |
+
dist_name: str,
|
127 |
+
user: bool = False,
|
128 |
+
home: typing.Optional[str] = None,
|
129 |
+
root: typing.Optional[str] = None,
|
130 |
+
isolated: bool = False,
|
131 |
+
prefix: typing.Optional[str] = None,
|
132 |
+
) -> Scheme:
|
133 |
+
"""
|
134 |
+
Get the "scheme" corresponding to the input parameters.
|
135 |
+
|
136 |
+
:param dist_name: the name of the package to retrieve the scheme for, used
|
137 |
+
in the headers scheme path
|
138 |
+
:param user: indicates to use the "user" scheme
|
139 |
+
:param home: indicates to use the "home" scheme
|
140 |
+
:param root: root under which other directories are re-based
|
141 |
+
:param isolated: ignored, but kept for distutils compatibility (where
|
142 |
+
this controls whether the user-site pydistutils.cfg is honored)
|
143 |
+
:param prefix: indicates to use the "prefix" scheme and provides the
|
144 |
+
base directory for the same
|
145 |
+
"""
|
146 |
+
if user and prefix:
|
147 |
+
raise InvalidSchemeCombination("--user", "--prefix")
|
148 |
+
if home and prefix:
|
149 |
+
raise InvalidSchemeCombination("--home", "--prefix")
|
150 |
+
|
151 |
+
if home is not None:
|
152 |
+
scheme_name = _infer_home()
|
153 |
+
elif user:
|
154 |
+
scheme_name = _infer_user()
|
155 |
+
else:
|
156 |
+
scheme_name = _infer_prefix()
|
157 |
+
|
158 |
+
# Special case: When installing into a custom prefix, use posix_prefix
|
159 |
+
# instead of osx_framework_library. See _should_use_osx_framework_prefix()
|
160 |
+
# docstring for details.
|
161 |
+
if prefix is not None and scheme_name == "osx_framework_library":
|
162 |
+
scheme_name = "posix_prefix"
|
163 |
+
|
164 |
+
if home is not None:
|
165 |
+
variables = {k: home for k in _HOME_KEYS}
|
166 |
+
elif prefix is not None:
|
167 |
+
variables = {k: prefix for k in _HOME_KEYS}
|
168 |
+
else:
|
169 |
+
variables = {}
|
170 |
+
|
171 |
+
paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)
|
172 |
+
|
173 |
+
# Logic here is very arbitrary, we're doing it for compatibility, don't ask.
|
174 |
+
# 1. Pip historically uses a special header path in virtual environments.
|
175 |
+
# 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
|
176 |
+
# only do the same when not running in a virtual environment because
|
177 |
+
# pip's historical header path logic (see point 1) did not do this.
|
178 |
+
if running_under_virtualenv():
|
179 |
+
if user:
|
180 |
+
base = variables.get("userbase", sys.prefix)
|
181 |
+
else:
|
182 |
+
base = variables.get("base", sys.prefix)
|
183 |
+
python_xy = f"python{get_major_minor_version()}"
|
184 |
+
paths["include"] = os.path.join(base, "include", "site", python_xy)
|
185 |
+
elif not dist_name:
|
186 |
+
dist_name = "UNKNOWN"
|
187 |
+
|
188 |
+
scheme = Scheme(
|
189 |
+
platlib=paths["platlib"],
|
190 |
+
purelib=paths["purelib"],
|
191 |
+
headers=os.path.join(paths["include"], dist_name),
|
192 |
+
scripts=paths["scripts"],
|
193 |
+
data=paths["data"],
|
194 |
+
)
|
195 |
+
if root is not None:
|
196 |
+
for key in SCHEME_KEYS:
|
197 |
+
value = distutils.util.change_root(root, getattr(scheme, key))
|
198 |
+
setattr(scheme, key, value)
|
199 |
+
return scheme
|
200 |
+
|
201 |
+
|
202 |
+
def get_bin_prefix() -> str:
|
203 |
+
# Forcing to use /usr/local/bin for standard macOS framework installs.
|
204 |
+
if sys.platform[:6] == "darwin" and sys.prefix[:16] == "/System/Library/":
|
205 |
+
return "/usr/local/bin"
|
206 |
+
return sysconfig.get_paths()["scripts"]
|
207 |
+
|
208 |
+
|
209 |
+
def get_purelib() -> str:
|
210 |
+
return sysconfig.get_paths()["purelib"]
|
211 |
+
|
212 |
+
|
213 |
+
def get_platlib() -> str:
|
214 |
+
return sysconfig.get_paths()["platlib"]
|
215 |
+
|
216 |
+
|
217 |
+
def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]:
|
218 |
+
paths = sysconfig.get_paths(vars={"base": prefix, "platbase": prefix})
|
219 |
+
return (paths["purelib"], paths["platlib"])
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/locations/base.py
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import functools
|
2 |
+
import os
|
3 |
+
import site
|
4 |
+
import sys
|
5 |
+
import sysconfig
|
6 |
+
import typing
|
7 |
+
|
8 |
+
from pip._internal.utils import appdirs
|
9 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
10 |
+
|
11 |
+
# Application Directories
|
12 |
+
USER_CACHE_DIR = appdirs.user_cache_dir("pip")
|
13 |
+
|
14 |
+
# FIXME doesn't account for venv linked to global site-packages
|
15 |
+
site_packages: typing.Optional[str] = sysconfig.get_path("purelib")
|
16 |
+
|
17 |
+
|
18 |
+
def get_major_minor_version() -> str:
|
19 |
+
"""
|
20 |
+
Return the major-minor version of the current Python as a string, e.g.
|
21 |
+
"3.7" or "3.10".
|
22 |
+
"""
|
23 |
+
return "{}.{}".format(*sys.version_info)
|
24 |
+
|
25 |
+
|
26 |
+
def get_src_prefix() -> str:
|
27 |
+
if running_under_virtualenv():
|
28 |
+
src_prefix = os.path.join(sys.prefix, "src")
|
29 |
+
else:
|
30 |
+
# FIXME: keep src in cwd for now (it is not a temporary folder)
|
31 |
+
try:
|
32 |
+
src_prefix = os.path.join(os.getcwd(), "src")
|
33 |
+
except OSError:
|
34 |
+
# In case the current working directory has been renamed or deleted
|
35 |
+
sys.exit("The folder you are executing pip from can no longer be found.")
|
36 |
+
|
37 |
+
# under macOS + virtualenv sys.prefix is not properly resolved
|
38 |
+
# it is something like /path/to/python/bin/..
|
39 |
+
return os.path.abspath(src_prefix)
|
40 |
+
|
41 |
+
|
42 |
+
try:
|
43 |
+
# Use getusersitepackages if this is present, as it ensures that the
|
44 |
+
# value is initialised properly.
|
45 |
+
user_site: typing.Optional[str] = site.getusersitepackages()
|
46 |
+
except AttributeError:
|
47 |
+
user_site = site.USER_SITE
|
48 |
+
|
49 |
+
|
50 |
+
@functools.lru_cache(maxsize=None)
|
51 |
+
def is_osx_framework() -> bool:
|
52 |
+
return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__init__.py
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
"""Contains purely network-related utilities.
|
2 |
+
"""
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (241 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/auth.cpython-310.pyc
ADDED
Binary file (7.51 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/cache.cpython-310.pyc
ADDED
Binary file (2.92 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc
ADDED
Binary file (5.49 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/lazy_wheel.cpython-310.pyc
ADDED
Binary file (8.4 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/session.cpython-310.pyc
ADDED
Binary file (10.7 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc
ADDED
Binary file (1.44 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/__pycache__/xmlrpc.cpython-310.pyc
ADDED
Binary file (2.05 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/auth.py
ADDED
@@ -0,0 +1,323 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Network Authentication Helpers
|
2 |
+
|
3 |
+
Contains interface (MultiDomainBasicAuth) and associated glue code for
|
4 |
+
providing credentials in the context of network requests.
|
5 |
+
"""
|
6 |
+
|
7 |
+
import urllib.parse
|
8 |
+
from typing import Any, Dict, List, Optional, Tuple
|
9 |
+
|
10 |
+
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
|
11 |
+
from pip._vendor.requests.models import Request, Response
|
12 |
+
from pip._vendor.requests.utils import get_netrc_auth
|
13 |
+
|
14 |
+
from pip._internal.utils.logging import getLogger
|
15 |
+
from pip._internal.utils.misc import (
|
16 |
+
ask,
|
17 |
+
ask_input,
|
18 |
+
ask_password,
|
19 |
+
remove_auth_from_url,
|
20 |
+
split_auth_netloc_from_url,
|
21 |
+
)
|
22 |
+
from pip._internal.vcs.versioncontrol import AuthInfo
|
23 |
+
|
24 |
+
logger = getLogger(__name__)
|
25 |
+
|
26 |
+
Credentials = Tuple[str, str, str]
|
27 |
+
|
28 |
+
try:
|
29 |
+
import keyring
|
30 |
+
except ImportError:
|
31 |
+
keyring = None # type: ignore[assignment]
|
32 |
+
except Exception as exc:
|
33 |
+
logger.warning(
|
34 |
+
"Keyring is skipped due to an exception: %s",
|
35 |
+
str(exc),
|
36 |
+
)
|
37 |
+
keyring = None # type: ignore[assignment]
|
38 |
+
|
39 |
+
|
40 |
+
def get_keyring_auth(url: Optional[str], username: Optional[str]) -> Optional[AuthInfo]:
|
41 |
+
"""Return the tuple auth for a given url from keyring."""
|
42 |
+
global keyring
|
43 |
+
if not url or not keyring:
|
44 |
+
return None
|
45 |
+
|
46 |
+
try:
|
47 |
+
try:
|
48 |
+
get_credential = keyring.get_credential
|
49 |
+
except AttributeError:
|
50 |
+
pass
|
51 |
+
else:
|
52 |
+
logger.debug("Getting credentials from keyring for %s", url)
|
53 |
+
cred = get_credential(url, username)
|
54 |
+
if cred is not None:
|
55 |
+
return cred.username, cred.password
|
56 |
+
return None
|
57 |
+
|
58 |
+
if username:
|
59 |
+
logger.debug("Getting password from keyring for %s", url)
|
60 |
+
password = keyring.get_password(url, username)
|
61 |
+
if password:
|
62 |
+
return username, password
|
63 |
+
|
64 |
+
except Exception as exc:
|
65 |
+
logger.warning(
|
66 |
+
"Keyring is skipped due to an exception: %s",
|
67 |
+
str(exc),
|
68 |
+
)
|
69 |
+
keyring = None # type: ignore[assignment]
|
70 |
+
return None
|
71 |
+
|
72 |
+
|
73 |
+
class MultiDomainBasicAuth(AuthBase):
|
74 |
+
def __init__(
|
75 |
+
self, prompting: bool = True, index_urls: Optional[List[str]] = None
|
76 |
+
) -> None:
|
77 |
+
self.prompting = prompting
|
78 |
+
self.index_urls = index_urls
|
79 |
+
self.passwords: Dict[str, AuthInfo] = {}
|
80 |
+
# When the user is prompted to enter credentials and keyring is
|
81 |
+
# available, we will offer to save them. If the user accepts,
|
82 |
+
        # this value is set to the credentials they entered. After the
        # request authenticates, the caller should call
        # ``save_credentials`` to save these.
        self._credentials_to_save: Optional[Credentials] = None

    def _get_index_url(self, url: str) -> Optional[str]:
        """Return the original index URL matching the requested URL.

        Cached or dynamically generated credentials may work against
        the original index URL rather than just the netloc.

        The provided url should have had its username and password
        removed already. If the original index url had credentials then
        they will be included in the return value.

        Returns None if no matching index was found, or if --no-index
        was specified by the user.
        """
        if not url or not self.index_urls:
            return None

        for u in self.index_urls:
            prefix = remove_auth_from_url(u).rstrip("/") + "/"
            if url.startswith(prefix):
                return u
        return None

    def _get_new_credentials(
        self,
        original_url: str,
        allow_netrc: bool = True,
        allow_keyring: bool = False,
    ) -> AuthInfo:
        """Find and return credentials for the specified URL."""
        # Split the credentials and netloc from the url.
        url, netloc, url_user_password = split_auth_netloc_from_url(
            original_url,
        )

        # Start with the credentials embedded in the url
        username, password = url_user_password
        if username is not None and password is not None:
            logger.debug("Found credentials in url for %s", netloc)
            return url_user_password

        # Find a matching index url for this request
        index_url = self._get_index_url(url)
        if index_url:
            # Split the credentials from the url.
            index_info = split_auth_netloc_from_url(index_url)
            if index_info:
                index_url, _, index_url_user_password = index_info
                logger.debug("Found index url %s", index_url)

        # If an index URL was found, try its embedded credentials
        if index_url and index_url_user_password[0] is not None:
            username, password = index_url_user_password
            if username is not None and password is not None:
                logger.debug("Found credentials in index url for %s", netloc)
                return index_url_user_password

        # Get creds from netrc if we still don't have them
        if allow_netrc:
            netrc_auth = get_netrc_auth(original_url)
            if netrc_auth:
                logger.debug("Found credentials in netrc for %s", netloc)
                return netrc_auth

        # If we don't have a password and keyring is available, use it.
        if allow_keyring:
            # The index url is more specific than the netloc, so try it first
            # fmt: off
            kr_auth = (
                get_keyring_auth(index_url, username) or
                get_keyring_auth(netloc, username)
            )
            # fmt: on
            if kr_auth:
                logger.debug("Found credentials in keyring for %s", netloc)
                return kr_auth

        return username, password

    def _get_url_and_credentials(
        self, original_url: str
    ) -> Tuple[str, Optional[str], Optional[str]]:
        """Return the credentials to use for the provided URL.

        If allowed, netrc and keyring may be used to obtain the
        correct credentials.

        Returns (url_without_credentials, username, password). Note
        that even if the original URL contains credentials, this
        function may return a different username and password.
        """
        url, netloc, _ = split_auth_netloc_from_url(original_url)

        # Try to get credentials from original url
        username, password = self._get_new_credentials(original_url)

        # If credentials not found, use any stored credentials for this netloc.
        # Do this if either the username or the password is missing.
        # This accounts for the situation in which the user has specified
        # the username in the index url, but the password comes from keyring.
        if (username is None or password is None) and netloc in self.passwords:
            un, pw = self.passwords[netloc]
            # It is possible that the cached credentials are for a different username,
            # in which case the cache should be ignored.
            if username is None or username == un:
                username, password = un, pw

        if username is not None or password is not None:
            # Convert the username and password if they're None, so that
            # this netloc will show up as "cached" in the conditional above.
            # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
            # cache the value that is going to be used.
            username = username or ""
            password = password or ""

            # Store any acquired credentials.
            self.passwords[netloc] = (username, password)

        assert (
            # Credentials were found
            (username is not None and password is not None)
            # Credentials were not found
            or (username is None and password is None)
        ), f"Could not load credentials from url: {original_url}"

        return url, username, password

    def __call__(self, req: Request) -> Request:
        # Get credentials for this request
        url, username, password = self._get_url_and_credentials(req.url)

        # Set the url of the request to the url without any credentials
        req.url = url

        if username is not None and password is not None:
            # Send the basic auth with this request
            req = HTTPBasicAuth(username, password)(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    # Factored out to allow for easy patching in tests
    def _prompt_for_password(
        self, netloc: str
    ) -> Tuple[Optional[str], Optional[str], bool]:
        username = ask_input(f"User for {netloc}: ")
        if not username:
            return None, None, False
        auth = get_keyring_auth(netloc, username)
        if auth and auth[0] is not None and auth[1] is not None:
            return auth[0], auth[1], False
        password = ask_password("Password: ")
        return username, password, True

    # Factored out to allow for easy patching in tests
    def _should_save_password_to_keyring(self) -> bool:
        if not keyring:
            return False
        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

    def handle_401(self, resp: Response, **kwargs: Any) -> Response:
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib.parse.urlparse(resp.url)

        # Query the keyring for credentials:
        username, password = self._get_new_credentials(
            resp.url,
            allow_netrc=False,
            allow_keyring=True,
        )

        # Prompt the user for a new username and password
        save = False
        if not username and not password:
            username, password, save = self._prompt_for_password(parsed.netloc)

        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)

            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = (parsed.netloc, username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def warn_on_401(self, resp: Response, **kwargs: Any) -> None:
        """Response callback to warn about incorrect credentials."""
        if resp.status_code == 401:
            logger.warning(
                "401 Error, Credentials not correct for %s",
                resp.request.url,
            )

    def save_credentials(self, resp: Response, **kwargs: Any) -> None:
        """Response callback to save credentials on success."""
        assert keyring is not None, "should never reach here without keyring"
        if not keyring:
            return

        creds = self._credentials_to_save
        self._credentials_to_save = None
        if creds and resp.status_code < 400:
            try:
                logger.info("Saving credentials to keyring")
                keyring.set_password(*creds)
            except Exception:
                logger.exception("Failed to save credentials")
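A minimal sketch of exercising this auth helper on its own. The index URL, package name, and the direct use of pip internals (which are not a public API) are assumptions for illustration only; credentials are resolved per request from the URL userinfo, the matching index URL, netrc, and, on a 401, keyring or an interactive prompt.

from pip._vendor import requests
from pip._internal.network.auth import MultiDomainBasicAuth

session = requests.Session()
# Placeholder index URL with a username embedded; the password would come from
# keyring or a prompt when the server answers 401.
session.auth = MultiDomainBasicAuth(index_urls=["https://user@pypi.example.org/simple"])
resp = session.get("https://pypi.example.org/simple/sampleproject/")
print(resp.status_code)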
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/cache.py
ADDED
@@ -0,0 +1,69 @@
"""HTTP cache implementation.
"""

import os
from contextlib import contextmanager
from typing import Iterator, Optional

from pip._vendor.cachecontrol.cache import BaseCache
from pip._vendor.cachecontrol.caches import FileCache
from pip._vendor.requests.models import Response

from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import ensure_dir


def is_from_cache(response: Response) -> bool:
    return getattr(response, "from_cache", False)


@contextmanager
def suppressed_cache_errors() -> Iterator[None]:
    """If we can't access the cache then we can just skip caching and process
    requests as if caching wasn't enabled.
    """
    try:
        yield
    except OSError:
        pass


class SafeFileCache(BaseCache):
    """
    A file based cache which is safe to use even when the target directory may
    not be accessible or writable.
    """

    def __init__(self, directory: str) -> None:
        assert directory is not None, "Cache directory must not be None."
        super().__init__()
        self.directory = directory

    def _get_cache_path(self, name: str) -> str:
        # From cachecontrol.caches.file_cache.FileCache._fn, brought into our
        # class for backwards-compatibility and to avoid using a non-public
        # method.
        hashed = FileCache.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key: str) -> Optional[bytes]:
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            with open(path, "rb") as f:
                return f.read()

    def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))

            with adjacent_tmp_file(path) as f:
                f.write(value)

            replace(f.name, path)

    def delete(self, key: str) -> None:
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            os.remove(path)
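A small sketch, assuming pip's vendored cachecontrol is importable and /tmp/pip-demo-cache is a writable path, of how SafeFileCache is meant to be plugged into a caching adapter.

from pip._vendor import requests
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._internal.network.cache import SafeFileCache

session = requests.Session()
# Cached responses land under the assumed /tmp/pip-demo-cache directory;
# unreadable or unwritable paths are silently ignored via suppressed_cache_errors().
adapter = CacheControlAdapter(cache=SafeFileCache("/tmp/pip-demo-cache"))
session.mount("https://", adapter)
resp = session.get("https://pypi.org/simple/")
print(resp.status_code, getattr(resp, "from_cache", False))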
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/download.py
ADDED
@@ -0,0 +1,185 @@
"""Download files with progress indicators.
"""
import cgi
import logging
import mimetypes
import os
from typing import Iterable, Optional, Tuple

from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.cli.progress_bars import get_download_progress_renderer
from pip._internal.exceptions import NetworkConnectionError
from pip._internal.models.index import PyPI
from pip._internal.models.link import Link
from pip._internal.network.cache import is_from_cache
from pip._internal.network.session import PipSession
from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
from pip._internal.utils.misc import format_size, redact_auth_from_url, splitext

logger = logging.getLogger(__name__)


def _get_http_response_size(resp: Response) -> Optional[int]:
    try:
        return int(resp.headers["content-length"])
    except (ValueError, KeyError, TypeError):
        return None


def _prepare_download(
    resp: Response,
    link: Link,
    progress_bar: str,
) -> Iterable[bytes]:
    total_length = _get_http_response_size(resp)

    if link.netloc == PyPI.file_storage_domain:
        url = link.show_url
    else:
        url = link.url_without_fragment

    logged_url = redact_auth_from_url(url)

    if total_length:
        logged_url = "{} ({})".format(logged_url, format_size(total_length))

    if is_from_cache(resp):
        logger.info("Using cached %s", logged_url)
    else:
        logger.info("Downloading %s", logged_url)

    if logger.getEffectiveLevel() > logging.INFO:
        show_progress = False
    elif is_from_cache(resp):
        show_progress = False
    elif not total_length:
        show_progress = True
    elif total_length > (40 * 1000):
        show_progress = True
    else:
        show_progress = False

    chunks = response_chunks(resp, CONTENT_CHUNK_SIZE)

    if not show_progress:
        return chunks

    renderer = get_download_progress_renderer(bar_type=progress_bar, size=total_length)
    return renderer(chunks)


def sanitize_content_filename(filename: str) -> str:
    """
    Sanitize the "filename" value from a Content-Disposition header.
    """
    return os.path.basename(filename)


def parse_content_disposition(content_disposition: str, default_filename: str) -> str:
    """
    Parse the "filename" value from a Content-Disposition header, and
    return the default filename if the result is empty.
    """
    _type, params = cgi.parse_header(content_disposition)
    filename = params.get("filename")
    if filename:
        # We need to sanitize the filename to prevent directory traversal
        # in case the filename contains ".." path parts.
        filename = sanitize_content_filename(filename)
    return filename or default_filename


def _get_http_response_filename(resp: Response, link: Link) -> str:
    """Get an ideal filename from the given HTTP response, falling back to
    the link filename if not provided.
    """
    filename = link.filename  # fallback
    # Have a look at the Content-Disposition header for a better guess
    content_disposition = resp.headers.get("content-disposition")
    if content_disposition:
        filename = parse_content_disposition(content_disposition, filename)
    ext: Optional[str] = splitext(filename)[1]
    if not ext:
        ext = mimetypes.guess_extension(resp.headers.get("content-type", ""))
        if ext:
            filename += ext
    if not ext and link.url != resp.url:
        ext = os.path.splitext(resp.url)[1]
        if ext:
            filename += ext
    return filename


def _http_get_download(session: PipSession, link: Link) -> Response:
    target_url = link.url.split("#", 1)[0]
    resp = session.get(target_url, headers=HEADERS, stream=True)
    raise_for_status(resp)
    return resp


class Downloader:
    def __init__(
        self,
        session: PipSession,
        progress_bar: str,
    ) -> None:
        self._session = session
        self._progress_bar = progress_bar

    def __call__(self, link: Link, location: str) -> Tuple[str, str]:
        """Download the file given by link into location."""
        try:
            resp = _http_get_download(self._session, link)
        except NetworkConnectionError as e:
            assert e.response is not None
            logger.critical(
                "HTTP error %s while getting %s", e.response.status_code, link
            )
            raise

        filename = _get_http_response_filename(resp, link)
        filepath = os.path.join(location, filename)

        chunks = _prepare_download(resp, link, self._progress_bar)
        with open(filepath, "wb") as content_file:
            for chunk in chunks:
                content_file.write(chunk)
        content_type = resp.headers.get("Content-Type", "")
        return filepath, content_type


class BatchDownloader:
    def __init__(
        self,
        session: PipSession,
        progress_bar: str,
    ) -> None:
        self._session = session
        self._progress_bar = progress_bar

    def __call__(
        self, links: Iterable[Link], location: str
    ) -> Iterable[Tuple[Link, Tuple[str, str]]]:
        """Download the files given by links into location."""
        for link in links:
            try:
                resp = _http_get_download(self._session, link)
            except NetworkConnectionError as e:
                assert e.response is not None
                logger.critical(
                    "HTTP error %s while getting %s",
                    e.response.status_code,
                    link,
                )
                raise

            filename = _get_http_response_filename(resp, link)
            filepath = os.path.join(location, filename)

            chunks = _prepare_download(resp, link, self._progress_bar)
            with open(filepath, "wb") as content_file:
                for chunk in chunks:
                    content_file.write(chunk)
            content_type = resp.headers.get("Content-Type", "")
            yield link, (filepath, content_type)
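A minimal sketch of driving Downloader directly. The wheel URL is a placeholder (the "..." is not a real path segment), and a bare PipSession() without pip's usual retry/cache options is an assumption made only to keep the example short.

import tempfile
from pip._internal.models.link import Link
from pip._internal.network.download import Downloader
from pip._internal.network.session import PipSession

session = PipSession()
download = Downloader(session, progress_bar="on")
# Placeholder URL; substitute a real file URL from an index.
link = Link("https://files.pythonhosted.org/packages/.../sampleproject-3.0.0-py3-none-any.whl")
with tempfile.TemporaryDirectory() as target:
    filepath, content_type = download(link, target)
    print(filepath, content_type)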
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/lazy_wheel.py
ADDED
@@ -0,0 +1,210 @@
"""Lazy ZIP over HTTP"""

__all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]

from bisect import bisect_left, bisect_right
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from typing import Any, Dict, Iterator, List, Optional, Tuple
from zipfile import BadZipfile, ZipFile

from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
from pip._internal.network.session import PipSession
from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks


class HTTPRangeRequestUnsupported(Exception):
    pass


def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
    """Return a distribution object from the given wheel URL.

    This uses HTTP range requests to only fetch the portion of the wheel
    containing metadata, just enough for the object to be constructed.
    If such requests are not supported, HTTPRangeRequestUnsupported
    is raised.
    """
    with LazyZipOverHTTP(url, session) as zf:
        # For read-only ZIP files, ZipFile only needs methods read,
        # seek, seekable and tell, not the whole IO protocol.
        wheel = MemoryWheel(zf.name, zf)  # type: ignore
        # After context manager exit, wheel.name
        # is an invalid file by intention.
        return get_wheel_distribution(wheel, canonicalize_name(name))


class LazyZipOverHTTP:
    """File-like object mapped to a ZIP file over HTTP.

    This uses HTTP range requests to lazily fetch the file's content,
    which is supposed to be fed to ZipFile. If such requests are not
    supported by the server, raise HTTPRangeRequestUnsupported
    during initialization.
    """

    def __init__(
        self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
    ) -> None:
        head = session.head(url, headers=HEADERS)
        raise_for_status(head)
        assert head.status_code == 200
        self._session, self._url, self._chunk_size = session, url, chunk_size
        self._length = int(head.headers["Content-Length"])
        self._file = NamedTemporaryFile()
        self.truncate(self._length)
        self._left: List[int] = []
        self._right: List[int] = []
        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
            raise HTTPRangeRequestUnsupported("range request is not supported")
        self._check_zip()

    @property
    def mode(self) -> str:
        """Opening mode, which is always rb."""
        return "rb"

    @property
    def name(self) -> str:
        """Path to the underlying file."""
        return self._file.name

    def seekable(self) -> bool:
        """Return whether random access is supported, which is True."""
        return True

    def close(self) -> None:
        """Close the file."""
        self._file.close()

    @property
    def closed(self) -> bool:
        """Whether the file is closed."""
        return self._file.closed

    def read(self, size: int = -1) -> bytes:
        """Read up to size bytes from the object and return them.

        As a convenience, if size is unspecified or -1,
        all bytes until EOF are returned. Fewer than
        size bytes may be returned if EOF is reached.
        """
        download_size = max(size, self._chunk_size)
        start, length = self.tell(), self._length
        stop = length if size < 0 else min(start + download_size, length)
        start = max(0, stop - download_size)
        self._download(start, stop - 1)
        return self._file.read(size)

    def readable(self) -> bool:
        """Return whether the file is readable, which is True."""
        return True

    def seek(self, offset: int, whence: int = 0) -> int:
        """Change stream position and return the new absolute position.

        Seek to offset relative position indicated by whence:
        * 0: Start of stream (the default). pos should be >= 0;
        * 1: Current position - pos may be negative;
        * 2: End of stream - pos usually negative.
        """
        return self._file.seek(offset, whence)

    def tell(self) -> int:
        """Return the current position."""
        return self._file.tell()

    def truncate(self, size: Optional[int] = None) -> int:
        """Resize the stream to the given size in bytes.

        If size is unspecified resize to the current position.
        The current stream position isn't changed.

        Return the new file size.
        """
        return self._file.truncate(size)

    def writable(self) -> bool:
        """Return False."""
        return False

    def __enter__(self) -> "LazyZipOverHTTP":
        self._file.__enter__()
        return self

    def __exit__(self, *exc: Any) -> Optional[bool]:
        return self._file.__exit__(*exc)

    @contextmanager
    def _stay(self) -> Iterator[None]:
        """Return a context manager keeping the position.

        At the end of the block, seek back to original position.
        """
        pos = self.tell()
        try:
            yield
        finally:
            self.seek(pos)

    def _check_zip(self) -> None:
        """Check and download until the file is a valid ZIP."""
        end = self._length - 1
        for start in reversed(range(0, end, self._chunk_size)):
            self._download(start, end)
            with self._stay():
                try:
                    # For read-only ZIP files, ZipFile only needs
                    # methods read, seek, seekable and tell.
                    ZipFile(self)  # type: ignore
                except BadZipfile:
                    pass
                else:
                    break

    def _stream_response(
        self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
    ) -> Response:
        """Return HTTP response to a range request from start to end."""
        headers = base_headers.copy()
        headers["Range"] = f"bytes={start}-{end}"
        # TODO: Get range requests to be correctly cached
        headers["Cache-Control"] = "no-cache"
        return self._session.get(self._url, headers=headers, stream=True)

    def _merge(
        self, start: int, end: int, left: int, right: int
    ) -> Iterator[Tuple[int, int]]:
        """Return an iterator of intervals to be fetched.

        Args:
            start (int): Start of needed interval
            end (int): End of needed interval
            left (int): Index of first overlapping downloaded data
            right (int): Index after last overlapping downloaded data
        """
        lslice, rslice = self._left[left:right], self._right[left:right]
        i = start = min([start] + lslice[:1])
        end = max([end] + rslice[-1:])
        for j, k in zip(lslice, rslice):
            if j > i:
                yield i, j - 1
            i = k + 1
        if i <= end:
            yield i, end
        self._left[left:right], self._right[left:right] = [start], [end]

    def _download(self, start: int, end: int) -> None:
        """Download bytes from start to end inclusively."""
        with self._stay():
            left = bisect_left(self._right, start)
            right = bisect_right(self._left, end)
            for start, end in self._merge(start, end, left, right):
                response = self._stream_response(start, end)
                response.raise_for_status()
                self.seek(start)
                for chunk in response_chunks(response, self._chunk_size):
                    self._file.write(chunk)
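A short sketch of the lazy-wheel entry point above. The wheel URL is a placeholder (the "..." is not a real path segment) and the assumption is that the file host supports HTTP range requests; if it does not, HTTPRangeRequestUnsupported is raised.

from pip._internal.network.lazy_wheel import (
    HTTPRangeRequestUnsupported,
    dist_from_wheel_url,
)
from pip._internal.network.session import PipSession

session = PipSession()
# Placeholder URL; substitute a real wheel URL from an index.
url = "https://files.pythonhosted.org/packages/.../sampleproject-3.0.0-py3-none-any.whl"
try:
    # Only the ZIP central directory and the metadata files are fetched.
    dist = dist_from_wheel_url("sampleproject", url, session)
    print(dist.metadata["Name"], dist.version)
except HTTPRangeRequestUnsupported:
    print("server does not support range requests")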
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/session.py
ADDED
@@ -0,0 +1,454 @@
"""PipSession and supporting code, containing all pip-specific
network request configuration and behavior.
"""

import email.utils
import io
import ipaddress
import json
import logging
import mimetypes
import os
import platform
import shutil
import subprocess
import sys
import urllib.parse
import warnings
from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union

from pip._vendor import requests, urllib3
from pip._vendor.cachecontrol import CacheControlAdapter
from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
from pip._vendor.requests.models import PreparedRequest, Response
from pip._vendor.requests.structures import CaseInsensitiveDict
from pip._vendor.urllib3.connectionpool import ConnectionPool
from pip._vendor.urllib3.exceptions import InsecureRequestWarning

from pip import __version__
from pip._internal.metadata import get_default_environment
from pip._internal.models.link import Link
from pip._internal.network.auth import MultiDomainBasicAuth
from pip._internal.network.cache import SafeFileCache

# Import ssl from compat so the initial import occurs in only one place.
from pip._internal.utils.compat import has_tls
from pip._internal.utils.glibc import libc_ver
from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
from pip._internal.utils.urls import url_to_path

logger = logging.getLogger(__name__)

SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]


# Ignore warning raised when using --trusted-host.
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


SECURE_ORIGINS: List[SecureOrigin] = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]


# These are environment variables present when running under various
# CI systems. For each variable, some CI systems that use the variable
# are indicated. The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    "BUILD_BUILDID",
    # Jenkins
    "BUILD_ID",
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    "CI",
    # Explicit environment variable.
    "PIP_IS_CI",
)


def looks_like_ci() -> bool:
    """
    Return whether it looks like pip is running under CI.
    """
    # We don't use the method of checking for a tty (e.g. using isatty())
    # because some CI systems mimic a tty (e.g. Travis CI). Thus that
    # method doesn't provide definitive information in either direction.
    return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)


def user_agent() -> str:
    """
    Return a string representing the user agent.
    """
    data: Dict[str, Any] = {
        "installer": {"name": "pip", "version": __version__},
        "python": platform.python_version(),
        "implementation": {
            "name": platform.python_implementation(),
        },
    }

    if data["implementation"]["name"] == "CPython":
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == "PyPy":
        pypy_version_info = sys.pypy_version_info  # type: ignore
        if pypy_version_info.releaselevel == "final":
            pypy_version_info = pypy_version_info[:3]
        data["implementation"]["version"] = ".".join(
            [str(x) for x in pypy_version_info]
        )
    elif data["implementation"]["name"] == "Jython":
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()
    elif data["implementation"]["name"] == "IronPython":
        # Complete Guess
        data["implementation"]["version"] = platform.python_version()

    if sys.platform.startswith("linux"):
        from pip._vendor import distro

        linux_distribution = distro.name(), distro.version(), distro.codename()
        distro_infos: Dict[str, Any] = dict(
            filter(
                lambda x: x[1],
                zip(["name", "version", "id"], linux_distribution),
            )
        )
        libc = dict(
            filter(
                lambda x: x[1],
                zip(["lib", "version"], libc_ver()),
            )
        )
        if libc:
            distro_infos["libc"] = libc
        if distro_infos:
            data["distro"] = distro_infos

    if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
        data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}

    if platform.system():
        data.setdefault("system", {})["name"] = platform.system()

    if platform.release():
        data.setdefault("system", {})["release"] = platform.release()

    if platform.machine():
        data["cpu"] = platform.machine()

    if has_tls():
        import _ssl as ssl

        data["openssl_version"] = ssl.OPENSSL_VERSION

    setuptools_dist = get_default_environment().get_distribution("setuptools")
    if setuptools_dist is not None:
        data["setuptools_version"] = str(setuptools_dist.version)

    if shutil.which("rustc") is not None:
        # If for any reason `rustc --version` fails, silently ignore it
        try:
            rustc_output = subprocess.check_output(
                ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
            )
        except Exception:
            pass
        else:
            if rustc_output.startswith(b"rustc "):
                # The format of `rustc --version` is:
                # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
                # We extract just the middle (1.52.1) part
                data["rustc_version"] = rustc_output.split(b" ")[1].decode()

    # Use None rather than False so as not to give the impression that
    # pip knows it is not being run under CI. Rather, it is a null or
    # inconclusive result. Also, we include some value rather than no
    # value to make it easier to know that the check has been run.
    data["ci"] = True if looks_like_ci() else None

    user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
    if user_data is not None:
        data["user_data"] = user_data

    return "{data[installer][name]}/{data[installer][version]} {json}".format(
        data=data,
        json=json.dumps(data, separators=(",", ":"), sort_keys=True),
    )


class LocalFSAdapter(BaseAdapter):
    def send(
        self,
        request: PreparedRequest,
        stream: bool = False,
        timeout: Optional[Union[float, Tuple[float, float]]] = None,
        verify: Union[bool, str] = True,
        cert: Optional[Union[str, Tuple[str, str]]] = None,
        proxies: Optional[Mapping[str, str]] = None,
    ) -> Response:
        pathname = url_to_path(request.url)

        resp = Response()
        resp.status_code = 200
        resp.url = request.url

        try:
            stats = os.stat(pathname)
        except OSError as exc:
            # format the exception raised as a io.BytesIO object,
            # to return a better error message:
            resp.status_code = 404
            resp.reason = type(exc).__name__
            resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
        else:
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
            resp.headers = CaseInsensitiveDict(
                {
                    "Content-Type": content_type,
                    "Content-Length": stats.st_size,
                    "Last-Modified": modified,
                }
            )

            resp.raw = open(pathname, "rb")
            resp.close = resp.raw.close

        return resp

    def close(self) -> None:
        pass


class InsecureHTTPAdapter(HTTPAdapter):
    def cert_verify(
        self,
        conn: ConnectionPool,
        url: str,
        verify: Union[bool, str],
        cert: Optional[Union[str, Tuple[str, str]]],
    ) -> None:
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)


class InsecureCacheControlAdapter(CacheControlAdapter):
    def cert_verify(
        self,
        conn: ConnectionPool,
        url: str,
        verify: Union[bool, str],
        cert: Optional[Union[str, Tuple[str, str]]],
    ) -> None:
        super().cert_verify(conn=conn, url=url, verify=False, cert=cert)


class PipSession(requests.Session):

    timeout: Optional[int] = None

    def __init__(
        self,
        *args: Any,
        retries: int = 0,
        cache: Optional[str] = None,
        trusted_hosts: Sequence[str] = (),
        index_urls: Optional[List[str]] = None,
        **kwargs: Any,
    ) -> None:
        """
        :param trusted_hosts: Domains not to emit warnings for when not using
            HTTPS.
        """
        super().__init__(*args, **kwargs)

        # Namespace the attribute with "pip_" just in case to prevent
        # possible conflicts with the base class.
        self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []

        # Attach our User Agent to the request
        self.headers["User-Agent"] = user_agent()

        # Attach our Authentication handler to the session
        self.auth = MultiDomainBasicAuth(index_urls=index_urls)

        # Create our urllib3.Retry instance which will allow us to customize
        # how we handle retries.
        retries = urllib3.Retry(
            # Set the total number of retries that a particular request can
            # have.
            total=retries,
            # A 503 error from PyPI typically means that the Fastly -> Origin
            # connection got interrupted in some way. A 503 error in general
            # is typically considered a transient error so we'll go ahead and
            # retry it.
            # A 500 may indicate transient error in Amazon S3
            # A 520 or 527 - may indicate transient error in CloudFlare
            status_forcelist=[500, 503, 520, 527],
            # Add a small amount of back off between failed requests in
            # order to prevent hammering the service.
            backoff_factor=0.25,
        )  # type: ignore

        # Our Insecure HTTPAdapter disables HTTPS validation. It does not
        # support caching so we'll use it for all http:// URLs.
        # If caching is disabled, we will also use it for
        # https:// hosts that we've marked as ignoring
        # TLS errors for (trusted-hosts).
        insecure_adapter = InsecureHTTPAdapter(max_retries=retries)

        # We want to _only_ cache responses on securely fetched origins or when
        # the host is specified as trusted. We do this because
        # we can't validate the response of an insecurely/untrusted fetched
        # origin, and we don't want someone to be able to poison the cache and
        # require manual eviction from the cache to fix it.
        if cache:
            secure_adapter = CacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
            self._trusted_host_adapter = InsecureCacheControlAdapter(
                cache=SafeFileCache(cache),
                max_retries=retries,
            )
        else:
            secure_adapter = HTTPAdapter(max_retries=retries)
            self._trusted_host_adapter = insecure_adapter

        self.mount("https://", secure_adapter)
        self.mount("http://", insecure_adapter)

        # Enable file:// urls
        self.mount("file://", LocalFSAdapter())

        for host in trusted_hosts:
            self.add_trusted_host(host, suppress_logging=True)

    def update_index_urls(self, new_index_urls: List[str]) -> None:
        """
        :param new_index_urls: New index urls to update the authentication
            handler with.
        """
        self.auth.index_urls = new_index_urls

    def add_trusted_host(
        self, host: str, source: Optional[str] = None, suppress_logging: bool = False
    ) -> None:
        """
        :param host: It is okay to provide a host that has previously been
            added.
        :param source: An optional source string, for logging where the host
            string came from.
        """
        if not suppress_logging:
            msg = f"adding trusted host: {host!r}"
            if source is not None:
                msg += f" (from {source})"
            logger.info(msg)

        host_port = parse_netloc(host)
        if host_port not in self.pip_trusted_origins:
            self.pip_trusted_origins.append(host_port)

        self.mount(
            build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
        )
        self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
        if not host_port[1]:
            self.mount(
                build_url_from_netloc(host, scheme="http") + ":",
                self._trusted_host_adapter,
            )
            # Mount wildcard ports for the same host.
            self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)

    def iter_secure_origins(self) -> Iterator[SecureOrigin]:
        yield from SECURE_ORIGINS
        for host, port in self.pip_trusted_origins:
            yield ("*", host, "*" if port is None else port)

    def is_secure_origin(self, location: Link) -> bool:
        # Determine if this url used a secure transport mechanism
        parsed = urllib.parse.urlparse(str(location))
        origin_protocol, origin_host, origin_port = (
            parsed.scheme,
            parsed.hostname,
            parsed.port,
        )

        # The protocol to use to see if the protocol matches.
        # Don't count the repository type as part of the protocol: in
        # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
        # the last scheme.)
        origin_protocol = origin_protocol.rsplit("+", 1)[-1]

        # Determine if our origin is a secure origin by looking through our
        # hardcoded list of secure origins, as well as any additional ones
        # configured on this PackageFinder instance.
        for secure_origin in self.iter_secure_origins():
            secure_protocol, secure_host, secure_port = secure_origin
            if origin_protocol != secure_protocol and secure_protocol != "*":
                continue

            try:
                addr = ipaddress.ip_address(origin_host)
                network = ipaddress.ip_network(secure_host)
            except ValueError:
                # We don't have both a valid address or a valid network, so
                # we'll check this origin against hostnames.
                if (
                    origin_host
                    and origin_host.lower() != secure_host.lower()
                    and secure_host != "*"
                ):
                    continue
            else:
                # We have a valid address and network, so see if the address
                # is contained within the network.
                if addr not in network:
                    continue

            # Check to see if the port matches.
            if (
                origin_port != secure_port
                and secure_port != "*"
                and secure_port is not None
            ):
                continue

            # If we've gotten here, then this origin matches the current
            # secure origin and we should return True
            return True

        # If we've gotten to this point, then the origin isn't secure and we
        # will not accept it as a valid location to search. We will however
        # log a warning that we are ignoring it.
        logger.warning(
            "The repository located at %s is not a trusted or secure host and "
            "is being ignored. If this repository is available via HTTPS we "
            "recommend you use HTTPS instead, otherwise you may silence "
            "this warning and allow it anyway with '--trusted-host %s'.",
            origin_host,
            origin_host,
        )

        return False

    def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
        # Allow setting a default timeout on a session
        kwargs.setdefault("timeout", self.timeout)

        # Dispatch the actual request
        return super().request(method, url, *args, **kwargs)
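A brief sketch, assuming a writable cache path and a made-up internal host, of constructing the session roughly the way pip's CLI layer does and probing the secure-origin check.

from pip._internal.models.link import Link
from pip._internal.network.session import PipSession

session = PipSession(
    retries=3,
    cache="/tmp/pip-http-cache",              # assumed path; cached via SafeFileCache
    trusted_hosts=["internal.example.org"],   # assumed host; http allowed, warning suppressed
    index_urls=["https://pypi.org/simple"],
)
session.timeout = 15
print(session.is_secure_origin(Link("http://internal.example.org/simple/")))  # True (trusted)
print(session.is_secure_origin(Link("http://203.0.113.7/simple/")))           # False, logs a warning
resp = session.get("https://pypi.org/simple/")
print(resp.status_code)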
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/utils.py
ADDED
@@ -0,0 +1,96 @@
from typing import Dict, Iterator

from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response

from pip._internal.exceptions import NetworkConnectionError

# The following comments and HTTP headers were originally added by
# Donald Stufft in git commit 22c562429a61bb77172039e480873fb239dd8c03.
#
# We use Accept-Encoding: identity here because requests defaults to
# accepting compressed responses. This breaks in a variety of ways
# depending on how the server is configured.
# - Some servers will notice that the file isn't a compressible file
#   and will leave the file alone and with an empty Content-Encoding
# - Some servers will notice that the file is already compressed and
#   will leave the file alone, adding a Content-Encoding: gzip header
# - Some servers won't notice anything at all and will take a file
#   that's already been compressed and compress it again, and set
#   the Content-Encoding: gzip header
# By setting this to request only the identity encoding we're hoping
# to eliminate the third case. Hopefully there does not exist a server
# which when given a file will notice it is already compressed and that
# you're not asking for a compressed file and will then decompress it
# before sending because if that's the case I don't think it'll ever be
# possible to make this work.
HEADERS: Dict[str, str] = {"Accept-Encoding": "identity"}


def raise_for_status(resp: Response) -> None:
    http_error_msg = ""
    if isinstance(resp.reason, bytes):
        # We attempt to decode utf-8 first because some servers
        # choose to localize their reason strings. If the string
        # isn't utf-8, we fall back to iso-8859-1 for all other
        # encodings.
        try:
            reason = resp.reason.decode("utf-8")
        except UnicodeDecodeError:
            reason = resp.reason.decode("iso-8859-1")
    else:
        reason = resp.reason

    if 400 <= resp.status_code < 500:
        http_error_msg = (
            f"{resp.status_code} Client Error: {reason} for url: {resp.url}"
        )

    elif 500 <= resp.status_code < 600:
        http_error_msg = (
            f"{resp.status_code} Server Error: {reason} for url: {resp.url}"
        )

    if http_error_msg:
        raise NetworkConnectionError(http_error_msg, response=resp)


def response_chunks(
    response: Response, chunk_size: int = CONTENT_CHUNK_SIZE
) -> Iterator[bytes]:
    """Given a requests Response, provide the data chunks."""
    try:
        # Special case for urllib3.
        for chunk in response.raw.stream(
            chunk_size,
            # We use decode_content=False here because we don't
            # want urllib3 to mess with the raw bytes we get
            # from the server. If we decompress inside of
            # urllib3 then we cannot verify the checksum
            # because the checksum will be of the compressed
            # file. This breakage will only occur if the
            # server adds a Content-Encoding header, which
            # depends on how the server was configured:
            # - Some servers will notice that the file isn't a
            #   compressible file and will leave the file alone
            #   and with an empty Content-Encoding
            # - Some servers will notice that the file is
            #   already compressed and will leave the file
            #   alone and will add a Content-Encoding: gzip
            #   header
            # - Some servers won't notice anything at all and
            #   will take a file that's already been compressed
            #   and compress it again and set the
            #   Content-Encoding: gzip header
            #
            # By setting this not to decode automatically we
            # hope to eliminate problems with the second case.
            decode_content=False,
        ):
            yield chunk
    except AttributeError:
        # Standard file-like object.
        while True:
            chunk = response.raw.read(chunk_size)
            if not chunk:
                break
            yield chunk
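A small usage sketch for the two helpers above; the URL and the output path are assumptions chosen for illustration.

from pip._internal.network.session import PipSession
from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks

session = PipSession()
resp = session.get("https://pypi.org/simple/", headers=HEADERS, stream=True)
raise_for_status(resp)  # raises NetworkConnectionError on any 4xx/5xx status
with open("/tmp/simple-index.html", "wb") as f:  # assumed output path
    for chunk in response_chunks(resp, chunk_size=8192):
        f.write(chunk)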
env-llmeval/lib/python3.10/site-packages/pip/_internal/network/xmlrpc.py
ADDED
@@ -0,0 +1,60 @@
"""xmlrpclib.Transport implementation
"""

import logging
import urllib.parse
import xmlrpc.client
from typing import TYPE_CHECKING, Tuple

from pip._internal.exceptions import NetworkConnectionError
from pip._internal.network.session import PipSession
from pip._internal.network.utils import raise_for_status

if TYPE_CHECKING:
    from xmlrpc.client import _HostType, _Marshallable

logger = logging.getLogger(__name__)


class PipXmlrpcTransport(xmlrpc.client.Transport):
    """Provide a `xmlrpclib.Transport` implementation via a `PipSession`
    object.
    """

    def __init__(
        self, index_url: str, session: PipSession, use_datetime: bool = False
    ) -> None:
        super().__init__(use_datetime)
        index_parts = urllib.parse.urlparse(index_url)
        self._scheme = index_parts.scheme
        self._session = session

    def request(
        self,
        host: "_HostType",
        handler: str,
        request_body: bytes,
        verbose: bool = False,
    ) -> Tuple["_Marshallable", ...]:
        assert isinstance(host, str)
        parts = (self._scheme, host, handler, None, None, None)
        url = urllib.parse.urlunparse(parts)
        try:
            headers = {"Content-Type": "text/xml"}
            response = self._session.post(
                url,
                data=request_body,
                headers=headers,
                stream=True,
            )
            raise_for_status(response)
            self.verbose = verbose
            return self.parse_response(response.raw)
        except NetworkConnectionError as exc:
            assert exc.response
            logger.critical(
                "HTTP error %s while getting %s",
                exc.response.status_code,
                url,
            )
            raise
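A minimal sketch of plugging this transport into the standard-library XML-RPC client, similar to how pip's old search command used it. The index URL and the changelog_last_serial method call are assumptions; PyPI has restricted much of its XML-RPC API, so the call may not succeed against the live service.

import xmlrpc.client
from pip._internal.network.session import PipSession
from pip._internal.network.xmlrpc import PipXmlrpcTransport

index_url = "https://pypi.org/pypi"  # assumed XML-RPC endpoint
session = PipSession()
transport = PipXmlrpcTransport(index_url, session)
pypi = xmlrpc.client.ServerProxy(index_url, transport)
print(pypi.changelog_last_serial())  # assumed method; availability depends on the server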
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__init__.py
ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (189 Bytes).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc
ADDED
Binary file (4 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/freeze.cpython-310.pyc
ADDED
Binary file (6.19 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc
ADDED
Binary file (14.9 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__init__.py
ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (195 Bytes).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata.cpython-310.pyc
ADDED
Binary file (1.42 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_editable.cpython-310.pyc
ADDED
Binary file (1.46 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc
ADDED
Binary file (2.37 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel.cpython-310.pyc
ADDED
Binary file (1.21 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc
ADDED
Binary file (1.44 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-310.pyc
ADDED
Binary file (2.75 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/metadata.py
ADDED
@@ -0,0 +1,39 @@
+"""Metadata generation logic for source distributions.
+"""
+
+import os
+
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import (
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+
+
+def generate_metadata(
+    build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
+) -> str:
+    """Generate metadata using mechanisms described in PEP 517.
+
+    Returns the generated metadata directory.
+    """
+    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
+
+    metadata_dir = metadata_tmpdir.path
+
+    with build_env:
+        # Note that Pep517HookCaller implements a fallback for
+        # prepare_metadata_for_build_wheel, so we don't have to
+        # consider the possibility that this hook doesn't exist.
+        runner = runner_with_spinner_message("Preparing metadata (pyproject.toml)")
+        with backend.subprocess_runner(runner):
+            try:
+                distinfo_dir = backend.prepare_metadata_for_build_wheel(metadata_dir)
+            except InstallationSubprocessError as error:
+                raise MetadataGenerationFailed(package_details=details) from error
+
+    return os.path.join(metadata_dir, distinfo_dir)
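For context, a minimal sketch of driving the generate_metadata helper above by hand. It assumes the pip-internal interfaces shown in this diff are importable from the environment; SRC_DIR and the setuptools backend name are illustrative assumptions, not part of the upload.

# Hedged sketch only: pip's _internal/_vendor modules are private and may change.
import os
import pprint

from pip._internal.build_env import BuildEnvironment
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.utils.temp_dir import global_tempdir_manager
from pip._vendor.pep517.wrappers import Pep517HookCaller

SRC_DIR = os.path.abspath("my_project")  # hypothetical checkout containing pyproject.toml

# generate_metadata allocates a globally managed TempDirectory, so a global
# temp-dir manager needs to be active around the call.
with global_tempdir_manager():
    backend = Pep517HookCaller(SRC_DIR, "setuptools.build_meta")  # assumed backend
    distinfo_dir = generate_metadata(
        build_env=BuildEnvironment(), backend=backend, details=SRC_DIR
    )
    pprint.pprint(sorted(os.listdir(distinfo_dir)))  # METADATA, etc.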
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_editable.py
ADDED
@@ -0,0 +1,41 @@
+"""Metadata generation logic for source distributions.
+"""
+
+import os
+
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.exceptions import (
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
+from pip._internal.utils.subprocess import runner_with_spinner_message
+from pip._internal.utils.temp_dir import TempDirectory
+
+
+def generate_editable_metadata(
+    build_env: BuildEnvironment, backend: Pep517HookCaller, details: str
+) -> str:
+    """Generate metadata using mechanisms described in PEP 660.
+
+    Returns the generated metadata directory.
+    """
+    metadata_tmpdir = TempDirectory(kind="modern-metadata", globally_managed=True)
+
+    metadata_dir = metadata_tmpdir.path
+
+    with build_env:
+        # Note that Pep517HookCaller implements a fallback for
+        # prepare_metadata_for_build_wheel/editable, so we don't have to
+        # consider the possibility that this hook doesn't exist.
+        runner = runner_with_spinner_message(
+            "Preparing editable metadata (pyproject.toml)"
+        )
+        with backend.subprocess_runner(runner):
+            try:
+                distinfo_dir = backend.prepare_metadata_for_build_editable(metadata_dir)
+            except InstallationSubprocessError as error:
+                raise MetadataGenerationFailed(package_details=details) from error
+
+    return os.path.join(metadata_dir, distinfo_dir)
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/metadata_legacy.py
ADDED
@@ -0,0 +1,74 @@
+"""Metadata generation logic for legacy source distributions.
+"""
+
+import logging
+import os
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.exceptions import (
+    InstallationError,
+    InstallationSubprocessError,
+    MetadataGenerationFailed,
+)
+from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
+from pip._internal.utils.subprocess import call_subprocess
+from pip._internal.utils.temp_dir import TempDirectory
+
+logger = logging.getLogger(__name__)
+
+
+def _find_egg_info(directory: str) -> str:
+    """Find an .egg-info subdirectory in `directory`."""
+    filenames = [f for f in os.listdir(directory) if f.endswith(".egg-info")]
+
+    if not filenames:
+        raise InstallationError(f"No .egg-info directory found in {directory}")
+
+    if len(filenames) > 1:
+        raise InstallationError(
+            "More than one .egg-info directory found in {}".format(directory)
+        )
+
+    return os.path.join(directory, filenames[0])
+
+
+def generate_metadata(
+    build_env: BuildEnvironment,
+    setup_py_path: str,
+    source_dir: str,
+    isolated: bool,
+    details: str,
+) -> str:
+    """Generate metadata using setup.py-based defacto mechanisms.
+
+    Returns the generated metadata directory.
+    """
+    logger.debug(
+        "Running setup.py (path:%s) egg_info for package %s",
+        setup_py_path,
+        details,
+    )
+
+    egg_info_dir = TempDirectory(kind="pip-egg-info", globally_managed=True).path
+
+    args = make_setuptools_egg_info_args(
+        setup_py_path,
+        egg_info_dir=egg_info_dir,
+        no_user_config=isolated,
+    )
+
+    with build_env:
+        with open_spinner("Preparing metadata (setup.py)") as spinner:
+            try:
+                call_subprocess(
+                    args,
+                    cwd=source_dir,
+                    command_desc="python setup.py egg_info",
+                    spinner=spinner,
+                )
+            except InstallationSubprocessError as error:
+                raise MetadataGenerationFailed(package_details=details) from error
+
+    # Return the .egg-info directory.
+    return _find_egg_info(egg_info_dir)
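As a rough illustration of the setup.py invocation the legacy path shells out to, the argument list can be inspected without running anything; the paths below are made-up placeholders.

# Hedged sketch: build (but do not run) the egg_info command used above.
import tempfile

from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args

args = make_setuptools_egg_info_args(
    "/tmp/my_project/setup.py",       # hypothetical setup.py path
    egg_info_dir=tempfile.mkdtemp(),  # stand-in for the pip-egg-info temp dir
    no_user_config=True,              # what isolated=True maps to above
)
print(args)  # roughly: [<python>, "-c", <setuptools shim>, "egg_info", "--egg-base", <dir>]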
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/wheel.py
ADDED
@@ -0,0 +1,37 @@
+import logging
+import os
+from typing import Optional
+
+from pip._vendor.pep517.wrappers import Pep517HookCaller
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+def build_wheel_pep517(
+    name: str,
+    backend: Pep517HookCaller,
+    metadata_directory: str,
+    tempd: str,
+) -> Optional[str]:
+    """Build one InstallRequirement using the PEP 517 build process.
+
+    Returns path to wheel if successfully built. Otherwise, returns None.
+    """
+    assert metadata_directory is not None
+    try:
+        logger.debug("Destination directory: %s", tempd)
+
+        runner = runner_with_spinner_message(
+            f"Building wheel for {name} (pyproject.toml)"
+        )
+        with backend.subprocess_runner(runner):
+            wheel_name = backend.build_wheel(
+                tempd,
+                metadata_directory=metadata_directory,
+            )
+    except Exception:
+        logger.error("Failed building wheel for %s", name)
+        return None
+    return os.path.join(tempd, wheel_name)
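Continuing the earlier metadata sketch, a self-contained example of chaining it into build_wheel_pep517; again, the project path and backend name are assumptions for illustration only.

# Hedged sketch: PEP 517 metadata + wheel build via pip's internal helpers.
import os
import tempfile

from pip._internal.build_env import BuildEnvironment
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.wheel import build_wheel_pep517
from pip._internal.utils.temp_dir import global_tempdir_manager
from pip._vendor.pep517.wrappers import Pep517HookCaller

SRC_DIR = os.path.abspath("my_project")  # hypothetical source tree

with global_tempdir_manager():
    backend = Pep517HookCaller(SRC_DIR, "setuptools.build_meta")  # assumed backend
    distinfo = generate_metadata(
        build_env=BuildEnvironment(), backend=backend, details=SRC_DIR
    )
    wheel_path = build_wheel_pep517(
        name="my-project",           # display name used only for log messages
        backend=backend,
        metadata_directory=distinfo,
        tempd=tempfile.mkdtemp(),    # where the backend writes the wheel
    )
    print(wheel_path or "build failed; see logged error")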
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_editable.py
ADDED
@@ -0,0 +1,46 @@
+import logging
+import os
+from typing import Optional
+
+from pip._vendor.pep517.wrappers import HookMissing, Pep517HookCaller
+
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+def build_wheel_editable(
+    name: str,
+    backend: Pep517HookCaller,
+    metadata_directory: str,
+    tempd: str,
+) -> Optional[str]:
+    """Build one InstallRequirement using the PEP 660 build process.
+
+    Returns path to wheel if successfully built. Otherwise, returns None.
+    """
+    assert metadata_directory is not None
+    try:
+        logger.debug("Destination directory: %s", tempd)
+
+        runner = runner_with_spinner_message(
+            f"Building editable for {name} (pyproject.toml)"
+        )
+        with backend.subprocess_runner(runner):
+            try:
+                wheel_name = backend.build_editable(
+                    tempd,
+                    metadata_directory=metadata_directory,
+                )
+            except HookMissing as e:
+                logger.error(
+                    "Cannot build editable %s because the build "
+                    "backend does not have the %s hook",
+                    name,
+                    e,
+                )
+                return None
+    except Exception:
+        logger.error("Failed building editable for %s", name)
+        return None
+    return os.path.join(tempd, wheel_name)
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/build/wheel_legacy.py
ADDED
@@ -0,0 +1,102 @@
+import logging
+import os.path
+from typing import List, Optional
+
+from pip._internal.cli.spinners import open_spinner
+from pip._internal.utils.setuptools_build import make_setuptools_bdist_wheel_args
+from pip._internal.utils.subprocess import call_subprocess, format_command_args
+
+logger = logging.getLogger(__name__)
+
+
+def format_command_result(
+    command_args: List[str],
+    command_output: str,
+) -> str:
+    """Format command information for logging."""
+    command_desc = format_command_args(command_args)
+    text = f"Command arguments: {command_desc}\n"
+
+    if not command_output:
+        text += "Command output: None"
+    elif logger.getEffectiveLevel() > logging.DEBUG:
+        text += "Command output: [use --verbose to show]"
+    else:
+        if not command_output.endswith("\n"):
+            command_output += "\n"
+        text += f"Command output:\n{command_output}"
+
+    return text
+
+
+def get_legacy_build_wheel_path(
+    names: List[str],
+    temp_dir: str,
+    name: str,
+    command_args: List[str],
+    command_output: str,
+) -> Optional[str]:
+    """Return the path to the wheel in the temporary build directory."""
+    # Sort for determinism.
+    names = sorted(names)
+    if not names:
+        msg = ("Legacy build of wheel for {!r} created no files.\n").format(name)
+        msg += format_command_result(command_args, command_output)
+        logger.warning(msg)
+        return None
+
+    if len(names) > 1:
+        msg = (
+            "Legacy build of wheel for {!r} created more than one file.\n"
+            "Filenames (choosing first): {}\n"
+        ).format(name, names)
+        msg += format_command_result(command_args, command_output)
+        logger.warning(msg)
+
+    return os.path.join(temp_dir, names[0])
+
+
+def build_wheel_legacy(
+    name: str,
+    setup_py_path: str,
+    source_dir: str,
+    global_options: List[str],
+    build_options: List[str],
+    tempd: str,
+) -> Optional[str]:
+    """Build one unpacked package using the "legacy" build process.
+
+    Returns path to wheel if successfully built. Otherwise, returns None.
+    """
+    wheel_args = make_setuptools_bdist_wheel_args(
+        setup_py_path,
+        global_options=global_options,
+        build_options=build_options,
+        destination_dir=tempd,
+    )
+
+    spin_message = f"Building wheel for {name} (setup.py)"
+    with open_spinner(spin_message) as spinner:
+        logger.debug("Destination directory: %s", tempd)
+
+        try:
+            output = call_subprocess(
+                wheel_args,
+                command_desc="python setup.py bdist_wheel",
+                cwd=source_dir,
+                spinner=spinner,
+            )
+        except Exception:
+            spinner.finish("error")
+            logger.error("Failed building wheel for %s", name)
+            return None
+
+        names = os.listdir(tempd)
+        wheel_path = get_legacy_build_wheel_path(
+            names=names,
+            temp_dir=tempd,
+            name=name,
+            command_args=wheel_args,
+            command_output=output,
+        )
+        return wheel_path
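The two helper functions at the top of wheel_legacy.py can be exercised without spawning a build; a small sketch with synthetic file names shows how an ambiguous result is resolved (first name after sorting) while a warning is logged.

# Hedged sketch: resolve a legacy build directory that contains two artifacts.
import logging
import os
import tempfile

from pip._internal.operations.build.wheel_legacy import get_legacy_build_wheel_path

logging.basicConfig(level=logging.INFO)  # make the warning visible

tmp = tempfile.mkdtemp()
for fake in ("pkg-1.0-py3-none-any.whl", "pkg-1.0.tar.gz"):  # synthetic outputs
    open(os.path.join(tmp, fake), "w").close()

wheel = get_legacy_build_wheel_path(
    names=os.listdir(tmp),
    temp_dir=tmp,
    name="pkg",
    command_args=["python", "setup.py", "bdist_wheel"],  # illustrative only
    command_output="",
)
print(wheel)  # .../pkg-1.0-py3-none-any.whl (the first name after sorting)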
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__init__.py
ADDED
@@ -0,0 +1,2 @@
+"""For modules related to installing packages.
+"""
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (253 Bytes).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/editable_legacy.cpython-310.pyc
ADDED
Binary file (1.54 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/legacy.cpython-310.pyc
ADDED
Binary file (3.32 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/operations/install/__pycache__/wheel.cpython-310.pyc
ADDED
Binary file (21.1 kB).