Add files using upload-large-folder tool
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- llmeval-env/lib/python3.10/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz +3 -0
- llmeval-env/lib/python3.10/site-packages/distutils-precedence.pth +3 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__init__.py +520 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py +169 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py +219 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/base.py +52 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py +739 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py +248 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/reporter.py +68 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__init__.py +15 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/git.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/bazaar.py +101 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/git.py +526 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/mercurial.py +163 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/subversion.py +324 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/versioncontrol.py +705 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/__init__.py +111 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/distro.py +1386 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__init__.py +54 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/_version.py +1 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/exceptions.py +48 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/ext.py +193 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/fallback.py +1012 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc +0 -0
llmeval-env/lib/python3.10/site-packages/dateutil/zoneinfo/dateutil-zoneinfo.tar.gz
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:d3ea52e7b6e968de0d884df1288193596fa95b803db4f92a18279a7398004475
|
3 |
+
size 156400
|
llmeval-env/lib/python3.10/site-packages/distutils-precedence.pth
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:7ea7ffef3fe2a117ee12c68ed6553617f0d7fd2f0590257c25c484959a3b7373
|
3 |
+
size 152
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc
ADDED
Binary file (1.86 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc
ADDED
Binary file (4.45 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc
ADDED
Binary file (1.6 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__init__.py
ADDED
@@ -0,0 +1,520 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import functools
|
2 |
+
import logging
|
3 |
+
import os
|
4 |
+
import pathlib
|
5 |
+
import sys
|
6 |
+
import sysconfig
|
7 |
+
from typing import Any, Dict, Iterator, List, Optional, Tuple
|
8 |
+
|
9 |
+
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
10 |
+
from pip._internal.utils.compat import WINDOWS
|
11 |
+
from pip._internal.utils.deprecation import deprecated
|
12 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
13 |
+
|
14 |
+
from . import _distutils, _sysconfig
|
15 |
+
from .base import (
|
16 |
+
USER_CACHE_DIR,
|
17 |
+
get_major_minor_version,
|
18 |
+
get_src_prefix,
|
19 |
+
is_osx_framework,
|
20 |
+
site_packages,
|
21 |
+
user_site,
|
22 |
+
)
|
23 |
+
|
24 |
+
__all__ = [
|
25 |
+
"USER_CACHE_DIR",
|
26 |
+
"get_bin_prefix",
|
27 |
+
"get_bin_user",
|
28 |
+
"get_major_minor_version",
|
29 |
+
"get_platlib",
|
30 |
+
"get_prefixed_libs",
|
31 |
+
"get_purelib",
|
32 |
+
"get_scheme",
|
33 |
+
"get_src_prefix",
|
34 |
+
"site_packages",
|
35 |
+
"user_site",
|
36 |
+
]
|
37 |
+
|
38 |
+
|
39 |
+
logger = logging.getLogger(__name__)
|
40 |
+
|
41 |
+
|
42 |
+
_PLATLIBDIR: str = getattr(sys, "platlibdir", "lib")
|
43 |
+
|
44 |
+
_USE_SYSCONFIG_DEFAULT = sys.version_info >= (3, 10)
|
45 |
+
|
46 |
+
|
47 |
+
def _should_use_sysconfig() -> bool:
    """Decide whether install locations should come from sysconfig.

    sysconfig is the default backend on Python 3.10+, but a Python
    distributor may force the decision either way by setting
    ``sysconfig._PIP_USE_SYSCONFIG`` (rationale in pypa/pip#10647).

    Kept as a function for testability; the value is treated as constant
    during any one run.
    """
    override = getattr(sysconfig, "_PIP_USE_SYSCONFIG", _USE_SYSCONFIG_DEFAULT)
    return bool(override)
|
59 |
+
|
60 |
+
|
61 |
+
# Resolved once at import time; all location lookups below consult this flag.
_USE_SYSCONFIG = _should_use_sysconfig()

# Be noisy about incompatibilities if this platform "should" be using
# sysconfig, but is explicitly opting out and using distutils instead.
if _USE_SYSCONFIG_DEFAULT and not _USE_SYSCONFIG:
    _MISMATCH_LEVEL = logging.WARNING
else:
    _MISMATCH_LEVEL = logging.DEBUG
|
69 |
+
|
70 |
+
|
71 |
+
def _looks_like_bpo_44860() -> bool:
    """The resolution to bpo-44860 will change this incorrect platlib.

    See <https://bugs.python.org/issue44860>.
    """
    from distutils.command.install import INSTALL_SCHEMES  # type: ignore

    scheme = INSTALL_SCHEMES.get("unix_user", {})
    if "platlib" not in scheme:
        return False
    return scheme["platlib"] == "$usersite"
|
83 |
+
|
84 |
+
|
85 |
+
def _looks_like_red_hat_patched_platlib_purelib(scheme: Dict[str, str]) -> bool:
|
86 |
+
platlib = scheme["platlib"]
|
87 |
+
if "/$platlibdir/" in platlib:
|
88 |
+
platlib = platlib.replace("/$platlibdir/", f"/{_PLATLIBDIR}/")
|
89 |
+
if "/lib64/" not in platlib:
|
90 |
+
return False
|
91 |
+
unpatched = platlib.replace("/lib64/", "/lib/")
|
92 |
+
return unpatched.replace("$platbase/", "$base/") == scheme["purelib"]
|
93 |
+
|
94 |
+
|
95 |
+
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_lib() -> bool:
    """Red Hat patches platlib in unix_prefix and unix_home, but not purelib.

    This is the only way I can see to tell a Red Hat-patched Python.
    """
    from distutils.command.install import INSTALL_SCHEMES  # type: ignore

    for name in ("unix_prefix", "unix_home"):
        if name not in INSTALL_SCHEMES:
            return False
        if not _looks_like_red_hat_patched_platlib_purelib(INSTALL_SCHEMES[name]):
            return False
    return True
|
108 |
+
|
109 |
+
|
110 |
+
@functools.lru_cache(maxsize=None)
def _looks_like_debian_scheme() -> bool:
    """Debian adds two additional schemes."""
    from distutils.command.install import INSTALL_SCHEMES  # type: ignore

    return all(name in INSTALL_SCHEMES for name in ("deb_system", "unix_local"))
|
116 |
+
|
117 |
+
|
118 |
+
@functools.lru_cache(maxsize=None)
def _looks_like_red_hat_scheme() -> bool:
    """Red Hat patches ``sys.prefix`` and ``sys.exec_prefix``.

    Red Hat's ``00251-change-user-install-location.patch`` changes the install
    command's ``prefix`` and ``exec_prefix`` to append ``"/local"``. This is
    (fortunately?) done quite unconditionally, so we create a default command
    object without any configuration to detect this.
    """
    from distutils.command.install import install
    from distutils.dist import Distribution

    cmd: Any = install(Distribution())
    cmd.finalize_options()
    patched_prefix = f"{os.path.normpath(sys.prefix)}/local"
    patched_exec_prefix = f"{os.path.normpath(sys.exec_prefix)}/local"
    return cmd.prefix == patched_prefix and cmd.exec_prefix == patched_exec_prefix
|
136 |
+
|
137 |
+
|
138 |
+
@functools.lru_cache(maxsize=None)
def _looks_like_slackware_scheme() -> bool:
    """Slackware patches sysconfig but fails to patch distutils and site.

    Slackware changes sysconfig's user scheme to use ``"lib64"`` for the lib
    path, but does not do the same to the site module.
    """
    if user_site is None:  # User-site not available.
        return False
    try:
        user_paths = sysconfig.get_paths(scheme="posix_user", expand=False)
    except KeyError:  # User-site not available.
        return False
    # Patched sysconfig but unpatched site module is the Slackware signature.
    return "/lib64/" in user_paths["purelib"] and "/lib64/" not in user_site
|
152 |
+
|
153 |
+
|
154 |
+
@functools.lru_cache(maxsize=None)
|
155 |
+
def _looks_like_msys2_mingw_scheme() -> bool:
|
156 |
+
"""MSYS2 patches distutils and sysconfig to use a UNIX-like scheme.
|
157 |
+
|
158 |
+
However, MSYS2 incorrectly patches sysconfig ``nt`` scheme. The fix is
|
159 |
+
likely going to be included in their 3.10 release, so we ignore the warning.
|
160 |
+
See msys2/MINGW-packages#9319.
|
161 |
+
|
162 |
+
MSYS2 MINGW's patch uses lowercase ``"lib"`` instead of the usual uppercase,
|
163 |
+
and is missing the final ``"site-packages"``.
|
164 |
+
"""
|
165 |
+
paths = sysconfig.get_paths("nt", expand=False)
|
166 |
+
return all(
|
167 |
+
"Lib" not in p and "lib" in p and not p.endswith("site-packages")
|
168 |
+
for p in (paths[key] for key in ("platlib", "purelib"))
|
169 |
+
)
|
170 |
+
|
171 |
+
|
172 |
+
def _fix_abiflags(parts: Tuple[str]) -> Iterator[str]:
|
173 |
+
ldversion = sysconfig.get_config_var("LDVERSION")
|
174 |
+
abiflags: str = getattr(sys, "abiflags", None)
|
175 |
+
|
176 |
+
# LDVERSION does not end with sys.abiflags. Just return the path unchanged.
|
177 |
+
if not ldversion or not abiflags or not ldversion.endswith(abiflags):
|
178 |
+
yield from parts
|
179 |
+
return
|
180 |
+
|
181 |
+
# Strip sys.abiflags from LDVERSION-based path components.
|
182 |
+
for part in parts:
|
183 |
+
if part.endswith(ldversion):
|
184 |
+
part = part[: (0 - len(abiflags))]
|
185 |
+
yield part
|
186 |
+
|
187 |
+
|
188 |
+
@functools.lru_cache(maxsize=None)
def _warn_mismatched(old: pathlib.Path, new: pathlib.Path, *, key: str) -> None:
    """Log one distutils/sysconfig disagreement.

    The lru_cache ensures each distinct (old, new, key) triple is only
    reported once per run.
    """
    issue_url = "https://github.com/pypa/pip/issues/10151"
    message = "\n".join([
        "Value for %s does not match. Please report this to <%s>",
        "distutils: %s",
        "sysconfig: %s",
    ])
    logger.log(_MISMATCH_LEVEL, message, key, issue_url, old, new)
|
197 |
+
|
198 |
+
|
199 |
+
def _warn_if_mismatch(old: pathlib.Path, new: pathlib.Path, *, key: str) -> bool:
|
200 |
+
if old == new:
|
201 |
+
return False
|
202 |
+
_warn_mismatched(old, new, key=key)
|
203 |
+
return True
|
204 |
+
|
205 |
+
|
206 |
+
@functools.lru_cache(maxsize=None)
def _log_context(
    *,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    prefix: Optional[str] = None,
) -> None:
    """Log the location-selection inputs that accompanied a mismatch warning.

    Cached so identical contexts are only logged once per run.
    """
    template = "\n".join([
        "Additional context:",
        "user = %r",
        "home = %r",
        "root = %r",
        "prefix = %r",
    ])
    logger.log(_MISMATCH_LEVEL, template, user, home, root, prefix)
|
223 |
+
|
224 |
+
|
225 |
+
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """Resolve the install scheme, cross-checking sysconfig and distutils.

    The sysconfig-based scheme is always computed; on builds where sysconfig
    is authoritative (``_USE_SYSCONFIG``) it is returned directly. Otherwise
    the distutils-based scheme is returned, and any disagreement between the
    two backends is logged — unless it matches one of the known distro or
    interpreter quirks special-cased below — so users can report it.

    :param dist_name: package name, used in the ``headers`` path.
    :param user: use the "user" scheme.
    :param home: use the "home" scheme rooted at this directory.
    :param root: root under which other directories are re-based.
    :param isolated: ignore distutils config files.
    :param prefix: use the "prefix" scheme rooted at this directory.
    """
    new = _sysconfig.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )
    if _USE_SYSCONFIG:
        return new

    old = _distutils.get_scheme(
        dist_name,
        user=user,
        home=home,
        root=root,
        isolated=isolated,
        prefix=prefix,
    )

    warning_contexts = []
    for k in SCHEME_KEYS:
        old_v = pathlib.Path(getattr(old, k))
        new_v = pathlib.Path(getattr(new, k))

        if old_v == new_v:
            continue

        # distutils incorrectly put PyPy packages under ``site-packages/python``
        # in the ``posix_home`` scheme, but PyPy devs said they expect the
        # directory name to be ``pypy`` instead. So we treat this as a bug fix
        # and not warn about it. See bpo-43307 and python/cpython#24628.
        skip_pypy_special_case = (
            sys.implementation.name == "pypy"
            and home is not None
            and k in ("platlib", "purelib")
            and old_v.parent == new_v.parent
            and old_v.name.startswith("python")
            and new_v.name.startswith("pypy")
        )
        if skip_pypy_special_case:
            continue

        # sysconfig's ``osx_framework_user`` does not include ``pythonX.Y`` in
        # the ``include`` value, but distutils's ``headers`` does. We'll let
        # CPython decide whether this is a bug or feature. See bpo-43948.
        skip_osx_framework_user_special_case = (
            user
            and is_osx_framework()
            and k == "headers"
            and old_v.parent.parent == new_v.parent
            and old_v.parent.name.startswith("python")
        )
        if skip_osx_framework_user_special_case:
            continue

        # On Red Hat and derived Linux distributions, distutils is patched to
        # use "lib64" instead of "lib" for platlib.
        if k == "platlib" and _looks_like_red_hat_lib():
            continue

        # On Python 3.9+, sysconfig's posix_user scheme sets platlib against
        # sys.platlibdir, but distutils's unix_user incorrectly continues
        # using the same $usersite for both platlib and purelib. This creates a
        # mismatch when sys.platlibdir is not "lib".
        skip_bpo_44860 = (
            user
            and k == "platlib"
            and not WINDOWS
            and sys.version_info >= (3, 9)
            and _PLATLIBDIR != "lib"
            and _looks_like_bpo_44860()
        )
        if skip_bpo_44860:
            continue

        # Slackware incorrectly patches posix_user to use lib64 instead of lib,
        # but not usersite to match the location.
        skip_slackware_user_scheme = (
            user
            and k in ("platlib", "purelib")
            and not WINDOWS
            and _looks_like_slackware_scheme()
        )
        if skip_slackware_user_scheme:
            continue

        # Both Debian and Red Hat patch Python to place the system site under
        # /usr/local instead of /usr. Debian also places lib in dist-packages
        # instead of site-packages, but the /usr/local check should cover it.
        skip_linux_system_special_case = (
            not (user or home or prefix or running_under_virtualenv())
            and old_v.parts[1:3] == ("usr", "local")
            and len(new_v.parts) > 1
            and new_v.parts[1] == "usr"
            and (len(new_v.parts) < 3 or new_v.parts[2] != "local")
            and (_looks_like_red_hat_scheme() or _looks_like_debian_scheme())
        )
        if skip_linux_system_special_case:
            continue

        # On Python 3.7 and earlier, sysconfig does not include sys.abiflags in
        # the "pythonX.Y" part of the path, but distutils does.
        skip_sysconfig_abiflag_bug = (
            sys.version_info < (3, 8)
            and not WINDOWS
            and k in ("headers", "platlib", "purelib")
            and tuple(_fix_abiflags(old_v.parts)) == new_v.parts
        )
        if skip_sysconfig_abiflag_bug:
            continue

        # MSYS2 MINGW's sysconfig patch does not include the "site-packages"
        # part of the path. This is incorrect and will be fixed in MSYS.
        skip_msys2_mingw_bug = (
            WINDOWS and k in ("platlib", "purelib") and _looks_like_msys2_mingw_scheme()
        )
        if skip_msys2_mingw_bug:
            continue

        # CPython's POSIX install script invokes pip (via ensurepip) against the
        # interpreter located in the source tree, not the install site. This
        # triggers special logic in sysconfig that's not present in distutils.
        # https://github.com/python/cpython/blob/8c21941ddaf/Lib/sysconfig.py#L178-L194
        skip_cpython_build = (
            sysconfig.is_python_build(check_home=True)
            and not WINDOWS
            and k in ("headers", "include", "platinclude")
        )
        if skip_cpython_build:
            continue

        warning_contexts.append((old_v, new_v, f"scheme.{k}"))

    if not warning_contexts:
        return old

    # Check if this path mismatch is caused by distutils config files. Those
    # files will no longer work once we switch to sysconfig, so this raises a
    # deprecation message for them.
    default_old = _distutils.distutils_scheme(
        dist_name,
        user,
        home,
        root,
        isolated,
        prefix,
        ignore_config_files=True,
    )
    if any(default_old[k] != getattr(old, k) for k in SCHEME_KEYS):
        deprecated(
            reason=(
                "Configuring installation scheme with distutils config files "
                "is deprecated and will no longer work in the near future. If you "
                "are using a Homebrew or Linuxbrew Python, please see discussion "
                "at https://github.com/Homebrew/homebrew-core/issues/76621"
            ),
            replacement=None,
            gone_in=None,
        )
        return old

    # Post warnings about this mismatch so user can report them back.
    for old_v, new_v, key in warning_contexts:
        _warn_mismatched(old_v, new_v, key=key)
    _log_context(user=user, home=home, root=root, prefix=prefix)

    return old
|
401 |
+
|
402 |
+
|
403 |
+
def get_bin_prefix() -> str:
    """Return the directory console scripts are installed into."""
    via_sysconfig = _sysconfig.get_bin_prefix()
    if _USE_SYSCONFIG:
        return via_sysconfig

    via_distutils = _distutils.get_bin_prefix()
    mismatched = _warn_if_mismatch(
        pathlib.Path(via_distutils), pathlib.Path(via_sysconfig), key="bin_prefix"
    )
    if mismatched:
        _log_context()
    return via_distutils
|
412 |
+
|
413 |
+
|
414 |
+
def get_bin_user() -> str:
    """Return the user-scheme scripts directory."""
    user_scheme = _sysconfig.get_scheme("", user=True)
    return user_scheme.scripts
|
416 |
+
|
417 |
+
|
418 |
+
def _looks_like_deb_system_dist_packages(value: str) -> bool:
    """Check if the value is Debian's APT-controlled dist-packages.

    Debian's ``distutils.sysconfig.get_python_lib()`` implementation returns the
    default package path controlled by APT, but does not patch ``sysconfig`` to
    do the same. This is similar to the bug worked around in ``get_scheme()``,
    but here the default is ``deb_system`` instead of ``unix_local``. Ultimately
    we can't do anything about this Debian bug, and this detection allows us to
    skip the warning when needed.
    """
    if not _looks_like_debian_scheme():
        return False
    return value == "/usr/lib/python3/dist-packages"
|
433 |
+
|
434 |
+
|
435 |
+
def get_purelib() -> str:
    """Return the default pure-Python lib location."""
    via_sysconfig = _sysconfig.get_purelib()
    if _USE_SYSCONFIG:
        return via_sysconfig

    via_distutils = _distutils.get_purelib()
    # Debian's APT-managed dist-packages path is a known, unfixable mismatch.
    if _looks_like_deb_system_dist_packages(via_distutils):
        return via_distutils
    if _warn_if_mismatch(
        pathlib.Path(via_distutils), pathlib.Path(via_sysconfig), key="purelib"
    ):
        _log_context()
    return via_distutils
|
447 |
+
|
448 |
+
|
449 |
+
def get_platlib() -> str:
    """Return the default platform-shared lib location."""
    via_sysconfig = _sysconfig.get_platlib()
    if _USE_SYSCONFIG:
        return via_sysconfig

    via_distutils = _distutils.get_platlib()
    # Debian's APT-managed dist-packages path is a known, unfixable mismatch.
    if _looks_like_deb_system_dist_packages(via_distutils):
        return via_distutils
    if _warn_if_mismatch(
        pathlib.Path(via_distutils), pathlib.Path(via_sysconfig), key="platlib"
    ):
        _log_context()
    return via_distutils
|
461 |
+
|
462 |
+
|
463 |
+
def _deduplicated(v1: str, v2: str) -> List[str]:
|
464 |
+
"""Deduplicate values from a list."""
|
465 |
+
if v1 == v2:
|
466 |
+
return [v1]
|
467 |
+
return [v1, v2]
|
468 |
+
|
469 |
+
|
470 |
+
def _looks_like_apple_library(path: str) -> bool:
    """Apple patches sysconfig to *always* look under */Library/Python*."""
    if not sys.platform.startswith("darwin"):
        return False
    expected = f"/Library/Python/{get_major_minor_version()}/site-packages"
    return path == expected
|
475 |
+
|
476 |
+
|
477 |
+
def get_prefixed_libs(prefix: str) -> List[str]:
    """Return the lib locations under ``prefix``."""
    new_pure, new_plat = _sysconfig.get_prefixed_libs(prefix)
    if _USE_SYSCONFIG:
        return _deduplicated(new_pure, new_plat)

    old_pure, old_plat = _distutils.get_prefixed_libs(prefix)
    old_lib_paths = _deduplicated(old_pure, old_plat)

    # Apple's Python (shipped with Xcode and Command Line Tools) hard-code
    # platlib and purelib to '/Library/Python/X.Y/site-packages'. This will
    # cause serious build isolation bugs when Apple starts shipping 3.10 because
    # pip will install build backends to the wrong location. This tells users
    # who is at fault so Apple may notice it and fix the issue in time.
    if all(_looks_like_apple_library(p) for p in old_lib_paths):
        deprecated(
            reason=(
                "Python distributed by Apple's Command Line Tools incorrectly "
                "patches sysconfig to always point to '/Library/Python'. This "
                "will cause build isolation to operate incorrectly on Python "
                "3.10 or later. Please help report this to Apple so they can "
                "fix this. https://developer.apple.com/bug-reporting/"
            ),
            replacement=None,
            gone_in=None,
        )
        return old_lib_paths

    # Both comparisons are evaluated unconditionally so each mismatch is
    # reported, mirroring the original list-then-any() structure.
    pure_mismatch = _warn_if_mismatch(
        pathlib.Path(old_pure),
        pathlib.Path(new_pure),
        key="prefixed-purelib",
    )
    plat_mismatch = _warn_if_mismatch(
        pathlib.Path(old_plat),
        pathlib.Path(new_plat),
        key="prefixed-platlib",
    )
    if pure_mismatch or plat_mismatch:
        _log_context(prefix=prefix)

    return old_lib_paths
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_distutils.cpython-310.pyc
ADDED
Binary file (4.66 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/base.cpython-310.pyc
ADDED
Binary file (1.54 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/_distutils.py
ADDED
@@ -0,0 +1,169 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Locations where we look for configs, install stuff, etc"""
|
2 |
+
|
3 |
+
# The following comment should be removed at some point in the future.
|
4 |
+
# mypy: strict-optional=False
|
5 |
+
|
6 |
+
import logging
|
7 |
+
import os
|
8 |
+
import sys
|
9 |
+
from distutils.cmd import Command as DistutilsCommand
|
10 |
+
from distutils.command.install import SCHEME_KEYS
|
11 |
+
from distutils.command.install import install as distutils_install_command
|
12 |
+
from distutils.sysconfig import get_python_lib
|
13 |
+
from typing import Dict, List, Optional, Tuple, Union, cast
|
14 |
+
|
15 |
+
from pip._internal.models.scheme import Scheme
|
16 |
+
from pip._internal.utils.compat import WINDOWS
|
17 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
18 |
+
|
19 |
+
from .base import get_major_minor_version
|
20 |
+
|
21 |
+
logger = logging.getLogger(__name__)
|
22 |
+
|
23 |
+
|
24 |
+
def distutils_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
    *,
    ignore_config_files: bool = False,
) -> Dict[str, str]:
    """Return a distutils install scheme as a SCHEME_KEYS -> path mapping.

    :param dist_name: package name, used in the ``headers`` path.
    :param user: use the "user" scheme.
    :param home: use the "home" scheme rooted at this directory.
    :param root: root under which other directories are re-based.
    :param isolated: pass ``--no-user-cfg`` to ignore the user config file.
    :param prefix: use the "prefix" scheme rooted at this directory.
    :param ignore_config_files: skip parsing distutils config files entirely.

    Note: the ``home``/``root``/``prefix`` annotations were implicit-Optional
    (``str = None``); they are now explicit ``Optional[str]`` with identical
    runtime behavior.
    """
    from distutils.dist import Distribution

    dist_args: Dict[str, Union[str, List[str]]] = {"name": dist_name}
    if isolated:
        dist_args["script_args"] = ["--no-user-cfg"]

    d = Distribution(dist_args)
    if not ignore_config_files:
        try:
            d.parse_config_files()
        except UnicodeDecodeError:
            # Typeshed does not include find_config_files() for some reason.
            paths = d.find_config_files()  # type: ignore
            logger.warning(
                "Ignore distutils configs in %s due to encoding errors.",
                ", ".join(os.path.basename(p) for p in paths),
            )
    obj: Optional[DistutilsCommand] = None
    obj = d.get_command_obj("install", create=True)
    assert obj is not None
    i = cast(distutils_install_command, obj)
    # NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options()
    # ideally, we'd prefer a scheme class that has no side-effects.
    assert not (user and prefix), f"user={user} prefix={prefix}"
    assert not (home and prefix), f"home={home} prefix={prefix}"
    i.user = user or i.user
    if user or home:
        i.prefix = ""
    i.prefix = prefix or i.prefix
    i.home = home or i.home
    i.root = root or i.root
    i.finalize_options()

    scheme = {}
    for key in SCHEME_KEYS:
        scheme[key] = getattr(i, "install_" + key)

    # install_lib specified in setup.cfg should install *everything*
    # into there (i.e. it takes precedence over both purelib and
    # platlib). Note, i.install_lib is *always* set after
    # finalize_options(); we only want to override here if the user
    # has explicitly requested it hence going back to the config
    if "install_lib" in d.get_option_dict("install"):
        scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))

    if running_under_virtualenv():
        if home:
            prefix = home
        elif user:
            prefix = i.install_userbase  # type: ignore
        else:
            prefix = i.prefix
        scheme["headers"] = os.path.join(
            prefix,
            "include",
            "site",
            f"python{get_major_minor_version()}",
            dist_name,
        )

    if root is not None:
        # Re-base headers under root, dropping any drive prefix (Windows).
        path_no_drive = os.path.splitdrive(os.path.abspath(scheme["headers"]))[1]
        scheme["headers"] = os.path.join(root, path_no_drive[1:])

    return scheme
|
103 |
+
|
104 |
+
|
105 |
+
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: Optional[str] = None,
    root: Optional[str] = None,
    isolated: bool = False,
    prefix: Optional[str] = None,
) -> Scheme:
    """Resolve an installation ``Scheme`` via the distutils backend.

    The distutils documentation provides the context for the available
    schemes:
    https://docs.python.org/3/install/index.html#alternate-installation

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme and provides the base
        directory for the same
    :param root: root under which other directories are re-based
    :param isolated: equivalent to --no-user-cfg, i.e. do not consider
        ~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
        scheme paths
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    """
    # Compute the raw key -> path mapping, then wrap it in the Scheme model.
    paths = distutils_scheme(dist_name, user, home, root, isolated, prefix)
    return Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=paths["headers"],
        scripts=paths["scripts"],
        data=paths["data"],
    )
|
138 |
+
|
139 |
+
|
140 |
+
def get_bin_prefix() -> str:
    """Return the directory where executable scripts live for this prefix."""
    # XXX: In old virtualenv versions, sys.prefix can contain '..' components,
    # so we need to call normpath to eliminate them.
    base = os.path.normpath(sys.prefix)
    if WINDOWS:
        scripts_dir = os.path.join(base, "Scripts")
        if os.path.exists(scripts_dir):
            return scripts_dir
        # buildout uses 'bin' on Windows too?
        return os.path.join(base, "bin")
    # Forcing to use /usr/local/bin for standard macOS framework installs
    # Also log to ~/Library/Logs/ for use with the Console.app log viewer
    if sys.platform.startswith("darwin") and base.startswith("/System/Library/"):
        return "/usr/local/bin"
    return os.path.join(base, "bin")
|
155 |
+
|
156 |
+
|
157 |
+
def get_purelib() -> str:
    """Return the pure-Python site-packages directory (distutils backend)."""
    purelib = get_python_lib(plat_specific=False)
    return purelib
|
159 |
+
|
160 |
+
|
161 |
+
def get_platlib() -> str:
    """Return the platform-specific site-packages directory (distutils backend)."""
    platlib = get_python_lib(plat_specific=True)
    return platlib
|
163 |
+
|
164 |
+
|
165 |
+
def get_prefixed_libs(prefix: str) -> Tuple[str, str]:
    """Return the (purelib, platlib) directories rooted at *prefix*."""
    purelib = get_python_lib(plat_specific=False, prefix=prefix)
    platlib = get_python_lib(plat_specific=True, prefix=prefix)
    return purelib, platlib
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/_sysconfig.py
ADDED
@@ -0,0 +1,219 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import distutils.util # FIXME: For change_root.
|
2 |
+
import logging
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
import sysconfig
|
6 |
+
import typing
|
7 |
+
|
8 |
+
from pip._internal.exceptions import InvalidSchemeCombination, UserInstallationInvalid
|
9 |
+
from pip._internal.models.scheme import SCHEME_KEYS, Scheme
|
10 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
11 |
+
|
12 |
+
from .base import get_major_minor_version, is_osx_framework
|
13 |
+
|
14 |
+
logger = logging.getLogger(__name__)
|
15 |
+
|
16 |
+
|
17 |
+
# Notes on _infer_* functions.
|
18 |
+
# Unfortunately ``get_default_scheme()`` didn't exist before 3.10, so there's no
|
19 |
+
# way to ask things like "what is the '_prefix' scheme on this platform". These
|
20 |
+
# functions try to answer that with some heuristics while accounting for ad-hoc
|
21 |
+
# platforms not covered by CPython's default sysconfig implementation. If the
|
22 |
+
# ad-hoc implementation does not fully implement sysconfig, we'll fall back to
|
23 |
+
# a POSIX scheme.
|
24 |
+
|
25 |
+
_AVAILABLE_SCHEMES = set(sysconfig.get_scheme_names())
|
26 |
+
|
27 |
+
_PREFERRED_SCHEME_API = getattr(sysconfig, "get_preferred_scheme", None)
|
28 |
+
|
29 |
+
|
30 |
+
def _should_use_osx_framework_prefix() -> bool:
    """Check for Apple's ``osx_framework_library`` scheme.

    Python distributed by Apple's Command Line Tools has this special scheme
    that's used when:

    * This is a framework build.
    * We are installing into the system prefix.

    This does not account for ``pip install --prefix`` (also means we're not
    installing to the system prefix), which should use ``posix_prefix``, but
    logic here means ``_infer_prefix()`` outputs ``osx_framework_library``. But
    since ``prefix`` is not available for ``sysconfig.get_default_scheme()``,
    which is the stdlib replacement for ``_infer_prefix()``, presumably Apple
    wouldn't be able to magically switch between ``osx_framework_library`` and
    ``posix_prefix``. ``_infer_prefix()`` returning ``osx_framework_library``
    means its behavior is consistent whether we use the stdlib implementation
    or our own, and we deal with this special case in ``get_scheme()`` instead.
    """
    # Same checks as the original conjunction, expressed as guard clauses.
    if "osx_framework_library" not in _AVAILABLE_SCHEMES:
        return False
    if running_under_virtualenv():
        return False
    return is_osx_framework()
|
54 |
+
|
55 |
+
|
56 |
+
def _infer_prefix() -> str:
    """Try to find a prefix scheme for the current platform.

    This tries:

    * A special ``osx_framework_library`` for Python distributed by Apple's
      Command Line Tools, when not running in a virtual environment.
    * Implementation + OS, used by PyPy on Windows (``pypy_nt``).
    * Implementation without OS, used by PyPy on POSIX (``pypy``).
    * OS + "prefix", used by CPython on POSIX (``posix_prefix``).
    * Just the OS name, used by CPython on Windows (``nt``).

    If none of the above works, fall back to ``posix_prefix``.
    """
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("prefix")
    if _should_use_osx_framework_prefix():
        return "osx_framework_library"
    # Probe candidate scheme names in priority order (same order as the
    # docstring above).
    candidates = (
        f"{sys.implementation.name}_{os.name}",
        sys.implementation.name,
        f"{os.name}_prefix",
        os.name,  # On Windows, the prefix scheme is just called "nt".
    )
    for scheme_name in candidates:
        if scheme_name in _AVAILABLE_SCHEMES:
            return scheme_name
    return "posix_prefix"
|
85 |
+
|
86 |
+
|
87 |
+
def _infer_user() -> str:
    """Try to find a user scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("user")
    # Framework builds outside a virtualenv get Apple's dedicated user scheme.
    framework = is_osx_framework() and not running_under_virtualenv()
    scheme_name = "osx_framework_user" if framework else f"{os.name}_user"
    if scheme_name in _AVAILABLE_SCHEMES:
        return scheme_name
    if "posix_user" not in _AVAILABLE_SCHEMES:  # User scheme unavailable.
        raise UserInstallationInvalid()
    return "posix_user"
|
100 |
+
|
101 |
+
|
102 |
+
def _infer_home() -> str:
    """Try to find a home scheme for the current platform."""
    if _PREFERRED_SCHEME_API:
        return _PREFERRED_SCHEME_API("home")
    scheme_name = f"{os.name}_home"
    # Fall back to the POSIX home scheme when the platform has no own one.
    if scheme_name not in _AVAILABLE_SCHEMES:
        scheme_name = "posix_home"
    return scheme_name
|
110 |
+
|
111 |
+
|
112 |
+
# Update these keys if the user sets a custom home.
# These are the sysconfig variables that anchor every path in a scheme;
# overriding all of them re-roots the expanded paths at the custom location.
_HOME_KEYS = [
    "installed_base",
    "base",
    "installed_platbase",
    "platbase",
    "prefix",
    "exec_prefix",
]
# "userbase" is only a valid override key when the interpreter defines it.
if sysconfig.get_config_var("userbase") is not None:
    _HOME_KEYS.append("userbase")
|
123 |
+
|
124 |
+
|
125 |
+
def get_scheme(
    dist_name: str,
    user: bool = False,
    home: typing.Optional[str] = None,
    root: typing.Optional[str] = None,
    isolated: bool = False,
    prefix: typing.Optional[str] = None,
) -> Scheme:
    """
    Get the "scheme" corresponding to the input parameters.

    :param dist_name: the name of the package to retrieve the scheme for, used
        in the headers scheme path
    :param user: indicates to use the "user" scheme
    :param home: indicates to use the "home" scheme
    :param root: root under which other directories are re-based
    :param isolated: ignored, but kept for distutils compatibility (where
        this controls whether the user-site pydistutils.cfg is honored)
    :param prefix: indicates to use the "prefix" scheme and provides the
        base directory for the same
    :raises InvalidSchemeCombination: when mutually exclusive location
        options are combined
    """
    # "user"/"home"/"prefix" are mutually exclusive install destinations.
    if user and prefix:
        raise InvalidSchemeCombination("--user", "--prefix")
    if home and prefix:
        raise InvalidSchemeCombination("--home", "--prefix")

    if home is not None:
        scheme_name = _infer_home()
    elif user:
        scheme_name = _infer_user()
    else:
        scheme_name = _infer_prefix()

    # Special case: When installing into a custom prefix, use posix_prefix
    # instead of osx_framework_library. See _should_use_osx_framework_prefix()
    # docstring for details.
    if prefix is not None and scheme_name == "osx_framework_library":
        scheme_name = "posix_prefix"

    # Override every base/prefix sysconfig variable with the custom home or
    # prefix so the expanded paths land under that directory.
    if home is not None:
        variables = {k: home for k in _HOME_KEYS}
    elif prefix is not None:
        variables = {k: prefix for k in _HOME_KEYS}
    else:
        variables = {}

    paths = sysconfig.get_paths(scheme=scheme_name, vars=variables)

    # Logic here is very arbitrary, we're doing it for compatibility, don't ask.
    # 1. Pip historically uses a special header path in virtual environments.
    # 2. If the distribution name is not known, distutils uses 'UNKNOWN'. We
    #    only do the same when not running in a virtual environment because
    #    pip's historical header path logic (see point 1) did not do this.
    if running_under_virtualenv():
        if user:
            base = variables.get("userbase", sys.prefix)
        else:
            base = variables.get("base", sys.prefix)
        python_xy = f"python{get_major_minor_version()}"
        paths["include"] = os.path.join(base, "include", "site", python_xy)
    elif not dist_name:
        dist_name = "UNKNOWN"

    scheme = Scheme(
        platlib=paths["platlib"],
        purelib=paths["purelib"],
        headers=os.path.join(paths["include"], dist_name),
        scripts=paths["scripts"],
        data=paths["data"],
    )
    # change_root() re-bases each absolute path under *root* (DESTDIR-style
    # staged installs).
    if root is not None:
        for key in SCHEME_KEYS:
            value = distutils.util.change_root(root, getattr(scheme, key))
            setattr(scheme, key, value)
    return scheme
|
200 |
+
|
201 |
+
|
202 |
+
def get_bin_prefix() -> str:
    """Return the scripts directory of the current interpreter's scheme."""
    # Forcing to use /usr/local/bin for standard macOS framework installs.
    is_system_framework = sys.platform.startswith("darwin") and sys.prefix.startswith(
        "/System/Library/"
    )
    if is_system_framework:
        return "/usr/local/bin"
    return sysconfig.get_paths()["scripts"]
|
207 |
+
|
208 |
+
|
209 |
+
def get_purelib() -> str:
    """Return the pure-Python site-packages directory of the default scheme."""
    paths = sysconfig.get_paths()
    return paths["purelib"]
|
211 |
+
|
212 |
+
|
213 |
+
def get_platlib() -> str:
    """Return the platform-specific site-packages directory of the default scheme."""
    paths = sysconfig.get_paths()
    return paths["platlib"]
|
215 |
+
|
216 |
+
|
217 |
+
def get_prefixed_libs(prefix: str) -> typing.Tuple[str, str]:
    """Return the (purelib, platlib) directories re-rooted at *prefix*."""
    overrides = {"base": prefix, "platbase": prefix}
    paths = sysconfig.get_paths(vars=overrides)
    return paths["purelib"], paths["platlib"]
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/base.py
ADDED
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import functools
|
2 |
+
import os
|
3 |
+
import site
|
4 |
+
import sys
|
5 |
+
import sysconfig
|
6 |
+
import typing
|
7 |
+
|
8 |
+
from pip._internal.utils import appdirs
|
9 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
10 |
+
|
11 |
+
# Application Directories
|
12 |
+
USER_CACHE_DIR = appdirs.user_cache_dir("pip")
|
13 |
+
|
14 |
+
# FIXME doesn't account for venv linked to global site-packages
|
15 |
+
site_packages: typing.Optional[str] = sysconfig.get_path("purelib")
|
16 |
+
|
17 |
+
|
18 |
+
def get_major_minor_version() -> str:
    """
    Return the major-minor version of the current Python as a string, e.g.
    "3.7" or "3.10".
    """
    major, minor = sys.version_info[:2]
    return f"{major}.{minor}"
|
24 |
+
|
25 |
+
|
26 |
+
def get_src_prefix() -> str:
    """Return the absolute directory where editable ("src") checkouts go."""
    if running_under_virtualenv():
        # under macOS + virtualenv sys.prefix is not properly resolved
        # it is something like /path/to/python/bin/..
        return os.path.abspath(os.path.join(sys.prefix, "src"))
    # FIXME: keep src in cwd for now (it is not a temporary folder)
    try:
        cwd = os.getcwd()
    except OSError:
        # In case the current working directory has been renamed or deleted
        sys.exit("The folder you are executing pip from can no longer be found.")
    return os.path.abspath(os.path.join(cwd, "src"))
|
40 |
+
|
41 |
+
|
42 |
+
try:
    # Use getusersitepackages if this is present, as it ensures that the
    # value is initialised properly.
    user_site: typing.Optional[str] = site.getusersitepackages()
except AttributeError:
    # Fallback for interpreters whose site module lacks the helper; the raw
    # attribute may be None if user site-packages are disabled.
    user_site = site.USER_SITE
|
48 |
+
|
49 |
+
|
50 |
+
@functools.lru_cache(maxsize=None)
def is_osx_framework() -> bool:
    """Whether this interpreter is a macOS framework build (result cached)."""
    framework_name = sysconfig.get_config_var("PYTHONFRAMEWORK")
    return bool(framework_name)
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (261 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc
ADDED
Binary file (1.03 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-310.pyc
ADDED
Binary file (18.4 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-310.pyc
ADDED
Binary file (19.2 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/found_candidates.cpython-310.pyc
ADDED
Binary file (4.87 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-310.pyc
ADDED
Binary file (7.71 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/reporter.cpython-310.pyc
ADDED
Binary file (3.18 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-310.pyc
ADDED
Binary file (7.47 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-310.pyc
ADDED
Binary file (8.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/factory.py
ADDED
@@ -0,0 +1,739 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import contextlib
|
2 |
+
import functools
|
3 |
+
import logging
|
4 |
+
from typing import (
|
5 |
+
TYPE_CHECKING,
|
6 |
+
Dict,
|
7 |
+
FrozenSet,
|
8 |
+
Iterable,
|
9 |
+
Iterator,
|
10 |
+
List,
|
11 |
+
Mapping,
|
12 |
+
NamedTuple,
|
13 |
+
Optional,
|
14 |
+
Sequence,
|
15 |
+
Set,
|
16 |
+
Tuple,
|
17 |
+
TypeVar,
|
18 |
+
cast,
|
19 |
+
)
|
20 |
+
|
21 |
+
from pip._vendor.packaging.requirements import InvalidRequirement
|
22 |
+
from pip._vendor.packaging.specifiers import SpecifierSet
|
23 |
+
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
|
24 |
+
from pip._vendor.resolvelib import ResolutionImpossible
|
25 |
+
|
26 |
+
from pip._internal.cache import CacheEntry, WheelCache
|
27 |
+
from pip._internal.exceptions import (
|
28 |
+
DistributionNotFound,
|
29 |
+
InstallationError,
|
30 |
+
InstallationSubprocessError,
|
31 |
+
MetadataInconsistent,
|
32 |
+
UnsupportedPythonVersion,
|
33 |
+
UnsupportedWheel,
|
34 |
+
)
|
35 |
+
from pip._internal.index.package_finder import PackageFinder
|
36 |
+
from pip._internal.metadata import BaseDistribution, get_default_environment
|
37 |
+
from pip._internal.models.link import Link
|
38 |
+
from pip._internal.models.wheel import Wheel
|
39 |
+
from pip._internal.operations.prepare import RequirementPreparer
|
40 |
+
from pip._internal.req.constructors import install_req_from_link_and_ireq
|
41 |
+
from pip._internal.req.req_install import (
|
42 |
+
InstallRequirement,
|
43 |
+
check_invalid_constraint_type,
|
44 |
+
)
|
45 |
+
from pip._internal.resolution.base import InstallRequirementProvider
|
46 |
+
from pip._internal.utils.compatibility_tags import get_supported
|
47 |
+
from pip._internal.utils.hashes import Hashes
|
48 |
+
from pip._internal.utils.packaging import get_requirement
|
49 |
+
from pip._internal.utils.virtualenv import running_under_virtualenv
|
50 |
+
|
51 |
+
from .base import Candidate, CandidateVersion, Constraint, Requirement
|
52 |
+
from .candidates import (
|
53 |
+
AlreadyInstalledCandidate,
|
54 |
+
BaseCandidate,
|
55 |
+
EditableCandidate,
|
56 |
+
ExtrasCandidate,
|
57 |
+
LinkCandidate,
|
58 |
+
RequiresPythonCandidate,
|
59 |
+
as_base_candidate,
|
60 |
+
)
|
61 |
+
from .found_candidates import FoundCandidates, IndexCandidateInfo
|
62 |
+
from .requirements import (
|
63 |
+
ExplicitRequirement,
|
64 |
+
RequiresPythonRequirement,
|
65 |
+
SpecifierRequirement,
|
66 |
+
UnsatisfiableRequirement,
|
67 |
+
)
|
68 |
+
|
69 |
+
if TYPE_CHECKING:
|
70 |
+
from typing import Protocol
|
71 |
+
|
72 |
+
class ConflictCause(Protocol):
|
73 |
+
requirement: RequiresPythonRequirement
|
74 |
+
parent: Candidate
|
75 |
+
|
76 |
+
|
77 |
+
logger = logging.getLogger(__name__)
|
78 |
+
|
79 |
+
C = TypeVar("C")
|
80 |
+
Cache = Dict[Link, C]
|
81 |
+
|
82 |
+
|
83 |
+
class CollectedRootRequirements(NamedTuple):
    """Root-level requirements gathered from the user's direct input."""

    # Requirements the resolver starts from.
    requirements: List[Requirement]
    # Constraints keyed by project identifier.
    constraints: Dict[str, Constraint]
    # Maps project identifier to the order it was requested in, presumably
    # for stable ordering of user-requested projects — TODO confirm with callers.
    user_requested: Dict[str, int]
|
87 |
+
|
88 |
+
|
89 |
+
class Factory:
|
90 |
+
def __init__(
|
91 |
+
self,
|
92 |
+
finder: PackageFinder,
|
93 |
+
preparer: RequirementPreparer,
|
94 |
+
make_install_req: InstallRequirementProvider,
|
95 |
+
wheel_cache: Optional[WheelCache],
|
96 |
+
use_user_site: bool,
|
97 |
+
force_reinstall: bool,
|
98 |
+
ignore_installed: bool,
|
99 |
+
ignore_requires_python: bool,
|
100 |
+
suppress_build_failures: bool,
|
101 |
+
py_version_info: Optional[Tuple[int, ...]] = None,
|
102 |
+
) -> None:
|
103 |
+
self._finder = finder
|
104 |
+
self.preparer = preparer
|
105 |
+
self._wheel_cache = wheel_cache
|
106 |
+
self._python_candidate = RequiresPythonCandidate(py_version_info)
|
107 |
+
self._make_install_req_from_spec = make_install_req
|
108 |
+
self._use_user_site = use_user_site
|
109 |
+
self._force_reinstall = force_reinstall
|
110 |
+
self._ignore_requires_python = ignore_requires_python
|
111 |
+
self._suppress_build_failures = suppress_build_failures
|
112 |
+
|
113 |
+
self._build_failures: Cache[InstallationError] = {}
|
114 |
+
self._link_candidate_cache: Cache[LinkCandidate] = {}
|
115 |
+
self._editable_candidate_cache: Cache[EditableCandidate] = {}
|
116 |
+
self._installed_candidate_cache: Dict[str, AlreadyInstalledCandidate] = {}
|
117 |
+
self._extras_candidate_cache: Dict[
|
118 |
+
Tuple[int, FrozenSet[str]], ExtrasCandidate
|
119 |
+
] = {}
|
120 |
+
|
121 |
+
if not ignore_installed:
|
122 |
+
env = get_default_environment()
|
123 |
+
self._installed_dists = {
|
124 |
+
dist.canonical_name: dist
|
125 |
+
for dist in env.iter_installed_distributions(local_only=False)
|
126 |
+
}
|
127 |
+
else:
|
128 |
+
self._installed_dists = {}
|
129 |
+
|
130 |
+
@property
|
131 |
+
def force_reinstall(self) -> bool:
|
132 |
+
return self._force_reinstall
|
133 |
+
|
134 |
+
def _fail_if_link_is_unsupported_wheel(self, link: Link) -> None:
|
135 |
+
if not link.is_wheel:
|
136 |
+
return
|
137 |
+
wheel = Wheel(link.filename)
|
138 |
+
if wheel.supported(self._finder.target_python.get_tags()):
|
139 |
+
return
|
140 |
+
msg = f"{link.filename} is not a supported wheel on this platform."
|
141 |
+
raise UnsupportedWheel(msg)
|
142 |
+
|
143 |
+
def _make_extras_candidate(
|
144 |
+
self, base: BaseCandidate, extras: FrozenSet[str]
|
145 |
+
) -> ExtrasCandidate:
|
146 |
+
cache_key = (id(base), extras)
|
147 |
+
try:
|
148 |
+
candidate = self._extras_candidate_cache[cache_key]
|
149 |
+
except KeyError:
|
150 |
+
candidate = ExtrasCandidate(base, extras)
|
151 |
+
self._extras_candidate_cache[cache_key] = candidate
|
152 |
+
return candidate
|
153 |
+
|
154 |
+
def _make_candidate_from_dist(
|
155 |
+
self,
|
156 |
+
dist: BaseDistribution,
|
157 |
+
extras: FrozenSet[str],
|
158 |
+
template: InstallRequirement,
|
159 |
+
) -> Candidate:
|
160 |
+
try:
|
161 |
+
base = self._installed_candidate_cache[dist.canonical_name]
|
162 |
+
except KeyError:
|
163 |
+
base = AlreadyInstalledCandidate(dist, template, factory=self)
|
164 |
+
self._installed_candidate_cache[dist.canonical_name] = base
|
165 |
+
if not extras:
|
166 |
+
return base
|
167 |
+
return self._make_extras_candidate(base, extras)
|
168 |
+
|
169 |
+
def _make_candidate_from_link(
|
170 |
+
self,
|
171 |
+
link: Link,
|
172 |
+
extras: FrozenSet[str],
|
173 |
+
template: InstallRequirement,
|
174 |
+
name: Optional[NormalizedName],
|
175 |
+
version: Optional[CandidateVersion],
|
176 |
+
) -> Optional[Candidate]:
|
177 |
+
# TODO: Check already installed candidate, and use it if the link and
|
178 |
+
# editable flag match.
|
179 |
+
|
180 |
+
if link in self._build_failures:
|
181 |
+
# We already tried this candidate before, and it does not build.
|
182 |
+
# Don't bother trying again.
|
183 |
+
return None
|
184 |
+
|
185 |
+
if template.editable:
|
186 |
+
if link not in self._editable_candidate_cache:
|
187 |
+
try:
|
188 |
+
self._editable_candidate_cache[link] = EditableCandidate(
|
189 |
+
link,
|
190 |
+
template,
|
191 |
+
factory=self,
|
192 |
+
name=name,
|
193 |
+
version=version,
|
194 |
+
)
|
195 |
+
except MetadataInconsistent as e:
|
196 |
+
logger.info(
|
197 |
+
"Discarding [blue underline]%s[/]: [yellow]%s[reset]",
|
198 |
+
link,
|
199 |
+
e,
|
200 |
+
extra={"markup": True},
|
201 |
+
)
|
202 |
+
self._build_failures[link] = e
|
203 |
+
return None
|
204 |
+
except InstallationSubprocessError as e:
|
205 |
+
if not self._suppress_build_failures:
|
206 |
+
raise
|
207 |
+
logger.warning("Discarding %s due to build failure: %s", link, e)
|
208 |
+
self._build_failures[link] = e
|
209 |
+
return None
|
210 |
+
|
211 |
+
base: BaseCandidate = self._editable_candidate_cache[link]
|
212 |
+
else:
|
213 |
+
if link not in self._link_candidate_cache:
|
214 |
+
try:
|
215 |
+
self._link_candidate_cache[link] = LinkCandidate(
|
216 |
+
link,
|
217 |
+
template,
|
218 |
+
factory=self,
|
219 |
+
name=name,
|
220 |
+
version=version,
|
221 |
+
)
|
222 |
+
except MetadataInconsistent as e:
|
223 |
+
logger.info(
|
224 |
+
"Discarding [blue underline]%s[/]: [yellow]%s[reset]",
|
225 |
+
link,
|
226 |
+
e,
|
227 |
+
extra={"markup": True},
|
228 |
+
)
|
229 |
+
self._build_failures[link] = e
|
230 |
+
return None
|
231 |
+
except InstallationSubprocessError as e:
|
232 |
+
if not self._suppress_build_failures:
|
233 |
+
raise
|
234 |
+
logger.warning("Discarding %s due to build failure: %s", link, e)
|
235 |
+
self._build_failures[link] = e
|
236 |
+
return None
|
237 |
+
base = self._link_candidate_cache[link]
|
238 |
+
|
239 |
+
if not extras:
|
240 |
+
return base
|
241 |
+
return self._make_extras_candidate(base, extras)
|
242 |
+
|
243 |
+
def _iter_found_candidates(
|
244 |
+
self,
|
245 |
+
ireqs: Sequence[InstallRequirement],
|
246 |
+
specifier: SpecifierSet,
|
247 |
+
hashes: Hashes,
|
248 |
+
prefers_installed: bool,
|
249 |
+
incompatible_ids: Set[int],
|
250 |
+
) -> Iterable[Candidate]:
|
251 |
+
if not ireqs:
|
252 |
+
return ()
|
253 |
+
|
254 |
+
# The InstallRequirement implementation requires us to give it a
|
255 |
+
# "template". Here we just choose the first requirement to represent
|
256 |
+
# all of them.
|
257 |
+
# Hopefully the Project model can correct this mismatch in the future.
|
258 |
+
template = ireqs[0]
|
259 |
+
assert template.req, "Candidates found on index must be PEP 508"
|
260 |
+
name = canonicalize_name(template.req.name)
|
261 |
+
|
262 |
+
extras: FrozenSet[str] = frozenset()
|
263 |
+
for ireq in ireqs:
|
264 |
+
assert ireq.req, "Candidates found on index must be PEP 508"
|
265 |
+
specifier &= ireq.req.specifier
|
266 |
+
hashes &= ireq.hashes(trust_internet=False)
|
267 |
+
extras |= frozenset(ireq.extras)
|
268 |
+
|
269 |
+
def _get_installed_candidate() -> Optional[Candidate]:
|
270 |
+
"""Get the candidate for the currently-installed version."""
|
271 |
+
# If --force-reinstall is set, we want the version from the index
|
272 |
+
# instead, so we "pretend" there is nothing installed.
|
273 |
+
if self._force_reinstall:
|
274 |
+
return None
|
275 |
+
try:
|
276 |
+
installed_dist = self._installed_dists[name]
|
277 |
+
except KeyError:
|
278 |
+
return None
|
279 |
+
# Don't use the installed distribution if its version does not fit
|
280 |
+
# the current dependency graph.
|
281 |
+
if not specifier.contains(installed_dist.version, prereleases=True):
|
282 |
+
return None
|
283 |
+
candidate = self._make_candidate_from_dist(
|
284 |
+
dist=installed_dist,
|
285 |
+
extras=extras,
|
286 |
+
template=template,
|
287 |
+
)
|
288 |
+
# The candidate is a known incompatibility. Don't use it.
|
289 |
+
if id(candidate) in incompatible_ids:
|
290 |
+
return None
|
291 |
+
return candidate
|
292 |
+
|
293 |
+
def iter_index_candidate_infos() -> Iterator[IndexCandidateInfo]:
|
294 |
+
result = self._finder.find_best_candidate(
|
295 |
+
project_name=name,
|
296 |
+
specifier=specifier,
|
297 |
+
hashes=hashes,
|
298 |
+
)
|
299 |
+
icans = list(result.iter_applicable())
|
300 |
+
|
301 |
+
# PEP 592: Yanked releases are ignored unless the specifier
|
302 |
+
# explicitly pins a version (via '==' or '===') that can be
|
303 |
+
# solely satisfied by a yanked release.
|
304 |
+
all_yanked = all(ican.link.is_yanked for ican in icans)
|
305 |
+
|
306 |
+
def is_pinned(specifier: SpecifierSet) -> bool:
|
307 |
+
for sp in specifier:
|
308 |
+
if sp.operator == "===":
|
309 |
+
return True
|
310 |
+
if sp.operator != "==":
|
311 |
+
continue
|
312 |
+
if sp.version.endswith(".*"):
|
313 |
+
continue
|
314 |
+
return True
|
315 |
+
return False
|
316 |
+
|
317 |
+
pinned = is_pinned(specifier)
|
318 |
+
|
319 |
+
# PackageFinder returns earlier versions first, so we reverse.
|
320 |
+
for ican in reversed(icans):
|
321 |
+
if not (all_yanked and pinned) and ican.link.is_yanked:
|
322 |
+
continue
|
323 |
+
func = functools.partial(
|
324 |
+
self._make_candidate_from_link,
|
325 |
+
link=ican.link,
|
326 |
+
extras=extras,
|
327 |
+
template=template,
|
328 |
+
name=name,
|
329 |
+
version=ican.version,
|
330 |
+
)
|
331 |
+
yield ican.version, func
|
332 |
+
|
333 |
+
return FoundCandidates(
|
334 |
+
iter_index_candidate_infos,
|
335 |
+
_get_installed_candidate(),
|
336 |
+
prefers_installed,
|
337 |
+
incompatible_ids,
|
338 |
+
)
|
339 |
+
|
340 |
+
def _iter_explicit_candidates_from_base(
|
341 |
+
self,
|
342 |
+
base_requirements: Iterable[Requirement],
|
343 |
+
extras: FrozenSet[str],
|
344 |
+
) -> Iterator[Candidate]:
|
345 |
+
"""Produce explicit candidates from the base given an extra-ed package.
|
346 |
+
|
347 |
+
:param base_requirements: Requirements known to the resolver. The
|
348 |
+
requirements are guaranteed to not have extras.
|
349 |
+
:param extras: The extras to inject into the explicit requirements'
|
350 |
+
candidates.
|
351 |
+
"""
|
352 |
+
for req in base_requirements:
|
353 |
+
lookup_cand, _ = req.get_candidate_lookup()
|
354 |
+
if lookup_cand is None: # Not explicit.
|
355 |
+
continue
|
356 |
+
# We've stripped extras from the identifier, and should always
|
357 |
+
# get a BaseCandidate here, unless there's a bug elsewhere.
|
358 |
+
base_cand = as_base_candidate(lookup_cand)
|
359 |
+
assert base_cand is not None, "no extras here"
|
360 |
+
yield self._make_extras_candidate(base_cand, extras)
|
361 |
+
|
362 |
+
def _iter_candidates_from_constraints(
|
363 |
+
self,
|
364 |
+
identifier: str,
|
365 |
+
constraint: Constraint,
|
366 |
+
template: InstallRequirement,
|
367 |
+
) -> Iterator[Candidate]:
|
368 |
+
"""Produce explicit candidates from constraints.
|
369 |
+
|
370 |
+
This creates "fake" InstallRequirement objects that are basically clones
|
371 |
+
of what "should" be the template, but with original_link set to link.
|
372 |
+
"""
|
373 |
+
for link in constraint.links:
|
374 |
+
self._fail_if_link_is_unsupported_wheel(link)
|
375 |
+
candidate = self._make_candidate_from_link(
|
376 |
+
link,
|
377 |
+
extras=frozenset(),
|
378 |
+
template=install_req_from_link_and_ireq(link, template),
|
379 |
+
name=canonicalize_name(identifier),
|
380 |
+
version=None,
|
381 |
+
)
|
382 |
+
if candidate:
|
383 |
+
yield candidate
|
384 |
+
|
385 |
+
    def find_candidates(
        self,
        identifier: str,
        requirements: Mapping[str, Iterable[Requirement]],
        incompatibilities: Mapping[str, Iterator[Candidate]],
        constraint: Constraint,
        prefers_installed: bool,
    ) -> Iterable[Candidate]:
        """Return candidates that can satisfy every requirement for *identifier*.

        Explicit (URL-based) candidates collected from the requirements and
        from constraints take precedence; only when none exist is the finder
        consulted for index candidates.
        """
        # Collect basic lookup information from the requirements.
        explicit_candidates: Set[Candidate] = set()
        ireqs: List[InstallRequirement] = []
        for req in requirements[identifier]:
            cand, ireq = req.get_candidate_lookup()
            if cand is not None:
                explicit_candidates.add(cand)
            if ireq is not None:
                ireqs.append(ireq)

        # If the current identifier contains extras, add explicit candidates
        # from entries from extra-less identifier.
        # (InvalidRequirement means the identifier is not NAME[EXTRAS];
        # suppress() skips this step in that case.)
        with contextlib.suppress(InvalidRequirement):
            parsed_requirement = get_requirement(identifier)
            explicit_candidates.update(
                self._iter_explicit_candidates_from_base(
                    requirements.get(parsed_requirement.name, ()),
                    frozenset(parsed_requirement.extras),
                ),
            )

        # Add explicit candidates from constraints. We only do this if there are
        # known ireqs, which represent requirements not already explicit. If
        # there are no ireqs, we're constraining already-explicit requirements,
        # which is handled later when we return the explicit candidates.
        if ireqs:
            try:
                explicit_candidates.update(
                    self._iter_candidates_from_constraints(
                        identifier,
                        constraint,
                        template=ireqs[0],
                    ),
                )
            except UnsupportedWheel:
                # If we're constrained to install a wheel incompatible with the
                # target architecture, no candidates will ever be valid.
                return ()

        # Since we cache all the candidates, incompatibility identification
        # can be made quicker by comparing only the id() values.
        incompat_ids = {id(c) for c in incompatibilities.get(identifier, ())}

        # If none of the requirements want an explicit candidate, we can ask
        # the finder for candidates.
        if not explicit_candidates:
            return self._iter_found_candidates(
                ireqs,
                constraint.specifier,
                constraint.hashes,
                prefers_installed,
                incompat_ids,
            )

        # Otherwise, filter the explicit candidates: drop known-incompatible
        # ones and keep only those satisfying the constraint and every
        # requirement for this identifier.
        return (
            c
            for c in explicit_candidates
            if id(c) not in incompat_ids
            and constraint.is_satisfied_by(c)
            and all(req.is_satisfied_by(c) for req in requirements[identifier])
        )
454 |
+
|
455 |
+
    def _make_requirement_from_install_req(
        self, ireq: InstallRequirement, requested_extras: Iterable[str]
    ) -> Optional[Requirement]:
        """Convert an InstallRequirement into a resolver Requirement.

        Returns None when the ireq's markers don't match the environment.
        Link-less ireqs become SpecifierRequirement; URL-based ireqs are
        built into a candidate first (failing eagerly for unnamed URLs
        that cannot be built).
        """
        if not ireq.match_markers(requested_extras):
            logger.info(
                "Ignoring %s: markers '%s' don't match your environment",
                ireq.name,
                ireq.markers,
            )
            return None
        if not ireq.link:
            # A plain name-and-specifier requirement; the index finder will
            # supply candidates later.
            return SpecifierRequirement(ireq)
        self._fail_if_link_is_unsupported_wheel(ireq.link)
        cand = self._make_candidate_from_link(
            ireq.link,
            extras=frozenset(ireq.extras),
            template=ireq,
            name=canonicalize_name(ireq.name) if ireq.name else None,
            version=None,
        )
        if cand is None:
            # There's no way we can satisfy a URL requirement if the underlying
            # candidate fails to build. An unnamed URL must be user-supplied, so
            # we fail eagerly. If the URL is named, an unsatisfiable requirement
            # can make the resolver do the right thing, either backtrack (and
            # maybe find some other requirement that's buildable) or raise a
            # ResolutionImpossible eventually.
            if not ireq.name:
                raise self._build_failures[ireq.link]
            return UnsatisfiableRequirement(canonicalize_name(ireq.name))
        return self.make_requirement_from_candidate(cand)
486 |
+
|
487 |
+
    def collect_root_requirements(
        self, root_ireqs: List[InstallRequirement]
    ) -> CollectedRootRequirements:
        """Partition the user's ireqs into requirements and constraints.

        Constraints are merged per canonical name; regular ireqs become
        resolver requirements, recording the position of each user-supplied
        package in ``user_requested`` (first occurrence wins).
        """
        collected = CollectedRootRequirements([], {}, {})
        for i, ireq in enumerate(root_ireqs):
            if ireq.constraint:
                # Ensure we only accept valid constraints
                problem = check_invalid_constraint_type(ireq)
                if problem:
                    raise InstallationError(problem)
                if not ireq.match_markers():
                    # Constraint doesn't apply to this environment.
                    continue
                assert ireq.name, "Constraint must be named"
                name = canonicalize_name(ireq.name)
                if name in collected.constraints:
                    # Merge with an existing constraint for the same project.
                    collected.constraints[name] &= ireq
                else:
                    collected.constraints[name] = Constraint.from_ireq(ireq)
            else:
                req = self._make_requirement_from_install_req(
                    ireq,
                    requested_extras=(),
                )
                if req is None:
                    # Markers didn't match; nothing to resolve.
                    continue
                if ireq.user_supplied and req.name not in collected.user_requested:
                    collected.user_requested[req.name] = i
                collected.requirements.append(req)
        return collected
516 |
+
|
517 |
+
def make_requirement_from_candidate(
|
518 |
+
self, candidate: Candidate
|
519 |
+
) -> ExplicitRequirement:
|
520 |
+
return ExplicitRequirement(candidate)
|
521 |
+
|
522 |
+
def make_requirement_from_spec(
|
523 |
+
self,
|
524 |
+
specifier: str,
|
525 |
+
comes_from: Optional[InstallRequirement],
|
526 |
+
requested_extras: Iterable[str] = (),
|
527 |
+
) -> Optional[Requirement]:
|
528 |
+
ireq = self._make_install_req_from_spec(specifier, comes_from)
|
529 |
+
return self._make_requirement_from_install_req(ireq, requested_extras)
|
530 |
+
|
531 |
+
def make_requires_python_requirement(
|
532 |
+
self,
|
533 |
+
specifier: SpecifierSet,
|
534 |
+
) -> Optional[Requirement]:
|
535 |
+
if self._ignore_requires_python:
|
536 |
+
return None
|
537 |
+
# Don't bother creating a dependency for an empty Requires-Python.
|
538 |
+
if not str(specifier):
|
539 |
+
return None
|
540 |
+
return RequiresPythonRequirement(specifier, self._python_candidate)
|
541 |
+
|
542 |
+
def get_wheel_cache_entry(
|
543 |
+
self, link: Link, name: Optional[str]
|
544 |
+
) -> Optional[CacheEntry]:
|
545 |
+
"""Look up the link in the wheel cache.
|
546 |
+
|
547 |
+
If ``preparer.require_hashes`` is True, don't use the wheel cache,
|
548 |
+
because cached wheels, always built locally, have different hashes
|
549 |
+
than the files downloaded from the index server and thus throw false
|
550 |
+
hash mismatches. Furthermore, cached wheels at present have
|
551 |
+
nondeterministic contents due to file modification times.
|
552 |
+
"""
|
553 |
+
if self._wheel_cache is None or self.preparer.require_hashes:
|
554 |
+
return None
|
555 |
+
return self._wheel_cache.get_cache_entry(
|
556 |
+
link=link,
|
557 |
+
package_name=name,
|
558 |
+
supported_tags=get_supported(),
|
559 |
+
)
|
560 |
+
|
561 |
+
def get_dist_to_uninstall(self, candidate: Candidate) -> Optional[BaseDistribution]:
|
562 |
+
# TODO: Are there more cases this needs to return True? Editable?
|
563 |
+
dist = self._installed_dists.get(candidate.project_name)
|
564 |
+
if dist is None: # Not installed, no uninstallation required.
|
565 |
+
return None
|
566 |
+
|
567 |
+
# We're installing into global site. The current installation must
|
568 |
+
# be uninstalled, no matter it's in global or user site, because the
|
569 |
+
# user site installation has precedence over global.
|
570 |
+
if not self._use_user_site:
|
571 |
+
return dist
|
572 |
+
|
573 |
+
# We're installing into user site. Remove the user site installation.
|
574 |
+
if dist.in_usersite:
|
575 |
+
return dist
|
576 |
+
|
577 |
+
# We're installing into user site, but the installed incompatible
|
578 |
+
# package is in global site. We can't uninstall that, and would let
|
579 |
+
# the new user installation to "shadow" it. But shadowing won't work
|
580 |
+
# in virtual environments, so we error out.
|
581 |
+
if running_under_virtualenv() and dist.in_site_packages:
|
582 |
+
message = (
|
583 |
+
f"Will not install to the user site because it will lack "
|
584 |
+
f"sys.path precedence to {dist.raw_name} in {dist.location}"
|
585 |
+
)
|
586 |
+
raise InstallationError(message)
|
587 |
+
return None
|
588 |
+
|
589 |
+
def _report_requires_python_error(
|
590 |
+
self, causes: Sequence["ConflictCause"]
|
591 |
+
) -> UnsupportedPythonVersion:
|
592 |
+
assert causes, "Requires-Python error reported with no cause"
|
593 |
+
|
594 |
+
version = self._python_candidate.version
|
595 |
+
|
596 |
+
if len(causes) == 1:
|
597 |
+
specifier = str(causes[0].requirement.specifier)
|
598 |
+
message = (
|
599 |
+
f"Package {causes[0].parent.name!r} requires a different "
|
600 |
+
f"Python: {version} not in {specifier!r}"
|
601 |
+
)
|
602 |
+
return UnsupportedPythonVersion(message)
|
603 |
+
|
604 |
+
message = f"Packages require a different Python. {version} not in:"
|
605 |
+
for cause in causes:
|
606 |
+
package = cause.parent.format_for_error()
|
607 |
+
specifier = str(cause.requirement.specifier)
|
608 |
+
message += f"\n{specifier!r} (required by {package})"
|
609 |
+
return UnsupportedPythonVersion(message)
|
610 |
+
|
611 |
+
def _report_single_requirement_conflict(
|
612 |
+
self, req: Requirement, parent: Optional[Candidate]
|
613 |
+
) -> DistributionNotFound:
|
614 |
+
if parent is None:
|
615 |
+
req_disp = str(req)
|
616 |
+
else:
|
617 |
+
req_disp = f"{req} (from {parent.name})"
|
618 |
+
|
619 |
+
cands = self._finder.find_all_candidates(req.project_name)
|
620 |
+
versions = [str(v) for v in sorted({c.version for c in cands})]
|
621 |
+
|
622 |
+
logger.critical(
|
623 |
+
"Could not find a version that satisfies the requirement %s "
|
624 |
+
"(from versions: %s)",
|
625 |
+
req_disp,
|
626 |
+
", ".join(versions) or "none",
|
627 |
+
)
|
628 |
+
if str(req) == "requirements.txt":
|
629 |
+
logger.info(
|
630 |
+
"HINT: You are attempting to install a package literally "
|
631 |
+
'named "requirements.txt" (which cannot exist). Consider '
|
632 |
+
"using the '-r' flag to install the packages listed in "
|
633 |
+
"requirements.txt"
|
634 |
+
)
|
635 |
+
|
636 |
+
return DistributionNotFound(f"No matching distribution found for {req}")
|
637 |
+
|
638 |
+
    def get_installation_error(
        self,
        e: "ResolutionImpossible[Requirement, Candidate]",
        constraints: Dict[str, Constraint],
    ) -> InstallationError:
        """Convert a resolvelib ResolutionImpossible into a user-facing error.

        Logs a detailed explanation of the conflicting causes and returns
        the appropriate InstallationError subclass for the caller to raise.
        """

        assert e.causes, "Installation error reported with no cause"

        # If one of the things we can't solve is "we need Python X.Y",
        # that is what we report.
        requires_python_causes = [
            cause
            for cause in e.causes
            if isinstance(cause.requirement, RequiresPythonRequirement)
            and not cause.requirement.is_satisfied_by(self._python_candidate)
        ]
        if requires_python_causes:
            # The comprehension above makes sure all Requirement instances are
            # RequiresPythonRequirement, so let's cast for convenience.
            return self._report_requires_python_error(
                cast("Sequence[ConflictCause]", requires_python_causes),
            )

        # Otherwise, we have a set of causes which can't all be satisfied
        # at once.

        # The simplest case is when we have *one* cause that can't be
        # satisfied. We just report that case.
        if len(e.causes) == 1:
            req, parent = e.causes[0]
            if req.name not in constraints:
                return self._report_single_requirement_conflict(req, parent)

        # OK, we now have a list of requirements that can't all be
        # satisfied at once.

        # A couple of formatting helpers
        def text_join(parts: List[str]) -> str:
            # Oxford-less natural-language join: "a, b and c".
            if len(parts) == 1:
                return parts[0]

            return ", ".join(parts[:-1]) + " and " + parts[-1]

        def describe_trigger(parent: Candidate) -> str:
            # Name the thing that pulled this candidate in, preferring the
            # requirement chain over a bare name==version when available.
            ireq = parent.get_install_requirement()
            if not ireq or not ireq.comes_from:
                return f"{parent.name}=={parent.version}"
            if isinstance(ireq.comes_from, InstallRequirement):
                return str(ireq.comes_from.name)
            return str(ireq.comes_from)

        triggers = set()
        for req, parent in e.causes:
            if parent is None:
                # This is a root requirement, so we can report it directly
                trigger = req.format_for_error()
            else:
                trigger = describe_trigger(parent)
            triggers.add(trigger)

        if triggers:
            info = text_join(sorted(triggers))
        else:
            info = "the requested packages"

        msg = (
            "Cannot install {} because these package versions "
            "have conflicting dependencies.".format(info)
        )
        logger.critical(msg)
        msg = "\nThe conflict is caused by:"

        relevant_constraints = set()
        for req, parent in e.causes:
            if req.name in constraints:
                # Remember which user constraints participate in the conflict
                # so they can be listed after the dependency lines.
                relevant_constraints.add(req.name)
            msg = msg + "\n    "
            if parent:
                msg = msg + f"{parent.name} {parent.version} depends on "
            else:
                msg = msg + "The user requested "
            msg = msg + req.format_for_error()
        for key in relevant_constraints:
            spec = constraints[key].specifier
            msg += f"\n    The user requested (constraint) {key}{spec}"

        msg = (
            msg
            + "\n\n"
            + "To fix this you could try to:\n"
            + "1. loosen the range of package versions you've specified\n"
            + "2. remove package versions to allow pip attempt to solve "
            + "the dependency conflict\n"
        )

        logger.info(msg)

        return DistributionNotFound(
            "ResolutionImpossible: for help visit "
            "https://pip.pypa.io/en/latest/topics/dependency-resolution/"
            "#dealing-with-dependency-conflicts"
        )
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/provider.py
ADDED
@@ -0,0 +1,248 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import collections
|
2 |
+
import math
|
3 |
+
from typing import (
|
4 |
+
TYPE_CHECKING,
|
5 |
+
Dict,
|
6 |
+
Iterable,
|
7 |
+
Iterator,
|
8 |
+
Mapping,
|
9 |
+
Sequence,
|
10 |
+
TypeVar,
|
11 |
+
Union,
|
12 |
+
)
|
13 |
+
|
14 |
+
from pip._vendor.resolvelib.providers import AbstractProvider
|
15 |
+
|
16 |
+
from .base import Candidate, Constraint, Requirement
|
17 |
+
from .candidates import REQUIRES_PYTHON_IDENTIFIER
|
18 |
+
from .factory import Factory
|
19 |
+
|
20 |
+
if TYPE_CHECKING:
|
21 |
+
from pip._vendor.resolvelib.providers import Preference
|
22 |
+
from pip._vendor.resolvelib.resolvers import RequirementInformation
|
23 |
+
|
24 |
+
PreferenceInformation = RequirementInformation[Requirement, Candidate]
|
25 |
+
|
26 |
+
_ProviderBase = AbstractProvider[Requirement, Candidate, str]
|
27 |
+
else:
|
28 |
+
_ProviderBase = AbstractProvider
|
29 |
+
|
30 |
+
# Notes on the relationship between the provider, the factory, and the
|
31 |
+
# candidate and requirement classes.
|
32 |
+
#
|
33 |
+
# The provider is a direct implementation of the resolvelib class. Its role
|
34 |
+
# is to deliver the API that resolvelib expects.
|
35 |
+
#
|
36 |
+
# Rather than work with completely abstract "requirement" and "candidate"
|
37 |
+
# concepts as resolvelib does, pip has concrete classes implementing these two
|
38 |
+
# ideas. The API of Requirement and Candidate objects are defined in the base
|
39 |
+
# classes, but essentially map fairly directly to the equivalent provider
|
40 |
+
# methods. In particular, `find_matches` and `is_satisfied_by` are
|
41 |
+
# requirement methods, and `get_dependencies` is a candidate method.
|
42 |
+
#
|
43 |
+
# The factory is the interface to pip's internal mechanisms. It is stateless,
|
44 |
+
# and is created by the resolver and held as a property of the provider. It is
|
45 |
+
# responsible for creating Requirement and Candidate objects, and provides
|
46 |
+
# services to those objects (access to pip's finder and preparer).
|
47 |
+
|
48 |
+
|
49 |
+
D = TypeVar("D")
|
50 |
+
V = TypeVar("V")
|
51 |
+
|
52 |
+
|
53 |
+
def _get_with_identifier(
|
54 |
+
mapping: Mapping[str, V],
|
55 |
+
identifier: str,
|
56 |
+
default: D,
|
57 |
+
) -> Union[D, V]:
|
58 |
+
"""Get item from a package name lookup mapping with a resolver identifier.
|
59 |
+
|
60 |
+
This extra logic is needed when the target mapping is keyed by package
|
61 |
+
name, which cannot be directly looked up with an identifier (which may
|
62 |
+
contain requested extras). Additional logic is added to also look up a value
|
63 |
+
by "cleaning up" the extras from the identifier.
|
64 |
+
"""
|
65 |
+
if identifier in mapping:
|
66 |
+
return mapping[identifier]
|
67 |
+
# HACK: Theoretically we should check whether this identifier is a valid
|
68 |
+
# "NAME[EXTRAS]" format, and parse out the name part with packaging or
|
69 |
+
# some regular expression. But since pip's resolver only spits out three
|
70 |
+
# kinds of identifiers: normalized PEP 503 names, normalized names plus
|
71 |
+
# extras, and Requires-Python, we can cheat a bit here.
|
72 |
+
name, open_bracket, _ = identifier.partition("[")
|
73 |
+
if open_bracket and name in mapping:
|
74 |
+
return mapping[name]
|
75 |
+
return default
|
76 |
+
|
77 |
+
|
78 |
+
class PipProvider(_ProviderBase):
    """Pip's provider implementation for resolvelib.

    :params constraints: A mapping of constraints specified by the user. Keys
        are canonicalized project names.
    :params ignore_dependencies: Whether the user specified ``--no-deps``.
    :params upgrade_strategy: The user-specified upgrade strategy.
    :params user_requested: A set of canonicalized package names that the user
        supplied for pip to install/upgrade.
    """

    def __init__(
        self,
        factory: Factory,
        constraints: Dict[str, Constraint],
        ignore_dependencies: bool,
        upgrade_strategy: str,
        user_requested: Dict[str, int],
    ) -> None:
        self._factory = factory
        self._constraints = constraints
        self._ignore_dependencies = ignore_dependencies
        self._upgrade_strategy = upgrade_strategy
        self._user_requested = user_requested
        # Approximate depth of each identifier in the dependency graph;
        # identifiers not yet seen default to infinity ("very deep").
        self._known_depths: Dict[str, float] = collections.defaultdict(lambda: math.inf)

    def identify(self, requirement_or_candidate: Union[Requirement, Candidate]) -> str:
        return requirement_or_candidate.name

    def get_preference(  # type: ignore
        self,
        identifier: str,
        resolutions: Mapping[str, Candidate],
        candidates: Mapping[str, Iterator[Candidate]],
        information: Mapping[str, Iterable["PreferenceInformation"]],
        backtrack_causes: Sequence["PreferenceInformation"],
    ) -> "Preference":
        """Produce a sort key for given requirement based on preference.

        The lower the return value is, the more preferred this group of
        arguments is.

        Currently pip considers the followings in order:

        * Prefer if any of the known requirements is "direct", e.g. points to an
          explicit URL.
        * If equal, prefer if any requirement is "pinned", i.e. contains
          operator ``===`` or ``==``.
        * If equal, calculate an approximate "depth" and resolve requirements
          closer to the user-specified requirements first.
        * Order user-specified requirements by the order they are specified.
        * If equal, prefers "non-free" requirements, i.e. contains at least one
          operator, such as ``>=`` or ``<``.
        * If equal, order alphabetically for consistency (helps debuggability).
        """
        lookups = (r.get_candidate_lookup() for r, _ in information[identifier])
        candidate, ireqs = zip(*lookups)
        operators = [
            specifier.operator
            for specifier_set in (ireq.specifier for ireq in ireqs if ireq)
            for specifier in specifier_set
        ]

        # NOTE(review): zip() makes "candidate" a (non-empty) tuple here, so
        # this comparison is always True -- presumably
        # any(c is not None for c in candidate) was intended; confirm against
        # upstream pip before changing, as it alters preference ordering.
        direct = candidate is not None
        pinned = any(op[:2] == "==" for op in operators)
        unfree = bool(operators)

        try:
            requested_order: Union[int, float] = self._user_requested[identifier]
        except KeyError:
            requested_order = math.inf
            # Not user-requested: depth is one more than the shallowest
            # parent that pulled this identifier in (roots count as 0).
            parent_depths = (
                self._known_depths[parent.name] if parent is not None else 0.0
                for _, parent in information[identifier]
            )
            inferred_depth = min(d for d in parent_depths) + 1.0
        else:
            inferred_depth = 1.0
        self._known_depths[identifier] = inferred_depth

        # NOTE(review): redundant with the try/except above, which already
        # leaves requested_order with exactly this value.
        requested_order = self._user_requested.get(identifier, math.inf)

        # Requires-Python has only one candidate and the check is basically
        # free, so we always do it first to avoid needless work if it fails.
        requires_python = identifier == REQUIRES_PYTHON_IDENTIFIER

        # HACK: Setuptools have a very long and solid backward compatibility
        # track record, and extremely few projects would request a narrow,
        # non-recent version range of it since that would break a lot things.
        # (Most projects specify it only to request for an installer feature,
        # which does not work, but that's another topic.) Intentionally
        # delaying Setuptools helps reduce branches the resolver has to check.
        # This serves as a temporary fix for issues like "apache-airflow[all]"
        # while we work on "proper" branch pruning techniques.
        delay_this = identifier == "setuptools"

        # Prefer the causes of backtracking on the assumption that the problem
        # resolving the dependency tree is related to the failures that caused
        # the backtracking
        backtrack_cause = self.is_backtrack_cause(identifier, backtrack_causes)

        return (
            not requires_python,
            delay_this,
            not direct,
            not pinned,
            not backtrack_cause,
            inferred_depth,
            requested_order,
            not unfree,
            identifier,
        )

    def find_matches(
        self,
        identifier: str,
        requirements: Mapping[str, Iterator[Requirement]],
        incompatibilities: Mapping[str, Iterator[Candidate]],
    ) -> Iterable[Candidate]:
        def _eligible_for_upgrade(identifier: str) -> bool:
            """Are upgrades allowed for this project?

            This checks the upgrade strategy, and whether the project was one
            that the user specified in the command line, in order to decide
            whether we should upgrade if there's a newer version available.

            (Note that we don't need access to the `--upgrade` flag, because
            an upgrade strategy of "to-satisfy-only" means that `--upgrade`
            was not specified).
            """
            if self._upgrade_strategy == "eager":
                return True
            elif self._upgrade_strategy == "only-if-needed":
                user_order = _get_with_identifier(
                    self._user_requested,
                    identifier,
                    default=None,
                )
                return user_order is not None
            return False

        constraint = _get_with_identifier(
            self._constraints,
            identifier,
            default=Constraint.empty(),
        )
        return self._factory.find_candidates(
            identifier=identifier,
            requirements=requirements,
            constraint=constraint,
            prefers_installed=(not _eligible_for_upgrade(identifier)),
            incompatibilities=incompatibilities,
        )

    def is_satisfied_by(self, requirement: Requirement, candidate: Candidate) -> bool:
        return requirement.is_satisfied_by(candidate)

    def get_dependencies(self, candidate: Candidate) -> Sequence[Requirement]:
        # --no-deps suppresses everything but the candidate's own metadata
        # requirements (e.g. Requires-Python).
        with_requires = not self._ignore_dependencies
        return [r for r in candidate.iter_dependencies(with_requires) if r is not None]

    @staticmethod
    def is_backtrack_cause(
        identifier: str, backtrack_causes: Sequence["PreferenceInformation"]
    ) -> bool:
        # True when the identifier names either side (requirement or parent)
        # of any recorded backtracking cause.
        for backtrack_cause in backtrack_causes:
            if identifier == backtrack_cause.requirement.name:
                return True
            if backtrack_cause.parent and identifier == backtrack_cause.parent.name:
                return True
        return False
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/reporter.py
ADDED
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from collections import defaultdict
|
2 |
+
from logging import getLogger
|
3 |
+
from typing import Any, DefaultDict
|
4 |
+
|
5 |
+
from pip._vendor.resolvelib.reporters import BaseReporter
|
6 |
+
|
7 |
+
from .base import Candidate, Requirement
|
8 |
+
|
9 |
+
logger = getLogger(__name__)
|
10 |
+
|
11 |
+
|
12 |
+
class PipReporter(BaseReporter):
    """Reporter that warns the user when the resolver keeps backtracking on
    the same package, with progressively sterner messages."""

    def __init__(self) -> None:
        # Number of backtracks observed so far, per package name.
        self.backtracks_by_package: DefaultDict[str, int] = defaultdict(int)

        # Message to emit the moment a package hits the keyed backtrack count.
        self._messages_at_backtrack = {
            1: (
                "pip is looking at multiple versions of {package_name} to "
                "determine which version is compatible with other "
                "requirements. This could take a while."
            ),
            8: (
                "pip is looking at multiple versions of {package_name} to "
                "determine which version is compatible with other "
                "requirements. This could take a while."
            ),
            13: (
                "This is taking longer than usual. You might need to provide "
                "the dependency resolver with stricter constraints to reduce "
                "runtime. See https://pip.pypa.io/warnings/backtracking for "
                "guidance. If you want to abort this run, press Ctrl + C."
            ),
        }

    def backtracking(self, candidate: Candidate) -> None:
        self.backtracks_by_package[candidate.name] += 1
        count = self.backtracks_by_package[candidate.name]

        message = self._messages_at_backtrack.get(count)
        if message is None:
            # No milestone reached at this count; stay quiet.
            return
        logger.info("INFO: %s", message.format(package_name=candidate.name))
44 |
+
|
45 |
+
|
46 |
+
class PipDebuggingReporter(BaseReporter):
    """A reporter that does an info log for every event it sees."""

    def starting(self) -> None:
        logger.info("Reporter.starting()")

    def starting_round(self, index: int) -> None:
        logger.info("Reporter.starting_round(%r)", index)

    def ending_round(self, index: int, state: Any) -> None:
        # The state object is deliberately not interpolated here; it can be
        # very large.
        logger.info("Reporter.ending_round(%r, state)", index)

    def ending(self, state: Any) -> None:
        logger.info("Reporter.ending(%r)", state)

    def adding_requirement(self, requirement: Requirement, parent: Candidate) -> None:
        logger.info("Reporter.adding_requirement(%r, %r)", requirement, parent)

    def backtracking(self, candidate: Candidate) -> None:
        logger.info("Reporter.backtracking(%r)", candidate)

    def pinning(self, candidate: Candidate) -> None:
        logger.info("Reporter.pinning(%r)", candidate)
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__init__.py
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Expose a limited set of classes and functions so callers outside of
|
2 |
+
# the vcs package don't need to import deeper than `pip._internal.vcs`.
|
3 |
+
# (The test directory may still need to import from a vcs sub-package.)
|
4 |
+
# Import all vcs modules to register each VCS in the VcsSupport object.
|
5 |
+
import pip._internal.vcs.bazaar
|
6 |
+
import pip._internal.vcs.git
|
7 |
+
import pip._internal.vcs.mercurial
|
8 |
+
import pip._internal.vcs.subversion # noqa: F401
|
9 |
+
from pip._internal.vcs.versioncontrol import ( # noqa: F401
|
10 |
+
RemoteNotFoundError,
|
11 |
+
RemoteNotValidError,
|
12 |
+
is_url,
|
13 |
+
make_vcs_requirement_url,
|
14 |
+
vcs,
|
15 |
+
)
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (515 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/bazaar.cpython-310.pyc
ADDED
Binary file (3.34 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/git.cpython-310.pyc
ADDED
Binary file (12.5 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/mercurial.cpython-310.pyc
ADDED
Binary file (5.06 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/subversion.cpython-310.pyc
ADDED
Binary file (8.45 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/__pycache__/versioncontrol.cpython-310.pyc
ADDED
Binary file (21.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/bazaar.py
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
from typing import List, Optional, Tuple
|
3 |
+
|
4 |
+
from pip._internal.utils.misc import HiddenText, display_path
|
5 |
+
from pip._internal.utils.subprocess import make_command
|
6 |
+
from pip._internal.utils.urls import path_to_url
|
7 |
+
from pip._internal.vcs.versioncontrol import (
|
8 |
+
AuthInfo,
|
9 |
+
RemoteNotFoundError,
|
10 |
+
RevOptions,
|
11 |
+
VersionControl,
|
12 |
+
vcs,
|
13 |
+
)
|
14 |
+
|
15 |
+
logger = logging.getLogger(__name__)
|
16 |
+
|
17 |
+
|
18 |
+
class Bazaar(VersionControl):
    """Bazaar (bzr) support for pip's VCS machinery.

    Registered with the global ``vcs`` registry below; all work is done by
    shelling out through ``run_command`` inherited from ``VersionControl``.
    """

    name = "bzr"
    dirname = ".bzr"
    repo_name = "branch"
    # URL schemes pip recognizes as Bazaar requirements.
    schemes = (
        "bzr+http",
        "bzr+https",
        "bzr+ssh",
        "bzr+sftp",
        "bzr+ftp",
        "bzr+lp",
        "bzr+file",
    )

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        """Return the command-line arguments selecting revision *rev*."""
        return ["-r", rev]

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        """Branch *url* into the fresh directory *dest*.

        *verbosity* maps to a single flag: <=0 is quiet, 1 is default
        output, and higher values stack ``v``s (e.g. ``-vv``).
        """
        rev_display = rev_options.to_display()
        logger.info(
            "Checking out %s%s to %s",
            url,
            rev_display,
            display_path(dest),
        )
        if verbosity <= 0:
            flag = "--quiet"
        elif verbosity == 1:
            flag = ""
        else:
            flag = f"-{'v'*verbosity}"
        cmd_args = make_command("branch", flag, rev_options.to_args(), url, dest)
        self.run_command(cmd_args)

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Point the existing checkout at *dest* to a different *url*."""
        self.run_command(make_command("switch", url), cwd=dest)

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Pull new revisions into *dest*, quietly, at the requested rev."""
        cmd_args = make_command("pull", "-q", rev_options.to_args())
        self.run_command(cmd_args, cwd=dest)

    @classmethod
    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
        # hotfix the URL scheme after removing bzr+ from bzr+ssh://: the base
        # class strips the "bzr+" prefix, so re-add it for ssh URLs.
        url, rev, user_pass = super().get_url_rev_and_auth(url)
        if url.startswith("ssh://"):
            url = "bzr+" + url
        return url, rev, user_pass

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        """Scrape the branch's upstream URL out of ``info`` output.

        Raises RemoteNotFoundError if no recognized line is present.
        """
        urls = cls.run_command(
            ["info"], show_stdout=False, stdout_only=True, cwd=location
        )
        for line in urls.splitlines():
            line = line.strip()
            for x in ("checkout of branch: ", "parent branch: "):
                if line.startswith(x):
                    repo = line.split(x)[1]
                    # Local paths are normalized to file:// URLs.
                    if cls._is_local_repository(repo):
                        return path_to_url(repo)
                    return repo
        raise RemoteNotFoundError

    @classmethod
    def get_revision(cls, location: str) -> str:
        """Return the current revision number reported by ``revno``."""
        revision = cls.run_command(
            ["revno"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        )
        # Only the last output line carries the revno itself.
        return revision.splitlines()[-1]

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """Always assume the versions don't match"""
        return False
|
99 |
+
|
100 |
+
|
101 |
+
vcs.register(Bazaar)
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/git.py
ADDED
@@ -0,0 +1,526 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
import os.path
|
3 |
+
import pathlib
|
4 |
+
import re
|
5 |
+
import urllib.parse
|
6 |
+
import urllib.request
|
7 |
+
from typing import List, Optional, Tuple
|
8 |
+
|
9 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
10 |
+
from pip._internal.utils.misc import HiddenText, display_path, hide_url
|
11 |
+
from pip._internal.utils.subprocess import make_command
|
12 |
+
from pip._internal.vcs.versioncontrol import (
|
13 |
+
AuthInfo,
|
14 |
+
RemoteNotFoundError,
|
15 |
+
RemoteNotValidError,
|
16 |
+
RevOptions,
|
17 |
+
VersionControl,
|
18 |
+
find_path_to_project_root_from_repo_root,
|
19 |
+
vcs,
|
20 |
+
)
|
21 |
+
|
22 |
+
urlsplit = urllib.parse.urlsplit
|
23 |
+
urlunsplit = urllib.parse.urlunsplit
|
24 |
+
|
25 |
+
|
26 |
+
logger = logging.getLogger(__name__)
|
27 |
+
|
28 |
+
|
29 |
+
GIT_VERSION_REGEX = re.compile(
|
30 |
+
r"^git version " # Prefix.
|
31 |
+
r"(\d+)" # Major.
|
32 |
+
r"\.(\d+)" # Dot, minor.
|
33 |
+
r"(?:\.(\d+))?" # Optional dot, patch.
|
34 |
+
r".*$" # Suffix, including any pre- and post-release segments we don't care about.
|
35 |
+
)
|
36 |
+
|
37 |
+
HASH_REGEX = re.compile("^[a-fA-F0-9]{40}$")
|
38 |
+
|
39 |
+
# SCP (Secure copy protocol) shorthand. e.g. '[email protected]:foo/bar.git'
|
40 |
+
SCP_REGEX = re.compile(
|
41 |
+
r"""^
|
42 |
+
# Optional user, e.g. 'git@'
|
43 |
+
(\w+@)?
|
44 |
+
# Server, e.g. 'github.com'.
|
45 |
+
([^/:]+):
|
46 |
+
# The server-side path. e.g. 'user/project.git'. Must start with an
|
47 |
+
# alphanumeric character so as not to be confusable with a Windows paths
|
48 |
+
# like 'C:/foo/bar' or 'C:\foo\bar'.
|
49 |
+
(\w[^:]*)
|
50 |
+
$""",
|
51 |
+
re.VERBOSE,
|
52 |
+
)
|
53 |
+
|
54 |
+
|
55 |
+
def looks_like_hash(sha: str) -> bool:
    """Return True if *sha* has the form of a full 40-character hex SHA-1."""
    return HASH_REGEX.match(sha) is not None
|
57 |
+
|
58 |
+
|
59 |
+
class Git(VersionControl):
    """Git support for pip's VCS machinery.

    All operations shell out to the ``git`` executable via ``run_command``
    inherited from ``VersionControl``.
    """

    name = "git"
    dirname = ".git"
    repo_name = "clone"
    # URL schemes pip recognizes as Git requirements.
    schemes = (
        "git+http",
        "git+https",
        "git+ssh",
        "git+git",
        "git+file",
    )
    # Prevent the user's environment variables from interfering with pip:
    # https://github.com/pypa/pip/issues/1130
    unset_environ = ("GIT_DIR", "GIT_WORK_TREE")
    default_arg_rev = "HEAD"

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        """Return the command-line arguments selecting revision *rev*."""
        return [rev]

    def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
        """Return True only when *dest* is pinned to an exact commit hash."""
        _, rev_options = self.get_url_rev_options(hide_url(url))
        if not rev_options.rev:
            return False
        if not self.is_commit_id_equal(dest, rev_options.rev):
            # the current commit is different from rev,
            # which means rev was something else than a commit hash
            return False
        # return False in the rare case rev is both a commit hash
        # and a tag or a branch; we don't want to cache in that case
        # because that branch/tag could point to something else in the future
        is_tag_or_branch = bool(self.get_revision_sha(dest, rev_options.rev)[0])
        return not is_tag_or_branch

    def get_git_version(self) -> Tuple[int, ...]:
        """Parse ``git version`` output into a tuple of ints, or () on failure."""
        version = self.run_command(
            ["version"],
            command_desc="git version",
            show_stdout=False,
            stdout_only=True,
        )
        match = GIT_VERSION_REGEX.match(version)
        if not match:
            logger.warning("Can't parse git version: %s", version)
            return ()
        # NOTE(review): the patch group in GIT_VERSION_REGEX is optional, so a
        # version string without a patch number would yield a None group and
        # make int() raise — presumably git always reports major.minor.patch.
        return tuple(int(c) for c in match.groups())

    @classmethod
    def get_current_branch(cls, location: str) -> Optional[str]:
        """
        Return the current branch, or None if HEAD isn't at a branch
        (e.g. detached HEAD).
        """
        # git-symbolic-ref exits with empty stdout if "HEAD" is a detached
        # HEAD rather than a symbolic ref. In addition, the -q causes the
        # command to exit with status code 1 instead of 128 in this case
        # and to suppress the message to stderr.
        args = ["symbolic-ref", "-q", "HEAD"]
        output = cls.run_command(
            args,
            extra_ok_returncodes=(1,),
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        )
        ref = output.strip()

        if ref.startswith("refs/heads/"):
            return ref[len("refs/heads/") :]

        return None

    @classmethod
    def get_revision_sha(cls, dest: str, rev: str) -> Tuple[Optional[str], bool]:
        """
        Return (sha_or_none, is_branch), where sha_or_none is a commit hash
        if the revision names a remote branch or tag, otherwise None.

        Args:
          dest: the repository directory.
          rev: the revision name.
        """
        # Pass rev to pre-filter the list.
        output = cls.run_command(
            ["show-ref", rev],
            cwd=dest,
            show_stdout=False,
            stdout_only=True,
            on_returncode="ignore",
        )
        refs = {}
        # NOTE: We do not use splitlines here since that would split on other
        # unicode separators, which can be maliciously used to install a
        # different revision.
        for line in output.strip().split("\n"):
            line = line.rstrip("\r")
            if not line:
                continue
            try:
                # show-ref prints "<sha> <refname>" per line.
                ref_sha, ref_name = line.split(" ", maxsplit=2)
            except ValueError:
                # Include the offending line to simplify troubleshooting if
                # this error ever occurs.
                raise ValueError(f"unexpected show-ref line: {line!r}")

            refs[ref_name] = ref_sha

        branch_ref = f"refs/remotes/origin/{rev}"
        tag_ref = f"refs/tags/{rev}"

        # Prefer a remote branch match over a tag match.
        sha = refs.get(branch_ref)
        if sha is not None:
            return (sha, True)

        sha = refs.get(tag_ref)

        return (sha, False)

    @classmethod
    def _should_fetch(cls, dest: str, rev: str) -> bool:
        """
        Return true if rev is a ref or is a commit that we don't have locally.

        Branches and tags are not considered in this method because they are
        assumed to be always available locally (which is a normal outcome of
        ``git clone`` and ``git fetch --tags``).
        """
        if rev.startswith("refs/"):
            # Always fetch remote refs.
            return True

        if not looks_like_hash(rev):
            # Git fetch would fail with abbreviated commits.
            return False

        if cls.has_commit(dest, rev):
            # Don't fetch if we have the commit locally.
            return False

        return True

    @classmethod
    def resolve_revision(
        cls, dest: str, url: HiddenText, rev_options: RevOptions
    ) -> RevOptions:
        """
        Resolve a revision to a new RevOptions object with the SHA1 of the
        branch, tag, or ref if found.

        Args:
          rev_options: a RevOptions object.
        """
        rev = rev_options.arg_rev
        # The arg_rev property's implementation for Git ensures that the
        # rev return value is always non-None.
        assert rev is not None

        sha, is_branch = cls.get_revision_sha(dest, rev)

        if sha is not None:
            rev_options = rev_options.make_new(sha)
            # Remember the branch name so fetch_new can set up tracking.
            rev_options.branch_name = rev if is_branch else None

            return rev_options

        # Do not show a warning for the common case of something that has
        # the form of a Git commit hash.
        if not looks_like_hash(rev):
            logger.warning(
                "Did not find branch or tag '%s', assuming revision or ref.",
                rev,
            )

        if not cls._should_fetch(dest, rev):
            return rev_options

        # fetch the requested revision
        cls.run_command(
            make_command("fetch", "-q", url, rev_options.to_args()),
            cwd=dest,
        )
        # Change the revision to the SHA of the ref we fetched
        sha = cls.get_revision(dest, rev="FETCH_HEAD")
        rev_options = rev_options.make_new(sha)

        return rev_options

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """
        Return whether the current commit hash equals the given name.

        Args:
          dest: the repository directory.
          name: a string name.
        """
        if not name:
            # Then avoid an unnecessary subprocess call.
            return False

        return cls.get_revision(dest) == name

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        """Clone *url* into the fresh directory *dest* and check out the
        requested revision (or record the default HEAD commit)."""
        rev_display = rev_options.to_display()
        logger.info("Cloning %s%s to %s", url, rev_display, display_path(dest))
        # Map pip's verbosity level onto git clone flags.
        if verbosity <= 0:
            flags: Tuple[str, ...] = ("--quiet",)
        elif verbosity == 1:
            flags = ()
        else:
            flags = ("--verbose", "--progress")
        if self.get_git_version() >= (2, 17):
            # Git added support for partial clone in 2.17
            # https://git-scm.com/docs/partial-clone
            # Speeds up cloning by functioning without a complete copy of repository
            self.run_command(
                make_command(
                    "clone",
                    "--filter=blob:none",
                    *flags,
                    url,
                    dest,
                )
            )
        else:
            self.run_command(make_command("clone", *flags, url, dest))

        if rev_options.rev:
            # Then a specific revision was requested.
            rev_options = self.resolve_revision(dest, url, rev_options)
            branch_name = getattr(rev_options, "branch_name", None)
            logger.debug("Rev options %s, branch_name %s", rev_options, branch_name)
            if branch_name is None:
                # Only do a checkout if the current commit id doesn't match
                # the requested revision.
                if not self.is_commit_id_equal(dest, rev_options.rev):
                    cmd_args = make_command(
                        "checkout",
                        "-q",
                        rev_options.to_args(),
                    )
                    self.run_command(cmd_args, cwd=dest)
            elif self.get_current_branch(dest) != branch_name:
                # Then a specific branch was requested, and that branch
                # is not yet checked out.
                track_branch = f"origin/{branch_name}"
                cmd_args = [
                    "checkout",
                    "-b",
                    branch_name,
                    "--track",
                    track_branch,
                ]
                self.run_command(cmd_args, cwd=dest)
        else:
            # No explicit revision: record the commit the clone landed on.
            sha = self.get_revision(dest)
            rev_options = rev_options.make_new(sha)

        logger.info("Resolved %s to commit %s", url, rev_options.rev)

        # repo may contain submodules
        self.update_submodules(dest)

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Repoint origin at *url* and check out the requested revision."""
        self.run_command(
            make_command("config", "remote.origin.url", url),
            cwd=dest,
        )
        cmd_args = make_command("checkout", "-q", rev_options.to_args())
        self.run_command(cmd_args, cwd=dest)

        self.update_submodules(dest)

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Fetch from the default remote and hard-reset to the wanted rev."""
        # First fetch changes from the default remote
        if self.get_git_version() >= (1, 9):
            # fetch tags in addition to everything else
            self.run_command(["fetch", "-q", "--tags"], cwd=dest)
        else:
            self.run_command(["fetch", "-q"], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
        rev_options = self.resolve_revision(dest, url, rev_options)
        cmd_args = make_command("reset", "--hard", "-q", rev_options.to_args())
        self.run_command(cmd_args, cwd=dest)
        # update submodules
        self.update_submodules(dest)

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        """
        Return URL of the first remote encountered.

        Raises RemoteNotFoundError if the repository does not have a remote
        url configured.
        """
        # We need to pass 1 for extra_ok_returncodes since the command
        # exits with return code 1 if there are no matching lines.
        stdout = cls.run_command(
            ["config", "--get-regexp", r"remote\..*\.url"],
            extra_ok_returncodes=(1,),
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        )
        remotes = stdout.splitlines()
        try:
            found_remote = remotes[0]
        except IndexError:
            raise RemoteNotFoundError

        # Prefer "origin" over whichever remote happened to be listed first.
        for remote in remotes:
            if remote.startswith("remote.origin.url "):
                found_remote = remote
                break
        url = found_remote.split(" ")[1]
        return cls._git_remote_to_pip_url(url.strip())

    @staticmethod
    def _git_remote_to_pip_url(url: str) -> str:
        """
        Convert a remote url from what git uses to what pip accepts.

        There are 3 legal forms **url** may take:

            1. A fully qualified url: ssh://[email protected]/foo/bar.git
            2. A local project.git folder: /path/to/bare/repository.git
            3. SCP shorthand for form 1: [email protected]:foo/bar.git

        Form 1 is output as-is. Form 2 must be converted to URI and form 3 must
        be converted to form 1.

        See the corresponding test test_git_remote_url_to_pip() for examples of
        sample inputs/outputs.
        """
        if re.match(r"\w+://", url):
            # This is already valid. Pass it though as-is.
            return url
        if os.path.exists(url):
            # A local bare remote (git clone --mirror).
            # Needs a file:// prefix.
            return pathlib.PurePath(url).as_uri()
        scp_match = SCP_REGEX.match(url)
        if scp_match:
            # Add an ssh:// prefix and replace the ':' with a '/'.
            return scp_match.expand(r"ssh://\1\2/\3")
        # Otherwise, bail out.
        raise RemoteNotValidError(url)

    @classmethod
    def has_commit(cls, location: str, rev: str) -> bool:
        """
        Check if rev is a commit that is available in the local repository.
        """
        try:
            # NOTE(review): the literal "sha^" prefix comes from upstream pip;
            # presumably it forces rev-parse to reject non-commit inputs —
            # confirm against pip's history before changing.
            cls.run_command(
                ["rev-parse", "-q", "--verify", "sha^" + rev],
                cwd=location,
                log_failed_cmd=False,
            )
        except InstallationError:
            return False
        else:
            return True

    @classmethod
    def get_revision(cls, location: str, rev: Optional[str] = None) -> str:
        """Return the commit hash that *rev* (default HEAD) resolves to."""
        if rev is None:
            rev = "HEAD"
        current_rev = cls.run_command(
            ["rev-parse", rev],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        )
        return current_rev.strip()

    @classmethod
    def get_subdirectory(cls, location: str) -> Optional[str]:
        """
        Return the path to Python project root, relative to the repo root.
        Return None if the project root is in the repo root.
        """
        # find the repo root
        git_dir = cls.run_command(
            ["rev-parse", "--git-dir"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()
        if not os.path.isabs(git_dir):
            git_dir = os.path.join(location, git_dir)
        repo_root = os.path.abspath(os.path.join(git_dir, ".."))
        return find_path_to_project_root_from_repo_root(location, repo_root)

    @classmethod
    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
        """
        Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH they sometimes don't
        work with a ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
        """
        # Works around an apparent Git bug
        # (see https://article.gmane.org/gmane.comp.version-control.git/146500)
        scheme, netloc, path, query, fragment = urlsplit(url)
        if scheme.endswith("file"):
            initial_slashes = path[: -len(path.lstrip("/"))]
            newpath = initial_slashes + urllib.request.url2pathname(path).replace(
                "\\", "/"
            ).lstrip("/")
            after_plus = scheme.find("+") + 1
            url = scheme[:after_plus] + urlunsplit(
                (scheme[after_plus:], netloc, newpath, query, fragment),
            )

        if "://" not in url:
            assert "file:" not in url
            url = url.replace("git+", "git+ssh://")
            url, rev, user_pass = super().get_url_rev_and_auth(url)
            url = url.replace("ssh://", "")
        else:
            url, rev, user_pass = super().get_url_rev_and_auth(url)

        return url, rev, user_pass

    @classmethod
    def update_submodules(cls, location: str) -> None:
        """Initialize and update submodules, if a .gitmodules file exists."""
        if not os.path.exists(os.path.join(location, ".gitmodules")):
            return
        cls.run_command(
            ["submodule", "update", "--init", "--recursive", "-q"],
            cwd=location,
        )

    @classmethod
    def get_repository_root(cls, location: str) -> Optional[str]:
        """Return the repo's top-level directory, or None if *location* is
        not under git control (or git itself is unavailable)."""
        loc = super().get_repository_root(location)
        if loc:
            return loc
        try:
            r = cls.run_command(
                ["rev-parse", "--show-toplevel"],
                cwd=location,
                show_stdout=False,
                stdout_only=True,
                on_returncode="raise",
                log_failed_cmd=False,
            )
        except BadCommand:
            logger.debug(
                "could not determine if %s is under git control "
                "because git is not available",
                location,
            )
            return None
        except InstallationError:
            return None
        return os.path.normpath(r.rstrip("\r\n"))

    @staticmethod
    def should_add_vcs_url_prefix(repo_url: str) -> bool:
        """In either https or ssh form, requirements must be prefixed with git+."""
        return True
|
524 |
+
|
525 |
+
|
526 |
+
vcs.register(Git)
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/mercurial.py
ADDED
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import configparser
|
2 |
+
import logging
|
3 |
+
import os
|
4 |
+
from typing import List, Optional, Tuple
|
5 |
+
|
6 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
7 |
+
from pip._internal.utils.misc import HiddenText, display_path
|
8 |
+
from pip._internal.utils.subprocess import make_command
|
9 |
+
from pip._internal.utils.urls import path_to_url
|
10 |
+
from pip._internal.vcs.versioncontrol import (
|
11 |
+
RevOptions,
|
12 |
+
VersionControl,
|
13 |
+
find_path_to_project_root_from_repo_root,
|
14 |
+
vcs,
|
15 |
+
)
|
16 |
+
|
17 |
+
logger = logging.getLogger(__name__)
|
18 |
+
|
19 |
+
|
20 |
+
class Mercurial(VersionControl):
    """Mercurial (hg) support for pip's VCS machinery.

    Registered with the global ``vcs`` registry below; all work is done by
    shelling out through ``run_command`` inherited from ``VersionControl``.
    """

    name = "hg"
    dirname = ".hg"
    repo_name = "clone"
    # URL schemes pip recognizes as Mercurial requirements.
    schemes = (
        "hg+file",
        "hg+http",
        "hg+https",
        "hg+ssh",
        "hg+static-http",
    )

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        """Return the command-line arguments selecting revision *rev*."""
        return [rev]

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        """Clone *url* into *dest* (without updating), then update to the
        requested revision. *verbosity* maps onto --quiet/--verbose/--debug."""
        rev_display = rev_options.to_display()
        logger.info(
            "Cloning hg %s%s to %s",
            url,
            rev_display,
            display_path(dest),
        )
        if verbosity <= 0:
            flags: Tuple[str, ...] = ("--quiet",)
        elif verbosity == 1:
            flags = ()
        elif verbosity == 2:
            flags = ("--verbose",)
        else:
            flags = ("--verbose", "--debug")
        self.run_command(make_command("clone", "--noupdate", *flags, url, dest))
        self.run_command(
            make_command("update", *flags, rev_options.to_args()),
            cwd=dest,
        )

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Rewrite paths.default in the repo's hgrc to *url*, then update.

        On failure to edit the config, log a warning and skip the update.
        """
        repo_config = os.path.join(dest, self.dirname, "hgrc")
        config = configparser.RawConfigParser()
        try:
            config.read(repo_config)
            config.set("paths", "default", url.secret)
            with open(repo_config, "w") as config_file:
                config.write(config_file)
        except (OSError, configparser.NoSectionError) as exc:
            logger.warning("Could not switch Mercurial repository to %s: %s", url, exc)
        else:
            cmd_args = make_command("update", "-q", rev_options.to_args())
            self.run_command(cmd_args, cwd=dest)

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        """Pull new changesets into *dest* and update to the requested rev."""
        self.run_command(["pull", "-q"], cwd=dest)
        cmd_args = make_command("update", "-q", rev_options.to_args())
        self.run_command(cmd_args, cwd=dest)

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        """Return the configured default remote URL for *location*."""
        url = cls.run_command(
            ["showconfig", "paths.default"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()
        # Local paths are normalized to file:// URLs.
        if cls._is_local_repository(url):
            url = path_to_url(url)
        return url.strip()

    @classmethod
    def get_revision(cls, location: str) -> str:
        """
        Return the repository-local changeset revision number, as an integer.
        """
        current_revision = cls.run_command(
            ["parents", "--template={rev}"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()
        return current_revision

    @classmethod
    def get_requirement_revision(cls, location: str) -> str:
        """
        Return the changeset identification hash, as a 40-character
        hexadecimal string
        """
        current_rev_hash = cls.run_command(
            ["parents", "--template={node}"],
            show_stdout=False,
            stdout_only=True,
            cwd=location,
        ).strip()
        return current_rev_hash

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """Always assume the versions don't match"""
        return False

    @classmethod
    def get_subdirectory(cls, location: str) -> Optional[str]:
        """
        Return the path to Python project root, relative to the repo root.
        Return None if the project root is in the repo root.
        """
        # find the repo root
        repo_root = cls.run_command(
            ["root"], show_stdout=False, stdout_only=True, cwd=location
        ).strip()
        if not os.path.isabs(repo_root):
            repo_root = os.path.abspath(os.path.join(location, repo_root))
        return find_path_to_project_root_from_repo_root(location, repo_root)

    @classmethod
    def get_repository_root(cls, location: str) -> Optional[str]:
        """Return the repo's top-level directory, or None if *location* is
        not under hg control (or hg itself is unavailable)."""
        loc = super().get_repository_root(location)
        if loc:
            return loc
        try:
            r = cls.run_command(
                ["root"],
                cwd=location,
                show_stdout=False,
                stdout_only=True,
                on_returncode="raise",
                log_failed_cmd=False,
            )
        except BadCommand:
            logger.debug(
                "could not determine if %s is under hg control "
                "because hg is not available",
                location,
            )
            return None
        except InstallationError:
            return None
        return os.path.normpath(r.rstrip("\r\n"))
|
161 |
+
|
162 |
+
|
163 |
+
vcs.register(Mercurial)
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/subversion.py
ADDED
@@ -0,0 +1,324 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
import os
|
3 |
+
import re
|
4 |
+
from typing import List, Optional, Tuple
|
5 |
+
|
6 |
+
from pip._internal.utils.misc import (
|
7 |
+
HiddenText,
|
8 |
+
display_path,
|
9 |
+
is_console_interactive,
|
10 |
+
is_installable_dir,
|
11 |
+
split_auth_from_netloc,
|
12 |
+
)
|
13 |
+
from pip._internal.utils.subprocess import CommandArgs, make_command
|
14 |
+
from pip._internal.vcs.versioncontrol import (
|
15 |
+
AuthInfo,
|
16 |
+
RemoteNotFoundError,
|
17 |
+
RevOptions,
|
18 |
+
VersionControl,
|
19 |
+
vcs,
|
20 |
+
)
|
21 |
+
|
22 |
+
logger = logging.getLogger(__name__)
|
23 |
+
|
24 |
+
_svn_xml_url_re = re.compile('url="([^"]+)"')
|
25 |
+
_svn_rev_re = re.compile(r'committed-rev="(\d+)"')
|
26 |
+
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
|
27 |
+
_svn_info_xml_url_re = re.compile(r"<url>(.*)</url>")
|
28 |
+
|
29 |
+
|
30 |
+
class Subversion(VersionControl):
    """Subversion backend for pip's VCS support (``svn+...`` URLs)."""

    name = "svn"
    dirname = ".svn"
    repo_name = "checkout"
    schemes = ("svn+ssh", "svn+http", "svn+https", "svn+svn", "svn+file")

    @classmethod
    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
        # svn remote URLs never carry the "svn+" prefix themselves, so it
        # always has to be added when building a requirement URL.
        return True

    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        return ["-r", rev]

    @classmethod
    def get_revision(cls, location: str) -> str:
        """
        Return the maximum revision for all files under a given location
        """
        # Note: taken from setuptools.command.egg_info
        revision = 0

        for base, dirs, _ in os.walk(location):
            if cls.dirname not in dirs:
                dirs[:] = []
                continue  # no sense walking uncontrolled subdirs
            dirs.remove(cls.dirname)
            entries_fn = os.path.join(base, cls.dirname, "entries")
            if not os.path.exists(entries_fn):
                # FIXME: should we warn?
                continue

            dirurl, localrev = cls._get_svn_url_rev(base)

            if base == location:
                assert dirurl is not None
                base = dirurl + "/"  # save the root url
            elif not dirurl or not dirurl.startswith(base):
                dirs[:] = []
                continue  # not part of the same svn tree, skip it
            revision = max(revision, localrev)
        return str(revision)

    @classmethod
    def get_netloc_and_auth(
        cls, netloc: str, scheme: str
    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
        """
        This override allows the auth information to be passed to svn via the
        --username and --password options instead of via the URL.
        """
        if scheme == "ssh":
            # The --username and --password options can't be used for
            # svn+ssh URLs, so keep the auth information in the URL.
            return super().get_netloc_and_auth(netloc, scheme)

        return split_auth_from_netloc(netloc)

    @classmethod
    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
        # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it
        url, rev, user_pass = super().get_url_rev_and_auth(url)
        if url.startswith("ssh://"):
            url = "svn+" + url
        return url, rev, user_pass

    @staticmethod
    def make_rev_args(
        username: Optional[str], password: Optional[HiddenText]
    ) -> CommandArgs:
        # Translate the credentials stripped from the netloc into svn
        # command-line options (password stays wrapped in HiddenText so it
        # is not logged).
        extra_args: CommandArgs = []
        if username:
            extra_args += ["--username", username]
        if password:
            extra_args += ["--password", password]

        return extra_args

    @classmethod
    def get_remote_url(cls, location: str) -> str:
        # In cases where the source is in a subdirectory, we have to look up in
        # the location until we find a valid project root.
        orig_location = location
        while not is_installable_dir(location):
            last_location = location
            location = os.path.dirname(location)
            if location == last_location:
                # We've traversed up to the root of the filesystem without
                # finding a Python project.
                logger.warning(
                    "Could not find Python project for directory %s (tried all "
                    "parent directories)",
                    orig_location,
                )
                raise RemoteNotFoundError

        url, _rev = cls._get_svn_url_rev(location)
        if url is None:
            raise RemoteNotFoundError

        return url

    @classmethod
    def _get_svn_url_rev(cls, location: str) -> Tuple[Optional[str], int]:
        """
        Return (url, max_revision) for the working copy at ``location``,
        handling the three historic .svn metadata formats.
        """
        # Imported locally to avoid a circular import at module load time.
        from pip._internal.exceptions import InstallationError

        entries_path = os.path.join(location, cls.dirname, "entries")
        if os.path.exists(entries_path):
            with open(entries_path) as f:
                data = f.read()
        else:  # subversion >= 1.7 does not have the 'entries' file
            data = ""

        url = None
        if data.startswith("8") or data.startswith("9") or data.startswith("10"):
            # svn <= 1.6 plain-text entries format: records are separated by
            # form-feed lines; the leading number is the format version.
            entries = list(map(str.splitlines, data.split("\n\x0c\n")))
            del entries[0][0]  # get rid of the '8'
            url = entries[0][3]
            revs = [int(d[9]) for d in entries if len(d) > 9 and d[9]] + [0]
        elif data.startswith("<?xml"):
            # Even older XML-based entries format.
            match = _svn_xml_url_re.search(data)
            if not match:
                raise ValueError(f"Badly formatted data: {data!r}")
            url = match.group(1)  # get repository URL
            revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
        else:
            try:
                # subversion >= 1.7
                # Note that using get_remote_call_options is not necessary here
                # because `svn info` is being run against a local directory.
                # We don't need to worry about making sure interactive mode
                # is being used to prompt for passwords, because passwords
                # are only potentially needed for remote server requests.
                xml = cls.run_command(
                    ["info", "--xml", location],
                    show_stdout=False,
                    stdout_only=True,
                )
                match = _svn_info_xml_url_re.search(xml)
                assert match is not None
                url = match.group(1)
                revs = [int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)]
            except InstallationError:
                url, revs = None, []

        if revs:
            rev = max(revs)
        else:
            rev = 0

        return url, rev

    @classmethod
    def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
        """Always assume the versions don't match"""
        return False

    def __init__(self, use_interactive: Optional[bool] = None) -> None:
        # Default to interactive mode only when stdin is attached to a
        # console, so svn can prompt for credentials when needed.
        if use_interactive is None:
            use_interactive = is_console_interactive()
        self.use_interactive = use_interactive

        # This member is used to cache the fetched version of the current
        # ``svn`` client.
        # Special value definitions:
        # None: Not evaluated yet.
        # Empty tuple: Could not parse version.
        self._vcs_version: Optional[Tuple[int, ...]] = None

        super().__init__()

    def call_vcs_version(self) -> Tuple[int, ...]:
        """Query the version of the currently installed Subversion client.

        :return: A tuple containing the parts of the version information or
            ``()`` if the version returned from ``svn`` could not be parsed.
        :raises: BadCommand: If ``svn`` is not installed.
        """
        # Example versions:
        #   svn, version 1.10.3 (r1842928)
        #      compiled Feb 25 2019, 14:20:39 on x86_64-apple-darwin17.0.0
        #   svn, version 1.7.14 (r1542130)
        #      compiled Mar 28 2018, 08:49:13 on x86_64-pc-linux-gnu
        #   svn, version 1.12.0-SlikSvn (SlikSvn/1.12.0)
        #      compiled May 28 2019, 13:44:56 on x86_64-microsoft-windows6.2
        version_prefix = "svn, version "
        version = self.run_command(["--version"], show_stdout=False, stdout_only=True)
        if not version.startswith(version_prefix):
            return ()

        version = version[len(version_prefix) :].split()[0]
        # Drop any vendor suffix (e.g. "-SlikSvn") before splitting the
        # dotted numeric components.
        version_list = version.partition("-")[0].split(".")
        try:
            parsed_version = tuple(map(int, version_list))
        except ValueError:
            return ()

        return parsed_version

    def get_vcs_version(self) -> Tuple[int, ...]:
        """Return the version of the currently installed Subversion client.

        If the version of the Subversion client has already been queried,
        a cached value will be used.

        :return: A tuple containing the parts of the version information or
            ``()`` if the version returned from ``svn`` could not be parsed.
        :raises: BadCommand: If ``svn`` is not installed.
        """
        if self._vcs_version is not None:
            # Use cached version, if available.
            # If parsing the version failed previously (empty tuple),
            # do not attempt to parse it again.
            return self._vcs_version

        vcs_version = self.call_vcs_version()
        self._vcs_version = vcs_version
        return vcs_version

    def get_remote_call_options(self) -> CommandArgs:
        """Return options to be used on calls to Subversion that contact the server.

        These options are applicable for the following ``svn`` subcommands used
        in this class.

            - checkout
            - switch
            - update

        :return: A list of command line arguments to pass to ``svn``.
        """
        if not self.use_interactive:
            # --non-interactive switch is available since Subversion 0.14.4.
            # Subversion < 1.8 runs in interactive mode by default.
            return ["--non-interactive"]

        svn_version = self.get_vcs_version()
        # By default, Subversion >= 1.8 runs in non-interactive mode if
        # stdin is not a TTY. Since that is how pip invokes SVN, in
        # call_subprocess(), pip must pass --force-interactive to ensure
        # the user can be prompted for a password, if required.
        # SVN added the --force-interactive option in SVN 1.8. Since
        # e.g. RHEL/CentOS 7, which is supported until 2024, ships with
        # SVN 1.7, pip should continue to support SVN 1.7. Therefore, pip
        # can't safely add the option if the SVN version is < 1.8 (or unknown).
        if svn_version >= (1, 8):
            return ["--force-interactive"]

        return []

    def fetch_new(
        self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
    ) -> None:
        rev_display = rev_options.to_display()
        logger.info(
            "Checking out %s%s to %s",
            url,
            rev_display,
            display_path(dest),
        )
        if verbosity <= 0:
            flag = "--quiet"
        else:
            flag = ""
        cmd_args = make_command(
            "checkout",
            flag,
            self.get_remote_call_options(),
            rev_options.to_args(),
            url,
            dest,
        )
        self.run_command(cmd_args)

    def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        # Re-point an existing checkout at a different repository URL.
        cmd_args = make_command(
            "switch",
            self.get_remote_call_options(),
            rev_options.to_args(),
            url,
            dest,
        )
        self.run_command(cmd_args)

    def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
        cmd_args = make_command(
            "update",
            self.get_remote_call_options(),
            rev_options.to_args(),
            dest,
        )
        self.run_command(cmd_args)
|
322 |
+
|
323 |
+
|
324 |
+
vcs.register(Subversion)
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/vcs/versioncontrol.py
ADDED
@@ -0,0 +1,705 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Handles all VCS (version control) support"""
|
2 |
+
|
3 |
+
import logging
|
4 |
+
import os
|
5 |
+
import shutil
|
6 |
+
import sys
|
7 |
+
import urllib.parse
|
8 |
+
from typing import (
|
9 |
+
TYPE_CHECKING,
|
10 |
+
Any,
|
11 |
+
Dict,
|
12 |
+
Iterable,
|
13 |
+
Iterator,
|
14 |
+
List,
|
15 |
+
Mapping,
|
16 |
+
Optional,
|
17 |
+
Tuple,
|
18 |
+
Type,
|
19 |
+
Union,
|
20 |
+
)
|
21 |
+
|
22 |
+
from pip._internal.cli.spinners import SpinnerInterface
|
23 |
+
from pip._internal.exceptions import BadCommand, InstallationError
|
24 |
+
from pip._internal.utils.misc import (
|
25 |
+
HiddenText,
|
26 |
+
ask_path_exists,
|
27 |
+
backup_dir,
|
28 |
+
display_path,
|
29 |
+
hide_url,
|
30 |
+
hide_value,
|
31 |
+
is_installable_dir,
|
32 |
+
rmtree,
|
33 |
+
)
|
34 |
+
from pip._internal.utils.subprocess import (
|
35 |
+
CommandArgs,
|
36 |
+
call_subprocess,
|
37 |
+
format_command_args,
|
38 |
+
make_command,
|
39 |
+
)
|
40 |
+
from pip._internal.utils.urls import get_url_scheme
|
41 |
+
|
42 |
+
if TYPE_CHECKING:
|
43 |
+
# Literal was introduced in Python 3.8.
|
44 |
+
#
|
45 |
+
# TODO: Remove `if TYPE_CHECKING` when dropping support for Python 3.7.
|
46 |
+
from typing import Literal
|
47 |
+
|
48 |
+
|
49 |
+
__all__ = ["vcs"]
|
50 |
+
|
51 |
+
|
52 |
+
logger = logging.getLogger(__name__)
|
53 |
+
|
54 |
+
AuthInfo = Tuple[Optional[str], Optional[str]]
|
55 |
+
|
56 |
+
|
57 |
+
def is_url(name: str) -> bool:
    """
    Return true if the name looks like a URL.
    """
    scheme = get_url_scheme(name)
    if scheme is None:
        return False
    # Accept plain transfer schemes plus every scheme advertised by the
    # registered VCS backends (e.g. "git+https", "svn+ssh").
    return scheme in ["http", "https", "file", "ftp"] + vcs.all_schemes
|
65 |
+
|
66 |
+
|
67 |
+
def make_vcs_requirement_url(
    repo_url: str, rev: str, project_name: str, subdir: Optional[str] = None
) -> str:
    """
    Return the URL for a VCS requirement.

    Args:
        repo_url: the remote VCS url, with any needed VCS prefix (e.g. "git+").
        project_name: the (unescaped) project name.
    """
    # Egg fragment names use underscores where the project name has dashes.
    egg_name = project_name.replace("-", "_")
    pieces = [repo_url, "@", rev, "#egg=", egg_name]
    if subdir:
        pieces.append(f"&subdirectory={subdir}")
    return "".join(pieces)
|
83 |
+
|
84 |
+
|
85 |
+
def find_path_to_project_root_from_repo_root(
    location: str, repo_root: str
) -> Optional[str]:
    """
    Find the Python project's root by searching up the filesystem from
    `location`. Return the path to project root relative to `repo_root`.
    Return None if the project root is `repo_root`, or cannot be found.
    """
    # find project root.
    orig_location = location
    while not is_installable_dir(location):
        last_location = location
        location = os.path.dirname(location)
        if location == last_location:
            # We've traversed up to the root of the filesystem without
            # finding a Python project.
            logger.warning(
                "Could not find a Python project for directory %s (tried all "
                "parent directories)",
                orig_location,
            )
            return None

    # Project root coincides with the repository root: no subdirectory.
    if os.path.samefile(repo_root, location):
        return None

    return os.path.relpath(location, repo_root)
|
112 |
+
|
113 |
+
|
114 |
+
class RemoteNotFoundError(Exception):
    """Raised when a checkout's remote URL cannot be determined."""

    pass
|
116 |
+
|
117 |
+
|
118 |
+
class RemoteNotValidError(Exception):
    """Raised when a repository remote URL is present but not usable."""

    def __init__(self, url: str):
        # Keep the offending URL on the exception for callers to inspect.
        self.url = url
        super().__init__(url)
|
122 |
+
|
123 |
+
|
124 |
+
class RevOptions:

    """
    Encapsulates a VCS-specific revision to install, along with any VCS
    install options.

    Instances of this class should be treated as if immutable.
    """

    def __init__(
        self,
        vc_class: Type["VersionControl"],
        rev: Optional[str] = None,
        extra_args: Optional[CommandArgs] = None,
    ) -> None:
        """
        Args:
          vc_class: a VersionControl subclass.
          rev: the name of the revision to install.
          extra_args: a list of extra options.
        """
        if extra_args is None:
            extra_args = []

        self.extra_args = extra_args
        self.rev = rev
        self.vc_class = vc_class
        # Populated later by callers that resolve `rev` to a branch.
        self.branch_name: Optional[str] = None

    def __repr__(self) -> str:
        return f"<RevOptions {self.vc_class.name}: rev={self.rev!r}>"

    @property
    def arg_rev(self) -> Optional[str]:
        # Fall back to the backend's default revision when none was given.
        if self.rev is None:
            return self.vc_class.default_arg_rev

        return self.rev

    def to_args(self) -> CommandArgs:
        """
        Return the VCS-specific command arguments.
        """
        args: CommandArgs = []
        rev = self.arg_rev
        if rev is not None:
            args += self.vc_class.get_base_rev_args(rev)
        args += self.extra_args

        return args

    def to_display(self) -> str:
        # Human-readable suffix for log messages; empty when no revision.
        if not self.rev:
            return ""

        return f" (to revision {self.rev})"

    def make_new(self, rev: str) -> "RevOptions":
        """
        Make a copy of the current instance, but with a new rev.

        Args:
          rev: the name of the revision for the new object.
        """
        return self.vc_class.make_rev_options(rev, extra_args=self.extra_args)
|
189 |
+
|
190 |
+
|
191 |
+
class VcsSupport:
    """Registry of VersionControl backends, keyed by backend name."""

    # Maps backend name (e.g. "git") to a backend *instance*.
    _registry: Dict[str, "VersionControl"] = {}
    schemes = ["ssh", "git", "hg", "bzr", "sftp", "svn"]

    def __init__(self) -> None:
        # Register more schemes with urlparse for various version control
        # systems
        urllib.parse.uses_netloc.extend(self.schemes)
        super().__init__()

    def __iter__(self) -> Iterator[str]:
        return self._registry.__iter__()

    @property
    def backends(self) -> List["VersionControl"]:
        return list(self._registry.values())

    @property
    def dirnames(self) -> List[str]:
        # Metadata directory names, e.g. ".git", ".svn".
        return [backend.dirname for backend in self.backends]

    @property
    def all_schemes(self) -> List[str]:
        schemes: List[str] = []
        for backend in self.backends:
            schemes.extend(backend.schemes)
        return schemes

    def register(self, cls: Type["VersionControl"]) -> None:
        """Instantiate and register a backend class (first wins per name)."""
        if not hasattr(cls, "name"):
            logger.warning("Cannot register VCS %s", cls.__name__)
            return
        if cls.name not in self._registry:
            self._registry[cls.name] = cls()
            logger.debug("Registered VCS backend: %s", cls.name)

    def unregister(self, name: str) -> None:
        if name in self._registry:
            del self._registry[name]

    def get_backend_for_dir(self, location: str) -> Optional["VersionControl"]:
        """
        Return a VersionControl object if a repository of that type is found
        at the given directory.
        """
        # Map each candidate repository root to the backend that claimed it.
        vcs_backends = {}
        for vcs_backend in self._registry.values():
            repo_path = vcs_backend.get_repository_root(location)
            if not repo_path:
                continue
            logger.debug("Determine that %s uses VCS: %s", location, vcs_backend.name)
            vcs_backends[repo_path] = vcs_backend

        if not vcs_backends:
            return None

        # Choose the VCS in the inner-most directory. Since all repository
        # roots found here would be either `location` or one of its
        # parents, the longest path should have the most path components,
        # i.e. the backend representing the inner-most repository.
        inner_most_repo_path = max(vcs_backends, key=len)
        return vcs_backends[inner_most_repo_path]

    def get_backend_for_scheme(self, scheme: str) -> Optional["VersionControl"]:
        """
        Return a VersionControl object or None.
        """
        for vcs_backend in self._registry.values():
            if scheme in vcs_backend.schemes:
                return vcs_backend
        return None

    def get_backend(self, name: str) -> Optional["VersionControl"]:
        """
        Return a VersionControl object or None.
        """
        name = name.lower()
        return self._registry.get(name)
|
269 |
+
|
270 |
+
|
271 |
+
vcs = VcsSupport()
|
272 |
+
|
273 |
+
|
274 |
+
class VersionControl:
|
275 |
+
name = ""
|
276 |
+
dirname = ""
|
277 |
+
repo_name = ""
|
278 |
+
# List of supported schemes for this Version Control
|
279 |
+
schemes: Tuple[str, ...] = ()
|
280 |
+
# Iterable of environment variable names to pass to call_subprocess().
|
281 |
+
unset_environ: Tuple[str, ...] = ()
|
282 |
+
default_arg_rev: Optional[str] = None
|
283 |
+
|
284 |
+
    @classmethod
    def should_add_vcs_url_prefix(cls, remote_url: str) -> bool:
        """
        Return whether the vcs prefix (e.g. "git+") should be added to a
        repository's remote url when used in a requirement.
        """
        # Skip the prefix when the URL scheme already names this VCS.
        return not remote_url.lower().startswith(f"{cls.name}:")
|
291 |
+
|
292 |
+
    @classmethod
    def get_subdirectory(cls, location: str) -> Optional[str]:
        """
        Return the path to Python project root, relative to the repo root.
        Return None if the project root is in the repo root.
        """
        # Default: assume the project lives at the repository root.
        return None
|
299 |
+
|
300 |
+
    @classmethod
    def get_requirement_revision(cls, repo_dir: str) -> str:
        """
        Return the revision string that should be used in a requirement.
        """
        # Default: the current revision; backends may override with a more
        # pin-able identifier (e.g. a full changeset hash).
        return cls.get_revision(repo_dir)
|
306 |
+
|
307 |
+
    @classmethod
    def get_src_requirement(cls, repo_dir: str, project_name: str) -> str:
        """
        Return the requirement string to use to redownload the files
        currently at the given repository directory.

        Args:
          project_name: the (unescaped) project name.

        The return value has a form similar to the following:

            {repository_url}@{revision}#egg={project_name}
        """
        repo_url = cls.get_remote_url(repo_dir)

        # Prefix with e.g. "git+" unless the URL scheme already names the VCS.
        if cls.should_add_vcs_url_prefix(repo_url):
            repo_url = f"{cls.name}+{repo_url}"

        revision = cls.get_requirement_revision(repo_dir)
        subdir = cls.get_subdirectory(repo_dir)
        req = make_vcs_requirement_url(repo_url, revision, project_name, subdir=subdir)

        return req
|
330 |
+
|
331 |
+
    @staticmethod
    def get_base_rev_args(rev: str) -> List[str]:
        """
        Return the base revision arguments for a vcs command.

        Args:
          rev: the name of a revision to install.  Cannot be None.
        """
        # Must be implemented by each concrete backend.
        raise NotImplementedError
|
340 |
+
|
341 |
+
    def is_immutable_rev_checkout(self, url: str, dest: str) -> bool:
        """
        Return true if the commit hash checked out at dest matches
        the revision in url.

        Always return False, if the VCS does not support immutable commit
        hashes.

        This method does not check if there are local uncommitted changes
        in dest after checkout, as pip currently has no use case for that.
        """
        # Conservative default; backends with immutable hashes override this.
        return False
|
353 |
+
|
354 |
+
    @classmethod
    def make_rev_options(
        cls, rev: Optional[str] = None, extra_args: Optional[CommandArgs] = None
    ) -> RevOptions:
        """
        Return a RevOptions object.

        Args:
          rev: the name of a revision to install.
          extra_args: a list of extra options.
        """
        return RevOptions(cls, rev, extra_args=extra_args)
|
366 |
+
|
367 |
+
    @classmethod
    def _is_local_repository(cls, repo: str) -> bool:
        """
        posix absolute paths start with os.path.sep,
        win32 ones start with drive (like c:\\folder)
        """
        drive, tail = os.path.splitdrive(repo)
        # Either a posix absolute path or a Windows path with a drive letter.
        return repo.startswith(os.path.sep) or bool(drive)
|
375 |
+
|
376 |
+
    @classmethod
    def get_netloc_and_auth(
        cls, netloc: str, scheme: str
    ) -> Tuple[str, Tuple[Optional[str], Optional[str]]]:
        """
        Parse the repository URL's netloc, and return the new netloc to use
        along with auth information.

        Args:
          netloc: the original repository URL netloc.
          scheme: the repository URL's scheme without the vcs prefix.

        This is mainly for the Subversion class to override, so that auth
        information can be provided via the --username and --password options
        instead of through the URL.  For other subclasses like Git without
        such an option, auth information must stay in the URL.

        Returns: (netloc, (username, password)).
        """
        # Default: leave credentials embedded in the netloc untouched.
        return netloc, (None, None)
|
396 |
+
|
397 |
+
    @classmethod
    def get_url_rev_and_auth(cls, url: str) -> Tuple[str, Optional[str], AuthInfo]:
        """
        Parse the repository URL to use, and return the URL, revision,
        and auth info to use.

        Returns: (url, rev, (username, password)).
        """
        scheme, netloc, path, query, frag = urllib.parse.urlsplit(url)
        if "+" not in scheme:
            raise ValueError(
                "Sorry, {!r} is a malformed VCS url. "
                "The format is <vcs>+<protocol>://<url>, "
                "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp".format(url)
            )
        # Remove the vcs prefix.
        scheme = scheme.split("+", 1)[1]
        netloc, user_pass = cls.get_netloc_and_auth(netloc, scheme)
        rev = None
        # A trailing "@rev" in the path selects the revision to install.
        if "@" in path:
            path, rev = path.rsplit("@", 1)
            if not rev:
                raise InstallationError(
                    "The URL {!r} has an empty revision (after @) "
                    "which is not supported. Include a revision after @ "
                    "or remove @ from the URL.".format(url)
                )
        # Rebuild the URL without the fragment (egg/subdirectory markers).
        url = urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
        return url, rev, user_pass
|
426 |
+
|
427 |
+
    @staticmethod
    def make_rev_args(
        username: Optional[str], password: Optional[HiddenText]
    ) -> CommandArgs:
        """
        Return the RevOptions "extra arguments" to use in obtain().
        """
        # Default: no extra arguments (Subversion overrides this).
        return []
|
435 |
+
|
436 |
+
def get_url_rev_options(self, url: HiddenText) -> Tuple[HiddenText, RevOptions]:
|
437 |
+
"""
|
438 |
+
Return the URL and RevOptions object to use in obtain(),
|
439 |
+
as a tuple (url, rev_options).
|
440 |
+
"""
|
441 |
+
secret_url, rev, user_pass = self.get_url_rev_and_auth(url.secret)
|
442 |
+
username, secret_password = user_pass
|
443 |
+
password: Optional[HiddenText] = None
|
444 |
+
if secret_password is not None:
|
445 |
+
password = hide_value(secret_password)
|
446 |
+
extra_args = self.make_rev_args(username, password)
|
447 |
+
rev_options = self.make_rev_options(rev, extra_args=extra_args)
|
448 |
+
|
449 |
+
return hide_url(secret_url), rev_options
|
450 |
+
|
451 |
+
@staticmethod
|
452 |
+
def normalize_url(url: str) -> str:
|
453 |
+
"""
|
454 |
+
Normalize a URL for comparison by unquoting it and removing any
|
455 |
+
trailing slash.
|
456 |
+
"""
|
457 |
+
return urllib.parse.unquote(url).rstrip("/")
|
458 |
+
|
459 |
+
@classmethod
|
460 |
+
def compare_urls(cls, url1: str, url2: str) -> bool:
|
461 |
+
"""
|
462 |
+
Compare two repo URLs for identity, ignoring incidental differences.
|
463 |
+
"""
|
464 |
+
return cls.normalize_url(url1) == cls.normalize_url(url2)
|
465 |
+
|
466 |
+
def fetch_new(
|
467 |
+
self, dest: str, url: HiddenText, rev_options: RevOptions, verbosity: int
|
468 |
+
) -> None:
|
469 |
+
"""
|
470 |
+
Fetch a revision from a repository, in the case that this is the
|
471 |
+
first fetch from the repository.
|
472 |
+
|
473 |
+
Args:
|
474 |
+
dest: the directory to fetch the repository to.
|
475 |
+
rev_options: a RevOptions object.
|
476 |
+
verbosity: verbosity level.
|
477 |
+
"""
|
478 |
+
raise NotImplementedError
|
479 |
+
|
480 |
+
def switch(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
|
481 |
+
"""
|
482 |
+
Switch the repo at ``dest`` to point to ``URL``.
|
483 |
+
|
484 |
+
Args:
|
485 |
+
rev_options: a RevOptions object.
|
486 |
+
"""
|
487 |
+
raise NotImplementedError
|
488 |
+
|
489 |
+
def update(self, dest: str, url: HiddenText, rev_options: RevOptions) -> None:
|
490 |
+
"""
|
491 |
+
Update an already-existing repo to the given ``rev_options``.
|
492 |
+
|
493 |
+
Args:
|
494 |
+
rev_options: a RevOptions object.
|
495 |
+
"""
|
496 |
+
raise NotImplementedError
|
497 |
+
|
498 |
+
@classmethod
|
499 |
+
def is_commit_id_equal(cls, dest: str, name: Optional[str]) -> bool:
|
500 |
+
"""
|
501 |
+
Return whether the id of the current commit equals the given name.
|
502 |
+
|
503 |
+
Args:
|
504 |
+
dest: the repository directory.
|
505 |
+
name: a string name.
|
506 |
+
"""
|
507 |
+
raise NotImplementedError
|
508 |
+
|
509 |
+
def obtain(self, dest: str, url: HiddenText, verbosity: int) -> None:
|
510 |
+
"""
|
511 |
+
Install or update in editable mode the package represented by this
|
512 |
+
VersionControl object.
|
513 |
+
|
514 |
+
:param dest: the repository directory in which to install or update.
|
515 |
+
:param url: the repository URL starting with a vcs prefix.
|
516 |
+
:param verbosity: verbosity level.
|
517 |
+
"""
|
518 |
+
url, rev_options = self.get_url_rev_options(url)
|
519 |
+
|
520 |
+
if not os.path.exists(dest):
|
521 |
+
self.fetch_new(dest, url, rev_options, verbosity=verbosity)
|
522 |
+
return
|
523 |
+
|
524 |
+
rev_display = rev_options.to_display()
|
525 |
+
if self.is_repository_directory(dest):
|
526 |
+
existing_url = self.get_remote_url(dest)
|
527 |
+
if self.compare_urls(existing_url, url.secret):
|
528 |
+
logger.debug(
|
529 |
+
"%s in %s exists, and has correct URL (%s)",
|
530 |
+
self.repo_name.title(),
|
531 |
+
display_path(dest),
|
532 |
+
url,
|
533 |
+
)
|
534 |
+
if not self.is_commit_id_equal(dest, rev_options.rev):
|
535 |
+
logger.info(
|
536 |
+
"Updating %s %s%s",
|
537 |
+
display_path(dest),
|
538 |
+
self.repo_name,
|
539 |
+
rev_display,
|
540 |
+
)
|
541 |
+
self.update(dest, url, rev_options)
|
542 |
+
else:
|
543 |
+
logger.info("Skipping because already up-to-date.")
|
544 |
+
return
|
545 |
+
|
546 |
+
logger.warning(
|
547 |
+
"%s %s in %s exists with URL %s",
|
548 |
+
self.name,
|
549 |
+
self.repo_name,
|
550 |
+
display_path(dest),
|
551 |
+
existing_url,
|
552 |
+
)
|
553 |
+
prompt = ("(s)witch, (i)gnore, (w)ipe, (b)ackup ", ("s", "i", "w", "b"))
|
554 |
+
else:
|
555 |
+
logger.warning(
|
556 |
+
"Directory %s already exists, and is not a %s %s.",
|
557 |
+
dest,
|
558 |
+
self.name,
|
559 |
+
self.repo_name,
|
560 |
+
)
|
561 |
+
# https://github.com/python/mypy/issues/1174
|
562 |
+
prompt = ("(i)gnore, (w)ipe, (b)ackup ", ("i", "w", "b")) # type: ignore
|
563 |
+
|
564 |
+
logger.warning(
|
565 |
+
"The plan is to install the %s repository %s",
|
566 |
+
self.name,
|
567 |
+
url,
|
568 |
+
)
|
569 |
+
response = ask_path_exists("What to do? {}".format(prompt[0]), prompt[1])
|
570 |
+
|
571 |
+
if response == "a":
|
572 |
+
sys.exit(-1)
|
573 |
+
|
574 |
+
if response == "w":
|
575 |
+
logger.warning("Deleting %s", display_path(dest))
|
576 |
+
rmtree(dest)
|
577 |
+
self.fetch_new(dest, url, rev_options, verbosity=verbosity)
|
578 |
+
return
|
579 |
+
|
580 |
+
if response == "b":
|
581 |
+
dest_dir = backup_dir(dest)
|
582 |
+
logger.warning("Backing up %s to %s", display_path(dest), dest_dir)
|
583 |
+
shutil.move(dest, dest_dir)
|
584 |
+
self.fetch_new(dest, url, rev_options, verbosity=verbosity)
|
585 |
+
return
|
586 |
+
|
587 |
+
# Do nothing if the response is "i".
|
588 |
+
if response == "s":
|
589 |
+
logger.info(
|
590 |
+
"Switching %s %s to %s%s",
|
591 |
+
self.repo_name,
|
592 |
+
display_path(dest),
|
593 |
+
url,
|
594 |
+
rev_display,
|
595 |
+
)
|
596 |
+
self.switch(dest, url, rev_options)
|
597 |
+
|
598 |
+
def unpack(self, location: str, url: HiddenText, verbosity: int) -> None:
|
599 |
+
"""
|
600 |
+
Clean up current location and download the url repository
|
601 |
+
(and vcs infos) into location
|
602 |
+
|
603 |
+
:param url: the repository URL starting with a vcs prefix.
|
604 |
+
:param verbosity: verbosity level.
|
605 |
+
"""
|
606 |
+
if os.path.exists(location):
|
607 |
+
rmtree(location)
|
608 |
+
self.obtain(location, url=url, verbosity=verbosity)
|
609 |
+
|
610 |
+
@classmethod
|
611 |
+
def get_remote_url(cls, location: str) -> str:
|
612 |
+
"""
|
613 |
+
Return the url used at location
|
614 |
+
|
615 |
+
Raises RemoteNotFoundError if the repository does not have a remote
|
616 |
+
url configured.
|
617 |
+
"""
|
618 |
+
raise NotImplementedError
|
619 |
+
|
620 |
+
@classmethod
|
621 |
+
def get_revision(cls, location: str) -> str:
|
622 |
+
"""
|
623 |
+
Return the current commit id of the files at the given location.
|
624 |
+
"""
|
625 |
+
raise NotImplementedError
|
626 |
+
|
627 |
+
@classmethod
|
628 |
+
def run_command(
|
629 |
+
cls,
|
630 |
+
cmd: Union[List[str], CommandArgs],
|
631 |
+
show_stdout: bool = True,
|
632 |
+
cwd: Optional[str] = None,
|
633 |
+
on_returncode: 'Literal["raise", "warn", "ignore"]' = "raise",
|
634 |
+
extra_ok_returncodes: Optional[Iterable[int]] = None,
|
635 |
+
command_desc: Optional[str] = None,
|
636 |
+
extra_environ: Optional[Mapping[str, Any]] = None,
|
637 |
+
spinner: Optional[SpinnerInterface] = None,
|
638 |
+
log_failed_cmd: bool = True,
|
639 |
+
stdout_only: bool = False,
|
640 |
+
) -> str:
|
641 |
+
"""
|
642 |
+
Run a VCS subcommand
|
643 |
+
This is simply a wrapper around call_subprocess that adds the VCS
|
644 |
+
command name, and checks that the VCS is available
|
645 |
+
"""
|
646 |
+
cmd = make_command(cls.name, *cmd)
|
647 |
+
if command_desc is None:
|
648 |
+
command_desc = format_command_args(cmd)
|
649 |
+
try:
|
650 |
+
return call_subprocess(
|
651 |
+
cmd,
|
652 |
+
show_stdout,
|
653 |
+
cwd,
|
654 |
+
on_returncode=on_returncode,
|
655 |
+
extra_ok_returncodes=extra_ok_returncodes,
|
656 |
+
command_desc=command_desc,
|
657 |
+
extra_environ=extra_environ,
|
658 |
+
unset_environ=cls.unset_environ,
|
659 |
+
spinner=spinner,
|
660 |
+
log_failed_cmd=log_failed_cmd,
|
661 |
+
stdout_only=stdout_only,
|
662 |
+
)
|
663 |
+
except FileNotFoundError:
|
664 |
+
# errno.ENOENT = no such file or directory
|
665 |
+
# In other words, the VCS executable isn't available
|
666 |
+
raise BadCommand(
|
667 |
+
f"Cannot find command {cls.name!r} - do you have "
|
668 |
+
f"{cls.name!r} installed and in your PATH?"
|
669 |
+
)
|
670 |
+
except PermissionError:
|
671 |
+
# errno.EACCES = Permission denied
|
672 |
+
# This error occurs, for instance, when the command is installed
|
673 |
+
# only for another user. So, the current user don't have
|
674 |
+
# permission to call the other user command.
|
675 |
+
raise BadCommand(
|
676 |
+
f"No permission to execute {cls.name!r} - install it "
|
677 |
+
f"locally, globally (ask admin), or check your PATH. "
|
678 |
+
f"See possible solutions at "
|
679 |
+
f"https://pip.pypa.io/en/latest/reference/pip_freeze/"
|
680 |
+
f"#fixing-permission-denied."
|
681 |
+
)
|
682 |
+
|
683 |
+
@classmethod
|
684 |
+
def is_repository_directory(cls, path: str) -> bool:
|
685 |
+
"""
|
686 |
+
Return whether a directory path is a repository directory.
|
687 |
+
"""
|
688 |
+
logger.debug("Checking in %s for %s (%s)...", path, cls.dirname, cls.name)
|
689 |
+
return os.path.exists(os.path.join(path, cls.dirname))
|
690 |
+
|
691 |
+
@classmethod
|
692 |
+
def get_repository_root(cls, location: str) -> Optional[str]:
|
693 |
+
"""
|
694 |
+
Return the "root" (top-level) directory controlled by the vcs,
|
695 |
+
or `None` if the directory is not in any.
|
696 |
+
|
697 |
+
It is meant to be overridden to implement smarter detection
|
698 |
+
mechanisms for specific vcs.
|
699 |
+
|
700 |
+
This can do more than is_repository_directory() alone. For
|
701 |
+
example, the Git override checks that Git is actually available.
|
702 |
+
"""
|
703 |
+
if cls.is_repository_directory(location):
|
704 |
+
return location
|
705 |
+
return None
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/__init__.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
|
3 |
+
depend on something external.
|
4 |
+
|
5 |
+
Files inside of pip._vendor should be considered immutable and should only be
|
6 |
+
updated to versions from upstream.
|
7 |
+
"""
|
8 |
+
from __future__ import absolute_import
|
9 |
+
|
10 |
+
import glob
|
11 |
+
import os.path
|
12 |
+
import sys
|
13 |
+
|
14 |
+
# Downstream redistributors that have debundled pip's dependencies should
# patch this flag to True. Doing so triggers the aliasing below so that,
# e.g., "six" remains importable under the pip._vendor namespace.
DEBUNDLED = False

# Directory searched for .whl files that get prepended to sys.path before
# any vendored import is attempted. This supports redistributors (Debian,
# Fedora, ...) that ship pip's dependencies as separate wheels.
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
|
24 |
+
|
25 |
+
|
26 |
+
# Small helper that aliases a vendored module name to the real module when
# the vendored copy is absent. The idea comes from
# https://github.com/kennethreitz/requests/pull/2567.
def vendored(modulename):
    vendored_name = "{0}.{1}".format(__name__, modulename)

    try:
        __import__(modulename, globals(), locals(), level=0)
    except ImportError:
        # Tolerate the failure silently: if both the vendored copy and the
        # real module are missing, the eventual in-context import of the
        # vendored name will raise a clearer error than we could here.
        pass
    else:
        # Register the real module under the vendored name and hang it off
        # the parent package as an attribute.
        real_module = sys.modules[modulename]
        sys.modules[vendored_name] = real_module
        parent_name, _, attr_name = vendored_name.rpartition(".")
        setattr(sys.modules[parent_name], attr_name, real_module)
|
48 |
+
|
49 |
+
|
50 |
+
# When operating in a debundled setup, prepend any wheels shipped next to
# this file to sys.path and alias every vendored library to its real
# counterpart. With DEBUNDLED left False (the default) this whole section
# is a no-op, but redistributors can enable it consistently everywhere.
if DEBUNDLED:
    # Wheels in WHEEL_DIR win over anything else on sys.path.
    sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path

    # Alias all of our vendored dependencies, in dependency-friendly order.
    for _modulename in (
        "cachecontrol",
        "certifi",
        "colorama",
        "distlib",
        "distro",
        "html5lib",
        "six",
        "six.moves",
        "six.moves.urllib",
        "six.moves.urllib.parse",
        "packaging",
        "packaging.version",
        "packaging.specifiers",
        "pep517",
        "pkg_resources",
        "platformdirs",
        "progress",
        "requests",
        "requests.exceptions",
        "requests.packages",
        "requests.packages.urllib3",
        "requests.packages.urllib3._collections",
        "requests.packages.urllib3.connection",
        "requests.packages.urllib3.connectionpool",
        "requests.packages.urllib3.contrib",
        "requests.packages.urllib3.contrib.ntlmpool",
        "requests.packages.urllib3.contrib.pyopenssl",
        "requests.packages.urllib3.exceptions",
        "requests.packages.urllib3.fields",
        "requests.packages.urllib3.filepost",
        "requests.packages.urllib3.packages",
        "requests.packages.urllib3.packages.ordered_dict",
        "requests.packages.urllib3.packages.six",
        "requests.packages.urllib3.packages.ssl_match_hostname",
        "requests.packages.urllib3.packages.ssl_match_hostname._implementation",
        "requests.packages.urllib3.poolmanager",
        "requests.packages.urllib3.request",
        "requests.packages.urllib3.response",
        "requests.packages.urllib3.util",
        "requests.packages.urllib3.util.connection",
        "requests.packages.urllib3.util.request",
        "requests.packages.urllib3.util.response",
        "requests.packages.urllib3.util.retry",
        "requests.packages.urllib3.util.ssl_",
        "requests.packages.urllib3.util.timeout",
        "requests.packages.urllib3.util.url",
        "resolvelib",
        "tenacity",
        "tomli",
        "urllib3",
    ):
        vendored(_modulename)
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/distro.py
ADDED
@@ -0,0 +1,1386 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2015,2016,2017 Nir Cohen
|
2 |
+
#
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
#
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
#
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
"""
|
16 |
+
The ``distro`` package (``distro`` stands for Linux Distribution) provides
|
17 |
+
information about the Linux distribution it runs on, such as a reliable
|
18 |
+
machine-readable distro ID, or version information.
|
19 |
+
|
20 |
+
It is the recommended replacement for Python's original
|
21 |
+
:py:func:`platform.linux_distribution` function, but it provides much more
|
22 |
+
functionality. An alternative implementation became necessary because Python
|
23 |
+
3.5 deprecated this function, and Python 3.8 removed it altogether. Its
|
24 |
+
predecessor function :py:func:`platform.dist` was already deprecated since
|
25 |
+
Python 2.6 and removed in Python 3.8. Still, there are many cases in which
|
26 |
+
access to OS distribution information is needed. See `Python issue 1322
|
27 |
+
<https://bugs.python.org/issue1322>`_ for more information.
|
28 |
+
"""
|
29 |
+
|
30 |
+
import argparse
|
31 |
+
import json
|
32 |
+
import logging
|
33 |
+
import os
|
34 |
+
import re
|
35 |
+
import shlex
|
36 |
+
import subprocess
|
37 |
+
import sys
|
38 |
+
import warnings
|
39 |
+
|
40 |
+
__version__ = "1.6.0"

# `if False` (instead of typing.TYPE_CHECKING) avoids an ImportError on
# Python 2; the imports below only exist for static type checkers. See:
# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING
if False:  # pragma: nocover
    from typing import (
        Any,
        Callable,
        Dict,
        Iterable,
        Optional,
        Sequence,
        TextIO,
        Tuple,
        Type,
        TypedDict,
        Union,
    )

    # Shape of the "version_parts" mapping in distro.info() results.
    VersionDict = TypedDict(
        "VersionDict", {"major": str, "minor": str, "build_number": str}
    )
    # Shape of the full mapping returned by distro.info().
    InfoDict = TypedDict(
        "InfoDict",
        {
            "id": str,
            "version": str,
            "version_parts": VersionDict,
            "like": str,
            "codename": str,
        },
    )
|
73 |
+
|
74 |
+
|
75 |
+
_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
_OS_RELEASE_BASENAME = "os-release"

#: Normalization table for the "ID" attribute of os-release files, used by
#: :func:`distro.id`. Keys are the raw values lower-cased with blanks
#: replaced by underscores; values are the normalized IDs.
NORMALIZED_OS_ID = {
    "ol": "oracle",  # Oracle Linux
}

#: Normalization table for the "Distributor ID" attribute reported by the
#: lsb_release command, used by :func:`distro.id`. Same key convention as
#: :data:`NORMALIZED_OS_ID`.
NORMALIZED_LSB_ID = {
    "enterpriseenterpriseas": "oracle",  # Oracle Enterprise Linux 4
    "enterpriseenterpriseserver": "oracle",  # Oracle Linux 5
    "redhatenterpriseworkstation": "rhel",  # RHEL 6, 7 Workstation
    "redhatenterpriseserver": "rhel",  # RHEL 6, 7 Server
    "redhatenterprisecomputenode": "rhel",  # RHEL 6 ComputeNode
}

#: Normalization table for distro IDs derived from distro release file
#: names, used by :func:`distro.id`. Same key convention as
#: :data:`NORMALIZED_OS_ID`.
NORMALIZED_DISTRO_ID = {
    "redhat": "rhel",  # RHEL 6.x, 7.x
}

# Pattern for the content of a distro release file. It is matched against
# the *reversed* line, which is why literal words appear reversed here
# ("STL" for "LTS", "esaeler" for "release").
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
    r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
)

# Pattern for the base file name of a distro release file, e.g.
# "centos-release" or "slackware-version".
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")

# Base file names to skip when scanning for a distro release file.
_DISTRO_RELEASE_IGNORE_BASENAMES = (
    "debian_version",
    "lsb-release",
    "oem-release",
    _OS_RELEASE_BASENAME,
    "system-release",
    "plesk-release",
    "iredmail-release",
)
|
134 |
+
|
135 |
+
|
136 |
+
def linux_distribution(full_distribution_name=True):
    # type: (bool) -> Tuple[str, str, str]
    """
    .. deprecated:: 1.6.0

        :func:`distro.linux_distribution()` is deprecated. It should only be
        used as a compatibility shim with Python's
        :py:func:`platform.linux_distribution()`. Please use
        :func:`distro.id`, :func:`distro.version` and :func:`distro.name`
        instead.

    Return ``(id_name, version, codename)`` for the current OS
    distribution, where ``id_name`` is :func:`distro.name` (or
    :func:`distro.id` when *full_distribution_name* is false),
    ``version`` is :func:`distro.version`, and ``codename`` is
    :func:`distro.codename`.

    The interface matches the original
    :py:func:`platform.linux_distribution` function (for a subset of its
    parameters), but the data may differ: this implementation consults
    more data sources and normalizes the distro ID to a reliable
    machine-readable value.
    """
    warnings.warn(
        "distro.linux_distribution() is deprecated. It should only be used as a "
        "compatibility shim with Python's platform.linux_distribution(). Please use "
        "distro.id(), distro.version() and distro.name() instead.",
        DeprecationWarning,
        stacklevel=2,
    )
    return _distro.linux_distribution(full_distribution_name)
|
177 |
+
|
178 |
+
|
179 |
+
def id():
    # type: () -> str
    """
    Return the distro ID of the current distribution, as a
    machine-readable string.

    For many popular OS distributions the returned value is *reliable*:
    documented and stable across releases. Maintained reliable IDs
    include, among others: "ubuntu", "debian", "rhel", "centos",
    "fedora", "sles", "opensuse", "amazon", "arch", "cloudlinux",
    "exherbo", "gentoo", "ibm_powerkvm", "kvmibm", "linuxmint",
    "mageia", "mandriva", "parallels", "pidora", "raspbian", "oracle",
    "scientific", "slackware", "xenserver", "openbsd", "netbsd",
    "freebsd", "midnightbsd". To get further distros added, or to report
    a mismatch, please create an issue in the `distro issue tracker`_.

    **Lookup hierarchy and transformations:**

    The raw ID is the first available, non-empty value among:

    * the "ID" attribute of the os-release file,
    * the "Distributor ID" attribute reported by the lsb_release command,
    * the first part of the file name of the distro release file.

    Before being returned it is lower-cased, blanks (which should not be
    there anyway) are replaced with underscores, and it is normalized via
    the `normalization tables`_ so that the ID stays as reliable as
    possible even across incompatible changes in the OS distributions
    (e.g. a distro later adding an os-release file or the lsb_release
    command with ID values differing from its release file name).
    """
    return _distro.id()
|
257 |
+
|
258 |
+
|
259 |
+
def name(pretty=False):
|
260 |
+
# type: (bool) -> str
|
261 |
+
"""
|
262 |
+
Return the name of the current OS distribution, as a human-readable
|
263 |
+
string.
|
264 |
+
|
265 |
+
If *pretty* is false, the name is returned without version or codename.
|
266 |
+
(e.g. "CentOS Linux")
|
267 |
+
|
268 |
+
If *pretty* is true, the version and codename are appended.
|
269 |
+
(e.g. "CentOS Linux 7.1.1503 (Core)")
|
270 |
+
|
271 |
+
**Lookup hierarchy:**
|
272 |
+
|
273 |
+
The name is obtained from the following sources, in the specified order.
|
274 |
+
The first available and non-empty value is used:
|
275 |
+
|
276 |
+
* If *pretty* is false:
|
277 |
+
|
278 |
+
- the value of the "NAME" attribute of the os-release file,
|
279 |
+
|
280 |
+
- the value of the "Distributor ID" attribute returned by the lsb_release
|
281 |
+
command,
|
282 |
+
|
283 |
+
- the value of the "<name>" field of the distro release file.
|
284 |
+
|
285 |
+
* If *pretty* is true:
|
286 |
+
|
287 |
+
- the value of the "PRETTY_NAME" attribute of the os-release file,
|
288 |
+
|
289 |
+
- the value of the "Description" attribute returned by the lsb_release
|
290 |
+
command,
|
291 |
+
|
292 |
+
- the value of the "<name>" field of the distro release file, appended
|
293 |
+
with the value of the pretty version ("<version_id>" and "<codename>"
|
294 |
+
fields) of the distro release file, if available.
|
295 |
+
"""
|
296 |
+
return _distro.name(pretty)
|
297 |
+
|
298 |
+
|
299 |
+
def version(pretty=False, best=False):
|
300 |
+
# type: (bool, bool) -> str
|
301 |
+
"""
|
302 |
+
Return the version of the current OS distribution, as a human-readable
|
303 |
+
string.
|
304 |
+
|
305 |
+
If *pretty* is false, the version is returned without codename (e.g.
|
306 |
+
"7.0").
|
307 |
+
|
308 |
+
If *pretty* is true, the codename in parenthesis is appended, if the
|
309 |
+
codename is non-empty (e.g. "7.0 (Maipo)").
|
310 |
+
|
311 |
+
Some distributions provide version numbers with different precisions in
|
312 |
+
the different sources of distribution information. Examining the different
|
313 |
+
sources in a fixed priority order does not always yield the most precise
|
314 |
+
version (e.g. for Debian 8.2, or CentOS 7.1).
|
315 |
+
|
316 |
+
The *best* parameter can be used to control the approach for the returned
|
317 |
+
version:
|
318 |
+
|
319 |
+
If *best* is false, the first non-empty version number in priority order of
|
320 |
+
the examined sources is returned.
|
321 |
+
|
322 |
+
If *best* is true, the most precise version number out of all examined
|
323 |
+
sources is returned.
|
324 |
+
|
325 |
+
**Lookup hierarchy:**
|
326 |
+
|
327 |
+
In all cases, the version number is obtained from the following sources.
|
328 |
+
If *best* is false, this order represents the priority order:
|
329 |
+
|
330 |
+
* the value of the "VERSION_ID" attribute of the os-release file,
|
331 |
+
* the value of the "Release" attribute returned by the lsb_release
|
332 |
+
command,
|
333 |
+
* the version number parsed from the "<version_id>" field of the first line
|
334 |
+
of the distro release file,
|
335 |
+
* the version number parsed from the "PRETTY_NAME" attribute of the
|
336 |
+
os-release file, if it follows the format of the distro release files.
|
337 |
+
* the version number parsed from the "Description" attribute returned by
|
338 |
+
the lsb_release command, if it follows the format of the distro release
|
339 |
+
files.
|
340 |
+
"""
|
341 |
+
return _distro.version(pretty, best)
|
342 |
+
|
343 |
+
|
344 |
+
def version_parts(best=False):
|
345 |
+
# type: (bool) -> Tuple[str, str, str]
|
346 |
+
"""
|
347 |
+
Return the version of the current OS distribution as a tuple
|
348 |
+
``(major, minor, build_number)`` with items as follows:
|
349 |
+
|
350 |
+
* ``major``: The result of :func:`distro.major_version`.
|
351 |
+
|
352 |
+
* ``minor``: The result of :func:`distro.minor_version`.
|
353 |
+
|
354 |
+
* ``build_number``: The result of :func:`distro.build_number`.
|
355 |
+
|
356 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
357 |
+
method.
|
358 |
+
"""
|
359 |
+
return _distro.version_parts(best)
|
360 |
+
|
361 |
+
|
362 |
+
def major_version(best=False):
|
363 |
+
# type: (bool) -> str
|
364 |
+
"""
|
365 |
+
Return the major version of the current OS distribution, as a string,
|
366 |
+
if provided.
|
367 |
+
Otherwise, the empty string is returned. The major version is the first
|
368 |
+
part of the dot-separated version string.
|
369 |
+
|
370 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
371 |
+
method.
|
372 |
+
"""
|
373 |
+
return _distro.major_version(best)
|
374 |
+
|
375 |
+
|
376 |
+
def minor_version(best=False):
|
377 |
+
# type: (bool) -> str
|
378 |
+
"""
|
379 |
+
Return the minor version of the current OS distribution, as a string,
|
380 |
+
if provided.
|
381 |
+
Otherwise, the empty string is returned. The minor version is the second
|
382 |
+
part of the dot-separated version string.
|
383 |
+
|
384 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
385 |
+
method.
|
386 |
+
"""
|
387 |
+
return _distro.minor_version(best)
|
388 |
+
|
389 |
+
|
390 |
+
def build_number(best=False):
|
391 |
+
# type: (bool) -> str
|
392 |
+
"""
|
393 |
+
Return the build number of the current OS distribution, as a string,
|
394 |
+
if provided.
|
395 |
+
Otherwise, the empty string is returned. The build number is the third part
|
396 |
+
of the dot-separated version string.
|
397 |
+
|
398 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
399 |
+
method.
|
400 |
+
"""
|
401 |
+
return _distro.build_number(best)
|
402 |
+
|
403 |
+
|
404 |
+
def like():
|
405 |
+
# type: () -> str
|
406 |
+
"""
|
407 |
+
Return a space-separated list of distro IDs of distributions that are
|
408 |
+
closely related to the current OS distribution in regards to packaging
|
409 |
+
and programming interfaces, for example distributions the current
|
410 |
+
distribution is a derivative from.
|
411 |
+
|
412 |
+
**Lookup hierarchy:**
|
413 |
+
|
414 |
+
This information item is only provided by the os-release file.
|
415 |
+
For details, see the description of the "ID_LIKE" attribute in the
|
416 |
+
`os-release man page
|
417 |
+
<http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
|
418 |
+
"""
|
419 |
+
return _distro.like()
|
420 |
+
|
421 |
+
|
422 |
+
def codename():
|
423 |
+
# type: () -> str
|
424 |
+
"""
|
425 |
+
Return the codename for the release of the current OS distribution,
|
426 |
+
as a string.
|
427 |
+
|
428 |
+
If the distribution does not have a codename, an empty string is returned.
|
429 |
+
|
430 |
+
Note that the returned codename is not always really a codename. For
|
431 |
+
example, openSUSE returns "x86_64". This function does not handle such
|
432 |
+
cases in any special way and just returns the string it finds, if any.
|
433 |
+
|
434 |
+
**Lookup hierarchy:**
|
435 |
+
|
436 |
+
* the codename within the "VERSION" attribute of the os-release file, if
|
437 |
+
provided,
|
438 |
+
|
439 |
+
* the value of the "Codename" attribute returned by the lsb_release
|
440 |
+
command,
|
441 |
+
|
442 |
+
* the value of the "<codename>" field of the distro release file.
|
443 |
+
"""
|
444 |
+
return _distro.codename()
|
445 |
+
|
446 |
+
|
447 |
+
def info(pretty=False, best=False):
|
448 |
+
# type: (bool, bool) -> InfoDict
|
449 |
+
"""
|
450 |
+
Return certain machine-readable information items about the current OS
|
451 |
+
distribution in a dictionary, as shown in the following example:
|
452 |
+
|
453 |
+
.. sourcecode:: python
|
454 |
+
|
455 |
+
{
|
456 |
+
'id': 'rhel',
|
457 |
+
'version': '7.0',
|
458 |
+
'version_parts': {
|
459 |
+
'major': '7',
|
460 |
+
'minor': '0',
|
461 |
+
'build_number': ''
|
462 |
+
},
|
463 |
+
'like': 'fedora',
|
464 |
+
'codename': 'Maipo'
|
465 |
+
}
|
466 |
+
|
467 |
+
The dictionary structure and keys are always the same, regardless of which
|
468 |
+
information items are available in the underlying data sources. The values
|
469 |
+
for the various keys are as follows:
|
470 |
+
|
471 |
+
* ``id``: The result of :func:`distro.id`.
|
472 |
+
|
473 |
+
* ``version``: The result of :func:`distro.version`.
|
474 |
+
|
475 |
+
* ``version_parts -> major``: The result of :func:`distro.major_version`.
|
476 |
+
|
477 |
+
* ``version_parts -> minor``: The result of :func:`distro.minor_version`.
|
478 |
+
|
479 |
+
* ``version_parts -> build_number``: The result of
|
480 |
+
:func:`distro.build_number`.
|
481 |
+
|
482 |
+
* ``like``: The result of :func:`distro.like`.
|
483 |
+
|
484 |
+
* ``codename``: The result of :func:`distro.codename`.
|
485 |
+
|
486 |
+
For a description of the *pretty* and *best* parameters, see the
|
487 |
+
:func:`distro.version` method.
|
488 |
+
"""
|
489 |
+
return _distro.info(pretty, best)
|
490 |
+
|
491 |
+
|
492 |
+
def os_release_info():
|
493 |
+
# type: () -> Dict[str, str]
|
494 |
+
"""
|
495 |
+
Return a dictionary containing key-value pairs for the information items
|
496 |
+
from the os-release file data source of the current OS distribution.
|
497 |
+
|
498 |
+
See `os-release file`_ for details about these information items.
|
499 |
+
"""
|
500 |
+
return _distro.os_release_info()
|
501 |
+
|
502 |
+
|
503 |
+
def lsb_release_info():
|
504 |
+
# type: () -> Dict[str, str]
|
505 |
+
"""
|
506 |
+
Return a dictionary containing key-value pairs for the information items
|
507 |
+
from the lsb_release command data source of the current OS distribution.
|
508 |
+
|
509 |
+
See `lsb_release command output`_ for details about these information
|
510 |
+
items.
|
511 |
+
"""
|
512 |
+
return _distro.lsb_release_info()
|
513 |
+
|
514 |
+
|
515 |
+
def distro_release_info():
|
516 |
+
# type: () -> Dict[str, str]
|
517 |
+
"""
|
518 |
+
Return a dictionary containing key-value pairs for the information items
|
519 |
+
from the distro release file data source of the current OS distribution.
|
520 |
+
|
521 |
+
See `distro release file`_ for details about these information items.
|
522 |
+
"""
|
523 |
+
return _distro.distro_release_info()
|
524 |
+
|
525 |
+
|
526 |
+
def uname_info():
|
527 |
+
# type: () -> Dict[str, str]
|
528 |
+
"""
|
529 |
+
Return a dictionary containing key-value pairs for the information items
|
530 |
+
from the distro release file data source of the current OS distribution.
|
531 |
+
"""
|
532 |
+
return _distro.uname_info()
|
533 |
+
|
534 |
+
|
535 |
+
def os_release_attr(attribute):
|
536 |
+
# type: (str) -> str
|
537 |
+
"""
|
538 |
+
Return a single named information item from the os-release file data source
|
539 |
+
of the current OS distribution.
|
540 |
+
|
541 |
+
Parameters:
|
542 |
+
|
543 |
+
* ``attribute`` (string): Key of the information item.
|
544 |
+
|
545 |
+
Returns:
|
546 |
+
|
547 |
+
* (string): Value of the information item, if the item exists.
|
548 |
+
The empty string, if the item does not exist.
|
549 |
+
|
550 |
+
See `os-release file`_ for details about these information items.
|
551 |
+
"""
|
552 |
+
return _distro.os_release_attr(attribute)
|
553 |
+
|
554 |
+
|
555 |
+
def lsb_release_attr(attribute):
|
556 |
+
# type: (str) -> str
|
557 |
+
"""
|
558 |
+
Return a single named information item from the lsb_release command output
|
559 |
+
data source of the current OS distribution.
|
560 |
+
|
561 |
+
Parameters:
|
562 |
+
|
563 |
+
* ``attribute`` (string): Key of the information item.
|
564 |
+
|
565 |
+
Returns:
|
566 |
+
|
567 |
+
* (string): Value of the information item, if the item exists.
|
568 |
+
The empty string, if the item does not exist.
|
569 |
+
|
570 |
+
See `lsb_release command output`_ for details about these information
|
571 |
+
items.
|
572 |
+
"""
|
573 |
+
return _distro.lsb_release_attr(attribute)
|
574 |
+
|
575 |
+
|
576 |
+
def distro_release_attr(attribute):
|
577 |
+
# type: (str) -> str
|
578 |
+
"""
|
579 |
+
Return a single named information item from the distro release file
|
580 |
+
data source of the current OS distribution.
|
581 |
+
|
582 |
+
Parameters:
|
583 |
+
|
584 |
+
* ``attribute`` (string): Key of the information item.
|
585 |
+
|
586 |
+
Returns:
|
587 |
+
|
588 |
+
* (string): Value of the information item, if the item exists.
|
589 |
+
The empty string, if the item does not exist.
|
590 |
+
|
591 |
+
See `distro release file`_ for details about these information items.
|
592 |
+
"""
|
593 |
+
return _distro.distro_release_attr(attribute)
|
594 |
+
|
595 |
+
|
596 |
+
def uname_attr(attribute):
|
597 |
+
# type: (str) -> str
|
598 |
+
"""
|
599 |
+
Return a single named information item from the distro release file
|
600 |
+
data source of the current OS distribution.
|
601 |
+
|
602 |
+
Parameters:
|
603 |
+
|
604 |
+
* ``attribute`` (string): Key of the information item.
|
605 |
+
|
606 |
+
Returns:
|
607 |
+
|
608 |
+
* (string): Value of the information item, if the item exists.
|
609 |
+
The empty string, if the item does not exist.
|
610 |
+
"""
|
611 |
+
return _distro.uname_attr(attribute)
|
612 |
+
|
613 |
+
|
614 |
+
try:
|
615 |
+
from functools import cached_property
|
616 |
+
except ImportError:
|
617 |
+
# Python < 3.8
|
618 |
+
class cached_property(object): # type: ignore
|
619 |
+
"""A version of @property which caches the value. On access, it calls the
|
620 |
+
underlying function and sets the value in `__dict__` so future accesses
|
621 |
+
will not re-call the property.
|
622 |
+
"""
|
623 |
+
|
624 |
+
def __init__(self, f):
|
625 |
+
# type: (Callable[[Any], Any]) -> None
|
626 |
+
self._fname = f.__name__
|
627 |
+
self._f = f
|
628 |
+
|
629 |
+
def __get__(self, obj, owner):
|
630 |
+
# type: (Any, Type[Any]) -> Any
|
631 |
+
assert obj is not None, "call {} on an instance".format(self._fname)
|
632 |
+
ret = obj.__dict__[self._fname] = self._f(obj)
|
633 |
+
return ret
|
634 |
+
|
635 |
+
|
636 |
+
class LinuxDistribution(object):
|
637 |
+
"""
|
638 |
+
Provides information about a OS distribution.
|
639 |
+
|
640 |
+
This package creates a private module-global instance of this class with
|
641 |
+
default initialization arguments, that is used by the
|
642 |
+
`consolidated accessor functions`_ and `single source accessor functions`_.
|
643 |
+
By using default initialization arguments, that module-global instance
|
644 |
+
returns data about the current OS distribution (i.e. the distro this
|
645 |
+
package runs on).
|
646 |
+
|
647 |
+
Normally, it is not necessary to create additional instances of this class.
|
648 |
+
However, in situations where control is needed over the exact data sources
|
649 |
+
that are used, instances of this class can be created with a specific
|
650 |
+
distro release file, or a specific os-release file, or without invoking the
|
651 |
+
lsb_release command.
|
652 |
+
"""
|
653 |
+
|
654 |
+
def __init__(
|
655 |
+
self,
|
656 |
+
include_lsb=True,
|
657 |
+
os_release_file="",
|
658 |
+
distro_release_file="",
|
659 |
+
include_uname=True,
|
660 |
+
root_dir=None,
|
661 |
+
):
|
662 |
+
# type: (bool, str, str, bool, Optional[str]) -> None
|
663 |
+
"""
|
664 |
+
The initialization method of this class gathers information from the
|
665 |
+
available data sources, and stores that in private instance attributes.
|
666 |
+
Subsequent access to the information items uses these private instance
|
667 |
+
attributes, so that the data sources are read only once.
|
668 |
+
|
669 |
+
Parameters:
|
670 |
+
|
671 |
+
* ``include_lsb`` (bool): Controls whether the
|
672 |
+
`lsb_release command output`_ is included as a data source.
|
673 |
+
|
674 |
+
If the lsb_release command is not available in the program execution
|
675 |
+
path, the data source for the lsb_release command will be empty.
|
676 |
+
|
677 |
+
* ``os_release_file`` (string): The path name of the
|
678 |
+
`os-release file`_ that is to be used as a data source.
|
679 |
+
|
680 |
+
An empty string (the default) will cause the default path name to
|
681 |
+
be used (see `os-release file`_ for details).
|
682 |
+
|
683 |
+
If the specified or defaulted os-release file does not exist, the
|
684 |
+
data source for the os-release file will be empty.
|
685 |
+
|
686 |
+
* ``distro_release_file`` (string): The path name of the
|
687 |
+
`distro release file`_ that is to be used as a data source.
|
688 |
+
|
689 |
+
An empty string (the default) will cause a default search algorithm
|
690 |
+
to be used (see `distro release file`_ for details).
|
691 |
+
|
692 |
+
If the specified distro release file does not exist, or if no default
|
693 |
+
distro release file can be found, the data source for the distro
|
694 |
+
release file will be empty.
|
695 |
+
|
696 |
+
* ``include_uname`` (bool): Controls whether uname command output is
|
697 |
+
included as a data source. If the uname command is not available in
|
698 |
+
the program execution path the data source for the uname command will
|
699 |
+
be empty.
|
700 |
+
|
701 |
+
* ``root_dir`` (string): The absolute path to the root directory to use
|
702 |
+
to find distro-related information files.
|
703 |
+
|
704 |
+
Public instance attributes:
|
705 |
+
|
706 |
+
* ``os_release_file`` (string): The path name of the
|
707 |
+
`os-release file`_ that is actually used as a data source. The
|
708 |
+
empty string if no distro release file is used as a data source.
|
709 |
+
|
710 |
+
* ``distro_release_file`` (string): The path name of the
|
711 |
+
`distro release file`_ that is actually used as a data source. The
|
712 |
+
empty string if no distro release file is used as a data source.
|
713 |
+
|
714 |
+
* ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
|
715 |
+
This controls whether the lsb information will be loaded.
|
716 |
+
|
717 |
+
* ``include_uname`` (bool): The result of the ``include_uname``
|
718 |
+
parameter. This controls whether the uname information will
|
719 |
+
be loaded.
|
720 |
+
|
721 |
+
Raises:
|
722 |
+
|
723 |
+
* :py:exc:`IOError`: Some I/O issue with an os-release file or distro
|
724 |
+
release file.
|
725 |
+
|
726 |
+
* :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
|
727 |
+
some issue (other than not being available in the program execution
|
728 |
+
path).
|
729 |
+
|
730 |
+
* :py:exc:`UnicodeError`: A data source has unexpected characters or
|
731 |
+
uses an unexpected encoding.
|
732 |
+
"""
|
733 |
+
self.root_dir = root_dir
|
734 |
+
self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
|
735 |
+
self.usr_lib_dir = (
|
736 |
+
os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
|
737 |
+
)
|
738 |
+
|
739 |
+
if os_release_file:
|
740 |
+
self.os_release_file = os_release_file
|
741 |
+
else:
|
742 |
+
etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
|
743 |
+
usr_lib_os_release_file = os.path.join(
|
744 |
+
self.usr_lib_dir, _OS_RELEASE_BASENAME
|
745 |
+
)
|
746 |
+
|
747 |
+
# NOTE: The idea is to respect order **and** have it set
|
748 |
+
# at all times for API backwards compatibility.
|
749 |
+
if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
|
750 |
+
usr_lib_os_release_file
|
751 |
+
):
|
752 |
+
self.os_release_file = etc_dir_os_release_file
|
753 |
+
else:
|
754 |
+
self.os_release_file = usr_lib_os_release_file
|
755 |
+
|
756 |
+
self.distro_release_file = distro_release_file or "" # updated later
|
757 |
+
self.include_lsb = include_lsb
|
758 |
+
self.include_uname = include_uname
|
759 |
+
|
760 |
+
def __repr__(self):
|
761 |
+
# type: () -> str
|
762 |
+
"""Return repr of all info"""
|
763 |
+
return (
|
764 |
+
"LinuxDistribution("
|
765 |
+
"os_release_file={self.os_release_file!r}, "
|
766 |
+
"distro_release_file={self.distro_release_file!r}, "
|
767 |
+
"include_lsb={self.include_lsb!r}, "
|
768 |
+
"include_uname={self.include_uname!r}, "
|
769 |
+
"_os_release_info={self._os_release_info!r}, "
|
770 |
+
"_lsb_release_info={self._lsb_release_info!r}, "
|
771 |
+
"_distro_release_info={self._distro_release_info!r}, "
|
772 |
+
"_uname_info={self._uname_info!r})".format(self=self)
|
773 |
+
)
|
774 |
+
|
775 |
+
def linux_distribution(self, full_distribution_name=True):
|
776 |
+
# type: (bool) -> Tuple[str, str, str]
|
777 |
+
"""
|
778 |
+
Return information about the OS distribution that is compatible
|
779 |
+
with Python's :func:`platform.linux_distribution`, supporting a subset
|
780 |
+
of its parameters.
|
781 |
+
|
782 |
+
For details, see :func:`distro.linux_distribution`.
|
783 |
+
"""
|
784 |
+
return (
|
785 |
+
self.name() if full_distribution_name else self.id(),
|
786 |
+
self.version(),
|
787 |
+
self.codename(),
|
788 |
+
)
|
789 |
+
|
790 |
+
def id(self):
|
791 |
+
# type: () -> str
|
792 |
+
"""Return the distro ID of the OS distribution, as a string.
|
793 |
+
|
794 |
+
For details, see :func:`distro.id`.
|
795 |
+
"""
|
796 |
+
|
797 |
+
def normalize(distro_id, table):
|
798 |
+
# type: (str, Dict[str, str]) -> str
|
799 |
+
distro_id = distro_id.lower().replace(" ", "_")
|
800 |
+
return table.get(distro_id, distro_id)
|
801 |
+
|
802 |
+
distro_id = self.os_release_attr("id")
|
803 |
+
if distro_id:
|
804 |
+
return normalize(distro_id, NORMALIZED_OS_ID)
|
805 |
+
|
806 |
+
distro_id = self.lsb_release_attr("distributor_id")
|
807 |
+
if distro_id:
|
808 |
+
return normalize(distro_id, NORMALIZED_LSB_ID)
|
809 |
+
|
810 |
+
distro_id = self.distro_release_attr("id")
|
811 |
+
if distro_id:
|
812 |
+
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
813 |
+
|
814 |
+
distro_id = self.uname_attr("id")
|
815 |
+
if distro_id:
|
816 |
+
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
817 |
+
|
818 |
+
return ""
|
819 |
+
|
820 |
+
def name(self, pretty=False):
|
821 |
+
# type: (bool) -> str
|
822 |
+
"""
|
823 |
+
Return the name of the OS distribution, as a string.
|
824 |
+
|
825 |
+
For details, see :func:`distro.name`.
|
826 |
+
"""
|
827 |
+
name = (
|
828 |
+
self.os_release_attr("name")
|
829 |
+
or self.lsb_release_attr("distributor_id")
|
830 |
+
or self.distro_release_attr("name")
|
831 |
+
or self.uname_attr("name")
|
832 |
+
)
|
833 |
+
if pretty:
|
834 |
+
name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
|
835 |
+
"description"
|
836 |
+
)
|
837 |
+
if not name:
|
838 |
+
name = self.distro_release_attr("name") or self.uname_attr("name")
|
839 |
+
version = self.version(pretty=True)
|
840 |
+
if version:
|
841 |
+
name = name + " " + version
|
842 |
+
return name or ""
|
843 |
+
|
844 |
+
def version(self, pretty=False, best=False):
|
845 |
+
# type: (bool, bool) -> str
|
846 |
+
"""
|
847 |
+
Return the version of the OS distribution, as a string.
|
848 |
+
|
849 |
+
For details, see :func:`distro.version`.
|
850 |
+
"""
|
851 |
+
versions = [
|
852 |
+
self.os_release_attr("version_id"),
|
853 |
+
self.lsb_release_attr("release"),
|
854 |
+
self.distro_release_attr("version_id"),
|
855 |
+
self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
|
856 |
+
"version_id", ""
|
857 |
+
),
|
858 |
+
self._parse_distro_release_content(
|
859 |
+
self.lsb_release_attr("description")
|
860 |
+
).get("version_id", ""),
|
861 |
+
self.uname_attr("release"),
|
862 |
+
]
|
863 |
+
version = ""
|
864 |
+
if best:
|
865 |
+
# This algorithm uses the last version in priority order that has
|
866 |
+
# the best precision. If the versions are not in conflict, that
|
867 |
+
# does not matter; otherwise, using the last one instead of the
|
868 |
+
# first one might be considered a surprise.
|
869 |
+
for v in versions:
|
870 |
+
if v.count(".") > version.count(".") or version == "":
|
871 |
+
version = v
|
872 |
+
else:
|
873 |
+
for v in versions:
|
874 |
+
if v != "":
|
875 |
+
version = v
|
876 |
+
break
|
877 |
+
if pretty and version and self.codename():
|
878 |
+
version = "{0} ({1})".format(version, self.codename())
|
879 |
+
return version
|
880 |
+
|
881 |
+
def version_parts(self, best=False):
|
882 |
+
# type: (bool) -> Tuple[str, str, str]
|
883 |
+
"""
|
884 |
+
Return the version of the OS distribution, as a tuple of version
|
885 |
+
numbers.
|
886 |
+
|
887 |
+
For details, see :func:`distro.version_parts`.
|
888 |
+
"""
|
889 |
+
version_str = self.version(best=best)
|
890 |
+
if version_str:
|
891 |
+
version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
|
892 |
+
matches = version_regex.match(version_str)
|
893 |
+
if matches:
|
894 |
+
major, minor, build_number = matches.groups()
|
895 |
+
return major, minor or "", build_number or ""
|
896 |
+
return "", "", ""
|
897 |
+
|
898 |
+
def major_version(self, best=False):
|
899 |
+
# type: (bool) -> str
|
900 |
+
"""
|
901 |
+
Return the major version number of the current distribution.
|
902 |
+
|
903 |
+
For details, see :func:`distro.major_version`.
|
904 |
+
"""
|
905 |
+
return self.version_parts(best)[0]
|
906 |
+
|
907 |
+
def minor_version(self, best=False):
|
908 |
+
# type: (bool) -> str
|
909 |
+
"""
|
910 |
+
Return the minor version number of the current distribution.
|
911 |
+
|
912 |
+
For details, see :func:`distro.minor_version`.
|
913 |
+
"""
|
914 |
+
return self.version_parts(best)[1]
|
915 |
+
|
916 |
+
def build_number(self, best=False):
|
917 |
+
# type: (bool) -> str
|
918 |
+
"""
|
919 |
+
Return the build number of the current distribution.
|
920 |
+
|
921 |
+
For details, see :func:`distro.build_number`.
|
922 |
+
"""
|
923 |
+
return self.version_parts(best)[2]
|
924 |
+
|
925 |
+
def like(self):
|
926 |
+
# type: () -> str
|
927 |
+
"""
|
928 |
+
Return the IDs of distributions that are like the OS distribution.
|
929 |
+
|
930 |
+
For details, see :func:`distro.like`.
|
931 |
+
"""
|
932 |
+
return self.os_release_attr("id_like") or ""
|
933 |
+
|
934 |
+
def codename(self):
|
935 |
+
# type: () -> str
|
936 |
+
"""
|
937 |
+
Return the codename of the OS distribution.
|
938 |
+
|
939 |
+
For details, see :func:`distro.codename`.
|
940 |
+
"""
|
941 |
+
try:
|
942 |
+
# Handle os_release specially since distros might purposefully set
|
943 |
+
# this to empty string to have no codename
|
944 |
+
return self._os_release_info["codename"]
|
945 |
+
except KeyError:
|
946 |
+
return (
|
947 |
+
self.lsb_release_attr("codename")
|
948 |
+
or self.distro_release_attr("codename")
|
949 |
+
or ""
|
950 |
+
)
|
951 |
+
|
952 |
+
def info(self, pretty=False, best=False):
|
953 |
+
# type: (bool, bool) -> InfoDict
|
954 |
+
"""
|
955 |
+
Return certain machine-readable information about the OS
|
956 |
+
distribution.
|
957 |
+
|
958 |
+
For details, see :func:`distro.info`.
|
959 |
+
"""
|
960 |
+
return dict(
|
961 |
+
id=self.id(),
|
962 |
+
version=self.version(pretty, best),
|
963 |
+
version_parts=dict(
|
964 |
+
major=self.major_version(best),
|
965 |
+
minor=self.minor_version(best),
|
966 |
+
build_number=self.build_number(best),
|
967 |
+
),
|
968 |
+
like=self.like(),
|
969 |
+
codename=self.codename(),
|
970 |
+
)
|
971 |
+
|
972 |
+
def os_release_info(self):
|
973 |
+
# type: () -> Dict[str, str]
|
974 |
+
"""
|
975 |
+
Return a dictionary containing key-value pairs for the information
|
976 |
+
items from the os-release file data source of the OS distribution.
|
977 |
+
|
978 |
+
For details, see :func:`distro.os_release_info`.
|
979 |
+
"""
|
980 |
+
return self._os_release_info
|
981 |
+
|
982 |
+
def lsb_release_info(self):
|
983 |
+
# type: () -> Dict[str, str]
|
984 |
+
"""
|
985 |
+
Return a dictionary containing key-value pairs for the information
|
986 |
+
items from the lsb_release command data source of the OS
|
987 |
+
distribution.
|
988 |
+
|
989 |
+
For details, see :func:`distro.lsb_release_info`.
|
990 |
+
"""
|
991 |
+
return self._lsb_release_info
|
992 |
+
|
993 |
+
def distro_release_info(self):
|
994 |
+
# type: () -> Dict[str, str]
|
995 |
+
"""
|
996 |
+
Return a dictionary containing key-value pairs for the information
|
997 |
+
items from the distro release file data source of the OS
|
998 |
+
distribution.
|
999 |
+
|
1000 |
+
For details, see :func:`distro.distro_release_info`.
|
1001 |
+
"""
|
1002 |
+
return self._distro_release_info
|
1003 |
+
|
1004 |
+
def uname_info(self):
|
1005 |
+
# type: () -> Dict[str, str]
|
1006 |
+
"""
|
1007 |
+
Return a dictionary containing key-value pairs for the information
|
1008 |
+
items from the uname command data source of the OS distribution.
|
1009 |
+
|
1010 |
+
For details, see :func:`distro.uname_info`.
|
1011 |
+
"""
|
1012 |
+
return self._uname_info
|
1013 |
+
|
1014 |
+
def os_release_attr(self, attribute):
|
1015 |
+
# type: (str) -> str
|
1016 |
+
"""
|
1017 |
+
Return a single named information item from the os-release file data
|
1018 |
+
source of the OS distribution.
|
1019 |
+
|
1020 |
+
For details, see :func:`distro.os_release_attr`.
|
1021 |
+
"""
|
1022 |
+
return self._os_release_info.get(attribute, "")
|
1023 |
+
|
1024 |
+
def lsb_release_attr(self, attribute):
|
1025 |
+
# type: (str) -> str
|
1026 |
+
"""
|
1027 |
+
Return a single named information item from the lsb_release command
|
1028 |
+
output data source of the OS distribution.
|
1029 |
+
|
1030 |
+
For details, see :func:`distro.lsb_release_attr`.
|
1031 |
+
"""
|
1032 |
+
return self._lsb_release_info.get(attribute, "")
|
1033 |
+
|
1034 |
+
def distro_release_attr(self, attribute):
|
1035 |
+
# type: (str) -> str
|
1036 |
+
"""
|
1037 |
+
Return a single named information item from the distro release file
|
1038 |
+
data source of the OS distribution.
|
1039 |
+
|
1040 |
+
For details, see :func:`distro.distro_release_attr`.
|
1041 |
+
"""
|
1042 |
+
return self._distro_release_info.get(attribute, "")
|
1043 |
+
|
1044 |
+
def uname_attr(self, attribute):
|
1045 |
+
# type: (str) -> str
|
1046 |
+
"""
|
1047 |
+
Return a single named information item from the uname command
|
1048 |
+
output data source of the OS distribution.
|
1049 |
+
|
1050 |
+
For details, see :func:`distro.uname_attr`.
|
1051 |
+
"""
|
1052 |
+
return self._uname_info.get(attribute, "")
|
1053 |
+
|
1054 |
+
@cached_property
|
1055 |
+
def _os_release_info(self):
|
1056 |
+
# type: () -> Dict[str, str]
|
1057 |
+
"""
|
1058 |
+
Get the information items from the specified os-release file.
|
1059 |
+
|
1060 |
+
Returns:
|
1061 |
+
A dictionary containing all information items.
|
1062 |
+
"""
|
1063 |
+
if os.path.isfile(self.os_release_file):
|
1064 |
+
with open(self.os_release_file) as release_file:
|
1065 |
+
return self._parse_os_release_content(release_file)
|
1066 |
+
return {}
|
1067 |
+
|
1068 |
+
@staticmethod
|
1069 |
+
def _parse_os_release_content(lines):
|
1070 |
+
# type: (TextIO) -> Dict[str, str]
|
1071 |
+
"""
|
1072 |
+
Parse the lines of an os-release file.
|
1073 |
+
|
1074 |
+
Parameters:
|
1075 |
+
|
1076 |
+
* lines: Iterable through the lines in the os-release file.
|
1077 |
+
Each line must be a unicode string or a UTF-8 encoded byte
|
1078 |
+
string.
|
1079 |
+
|
1080 |
+
Returns:
|
1081 |
+
A dictionary containing all information items.
|
1082 |
+
"""
|
1083 |
+
props = {}
|
1084 |
+
lexer = shlex.shlex(lines, posix=True)
|
1085 |
+
lexer.whitespace_split = True
|
1086 |
+
|
1087 |
+
# The shlex module defines its `wordchars` variable using literals,
|
1088 |
+
# making it dependent on the encoding of the Python source file.
|
1089 |
+
# In Python 2.6 and 2.7, the shlex source file is encoded in
|
1090 |
+
# 'iso-8859-1', and the `wordchars` variable is defined as a byte
|
1091 |
+
# string. This causes a UnicodeDecodeError to be raised when the
|
1092 |
+
# parsed content is a unicode object. The following fix resolves that
|
1093 |
+
# (... but it should be fixed in shlex...):
|
1094 |
+
if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
|
1095 |
+
lexer.wordchars = lexer.wordchars.decode("iso-8859-1")
|
1096 |
+
|
1097 |
+
tokens = list(lexer)
|
1098 |
+
for token in tokens:
|
1099 |
+
# At this point, all shell-like parsing has been done (i.e.
|
1100 |
+
# comments processed, quotes and backslash escape sequences
|
1101 |
+
# processed, multi-line values assembled, trailing newlines
|
1102 |
+
# stripped, etc.), so the tokens are now either:
|
1103 |
+
# * variable assignments: var=value
|
1104 |
+
# * commands or their arguments (not allowed in os-release)
|
1105 |
+
if "=" in token:
|
1106 |
+
k, v = token.split("=", 1)
|
1107 |
+
props[k.lower()] = v
|
1108 |
+
else:
|
1109 |
+
# Ignore any tokens that are not variable assignments
|
1110 |
+
pass
|
1111 |
+
|
1112 |
+
if "version_codename" in props:
|
1113 |
+
# os-release added a version_codename field. Use that in
|
1114 |
+
# preference to anything else Note that some distros purposefully
|
1115 |
+
# do not have code names. They should be setting
|
1116 |
+
# version_codename=""
|
1117 |
+
props["codename"] = props["version_codename"]
|
1118 |
+
elif "ubuntu_codename" in props:
|
1119 |
+
# Same as above but a non-standard field name used on older Ubuntus
|
1120 |
+
props["codename"] = props["ubuntu_codename"]
|
1121 |
+
elif "version" in props:
|
1122 |
+
# If there is no version_codename, parse it from the version
|
1123 |
+
match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
|
1124 |
+
if match:
|
1125 |
+
codename = match.group()
|
1126 |
+
codename = codename.strip("()")
|
1127 |
+
codename = codename.strip(",")
|
1128 |
+
codename = codename.strip()
|
1129 |
+
# codename appears within paranthese.
|
1130 |
+
props["codename"] = codename
|
1131 |
+
|
1132 |
+
return props
|
1133 |
+
|
1134 |
+
@cached_property
def _lsb_release_info(self):
    # type: () -> Dict[str, str]
    """Run ``lsb_release -a`` and parse its output.

    Returns:
        A dictionary of all information items, or an empty dictionary
        when lsb support is disabled or the command is unavailable or
        fails.
    """
    if not self.include_lsb:
        return {}
    with open(os.devnull, "wb") as devnull:
        try:
            stdout = subprocess.check_output(
                ("lsb_release", "-a"), stderr=devnull
            )
        except (OSError, subprocess.CalledProcessError):
            # Command not found, or lsb_release exited with an error.
            return {}
    return self._parse_lsb_release_content(self._to_str(stdout).splitlines())
|
1154 |
+
|
1155 |
+
@staticmethod
|
1156 |
+
def _parse_lsb_release_content(lines):
|
1157 |
+
# type: (Iterable[str]) -> Dict[str, str]
|
1158 |
+
"""
|
1159 |
+
Parse the output of the lsb_release command.
|
1160 |
+
|
1161 |
+
Parameters:
|
1162 |
+
|
1163 |
+
* lines: Iterable through the lines of the lsb_release output.
|
1164 |
+
Each line must be a unicode string or a UTF-8 encoded byte
|
1165 |
+
string.
|
1166 |
+
|
1167 |
+
Returns:
|
1168 |
+
A dictionary containing all information items.
|
1169 |
+
"""
|
1170 |
+
props = {}
|
1171 |
+
for line in lines:
|
1172 |
+
kv = line.strip("\n").split(":", 1)
|
1173 |
+
if len(kv) != 2:
|
1174 |
+
# Ignore lines without colon.
|
1175 |
+
continue
|
1176 |
+
k, v = kv
|
1177 |
+
props.update({k.replace(" ", "_").lower(): v.strip()})
|
1178 |
+
return props
|
1179 |
+
|
1180 |
+
@cached_property
def _uname_info(self):
    # type: () -> Dict[str, str]
    """Run ``uname -rs`` and parse its output; empty dict on failure."""
    with open(os.devnull, "wb") as devnull:
        try:
            stdout = subprocess.check_output(("uname", "-rs"), stderr=devnull)
        except OSError:
            # uname not available on this platform.
            return {}
    return self._parse_uname_content(self._to_str(stdout).splitlines())
|
1191 |
+
|
1192 |
+
@staticmethod
|
1193 |
+
def _parse_uname_content(lines):
|
1194 |
+
# type: (Sequence[str]) -> Dict[str, str]
|
1195 |
+
props = {}
|
1196 |
+
match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
|
1197 |
+
if match:
|
1198 |
+
name, version = match.groups()
|
1199 |
+
|
1200 |
+
# This is to prevent the Linux kernel version from
|
1201 |
+
# appearing as the 'best' version on otherwise
|
1202 |
+
# identifiable distributions.
|
1203 |
+
if name == "Linux":
|
1204 |
+
return {}
|
1205 |
+
props["id"] = name.lower()
|
1206 |
+
props["name"] = name
|
1207 |
+
props["release"] = version
|
1208 |
+
return props
|
1209 |
+
|
1210 |
+
@staticmethod
|
1211 |
+
def _to_str(text):
|
1212 |
+
# type: (Union[bytes, str]) -> str
|
1213 |
+
encoding = sys.getfilesystemencoding()
|
1214 |
+
encoding = "utf-8" if encoding == "ascii" else encoding
|
1215 |
+
|
1216 |
+
if sys.version_info[0] >= 3:
|
1217 |
+
if isinstance(text, bytes):
|
1218 |
+
return text.decode(encoding)
|
1219 |
+
else:
|
1220 |
+
if isinstance(text, unicode): # noqa
|
1221 |
+
return text.encode(encoding)
|
1222 |
+
|
1223 |
+
return text
|
1224 |
+
|
1225 |
+
@cached_property
def _distro_release_info(self):
    # type: () -> Dict[str, str]
    """
    Get the information items from the specified distro release file.

    If no file was specified, /etc (``self.etc_dir``) is scanned for
    ``*-release`` style files and the first parsable one wins.

    Returns:
        A dictionary containing all information items.
    """
    if self.distro_release_file:
        # If it was specified, we use it and parse what we can, even if
        # its file name or content does not match the expected pattern.
        distro_info = self._parse_distro_release_file(self.distro_release_file)
        basename = os.path.basename(self.distro_release_file)
        # The file name pattern for user-specified distro release files
        # is somewhat more tolerant (compared to when searching for the
        # file), because we want to use what was specified as best as
        # possible.
        match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
        # CloudLinux ships a file whose basename does not identify it,
        # so its name field takes precedence over the basename match.
        if "name" in distro_info and "cloudlinux" in distro_info["name"].lower():
            distro_info["id"] = "cloudlinux"
        elif match:
            distro_info["id"] = match.group(1)
        return distro_info
    else:
        try:
            basenames = os.listdir(self.etc_dir)
            # We sort for repeatability in cases where there are multiple
            # distro specific files; e.g. CentOS, Oracle, Enterprise all
            # containing `redhat-release` on top of their own.
            basenames.sort()
        except OSError:
            # This may occur when /etc is not readable but we can't be
            # sure about the *-release files. Check common entries of
            # /etc for information. If they turn out to not be there the
            # error is handled in `_parse_distro_release_file()`.
            basenames = [
                "SuSE-release",
                "arch-release",
                "base-release",
                "centos-release",
                "fedora-release",
                "gentoo-release",
                "mageia-release",
                "mandrake-release",
                "mandriva-release",
                "mandrivalinux-release",
                "manjaro-release",
                "oracle-release",
                "redhat-release",
                "sl-release",
                "slackware-version",
            ]
        # First basename that matches the pattern AND parses to a name
        # wins; the search also records which file was used.
        for basename in basenames:
            if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
                continue
            match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
            if match:
                filepath = os.path.join(self.etc_dir, basename)
                distro_info = self._parse_distro_release_file(filepath)
                if "name" in distro_info:
                    # The name is always present if the pattern matches
                    self.distro_release_file = filepath
                    distro_info["id"] = match.group(1)
                    if "cloudlinux" in distro_info["name"].lower():
                        distro_info["id"] = "cloudlinux"
                    return distro_info
        return {}
|
1293 |
+
|
1294 |
+
def _parse_distro_release_file(self, filepath):
|
1295 |
+
# type: (str) -> Dict[str, str]
|
1296 |
+
"""
|
1297 |
+
Parse a distro release file.
|
1298 |
+
|
1299 |
+
Parameters:
|
1300 |
+
|
1301 |
+
* filepath: Path name of the distro release file.
|
1302 |
+
|
1303 |
+
Returns:
|
1304 |
+
A dictionary containing all information items.
|
1305 |
+
"""
|
1306 |
+
try:
|
1307 |
+
with open(filepath) as fp:
|
1308 |
+
# Only parse the first line. For instance, on SLES there
|
1309 |
+
# are multiple lines. We don't want them...
|
1310 |
+
return self._parse_distro_release_content(fp.readline())
|
1311 |
+
except (OSError, IOError):
|
1312 |
+
# Ignore not being able to read a specific, seemingly version
|
1313 |
+
# related file.
|
1314 |
+
# See https://github.com/python-distro/distro/issues/162
|
1315 |
+
return {}
|
1316 |
+
|
1317 |
+
@staticmethod
def _parse_distro_release_content(line):
    # type: (str) -> Dict[str, str]
    """Parse one line from a distro release file.

    The stripped line is matched in reverse against the module's
    reversed content pattern, so the trailing codename/version are
    located first; matched groups are un-reversed before use.

    Parameters:
    * line: Line from the distro release file. Must be a unicode string
      or a UTF-8 encoded byte string.

    Returns:
        A dictionary with 'name' and, when present, 'version_id' and
        'codename' items; falls back to the whole line as the name.
    """
    distro_info = {}
    matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
    if matches:
        # Group 3 (the name) is guaranteed non-None by the regexp.
        distro_info["name"] = matches.group(3)[::-1]
        for group, key in ((2, "version_id"), (1, "codename")):
            value = matches.group(group)
            if value:
                distro_info[key] = value[::-1]
    elif line:
        distro_info["name"] = line.strip()
    return distro_info
|
1342 |
+
|
1343 |
+
|
1344 |
+
# Module-level default instance, shared by this module's convenience API
# and used by main() when no custom root directory is given.
_distro = LinuxDistribution()
|
1345 |
+
|
1346 |
+
|
1347 |
+
def main():
    # type: () -> None
    """Command-line entry point: print distro information as text or JSON."""
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    parser = argparse.ArgumentParser(description="OS distro info tool")
    parser.add_argument(
        "--json", "-j", help="Output in machine readable format", action="store_true"
    )
    parser.add_argument(
        "--root-dir",
        "-r",
        type=str,
        dest="root_dir",
        help="Path to the root filesystem directory (defaults to /)",
    )
    args = parser.parse_args()

    # A custom root directory means we must only read files from it; the
    # lsb_release/uname subprocesses would report on the host instead.
    if args.root_dir:
        dist = LinuxDistribution(
            include_lsb=False, include_uname=False, root_dir=args.root_dir
        )
    else:
        dist = _distro

    if args.json:
        logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
    else:
        logger.info("Name: %s", dist.name(pretty=True))
        logger.info("Version: %s", dist.version(pretty=True))
        logger.info("Codename: %s", dist.codename())


if __name__ == "__main__":
    main()
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__init__.py
ADDED
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# coding: utf-8
|
2 |
+
from ._version import version
|
3 |
+
from .exceptions import *
|
4 |
+
from .ext import ExtType, Timestamp
|
5 |
+
|
6 |
+
import os
|
7 |
+
import sys
|
8 |
+
|
9 |
+
|
10 |
+
if os.environ.get("MSGPACK_PUREPYTHON") or sys.version_info[0] == 2:
|
11 |
+
from .fallback import Packer, unpackb, Unpacker
|
12 |
+
else:
|
13 |
+
try:
|
14 |
+
from ._cmsgpack import Packer, unpackb, Unpacker
|
15 |
+
except ImportError:
|
16 |
+
from .fallback import Packer, unpackb, Unpacker
|
17 |
+
|
18 |
+
|
19 |
+
def pack(o, stream, **kwargs):
    """
    Pack object `o` and write the packed bytes to `stream`.

    See :class:`Packer` for options.
    """
    stream.write(Packer(**kwargs).pack(o))
|
27 |
+
|
28 |
+
|
29 |
+
def packb(o, **kwargs):
    """
    Pack object `o` and return the packed bytes.

    See :class:`Packer` for options.
    """
    packer = Packer(**kwargs)
    return packer.pack(o)
|
36 |
+
|
37 |
+
|
38 |
+
def unpack(stream, **kwargs):
    """
    Unpack an object from `stream`.

    Raises `ExtraData` when `stream` contains extra bytes.
    See :class:`Unpacker` for options.
    """
    return unpackb(stream.read(), **kwargs)
|
47 |
+
|
48 |
+
|
49 |
+
# Aliases for API compatibility with simplejson/marshal/pickle:
# load/loads mirror unpack/unpackb, dump/dumps mirror pack/packb.
load = unpack
loads = unpackb

dump = pack
dumps = packb
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (1.43 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/_version.cpython-310.pyc
ADDED
Binary file (219 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/exceptions.cpython-310.pyc
ADDED
Binary file (1.81 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/ext.cpython-310.pyc
ADDED
Binary file (6.32 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/__pycache__/fallback.cpython-310.pyc
ADDED
Binary file (25.4 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/_version.py
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
# Vendored msgpack version as a (major, minor, micro) tuple.
version = (1, 0, 3)
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/exceptions.py
ADDED
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
class UnpackException(Exception):
    """Base class for some exceptions raised while unpacking.

    NOTE: unpack may raise exceptions other than subclasses of
    UnpackException. If you want to catch all errors, catch
    Exception instead.
    """
|
8 |
+
|
9 |
+
|
10 |
+
class BufferFull(UnpackException):
    # Presumably raised when feeding more data than the unpacker's
    # internal buffer limit allows — TODO confirm against the Unpacker
    # implementation.
    pass


class OutOfData(UnpackException):
    # Raised when input ends before a complete msgpack object was read;
    # unpackb() translates this into ValueError("incomplete input").
    pass


class FormatError(ValueError, UnpackException):
    """Invalid msgpack format"""


class StackError(ValueError, UnpackException):
    """Too nested"""
|
24 |
+
|
25 |
+
|
26 |
+
# Deprecated. Use ValueError instead
UnpackValueError = ValueError


class ExtraData(UnpackValueError):
    """Raised when trailing data remains after a complete object.

    This exception is raised only by one-shot (not streaming) unpack.
    The successfully unpacked object and the leftover bytes are kept on
    the instance.
    """

    def __init__(self, unpacked, extra):
        # The object decoded before the trailing bytes were found.
        self.unpacked = unpacked
        # The raw trailing bytes that were not consumed.
        self.extra = extra

    def __str__(self):
        return "unpack(b) received extra data."


# Deprecated. Use Exception instead to catch all exception during packing.
PackException = Exception
PackValueError = ValueError
PackOverflowError = OverflowError
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/ext.py
ADDED
@@ -0,0 +1,193 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# coding: utf-8
|
2 |
+
from collections import namedtuple
|
3 |
+
import datetime
|
4 |
+
import sys
|
5 |
+
import struct
|
6 |
+
|
7 |
+
|
8 |
+
# True when running under Python 2 (affects integer types and tzinfo).
PY2 = sys.version_info[0] == 2

if PY2:
    # Python 2 has both `int` and `long`; accept either for timestamps.
    int_types = (int, long)
    # Python 2 lacks datetime.timezone, so no UTC tzinfo is available.
    _utc = None
else:
    int_types = int
    try:
        _utc = datetime.timezone.utc
    except AttributeError:
        # Very old Python 3 without timezone.utc: build UTC manually.
        _utc = datetime.timezone(datetime.timedelta(0))
|
19 |
+
|
20 |
+
|
21 |
+
class ExtType(namedtuple("ExtType", "code data")):
    """ExtType represents ext type in msgpack.

    An immutable (code, data) pair; `code` must be an int in 0..127 and
    `data` must be bytes.
    """

    def __new__(cls, code, data):
        # Validate the pair before delegating to the namedtuple ctor.
        if not isinstance(code, int):
            raise TypeError("code must be int")
        if not isinstance(data, bytes):
            raise TypeError("data must be bytes")
        if not (0 <= code <= 127):
            raise ValueError("code must be 0~127")
        return super(ExtType, cls).__new__(cls, code, data)
|
32 |
+
|
33 |
+
|
34 |
+
class Timestamp(object):
    """Timestamp represents the Timestamp extension type in msgpack.

    When built with Cython, msgpack uses C methods to pack and unpack
    `Timestamp`. When using pure-Python msgpack, :func:`to_bytes` and
    :func:`from_bytes` are used to pack and unpack `Timestamp`.

    This class is immutable: Do not override seconds and nanoseconds.
    """

    __slots__ = ["seconds", "nanoseconds"]

    def __init__(self, seconds, nanoseconds=0):
        """Initialize a Timestamp object.

        :param int seconds:
            Number of seconds since the UNIX epoch (00:00:00 UTC Jan 1 1970,
            minus leap seconds). May be negative.

        :param int nanoseconds:
            Number of nanoseconds to add to `seconds` to get fractional time.
            Maximum is 999_999_999. Default is 0.

        Note: Negative times (before the UNIX epoch) are represented as
        negative seconds + positive ns.

        :raises TypeError: if either argument is not an integer.
        :raises ValueError: if nanoseconds is out of range.
        """
        if not isinstance(seconds, int_types):
            # Bug fix: the error message previously read "interger".
            raise TypeError("seconds must be an integer")
        if not isinstance(nanoseconds, int_types):
            raise TypeError("nanoseconds must be an integer")
        if not (0 <= nanoseconds < 10 ** 9):
            raise ValueError(
                "nanoseconds must be a non-negative integer less than 999999999."
            )
        self.seconds = seconds
        self.nanoseconds = nanoseconds

    def __repr__(self):
        """String representation of Timestamp."""
        return "Timestamp(seconds={0}, nanoseconds={1})".format(
            self.seconds, self.nanoseconds
        )

    def __eq__(self, other):
        """Check for equality with another Timestamp object"""
        if type(other) is self.__class__:
            return (
                self.seconds == other.seconds and self.nanoseconds == other.nanoseconds
            )
        return False

    def __ne__(self, other):
        """not-equals method (see :func:`__eq__()`)"""
        return not self.__eq__(other)

    def __hash__(self):
        # Defining __eq__ suppresses inherited hashing, so restore it
        # from the immutable (seconds, nanoseconds) pair.
        return hash((self.seconds, self.nanoseconds))

    @staticmethod
    def from_bytes(b):
        """Unpack bytes into a `Timestamp` object.

        Used for pure-Python msgpack unpacking.

        :param b: Payload from msgpack ext message with code -1
        :type b: bytes

        :returns: Timestamp object unpacked from msgpack ext payload
        :rtype: Timestamp
        :raises ValueError: if the payload is not 4, 8 or 12 bytes.
        """
        if len(b) == 4:
            # timestamp 32: whole seconds only.
            seconds = struct.unpack("!L", b)[0]
            nanoseconds = 0
        elif len(b) == 8:
            # timestamp 64: 34-bit seconds in the low bits, 30-bit
            # nanoseconds in the high bits.
            data64 = struct.unpack("!Q", b)[0]
            seconds = data64 & 0x00000003FFFFFFFF
            nanoseconds = data64 >> 34
        elif len(b) == 12:
            # timestamp 96: 32-bit nanoseconds + 64-bit signed seconds.
            nanoseconds, seconds = struct.unpack("!Iq", b)
        else:
            raise ValueError(
                "Timestamp type can only be created from 32, 64, or 96-bit byte objects"
            )
        return Timestamp(seconds, nanoseconds)

    def to_bytes(self):
        """Pack this Timestamp object into bytes.

        Used for pure-Python msgpack packing.

        :returns data: Payload for EXT message with code -1 (timestamp type)
        :rtype: bytes
        """
        if (self.seconds >> 34) == 0:  # seconds is non-negative and fits in 34 bits
            data64 = self.nanoseconds << 34 | self.seconds
            if data64 & 0xFFFFFFFF00000000 == 0:
                # nanoseconds is zero and seconds < 2**32, so timestamp 32
                data = struct.pack("!L", data64)
            else:
                # timestamp 64
                data = struct.pack("!Q", data64)
        else:
            # timestamp 96
            data = struct.pack("!Iq", self.nanoseconds, self.seconds)
        return data

    @staticmethod
    def from_unix(unix_sec):
        """Create a Timestamp from posix timestamp in seconds.

        :param unix_sec: Posix timestamp in seconds.
        :type unix_sec: int or float

        :rtype: Timestamp
        """
        # Doc fix: the parameter was previously documented as
        # "unix_float", which does not exist.
        seconds = int(unix_sec // 1)
        nanoseconds = int((unix_sec % 1) * 10 ** 9)
        return Timestamp(seconds, nanoseconds)

    def to_unix(self):
        """Get the timestamp as a floating-point value.

        :returns: posix timestamp
        :rtype: float
        """
        return self.seconds + self.nanoseconds / 1e9

    @staticmethod
    def from_unix_nano(unix_ns):
        """Create a Timestamp from posix timestamp in nanoseconds.

        :param int unix_ns: Posix timestamp in nanoseconds.
        :rtype: Timestamp
        """
        return Timestamp(*divmod(unix_ns, 10 ** 9))

    def to_unix_nano(self):
        """Get the timestamp as a unixtime in nanoseconds.

        :returns: posix timestamp in nanoseconds
        :rtype: int
        """
        return self.seconds * 10 ** 9 + self.nanoseconds

    def to_datetime(self):
        """Get the timestamp as a UTC datetime.

        Python 2 is not supported.

        :rtype: datetime.
        """
        return datetime.datetime.fromtimestamp(0, _utc) + datetime.timedelta(
            seconds=self.to_unix()
        )

    @staticmethod
    def from_datetime(dt):
        """Create a Timestamp from datetime with tzinfo.

        Python 2 is not supported.

        :rtype: Timestamp
        """
        return Timestamp.from_unix(dt.timestamp())
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/msgpack/fallback.py
ADDED
@@ -0,0 +1,1012 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Fallback pure Python implementation of msgpack"""
|
2 |
+
from datetime import datetime as _DateTime
|
3 |
+
import sys
|
4 |
+
import struct
|
5 |
+
|
6 |
+
|
7 |
+
# True when running under Python 2; selects compat shims below.
PY2 = sys.version_info[0] == 2
if PY2:
    # Python 2: accept both int and long, and use lazy iteritems().
    int_types = (int, long)

    def dict_iteritems(d):
        return d.iteritems()


else:
    int_types = int
    # Aliases so the Python 2 names keep working under Python 3.
    unicode = str
    xrange = range

    def dict_iteritems(d):
        return d.items()


if sys.version_info < (3, 5):
    # Ugly hack...
    # RecursionError does not exist before 3.5; fall back to
    # RuntimeError and detect recursion errors by their message.
    RecursionError = RuntimeError

    def _is_recursionerror(e):
        return (
            len(e.args) == 1
            and isinstance(e.args[0], str)
            and e.args[0].startswith("maximum recursion depth exceeded")
        )


else:

    def _is_recursionerror(e):
        # On 3.5+ catching RecursionError is already precise.
        return True


if hasattr(sys, "pypy_version_info"):
    # StringIO is slow on PyPy, StringIO is faster. However: PyPy's own
    # StringBuilder is fastest.
    from __pypy__ import newlist_hint

    try:
        from __pypy__.builders import BytesBuilder as StringBuilder
    except ImportError:
        from __pypy__.builders import StringBuilder
    USING_STRINGBUILDER = True

    class StringIO(object):
        # Minimal write-only StringIO backed by PyPy's StringBuilder.
        def __init__(self, s=b""):
            if s:
                self.builder = StringBuilder(len(s))
                self.builder.append(s)
            else:
                self.builder = StringBuilder()

        def write(self, s):
            # StringBuilder only accepts bytes; normalize views first.
            if isinstance(s, memoryview):
                s = s.tobytes()
            elif isinstance(s, bytearray):
                s = bytes(s)
            self.builder.append(s)

        def getvalue(self):
            return self.builder.build()


else:
    USING_STRINGBUILDER = False
    from io import BytesIO as StringIO

    # No PyPy pre-sizing hint available; a plain list works everywhere.
    newlist_hint = lambda size: []
|
77 |
+
|
78 |
+
|
79 |
+
from .exceptions import BufferFull, OutOfData, ExtraData, FormatError, StackError
|
80 |
+
|
81 |
+
from .ext import ExtType, Timestamp
|
82 |
+
|
83 |
+
|
84 |
+
# Execution modes controlling how nested objects are handled while
# unpacking (presumably consumed by Unpacker internals — confirm there).
EX_SKIP = 0
EX_CONSTRUCT = 1
EX_READ_ARRAY_HEADER = 2
EX_READ_MAP_HEADER = 3

# Tags for the kind of msgpack object decoded from a header byte
# (see the third element of _MSGPACK_HEADERS entries).
TYPE_IMMEDIATE = 0
TYPE_ARRAY = 1
TYPE_MAP = 2
TYPE_RAW = 3
TYPE_BIN = 4
TYPE_EXT = 5

# Default limit on nesting depth during pack/unpack.
DEFAULT_RECURSE_LIMIT = 511
|
97 |
+
|
98 |
+
|
99 |
+
def _check_type_strict(obj, t, type=type, tuple=tuple):
|
100 |
+
if type(t) is tuple:
|
101 |
+
return type(obj) in t
|
102 |
+
else:
|
103 |
+
return type(obj) is t
|
104 |
+
|
105 |
+
|
106 |
+
def _get_data_from_buffer(obj):
|
107 |
+
view = memoryview(obj)
|
108 |
+
if view.itemsize != 1:
|
109 |
+
raise ValueError("cannot unpack from multi-byte object")
|
110 |
+
return view
|
111 |
+
|
112 |
+
|
113 |
+
def unpackb(packed, **kwargs):
    """
    Unpack an object from `packed`.

    Raises ``ExtraData`` when *packed* contains extra bytes.
    Raises ``ValueError`` when *packed* is incomplete.
    Raises ``FormatError`` when *packed* is not valid msgpack.
    Raises ``StackError`` when *packed* contains too nested.
    Other exceptions can be raised during unpacking.

    See :class:`Unpacker` for options.
    """
    unpacker = Unpacker(None, max_buffer_size=len(packed), **kwargs)
    unpacker.feed(packed)
    try:
        result = unpacker._unpack()
    except OutOfData:
        raise ValueError("Unpack failed: incomplete input")
    except RecursionError as e:
        # Pre-3.5 RecursionError is aliased to RuntimeError, so verify
        # the message before translating; re-raise anything else.
        if not _is_recursionerror(e):
            raise
        raise StackError
    if unpacker._got_extradata():
        raise ExtraData(result, unpacker._get_extradata())
    return result
|
138 |
+
|
139 |
+
|
140 |
+
if sys.version_info < (2, 7, 6):

    def _unpack_from(f, b, o=0):
        """Explicit type cast for legacy struct.unpack_from"""
        # Old struct.unpack_from cannot handle arbitrary buffer-protocol
        # objects; copy into a real bytes object first.
        return struct.unpack_from(f, bytes(b), o)


else:
    # Modern struct.unpack_from accepts buffers directly.
    _unpack_from = struct.unpack_from
|
149 |
+
|
150 |
+
# Sentinel meaning "read a single raw byte instead of struct-unpacking".
_NO_FORMAT_USED = ""

# Decoding table for fixed msgpack header bytes:
#   code -> (size of the length/value field, struct format, [type tag]).
# Entries without a type tag (floats and ints) decode to immediate values.
_MSGPACK_HEADERS = {
    # bin 8/16/32
    0xC4: (1, _NO_FORMAT_USED, TYPE_BIN),
    0xC5: (2, ">H", TYPE_BIN),
    0xC6: (4, ">I", TYPE_BIN),
    # ext 8/16/32 (length + signed type code)
    0xC7: (2, "Bb", TYPE_EXT),
    0xC8: (3, ">Hb", TYPE_EXT),
    0xC9: (5, ">Ib", TYPE_EXT),
    # float 32/64
    0xCA: (4, ">f"),
    0xCB: (8, ">d"),
    # uint 8/16/32/64
    0xCC: (1, _NO_FORMAT_USED),
    0xCD: (2, ">H"),
    0xCE: (4, ">I"),
    0xCF: (8, ">Q"),
    # int 8/16/32/64
    0xD0: (1, "b"),
    0xD1: (2, ">h"),
    0xD2: (4, ">i"),
    0xD3: (8, ">q"),
    # fixext 1/2/4/8/16 (signed type code + fixed-size payload)
    0xD4: (1, "b1s", TYPE_EXT),
    0xD5: (2, "b2s", TYPE_EXT),
    0xD6: (4, "b4s", TYPE_EXT),
    0xD7: (8, "b8s", TYPE_EXT),
    0xD8: (16, "b16s", TYPE_EXT),
    # str 8/16/32
    0xD9: (1, _NO_FORMAT_USED, TYPE_RAW),
    0xDA: (2, ">H", TYPE_RAW),
    0xDB: (4, ">I", TYPE_RAW),
    # array 16/32
    0xDC: (2, ">H", TYPE_ARRAY),
    0xDD: (4, ">I", TYPE_ARRAY),
    # map 16/32
    0xDE: (2, ">H", TYPE_MAP),
    0xDF: (4, ">I", TYPE_MAP),
}
class Unpacker(object):
    """Streaming msgpack unpacker.

    Feed raw bytes in with :meth:`feed` (or hand a *file_like* object to the
    constructor) and pull deserialized objects out by iterating, or with
    :meth:`unpack` / :meth:`skip`.

    :param file_like:
        Object with a ``.read(n)`` method.  When given, serialized data is
        read from it on demand and :meth:`feed` may not be used.
    :param int read_size:
        Chunk size used as ``file_like.read(read_size)``.
        (default: ``min(16*1024, max_buffer_size)``)
    :param bool use_list:
        Unpack msgpack arrays to Python list when true (default),
        otherwise to tuple.
    :param bool raw:
        Unpack msgpack raw to Python bytes when true; otherwise decode to
        str with UTF-8 (default).
    :param int timestamp:
        How the timestamp extension type is unpacked:

        0 - Timestamp
        1 - float (Seconds from the EPOCH)
        2 - int (Nanoseconds from the EPOCH)
        3 - datetime.datetime (UTC). Python 2 is not supported.
    :param bool strict_map_key:
        If true (default), only str or bytes are accepted for map (dict) keys.
    :param callable object_hook:
        Called with a dict argument after unpacking a msgpack map.
    :param callable object_pairs_hook:
        Called with a list of key-value pairs after unpacking a msgpack map.
        Mutually exclusive with *object_hook*.
    :param callable list_hook:
        Called with the list after unpacking a msgpack array.
    :param str unicode_errors:
        Error handler for decoding unicode (default: 'strict').  Only useful
        for msgpack data containing invalid UTF-8.
    :param int max_buffer_size:
        Limits size of data waiting to be unpacked; 0 means 2**31-1.
        Default is 100*1024*1024 (100MiB).  Raises `BufferFull` when
        insufficient.  Set this when unpacking data from untrusted sources.
    :param callable ext_hook:
        Called with (code, data) for ext types (default: ExtType).
    :param int max_str_len:
        Deprecated, use *max_buffer_size* instead.  Max str length.
    :param int max_bin_len:
        Deprecated, use *max_buffer_size* instead.  Max bin length.
    :param int max_array_len:
        Max array length (default: max_buffer_size).
    :param int max_map_len:
        Max map length (default: max_buffer_size//2).
    :param int max_ext_len:
        Deprecated, use *max_buffer_size* instead.  Max ext size.

    Example of streaming deserialize from file-like object::

        unpacker = Unpacker(file_like)
        for o in unpacker:
            process(o)

    Example of streaming deserialize from socket::

        unpacker = Unpacker()
        while True:
            buf = sock.recv(1024**2)
            if not buf:
                break
            unpacker.feed(buf)
            for o in unpacker:
                process(o)

    Raises ``ExtraData`` when *packed* contains extra bytes.
    Raises ``OutOfData`` when *packed* is incomplete.
    Raises ``FormatError`` when *packed* is not valid msgpack.
    Raises ``StackError`` when *packed* contains too nested.
    Other exceptions can be raised during unpacking.
    """

    def __init__(
        self,
        file_like=None,
        read_size=0,
        use_list=True,
        raw=False,
        timestamp=0,
        strict_map_key=True,
        object_hook=None,
        object_pairs_hook=None,
        list_hook=None,
        unicode_errors=None,
        max_buffer_size=100 * 1024 * 1024,
        ext_hook=ExtType,
        max_str_len=-1,
        max_bin_len=-1,
        max_array_len=-1,
        max_map_len=-1,
        max_ext_len=-1,
    ):
        if unicode_errors is None:
            unicode_errors = "strict"

        if file_like is None:
            self._feeding = True
        else:
            if not callable(file_like.read):
                raise TypeError("`file_like.read` must be callable")
            self.file_like = file_like
            self._feeding = False

        # Raw bytes waiting to be decoded, and the current read cursor.
        self._buffer = bytearray()
        self._buff_i = 0

        # Between next() calls the buffer is consumed lazily for efficiency.
        # _buf_checkpoint marks how far we have definitively consumed so we
        # can (a) raise BufferFull at the right moment and (b) roll the
        # cursor back to a consistent state when input runs out (OutOfData).
        self._buf_checkpoint = 0

        if not max_buffer_size:
            max_buffer_size = 2 ** 31 - 1
        # The deprecated per-kind limits default to the buffer limit.
        if max_str_len == -1:
            max_str_len = max_buffer_size
        if max_bin_len == -1:
            max_bin_len = max_buffer_size
        if max_array_len == -1:
            max_array_len = max_buffer_size
        if max_map_len == -1:
            max_map_len = max_buffer_size // 2
        if max_ext_len == -1:
            max_ext_len = max_buffer_size

        self._max_buffer_size = max_buffer_size
        if read_size > self._max_buffer_size:
            raise ValueError("read_size must be smaller than max_buffer_size")
        self._read_size = read_size or min(self._max_buffer_size, 16 * 1024)
        self._raw = bool(raw)
        self._strict_map_key = bool(strict_map_key)
        self._unicode_errors = unicode_errors
        self._use_list = use_list
        if not (0 <= timestamp <= 3):
            raise ValueError("timestamp must be 0..3")
        self._timestamp = timestamp
        self._list_hook = list_hook
        self._object_hook = object_hook
        self._object_pairs_hook = object_pairs_hook
        self._ext_hook = ext_hook
        self._max_str_len = max_str_len
        self._max_bin_len = max_bin_len
        self._max_array_len = max_array_len
        self._max_map_len = max_map_len
        self._max_ext_len = max_ext_len
        self._stream_offset = 0

        # Validate hooks up front so failures surface at construction time.
        if list_hook is not None and not callable(list_hook):
            raise TypeError("`list_hook` is not callable")
        if object_hook is not None and not callable(object_hook):
            raise TypeError("`object_hook` is not callable")
        if object_pairs_hook is not None and not callable(object_pairs_hook):
            raise TypeError("`object_pairs_hook` is not callable")
        if object_hook is not None and object_pairs_hook is not None:
            raise TypeError(
                "object_pairs_hook and object_hook are mutually " "exclusive"
            )
        if not callable(ext_hook):
            raise TypeError("`ext_hook` is not callable")

    def feed(self, next_bytes):
        """Append *next_bytes* to the internal buffer (feeding mode only)."""
        assert self._feeding
        view = _get_data_from_buffer(next_bytes)
        if len(self._buffer) - self._buff_i + len(view) > self._max_buffer_size:
            raise BufferFull

        # Drop already-consumed bytes before growing the buffer.
        if self._buf_checkpoint > 0:
            del self._buffer[: self._buf_checkpoint]
            self._buff_i -= self._buf_checkpoint
            self._buf_checkpoint = 0

        # Use extend here: INPLACE_ADD += doesn't reliably typecast memoryview in jython
        self._buffer.extend(view)

    def _consume(self):
        """Gets rid of the used parts of the buffer."""
        self._stream_offset += self._buff_i - self._buf_checkpoint
        self._buf_checkpoint = self._buff_i

    def _got_extradata(self):
        # True when unread bytes remain past the cursor.
        return self._buff_i < len(self._buffer)

    def _get_extradata(self):
        # The unread tail of the buffer.
        return self._buffer[self._buff_i :]

    def read_bytes(self, n):
        """Read up to *n* raw bytes from the stream (may return fewer)."""
        data = self._read(n, raise_outofdata=False)
        self._consume()
        return data

    def _read(self, n, raise_outofdata=True):
        # (int) -> bytearray
        self._reserve(n, raise_outofdata=raise_outofdata)
        start = self._buff_i
        data = self._buffer[start : start + n]
        self._buff_i = start + len(data)
        return data

    def _reserve(self, n, raise_outofdata=True):
        """Ensure at least *n* unread bytes are buffered, reading from
        file_like if necessary; raise OutOfData (after rolling the cursor
        back) when that is impossible."""
        remain_bytes = len(self._buffer) - self._buff_i - n

        # Fast path: buffer has n bytes already
        if remain_bytes >= 0:
            return

        if self._feeding:
            self._buff_i = self._buf_checkpoint
            raise OutOfData

        # Strip buffer before checkpoint before reading file.
        if self._buf_checkpoint > 0:
            del self._buffer[: self._buf_checkpoint]
            self._buff_i -= self._buf_checkpoint
            self._buf_checkpoint = 0

        # Pull at least read_size bytes per call to amortize read() cost.
        remain_bytes = -remain_bytes
        while remain_bytes > 0:
            to_read_bytes = max(self._read_size, remain_bytes)
            read_data = self.file_like.read(to_read_bytes)
            if not read_data:
                break
            assert isinstance(read_data, bytes)
            self._buffer += read_data
            remain_bytes -= len(read_data)

        if len(self._buffer) < n + self._buff_i and raise_outofdata:
            self._buff_i = 0  # rollback
            raise OutOfData

    def _read_header(self):
        """Decode one msgpack header byte.

        Returns ``(typ, n, obj)`` where *typ* is a TYPE_* tag, *n* is the
        element count / payload length / ext code, and *obj* is the decoded
        immediate value or raw payload (when applicable).
        """
        typ = TYPE_IMMEDIATE
        n = 0
        obj = None
        self._reserve(1)
        b = self._buffer[self._buff_i]
        self._buff_i += 1
        if b & 0b10000000 == 0:
            # positive fixint
            obj = b
        elif b & 0b11100000 == 0b11100000:
            # negative fixint
            obj = -1 - (b ^ 0xFF)
        elif b & 0b11100000 == 0b10100000:
            # fixstr
            n = b & 0b00011111
            typ = TYPE_RAW
            if n > self._max_str_len:
                raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len))
            obj = self._read(n)
        elif b & 0b11110000 == 0b10010000:
            # fixarray
            n = b & 0b00001111
            typ = TYPE_ARRAY
            if n > self._max_array_len:
                raise ValueError(
                    "%s exceeds max_array_len(%s)" % (n, self._max_array_len)
                )
        elif b & 0b11110000 == 0b10000000:
            # fixmap
            n = b & 0b00001111
            typ = TYPE_MAP
            if n > self._max_map_len:
                raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len))
        elif b == 0xC0:
            obj = None
        elif b == 0xC2:
            obj = False
        elif b == 0xC3:
            obj = True
        elif 0xC4 <= b <= 0xC6:
            # bin 8/16/32
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            if len(fmt) > 0:
                n = _unpack_from(fmt, self._buffer, self._buff_i)[0]
            else:
                n = self._buffer[self._buff_i]
            self._buff_i += size
            if n > self._max_bin_len:
                raise ValueError("%s exceeds max_bin_len(%s)" % (n, self._max_bin_len))
            obj = self._read(n)
        elif 0xC7 <= b <= 0xC9:
            # ext 8/16/32
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            L, n = _unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size
            if L > self._max_ext_len:
                raise ValueError("%s exceeds max_ext_len(%s)" % (L, self._max_ext_len))
            obj = self._read(L)
        elif 0xCA <= b <= 0xD3:
            # float/uint/int families: immediate numeric value
            size, fmt = _MSGPACK_HEADERS[b]
            self._reserve(size)
            if len(fmt) > 0:
                obj = _unpack_from(fmt, self._buffer, self._buff_i)[0]
            else:
                obj = self._buffer[self._buff_i]
            self._buff_i += size
        elif 0xD4 <= b <= 0xD8:
            # fixext 1/2/4/8/16
            size, fmt, typ = _MSGPACK_HEADERS[b]
            if self._max_ext_len < size:
                raise ValueError(
                    "%s exceeds max_ext_len(%s)" % (size, self._max_ext_len)
                )
            self._reserve(size + 1)
            n, obj = _unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size + 1
        elif 0xD9 <= b <= 0xDB:
            # str 8/16/32
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            if len(fmt) > 0:
                (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
            else:
                n = self._buffer[self._buff_i]
            self._buff_i += size
            if n > self._max_str_len:
                raise ValueError("%s exceeds max_str_len(%s)" % (n, self._max_str_len))
            obj = self._read(n)
        elif 0xDC <= b <= 0xDD:
            # array 16/32
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size
            if n > self._max_array_len:
                raise ValueError(
                    "%s exceeds max_array_len(%s)" % (n, self._max_array_len)
                )
        elif 0xDE <= b <= 0xDF:
            # map 16/32
            size, fmt, typ = _MSGPACK_HEADERS[b]
            self._reserve(size)
            (n,) = _unpack_from(fmt, self._buffer, self._buff_i)
            self._buff_i += size
            if n > self._max_map_len:
                raise ValueError("%s exceeds max_map_len(%s)" % (n, self._max_map_len))
        else:
            raise FormatError("Unknown header: 0x%x" % b)
        return typ, n, obj

    def _unpack(self, execute=EX_CONSTRUCT):
        """Recursively decode one object according to *execute* mode."""
        typ, n, obj = self._read_header()

        if execute == EX_READ_ARRAY_HEADER:
            if typ != TYPE_ARRAY:
                raise ValueError("Expected array")
            return n
        if execute == EX_READ_MAP_HEADER:
            if typ != TYPE_MAP:
                raise ValueError("Expected map")
            return n
        # TODO should we eliminate the recursion?
        if typ == TYPE_ARRAY:
            if execute == EX_SKIP:
                for _ in xrange(n):
                    # TODO check whether we need to call `list_hook`
                    self._unpack(EX_SKIP)
                return
            ret = newlist_hint(n)
            for _ in xrange(n):
                ret.append(self._unpack(EX_CONSTRUCT))
            if self._list_hook is not None:
                ret = self._list_hook(ret)
            # TODO is the interaction between `list_hook` and `use_list` ok?
            return ret if self._use_list else tuple(ret)
        if typ == TYPE_MAP:
            if execute == EX_SKIP:
                for _ in xrange(n):
                    # TODO check whether we need to call hooks
                    self._unpack(EX_SKIP)
                    self._unpack(EX_SKIP)
                return
            if self._object_pairs_hook is not None:
                ret = self._object_pairs_hook(
                    (self._unpack(EX_CONSTRUCT), self._unpack(EX_CONSTRUCT))
                    for _ in xrange(n)
                )
            else:
                ret = {}
                for _ in xrange(n):
                    key = self._unpack(EX_CONSTRUCT)
                    if self._strict_map_key and type(key) not in (unicode, bytes):
                        raise ValueError(
                            "%s is not allowed for map key" % str(type(key))
                        )
                    if not PY2 and type(key) is str:
                        # Interned keys share storage and compare by identity.
                        key = sys.intern(key)
                    ret[key] = self._unpack(EX_CONSTRUCT)
                if self._object_hook is not None:
                    ret = self._object_hook(ret)
            return ret
        if execute == EX_SKIP:
            return
        if typ == TYPE_RAW:
            if self._raw:
                obj = bytes(obj)
            else:
                obj = obj.decode("utf_8", self._unicode_errors)
            return obj
        if typ == TYPE_BIN:
            return bytes(obj)
        if typ == TYPE_EXT:
            if n == -1:  # timestamp
                ts = Timestamp.from_bytes(bytes(obj))
                if self._timestamp == 1:
                    return ts.to_unix()
                elif self._timestamp == 2:
                    return ts.to_unix_nano()
                elif self._timestamp == 3:
                    return ts.to_datetime()
                else:
                    return ts
            else:
                return self._ext_hook(n, bytes(obj))
        assert typ == TYPE_IMMEDIATE
        return obj

    def __iter__(self):
        return self

    def __next__(self):
        try:
            ret = self._unpack(EX_CONSTRUCT)
            self._consume()
            return ret
        except OutOfData:
            self._consume()
            raise StopIteration
        except RecursionError:
            raise StackError

    next = __next__

    def skip(self):
        """Decode and discard the next object."""
        self._unpack(EX_SKIP)
        self._consume()

    def unpack(self):
        """Decode and return the next object."""
        try:
            ret = self._unpack(EX_CONSTRUCT)
        except RecursionError:
            raise StackError
        self._consume()
        return ret

    def read_array_header(self):
        """Return the length of the next array without decoding its items."""
        ret = self._unpack(EX_READ_ARRAY_HEADER)
        self._consume()
        return ret

    def read_map_header(self):
        """Return the size of the next map without decoding its entries."""
        ret = self._unpack(EX_READ_MAP_HEADER)
        self._consume()
        return ret

    def tell(self):
        """Return the number of bytes consumed from the stream so far."""
        return self._stream_offset
class Packer(object):
    """
    MessagePack Packer

    Usage::

        packer = Packer()
        astream.write(packer.pack(a))
        astream.write(packer.pack(b))

    Packer's constructor has some keyword arguments:

    :param callable default:
        Convert user type to builtin type that Packer supports.
        See also simplejson's document.

    :param bool use_single_float:
        Use single precision float type for float. (default: False)

    :param bool autoreset:
        Reset buffer after each pack and return its content as `bytes`. (default: True).
        If set this to false, use `bytes()` to get content and `.reset()` to clear buffer.

    :param bool use_bin_type:
        Use bin type introduced in msgpack spec 2.0 for bytes.
        It also enables str8 type for unicode. (default: True)

    :param bool strict_types:
        If set to true, types will be checked to be exact.  Derived classes
        from serializable types will not be serialized and will be treated
        as unsupported type and forwarded to default.  Additionally tuples
        will not be serialized as lists.  Useful for accurate serialization
        of python types.

    :param bool datetime:
        If set to true, datetime with tzinfo is packed into Timestamp type.
        Note that the tzinfo is stripped in the timestamp.
        You can get UTC datetime with `timestamp=3` option of the Unpacker.
        (Python 2 is not supported).

    :param str unicode_errors:
        The error handler for encoding unicode. (default: 'strict')
        DO NOT USE THIS!! This option is kept for very specific usage.
    """

    def __init__(
        self,
        default=None,
        use_single_float=False,
        autoreset=True,
        use_bin_type=True,
        strict_types=False,
        datetime=False,
        unicode_errors=None,
    ):
        self._strict_types = strict_types
        self._use_float = use_single_float
        self._autoreset = autoreset
        self._use_bin_type = use_bin_type
        self._buffer = StringIO()
        if PY2 and datetime:
            raise ValueError("datetime is not supported in Python 2")
        self._datetime = bool(datetime)
        self._unicode_errors = unicode_errors or "strict"
        if default is not None:
            if not callable(default):
                raise TypeError("default must be callable")
        self._default = default

    def _pack(
        self,
        obj,
        nest_limit=DEFAULT_RECURSE_LIMIT,
        check=isinstance,
        check_type_strict=_check_type_strict,
    ):
        """Serialize *obj* into the internal buffer.

        Iterative where possible: when *default* converts an unsupported
        object, the loop retries with the converted value instead of
        recursing.  *default* is applied at most once per object.
        """
        default_used = False
        if self._strict_types:
            check = check_type_strict
            list_types = list
        else:
            list_types = (list, tuple)
        while True:
            if nest_limit < 0:
                raise ValueError("recursion limit exceeded")
            if obj is None:
                return self._buffer.write(b"\xc0")
            if check(obj, bool):
                # bool must precede int: bool is an int subclass.
                if obj:
                    return self._buffer.write(b"\xc3")
                return self._buffer.write(b"\xc2")
            if check(obj, int_types):
                # Choose the smallest encoding that fits the value.
                if 0 <= obj < 0x80:
                    return self._buffer.write(struct.pack("B", obj))
                if -0x20 <= obj < 0:
                    return self._buffer.write(struct.pack("b", obj))
                if 0x80 <= obj <= 0xFF:
                    return self._buffer.write(struct.pack("BB", 0xCC, obj))
                if -0x80 <= obj < 0:
                    return self._buffer.write(struct.pack(">Bb", 0xD0, obj))
                if 0xFF < obj <= 0xFFFF:
                    return self._buffer.write(struct.pack(">BH", 0xCD, obj))
                if -0x8000 <= obj < -0x80:
                    return self._buffer.write(struct.pack(">Bh", 0xD1, obj))
                if 0xFFFF < obj <= 0xFFFFFFFF:
                    return self._buffer.write(struct.pack(">BI", 0xCE, obj))
                if -0x80000000 <= obj < -0x8000:
                    return self._buffer.write(struct.pack(">Bi", 0xD2, obj))
                if 0xFFFFFFFF < obj <= 0xFFFFFFFFFFFFFFFF:
                    return self._buffer.write(struct.pack(">BQ", 0xCF, obj))
                if -0x8000000000000000 <= obj < -0x80000000:
                    return self._buffer.write(struct.pack(">Bq", 0xD3, obj))
                if not default_used and self._default is not None:
                    obj = self._default(obj)
                    default_used = True
                    continue
                raise OverflowError("Integer value out of range")
            if check(obj, (bytes, bytearray)):
                n = len(obj)
                if n >= 2 ** 32:
                    raise ValueError("%s is too large" % type(obj).__name__)
                self._pack_bin_header(n)
                return self._buffer.write(obj)
            if check(obj, unicode):
                obj = obj.encode("utf-8", self._unicode_errors)
                n = len(obj)
                if n >= 2 ** 32:
                    raise ValueError("String is too large")
                self._pack_raw_header(n)
                return self._buffer.write(obj)
            if check(obj, memoryview):
                n = len(obj) * obj.itemsize
                if n >= 2 ** 32:
                    raise ValueError("Memoryview is too large")
                self._pack_bin_header(n)
                return self._buffer.write(obj)
            if check(obj, float):
                if self._use_float:
                    return self._buffer.write(struct.pack(">Bf", 0xCA, obj))
                return self._buffer.write(struct.pack(">Bd", 0xCB, obj))
            if check(obj, (ExtType, Timestamp)):
                if check(obj, Timestamp):
                    code = -1
                    data = obj.to_bytes()
                else:
                    code = obj.code
                    data = obj.data
                assert isinstance(code, int)
                assert isinstance(data, bytes)
                L = len(data)
                # fixext for the five fixed sizes, ext 8/16/32 otherwise.
                if L == 1:
                    self._buffer.write(b"\xd4")
                elif L == 2:
                    self._buffer.write(b"\xd5")
                elif L == 4:
                    self._buffer.write(b"\xd6")
                elif L == 8:
                    self._buffer.write(b"\xd7")
                elif L == 16:
                    self._buffer.write(b"\xd8")
                elif L <= 0xFF:
                    self._buffer.write(struct.pack(">BB", 0xC7, L))
                elif L <= 0xFFFF:
                    self._buffer.write(struct.pack(">BH", 0xC8, L))
                else:
                    self._buffer.write(struct.pack(">BI", 0xC9, L))
                self._buffer.write(struct.pack("b", code))
                self._buffer.write(data)
                return
            if check(obj, list_types):
                n = len(obj)
                self._pack_array_header(n)
                for i in xrange(n):
                    self._pack(obj[i], nest_limit - 1)
                return
            if check(obj, dict):
                return self._pack_map_pairs(
                    len(obj), dict_iteritems(obj), nest_limit - 1
                )

            if self._datetime and check(obj, _DateTime) and obj.tzinfo is not None:
                obj = Timestamp.from_datetime(obj)
                default_used = 1
                continue

            if not default_used and self._default is not None:
                obj = self._default(obj)
                default_used = 1
                continue

            if self._datetime and check(obj, _DateTime):
                raise ValueError("Cannot serialize %r where tzinfo=None" % (obj,))

            raise TypeError("Cannot serialize %r" % (obj,))

    def pack(self, obj):
        """Pack *obj* and (when autoreset) return the packed bytes."""
        try:
            self._pack(obj)
        except:
            # Leave the packer usable after a failed pack.
            self._buffer = StringIO()  # force reset
            raise
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = StringIO()
            return ret

    def pack_map_pairs(self, pairs):
        """Pack an iterable of (key, value) pairs as a msgpack map."""
        self._pack_map_pairs(len(pairs), pairs)
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = StringIO()
            return ret

    def pack_array_header(self, n):
        """Pack only an array header announcing *n* elements."""
        if n >= 2 ** 32:
            raise ValueError
        self._pack_array_header(n)
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = StringIO()
            return ret

    def pack_map_header(self, n):
        """Pack only a map header announcing *n* entries."""
        if n >= 2 ** 32:
            raise ValueError
        self._pack_map_header(n)
        if self._autoreset:
            ret = self._buffer.getvalue()
            self._buffer = StringIO()
            return ret

    def pack_ext_type(self, typecode, data):
        """Pack an application ext type (*typecode* 0..127, *data* bytes)."""
        if not isinstance(typecode, int):
            raise TypeError("typecode must have int type.")
        if not 0 <= typecode <= 127:
            raise ValueError("typecode should be 0-127")
        if not isinstance(data, bytes):
            raise TypeError("data must have bytes type")
        L = len(data)
        if L > 0xFFFFFFFF:
            raise ValueError("Too large data")
        if L == 1:
            self._buffer.write(b"\xd4")
        elif L == 2:
            self._buffer.write(b"\xd5")
        elif L == 4:
            self._buffer.write(b"\xd6")
        elif L == 8:
            self._buffer.write(b"\xd7")
        elif L == 16:
            self._buffer.write(b"\xd8")
        elif L <= 0xFF:
            self._buffer.write(b"\xc7" + struct.pack("B", L))
        elif L <= 0xFFFF:
            self._buffer.write(b"\xc8" + struct.pack(">H", L))
        else:
            self._buffer.write(b"\xc9" + struct.pack(">I", L))
        self._buffer.write(struct.pack("B", typecode))
        self._buffer.write(data)

    def _pack_array_header(self, n):
        # fixarray, then array 16/32.
        if n <= 0x0F:
            return self._buffer.write(struct.pack("B", 0x90 + n))
        if n <= 0xFFFF:
            return self._buffer.write(struct.pack(">BH", 0xDC, n))
        if n <= 0xFFFFFFFF:
            return self._buffer.write(struct.pack(">BI", 0xDD, n))
        raise ValueError("Array is too large")

    def _pack_map_header(self, n):
        # fixmap, then map 16/32.
        if n <= 0x0F:
            return self._buffer.write(struct.pack("B", 0x80 + n))
        if n <= 0xFFFF:
            return self._buffer.write(struct.pack(">BH", 0xDE, n))
        if n <= 0xFFFFFFFF:
            return self._buffer.write(struct.pack(">BI", 0xDF, n))
        raise ValueError("Dict is too large")

    def _pack_map_pairs(self, n, pairs, nest_limit=DEFAULT_RECURSE_LIMIT):
        # Header first, then alternating packed keys and values.
        self._pack_map_header(n)
        for (k, v) in pairs:
            self._pack(k, nest_limit - 1)
            self._pack(v, nest_limit - 1)

    def _pack_raw_header(self, n):
        # fixstr, str8 (only with bin type enabled), str 16/32.
        if n <= 0x1F:
            self._buffer.write(struct.pack("B", 0xA0 + n))
        elif self._use_bin_type and n <= 0xFF:
            self._buffer.write(struct.pack(">BB", 0xD9, n))
        elif n <= 0xFFFF:
            self._buffer.write(struct.pack(">BH", 0xDA, n))
        elif n <= 0xFFFFFFFF:
            self._buffer.write(struct.pack(">BI", 0xDB, n))
        else:
            raise ValueError("Raw is too large")

    def _pack_bin_header(self, n):
        # Fall back to raw encoding when bin type is disabled (spec 1.0).
        if not self._use_bin_type:
            return self._pack_raw_header(n)
        elif n <= 0xFF:
            return self._buffer.write(struct.pack(">BB", 0xC4, n))
        elif n <= 0xFFFF:
            return self._buffer.write(struct.pack(">BH", 0xC5, n))
        elif n <= 0xFFFFFFFF:
            return self._buffer.write(struct.pack(">BI", 0xC6, n))
        else:
            raise ValueError("Bin is too large")

    def bytes(self):
        """Return internal buffer contents as bytes object"""
        return self._buffer.getvalue()

    def reset(self):
        """Reset internal buffer.

        This method is useful only when autoreset=False.
        """
        self._buffer = StringIO()

    def getbuffer(self):
        """Return view of internal buffer."""
        if USING_STRINGBUILDER or PY2:
            # These buffer types cannot hand out a live view; copy instead.
            return memoryview(self.bytes())
        else:
            return self._buffer.getbuffer()
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py
ADDED
The diff for this file is too large to render.
See raw diff
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (99.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc
ADDED
Binary file (661 Bytes). View file
|
|