Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes. See raw diff.
- llmeval-env/lib/python3.10/site-packages/__editable__.lm_eval-0.4.2.pth +3 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py +21 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/base.py +36 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/installed.py +20 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py +127 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py +31 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__init__.py +62 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/base.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/base.py +546 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/pkg_resources.py +256 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__init__.py +2 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/candidate.py +34 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/direct_url.py +220 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/format_control.py +80 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/index.py +28 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/link.py +288 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/scheme.py +31 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/search_scope.py +129 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py +51 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/target_python.py +110 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/models/wheel.py +89 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/base.py +20 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py +467 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__init__.py +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/base.py +141 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/candidates.py +547 -0
- llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py +155 -0
llmeval-env/lib/python3.10/site-packages/__editable__.lm_eval-0.4.2.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0b87d24b5f41e9dfa2760bdd38e88bee0db23d5b34659d3ff52d013edad9d5ec
size 85
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py
ADDED
@@ -0,0 +1,21 @@
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.distributions.sdist import SourceDistribution
from pip._internal.distributions.wheel import WheelDistribution
from pip._internal.req.req_install import InstallRequirement


def make_distribution_for_install_requirement(
    install_req: InstallRequirement,
) -> AbstractDistribution:
    """Returns a Distribution for the given InstallRequirement"""
    # Editable requirements will always be source distributions. They use the
    # legacy logic until we create a modern standard for them.
    if install_req.editable:
        return SourceDistribution(install_req)

    # If it's a wheel, it's a WheelDistribution
    if install_req.is_wheel:
        return WheelDistribution(install_req)

    # Otherwise, a SourceDistribution
    return SourceDistribution(install_req)
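As a point of reference, a minimal sketch of how this dispatch helper is typically driven during pip's preparation phase. pip's `_internal` package is not a supported public API, and `ireq` and `finder` below are assumptions: an InstallRequirement and a PackageFinder constructed elsewhere by pip.

from pip._internal.distributions import make_distribution_for_install_requirement

# `ireq` (InstallRequirement) and `finder` (PackageFinder) are hypothetical
# objects produced by pip's own requirement-parsing and index machinery.
# Wheels dispatch to WheelDistribution; everything else, including editables,
# goes through SourceDistribution.
abstract_dist = make_distribution_for_install_requirement(ireq)
abstract_dist.prepare_distribution_metadata(finder, build_isolation=True)
metadata_dist = abstract_dist.get_metadata_distribution()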
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (800 Bytes).
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc
ADDED
Binary file (1.23 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/base.py
ADDED
@@ -0,0 +1,36 @@
import abc

from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata.base import BaseDistribution
from pip._internal.req import InstallRequirement


class AbstractDistribution(metaclass=abc.ABCMeta):
    """A base class for handling installable artifacts.

    The requirements for anything installable are as follows:

    - we must be able to determine the requirement name
      (or we can't correctly handle the non-upgrade case).

    - for packages with setup requirements, we must also be able
      to determine their requirements without installing additional
      packages (for the same reason as run-time dependencies)

    - we must be able to create a Distribution object exposing the
      above metadata.
    """

    def __init__(self, req: InstallRequirement) -> None:
        super().__init__()
        self.req = req

    @abc.abstractmethod
    def get_metadata_distribution(self) -> BaseDistribution:
        raise NotImplementedError()

    @abc.abstractmethod
    def prepare_distribution_metadata(
        self, finder: PackageFinder, build_isolation: bool
    ) -> None:
        raise NotImplementedError()
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/installed.py
ADDED
@@ -0,0 +1,20 @@
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution


class InstalledDistribution(AbstractDistribution):
    """Represents an installed package.

    This does not need any preparation as the required information has already
    been computed.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        assert self.req.satisfied_by is not None, "not actually installed"
        return self.req.satisfied_by

    def prepare_distribution_metadata(
        self, finder: PackageFinder, build_isolation: bool
    ) -> None:
        pass
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py
ADDED
@@ -0,0 +1,127 @@
import logging
from typing import Iterable, Set, Tuple

from pip._internal.build_env import BuildEnvironment
from pip._internal.distributions.base import AbstractDistribution
from pip._internal.exceptions import InstallationError
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
from pip._internal.utils.subprocess import runner_with_spinner_message

logger = logging.getLogger(__name__)


class SourceDistribution(AbstractDistribution):
    """Represents a source distribution.

    The preparation step for these needs metadata for the packages to be
    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        return self.req.get_dist()

    def prepare_distribution_metadata(
        self, finder: PackageFinder, build_isolation: bool
    ) -> None:
        # Load pyproject.toml, to determine whether PEP 517 is to be used
        self.req.load_pyproject_toml()

        # Set up the build isolation, if this requirement should be isolated
        should_isolate = self.req.use_pep517 and build_isolation
        if should_isolate:
            # Setup an isolated environment and install the build backend static
            # requirements in it.
            self._prepare_build_backend(finder)
            # Check that if the requirement is editable, it either supports PEP 660 or
            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
            # to setup the build backend to verify it supports build_editable, nor can
            # it be done later, because we want to avoid installing build requirements
            # needlessly. Doing it here also works around setuptools generating
            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
            # without setup.py nor setup.cfg.
            self.req.isolated_editable_sanity_check()
            # Install the dynamic build requirements.
            self._install_build_reqs(finder)

        self.req.prepare_metadata()

    def _prepare_build_backend(self, finder: PackageFinder) -> None:
        # Isolate in a BuildEnvironment and install the build-time
        # requirements.
        pyproject_requires = self.req.pyproject_requires
        assert pyproject_requires is not None

        self.req.build_env = BuildEnvironment()
        self.req.build_env.install_requirements(
            finder, pyproject_requires, "overlay", kind="build dependencies"
        )
        conflicting, missing = self.req.build_env.check_requirements(
            self.req.requirements_to_check
        )
        if conflicting:
            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
        if missing:
            logger.warning(
                "Missing build requirements in pyproject.toml for %s.",
                self.req,
            )
            logger.warning(
                "The project does not specify a build backend, and "
                "pip cannot fall back to setuptools without %s.",
                " and ".join(map(repr, sorted(missing))),
            )

    def _get_build_requires_wheel(self) -> Iterable[str]:
        with self.req.build_env:
            runner = runner_with_spinner_message("Getting requirements to build wheel")
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_wheel()

    def _get_build_requires_editable(self) -> Iterable[str]:
        with self.req.build_env:
            runner = runner_with_spinner_message(
                "Getting requirements to build editable"
            )
            backend = self.req.pep517_backend
            assert backend is not None
            with backend.subprocess_runner(runner):
                return backend.get_requires_for_build_editable()

    def _install_build_reqs(self, finder: PackageFinder) -> None:
        # Install any extra build dependencies that the backend requests.
        # This must be done in a second pass, as the pyproject.toml
        # dependencies must be installed before we can call the backend.
        if (
            self.req.editable
            and self.req.permit_editable_wheels
            and self.req.supports_pyproject_editable()
        ):
            build_reqs = self._get_build_requires_editable()
        else:
            build_reqs = self._get_build_requires_wheel()
        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
        if conflicting:
            self._raise_conflicts("the backend dependencies", conflicting)
        self.req.build_env.install_requirements(
            finder, missing, "normal", kind="backend dependencies"
        )

    def _raise_conflicts(
        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
    ) -> None:
        format_string = (
            "Some build dependencies for {requirement} "
            "conflict with {conflicting_with}: {description}."
        )
        error_message = format_string.format(
            requirement=self.req,
            conflicting_with=conflicting_with,
            description=", ".join(
                f"{installed} is incompatible with {wanted}"
                for installed, wanted in sorted(conflicting_reqs)
            ),
        )
        raise InstallationError(error_message)
llmeval-env/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py
ADDED
@@ -0,0 +1,31 @@
from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.distributions.base import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import (
    BaseDistribution,
    FilesystemWheel,
    get_wheel_distribution,
)


class WheelDistribution(AbstractDistribution):
    """Represents a wheel distribution.

    This does not need any preparation as wheels can be directly unpacked.
    """

    def get_metadata_distribution(self) -> BaseDistribution:
        """Loads the metadata from the wheel file into memory and returns a
        Distribution that uses it, not relying on the wheel file or
        requirement.
        """
        assert self.req.local_file_path, "Set as part of preparation during download"
        assert self.req.name, "Wheels are never unnamed"
        wheel = FilesystemWheel(self.req.local_file_path)
        return get_wheel_distribution(wheel, canonicalize_name(self.req.name))

    def prepare_distribution_metadata(
        self, finder: PackageFinder, build_isolation: bool
    ) -> None:
        pass
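A small sketch of the path this module enables: reading metadata straight out of a wheel file without installing it, using the helpers shown above and in pip._internal.metadata. The wheel filename below is a placeholder, not a real artifact, and this relies on pip internals that are not a stable API.

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.metadata import FilesystemWheel, get_wheel_distribution

# Placeholder wheel path; get_wheel_distribution parses the wheel's
# .dist-info directory in memory rather than installing the package.
wheel = FilesystemWheel("example_pkg-1.0-py3-none-any.whl")
dist = get_wheel_distribution(wheel, canonicalize_name("example_pkg"))
print(dist.metadata["Name"], dist.version)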
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (12.4 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/locations/__pycache__/_sysconfig.cpython-310.pyc
ADDED
Binary file (6.24 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__init__.py
ADDED
@@ -0,0 +1,62 @@
from typing import List, Optional

from .base import BaseDistribution, BaseEnvironment, FilesystemWheel, MemoryWheel, Wheel

__all__ = [
    "BaseDistribution",
    "BaseEnvironment",
    "FilesystemWheel",
    "MemoryWheel",
    "Wheel",
    "get_default_environment",
    "get_environment",
    "get_wheel_distribution",
]


def get_default_environment() -> BaseEnvironment:
    """Get the default representation for the current environment.

    This returns an Environment instance from the chosen backend. The default
    Environment instance should be built from ``sys.path`` and may use caching
    to share instance state across calls.
    """
    from .pkg_resources import Environment

    return Environment.default()


def get_environment(paths: Optional[List[str]]) -> BaseEnvironment:
    """Get a representation of the environment specified by ``paths``.

    This returns an Environment instance from the chosen backend based on the
    given import paths. The backend must build a fresh instance representing
    the state of installed distributions when this function is called.
    """
    from .pkg_resources import Environment

    return Environment.from_paths(paths)


def get_directory_distribution(directory: str) -> BaseDistribution:
    """Get the distribution metadata representation in the specified directory.

    This returns a Distribution instance from the chosen backend based on
    the given on-disk ``.dist-info`` directory.
    """
    from .pkg_resources import Distribution

    return Distribution.from_directory(directory)


def get_wheel_distribution(wheel: Wheel, canonical_name: str) -> BaseDistribution:
    """Get the representation of the specified wheel's distribution metadata.

    This returns a Distribution instance from the chosen backend based on
    the given wheel's ``.dist-info`` directory.

    :param canonical_name: Normalized project name of the given wheel.
    """
    from .pkg_resources import Distribution

    return Distribution.from_wheel(wheel, canonical_name)
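A short usage sketch for these accessors, assuming they are called from inside pip itself (the module is internal, so the import path may change between pip releases):

from pip._internal.metadata import get_default_environment

# Look up a single installed project by (possibly non-normalized) name.
dist = get_default_environment().get_distribution("pip")
if dist is not None:
    print(dist.raw_name, dist.version, dist.location)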
llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (2.29 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/base.cpython-310.pyc
ADDED
Binary file (20.8 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/__pycache__/pkg_resources.cpython-310.pyc
ADDED
Binary file (9.86 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/base.py
ADDED
@@ -0,0 +1,546 @@
import csv
import email.message
import json
import logging
import pathlib
import re
import zipfile
from typing import (
    IO,
    TYPE_CHECKING,
    Collection,
    Container,
    Iterable,
    Iterator,
    List,
    Optional,
    Tuple,
    Union,
)

from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import InvalidSpecifier, SpecifierSet
from pip._vendor.packaging.utils import NormalizedName
from pip._vendor.packaging.version import LegacyVersion, Version

from pip._internal.exceptions import NoneMetadataError
from pip._internal.locations import site_packages, user_site
from pip._internal.models.direct_url import (
    DIRECT_URL_METADATA_NAME,
    DirectUrl,
    DirectUrlValidationError,
)
from pip._internal.utils.compat import stdlib_pkgs  # TODO: Move definition here.
from pip._internal.utils.egg_link import (
    egg_link_path_from_location,
    egg_link_path_from_sys_path,
)
from pip._internal.utils.misc import is_local, normalize_path
from pip._internal.utils.urls import url_to_path

if TYPE_CHECKING:
    from typing import Protocol
else:
    Protocol = object

DistributionVersion = Union[LegacyVersion, Version]

InfoPath = Union[str, pathlib.PurePosixPath]

logger = logging.getLogger(__name__)


class BaseEntryPoint(Protocol):
    @property
    def name(self) -> str:
        raise NotImplementedError()

    @property
    def value(self) -> str:
        raise NotImplementedError()

    @property
    def group(self) -> str:
        raise NotImplementedError()


def _convert_installed_files_path(
    entry: Tuple[str, ...],
    info: Tuple[str, ...],
) -> str:
    """Convert a legacy installed-files.txt path into modern RECORD path.

    The legacy format stores paths relative to the info directory, while the
    modern format stores paths relative to the package root, e.g. the
    site-packages directory.

    :param entry: Path parts of the installed-files.txt entry.
    :param info: Path parts of the egg-info directory relative to package root.
    :returns: The converted entry.

    For best compatibility with symlinks, this does not use ``abspath()`` or
    ``Path.resolve()``, but tries to work with path parts:

    1. While ``entry`` starts with ``..``, remove the equal amounts of parts
       from ``info``; if ``info`` is empty, start appending ``..`` instead.
    2. Join the two directly.
    """
    while entry and entry[0] == "..":
        if not info or info[-1] == "..":
            info += ("..",)
        else:
            info = info[:-1]
        entry = entry[1:]
    return str(pathlib.Path(*info, *entry))


class BaseDistribution(Protocol):
    def __repr__(self) -> str:
        return f"{self.raw_name} {self.version} ({self.location})"

    def __str__(self) -> str:
        return f"{self.raw_name} {self.version}"

    @property
    def location(self) -> Optional[str]:
        """Where the distribution is loaded from.

        A string value is not necessarily a filesystem path, since distributions
        can be loaded from other sources, e.g. arbitrary zip archives. ``None``
        means the distribution is created in-memory.

        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
        this is a symbolic link, we want to preserve the relative path between
        it and files in the distribution.
        """
        raise NotImplementedError()

    @property
    def editable_project_location(self) -> Optional[str]:
        """The project location for editable distributions.

        This is the directory where pyproject.toml or setup.py is located.
        None if the distribution is not installed in editable mode.
        """
        # TODO: this property is relatively costly to compute, memoize it ?
        direct_url = self.direct_url
        if direct_url:
            if direct_url.is_local_editable():
                return url_to_path(direct_url.url)
        else:
            # Search for an .egg-link file by walking sys.path, as it was
            # done before by dist_is_editable().
            egg_link_path = egg_link_path_from_sys_path(self.raw_name)
            if egg_link_path:
                # TODO: get project location from second line of egg_link file
                #       (https://github.com/pypa/pip/issues/10243)
                return self.location
        return None

    @property
    def installed_location(self) -> Optional[str]:
        """The distribution's "installed" location.

        This should generally be a ``site-packages`` directory. This is
        usually ``dist.location``, except for legacy develop-installed packages,
        where ``dist.location`` is the source code location, and this is where
        the ``.egg-link`` file is.

        The returned location is normalized (in particular, with symlinks removed).
        """
        egg_link = egg_link_path_from_location(self.raw_name)
        if egg_link:
            location = egg_link
        elif self.location:
            location = self.location
        else:
            return None
        return normalize_path(location)

    @property
    def info_location(self) -> Optional[str]:
        """Location of the .[egg|dist]-info directory or file.

        Similarly to ``location``, a string value is not necessarily a
        filesystem path. ``None`` means the distribution is created in-memory.

        For a modern .dist-info installation on disk, this should be something
        like ``{location}/{raw_name}-{version}.dist-info``.

        Do not canonicalize this value with e.g. ``pathlib.Path.resolve()``. If
        this is a symbolic link, we want to preserve the relative path between
        it and other files in the distribution.
        """
        raise NotImplementedError()

    @property
    def installed_by_distutils(self) -> bool:
        """Whether this distribution is installed with legacy distutils format.

        A distribution installed with "raw" distutils not patched by setuptools
        uses one single file at ``info_location`` to store metadata. We need to
        treat this specially on uninstallation.
        """
        info_location = self.info_location
        if not info_location:
            return False
        return pathlib.Path(info_location).is_file()

    @property
    def installed_as_egg(self) -> bool:
        """Whether this distribution is installed as an egg.

        This usually indicates the distribution was installed by (older versions
        of) easy_install.
        """
        location = self.location
        if not location:
            return False
        return location.endswith(".egg")

    @property
    def installed_with_setuptools_egg_info(self) -> bool:
        """Whether this distribution is installed with the ``.egg-info`` format.

        This usually indicates the distribution was installed with setuptools
        with an old pip version or with ``single-version-externally-managed``.

        Note that this ensures the metadata store is a directory. distutils can
        also install an ``.egg-info``, but as a file, not a directory. This
        property is *False* for that case. Also see ``installed_by_distutils``.
        """
        info_location = self.info_location
        if not info_location:
            return False
        if not info_location.endswith(".egg-info"):
            return False
        return pathlib.Path(info_location).is_dir()

    @property
    def installed_with_dist_info(self) -> bool:
        """Whether this distribution is installed with the "modern format".

        This indicates a "modern" installation, e.g. storing metadata in the
        ``.dist-info`` directory. This applies to installations made by
        setuptools (but through pip, not directly), or anything using the
        standardized build backend interface (PEP 517).
        """
        info_location = self.info_location
        if not info_location:
            return False
        if not info_location.endswith(".dist-info"):
            return False
        return pathlib.Path(info_location).is_dir()

    @property
    def canonical_name(self) -> NormalizedName:
        raise NotImplementedError()

    @property
    def version(self) -> DistributionVersion:
        raise NotImplementedError()

    @property
    def setuptools_filename(self) -> str:
        """Convert a project name to its setuptools-compatible filename.

        This is a copy of ``pkg_resources.to_filename()`` for compatibility.
        """
        return self.raw_name.replace("-", "_")

    @property
    def direct_url(self) -> Optional[DirectUrl]:
        """Obtain a DirectUrl from this distribution.

        Returns None if the distribution has no `direct_url.json` metadata,
        or if `direct_url.json` is invalid.
        """
        try:
            content = self.read_text(DIRECT_URL_METADATA_NAME)
        except FileNotFoundError:
            return None
        try:
            return DirectUrl.from_json(content)
        except (
            UnicodeDecodeError,
            json.JSONDecodeError,
            DirectUrlValidationError,
        ) as e:
            logger.warning(
                "Error parsing %s for %s: %s",
                DIRECT_URL_METADATA_NAME,
                self.canonical_name,
                e,
            )
            return None

    @property
    def installer(self) -> str:
        try:
            installer_text = self.read_text("INSTALLER")
        except (OSError, ValueError, NoneMetadataError):
            return ""  # Fail silently if the installer file cannot be read.
        for line in installer_text.splitlines():
            cleaned_line = line.strip()
            if cleaned_line:
                return cleaned_line
        return ""

    @property
    def editable(self) -> bool:
        return bool(self.editable_project_location)

    @property
    def local(self) -> bool:
        """If distribution is installed in the current virtual environment.

        Always True if we're not in a virtualenv.
        """
        if self.installed_location is None:
            return False
        return is_local(self.installed_location)

    @property
    def in_usersite(self) -> bool:
        if self.installed_location is None or user_site is None:
            return False
        return self.installed_location.startswith(normalize_path(user_site))

    @property
    def in_site_packages(self) -> bool:
        if self.installed_location is None or site_packages is None:
            return False
        return self.installed_location.startswith(normalize_path(site_packages))

    def is_file(self, path: InfoPath) -> bool:
        """Check whether an entry in the info directory is a file."""
        raise NotImplementedError()

    def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
        """Iterate through a directory in the info directory.

        Each item yielded would be a path relative to the info directory.

        :raise FileNotFoundError: If ``name`` does not exist in the directory.
        :raise NotADirectoryError: If ``name`` does not point to a directory.
        """
        raise NotImplementedError()

    def read_text(self, path: InfoPath) -> str:
        """Read a file in the info directory.

        :raise FileNotFoundError: If ``name`` does not exist in the directory.
        :raise NoneMetadataError: If ``name`` exists in the info directory, but
            cannot be read.
        """
        raise NotImplementedError()

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        raise NotImplementedError()

    @property
    def metadata(self) -> email.message.Message:
        """Metadata of distribution parsed from e.g. METADATA or PKG-INFO.

        This should return an empty message if the metadata file is unavailable.

        :raises NoneMetadataError: If the metadata file is available, but does
            not contain valid metadata.
        """
        raise NotImplementedError()

    @property
    def metadata_version(self) -> Optional[str]:
        """Value of "Metadata-Version:" in distribution metadata, if available."""
        return self.metadata.get("Metadata-Version")

    @property
    def raw_name(self) -> str:
        """Value of "Name:" in distribution metadata."""
        # The metadata should NEVER be missing the Name: key, but if it somehow
        # does, fall back to the known canonical name.
        return self.metadata.get("Name", self.canonical_name)

    @property
    def requires_python(self) -> SpecifierSet:
        """Value of "Requires-Python:" in distribution metadata.

        If the key does not exist or contains an invalid value, an empty
        SpecifierSet should be returned.
        """
        value = self.metadata.get("Requires-Python")
        if value is None:
            return SpecifierSet()
        try:
            # Convert to str to satisfy the type checker; this can be a Header object.
            spec = SpecifierSet(str(value))
        except InvalidSpecifier as e:
            message = "Package %r has an invalid Requires-Python: %s"
            logger.warning(message, self.raw_name, e)
            return SpecifierSet()
        return spec

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        """Dependencies of this distribution.

        For modern .dist-info distributions, this is the collection of
        "Requires-Dist:" entries in distribution metadata.
        """
        raise NotImplementedError()

    def iter_provided_extras(self) -> Iterable[str]:
        """Extras provided by this distribution.

        For modern .dist-info distributions, this is the collection of
        "Provides-Extra:" entries in distribution metadata.
        """
        raise NotImplementedError()

    def _iter_declared_entries_from_record(self) -> Optional[Iterator[str]]:
        try:
            text = self.read_text("RECORD")
        except FileNotFoundError:
            return None
        # This extra Path-str cast normalizes entries.
        return (str(pathlib.Path(row[0])) for row in csv.reader(text.splitlines()))

    def _iter_declared_entries_from_legacy(self) -> Optional[Iterator[str]]:
        try:
            text = self.read_text("installed-files.txt")
        except FileNotFoundError:
            return None
        paths = (p for p in text.splitlines(keepends=False) if p)
        root = self.location
        info = self.info_location
        if root is None or info is None:
            return paths
        try:
            info_rel = pathlib.Path(info).relative_to(root)
        except ValueError:  # info is not relative to root.
            return paths
        if not info_rel.parts:  # info *is* root.
            return paths
        return (
            _convert_installed_files_path(pathlib.Path(p).parts, info_rel.parts)
            for p in paths
        )

    def iter_declared_entries(self) -> Optional[Iterator[str]]:
        """Iterate through file entries declared in this distribution.

        For modern .dist-info distributions, this is the files listed in the
        ``RECORD`` metadata file. For legacy setuptools distributions, this
        comes from ``installed-files.txt``, with entries normalized to be
        compatible with the format used by ``RECORD``.

        :return: An iterator for listed entries, or None if the distribution
            contains neither ``RECORD`` nor ``installed-files.txt``.
        """
        return (
            self._iter_declared_entries_from_record()
            or self._iter_declared_entries_from_legacy()
        )


class BaseEnvironment:
    """An environment containing distributions to introspect."""

    @classmethod
    def default(cls) -> "BaseEnvironment":
        raise NotImplementedError()

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> "BaseEnvironment":
        raise NotImplementedError()

    def get_distribution(self, name: str) -> Optional["BaseDistribution"]:
        """Given a requirement name, return the installed distribution.

        The name may not be normalized. The implementation must canonicalize
        it for lookup.
        """
        raise NotImplementedError()

    def _iter_distributions(self) -> Iterator["BaseDistribution"]:
        """Iterate through installed distributions.

        This function should be implemented by subclass, but never called
        directly. Use the public ``iter_distributions()`` instead, which
        implements additional logic to make sure the distributions are valid.
        """
        raise NotImplementedError()

    def iter_distributions(self) -> Iterator["BaseDistribution"]:
        """Iterate through installed distributions."""
        for dist in self._iter_distributions():
            # Make sure the distribution actually comes from a valid Python
            # packaging distribution. Pip's AdjacentTempDirectory leaves folders
            # e.g. ``~atplotlib.dist-info`` if cleanup was interrupted. The
            # valid project name pattern is taken from PEP 508.
            project_name_valid = re.match(
                r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$",
                dist.canonical_name,
                flags=re.IGNORECASE,
            )
            if not project_name_valid:
                logger.warning(
                    "Ignoring invalid distribution %s (%s)",
                    dist.canonical_name,
                    dist.location,
                )
                continue
            yield dist

    def iter_installed_distributions(
        self,
        local_only: bool = True,
        skip: Container[str] = stdlib_pkgs,
        include_editables: bool = True,
        editables_only: bool = False,
        user_only: bool = False,
    ) -> Iterator[BaseDistribution]:
        """Return a list of installed distributions.

        :param local_only: If True (default), only return installations
            local to the current virtualenv, if in a virtualenv.
        :param skip: An iterable of canonicalized project names to ignore;
            defaults to ``stdlib_pkgs``.
        :param include_editables: If False, don't report editables.
        :param editables_only: If True, only report editables.
        :param user_only: If True, only report installations in the user
            site directory.
        """
        it = self.iter_distributions()
        if local_only:
            it = (d for d in it if d.local)
        if not include_editables:
            it = (d for d in it if not d.editable)
        if editables_only:
            it = (d for d in it if d.editable)
        if user_only:
            it = (d for d in it if d.in_usersite)
        return (d for d in it if d.canonical_name not in skip)


class Wheel(Protocol):
    location: str

    def as_zipfile(self) -> zipfile.ZipFile:
        raise NotImplementedError()


class FilesystemWheel(Wheel):
    def __init__(self, location: str) -> None:
        self.location = location

    def as_zipfile(self) -> zipfile.ZipFile:
        return zipfile.ZipFile(self.location, allowZip64=True)


class MemoryWheel(Wheel):
    def __init__(self, location: str, stream: IO[bytes]) -> None:
        self.location = location
        self.stream = stream

    def as_zipfile(self) -> zipfile.ZipFile:
        return zipfile.ZipFile(self.stream, allowZip64=True)
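A sketch of how the filters in iter_installed_distributions compose, using the default environment; this approximates what a pip-freeze-style listing needs and, like the other sketches here, assumes access to pip's internal (unstable) API.

from pip._internal.metadata import get_default_environment

# List non-editable distributions local to the current environment.
env = get_default_environment()
for dist in env.iter_installed_distributions(local_only=True, include_editables=False):
    print(f"{dist.canonical_name}=={dist.version}")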
llmeval-env/lib/python3.10/site-packages/pip/_internal/metadata/pkg_resources.py
ADDED
@@ -0,0 +1,256 @@
import email.message
import email.parser
import logging
import os
import pathlib
import zipfile
from typing import Collection, Iterable, Iterator, List, Mapping, NamedTuple, Optional

from pip._vendor import pkg_resources
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.exceptions import InvalidWheel, NoneMetadataError, UnsupportedWheel
from pip._internal.utils.misc import display_path
from pip._internal.utils.wheel import parse_wheel, read_wheel_metadata_file

from .base import (
    BaseDistribution,
    BaseEntryPoint,
    BaseEnvironment,
    DistributionVersion,
    InfoPath,
    Wheel,
)

logger = logging.getLogger(__name__)


class EntryPoint(NamedTuple):
    name: str
    value: str
    group: str


class WheelMetadata:
    """IMetadataProvider that reads metadata files from a dictionary.

    This also maps metadata decoding exceptions to our internal exception type.
    """

    def __init__(self, metadata: Mapping[str, bytes], wheel_name: str) -> None:
        self._metadata = metadata
        self._wheel_name = wheel_name

    def has_metadata(self, name: str) -> bool:
        return name in self._metadata

    def get_metadata(self, name: str) -> str:
        try:
            return self._metadata[name].decode()
        except UnicodeDecodeError as e:
            # Augment the default error with the origin of the file.
            raise UnsupportedWheel(
                f"Error decoding metadata for {self._wheel_name}: {e} in {name} file"
            )

    def get_metadata_lines(self, name: str) -> Iterable[str]:
        return pkg_resources.yield_lines(self.get_metadata(name))

    def metadata_isdir(self, name: str) -> bool:
        return False

    def metadata_listdir(self, name: str) -> List[str]:
        return []

    def run_script(self, script_name: str, namespace: str) -> None:
        pass


class Distribution(BaseDistribution):
    def __init__(self, dist: pkg_resources.Distribution) -> None:
        self._dist = dist

    @classmethod
    def from_directory(cls, directory: str) -> "Distribution":
        dist_dir = directory.rstrip(os.sep)

        # Build a PathMetadata object, from path to metadata. :wink:
        base_dir, dist_dir_name = os.path.split(dist_dir)
        metadata = pkg_resources.PathMetadata(base_dir, dist_dir)

        # Determine the correct Distribution object type.
        if dist_dir.endswith(".egg-info"):
            dist_cls = pkg_resources.Distribution
            dist_name = os.path.splitext(dist_dir_name)[0]
        else:
            assert dist_dir.endswith(".dist-info")
            dist_cls = pkg_resources.DistInfoDistribution
            dist_name = os.path.splitext(dist_dir_name)[0].split("-")[0]

        dist = dist_cls(base_dir, project_name=dist_name, metadata=metadata)
        return cls(dist)

    @classmethod
    def from_wheel(cls, wheel: Wheel, name: str) -> "Distribution":
        """Load the distribution from a given wheel.

        :raises InvalidWheel: Whenever loading of the wheel causes a
            :py:exc:`zipfile.BadZipFile` exception to be thrown.
        :raises UnsupportedWheel: If the wheel is a valid zip, but malformed
            internally.
        """
        try:
            with wheel.as_zipfile() as zf:
                info_dir, _ = parse_wheel(zf, name)
                metadata_text = {
                    path.split("/", 1)[-1]: read_wheel_metadata_file(zf, path)
                    for path in zf.namelist()
                    if path.startswith(f"{info_dir}/")
                }
        except zipfile.BadZipFile as e:
            raise InvalidWheel(wheel.location, name) from e
        except UnsupportedWheel as e:
            raise UnsupportedWheel(f"{name} has an invalid wheel, {e}")
        dist = pkg_resources.DistInfoDistribution(
            location=wheel.location,
            metadata=WheelMetadata(metadata_text, wheel.location),
            project_name=name,
        )
        return cls(dist)

    @property
    def location(self) -> Optional[str]:
        return self._dist.location

    @property
    def info_location(self) -> Optional[str]:
        return self._dist.egg_info

    @property
    def installed_by_distutils(self) -> bool:
        # A distutils-installed distribution is provided by FileMetadata. This
        # provider has a "path" attribute not present anywhere else. Not the
        # best introspection logic, but pip has been doing this for a long time.
        try:
            return bool(self._dist._provider.path)
        except AttributeError:
            return False

    @property
    def canonical_name(self) -> NormalizedName:
        return canonicalize_name(self._dist.project_name)

    @property
    def version(self) -> DistributionVersion:
        return parse_version(self._dist.version)

    def is_file(self, path: InfoPath) -> bool:
        return self._dist.has_metadata(str(path))

    def iterdir(self, path: InfoPath) -> Iterator[pathlib.PurePosixPath]:
        name = str(path)
        if not self._dist.has_metadata(name):
            raise FileNotFoundError(name)
        if not self._dist.isdir(name):
            raise NotADirectoryError(name)
        for child in self._dist.metadata_listdir(name):
            yield pathlib.PurePosixPath(path, child)

    def read_text(self, path: InfoPath) -> str:
        name = str(path)
        if not self._dist.has_metadata(name):
            raise FileNotFoundError(name)
        content = self._dist.get_metadata(name)
        if content is None:
            raise NoneMetadataError(self, name)
        return content

    def iter_entry_points(self) -> Iterable[BaseEntryPoint]:
        for group, entries in self._dist.get_entry_map().items():
            for name, entry_point in entries.items():
                name, _, value = str(entry_point).partition("=")
                yield EntryPoint(name=name.strip(), value=value.strip(), group=group)

    @property
    def metadata(self) -> email.message.Message:
        """
        :raises NoneMetadataError: if the distribution reports `has_metadata()`
            True but `get_metadata()` returns None.
        """
        if isinstance(self._dist, pkg_resources.DistInfoDistribution):
            metadata_name = "METADATA"
        else:
            metadata_name = "PKG-INFO"
        try:
            metadata = self.read_text(metadata_name)
        except FileNotFoundError:
            if self.location:
                displaying_path = display_path(self.location)
            else:
                displaying_path = repr(self.location)
            logger.warning("No metadata found in %s", displaying_path)
            metadata = ""
        feed_parser = email.parser.FeedParser()
        feed_parser.feed(metadata)
        return feed_parser.close()

    def iter_dependencies(self, extras: Collection[str] = ()) -> Iterable[Requirement]:
        if extras:  # pkg_resources raises on invalid extras, so we sanitize.
            extras = frozenset(extras).intersection(self._dist.extras)
        return self._dist.requires(extras)

    def iter_provided_extras(self) -> Iterable[str]:
        return self._dist.extras


class Environment(BaseEnvironment):
    def __init__(self, ws: pkg_resources.WorkingSet) -> None:
        self._ws = ws

    @classmethod
    def default(cls) -> BaseEnvironment:
        return cls(pkg_resources.working_set)

    @classmethod
    def from_paths(cls, paths: Optional[List[str]]) -> BaseEnvironment:
        return cls(pkg_resources.WorkingSet(paths))

    def _search_distribution(self, name: str) -> Optional[BaseDistribution]:
        """Find a distribution matching the ``name`` in the environment.

        This searches from *all* distributions available in the environment, to
        match the behavior of ``pkg_resources.get_distribution()``.
        """
        canonical_name = canonicalize_name(name)
        for dist in self.iter_distributions():
            if dist.canonical_name == canonical_name:
                return dist
        return None

    def get_distribution(self, name: str) -> Optional[BaseDistribution]:
        # Search the distribution by looking through the working set.
        dist = self._search_distribution(name)
        if dist:
            return dist

        # If distribution could not be found, call working_set.require to
        # update the working set, and try to find the distribution again.
        # This might happen for e.g. when you install a package twice, once
        # using setup.py develop and again using setup.py install. Now when
        # running pip uninstall twice, the package gets removed from the
        # working set in the first uninstall, so we have to populate the
        # working set again so that pip knows about it and the package gets
        # picked up and is successfully uninstalled the second time too.
        try:
            # We didn't pass in any version specifiers, so this can never
            # raise pkg_resources.VersionConflict.
            self._ws.require(name)
        except pkg_resources.DistributionNotFound:
            return None
        return self._search_distribution(name)

    def _iter_distributions(self) -> Iterator[BaseDistribution]:
        for dist in self._ws:
            yield Distribution(dist)
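The pkg_resources backend can also be pointed at an arbitrary set of import paths rather than sys.path, via the get_environment helper shown earlier; the path below is a placeholder, and this again relies on pip internals rather than a supported API.

from pip._internal.metadata import get_environment

# Inspect a specific site-packages directory (placeholder path).
env = get_environment(["/tmp/some-venv/lib/python3.10/site-packages"])
print(sorted(d.canonical_name for d in env.iter_distributions()))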
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__init__.py
ADDED
@@ -0,0 +1,2 @@
"""A package that contains models that represent entities.
"""
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc
ADDED
Binary file (1.41 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc
ADDED
Binary file (7.29 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc
ADDED
Binary file (2.74 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc
ADDED
Binary file (1.23 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc
ADDED
Binary file (10.2 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc
ADDED
Binary file (3.48 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc
ADDED
Binary file (1.69 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc
ADDED
Binary file (3.44 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc
ADDED
Binary file (4.36 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/candidate.py
ADDED
@@ -0,0 +1,34 @@
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.models.link import Link
from pip._internal.utils.models import KeyBasedCompareMixin


class InstallationCandidate(KeyBasedCompareMixin):
    """Represents a potential "candidate" for installation."""

    __slots__ = ["name", "version", "link"]

    def __init__(self, name: str, version: str, link: Link) -> None:
        self.name = name
        self.version = parse_version(version)
        self.link = link

        super().__init__(
            key=(self.name, self.version, self.link),
            defining_class=InstallationCandidate,
        )

    def __repr__(self) -> str:
        return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
            self.name,
            self.version,
            self.link,
        )

    def __str__(self) -> str:
        return "{!r} candidate (version {} at {})".format(
            self.name,
            self.version,
            self.link,
        )

llmeval-env/lib/python3.10/site-packages/pip/_internal/models/direct_url.py
ADDED
@@ -0,0 +1,220 @@
""" PEP 610 """
import json
import re
import urllib.parse
from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union

__all__ = [
    "DirectUrl",
    "DirectUrlValidationError",
    "DirInfo",
    "ArchiveInfo",
    "VcsInfo",
]

T = TypeVar("T")

DIRECT_URL_METADATA_NAME = "direct_url.json"
ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")


class DirectUrlValidationError(Exception):
    pass


def _get(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> Optional[T]:
    """Get value from dictionary and verify expected type."""
    if key not in d:
        return default
    value = d[key]
    if not isinstance(value, expected_type):
        raise DirectUrlValidationError(
            "{!r} has unexpected type for {} (expected {})".format(
                value, key, expected_type
            )
        )
    return value


def _get_required(
    d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
) -> T:
    value = _get(d, expected_type, key, default)
    if value is None:
        raise DirectUrlValidationError(f"{key} must have a value")
    return value


def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
    infos = [info for info in infos if info is not None]
    if not infos:
        raise DirectUrlValidationError(
            "missing one of archive_info, dir_info, vcs_info"
        )
    if len(infos) > 1:
        raise DirectUrlValidationError(
            "more than one of archive_info, dir_info, vcs_info"
        )
    assert infos[0] is not None
    return infos[0]


def _filter_none(**kwargs: Any) -> Dict[str, Any]:
    """Make dict excluding None values."""
    return {k: v for k, v in kwargs.items() if v is not None}


class VcsInfo:
    name = "vcs_info"

    def __init__(
        self,
        vcs: str,
        commit_id: str,
        requested_revision: Optional[str] = None,
        resolved_revision: Optional[str] = None,
        resolved_revision_type: Optional[str] = None,
    ) -> None:
        self.vcs = vcs
        self.requested_revision = requested_revision
        self.commit_id = commit_id
        self.resolved_revision = resolved_revision
        self.resolved_revision_type = resolved_revision_type

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
        if d is None:
            return None
        return cls(
            vcs=_get_required(d, str, "vcs"),
            commit_id=_get_required(d, str, "commit_id"),
            requested_revision=_get(d, str, "requested_revision"),
            resolved_revision=_get(d, str, "resolved_revision"),
            resolved_revision_type=_get(d, str, "resolved_revision_type"),
        )

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(
            vcs=self.vcs,
            requested_revision=self.requested_revision,
            commit_id=self.commit_id,
            resolved_revision=self.resolved_revision,
            resolved_revision_type=self.resolved_revision_type,
        )


class ArchiveInfo:
    name = "archive_info"

    def __init__(
        self,
        hash: Optional[str] = None,
    ) -> None:
        self.hash = hash

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
        if d is None:
            return None
        return cls(hash=_get(d, str, "hash"))

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(hash=self.hash)


class DirInfo:
    name = "dir_info"

    def __init__(
        self,
        editable: bool = False,
    ) -> None:
        self.editable = editable

    @classmethod
    def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
        if d is None:
            return None
        return cls(editable=_get_required(d, bool, "editable", default=False))

    def _to_dict(self) -> Dict[str, Any]:
        return _filter_none(editable=self.editable or None)


InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]


class DirectUrl:
    def __init__(
        self,
        url: str,
        info: InfoType,
        subdirectory: Optional[str] = None,
    ) -> None:
        self.url = url
        self.info = info
        self.subdirectory = subdirectory

    def _remove_auth_from_netloc(self, netloc: str) -> str:
        if "@" not in netloc:
            return netloc
        user_pass, netloc_no_user_pass = netloc.split("@", 1)
        if (
            isinstance(self.info, VcsInfo)
            and self.info.vcs == "git"
            and user_pass == "git"
        ):
            return netloc
        if ENV_VAR_RE.match(user_pass):
            return netloc
        return netloc_no_user_pass

    @property
    def redacted_url(self) -> str:
        """url with user:password part removed unless it is formed with
        environment variables as specified in PEP 610, or it is ``git``
        in the case of a git URL.
        """
        purl = urllib.parse.urlsplit(self.url)
        netloc = self._remove_auth_from_netloc(purl.netloc)
        surl = urllib.parse.urlunsplit(
            (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
        )
        return surl

    def validate(self) -> None:
        self.from_dict(self.to_dict())

    @classmethod
    def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
        return DirectUrl(
            url=_get_required(d, str, "url"),
            subdirectory=_get(d, str, "subdirectory"),
            info=_exactly_one_of(
                [
                    ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
                    DirInfo._from_dict(_get(d, dict, "dir_info")),
                    VcsInfo._from_dict(_get(d, dict, "vcs_info")),
                ]
            ),
        )

    def to_dict(self) -> Dict[str, Any]:
        res = _filter_none(
            url=self.redacted_url,
            subdirectory=self.subdirectory,
        )
        res[self.info.name] = self.info._to_dict()
        return res

    @classmethod
    def from_json(cls, s: str) -> "DirectUrl":
        return cls.from_dict(json.loads(s))

    def to_json(self) -> str:
        return json.dumps(self.to_dict(), sort_keys=True)

    def is_local_editable(self) -> bool:
        return isinstance(self.info, DirInfo) and self.info.editable

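As a brief illustration of the PEP 610 model above, the JSON round trip through DirectUrl.from_json/to_json can be exercised directly. This is a minimal sketch, not part of the diff; the payload URL and hash value are made-up example data, and it assumes pip is importable from this environment.

from pip._internal.models.direct_url import DirectUrl

# Hypothetical direct_url.json payload for an archive install (example values only).
payload = '{"url": "https://example.com/pkg-1.0.tar.gz", "archive_info": {"hash": "sha256=0123abcd"}}'
direct_url = DirectUrl.from_json(payload)
direct_url.validate()                  # round-trips through to_dict()/from_dict()
print(direct_url.to_json())            # redacted-url, sorted-key JSON
print(direct_url.is_local_editable())  # False: archive_info, not an editable dir_info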
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/format_control.py
ADDED
@@ -0,0 +1,80 @@
from typing import FrozenSet, Optional, Set

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.exceptions import CommandError


class FormatControl:
    """Helper for managing formats from which a package can be installed."""

    __slots__ = ["no_binary", "only_binary"]

    def __init__(
        self,
        no_binary: Optional[Set[str]] = None,
        only_binary: Optional[Set[str]] = None,
    ) -> None:
        if no_binary is None:
            no_binary = set()
        if only_binary is None:
            only_binary = set()

        self.no_binary = no_binary
        self.only_binary = only_binary

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, self.__class__):
            return NotImplemented

        if self.__slots__ != other.__slots__:
            return False

        return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)

    def __repr__(self) -> str:
        return "{}({}, {})".format(
            self.__class__.__name__, self.no_binary, self.only_binary
        )

    @staticmethod
    def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
        if value.startswith("-"):
            raise CommandError(
                "--no-binary / --only-binary option requires 1 argument."
            )
        new = value.split(",")
        while ":all:" in new:
            other.clear()
            target.clear()
            target.add(":all:")
            del new[: new.index(":all:") + 1]
            # Without a none, we want to discard everything as :all: covers it
            if ":none:" not in new:
                return
        for name in new:
            if name == ":none:":
                target.clear()
                continue
            name = canonicalize_name(name)
            other.discard(name)
            target.add(name)

    def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
        result = {"binary", "source"}
        if canonical_name in self.only_binary:
            result.discard("source")
        elif canonical_name in self.no_binary:
            result.discard("binary")
        elif ":all:" in self.only_binary:
            result.discard("source")
        elif ":all:" in self.no_binary:
            result.discard("binary")
        return frozenset(result)

    def disallow_binaries(self) -> None:
        self.handle_mutual_excludes(
            ":all:",
            self.no_binary,
            self.only_binary,
        )

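A short usage sketch of the FormatControl helper defined above (illustrative only; the package name is arbitrary and the snippet assumes pip is importable):

from pip._internal.models.format_control import FormatControl

fc = FormatControl()
fc.disallow_binaries()                     # same effect as --no-binary :all:
print(fc.get_allowed_formats("requests"))  # frozenset({'source'})
FormatControl.handle_mutual_excludes(":none:", fc.no_binary, fc.only_binary)
print(fc.get_allowed_formats("requests"))  # frozenset({'binary', 'source'})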
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/index.py
ADDED
@@ -0,0 +1,28 @@
import urllib.parse


class PackageIndex:
    """Represents a Package Index and provides easier access to endpoints"""

    __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]

    def __init__(self, url: str, file_storage_domain: str) -> None:
        super().__init__()
        self.url = url
        self.netloc = urllib.parse.urlsplit(url).netloc
        self.simple_url = self._url_for_path("simple")
        self.pypi_url = self._url_for_path("pypi")

        # This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls only necessary until PyPI can
        # block such packages themselves
        self.file_storage_domain = file_storage_domain

    def _url_for_path(self, path: str) -> str:
        return urllib.parse.urljoin(self.url, path)


PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
TestPyPI = PackageIndex(
    "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
)

llmeval-env/lib/python3.10/site-packages/pip/_internal/models/link.py
ADDED
@@ -0,0 +1,288 @@
import functools
import logging
import os
import posixpath
import re
import urllib.parse
from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union

from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
    redact_auth_from_url,
    split_auth_from_netloc,
    splitext,
)
from pip._internal.utils.models import KeyBasedCompareMixin
from pip._internal.utils.urls import path_to_url, url_to_path

if TYPE_CHECKING:
    from pip._internal.index.collector import HTMLPage

logger = logging.getLogger(__name__)


_SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5")


class Link(KeyBasedCompareMixin):
    """Represents a parsed link from a Package Index's simple URL"""

    __slots__ = [
        "_parsed_url",
        "_url",
        "comes_from",
        "requires_python",
        "yanked_reason",
        "cache_link_parsing",
    ]

    def __init__(
        self,
        url: str,
        comes_from: Optional[Union[str, "HTMLPage"]] = None,
        requires_python: Optional[str] = None,
        yanked_reason: Optional[str] = None,
        cache_link_parsing: bool = True,
    ) -> None:
        """
        :param url: url of the resource pointed to (href of the link)
        :param comes_from: instance of HTMLPage where the link was found,
            or string.
        :param requires_python: String containing the `Requires-Python`
            metadata field, specified in PEP 345. This may be specified by
            a data-requires-python attribute in the HTML link tag, as
            described in PEP 503.
        :param yanked_reason: the reason the file has been yanked, if the
            file has been yanked, or None if the file hasn't been yanked.
            This is the value of the "data-yanked" attribute, if present, in
            a simple repository HTML link. If the file has been yanked but
            no reason was provided, this should be the empty string. See
            PEP 592 for more information and the specification.
        :param cache_link_parsing: A flag that is used elsewhere to determine
            whether resources retrieved from this link should be cached. PyPI
            index urls should generally have this set to False, for example.
        """

        # url can be a UNC windows share
        if url.startswith("\\\\"):
            url = path_to_url(url)

        self._parsed_url = urllib.parse.urlsplit(url)
        # Store the url as a private attribute to prevent accidentally
        # trying to set a new value.
        self._url = url

        self.comes_from = comes_from
        self.requires_python = requires_python if requires_python else None
        self.yanked_reason = yanked_reason

        super().__init__(key=url, defining_class=Link)

        self.cache_link_parsing = cache_link_parsing

    def __str__(self) -> str:
        if self.requires_python:
            rp = f" (requires-python:{self.requires_python})"
        else:
            rp = ""
        if self.comes_from:
            return "{} (from {}){}".format(
                redact_auth_from_url(self._url), self.comes_from, rp
            )
        else:
            return redact_auth_from_url(str(self._url))

    def __repr__(self) -> str:
        return f"<Link {self}>"

    @property
    def url(self) -> str:
        return self._url

    @property
    def filename(self) -> str:
        path = self.path.rstrip("/")
        name = posixpath.basename(path)
        if not name:
            # Make sure we don't leak auth information if the netloc
            # includes a username and password.
            netloc, user_pass = split_auth_from_netloc(self.netloc)
            return netloc

        name = urllib.parse.unquote(name)
        assert name, f"URL {self._url!r} produced no filename"
        return name

    @property
    def file_path(self) -> str:
        return url_to_path(self.url)

    @property
    def scheme(self) -> str:
        return self._parsed_url.scheme

    @property
    def netloc(self) -> str:
        """
        This can contain auth information.
        """
        return self._parsed_url.netloc

    @property
    def path(self) -> str:
        return urllib.parse.unquote(self._parsed_url.path)

    def splitext(self) -> Tuple[str, str]:
        return splitext(posixpath.basename(self.path.rstrip("/")))

    @property
    def ext(self) -> str:
        return self.splitext()[1]

    @property
    def url_without_fragment(self) -> str:
        scheme, netloc, path, query, fragment = self._parsed_url
        return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))

    _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")

    @property
    def egg_fragment(self) -> Optional[str]:
        match = self._egg_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")

    @property
    def subdirectory_fragment(self) -> Optional[str]:
        match = self._subdirectory_fragment_re.search(self._url)
        if not match:
            return None
        return match.group(1)

    _hash_re = re.compile(
        r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES))
    )

    @property
    def hash(self) -> Optional[str]:
        match = self._hash_re.search(self._url)
        if match:
            return match.group(2)
        return None

    @property
    def hash_name(self) -> Optional[str]:
        match = self._hash_re.search(self._url)
        if match:
            return match.group(1)
        return None

    @property
    def show_url(self) -> str:
        return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])

    @property
    def is_file(self) -> bool:
        return self.scheme == "file"

    def is_existing_dir(self) -> bool:
        return self.is_file and os.path.isdir(self.file_path)

    @property
    def is_wheel(self) -> bool:
        return self.ext == WHEEL_EXTENSION

    @property
    def is_vcs(self) -> bool:
        from pip._internal.vcs import vcs

        return self.scheme in vcs.all_schemes

    @property
    def is_yanked(self) -> bool:
        return self.yanked_reason is not None

    @property
    def has_hash(self) -> bool:
        return self.hash_name is not None

    def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
        """
        Return True if the link has a hash and it is allowed.
        """
        if hashes is None or not self.has_hash:
            return False
        # Assert non-None so mypy knows self.hash_name and self.hash are str.
        assert self.hash_name is not None
        assert self.hash is not None

        return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)


class _CleanResult(NamedTuple):
    """Convert link for equivalency check.

    This is used in the resolver to check whether two URL-specified requirements
    likely point to the same distribution and can be considered equivalent. This
    equivalency logic avoids comparing URLs literally, which can be too strict
    (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.

    Currently this does three things:

    1. Drop the basic auth part. This is technically wrong since a server can
       serve different content based on auth, but if it does that, it is even
       impossible to guarantee two URLs without auth are equivalent, since
       the user can input different auth information when prompted. So the
       practical solution is to assume the auth doesn't affect the response.
    2. Parse the query to avoid the ordering issue. Note that ordering under the
       same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
       still considered different.
    3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
       hash values, since it should have no impact the downloaded content. Note
       that this drops the "egg=" part historically used to denote the requested
       project (and extras), which is wrong in the strictest sense, but too many
       people are supplying it inconsistently to cause superfluous resolution
       conflicts, so we choose to also ignore them.
    """

    parsed: urllib.parse.SplitResult
    query: Dict[str, List[str]]
    subdirectory: str
    hashes: Dict[str, str]


def _clean_link(link: Link) -> _CleanResult:
    parsed = link._parsed_url
    netloc = parsed.netloc.rsplit("@", 1)[-1]
    # According to RFC 8089, an empty host in file: means localhost.
    if parsed.scheme == "file" and not netloc:
        netloc = "localhost"
    fragment = urllib.parse.parse_qs(parsed.fragment)
    if "egg" in fragment:
        logger.debug("Ignoring egg= fragment in %s", link)
    try:
        # If there are multiple subdirectory values, use the first one.
        # This matches the behavior of Link.subdirectory_fragment.
        subdirectory = fragment["subdirectory"][0]
    except (IndexError, KeyError):
        subdirectory = ""
    # If there are multiple hash values under the same algorithm, use the
    # first one. This matches the behavior of Link.hash_value.
    hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
    return _CleanResult(
        parsed=parsed._replace(netloc=netloc, query="", fragment=""),
        query=urllib.parse.parse_qs(parsed.query),
        subdirectory=subdirectory,
        hashes=hashes,
    )


@functools.lru_cache(maxsize=None)
def links_equivalent(link1: Link, link2: Link) -> bool:
    return _clean_link(link1) == _clean_link(link2)

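For orientation, a small sketch of how the Link model above parses a simple-index style URL (the URL is illustrative, not taken from the diff; assumes pip is importable):

from pip._internal.models.link import Link

link = Link("https://files.example.org/packages/pip-21.0-py3-none-any.whl#sha256=0123abcd")
print(link.filename)              # pip-21.0-py3-none-any.whl
print(link.is_wheel)              # True: extension matches WHEEL_EXTENSION
print(link.hash_name, link.hash)  # sha256 0123abcd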
llmeval-env/lib/python3.10/site-packages/pip/_internal/models/scheme.py
ADDED
@@ -0,0 +1,31 @@
"""
For types associated with installation schemes.

For a general overview of available schemes and their context, see
https://docs.python.org/3/install/index.html#alternate-installation.
"""


SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]


class Scheme:
    """A Scheme holds paths which are used as the base directories for
    artifacts associated with a Python package.
    """

    __slots__ = SCHEME_KEYS

    def __init__(
        self,
        platlib: str,
        purelib: str,
        headers: str,
        scripts: str,
        data: str,
    ) -> None:
        self.platlib = platlib
        self.purelib = purelib
        self.headers = headers
        self.scripts = scripts
        self.data = data

llmeval-env/lib/python3.10/site-packages/pip/_internal/models/search_scope.py
ADDED
@@ -0,0 +1,129 @@
import itertools
import logging
import os
import posixpath
import urllib.parse
from typing import List

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.models.index import PyPI
from pip._internal.utils.compat import has_tls
from pip._internal.utils.misc import normalize_path, redact_auth_from_url

logger = logging.getLogger(__name__)


class SearchScope:

    """
    Encapsulates the locations that pip is configured to search.
    """

    __slots__ = ["find_links", "index_urls"]

    @classmethod
    def create(
        cls,
        find_links: List[str],
        index_urls: List[str],
    ) -> "SearchScope":
        """
        Create a SearchScope object after normalizing the `find_links`.
        """
        # Build find_links. If an argument starts with ~, it may be
        # a local file relative to a home directory. So try normalizing
        # it and if it exists, use the normalized version.
        # This is deliberately conservative - it might be fine just to
        # blindly normalize anything starting with a ~...
        built_find_links: List[str] = []
        for link in find_links:
            if link.startswith("~"):
                new_link = normalize_path(link)
                if os.path.exists(new_link):
                    link = new_link
            built_find_links.append(link)

        # If we don't have TLS enabled, then WARN if anyplace we're looking
        # relies on TLS.
        if not has_tls():
            for link in itertools.chain(index_urls, built_find_links):
                parsed = urllib.parse.urlparse(link)
                if parsed.scheme == "https":
                    logger.warning(
                        "pip is configured with locations that require "
                        "TLS/SSL, however the ssl module in Python is not "
                        "available."
                    )
                    break

        return cls(
            find_links=built_find_links,
            index_urls=index_urls,
        )

    def __init__(
        self,
        find_links: List[str],
        index_urls: List[str],
    ) -> None:
        self.find_links = find_links
        self.index_urls = index_urls

    def get_formatted_locations(self) -> str:
        lines = []
        redacted_index_urls = []
        if self.index_urls and self.index_urls != [PyPI.simple_url]:
            for url in self.index_urls:

                redacted_index_url = redact_auth_from_url(url)

                # Parse the URL
                purl = urllib.parse.urlsplit(redacted_index_url)

                # URL is generally invalid if scheme and netloc is missing
                # there are issues with Python and URL parsing, so this test
                # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
                # always parse invalid URLs correctly - it should raise
                # exceptions for malformed URLs
                if not purl.scheme and not purl.netloc:
                    logger.warning(
                        'The index url "%s" seems invalid, please provide a scheme.',
                        redacted_index_url,
                    )

                redacted_index_urls.append(redacted_index_url)

            lines.append(
                "Looking in indexes: {}".format(", ".join(redacted_index_urls))
            )

        if self.find_links:
            lines.append(
                "Looking in links: {}".format(
                    ", ".join(redact_auth_from_url(url) for url in self.find_links)
                )
            )
        return "\n".join(lines)

    def get_index_urls_locations(self, project_name: str) -> List[str]:
        """Returns the locations found via self.index_urls

        Checks the url_name on the main (first in the list) index and
        use this url_name to produce all locations
        """

        def mkurl_pypi_url(url: str) -> str:
            loc = posixpath.join(
                url, urllib.parse.quote(canonicalize_name(project_name))
            )
            # For maximum compatibility with easy_install, ensure the path
            # ends in a trailing slash. Although this isn't in the spec
            # (and PyPI can handle it without the slash) some other index
            # implementations might break if they relied on easy_install's
            # behavior.
            if not loc.endswith("/"):
                loc = loc + "/"
            return loc

        return [mkurl_pypi_url(url) for url in self.index_urls]

llmeval-env/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py
ADDED
@@ -0,0 +1,51 @@
from typing import Optional

from pip._internal.models.format_control import FormatControl


class SelectionPreferences:
    """
    Encapsulates the candidate selection preferences for downloading
    and installing files.
    """

    __slots__ = [
        "allow_yanked",
        "allow_all_prereleases",
        "format_control",
        "prefer_binary",
        "ignore_requires_python",
    ]

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        allow_yanked: bool,
        allow_all_prereleases: bool = False,
        format_control: Optional[FormatControl] = None,
        prefer_binary: bool = False,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """Create a SelectionPreferences object.

        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param format_control: A FormatControl object or None. Used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param prefer_binary: Whether to prefer an old, but valid, binary
            dist over a new source dist.
        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self.allow_yanked = allow_yanked
        self.allow_all_prereleases = allow_all_prereleases
        self.format_control = format_control
        self.prefer_binary = prefer_binary
        self.ignore_requires_python = ignore_requires_python

llmeval-env/lib/python3.10/site-packages/pip/_internal/models/target_python.py
ADDED
@@ -0,0 +1,110 @@
import sys
from typing import List, Optional, Tuple

from pip._vendor.packaging.tags import Tag

from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
from pip._internal.utils.misc import normalize_version_info


class TargetPython:

    """
    Encapsulates the properties of a Python interpreter one is targeting
    for a package install, download, etc.
    """

    __slots__ = [
        "_given_py_version_info",
        "abis",
        "implementation",
        "platforms",
        "py_version",
        "py_version_info",
        "_valid_tags",
    ]

    def __init__(
        self,
        platforms: Optional[List[str]] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
        abis: Optional[List[str]] = None,
        implementation: Optional[str] = None,
    ) -> None:
        """
        :param platforms: A list of strings or None. If None, searches for
            packages that are supported by the current system. Otherwise, will
            find packages that can be built on the platforms passed in. These
            packages will only be downloaded for distribution: they will
            not be built locally.
        :param py_version_info: An optional tuple of ints representing the
            Python version information to use (e.g. `sys.version_info[:3]`).
            This can have length 1, 2, or 3 when provided.
        :param abis: A list of strings or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        :param implementation: A string or None. This is passed to
            compatibility_tags.py's get_supported() function as is.
        """
        # Store the given py_version_info for when we call get_supported().
        self._given_py_version_info = py_version_info

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        py_version = ".".join(map(str, py_version_info[:2]))

        self.abis = abis
        self.implementation = implementation
        self.platforms = platforms
        self.py_version = py_version
        self.py_version_info = py_version_info

        # This is used to cache the return value of get_tags().
        self._valid_tags: Optional[List[Tag]] = None

    def format_given(self) -> str:
        """
        Format the given, non-None attributes for display.
        """
        display_version = None
        if self._given_py_version_info is not None:
            display_version = ".".join(
                str(part) for part in self._given_py_version_info
            )

        key_values = [
            ("platforms", self.platforms),
            ("version_info", display_version),
            ("abis", self.abis),
            ("implementation", self.implementation),
        ]
        return " ".join(
            f"{key}={value!r}" for key, value in key_values if value is not None
        )

    def get_tags(self) -> List[Tag]:
        """
        Return the supported PEP 425 tags to check wheel candidates against.

        The tags are returned in order of preference (most preferred first).
        """
        if self._valid_tags is None:
            # Pass versions=None if no py_version_info was given since
            # versions=None uses special default logic.
            py_version_info = self._given_py_version_info
            if py_version_info is None:
                version = None
            else:
                version = version_info_to_nodot(py_version_info)

            tags = get_supported(
                version=version,
                platforms=self.platforms,
                abis=self.abis,
                impl=self.implementation,
            )
            self._valid_tags = tags

        return self._valid_tags

llmeval-env/lib/python3.10/site-packages/pip/_internal/models/wheel.py
ADDED
@@ -0,0 +1,89 @@
"""Represents a wheel file and provides access to the various parts of the
name that have meaning.
"""
import re
from typing import Dict, Iterable, List

from pip._vendor.packaging.tags import Tag

from pip._internal.exceptions import InvalidWheelFilename


class Wheel:
    """A wheel file"""

    wheel_file_re = re.compile(
        r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
        ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
        \.whl|\.dist-info)$""",
        re.VERBOSE,
    )

    def __init__(self, filename: str) -> None:
        """
        :raises InvalidWheelFilename: when the filename is invalid for a wheel
        """
        wheel_info = self.wheel_file_re.match(filename)
        if not wheel_info:
            raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
        self.filename = filename
        self.name = wheel_info.group("name").replace("_", "-")
        # we'll assume "_" means "-" due to wheel naming scheme
        # (https://github.com/pypa/pip/issues/1150)
        self.version = wheel_info.group("ver").replace("_", "-")
        self.build_tag = wheel_info.group("build")
        self.pyversions = wheel_info.group("pyver").split(".")
        self.abis = wheel_info.group("abi").split(".")
        self.plats = wheel_info.group("plat").split(".")

        # All the tag combinations from this file
        self.file_tags = {
            Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
        }

    def get_formatted_file_tags(self) -> List[str]:
        """Return the wheel's tags as a sorted list of strings."""
        return sorted(str(tag) for tag in self.file_tags)

    def support_index_min(self, tags: List[Tag]) -> int:
        """Return the lowest index that one of the wheel's file_tag combinations
        achieves in the given list of supported tags.

        For example, if there are 8 supported tags and one of the file tags
        is first in the list, then return 0.

        :param tags: the PEP 425 tags to check the wheel against, in order
            with most preferred first.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(tags.index(tag) for tag in self.file_tags if tag in tags)

    def find_most_preferred_tag(
        self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
    ) -> int:
        """Return the priority of the most preferred tag that one of the wheel's file
        tag combinations achieves in the given list of supported tags using the given
        tag_to_priority mapping, where lower priorities are more-preferred.

        This is used in place of support_index_min in some cases in order to avoid
        an expensive linear scan of a large list of tags.

        :param tags: the PEP 425 tags to check the wheel against.
        :param tag_to_priority: a mapping from tag to priority of that tag, where
            lower is more preferred.

        :raises ValueError: If none of the wheel's file tags match one of
            the supported tags.
        """
        return min(
            tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
        )

    def supported(self, tags: Iterable[Tag]) -> bool:
        """Return whether the wheel is compatible with one of the given tags.

        :param tags: the PEP 425 tags to check the wheel against.
        """
        return not self.file_tags.isdisjoint(tags)

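The Wheel filename parser above can be exercised like this (illustrative filename and tag; a minimal sketch assuming pip and its vendored packaging are importable, not part of the diff):

from pip._vendor.packaging.tags import Tag
from pip._internal.models.wheel import Wheel

w = Wheel("pip-21.0-py3-none-any.whl")
print(w.name, w.version)                         # pip 21.0
print(w.get_formatted_file_tags())               # ['py3-none-any']
print(w.supported([Tag("py3", "none", "any")]))  # True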
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (197 Bytes).
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/__pycache__/base.cpython-310.pyc
ADDED
Binary file (1.05 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/base.py
ADDED
@@ -0,0 +1,20 @@
from typing import Callable, List, Optional

from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet

InstallRequirementProvider = Callable[
    [str, Optional[InstallRequirement]], InstallRequirement
]


class BaseResolver:
    def resolve(
        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
    ) -> RequirementSet:
        raise NotImplementedError()

    def get_installation_order(
        self, req_set: RequirementSet
    ) -> List[InstallRequirement]:
        raise NotImplementedError()

llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__init__.py
ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (204 Bytes).
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/__pycache__/resolver.cpython-310.pyc
ADDED
Binary file (12.3 kB).
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/legacy/resolver.py
ADDED
@@ -0,0 +1,467 @@
"""Dependency Resolution

The dependency resolution in pip is performed as follows:

for top-level requirements:
    a. only one spec allowed per project, regardless of conflicts or not.
       otherwise a "double requirement" exception is raised
    b. they override sub-dependency requirements.
for sub-dependencies
    a. "first found, wins" (where the order is breadth first)
"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import logging
import sys
from collections import defaultdict
from itertools import chain
from typing import DefaultDict, Iterable, List, Optional, Set, Tuple

from pip._vendor.packaging import specifiers
from pip._vendor.packaging.requirements import Requirement

from pip._internal.cache import WheelCache
from pip._internal.exceptions import (
    BestVersionAlreadyInstalled,
    DistributionNotFound,
    HashError,
    HashErrors,
    NoneMetadataError,
    UnsupportedPythonVersion,
)
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution
from pip._internal.models.link import Link
from pip._internal.operations.prepare import RequirementPreparer
from pip._internal.req.req_install import (
    InstallRequirement,
    check_invalid_constraint_type,
)
from pip._internal.req.req_set import RequirementSet
from pip._internal.resolution.base import BaseResolver, InstallRequirementProvider
from pip._internal.utils.compatibility_tags import get_supported
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import normalize_version_info
from pip._internal.utils.packaging import check_requires_python

logger = logging.getLogger(__name__)

DiscoveredDependencies = DefaultDict[str, List[InstallRequirement]]


def _check_dist_requires_python(
    dist: BaseDistribution,
    version_info: Tuple[int, int, int],
    ignore_requires_python: bool = False,
) -> None:
    """
    Check whether the given Python version is compatible with a distribution's
    "Requires-Python" value.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether to ignore the "Requires-Python"
        value if the given Python version isn't compatible.

    :raises UnsupportedPythonVersion: When the given Python version isn't
        compatible.
    """
    # This idiosyncratically converts the SpecifierSet to str and let
    # check_requires_python then parse it again into SpecifierSet. But this
    # is the legacy resolver so I'm just not going to bother refactoring.
    try:
        requires_python = str(dist.requires_python)
    except FileNotFoundError as e:
        raise NoneMetadataError(dist, str(e))
    try:
        is_compatible = check_requires_python(
            requires_python,
            version_info=version_info,
        )
    except specifiers.InvalidSpecifier as exc:
        logger.warning(
            "Package %r has an invalid Requires-Python: %s", dist.raw_name, exc
        )
        return

    if is_compatible:
        return

    version = ".".join(map(str, version_info))
    if ignore_requires_python:
        logger.debug(
            "Ignoring failed Requires-Python check for package %r: %s not in %r",
            dist.raw_name,
            version,
            requires_python,
        )
        return

    raise UnsupportedPythonVersion(
        "Package {!r} requires a different Python: {} not in {!r}".format(
            dist.raw_name, version, requires_python
        )
    )


class Resolver(BaseResolver):
    """Resolves which packages need to be installed/uninstalled to perform \
    the requested operation without breaking the requirements of any package.
    """

    _allowed_strategies = {"eager", "only-if-needed", "to-satisfy-only"}

    def __init__(
        self,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        wheel_cache: Optional[WheelCache],
        make_install_req: InstallRequirementProvider,
        use_user_site: bool,
        ignore_dependencies: bool,
        ignore_installed: bool,
        ignore_requires_python: bool,
        force_reinstall: bool,
        upgrade_strategy: str,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ) -> None:
        super().__init__()
        assert upgrade_strategy in self._allowed_strategies

        if py_version_info is None:
            py_version_info = sys.version_info[:3]
        else:
            py_version_info = normalize_version_info(py_version_info)

        self._py_version_info = py_version_info

        self.preparer = preparer
        self.finder = finder
        self.wheel_cache = wheel_cache

        self.upgrade_strategy = upgrade_strategy
        self.force_reinstall = force_reinstall
        self.ignore_dependencies = ignore_dependencies
        self.ignore_installed = ignore_installed
        self.ignore_requires_python = ignore_requires_python
        self.use_user_site = use_user_site
        self._make_install_req = make_install_req

        self._discovered_dependencies: DiscoveredDependencies = defaultdict(list)

    def resolve(
        self, root_reqs: List[InstallRequirement], check_supported_wheels: bool
    ) -> RequirementSet:
        """Resolve what operations need to be done

        As a side-effect of this method, the packages (and their dependencies)
        are downloaded, unpacked and prepared for installation. This
        preparation is done by ``pip.operations.prepare``.

        Once PyPI has static dependency metadata available, it would be
        possible to move the preparation to become a step separated from
        dependency resolution.
        """
        requirement_set = RequirementSet(check_supported_wheels=check_supported_wheels)
        for req in root_reqs:
            if req.constraint:
                check_invalid_constraint_type(req)
            requirement_set.add_requirement(req)

        # Actually prepare the files, and collect any exceptions. Most hash
        # exceptions cannot be checked ahead of time, because
        # _populate_link() needs to be called before we can make decisions
        # based on link type.
        discovered_reqs: List[InstallRequirement] = []
        hash_errors = HashErrors()
        for req in chain(requirement_set.all_requirements, discovered_reqs):
            try:
                discovered_reqs.extend(self._resolve_one(requirement_set, req))
            except HashError as exc:
                exc.req = req
                hash_errors.append(exc)

        if hash_errors:
            raise hash_errors

        return requirement_set

    def _is_upgrade_allowed(self, req: InstallRequirement) -> bool:
        if self.upgrade_strategy == "to-satisfy-only":
            return False
        elif self.upgrade_strategy == "eager":
            return True
        else:
            assert self.upgrade_strategy == "only-if-needed"
            return req.user_supplied or req.constraint

    def _set_req_to_reinstall(self, req: InstallRequirement) -> None:
        """
        Set a requirement to be installed.
        """
        # Don't uninstall the conflict if doing a user install and the
        # conflict is not a user install.
        if not self.use_user_site or req.satisfied_by.in_usersite:
            req.should_reinstall = True
        req.satisfied_by = None

    def _check_skip_installed(
        self, req_to_install: InstallRequirement
    ) -> Optional[str]:
        """Check if req_to_install should be skipped.

        This will check if the req is installed, and whether we should upgrade
        or reinstall it, taking into account all the relevant user options.

        After calling this req_to_install will only have satisfied_by set to
        None if the req_to_install is to be upgraded/reinstalled etc. Any
        other value will be a dist recording the current thing installed that
        satisfies the requirement.

        Note that for vcs urls and the like we can't assess skipping in this
        routine - we simply identify that we need to pull the thing down,
        then later on it is pulled down and introspected to assess upgrade/
        reinstalls etc.

        :return: A text reason for why it was skipped, or None.
        """
        if self.ignore_installed:
            return None

        req_to_install.check_if_exists(self.use_user_site)
        if not req_to_install.satisfied_by:
            return None

        if self.force_reinstall:
            self._set_req_to_reinstall(req_to_install)
            return None

        if not self._is_upgrade_allowed(req_to_install):
            if self.upgrade_strategy == "only-if-needed":
                return "already satisfied, skipping upgrade"
            return "already satisfied"

        # Check for the possibility of an upgrade. For link-based
        # requirements we have to pull the tree down and inspect to assess
        # the version #, so it's handled way down.
        if not req_to_install.link:
            try:
                self.finder.find_requirement(req_to_install, upgrade=True)
            except BestVersionAlreadyInstalled:
                # Then the best version is installed.
                return "already up-to-date"
            except DistributionNotFound:
                # No distribution found, so we squash the error. It will
                # be raised later when we re-try later to do the install.
                # Why don't we just raise here?
                pass

        self._set_req_to_reinstall(req_to_install)
        return None

    def _find_requirement_link(self, req: InstallRequirement) -> Optional[Link]:
        upgrade = self._is_upgrade_allowed(req)
        best_candidate = self.finder.find_requirement(req, upgrade)
        if not best_candidate:
            return None

        # Log a warning per PEP 592 if necessary before returning.
        link = best_candidate.link
        if link.is_yanked:
            reason = link.yanked_reason or "<none given>"
            msg = (
                # Mark this as a unicode string to prevent
                # "UnicodeEncodeError: 'ascii' codec can't encode character"
                # in Python 2 when the reason contains non-ascii characters.
                "The candidate selected for download or install is a "
                "yanked version: {candidate}\n"
                "Reason for being yanked: {reason}"
            ).format(candidate=best_candidate, reason=reason)
            logger.warning(msg)

        return link

    def _populate_link(self, req: InstallRequirement) -> None:
        """Ensure that if a link can be found for this, that it is found.

        Note that req.link may still be None - if the requirement is already
        installed and not needed to be upgraded based on the return value of
        _is_upgrade_allowed().

        If preparer.require_hashes is True, don't use the wheel cache, because
        cached wheels, always built locally, have different hashes than the
        files downloaded from the index server and thus throw false hash
        mismatches. Furthermore, cached wheels at present have undeterministic
        contents due to file modification times.
        """
        if req.link is None:
            req.link = self._find_requirement_link(req)

        if self.wheel_cache is None or self.preparer.require_hashes:
            return
        cache_entry = self.wheel_cache.get_cache_entry(
            link=req.link,
            package_name=req.name,
            supported_tags=get_supported(),
        )
        if cache_entry is not None:
            logger.debug("Using cached wheel link: %s", cache_entry.link)
            if req.link is req.original_link and cache_entry.persistent:
                req.original_link_is_in_wheel_cache = True
            req.link = cache_entry.link

    def _get_dist_for(self, req: InstallRequirement) -> BaseDistribution:
|
316 |
+
"""Takes a InstallRequirement and returns a single AbstractDist \
|
317 |
+
representing a prepared variant of the same.
|
318 |
+
"""
|
319 |
+
if req.editable:
|
320 |
+
return self.preparer.prepare_editable_requirement(req)
|
321 |
+
|
322 |
+
# satisfied_by is only evaluated by calling _check_skip_installed,
|
323 |
+
# so it must be None here.
|
324 |
+
assert req.satisfied_by is None
|
325 |
+
skip_reason = self._check_skip_installed(req)
|
326 |
+
|
327 |
+
if req.satisfied_by:
|
328 |
+
return self.preparer.prepare_installed_requirement(req, skip_reason)
|
329 |
+
|
330 |
+
# We eagerly populate the link, since that's our "legacy" behavior.
|
331 |
+
self._populate_link(req)
|
332 |
+
dist = self.preparer.prepare_linked_requirement(req)
|
333 |
+
|
334 |
+
# NOTE
|
335 |
+
# The following portion is for determining if a certain package is
|
336 |
+
# going to be re-installed/upgraded or not and reporting to the user.
|
337 |
+
# This should probably get cleaned up in a future refactor.
|
338 |
+
|
339 |
+
# req.req is only avail after unpack for URL
|
340 |
+
# pkgs repeat check_if_exists to uninstall-on-upgrade
|
341 |
+
# (#14)
|
342 |
+
if not self.ignore_installed:
|
343 |
+
req.check_if_exists(self.use_user_site)
|
344 |
+
|
345 |
+
if req.satisfied_by:
|
346 |
+
should_modify = (
|
347 |
+
self.upgrade_strategy != "to-satisfy-only"
|
348 |
+
or self.force_reinstall
|
349 |
+
or self.ignore_installed
|
350 |
+
or req.link.scheme == "file"
|
351 |
+
)
|
352 |
+
if should_modify:
|
353 |
+
self._set_req_to_reinstall(req)
|
354 |
+
else:
|
355 |
+
logger.info(
|
356 |
+
"Requirement already satisfied (use --upgrade to upgrade): %s",
|
357 |
+
req,
|
358 |
+
)
|
359 |
+
return dist
|
360 |
+
|
361 |
+
def _resolve_one(
|
362 |
+
self,
|
363 |
+
requirement_set: RequirementSet,
|
364 |
+
req_to_install: InstallRequirement,
|
365 |
+
) -> List[InstallRequirement]:
|
366 |
+
"""Prepare a single requirements file.
|
367 |
+
|
368 |
+
:return: A list of additional InstallRequirements to also install.
|
369 |
+
"""
|
370 |
+
# Tell user what we are doing for this requirement:
|
371 |
+
# obtain (editable), skipping, processing (local url), collecting
|
372 |
+
# (remote url or package name)
|
373 |
+
if req_to_install.constraint or req_to_install.prepared:
|
374 |
+
return []
|
375 |
+
|
376 |
+
req_to_install.prepared = True
|
377 |
+
|
378 |
+
# Parse and return dependencies
|
379 |
+
dist = self._get_dist_for(req_to_install)
|
380 |
+
# This will raise UnsupportedPythonVersion if the given Python
|
381 |
+
# version isn't compatible with the distribution's Requires-Python.
|
382 |
+
_check_dist_requires_python(
|
383 |
+
dist,
|
384 |
+
version_info=self._py_version_info,
|
385 |
+
ignore_requires_python=self.ignore_requires_python,
|
386 |
+
)
|
387 |
+
|
388 |
+
more_reqs: List[InstallRequirement] = []
|
389 |
+
|
390 |
+
def add_req(subreq: Requirement, extras_requested: Iterable[str]) -> None:
|
391 |
+
# This idiosyncratically converts the Requirement to str and let
|
392 |
+
# make_install_req then parse it again into Requirement. But this is
|
393 |
+
# the legacy resolver so I'm just not going to bother refactoring.
|
394 |
+
sub_install_req = self._make_install_req(str(subreq), req_to_install)
|
395 |
+
parent_req_name = req_to_install.name
|
396 |
+
to_scan_again, add_to_parent = requirement_set.add_requirement(
|
397 |
+
sub_install_req,
|
398 |
+
parent_req_name=parent_req_name,
|
399 |
+
extras_requested=extras_requested,
|
400 |
+
)
|
401 |
+
if parent_req_name and add_to_parent:
|
402 |
+
self._discovered_dependencies[parent_req_name].append(add_to_parent)
|
403 |
+
more_reqs.extend(to_scan_again)
|
404 |
+
|
405 |
+
with indent_log():
|
406 |
+
# We add req_to_install before its dependencies, so that we
|
407 |
+
# can refer to it when adding dependencies.
|
408 |
+
if not requirement_set.has_requirement(req_to_install.name):
|
409 |
+
# 'unnamed' requirements will get added here
|
410 |
+
# 'unnamed' requirements can only come from being directly
|
411 |
+
# provided by the user.
|
412 |
+
assert req_to_install.user_supplied
|
413 |
+
requirement_set.add_requirement(req_to_install, parent_req_name=None)
|
414 |
+
|
415 |
+
if not self.ignore_dependencies:
|
416 |
+
if req_to_install.extras:
|
417 |
+
logger.debug(
|
418 |
+
"Installing extra requirements: %r",
|
419 |
+
",".join(req_to_install.extras),
|
420 |
+
)
|
421 |
+
missing_requested = sorted(
|
422 |
+
set(req_to_install.extras) - set(dist.iter_provided_extras())
|
423 |
+
)
|
424 |
+
for missing in missing_requested:
|
425 |
+
logger.warning(
|
426 |
+
"%s %s does not provide the extra '%s'",
|
427 |
+
dist.raw_name,
|
428 |
+
dist.version,
|
429 |
+
missing,
|
430 |
+
)
|
431 |
+
|
432 |
+
available_requested = sorted(
|
433 |
+
set(dist.iter_provided_extras()) & set(req_to_install.extras)
|
434 |
+
)
|
435 |
+
for subreq in dist.iter_dependencies(available_requested):
|
436 |
+
add_req(subreq, extras_requested=available_requested)
|
437 |
+
|
438 |
+
return more_reqs
|
439 |
+
|
440 |
+
def get_installation_order(
|
441 |
+
self, req_set: RequirementSet
|
442 |
+
) -> List[InstallRequirement]:
|
443 |
+
"""Create the installation order.
|
444 |
+
|
445 |
+
The installation order is topological - requirements are installed
|
446 |
+
before the requiring thing. We break cycles at an arbitrary point,
|
447 |
+
and make no other guarantees.
|
448 |
+
"""
|
449 |
+
# The current implementation, which we may change at any point
|
450 |
+
# installs the user specified things in the order given, except when
|
451 |
+
# dependencies must come earlier to achieve topological order.
|
452 |
+
order = []
|
453 |
+
ordered_reqs: Set[InstallRequirement] = set()
|
454 |
+
|
455 |
+
def schedule(req: InstallRequirement) -> None:
|
456 |
+
if req.satisfied_by or req in ordered_reqs:
|
457 |
+
return
|
458 |
+
if req.constraint:
|
459 |
+
return
|
460 |
+
ordered_reqs.add(req)
|
461 |
+
for dep in self._discovered_dependencies[req.name]:
|
462 |
+
schedule(dep)
|
463 |
+
order.append(req)
|
464 |
+
|
465 |
+
for install_req in req_set.requirements.values():
|
466 |
+
schedule(install_req)
|
467 |
+
return order
|
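Note (illustration, not part of the file above): `get_installation_order()` schedules each requirement's discovered dependencies depth-first before the requirement itself, and the `ordered_reqs` seen-set is what breaks cycles. A minimal standalone sketch of that scheduling idea, using a hypothetical name-to-dependencies dict in place of pip's `RequirementSet` and `InstallRequirement` objects:

from collections import defaultdict
from typing import Dict, List

# Hypothetical dependency table, mirroring self._discovered_dependencies above:
# parent name -> list of dependency names discovered during resolution.
discovered: Dict[str, List[str]] = defaultdict(list, {
    "flask": ["werkzeug", "jinja2"],
    "jinja2": ["markupsafe"],
})

def installation_order(roots: List[str]) -> List[str]:
    """Return names in topological order: dependencies before dependents."""
    order: List[str] = []
    seen: set = set()

    def schedule(name: str) -> None:
        if name in seen:            # already scheduled; also breaks dependency cycles
            return
        seen.add(name)
        for dep in discovered[name]:
            schedule(dep)
        order.append(name)          # appended only after all of its dependencies

    for root in roots:
        schedule(root)
    return order

print(installation_order(["flask"]))
# ['werkzeug', 'markupsafe', 'jinja2', 'flask']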
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__init__.py
ADDED
File without changes

llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (208 Bytes)

llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/__pycache__/base.cpython-310.pyc
ADDED
Binary file (6.45 kB)
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/base.py
ADDED
@@ -0,0 +1,141 @@
from typing import FrozenSet, Iterable, Optional, Tuple, Union

from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import LegacyVersion, Version

from pip._internal.models.link import Link, links_equivalent
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.hashes import Hashes

CandidateLookup = Tuple[Optional["Candidate"], Optional[InstallRequirement]]
CandidateVersion = Union[LegacyVersion, Version]


def format_name(project: str, extras: FrozenSet[str]) -> str:
    if not extras:
        return project
    canonical_extras = sorted(canonicalize_name(e) for e in extras)
    return "{}[{}]".format(project, ",".join(canonical_extras))


class Constraint:
    def __init__(
        self, specifier: SpecifierSet, hashes: Hashes, links: FrozenSet[Link]
    ) -> None:
        self.specifier = specifier
        self.hashes = hashes
        self.links = links

    @classmethod
    def empty(cls) -> "Constraint":
        return Constraint(SpecifierSet(), Hashes(), frozenset())

    @classmethod
    def from_ireq(cls, ireq: InstallRequirement) -> "Constraint":
        links = frozenset([ireq.link]) if ireq.link else frozenset()
        return Constraint(ireq.specifier, ireq.hashes(trust_internet=False), links)

    def __bool__(self) -> bool:
        return bool(self.specifier) or bool(self.hashes) or bool(self.links)

    def __and__(self, other: InstallRequirement) -> "Constraint":
        if not isinstance(other, InstallRequirement):
            return NotImplemented
        specifier = self.specifier & other.specifier
        hashes = self.hashes & other.hashes(trust_internet=False)
        links = self.links
        if other.link:
            links = links.union([other.link])
        return Constraint(specifier, hashes, links)

    def is_satisfied_by(self, candidate: "Candidate") -> bool:
        # Reject if there are any mismatched URL constraints on this package.
        if self.links and not all(_match_link(link, candidate) for link in self.links):
            return False
        # We can safely always allow prereleases here since PackageFinder
        # already implements the prerelease logic, and would have filtered out
        # prerelease candidates if the user does not expect them.
        return self.specifier.contains(candidate.version, prereleases=True)


class Requirement:
    @property
    def project_name(self) -> NormalizedName:
        """The "project name" of a requirement.

        This is different from ``name`` if this requirement contains extras,
        in which case ``name`` would contain the ``[...]`` part, while this
        refers to the name of the project.
        """
        raise NotImplementedError("Subclass should override")

    @property
    def name(self) -> str:
        """The name identifying this requirement in the resolver.

        This is different from ``project_name`` if this requirement contains
        extras, where ``project_name`` would not contain the ``[...]`` part.
        """
        raise NotImplementedError("Subclass should override")

    def is_satisfied_by(self, candidate: "Candidate") -> bool:
        return False

    def get_candidate_lookup(self) -> CandidateLookup:
        raise NotImplementedError("Subclass should override")

    def format_for_error(self) -> str:
        raise NotImplementedError("Subclass should override")


def _match_link(link: Link, candidate: "Candidate") -> bool:
    if candidate.source_link:
        return links_equivalent(link, candidate.source_link)
    return False


class Candidate:
    @property
    def project_name(self) -> NormalizedName:
        """The "project name" of the candidate.

        This is different from ``name`` if this candidate contains extras,
        in which case ``name`` would contain the ``[...]`` part, while this
        refers to the name of the project.
        """
        raise NotImplementedError("Override in subclass")

    @property
    def name(self) -> str:
        """The name identifying this candidate in the resolver.

        This is different from ``project_name`` if this candidate contains
        extras, where ``project_name`` would not contain the ``[...]`` part.
        """
        raise NotImplementedError("Override in subclass")

    @property
    def version(self) -> CandidateVersion:
        raise NotImplementedError("Override in subclass")

    @property
    def is_installed(self) -> bool:
        raise NotImplementedError("Override in subclass")

    @property
    def is_editable(self) -> bool:
        raise NotImplementedError("Override in subclass")

    @property
    def source_link(self) -> Optional[Link]:
        raise NotImplementedError("Override in subclass")

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        raise NotImplementedError("Override in subclass")

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        raise NotImplementedError("Override in subclass")

    def format_for_error(self) -> str:
        raise NotImplementedError("Subclass should override")
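Note (illustration, not part of the file above): `Constraint.__and__()` narrows a constraint by intersecting specifier sets, hashes, and links. A minimal sketch of the specifier-intersection part, assuming the standalone `packaging` library (the same code pip vendors) is installed:

# Combining version ranges the way Constraint.__and__ combines specifiers.
from packaging.specifiers import SpecifierSet

constraint = SpecifierSet(">=1.0")        # e.g. coming from a constraints file
requirement = SpecifierSet("<2.0,!=1.3")  # e.g. coming from an InstallRequirement

combined = constraint & requirement       # intersection of both specifier sets
print(combined)                           # e.g. !=1.3,<2.0,>=1.0 (printed order may vary)
print(combined.contains("1.5"))           # True
print(combined.contains("1.3"))           # False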
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/candidates.py
ADDED
@@ -0,0 +1,547 @@
import logging
import sys
from typing import TYPE_CHECKING, Any, FrozenSet, Iterable, Optional, Tuple, Union, cast

from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
from pip._vendor.packaging.version import Version

from pip._internal.exceptions import (
    HashError,
    InstallationSubprocessError,
    MetadataInconsistent,
)
from pip._internal.metadata import BaseDistribution
from pip._internal.models.link import Link, links_equivalent
from pip._internal.models.wheel import Wheel
from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
)
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.misc import normalize_version_info

from .base import Candidate, CandidateVersion, Requirement, format_name

if TYPE_CHECKING:
    from .factory import Factory

logger = logging.getLogger(__name__)

BaseCandidate = Union[
    "AlreadyInstalledCandidate",
    "EditableCandidate",
    "LinkCandidate",
]

# Avoid conflicting with the PyPI package "Python".
REQUIRES_PYTHON_IDENTIFIER = cast(NormalizedName, "<Python from Requires-Python>")


def as_base_candidate(candidate: Candidate) -> Optional[BaseCandidate]:
    """The runtime version of BaseCandidate."""
    base_candidate_classes = (
        AlreadyInstalledCandidate,
        EditableCandidate,
        LinkCandidate,
    )
    if isinstance(candidate, base_candidate_classes):
        return candidate
    return None


def make_install_req_from_link(
    link: Link, template: InstallRequirement
) -> InstallRequirement:
    assert not template.editable, "template is editable"
    if template.req:
        line = str(template.req)
    else:
        line = link.url
    ireq = install_req_from_line(
        line,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        options=dict(
            install_options=template.install_options,
            global_options=template.global_options,
            hashes=template.hash_options,
        ),
    )
    ireq.original_link = template.original_link
    ireq.link = link
    return ireq


def make_install_req_from_editable(
    link: Link, template: InstallRequirement
) -> InstallRequirement:
    assert template.editable, "template not editable"
    return install_req_from_editable(
        link.url,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        permit_editable_wheels=template.permit_editable_wheels,
        options=dict(
            install_options=template.install_options,
            global_options=template.global_options,
            hashes=template.hash_options,
        ),
    )


def _make_install_req_from_dist(
    dist: BaseDistribution, template: InstallRequirement
) -> InstallRequirement:
    if template.req:
        line = str(template.req)
    elif template.link:
        line = f"{dist.canonical_name} @ {template.link.url}"
    else:
        line = f"{dist.canonical_name}=={dist.version}"
    ireq = install_req_from_line(
        line,
        user_supplied=template.user_supplied,
        comes_from=template.comes_from,
        use_pep517=template.use_pep517,
        isolated=template.isolated,
        constraint=template.constraint,
        options=dict(
            install_options=template.install_options,
            global_options=template.global_options,
            hashes=template.hash_options,
        ),
    )
    ireq.satisfied_by = dist
    return ireq


class _InstallRequirementBackedCandidate(Candidate):
    """A candidate backed by an ``InstallRequirement``.

    This represents a package request with the target not being already
    in the environment, and needs to be fetched and installed. The backing
    ``InstallRequirement`` is responsible for most of the leg work; this
    class exposes appropriate information to the resolver.

    :param link: The link passed to the ``InstallRequirement``. The backing
        ``InstallRequirement`` will use this link to fetch the distribution.
    :param source_link: The link this candidate "originates" from. This is
        different from ``link`` when the link is found in the wheel cache.
        ``link`` would point to the wheel cache, while this points to the
        found remote link (e.g. from pypi.org).
    """

    dist: BaseDistribution
    is_installed = False

    def __init__(
        self,
        link: Link,
        source_link: Link,
        ireq: InstallRequirement,
        factory: "Factory",
        name: Optional[NormalizedName] = None,
        version: Optional[CandidateVersion] = None,
    ) -> None:
        self._link = link
        self._source_link = source_link
        self._factory = factory
        self._ireq = ireq
        self._name = name
        self._version = version
        self.dist = self._prepare()

    def __str__(self) -> str:
        return f"{self.name} {self.version}"

    def __repr__(self) -> str:
        return "{class_name}({link!r})".format(
            class_name=self.__class__.__name__,
            link=str(self._link),
        )

    def __hash__(self) -> int:
        return hash((self.__class__, self._link))

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, self.__class__):
            return links_equivalent(self._link, other._link)
        return False

    @property
    def source_link(self) -> Optional[Link]:
        return self._source_link

    @property
    def project_name(self) -> NormalizedName:
        """The normalised name of the project the candidate refers to"""
        if self._name is None:
            self._name = self.dist.canonical_name
        return self._name

    @property
    def name(self) -> str:
        return self.project_name

    @property
    def version(self) -> CandidateVersion:
        if self._version is None:
            self._version = self.dist.version
        return self._version

    def format_for_error(self) -> str:
        return "{} {} (from {})".format(
            self.name,
            self.version,
            self._link.file_path if self._link.is_file else self._link,
        )

    def _prepare_distribution(self) -> BaseDistribution:
        raise NotImplementedError("Override in subclass")

    def _check_metadata_consistency(self, dist: BaseDistribution) -> None:
        """Check for consistency of project name and version of dist."""
        if self._name is not None and self._name != dist.canonical_name:
            raise MetadataInconsistent(
                self._ireq,
                "name",
                self._name,
                dist.canonical_name,
            )
        if self._version is not None and self._version != dist.version:
            raise MetadataInconsistent(
                self._ireq,
                "version",
                str(self._version),
                str(dist.version),
            )

    def _prepare(self) -> BaseDistribution:
        try:
            dist = self._prepare_distribution()
        except HashError as e:
            # Provide HashError the underlying ireq that caused it. This
            # provides context for the resulting error message to show the
            # offending line to the user.
            e.req = self._ireq
            raise
        except InstallationSubprocessError as exc:
            # The output has been presented already, so don't duplicate it.
            exc.context = "See above for output."
            raise

        self._check_metadata_consistency(dist)
        return dist

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        requires = self.dist.iter_dependencies() if with_requires else ()
        for r in requires:
            yield self._factory.make_requirement_from_spec(str(r), self._ireq)
        yield self._factory.make_requires_python_requirement(self.dist.requires_python)

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        return self._ireq


class LinkCandidate(_InstallRequirementBackedCandidate):
    is_editable = False

    def __init__(
        self,
        link: Link,
        template: InstallRequirement,
        factory: "Factory",
        name: Optional[NormalizedName] = None,
        version: Optional[CandidateVersion] = None,
    ) -> None:
        source_link = link
        cache_entry = factory.get_wheel_cache_entry(link, name)
        if cache_entry is not None:
            logger.debug("Using cached wheel link: %s", cache_entry.link)
            link = cache_entry.link
        ireq = make_install_req_from_link(link, template)
        assert ireq.link == link
        if ireq.link.is_wheel and not ireq.link.is_file:
            wheel = Wheel(ireq.link.filename)
            wheel_name = canonicalize_name(wheel.name)
            assert name == wheel_name, f"{name!r} != {wheel_name!r} for wheel"
            # Version may not be present for PEP 508 direct URLs
            if version is not None:
                wheel_version = Version(wheel.version)
                assert version == wheel_version, "{!r} != {!r} for wheel {}".format(
                    version, wheel_version, name
                )

        if (
            cache_entry is not None
            and cache_entry.persistent
            and template.link is template.original_link
        ):
            ireq.original_link_is_in_wheel_cache = True

        super().__init__(
            link=link,
            source_link=source_link,
            ireq=ireq,
            factory=factory,
            name=name,
            version=version,
        )

    def _prepare_distribution(self) -> BaseDistribution:
        preparer = self._factory.preparer
        return preparer.prepare_linked_requirement(self._ireq, parallel_builds=True)


class EditableCandidate(_InstallRequirementBackedCandidate):
    is_editable = True

    def __init__(
        self,
        link: Link,
        template: InstallRequirement,
        factory: "Factory",
        name: Optional[NormalizedName] = None,
        version: Optional[CandidateVersion] = None,
    ) -> None:
        super().__init__(
            link=link,
            source_link=link,
            ireq=make_install_req_from_editable(link, template),
            factory=factory,
            name=name,
            version=version,
        )

    def _prepare_distribution(self) -> BaseDistribution:
        return self._factory.preparer.prepare_editable_requirement(self._ireq)


class AlreadyInstalledCandidate(Candidate):
    is_installed = True
    source_link = None

    def __init__(
        self,
        dist: BaseDistribution,
        template: InstallRequirement,
        factory: "Factory",
    ) -> None:
        self.dist = dist
        self._ireq = _make_install_req_from_dist(dist, template)
        self._factory = factory

        # This is just logging some messages, so we can do it eagerly.
        # The returned dist would be exactly the same as self.dist because we
        # set satisfied_by in _make_install_req_from_dist.
        # TODO: Supply reason based on force_reinstall and upgrade_strategy.
        skip_reason = "already satisfied"
        factory.preparer.prepare_installed_requirement(self._ireq, skip_reason)

    def __str__(self) -> str:
        return str(self.dist)

    def __repr__(self) -> str:
        return "{class_name}({distribution!r})".format(
            class_name=self.__class__.__name__,
            distribution=self.dist,
        )

    def __hash__(self) -> int:
        return hash((self.__class__, self.name, self.version))

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, self.__class__):
            return self.name == other.name and self.version == other.version
        return False

    @property
    def project_name(self) -> NormalizedName:
        return self.dist.canonical_name

    @property
    def name(self) -> str:
        return self.project_name

    @property
    def version(self) -> CandidateVersion:
        return self.dist.version

    @property
    def is_editable(self) -> bool:
        return self.dist.editable

    def format_for_error(self) -> str:
        return f"{self.name} {self.version} (Installed)"

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        if not with_requires:
            return
        for r in self.dist.iter_dependencies():
            yield self._factory.make_requirement_from_spec(str(r), self._ireq)

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        return None


class ExtrasCandidate(Candidate):
    """A candidate that has 'extras', indicating additional dependencies.

    Requirements can be for a project with dependencies, something like
    foo[extra]. The extras don't affect the project/version being installed
    directly, but indicate that we need additional dependencies. We model that
    by having an artificial ExtrasCandidate that wraps the "base" candidate.

    The ExtrasCandidate differs from the base in the following ways:

    1. It has a unique name, of the form foo[extra]. This causes the resolver
       to treat it as a separate node in the dependency graph.
    2. When we're getting the candidate's dependencies,
       a) We specify that we want the extra dependencies as well.
       b) We add a dependency on the base candidate.
          See below for why this is needed.
    3. We return None for the underlying InstallRequirement, as the base
       candidate will provide it, and we don't want to end up with duplicates.

    The dependency on the base candidate is needed so that the resolver can't
    decide that it should recommend foo[extra1] version 1.0 and foo[extra2]
    version 2.0. Having those candidates depend on foo=1.0 and foo=2.0
    respectively forces the resolver to recognise that this is a conflict.
    """

    def __init__(
        self,
        base: BaseCandidate,
        extras: FrozenSet[str],
    ) -> None:
        self.base = base
        self.extras = extras

    def __str__(self) -> str:
        name, rest = str(self.base).split(" ", 1)
        return "{}[{}] {}".format(name, ",".join(self.extras), rest)

    def __repr__(self) -> str:
        return "{class_name}(base={base!r}, extras={extras!r})".format(
            class_name=self.__class__.__name__,
            base=self.base,
            extras=self.extras,
        )

    def __hash__(self) -> int:
        return hash((self.base, self.extras))

    def __eq__(self, other: Any) -> bool:
        if isinstance(other, self.__class__):
            return self.base == other.base and self.extras == other.extras
        return False

    @property
    def project_name(self) -> NormalizedName:
        return self.base.project_name

    @property
    def name(self) -> str:
        """The normalised name of the project the candidate refers to"""
        return format_name(self.base.project_name, self.extras)

    @property
    def version(self) -> CandidateVersion:
        return self.base.version

    def format_for_error(self) -> str:
        return "{} [{}]".format(
            self.base.format_for_error(), ", ".join(sorted(self.extras))
        )

    @property
    def is_installed(self) -> bool:
        return self.base.is_installed

    @property
    def is_editable(self) -> bool:
        return self.base.is_editable

    @property
    def source_link(self) -> Optional[Link]:
        return self.base.source_link

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        factory = self.base._factory

        # Add a dependency on the exact base
        # (See note 2b in the class docstring)
        yield factory.make_requirement_from_candidate(self.base)
        if not with_requires:
            return

        # The user may have specified extras that the candidate doesn't
        # support. We ignore any unsupported extras here.
        valid_extras = self.extras.intersection(self.base.dist.iter_provided_extras())
        invalid_extras = self.extras.difference(self.base.dist.iter_provided_extras())
        for extra in sorted(invalid_extras):
            logger.warning(
                "%s %s does not provide the extra '%s'",
                self.base.name,
                self.version,
                extra,
            )

        for r in self.base.dist.iter_dependencies(valid_extras):
            requirement = factory.make_requirement_from_spec(
                str(r), self.base._ireq, valid_extras
            )
            if requirement:
                yield requirement

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        # We don't return anything here, because we always
        # depend on the base candidate, and we'll get the
        # install requirement from that.
        return None


class RequiresPythonCandidate(Candidate):
    is_installed = False
    source_link = None

    def __init__(self, py_version_info: Optional[Tuple[int, ...]]) -> None:
        if py_version_info is not None:
            version_info = normalize_version_info(py_version_info)
        else:
            version_info = sys.version_info[:3]
        self._version = Version(".".join(str(c) for c in version_info))

    # We don't need to implement __eq__() and __ne__() since there is always
    # only one RequiresPythonCandidate in a resolution, i.e. the host Python.
    # The built-in object.__eq__() and object.__ne__() do exactly what we want.

    def __str__(self) -> str:
        return f"Python {self._version}"

    @property
    def project_name(self) -> NormalizedName:
        return REQUIRES_PYTHON_IDENTIFIER

    @property
    def name(self) -> str:
        return REQUIRES_PYTHON_IDENTIFIER

    @property
    def version(self) -> CandidateVersion:
        return self._version

    def format_for_error(self) -> str:
        return f"Python {self.version}"

    def iter_dependencies(self, with_requires: bool) -> Iterable[Optional[Requirement]]:
        return ()

    def get_install_requirement(self) -> Optional[InstallRequirement]:
        return None
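Note (illustration, not part of the file above): `ExtrasCandidate` keeps `project_name` equal to the base candidate's, but exposes a distinct resolver `name` of the form `project[extra1,extra2]` via `format_name()`, so `foo[extra]` becomes its own graph node that still depends on plain `foo`. A small sketch of that naming rule, assuming the standalone `packaging` library for `canonicalize_name`:

# Same shape as format_name() in resolvelib/base.py shown earlier; the
# function and values here are illustrative, not pip's public API.
from packaging.utils import canonicalize_name

def format_name(project: str, extras: frozenset) -> str:
    if not extras:
        return project
    canonical_extras = sorted(canonicalize_name(e) for e in extras)
    return "{}[{}]".format(project, ",".join(canonical_extras))

print(format_name("requests", frozenset()))                        # requests
print(format_name("requests", frozenset({"socks", "Security"})))   # requests[security,socks]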
llmeval-env/lib/python3.10/site-packages/pip/_internal/resolution/resolvelib/found_candidates.py
ADDED
@@ -0,0 +1,155 @@
"""Utilities to lazily create and visit candidates found.

Creating and visiting a candidate is a *very* costly operation. It involves
fetching, extracting, potentially building modules from source, and verifying
distribution metadata. It is therefore crucial for performance to keep
everything here lazy all the way down, so we only touch candidates that we
absolutely need, and not "download the world" when we only need one version of
something.
"""

import functools
from collections.abc import Sequence
from typing import TYPE_CHECKING, Any, Callable, Iterator, Optional, Set, Tuple

from pip._vendor.packaging.version import _BaseVersion

from .base import Candidate

IndexCandidateInfo = Tuple[_BaseVersion, Callable[[], Optional[Candidate]]]

if TYPE_CHECKING:
    SequenceCandidate = Sequence[Candidate]
else:
    # For compatibility: Python before 3.9 does not support using [] on the
    # Sequence class.
    #
    # >>> from collections.abc import Sequence
    # >>> Sequence[str]
    # Traceback (most recent call last):
    #   File "<stdin>", line 1, in <module>
    # TypeError: 'ABCMeta' object is not subscriptable
    #
    # TODO: Remove this block after dropping Python 3.8 support.
    SequenceCandidate = Sequence


def _iter_built(infos: Iterator[IndexCandidateInfo]) -> Iterator[Candidate]:
    """Iterator for ``FoundCandidates``.

    This iterator is used when the package is not already installed. Candidates
    from index come later in their normal ordering.
    """
    versions_found: Set[_BaseVersion] = set()
    for version, func in infos:
        if version in versions_found:
            continue
        candidate = func()
        if candidate is None:
            continue
        yield candidate
        versions_found.add(version)


def _iter_built_with_prepended(
    installed: Candidate, infos: Iterator[IndexCandidateInfo]
) -> Iterator[Candidate]:
    """Iterator for ``FoundCandidates``.

    This iterator is used when the resolver prefers the already-installed
    candidate and NOT to upgrade. The installed candidate is therefore
    always yielded first, and candidates from index come later in their
    normal ordering, except skipped when the version is already installed.
    """
    yield installed
    versions_found: Set[_BaseVersion] = {installed.version}
    for version, func in infos:
        if version in versions_found:
            continue
        candidate = func()
        if candidate is None:
            continue
        yield candidate
        versions_found.add(version)


def _iter_built_with_inserted(
    installed: Candidate, infos: Iterator[IndexCandidateInfo]
) -> Iterator[Candidate]:
    """Iterator for ``FoundCandidates``.

    This iterator is used when the resolver prefers to upgrade an
    already-installed package. Candidates from index are returned in their
    normal ordering, except replaced when the version is already installed.

    The implementation iterates through and yields other candidates, inserting
    the installed candidate exactly once before we start yielding older or
    equivalent candidates, or after all other candidates if they are all newer.
    """
    versions_found: Set[_BaseVersion] = set()
    for version, func in infos:
        if version in versions_found:
            continue
        # If the installed candidate is better, yield it first.
        if installed.version >= version:
            yield installed
            versions_found.add(installed.version)
        candidate = func()
        if candidate is None:
            continue
        yield candidate
        versions_found.add(version)

    # If the installed candidate is older than all other candidates.
    if installed.version not in versions_found:
        yield installed


class FoundCandidates(SequenceCandidate):
    """A lazy sequence to provide candidates to the resolver.

    The intended usage is to return this from `find_matches()` so the resolver
    can iterate through the sequence multiple times, but only access the index
    page when remote packages are actually needed. This improve performances
    when suitable candidates are already installed on disk.
    """

    def __init__(
        self,
        get_infos: Callable[[], Iterator[IndexCandidateInfo]],
        installed: Optional[Candidate],
        prefers_installed: bool,
        incompatible_ids: Set[int],
    ):
        self._get_infos = get_infos
        self._installed = installed
        self._prefers_installed = prefers_installed
        self._incompatible_ids = incompatible_ids

    def __getitem__(self, index: Any) -> Any:
        # Implemented to satisfy the ABC check. This is not needed by the
        # resolver, and should not be used by the provider either (for
        # performance reasons).
        raise NotImplementedError("don't do this")

    def __iter__(self) -> Iterator[Candidate]:
        infos = self._get_infos()
        if not self._installed:
            iterator = _iter_built(infos)
        elif self._prefers_installed:
            iterator = _iter_built_with_prepended(self._installed, infos)
        else:
            iterator = _iter_built_with_inserted(self._installed, infos)
        return (c for c in iterator if id(c) not in self._incompatible_ids)

    def __len__(self) -> int:
        # Implemented to satisfy the ABC check. This is not needed by the
        # resolver, and should not be used by the provider either (for
        # performance reasons).
        raise NotImplementedError("don't do this")

    @functools.lru_cache(maxsize=1)
    def __bool__(self) -> bool:
        if self._prefers_installed and self._installed:
            return True
        return any(self)
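Note (illustration, not part of the file above): the iterators in found_candidates.py consume `(version, builder)` pairs so the expensive preparation step only runs for candidates the resolver actually reaches, and duplicate versions are skipped without ever being built. A standalone sketch of that laziness, with hypothetical integer versions and string candidates standing in for pip's types:

from typing import Callable, Iterator, Optional, Tuple

Info = Tuple[int, Callable[[], Optional[str]]]

def iter_built(infos: Iterator[Info]) -> Iterator[str]:
    seen = set()
    for version, build in infos:
        if version in seen:        # skip duplicate versions without building them
            continue
        candidate = build()        # the costly step (fetch/build) happens lazily here
        if candidate is None:      # the builder may decline (e.g. incompatible wheel)
            continue
        yield candidate
        seen.add(version)

def expensive(version: int) -> Callable[[], Optional[str]]:
    def build() -> Optional[str]:
        print(f"building {version}")   # in pip this would fetch/extract a distribution
        return f"candidate-{version}"
    return build

infos = iter([(3, expensive(3)), (2, expensive(2)), (2, expensive(2)), (1, expensive(1))])
first = next(iter_built(infos))   # only "building 3" is printed; 2 and 1 are never built
print(first)                      # candidate-3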