applied-ai-018 committed
Commit c874fc1 · verified
Parent: 5c5fe2b

Add files using upload-large-folder tool

Files changed (50). This view is limited to 50 files because the commit contains too many changes.
  1. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py +21 -0
  2. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc +0 -0
  3. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc +0 -0
  4. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc +0 -0
  5. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc +0 -0
  6. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc +0 -0
  7. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/base.py +36 -0
  8. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/installed.py +20 -0
  9. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py +127 -0
  10. scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py +31 -0
  11. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__init__.py +2 -0
  12. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc +0 -0
  13. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc +0 -0
  14. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc +0 -0
  15. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc +0 -0
  16. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc +0 -0
  17. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc +0 -0
  18. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc +0 -0
  19. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc +0 -0
  20. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc +0 -0
  21. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc +0 -0
  22. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc +0 -0
  23. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/candidate.py +34 -0
  24. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/direct_url.py +220 -0
  25. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/format_control.py +80 -0
  26. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/index.py +28 -0
  27. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/link.py +288 -0
  28. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/scheme.py +31 -0
  29. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/search_scope.py +129 -0
  30. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py +51 -0
  31. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/target_python.py +110 -0
  32. scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/wheel.py +89 -0
  33. scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc +0 -0
  34. scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc +0 -0
  35. scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc +0 -0
  36. scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc +0 -0
  37. scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc +0 -0
  38. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__init__.py +0 -0
  39. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc +0 -0
  40. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc +0 -0
  41. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-310.pyc +0 -0
  42. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/logging.cpython-310.pyc +0 -0
  43. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc +0 -0
  44. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/_log.py +38 -0
  45. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/appdirs.py +52 -0
  46. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/compat.py +63 -0
  47. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py +165 -0
  48. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/datetime.py +11 -0
  49. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py +120 -0
  50. scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/direct_url_helpers.py +87 -0
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py ADDED
@@ -0,0 +1,21 @@
+ from pip._internal.distributions.base import AbstractDistribution
+ from pip._internal.distributions.sdist import SourceDistribution
+ from pip._internal.distributions.wheel import WheelDistribution
+ from pip._internal.req.req_install import InstallRequirement
+
+
+ def make_distribution_for_install_requirement(
+     install_req: InstallRequirement,
+ ) -> AbstractDistribution:
+     """Returns a Distribution for the given InstallRequirement"""
+     # Editable requirements will always be source distributions. They use the
+     # legacy logic until we create a modern standard for them.
+     if install_req.editable:
+         return SourceDistribution(install_req)
+
+     # If it's a wheel, it's a WheelDistribution
+     if install_req.is_wheel:
+         return WheelDistribution(install_req)
+
+     # Otherwise, a SourceDistribution
+     return SourceDistribution(install_req)
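
A minimal usage sketch of the factory above (illustrative only; it assumes a pip 22.x install where the internal constructor install_req_from_line is available):

    # Hedged sketch: how the dispatch behaves for a plain named requirement.
    from pip._internal.distributions import make_distribution_for_install_requirement
    from pip._internal.req.constructors import install_req_from_line  # assumed helper

    req = install_req_from_line("requests==2.27.1")
    dist = make_distribution_for_install_requirement(req)
    # No wheel link is attached yet and the requirement is not editable,
    # so the source-distribution path is chosen.
    print(type(dist).__name__)  # SourceDistribution
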
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (802 Bytes).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc ADDED
Binary file (1.86 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc ADDED
Binary file (1.24 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc ADDED
Binary file (4.45 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (1.6 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/base.py ADDED
@@ -0,0 +1,36 @@
+ import abc
+
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._internal.metadata.base import BaseDistribution
+ from pip._internal.req import InstallRequirement
+
+
+ class AbstractDistribution(metaclass=abc.ABCMeta):
+     """A base class for handling installable artifacts.
+
+     The requirements for anything installable are as follows:
+
+     - we must be able to determine the requirement name
+       (or we can't correctly handle the non-upgrade case).
+
+     - for packages with setup requirements, we must also be able
+       to determine their requirements without installing additional
+       packages (for the same reason as run-time dependencies)
+
+     - we must be able to create a Distribution object exposing the
+       above metadata.
+     """
+
+     def __init__(self, req: InstallRequirement) -> None:
+         super().__init__()
+         self.req = req
+
+     @abc.abstractmethod
+     def get_metadata_distribution(self) -> BaseDistribution:
+         raise NotImplementedError()
+
+     @abc.abstractmethod
+     def prepare_distribution_metadata(
+         self, finder: PackageFinder, build_isolation: bool
+     ) -> None:
+         raise NotImplementedError()
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/installed.py ADDED
@@ -0,0 +1,20 @@
+ from pip._internal.distributions.base import AbstractDistribution
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._internal.metadata import BaseDistribution
+
+
+ class InstalledDistribution(AbstractDistribution):
+     """Represents an installed package.
+
+     This does not need any preparation as the required information has already
+     been computed.
+     """
+
+     def get_metadata_distribution(self) -> BaseDistribution:
+         assert self.req.satisfied_by is not None, "not actually installed"
+         return self.req.satisfied_by
+
+     def prepare_distribution_metadata(
+         self, finder: PackageFinder, build_isolation: bool
+     ) -> None:
+         pass
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py ADDED
@@ -0,0 +1,127 @@
+ import logging
+ from typing import Iterable, Set, Tuple
+
+ from pip._internal.build_env import BuildEnvironment
+ from pip._internal.distributions.base import AbstractDistribution
+ from pip._internal.exceptions import InstallationError
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._internal.metadata import BaseDistribution
+ from pip._internal.utils.subprocess import runner_with_spinner_message
+
+ logger = logging.getLogger(__name__)
+
+
+ class SourceDistribution(AbstractDistribution):
+     """Represents a source distribution.
+
+     The preparation step for these needs metadata for the packages to be
+     generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+     """
+
+     def get_metadata_distribution(self) -> BaseDistribution:
+         return self.req.get_dist()
+
+     def prepare_distribution_metadata(
+         self, finder: PackageFinder, build_isolation: bool
+     ) -> None:
+         # Load pyproject.toml, to determine whether PEP 517 is to be used
+         self.req.load_pyproject_toml()
+
+         # Set up the build isolation, if this requirement should be isolated
+         should_isolate = self.req.use_pep517 and build_isolation
+         if should_isolate:
+             # Setup an isolated environment and install the build backend static
+             # requirements in it.
+             self._prepare_build_backend(finder)
+             # Check that if the requirement is editable, it either supports PEP 660 or
+             # has a setup.py or a setup.cfg. This cannot be done earlier because we need
+             # to setup the build backend to verify it supports build_editable, nor can
+             # it be done later, because we want to avoid installing build requirements
+             # needlessly. Doing it here also works around setuptools generating
+             # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
+             # without setup.py nor setup.cfg.
+             self.req.isolated_editable_sanity_check()
+             # Install the dynamic build requirements.
+             self._install_build_reqs(finder)
+
+         self.req.prepare_metadata()
+
+     def _prepare_build_backend(self, finder: PackageFinder) -> None:
+         # Isolate in a BuildEnvironment and install the build-time
+         # requirements.
+         pyproject_requires = self.req.pyproject_requires
+         assert pyproject_requires is not None
+
+         self.req.build_env = BuildEnvironment()
+         self.req.build_env.install_requirements(
+             finder, pyproject_requires, "overlay", kind="build dependencies"
+         )
+         conflicting, missing = self.req.build_env.check_requirements(
+             self.req.requirements_to_check
+         )
+         if conflicting:
+             self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
+         if missing:
+             logger.warning(
+                 "Missing build requirements in pyproject.toml for %s.",
+                 self.req,
+             )
+             logger.warning(
+                 "The project does not specify a build backend, and "
+                 "pip cannot fall back to setuptools without %s.",
+                 " and ".join(map(repr, sorted(missing))),
+             )
+
+     def _get_build_requires_wheel(self) -> Iterable[str]:
+         with self.req.build_env:
+             runner = runner_with_spinner_message("Getting requirements to build wheel")
+             backend = self.req.pep517_backend
+             assert backend is not None
+             with backend.subprocess_runner(runner):
+                 return backend.get_requires_for_build_wheel()
+
+     def _get_build_requires_editable(self) -> Iterable[str]:
+         with self.req.build_env:
+             runner = runner_with_spinner_message(
+                 "Getting requirements to build editable"
+             )
+             backend = self.req.pep517_backend
+             assert backend is not None
+             with backend.subprocess_runner(runner):
+                 return backend.get_requires_for_build_editable()
+
+     def _install_build_reqs(self, finder: PackageFinder) -> None:
+         # Install any extra build dependencies that the backend requests.
+         # This must be done in a second pass, as the pyproject.toml
+         # dependencies must be installed before we can call the backend.
+         if (
+             self.req.editable
+             and self.req.permit_editable_wheels
+             and self.req.supports_pyproject_editable()
+         ):
+             build_reqs = self._get_build_requires_editable()
+         else:
+             build_reqs = self._get_build_requires_wheel()
+         conflicting, missing = self.req.build_env.check_requirements(build_reqs)
+         if conflicting:
+             self._raise_conflicts("the backend dependencies", conflicting)
+         self.req.build_env.install_requirements(
+             finder, missing, "normal", kind="backend dependencies"
+         )
+
+     def _raise_conflicts(
+         self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+     ) -> None:
+         format_string = (
+             "Some build dependencies for {requirement} "
+             "conflict with {conflicting_with}: {description}."
+         )
+         error_message = format_string.format(
+             requirement=self.req,
+             conflicting_with=conflicting_with,
+             description=", ".join(
+                 f"{installed} is incompatible with {wanted}"
+                 for installed, wanted in sorted(conflicting_reqs)
+             ),
+         )
+         raise InstallationError(error_message)
scripts/myenv/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py ADDED
@@ -0,0 +1,31 @@
+ from pip._vendor.packaging.utils import canonicalize_name
+
+ from pip._internal.distributions.base import AbstractDistribution
+ from pip._internal.index.package_finder import PackageFinder
+ from pip._internal.metadata import (
+     BaseDistribution,
+     FilesystemWheel,
+     get_wheel_distribution,
+ )
+
+
+ class WheelDistribution(AbstractDistribution):
+     """Represents a wheel distribution.
+
+     This does not need any preparation as wheels can be directly unpacked.
+     """
+
+     def get_metadata_distribution(self) -> BaseDistribution:
+         """Loads the metadata from the wheel file into memory and returns a
+         Distribution that uses it, not relying on the wheel file or
+         requirement.
+         """
+         assert self.req.local_file_path, "Set as part of preparation during download"
+         assert self.req.name, "Wheels are never unnamed"
+         wheel = FilesystemWheel(self.req.local_file_path)
+         return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
+
+     def prepare_distribution_metadata(
+         self, finder: PackageFinder, build_isolation: bool
+     ) -> None:
+         pass
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__init__.py ADDED
@@ -0,0 +1,2 @@
+ """A package that contains models that represent entities.
+ """
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (263 Bytes).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/candidate.cpython-310.pyc ADDED
Binary file (1.42 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/direct_url.cpython-310.pyc ADDED
Binary file (7.29 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/format_control.cpython-310.pyc ADDED
Binary file (2.74 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/index.cpython-310.pyc ADDED
Binary file (1.23 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/link.cpython-310.pyc ADDED
Binary file (10.2 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/scheme.cpython-310.pyc ADDED
Binary file (1.03 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/search_scope.cpython-310.pyc ADDED
Binary file (3.49 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/selection_prefs.cpython-310.pyc ADDED
Binary file (1.69 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/target_python.cpython-310.pyc ADDED
Binary file (3.44 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (4.36 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/candidate.py ADDED
@@ -0,0 +1,34 @@
+ from pip._vendor.packaging.version import parse as parse_version
+
+ from pip._internal.models.link import Link
+ from pip._internal.utils.models import KeyBasedCompareMixin
+
+
+ class InstallationCandidate(KeyBasedCompareMixin):
+     """Represents a potential "candidate" for installation."""
+
+     __slots__ = ["name", "version", "link"]
+
+     def __init__(self, name: str, version: str, link: Link) -> None:
+         self.name = name
+         self.version = parse_version(version)
+         self.link = link
+
+         super().__init__(
+             key=(self.name, self.version, self.link),
+             defining_class=InstallationCandidate,
+         )
+
+     def __repr__(self) -> str:
+         return "<InstallationCandidate({!r}, {!r}, {!r})>".format(
+             self.name,
+             self.version,
+             self.link,
+         )
+
+     def __str__(self) -> str:
+         return "{!r} candidate (version {} at {})".format(
+             self.name,
+             self.version,
+             self.link,
+         )
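
A short illustrative sketch (not from the commit): an InstallationCandidate bundles a name, a parsed version, and a Link, and compares by that key. The wheel URL below is hypothetical.

    from pip._internal.models.candidate import InstallationCandidate
    from pip._internal.models.link import Link

    link = Link("https://example.com/requests-2.27.1-py3-none-any.whl")  # hypothetical
    cand = InstallationCandidate("requests", "2.27.1", link)
    print(cand.version)  # a packaging Version object: 2.27.1
    print(str(cand))     # 'requests' candidate (version 2.27.1 at <link>)
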
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/direct_url.py ADDED
@@ -0,0 +1,220 @@
+ """ PEP 610 """
+ import json
+ import re
+ import urllib.parse
+ from typing import Any, Dict, Iterable, Optional, Type, TypeVar, Union
+
+ __all__ = [
+     "DirectUrl",
+     "DirectUrlValidationError",
+     "DirInfo",
+     "ArchiveInfo",
+     "VcsInfo",
+ ]
+
+ T = TypeVar("T")
+
+ DIRECT_URL_METADATA_NAME = "direct_url.json"
+ ENV_VAR_RE = re.compile(r"^\$\{[A-Za-z0-9-_]+\}(:\$\{[A-Za-z0-9-_]+\})?$")
+
+
+ class DirectUrlValidationError(Exception):
+     pass
+
+
+ def _get(
+     d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+ ) -> Optional[T]:
+     """Get value from dictionary and verify expected type."""
+     if key not in d:
+         return default
+     value = d[key]
+     if not isinstance(value, expected_type):
+         raise DirectUrlValidationError(
+             "{!r} has unexpected type for {} (expected {})".format(
+                 value, key, expected_type
+             )
+         )
+     return value
+
+
+ def _get_required(
+     d: Dict[str, Any], expected_type: Type[T], key: str, default: Optional[T] = None
+ ) -> T:
+     value = _get(d, expected_type, key, default)
+     if value is None:
+         raise DirectUrlValidationError(f"{key} must have a value")
+     return value
+
+
+ def _exactly_one_of(infos: Iterable[Optional["InfoType"]]) -> "InfoType":
+     infos = [info for info in infos if info is not None]
+     if not infos:
+         raise DirectUrlValidationError(
+             "missing one of archive_info, dir_info, vcs_info"
+         )
+     if len(infos) > 1:
+         raise DirectUrlValidationError(
+             "more than one of archive_info, dir_info, vcs_info"
+         )
+     assert infos[0] is not None
+     return infos[0]
+
+
+ def _filter_none(**kwargs: Any) -> Dict[str, Any]:
+     """Make dict excluding None values."""
+     return {k: v for k, v in kwargs.items() if v is not None}
+
+
+ class VcsInfo:
+     name = "vcs_info"
+
+     def __init__(
+         self,
+         vcs: str,
+         commit_id: str,
+         requested_revision: Optional[str] = None,
+         resolved_revision: Optional[str] = None,
+         resolved_revision_type: Optional[str] = None,
+     ) -> None:
+         self.vcs = vcs
+         self.requested_revision = requested_revision
+         self.commit_id = commit_id
+         self.resolved_revision = resolved_revision
+         self.resolved_revision_type = resolved_revision_type
+
+     @classmethod
+     def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["VcsInfo"]:
+         if d is None:
+             return None
+         return cls(
+             vcs=_get_required(d, str, "vcs"),
+             commit_id=_get_required(d, str, "commit_id"),
+             requested_revision=_get(d, str, "requested_revision"),
+             resolved_revision=_get(d, str, "resolved_revision"),
+             resolved_revision_type=_get(d, str, "resolved_revision_type"),
+         )
+
+     def _to_dict(self) -> Dict[str, Any]:
+         return _filter_none(
+             vcs=self.vcs,
+             requested_revision=self.requested_revision,
+             commit_id=self.commit_id,
+             resolved_revision=self.resolved_revision,
+             resolved_revision_type=self.resolved_revision_type,
+         )
+
+
+ class ArchiveInfo:
+     name = "archive_info"
+
+     def __init__(
+         self,
+         hash: Optional[str] = None,
+     ) -> None:
+         self.hash = hash
+
+     @classmethod
+     def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["ArchiveInfo"]:
+         if d is None:
+             return None
+         return cls(hash=_get(d, str, "hash"))
+
+     def _to_dict(self) -> Dict[str, Any]:
+         return _filter_none(hash=self.hash)
+
+
+ class DirInfo:
+     name = "dir_info"
+
+     def __init__(
+         self,
+         editable: bool = False,
+     ) -> None:
+         self.editable = editable
+
+     @classmethod
+     def _from_dict(cls, d: Optional[Dict[str, Any]]) -> Optional["DirInfo"]:
+         if d is None:
+             return None
+         return cls(editable=_get_required(d, bool, "editable", default=False))
+
+     def _to_dict(self) -> Dict[str, Any]:
+         return _filter_none(editable=self.editable or None)
+
+
+ InfoType = Union[ArchiveInfo, DirInfo, VcsInfo]
+
+
+ class DirectUrl:
+     def __init__(
+         self,
+         url: str,
+         info: InfoType,
+         subdirectory: Optional[str] = None,
+     ) -> None:
+         self.url = url
+         self.info = info
+         self.subdirectory = subdirectory
+
+     def _remove_auth_from_netloc(self, netloc: str) -> str:
+         if "@" not in netloc:
+             return netloc
+         user_pass, netloc_no_user_pass = netloc.split("@", 1)
+         if (
+             isinstance(self.info, VcsInfo)
+             and self.info.vcs == "git"
+             and user_pass == "git"
+         ):
+             return netloc
+         if ENV_VAR_RE.match(user_pass):
+             return netloc
+         return netloc_no_user_pass
+
+     @property
+     def redacted_url(self) -> str:
+         """url with user:password part removed unless it is formed with
+         environment variables as specified in PEP 610, or it is ``git``
+         in the case of a git URL.
+         """
+         purl = urllib.parse.urlsplit(self.url)
+         netloc = self._remove_auth_from_netloc(purl.netloc)
+         surl = urllib.parse.urlunsplit(
+             (purl.scheme, netloc, purl.path, purl.query, purl.fragment)
+         )
+         return surl
+
+     def validate(self) -> None:
+         self.from_dict(self.to_dict())
+
+     @classmethod
+     def from_dict(cls, d: Dict[str, Any]) -> "DirectUrl":
+         return DirectUrl(
+             url=_get_required(d, str, "url"),
+             subdirectory=_get(d, str, "subdirectory"),
+             info=_exactly_one_of(
+                 [
+                     ArchiveInfo._from_dict(_get(d, dict, "archive_info")),
+                     DirInfo._from_dict(_get(d, dict, "dir_info")),
+                     VcsInfo._from_dict(_get(d, dict, "vcs_info")),
+                 ]
+             ),
+         )
+
+     def to_dict(self) -> Dict[str, Any]:
+         res = _filter_none(
+             url=self.redacted_url,
+             subdirectory=self.subdirectory,
+         )
+         res[self.info.name] = self.info._to_dict()
+         return res
+
+     @classmethod
+     def from_json(cls, s: str) -> "DirectUrl":
+         return cls.from_dict(json.loads(s))
+
+     def to_json(self) -> str:
+         return json.dumps(self.to_dict(), sort_keys=True)
+
+     def is_local_editable(self) -> bool:
+         return isinstance(self.info, DirInfo) and self.info.editable
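
A short usage sketch of the PEP 610 model above (illustrative only; the URL and commit id are hypothetical), showing credential redaction and the JSON round trip:

    from pip._internal.models.direct_url import DirectUrl, VcsInfo

    direct_url = DirectUrl(
        url="https://user:secret@github.com/pypa/pip.git",      # hypothetical
        info=VcsInfo(vcs="git", commit_id="0123456789abcdef"),  # hypothetical
    )
    print(direct_url.redacted_url)  # https://github.com/pypa/pip.git (user:secret stripped)
    print(direct_url.to_json())     # {"url": ..., "vcs_info": {"commit_id": ..., "vcs": "git"}}
    direct_url.validate()           # round-trips through to_dict()/from_dict()
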
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/format_control.py ADDED
@@ -0,0 +1,80 @@
+ from typing import FrozenSet, Optional, Set
+
+ from pip._vendor.packaging.utils import canonicalize_name
+
+ from pip._internal.exceptions import CommandError
+
+
+ class FormatControl:
+     """Helper for managing formats from which a package can be installed."""
+
+     __slots__ = ["no_binary", "only_binary"]
+
+     def __init__(
+         self,
+         no_binary: Optional[Set[str]] = None,
+         only_binary: Optional[Set[str]] = None,
+     ) -> None:
+         if no_binary is None:
+             no_binary = set()
+         if only_binary is None:
+             only_binary = set()
+
+         self.no_binary = no_binary
+         self.only_binary = only_binary
+
+     def __eq__(self, other: object) -> bool:
+         if not isinstance(other, self.__class__):
+             return NotImplemented
+
+         if self.__slots__ != other.__slots__:
+             return False
+
+         return all(getattr(self, k) == getattr(other, k) for k in self.__slots__)
+
+     def __repr__(self) -> str:
+         return "{}({}, {})".format(
+             self.__class__.__name__, self.no_binary, self.only_binary
+         )
+
+     @staticmethod
+     def handle_mutual_excludes(value: str, target: Set[str], other: Set[str]) -> None:
+         if value.startswith("-"):
+             raise CommandError(
+                 "--no-binary / --only-binary option requires 1 argument."
+             )
+         new = value.split(",")
+         while ":all:" in new:
+             other.clear()
+             target.clear()
+             target.add(":all:")
+             del new[: new.index(":all:") + 1]
+             # Without a none, we want to discard everything as :all: covers it
+             if ":none:" not in new:
+                 return
+         for name in new:
+             if name == ":none:":
+                 target.clear()
+                 continue
+             name = canonicalize_name(name)
+             other.discard(name)
+             target.add(name)
+
+     def get_allowed_formats(self, canonical_name: str) -> FrozenSet[str]:
+         result = {"binary", "source"}
+         if canonical_name in self.only_binary:
+             result.discard("source")
+         elif canonical_name in self.no_binary:
+             result.discard("binary")
+         elif ":all:" in self.only_binary:
+             result.discard("source")
+         elif ":all:" in self.no_binary:
+             result.discard("binary")
+         return frozenset(result)
+
+     def disallow_binaries(self) -> None:
+         self.handle_mutual_excludes(
+             ":all:",
+             self.no_binary,
+             self.only_binary,
+         )
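
A brief illustrative sketch (not from the commit): FormatControl mirrors pip's --no-binary / --only-binary options, and disallow_binaries() is the programmatic equivalent of --no-binary :all:.

    from pip._internal.models.format_control import FormatControl

    fc = FormatControl()
    fc.disallow_binaries()                     # same effect as --no-binary :all:
    print(fc.no_binary)                        # {':all:'}
    print(fc.get_allowed_formats("requests"))  # frozenset({'source'})
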
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/index.py ADDED
@@ -0,0 +1,28 @@
+ import urllib.parse
+
+
+ class PackageIndex:
+     """Represents a Package Index and provides easier access to endpoints"""
+
+     __slots__ = ["url", "netloc", "simple_url", "pypi_url", "file_storage_domain"]
+
+     def __init__(self, url: str, file_storage_domain: str) -> None:
+         super().__init__()
+         self.url = url
+         self.netloc = urllib.parse.urlsplit(url).netloc
+         self.simple_url = self._url_for_path("simple")
+         self.pypi_url = self._url_for_path("pypi")
+
+         # This is part of a temporary hack used to block installs of PyPI
+         # packages which depend on external urls only necessary until PyPI can
+         # block such packages themselves
+         self.file_storage_domain = file_storage_domain
+
+     def _url_for_path(self, path: str) -> str:
+         return urllib.parse.urljoin(self.url, path)
+
+
+ PyPI = PackageIndex("https://pypi.org/", file_storage_domain="files.pythonhosted.org")
+ TestPyPI = PackageIndex(
+     "https://test.pypi.org/", file_storage_domain="test-files.pythonhosted.org"
+ )
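
A brief illustrative sketch (not from the commit): the module-level PyPI and TestPyPI objects expose the derived endpoint URLs.

    from pip._internal.models.index import PyPI, TestPyPI

    print(PyPI.netloc)        # pypi.org
    print(PyPI.simple_url)    # https://pypi.org/simple
    print(TestPyPI.pypi_url)  # https://test.pypi.org/pypi
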
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/link.py ADDED
@@ -0,0 +1,288 @@
+ import functools
+ import logging
+ import os
+ import posixpath
+ import re
+ import urllib.parse
+ from typing import TYPE_CHECKING, Dict, List, NamedTuple, Optional, Tuple, Union
+
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
+ from pip._internal.utils.hashes import Hashes
+ from pip._internal.utils.misc import (
+     redact_auth_from_url,
+     split_auth_from_netloc,
+     splitext,
+ )
+ from pip._internal.utils.models import KeyBasedCompareMixin
+ from pip._internal.utils.urls import path_to_url, url_to_path
+
+ if TYPE_CHECKING:
+     from pip._internal.index.collector import HTMLPage
+
+ logger = logging.getLogger(__name__)
+
+
+ _SUPPORTED_HASHES = ("sha1", "sha224", "sha384", "sha256", "sha512", "md5")
+
+
+ class Link(KeyBasedCompareMixin):
+     """Represents a parsed link from a Package Index's simple URL"""
+
+     __slots__ = [
+         "_parsed_url",
+         "_url",
+         "comes_from",
+         "requires_python",
+         "yanked_reason",
+         "cache_link_parsing",
+     ]
+
+     def __init__(
+         self,
+         url: str,
+         comes_from: Optional[Union[str, "HTMLPage"]] = None,
+         requires_python: Optional[str] = None,
+         yanked_reason: Optional[str] = None,
+         cache_link_parsing: bool = True,
+     ) -> None:
+         """
+         :param url: url of the resource pointed to (href of the link)
+         :param comes_from: instance of HTMLPage where the link was found,
+             or string.
+         :param requires_python: String containing the `Requires-Python`
+             metadata field, specified in PEP 345. This may be specified by
+             a data-requires-python attribute in the HTML link tag, as
+             described in PEP 503.
+         :param yanked_reason: the reason the file has been yanked, if the
+             file has been yanked, or None if the file hasn't been yanked.
+             This is the value of the "data-yanked" attribute, if present, in
+             a simple repository HTML link. If the file has been yanked but
+             no reason was provided, this should be the empty string. See
+             PEP 592 for more information and the specification.
+         :param cache_link_parsing: A flag that is used elsewhere to determine
+                                    whether resources retrieved from this link
+                                    should be cached. PyPI index urls should
+                                    generally have this set to False, for
+                                    example.
+         """
+
+         # url can be a UNC windows share
+         if url.startswith("\\\\"):
+             url = path_to_url(url)
+
+         self._parsed_url = urllib.parse.urlsplit(url)
+         # Store the url as a private attribute to prevent accidentally
+         # trying to set a new value.
+         self._url = url
+
+         self.comes_from = comes_from
+         self.requires_python = requires_python if requires_python else None
+         self.yanked_reason = yanked_reason
+
+         super().__init__(key=url, defining_class=Link)
+
+         self.cache_link_parsing = cache_link_parsing
+
+     def __str__(self) -> str:
+         if self.requires_python:
+             rp = f" (requires-python:{self.requires_python})"
+         else:
+             rp = ""
+         if self.comes_from:
+             return "{} (from {}){}".format(
+                 redact_auth_from_url(self._url), self.comes_from, rp
+             )
+         else:
+             return redact_auth_from_url(str(self._url))
+
+     def __repr__(self) -> str:
+         return f"<Link {self}>"
+
+     @property
+     def url(self) -> str:
+         return self._url
+
+     @property
+     def filename(self) -> str:
+         path = self.path.rstrip("/")
+         name = posixpath.basename(path)
+         if not name:
+             # Make sure we don't leak auth information if the netloc
+             # includes a username and password.
+             netloc, user_pass = split_auth_from_netloc(self.netloc)
+             return netloc
+
+         name = urllib.parse.unquote(name)
+         assert name, f"URL {self._url!r} produced no filename"
+         return name
+
+     @property
+     def file_path(self) -> str:
+         return url_to_path(self.url)
+
+     @property
+     def scheme(self) -> str:
+         return self._parsed_url.scheme
+
+     @property
+     def netloc(self) -> str:
+         """
+         This can contain auth information.
+         """
+         return self._parsed_url.netloc
+
+     @property
+     def path(self) -> str:
+         return urllib.parse.unquote(self._parsed_url.path)
+
+     def splitext(self) -> Tuple[str, str]:
+         return splitext(posixpath.basename(self.path.rstrip("/")))
+
+     @property
+     def ext(self) -> str:
+         return self.splitext()[1]
+
+     @property
+     def url_without_fragment(self) -> str:
+         scheme, netloc, path, query, fragment = self._parsed_url
+         return urllib.parse.urlunsplit((scheme, netloc, path, query, ""))
+
+     _egg_fragment_re = re.compile(r"[#&]egg=([^&]*)")
+
+     @property
+     def egg_fragment(self) -> Optional[str]:
+         match = self._egg_fragment_re.search(self._url)
+         if not match:
+             return None
+         return match.group(1)
+
+     _subdirectory_fragment_re = re.compile(r"[#&]subdirectory=([^&]*)")
+
+     @property
+     def subdirectory_fragment(self) -> Optional[str]:
+         match = self._subdirectory_fragment_re.search(self._url)
+         if not match:
+             return None
+         return match.group(1)
+
+     _hash_re = re.compile(
+         r"({choices})=([a-f0-9]+)".format(choices="|".join(_SUPPORTED_HASHES))
+     )
+
+     @property
+     def hash(self) -> Optional[str]:
+         match = self._hash_re.search(self._url)
+         if match:
+             return match.group(2)
+         return None
+
+     @property
+     def hash_name(self) -> Optional[str]:
+         match = self._hash_re.search(self._url)
+         if match:
+             return match.group(1)
+         return None
+
+     @property
+     def show_url(self) -> str:
+         return posixpath.basename(self._url.split("#", 1)[0].split("?", 1)[0])
+
+     @property
+     def is_file(self) -> bool:
+         return self.scheme == "file"
+
+     def is_existing_dir(self) -> bool:
+         return self.is_file and os.path.isdir(self.file_path)
+
+     @property
+     def is_wheel(self) -> bool:
+         return self.ext == WHEEL_EXTENSION
+
+     @property
+     def is_vcs(self) -> bool:
+         from pip._internal.vcs import vcs
+
+         return self.scheme in vcs.all_schemes
+
+     @property
+     def is_yanked(self) -> bool:
+         return self.yanked_reason is not None
+
+     @property
+     def has_hash(self) -> bool:
+         return self.hash_name is not None
+
+     def is_hash_allowed(self, hashes: Optional[Hashes]) -> bool:
+         """
+         Return True if the link has a hash and it is allowed.
+         """
+         if hashes is None or not self.has_hash:
+             return False
+         # Assert non-None so mypy knows self.hash_name and self.hash are str.
+         assert self.hash_name is not None
+         assert self.hash is not None
+
+         return hashes.is_hash_allowed(self.hash_name, hex_digest=self.hash)
+
+
+ class _CleanResult(NamedTuple):
+     """Convert link for equivalency check.
+
+     This is used in the resolver to check whether two URL-specified requirements
+     likely point to the same distribution and can be considered equivalent. This
+     equivalency logic avoids comparing URLs literally, which can be too strict
+     (e.g. "a=1&b=2" vs "b=2&a=1") and produce conflicts unexpecting to users.
+
+     Currently this does three things:
+
+     1. Drop the basic auth part. This is technically wrong since a server can
+        serve different content based on auth, but if it does that, it is even
+        impossible to guarantee two URLs without auth are equivalent, since
+        the user can input different auth information when prompted. So the
+        practical solution is to assume the auth doesn't affect the response.
+     2. Parse the query to avoid the ordering issue. Note that ordering under the
+        same key in the query are NOT cleaned; i.e. "a=1&a=2" and "a=2&a=1" are
+        still considered different.
+     3. Explicitly drop most of the fragment part, except ``subdirectory=`` and
+        hash values, since it should have no impact the downloaded content. Note
+        that this drops the "egg=" part historically used to denote the requested
+        project (and extras), which is wrong in the strictest sense, but too many
+        people are supplying it inconsistently to cause superfluous resolution
+        conflicts, so we choose to also ignore them.
+     """
+
+     parsed: urllib.parse.SplitResult
+     query: Dict[str, List[str]]
+     subdirectory: str
+     hashes: Dict[str, str]
+
+
+ def _clean_link(link: Link) -> _CleanResult:
+     parsed = link._parsed_url
+     netloc = parsed.netloc.rsplit("@", 1)[-1]
+     # According to RFC 8089, an empty host in file: means localhost.
+     if parsed.scheme == "file" and not netloc:
+         netloc = "localhost"
+     fragment = urllib.parse.parse_qs(parsed.fragment)
+     if "egg" in fragment:
+         logger.debug("Ignoring egg= fragment in %s", link)
+     try:
+         # If there are multiple subdirectory values, use the first one.
+         # This matches the behavior of Link.subdirectory_fragment.
+         subdirectory = fragment["subdirectory"][0]
+     except (IndexError, KeyError):
+         subdirectory = ""
+     # If there are multiple hash values under the same algorithm, use the
+     # first one. This matches the behavior of Link.hash_value.
+     hashes = {k: fragment[k][0] for k in _SUPPORTED_HASHES if k in fragment}
+     return _CleanResult(
+         parsed=parsed._replace(netloc=netloc, query="", fragment=""),
+         query=urllib.parse.parse_qs(parsed.query),
+         subdirectory=subdirectory,
+         hashes=hashes,
+     )
+
+
+ @functools.lru_cache(maxsize=None)
+ def links_equivalent(link1: Link, link2: Link) -> bool:
+     return _clean_link(link1) == _clean_link(link2)
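
A short illustrative sketch (not from the commit) of how Link exposes the filename and hash fragment; the URL is hypothetical.

    from pip._internal.models.link import Link, links_equivalent

    url = "https://example.com/packages/pip-22.0.4-py3-none-any.whl#sha256=0123abcd"
    link = Link(url)
    print(link.filename)              # pip-22.0.4-py3-none-any.whl
    print(link.is_wheel)              # True
    print(link.hash_name, link.hash)  # sha256 0123abcd
    print(links_equivalent(link, Link(url)))  # True
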
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/scheme.py ADDED
@@ -0,0 +1,31 @@
+ """
+ For types associated with installation schemes.
+
+ For a general overview of available schemes and their context, see
+ https://docs.python.org/3/install/index.html#alternate-installation.
+ """
+
+
+ SCHEME_KEYS = ["platlib", "purelib", "headers", "scripts", "data"]
+
+
+ class Scheme:
+     """A Scheme holds paths which are used as the base directories for
+     artifacts associated with a Python package.
+     """
+
+     __slots__ = SCHEME_KEYS
+
+     def __init__(
+         self,
+         platlib: str,
+         purelib: str,
+         headers: str,
+         scripts: str,
+         data: str,
+     ) -> None:
+         self.platlib = platlib
+         self.purelib = purelib
+         self.headers = headers
+         self.scripts = scripts
+         self.data = data
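
A tiny illustrative sketch (not from the commit): Scheme is a plain record of the five install directories. The paths below are hypothetical.

    from pip._internal.models.scheme import Scheme

    scheme = Scheme(
        platlib="/venv/lib/python3.10/site-packages",
        purelib="/venv/lib/python3.10/site-packages",
        headers="/venv/include/demo",
        scripts="/venv/bin",
        data="/venv",
    )
    print(scheme.scripts)  # /venv/bin
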
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/search_scope.py ADDED
@@ -0,0 +1,129 @@
+ import itertools
+ import logging
+ import os
+ import posixpath
+ import urllib.parse
+ from typing import List
+
+ from pip._vendor.packaging.utils import canonicalize_name
+
+ from pip._internal.models.index import PyPI
+ from pip._internal.utils.compat import has_tls
+ from pip._internal.utils.misc import normalize_path, redact_auth_from_url
+
+ logger = logging.getLogger(__name__)
+
+
+ class SearchScope:
+
+     """
+     Encapsulates the locations that pip is configured to search.
+     """
+
+     __slots__ = ["find_links", "index_urls"]
+
+     @classmethod
+     def create(
+         cls,
+         find_links: List[str],
+         index_urls: List[str],
+     ) -> "SearchScope":
+         """
+         Create a SearchScope object after normalizing the `find_links`.
+         """
+         # Build find_links. If an argument starts with ~, it may be
+         # a local file relative to a home directory. So try normalizing
+         # it and if it exists, use the normalized version.
+         # This is deliberately conservative - it might be fine just to
+         # blindly normalize anything starting with a ~...
+         built_find_links: List[str] = []
+         for link in find_links:
+             if link.startswith("~"):
+                 new_link = normalize_path(link)
+                 if os.path.exists(new_link):
+                     link = new_link
+             built_find_links.append(link)
+
+         # If we don't have TLS enabled, then WARN if anyplace we're looking
+         # relies on TLS.
+         if not has_tls():
+             for link in itertools.chain(index_urls, built_find_links):
+                 parsed = urllib.parse.urlparse(link)
+                 if parsed.scheme == "https":
+                     logger.warning(
+                         "pip is configured with locations that require "
+                         "TLS/SSL, however the ssl module in Python is not "
+                         "available."
+                     )
+                     break
+
+         return cls(
+             find_links=built_find_links,
+             index_urls=index_urls,
+         )
+
+     def __init__(
+         self,
+         find_links: List[str],
+         index_urls: List[str],
+     ) -> None:
+         self.find_links = find_links
+         self.index_urls = index_urls
+
+     def get_formatted_locations(self) -> str:
+         lines = []
+         redacted_index_urls = []
+         if self.index_urls and self.index_urls != [PyPI.simple_url]:
+             for url in self.index_urls:
+
+                 redacted_index_url = redact_auth_from_url(url)
+
+                 # Parse the URL
+                 purl = urllib.parse.urlsplit(redacted_index_url)
+
+                 # URL is generally invalid if scheme and netloc is missing
+                 # there are issues with Python and URL parsing, so this test
+                 # is a bit crude. See bpo-20271, bpo-23505. Python doesn't
+                 # always parse invalid URLs correctly - it should raise
+                 # exceptions for malformed URLs
+                 if not purl.scheme and not purl.netloc:
+                     logger.warning(
+                         'The index url "%s" seems invalid, please provide a scheme.',
+                         redacted_index_url,
+                     )
+
+                 redacted_index_urls.append(redacted_index_url)
+
+             lines.append(
+                 "Looking in indexes: {}".format(", ".join(redacted_index_urls))
+             )
+
+         if self.find_links:
+             lines.append(
+                 "Looking in links: {}".format(
+                     ", ".join(redact_auth_from_url(url) for url in self.find_links)
+                 )
+             )
+         return "\n".join(lines)
+
+     def get_index_urls_locations(self, project_name: str) -> List[str]:
+         """Returns the locations found via self.index_urls
+
+         Checks the url_name on the main (first in the list) index and
+         use this url_name to produce all locations
+         """
+
+         def mkurl_pypi_url(url: str) -> str:
+             loc = posixpath.join(
+                 url, urllib.parse.quote(canonicalize_name(project_name))
+             )
+             # For maximum compatibility with easy_install, ensure the path
+             # ends in a trailing slash. Although this isn't in the spec
+             # (and PyPI can handle it without the slash) some other index
+             # implementations might break if they relied on easy_install's
+             # behavior.
+             if not loc.endswith("/"):
+                 loc = loc + "/"
+             return loc
+
+         return [mkurl_pypi_url(url) for url in self.index_urls]
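
A brief illustrative sketch (not from the commit): the per-project index URL builder canonicalizes the project name and adds a trailing slash.

    from pip._internal.models.search_scope import SearchScope

    scope = SearchScope.create(find_links=[], index_urls=["https://pypi.org/simple"])
    print(scope.get_index_urls_locations("My_Project"))
    # ['https://pypi.org/simple/my-project/']
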
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/selection_prefs.py ADDED
@@ -0,0 +1,51 @@
+ from typing import Optional
+
+ from pip._internal.models.format_control import FormatControl
+
+
+ class SelectionPreferences:
+     """
+     Encapsulates the candidate selection preferences for downloading
+     and installing files.
+     """
+
+     __slots__ = [
+         "allow_yanked",
+         "allow_all_prereleases",
+         "format_control",
+         "prefer_binary",
+         "ignore_requires_python",
+     ]
+
+     # Don't include an allow_yanked default value to make sure each call
+     # site considers whether yanked releases are allowed. This also causes
+     # that decision to be made explicit in the calling code, which helps
+     # people when reading the code.
+     def __init__(
+         self,
+         allow_yanked: bool,
+         allow_all_prereleases: bool = False,
+         format_control: Optional[FormatControl] = None,
+         prefer_binary: bool = False,
+         ignore_requires_python: Optional[bool] = None,
+     ) -> None:
+         """Create a SelectionPreferences object.
+
+         :param allow_yanked: Whether files marked as yanked (in the sense
+             of PEP 592) are permitted to be candidates for install.
+         :param format_control: A FormatControl object or None. Used to control
+             the selection of source packages / binary packages when consulting
+             the index and links.
+         :param prefer_binary: Whether to prefer an old, but valid, binary
+             dist over a new source dist.
+         :param ignore_requires_python: Whether to ignore incompatible
+             "Requires-Python" values in links. Defaults to False.
+         """
+         if ignore_requires_python is None:
+             ignore_requires_python = False
+
+         self.allow_yanked = allow_yanked
+         self.allow_all_prereleases = allow_all_prereleases
+         self.format_control = format_control
+         self.prefer_binary = prefer_binary
+         self.ignore_requires_python = ignore_requires_python
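
A tiny illustrative sketch (not from the commit): allow_yanked deliberately has no default, so each call site must choose explicitly.

    from pip._internal.models.selection_prefs import SelectionPreferences

    prefs = SelectionPreferences(allow_yanked=False, prefer_binary=True)
    print(prefs.ignore_requires_python)  # False (None is normalized to False)
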
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/target_python.py ADDED
@@ -0,0 +1,110 @@
+ import sys
+ from typing import List, Optional, Tuple
+
+ from pip._vendor.packaging.tags import Tag
+
+ from pip._internal.utils.compatibility_tags import get_supported, version_info_to_nodot
+ from pip._internal.utils.misc import normalize_version_info
+
+
+ class TargetPython:
+
+     """
+     Encapsulates the properties of a Python interpreter one is targeting
+     for a package install, download, etc.
+     """
+
+     __slots__ = [
+         "_given_py_version_info",
+         "abis",
+         "implementation",
+         "platforms",
+         "py_version",
+         "py_version_info",
+         "_valid_tags",
+     ]
+
+     def __init__(
+         self,
+         platforms: Optional[List[str]] = None,
+         py_version_info: Optional[Tuple[int, ...]] = None,
+         abis: Optional[List[str]] = None,
+         implementation: Optional[str] = None,
+     ) -> None:
+         """
+         :param platforms: A list of strings or None. If None, searches for
+             packages that are supported by the current system. Otherwise, will
+             find packages that can be built on the platforms passed in. These
+             packages will only be downloaded for distribution: they will
+             not be built locally.
+         :param py_version_info: An optional tuple of ints representing the
+             Python version information to use (e.g. `sys.version_info[:3]`).
+             This can have length 1, 2, or 3 when provided.
+         :param abis: A list of strings or None. This is passed to
+             compatibility_tags.py's get_supported() function as is.
+         :param implementation: A string or None. This is passed to
+             compatibility_tags.py's get_supported() function as is.
+         """
+         # Store the given py_version_info for when we call get_supported().
+         self._given_py_version_info = py_version_info
+
+         if py_version_info is None:
+             py_version_info = sys.version_info[:3]
+         else:
+             py_version_info = normalize_version_info(py_version_info)
+
+         py_version = ".".join(map(str, py_version_info[:2]))
+
+         self.abis = abis
+         self.implementation = implementation
+         self.platforms = platforms
+         self.py_version = py_version
+         self.py_version_info = py_version_info
+
+         # This is used to cache the return value of get_tags().
+         self._valid_tags: Optional[List[Tag]] = None
+
+     def format_given(self) -> str:
+         """
+         Format the given, non-None attributes for display.
+         """
+         display_version = None
+         if self._given_py_version_info is not None:
+             display_version = ".".join(
+                 str(part) for part in self._given_py_version_info
+             )
+
+         key_values = [
+             ("platforms", self.platforms),
+             ("version_info", display_version),
+             ("abis", self.abis),
+             ("implementation", self.implementation),
+         ]
+         return " ".join(
+             f"{key}={value!r}" for key, value in key_values if value is not None
+         )
+
+     def get_tags(self) -> List[Tag]:
+         """
+         Return the supported PEP 425 tags to check wheel candidates against.
+
+         The tags are returned in order of preference (most preferred first).
+         """
+         if self._valid_tags is None:
+             # Pass versions=None if no py_version_info was given since
+             # versions=None uses special default logic.
+             py_version_info = self._given_py_version_info
+             if py_version_info is None:
+                 version = None
+             else:
+                 version = version_info_to_nodot(py_version_info)
+
+             tags = get_supported(
+                 version=version,
+                 platforms=self.platforms,
+                 abis=self.abis,
+                 impl=self.implementation,
+             )
+             self._valid_tags = tags
+
+         return self._valid_tags
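
A brief illustrative sketch (not from the commit): targeting a specific interpreter version. The exact tag list returned by get_tags() depends on the machine it runs on.

    from pip._internal.models.target_python import TargetPython

    tp = TargetPython(py_version_info=(3, 10))
    print(tp.py_version)      # 3.10
    print(tp.format_given())  # version_info='3.10'
    tags = tp.get_tags()      # PEP 425 tags, most preferred first (platform dependent)
    print(tags[0])
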
scripts/myenv/lib/python3.10/site-packages/pip/_internal/models/wheel.py ADDED
@@ -0,0 +1,89 @@
+ """Represents a wheel file and provides access to the various parts of the
+ name that have meaning.
+ """
+ import re
+ from typing import Dict, Iterable, List
+
+ from pip._vendor.packaging.tags import Tag
+
+ from pip._internal.exceptions import InvalidWheelFilename
+
+
+ class Wheel:
+     """A wheel file"""
+
+     wheel_file_re = re.compile(
+         r"""^(?P<namever>(?P<name>[^\s-]+?)-(?P<ver>[^\s-]*?))
+         ((-(?P<build>\d[^-]*?))?-(?P<pyver>[^\s-]+?)-(?P<abi>[^\s-]+?)-(?P<plat>[^\s-]+?)
+         \.whl|\.dist-info)$""",
+         re.VERBOSE,
+     )
+
+     def __init__(self, filename: str) -> None:
+         """
+         :raises InvalidWheelFilename: when the filename is invalid for a wheel
+         """
+         wheel_info = self.wheel_file_re.match(filename)
+         if not wheel_info:
+             raise InvalidWheelFilename(f"{filename} is not a valid wheel filename.")
+         self.filename = filename
+         self.name = wheel_info.group("name").replace("_", "-")
+         # we'll assume "_" means "-" due to wheel naming scheme
+         # (https://github.com/pypa/pip/issues/1150)
+         self.version = wheel_info.group("ver").replace("_", "-")
+         self.build_tag = wheel_info.group("build")
+         self.pyversions = wheel_info.group("pyver").split(".")
+         self.abis = wheel_info.group("abi").split(".")
+         self.plats = wheel_info.group("plat").split(".")
+
+         # All the tag combinations from this file
+         self.file_tags = {
+             Tag(x, y, z) for x in self.pyversions for y in self.abis for z in self.plats
+         }
+
+     def get_formatted_file_tags(self) -> List[str]:
+         """Return the wheel's tags as a sorted list of strings."""
+         return sorted(str(tag) for tag in self.file_tags)
+
+     def support_index_min(self, tags: List[Tag]) -> int:
+         """Return the lowest index that one of the wheel's file_tag combinations
+         achieves in the given list of supported tags.
+
+         For example, if there are 8 supported tags and one of the file tags
+         is first in the list, then return 0.
+
+         :param tags: the PEP 425 tags to check the wheel against, in order
+             with most preferred first.
+
+         :raises ValueError: If none of the wheel's file tags match one of
+             the supported tags.
+         """
+         return min(tags.index(tag) for tag in self.file_tags if tag in tags)
+
+     def find_most_preferred_tag(
+         self, tags: List[Tag], tag_to_priority: Dict[Tag, int]
+     ) -> int:
+         """Return the priority of the most preferred tag that one of the wheel's file
+         tag combinations achieves in the given list of supported tags using the given
+         tag_to_priority mapping, where lower priorities are more-preferred.
+
+         This is used in place of support_index_min in some cases in order to avoid
+         an expensive linear scan of a large list of tags.
+
+         :param tags: the PEP 425 tags to check the wheel against.
+         :param tag_to_priority: a mapping from tag to priority of that tag, where
+             lower is more preferred.
+
+         :raises ValueError: If none of the wheel's file tags match one of
+             the supported tags.
+         """
+         return min(
+             tag_to_priority[tag] for tag in self.file_tags if tag in tag_to_priority
+         )
+
+     def supported(self, tags: Iterable[Tag]) -> bool:
+         """Return whether the wheel is compatible with one of the given tags.
+
+         :param tags: the PEP 425 tags to check the wheel against.
+         """
+         return not self.file_tags.isdisjoint(tags)
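
A short illustrative sketch (not from the commit): parsing a wheel filename with the regular expression above and checking tag compatibility.

    from pip._vendor.packaging.tags import Tag

    from pip._internal.models.wheel import Wheel

    w = Wheel("pip-22.0.4-py3-none-any.whl")
    print(w.name, w.version)                         # pip 22.0.4
    print(w.get_formatted_file_tags())               # ['py3-none-any']
    print(w.supported([Tag("py3", "none", "any")]))  # True
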
scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (199 Bytes).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/check.cpython-310.pyc ADDED
Binary file (4.01 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/__pycache__/prepare.cpython-310.pyc ADDED
Binary file (14.9 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-310.pyc ADDED
Binary file (2.38 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/operations/build/__pycache__/wheel_editable.cpython-310.pyc ADDED
Binary file (1.45 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__init__.py ADDED
File without changes
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc ADDED
Binary file (1.51 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc ADDED
Binary file (1.3 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-310.pyc ADDED
Binary file (1.67 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/logging.cpython-310.pyc ADDED
Binary file (9.63 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/setuptools_build.cpython-310.pyc ADDED
Binary file (4.6 kB).
 
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/_log.py ADDED
@@ -0,0 +1,38 @@
+ """Customize logging
+
+ Defines custom logger class for the `logger.verbose(...)` method.
+
+ init_logging() must be called before any other modules that call logging.getLogger.
+ """
+
+ import logging
+ from typing import Any, cast
+
+ # custom log level for `--verbose` output
+ # between DEBUG and INFO
+ VERBOSE = 15
+
+
+ class VerboseLogger(logging.Logger):
+     """Custom Logger, defining a verbose log-level
+
+     VERBOSE is between INFO and DEBUG.
+     """
+
+     def verbose(self, msg: str, *args: Any, **kwargs: Any) -> None:
+         return self.log(VERBOSE, msg, *args, **kwargs)
+
+
+ def getLogger(name: str) -> VerboseLogger:
+     """logging.getLogger, but ensures our VerboseLogger class is returned"""
+     return cast(VerboseLogger, logging.getLogger(name))
+
+
+ def init_logging() -> None:
+     """Register our VerboseLogger and VERBOSE log level.
+
+     Should be called before any calls to getLogger(),
+     i.e. in pip._internal.__init__
+     """
+     logging.setLoggerClass(VerboseLogger)
+     logging.addLevelName(VERBOSE, "VERBOSE")
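A short sketch of how the custom VERBOSE level is used, assuming the module path above; the logger name is arbitrary and the root handler is configured only so the example prints something:

import logging

from pip._internal.utils._log import VERBOSE, getLogger, init_logging

init_logging()                      # register VerboseLogger before creating loggers
logging.basicConfig(level=VERBOSE)  # a root handler so VERBOSE records are emitted

logger = getLogger("example")       # arbitrary logger name
logger.verbose("shown at the custom VERBOSE level (15)")
logger.debug("suppressed: DEBUG (10) is below the configured level")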
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/appdirs.py ADDED
@@ -0,0 +1,52 @@
+ """
+ This code wraps the vendored appdirs module so that the return values are
+ compatible with the current pip code base.
+
+ The intention is to rewrite current usages gradually, keeping the tests passing,
+ and eventually drop this after all usages are changed.
+ """
+
+ import os
+ import sys
+ from typing import List
+
+ from pip._vendor import platformdirs as _appdirs
+
+
+ def user_cache_dir(appname: str) -> str:
+     return _appdirs.user_cache_dir(appname, appauthor=False)
+
+
+ def _macos_user_config_dir(appname: str, roaming: bool = True) -> str:
+     # Use ~/Application Support/pip, if the directory exists.
+     path = _appdirs.user_data_dir(appname, appauthor=False, roaming=roaming)
+     if os.path.isdir(path):
+         return path
+
+     # Use a Linux-like ~/.config/pip, by default.
+     linux_like_path = "~/.config/"
+     if appname:
+         linux_like_path = os.path.join(linux_like_path, appname)
+
+     return os.path.expanduser(linux_like_path)
+
+
+ def user_config_dir(appname: str, roaming: bool = True) -> str:
+     if sys.platform == "darwin":
+         return _macos_user_config_dir(appname, roaming)
+
+     return _appdirs.user_config_dir(appname, appauthor=False, roaming=roaming)
+
+
+ # for the discussion regarding site_config_dir locations
+ # see <https://github.com/pypa/pip/issues/1733>
+ def site_config_dirs(appname: str) -> List[str]:
+     if sys.platform == "darwin":
+         return [_appdirs.site_data_dir(appname, appauthor=False, multipath=True)]
+
+     dirval = _appdirs.site_config_dir(appname, appauthor=False, multipath=True)
+     if sys.platform == "win32":
+         return [dirval]
+
+     # Unix-y system. Look in /etc as well.
+     return dirval.split(os.pathsep) + ["/etc"]
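A quick sketch of the wrappers above; the printed paths are typical Linux defaults, not guaranteed values:

from pip._internal.utils import appdirs

print(appdirs.user_cache_dir("pip"))    # e.g. ~/.cache/pip
print(appdirs.user_config_dir("pip"))   # e.g. ~/.config/pip
print(appdirs.site_config_dirs("pip"))  # system-wide config dirs, e.g. [..., '/etc']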
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/compat.py ADDED
@@ -0,0 +1,63 @@
+ """Stuff that differs in different Python versions and platform
+ distributions."""
+
+ import logging
+ import os
+ import sys
+
+ __all__ = ["get_path_uid", "stdlib_pkgs", "WINDOWS"]
+
+
+ logger = logging.getLogger(__name__)
+
+
+ def has_tls() -> bool:
+     try:
+         import _ssl  # noqa: F401 # ignore unused
+
+         return True
+     except ImportError:
+         pass
+
+     from pip._vendor.urllib3.util import IS_PYOPENSSL
+
+     return IS_PYOPENSSL
+
+
+ def get_path_uid(path: str) -> int:
+     """
+     Return path's uid.
+
+     Does not follow symlinks:
+         https://github.com/pypa/pip/pull/935#discussion_r5307003
+
+     Placed this function in compat due to differences on AIX and
+     Jython, that should eventually go away.
+
+     :raises OSError: When path is a symlink or can't be read.
+     """
+     if hasattr(os, "O_NOFOLLOW"):
+         fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
+         file_uid = os.fstat(fd).st_uid
+         os.close(fd)
+     else:  # AIX and Jython
+         # WARNING: time of check vulnerability, but best we can do w/o NOFOLLOW
+         if not os.path.islink(path):
+             # older versions of Jython don't have `os.fstat`
+             file_uid = os.stat(path).st_uid
+         else:
+             # raise OSError for parity with os.O_NOFOLLOW above
+             raise OSError(f"{path} is a symlink; Will not return uid for symlinks")
+     return file_uid
+
+
+ # packages in the stdlib that may have installation metadata, but should not be
+ # considered 'installed'. this theoretically could be determined based on
+ # dist.location (py27:`sysconfig.get_paths()['stdlib']`,
+ # py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may
+ # make this ineffective, so hard-coding
+ stdlib_pkgs = {"python", "wsgiref", "argparse"}
+
+
+ # windows detection, covers cpython and ironpython
+ WINDOWS = sys.platform.startswith("win") or (sys.platform == "cli" and os.name == "nt")
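A small sketch exercising the helpers above; the uid comparison is skipped on Windows because os.getuid() does not exist there:

import os

from pip._internal.utils.compat import WINDOWS, get_path_uid, has_tls

print("running on Windows:", WINDOWS)
print("TLS support available:", has_tls())

if not WINDOWS:
    # Check whether the current user owns a path, without following symlinks.
    home = os.path.expanduser("~")
    print(get_path_uid(home) == os.getuid())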
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/compatibility_tags.py ADDED
@@ -0,0 +1,165 @@
+ """Generate and work with PEP 425 Compatibility Tags.
+ """
+
+ import re
+ from typing import List, Optional, Tuple
+
+ from pip._vendor.packaging.tags import (
+     PythonVersion,
+     Tag,
+     compatible_tags,
+     cpython_tags,
+     generic_tags,
+     interpreter_name,
+     interpreter_version,
+     mac_platforms,
+ )
+
+ _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)")
+
+
+ def version_info_to_nodot(version_info: Tuple[int, ...]) -> str:
+     # Only use up to the first two numbers.
+     return "".join(map(str, version_info[:2]))
+
+
+ def _mac_platforms(arch: str) -> List[str]:
+     match = _osx_arch_pat.match(arch)
+     if match:
+         name, major, minor, actual_arch = match.groups()
+         mac_version = (int(major), int(minor))
+         arches = [
+             # Since we have always only checked that the platform starts
+             # with "macosx", for backwards-compatibility we extract the
+             # actual prefix provided by the user in case they provided
+             # something like "macosxcustom_". It may be good to remove
+             # this as undocumented or deprecate it in the future.
+             "{}_{}".format(name, arch[len("macosx_") :])
+             for arch in mac_platforms(mac_version, actual_arch)
+         ]
+     else:
+         # arch pattern didn't match (?!)
+         arches = [arch]
+     return arches
+
+
+ def _custom_manylinux_platforms(arch: str) -> List[str]:
+     arches = [arch]
+     arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+     if arch_prefix == "manylinux2014":
+         # manylinux1/manylinux2010 wheels run on most manylinux2014 systems
+         # with the exception of wheels depending on ncurses. PEP 599 states
+         # manylinux1/manylinux2010 wheels should be considered
+         # manylinux2014 wheels:
+         # https://www.python.org/dev/peps/pep-0599/#backwards-compatibility-with-manylinux2010-wheels
+         if arch_suffix in {"i686", "x86_64"}:
+             arches.append("manylinux2010" + arch_sep + arch_suffix)
+             arches.append("manylinux1" + arch_sep + arch_suffix)
+     elif arch_prefix == "manylinux2010":
+         # manylinux1 wheels run on most manylinux2010 systems with the
+         # exception of wheels depending on ncurses. PEP 571 states
+         # manylinux1 wheels should be considered manylinux2010 wheels:
+         # https://www.python.org/dev/peps/pep-0571/#backwards-compatibility-with-manylinux1-wheels
+         arches.append("manylinux1" + arch_sep + arch_suffix)
+     return arches
+
+
+ def _get_custom_platforms(arch: str) -> List[str]:
+     arch_prefix, arch_sep, arch_suffix = arch.partition("_")
+     if arch.startswith("macosx"):
+         arches = _mac_platforms(arch)
+     elif arch_prefix in ["manylinux2014", "manylinux2010"]:
+         arches = _custom_manylinux_platforms(arch)
+     else:
+         arches = [arch]
+     return arches
+
+
+ def _expand_allowed_platforms(platforms: Optional[List[str]]) -> Optional[List[str]]:
+     if not platforms:
+         return None
+
+     seen = set()
+     result = []
+
+     for p in platforms:
+         if p in seen:
+             continue
+         additions = [c for c in _get_custom_platforms(p) if c not in seen]
+         seen.update(additions)
+         result.extend(additions)
+
+     return result
+
+
+ def _get_python_version(version: str) -> PythonVersion:
+     if len(version) > 1:
+         return int(version[0]), int(version[1:])
+     else:
+         return (int(version[0]),)
+
+
+ def _get_custom_interpreter(
+     implementation: Optional[str] = None, version: Optional[str] = None
+ ) -> str:
+     if implementation is None:
+         implementation = interpreter_name()
+     if version is None:
+         version = interpreter_version()
+     return f"{implementation}{version}"
+
+
+ def get_supported(
+     version: Optional[str] = None,
+     platforms: Optional[List[str]] = None,
+     impl: Optional[str] = None,
+     abis: Optional[List[str]] = None,
+ ) -> List[Tag]:
+     """Return a list of supported tags for the given version, platform,
+     implementation and ABI constraints.
+
+     :param version: a string version, of the form "33" or "32",
+         or None. The version will be assumed to support our ABI.
+     :param platforms: specify a list of platforms you want valid
+         tags for, or None. If None, use the local system platform.
+     :param impl: specify the exact implementation you want valid
+         tags for, or None. If None, use the local interpreter impl.
+     :param abis: specify a list of abis you want valid
+         tags for, or None. If None, use the local interpreter abi.
+     """
+     supported: List[Tag] = []
+
+     python_version: Optional[PythonVersion] = None
+     if version is not None:
+         python_version = _get_python_version(version)
+
+     interpreter = _get_custom_interpreter(impl, version)
+
+     platforms = _expand_allowed_platforms(platforms)
+
+     is_cpython = (impl or interpreter_name()) == "cp"
+     if is_cpython:
+         supported.extend(
+             cpython_tags(
+                 python_version=python_version,
+                 abis=abis,
+                 platforms=platforms,
+             )
+         )
+     else:
+         supported.extend(
+             generic_tags(
+                 interpreter=interpreter,
+                 abis=abis,
+                 platforms=platforms,
+             )
+         )
+     supported.extend(
+         compatible_tags(
+             python_version=python_version,
+             interpreter=interpreter,
+             platforms=platforms,
+         )
+     )
+
+     return supported
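A sketch of calling get_supported() for the local interpreter and for a hypothetical cross-compilation target; the exact tags returned depend on the interpreter and platform:

from pip._internal.utils.compatibility_tags import get_supported

# Tags for the running interpreter, most preferred first.
local_tags = get_supported()
print(len(local_tags), local_tags[0])

# Tags for a hypothetical target: CPython 3.10 on manylinux2014_x86_64.
# The older manylinux2010/manylinux1 spellings are added automatically.
cross_tags = get_supported(version="310", impl="cp", platforms=["manylinux2014_x86_64"])
print(cross_tags[0])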
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/datetime.py ADDED
@@ -0,0 +1,11 @@
+ """For when pip wants to check the date or time.
+ """
+
+ import datetime
+
+
+ def today_is_later_than(year: int, month: int, day: int) -> bool:
+     today = datetime.date.today()
+     given = datetime.date(year, month, day)
+
+     return today > given
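A trivial usage sketch of the date check above:

from pip._internal.utils.datetime import today_is_later_than

print(today_is_later_than(2020, 1, 1))  # True once that date has passed
print(today_is_later_than(2999, 1, 1))  # False for a far-future date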
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/deprecation.py ADDED
@@ -0,0 +1,120 @@
+ """
+ A module that implements tooling to enable easy warnings about deprecations.
+ """
+
+ import logging
+ import warnings
+ from typing import Any, Optional, TextIO, Type, Union
+
+ from pip._vendor.packaging.version import parse
+
+ from pip import __version__ as current_version  # NOTE: tests patch this name.
+
+ DEPRECATION_MSG_PREFIX = "DEPRECATION: "
+
+
+ class PipDeprecationWarning(Warning):
+     pass
+
+
+ _original_showwarning: Any = None
+
+
+ # Warnings <-> Logging Integration
+ def _showwarning(
+     message: Union[Warning, str],
+     category: Type[Warning],
+     filename: str,
+     lineno: int,
+     file: Optional[TextIO] = None,
+     line: Optional[str] = None,
+ ) -> None:
+     if file is not None:
+         if _original_showwarning is not None:
+             _original_showwarning(message, category, filename, lineno, file, line)
+     elif issubclass(category, PipDeprecationWarning):
+         # We use a specially named logger which will handle all of the
+         # deprecation messages for pip.
+         logger = logging.getLogger("pip._internal.deprecations")
+         logger.warning(message)
+     else:
+         _original_showwarning(message, category, filename, lineno, file, line)
+
+
+ def install_warning_logger() -> None:
+     # Enable our Deprecation Warnings
+     warnings.simplefilter("default", PipDeprecationWarning, append=True)
+
+     global _original_showwarning
+
+     if _original_showwarning is None:
+         _original_showwarning = warnings.showwarning
+         warnings.showwarning = _showwarning
+
+
+ def deprecated(
+     *,
+     reason: str,
+     replacement: Optional[str],
+     gone_in: Optional[str],
+     feature_flag: Optional[str] = None,
+     issue: Optional[int] = None,
+ ) -> None:
+     """Helper to deprecate existing functionality.
+
+     reason:
+         Textual reason shown to the user about why this functionality has
+         been deprecated. Should be a complete sentence.
+     replacement:
+         Textual suggestion shown to the user about what alternative
+         functionality they can use.
+     gone_in:
+         The version of pip in which this functionality should be removed.
+         Raises an error if pip's current version is greater than or equal to
+         this.
+     feature_flag:
+         Command-line flag of the form --use-feature={feature_flag} for testing
+         upcoming functionality.
+     issue:
+         Issue number on the tracker that would serve as a useful place for
+         users to find related discussion and provide feedback.
+     """
+
+     # Determine whether or not the feature is already gone in this version.
+     is_gone = gone_in is not None and parse(current_version) >= parse(gone_in)
+
+     message_parts = [
+         (reason, f"{DEPRECATION_MSG_PREFIX}{{}}"),
+         (
+             gone_in,
+             "pip {} will enforce this behaviour change."
+             if not is_gone
+             else "Since pip {}, this is no longer supported.",
+         ),
+         (
+             replacement,
+             "A possible replacement is {}.",
+         ),
+         (
+             feature_flag,
+             "You can use the flag --use-feature={} to test the upcoming behaviour."
+             if not is_gone
+             else None,
+         ),
+         (
+             issue,
+             "Discussion can be found at https://github.com/pypa/pip/issues/{}",
+         ),
+     ]
+
+     message = " ".join(
+         format_str.format(value)
+         for value, format_str in message_parts
+         if format_str is not None and value is not None
+     )
+
+     # Raise as an error if this behaviour is deprecated.
+     if is_gone:
+         raise PipDeprecationWarning(message)
+
+     warnings.warn(message, category=PipDeprecationWarning, stacklevel=2)
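A sketch of issuing a deprecation through the helper above; the flag names and issue number are placeholders, and gone_in is set far in the future so the call warns rather than raises:

from pip._internal.utils.deprecation import (
    PipDeprecationWarning,
    deprecated,
    install_warning_logger,
)

install_warning_logger()  # route PipDeprecationWarning through pip's logger

try:
    deprecated(
        reason="The --example-flag option is deprecated.",  # placeholder text
        replacement="--new-example-flag",                   # placeholder flag
        gone_in="999.0",  # warns now; raises once the running pip >= 999.0
        issue=1,          # placeholder issue number
    )
except PipDeprecationWarning:
    # Raised instead of warned once the running pip reaches gone_in.
    pass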
scripts/myenv/lib/python3.10/site-packages/pip/_internal/utils/direct_url_helpers.py ADDED
@@ -0,0 +1,87 @@
+ from typing import Optional
+
+ from pip._internal.models.direct_url import ArchiveInfo, DirectUrl, DirInfo, VcsInfo
+ from pip._internal.models.link import Link
+ from pip._internal.utils.urls import path_to_url
+ from pip._internal.vcs import vcs
+
+
+ def direct_url_as_pep440_direct_reference(direct_url: DirectUrl, name: str) -> str:
+     """Convert a DirectUrl to a pip requirement string."""
+     direct_url.validate()  # if invalid, this is a pip bug
+     requirement = name + " @ "
+     fragments = []
+     if isinstance(direct_url.info, VcsInfo):
+         requirement += "{}+{}@{}".format(
+             direct_url.info.vcs, direct_url.url, direct_url.info.commit_id
+         )
+     elif isinstance(direct_url.info, ArchiveInfo):
+         requirement += direct_url.url
+         if direct_url.info.hash:
+             fragments.append(direct_url.info.hash)
+     else:
+         assert isinstance(direct_url.info, DirInfo)
+         requirement += direct_url.url
+     if direct_url.subdirectory:
+         fragments.append("subdirectory=" + direct_url.subdirectory)
+     if fragments:
+         requirement += "#" + "&".join(fragments)
+     return requirement
+
+
+ def direct_url_for_editable(source_dir: str) -> DirectUrl:
+     return DirectUrl(
+         url=path_to_url(source_dir),
+         info=DirInfo(editable=True),
+     )
+
+
+ def direct_url_from_link(
+     link: Link, source_dir: Optional[str] = None, link_is_in_wheel_cache: bool = False
+ ) -> DirectUrl:
+     if link.is_vcs:
+         vcs_backend = vcs.get_backend_for_scheme(link.scheme)
+         assert vcs_backend
+         url, requested_revision, _ = vcs_backend.get_url_rev_and_auth(
+             link.url_without_fragment
+         )
+         # For VCS links, we need to find out and add commit_id.
+         if link_is_in_wheel_cache:
+             # If the requested VCS link corresponds to a cached
+             # wheel, it means the requested revision was an
+             # immutable commit hash, otherwise it would not have
+             # been cached. In that case we don't have a source_dir
+             # with the VCS checkout.
+             assert requested_revision
+             commit_id = requested_revision
+         else:
+             # If the wheel was not in cache, it means we have
+             # had to checkout from VCS to build and we have a source_dir
+             # which we can inspect to find out the commit id.
+             assert source_dir
+             commit_id = vcs_backend.get_revision(source_dir)
+         return DirectUrl(
+             url=url,
+             info=VcsInfo(
+                 vcs=vcs_backend.name,
+                 commit_id=commit_id,
+                 requested_revision=requested_revision,
+             ),
+             subdirectory=link.subdirectory_fragment,
+         )
+     elif link.is_existing_dir():
+         return DirectUrl(
+             url=link.url_without_fragment,
+             info=DirInfo(),
+             subdirectory=link.subdirectory_fragment,
+         )
+     else:
+         hash = None
+         hash_name = link.hash_name
+         if hash_name:
+             hash = f"{hash_name}={link.hash}"
+         return DirectUrl(
+             url=link.url_without_fragment,
+             info=ArchiveInfo(hash=hash),
+             subdirectory=link.subdirectory_fragment,
+         )
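A sketch of building a DirectUrl by hand and rendering it as a PEP 440 direct reference; the URL, digest, and project name are made up:

from pip._internal.models.direct_url import ArchiveInfo, DirectUrl
from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference

direct_url = DirectUrl(
    url="https://example.com/example_pkg-1.0.tar.gz",
    info=ArchiveInfo(hash="sha256=" + "0" * 64),  # made-up digest
)
print(direct_url_as_pep440_direct_reference(direct_url, "example-pkg"))
# example-pkg @ https://example.com/example_pkg-1.0.tar.gz#sha256=000...0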