Add files using upload-large-folder tool
This view is limited to 50 files because the commit contains too many changes; see the raw diff for the full file list.
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/constructors.py +490 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/req_install.py +858 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py +633 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__init__.py +111 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__init__.py +6 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansitowin32.py +258 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/initialise.py +80 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/win32.py +152 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/distro.py +1386 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__init__.py +331 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__main__.py +46 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/android.py +119 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/api.py +156 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/macos.py +64 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/unix.py +181 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/version.py +4 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/windows.py +182 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py +154 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc +0 -0
scripts/myenv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc
ADDED
Binary file (28.1 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (2.6 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc
ADDED
Binary file (12.2 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc
ADDED
Binary file (13.5 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc
ADDED
Binary file (22.2 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc
ADDED
Binary file (5.84 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc
ADDED
Binary file (4.31 kB)

scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc
ADDED
Binary file (18.9 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/constructors.py
ADDED
@@ -0,0 +1,490 @@
"""Backing implementation for InstallRequirement's various constructors

The idea here is that these formed a major chunk of InstallRequirement's size
so, moving them and support code dedicated to them outside of that class
helps creates for better understandability for the rest of the code.

These are meant to be used elsewhere within pip to create instances of
InstallRequirement.
"""

import logging
import os
import re
from typing import Any, Dict, Optional, Set, Tuple, Union

from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
from pip._vendor.packaging.specifiers import Specifier

from pip._internal.exceptions import InstallationError
from pip._internal.models.index import PyPI, TestPyPI
from pip._internal.models.link import Link
from pip._internal.models.wheel import Wheel
from pip._internal.req.req_file import ParsedRequirement
from pip._internal.req.req_install import InstallRequirement
from pip._internal.utils.filetypes import is_archive_file
from pip._internal.utils.misc import is_installable_dir
from pip._internal.utils.packaging import get_requirement
from pip._internal.utils.urls import path_to_url
from pip._internal.vcs import is_url, vcs

__all__ = [
    "install_req_from_editable",
    "install_req_from_line",
    "parse_editable",
]

logger = logging.getLogger(__name__)
operators = Specifier._operators.keys()


def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
    m = re.match(r"^(.+)(\[[^\]]+\])$", path)
    extras = None
    if m:
        path_no_extras = m.group(1)
        extras = m.group(2)
    else:
        path_no_extras = path

    return path_no_extras, extras


def convert_extras(extras: Optional[str]) -> Set[str]:
    if not extras:
        return set()
    return get_requirement("placeholder" + extras.lower()).extras


def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
    """Parses an editable requirement into:
        - a requirement name
        - an URL
        - extras
        - editable options
    Accepted requirements:
        svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
        .[some_extra]
    """

    url = editable_req

    # If a file path is specified with extras, strip off the extras.
    url_no_extras, extras = _strip_extras(url)

    if os.path.isdir(url_no_extras):
        # Treating it as code that has already been checked out
        url_no_extras = path_to_url(url_no_extras)

    if url_no_extras.lower().startswith("file:"):
        package_name = Link(url_no_extras).egg_fragment
        if extras:
            return (
                package_name,
                url_no_extras,
                get_requirement("placeholder" + extras.lower()).extras,
            )
        else:
            return package_name, url_no_extras, set()

    for version_control in vcs:
        if url.lower().startswith(f"{version_control}:"):
            url = f"{version_control}+{url}"
            break

    link = Link(url)

    if not link.is_vcs:
        backends = ", ".join(vcs.all_schemes)
        raise InstallationError(
            f"{editable_req} is not a valid editable requirement. "
            f"It should either be a path to a local project or a VCS URL "
            f"(beginning with {backends})."
        )

    package_name = link.egg_fragment
    if not package_name:
        raise InstallationError(
            "Could not detect requirement name for '{}', please specify one "
            "with #egg=your_package_name".format(editable_req)
        )
    return package_name, url, set()


def check_first_requirement_in_file(filename: str) -> None:
    """Check if file is parsable as a requirements file.

    This is heavily based on ``pkg_resources.parse_requirements``, but
    simplified to just check the first meaningful line.

    :raises InvalidRequirement: If the first meaningful line cannot be parsed
        as an requirement.
    """
    with open(filename, encoding="utf-8", errors="ignore") as f:
        # Create a steppable iterator, so we can handle \-continuations.
        lines = (
            line
            for line in (line.strip() for line in f)
            if line and not line.startswith("#")  # Skip blank lines/comments.
        )

        for line in lines:
            # Drop comments -- a hash without a space may be in a URL.
            if " #" in line:
                line = line[: line.find(" #")]
            # If there is a line continuation, drop it, and append the next line.
            if line.endswith("\\"):
                line = line[:-2].strip() + next(lines, "")
            Requirement(line)
            return


def deduce_helpful_msg(req: str) -> str:
    """Returns helpful msg in case requirements file does not exist,
    or cannot be parsed.

    :params req: Requirements file path
    """
    if not os.path.exists(req):
        return f" File '{req}' does not exist."
    msg = " The path does exist. "
    # Try to parse and check if it is a requirements file.
    try:
        check_first_requirement_in_file(req)
    except InvalidRequirement:
        logger.debug("Cannot parse '%s' as requirements file", req)
    else:
        msg += (
            f"The argument you provided "
            f"({req}) appears to be a"
            f" requirements file. If that is the"
            f" case, use the '-r' flag to install"
            f" the packages specified within it."
        )
    return msg


class RequirementParts:
    def __init__(
        self,
        requirement: Optional[Requirement],
        link: Optional[Link],
        markers: Optional[Marker],
        extras: Set[str],
    ):
        self.requirement = requirement
        self.link = link
        self.markers = markers
        self.extras = extras


def parse_req_from_editable(editable_req: str) -> RequirementParts:
    name, url, extras_override = parse_editable(editable_req)

    if name is not None:
        try:
            req: Optional[Requirement] = Requirement(name)
        except InvalidRequirement:
            raise InstallationError(f"Invalid requirement: '{name}'")
    else:
        req = None

    link = Link(url)

    return RequirementParts(req, link, None, extras_override)


# ---- The actual constructors follow ----


def install_req_from_editable(
    editable_req: str,
    comes_from: Optional[Union[InstallRequirement, str]] = None,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    options: Optional[Dict[str, Any]] = None,
    constraint: bool = False,
    user_supplied: bool = False,
    permit_editable_wheels: bool = False,
) -> InstallRequirement:

    parts = parse_req_from_editable(editable_req)

    return InstallRequirement(
        parts.requirement,
        comes_from=comes_from,
        user_supplied=user_supplied,
        editable=True,
        permit_editable_wheels=permit_editable_wheels,
        link=parts.link,
        constraint=constraint,
        use_pep517=use_pep517,
        isolated=isolated,
        install_options=options.get("install_options", []) if options else [],
        global_options=options.get("global_options", []) if options else [],
        hash_options=options.get("hashes", {}) if options else {},
        extras=parts.extras,
    )


def _looks_like_path(name: str) -> bool:
    """Checks whether the string "looks like" a path on the filesystem.

    This does not check whether the target actually exists, only judge from the
    appearance.

    Returns true if any of the following conditions is true:
    * a path separator is found (either os.path.sep or os.path.altsep);
    * a dot is found (which represents the current directory).
    """
    if os.path.sep in name:
        return True
    if os.path.altsep is not None and os.path.altsep in name:
        return True
    if name.startswith("."):
        return True
    return False


def _get_url_from_path(path: str, name: str) -> Optional[str]:
    """
    First, it checks whether a provided path is an installable directory. If it
    is, returns the path.

    If false, check if the path is an archive file (such as a .whl).
    The function checks if the path is a file. If false, if the path has
    an @, it will treat it as a PEP 440 URL requirement and return the path.
    """
    if _looks_like_path(name) and os.path.isdir(path):
        if is_installable_dir(path):
            return path_to_url(path)
        # TODO: The is_installable_dir test here might not be necessary
        #       now that it is done in load_pyproject_toml too.
        raise InstallationError(
            f"Directory {name!r} is not installable. Neither 'setup.py' "
            "nor 'pyproject.toml' found."
        )
    if not is_archive_file(path):
        return None
    if os.path.isfile(path):
        return path_to_url(path)
    urlreq_parts = name.split("@", 1)
    if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
        # If the path contains '@' and the part before it does not look
        # like a path, try to treat it as a PEP 440 URL req instead.
        return None
    logger.warning(
        "Requirement %r looks like a filename, but the file does not exist",
        name,
    )
    return path_to_url(path)


def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
    if is_url(name):
        marker_sep = "; "
    else:
        marker_sep = ";"
    if marker_sep in name:
        name, markers_as_string = name.split(marker_sep, 1)
        markers_as_string = markers_as_string.strip()
        if not markers_as_string:
            markers = None
        else:
            markers = Marker(markers_as_string)
    else:
        markers = None
    name = name.strip()
    req_as_string = None
    path = os.path.normpath(os.path.abspath(name))
    link = None
    extras_as_string = None

    if is_url(name):
        link = Link(name)
    else:
        p, extras_as_string = _strip_extras(path)
        url = _get_url_from_path(p, name)
        if url is not None:
            link = Link(url)

    # it's a local file, dir, or url
    if link:
        # Handle relative file URLs
        if link.scheme == "file" and re.search(r"\.\./", link.url):
            link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
        # wheel file
        if link.is_wheel:
            wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
            req_as_string = f"{wheel.name}=={wheel.version}"
        else:
            # set the req to the egg fragment. when it's not there, this
            # will become an 'unnamed' requirement
            req_as_string = link.egg_fragment

    # a requirement specifier
    else:
        req_as_string = name

    extras = convert_extras(extras_as_string)

    def with_source(text: str) -> str:
        if not line_source:
            return text
        return f"{text} (from {line_source})"

    def _parse_req_string(req_as_string: str) -> Requirement:
        try:
            req = get_requirement(req_as_string)
        except InvalidRequirement:
            if os.path.sep in req_as_string:
                add_msg = "It looks like a path."
                add_msg += deduce_helpful_msg(req_as_string)
            elif "=" in req_as_string and not any(
                op in req_as_string for op in operators
            ):
                add_msg = "= is not a valid operator. Did you mean == ?"
            else:
                add_msg = ""
            msg = with_source(f"Invalid requirement: {req_as_string!r}")
            if add_msg:
                msg += f"\nHint: {add_msg}"
            raise InstallationError(msg)
        else:
            # Deprecate extras after specifiers: "name>=1.0[extras]"
            # This currently works by accident because _strip_extras() parses
            # any extras in the end of the string and those are saved in
            # RequirementParts
            for spec in req.specifier:
                spec_str = str(spec)
                if spec_str.endswith("]"):
                    msg = f"Extras after version '{spec_str}'."
                    raise InstallationError(msg)
        return req

    if req_as_string is not None:
        req: Optional[Requirement] = _parse_req_string(req_as_string)
    else:
        req = None

    return RequirementParts(req, link, markers, extras)


def install_req_from_line(
    name: str,
    comes_from: Optional[Union[str, InstallRequirement]] = None,
    use_pep517: Optional[bool] = None,
    isolated: bool = False,
    options: Optional[Dict[str, Any]] = None,
    constraint: bool = False,
    line_source: Optional[str] = None,
    user_supplied: bool = False,
) -> InstallRequirement:
    """Creates an InstallRequirement from a name, which might be a
    requirement, directory containing 'setup.py', filename, or URL.

    :param line_source: An optional string describing where the line is from,
        for logging purposes in case of an error.
    """
    parts = parse_req_from_line(name, line_source)

    return InstallRequirement(
        parts.requirement,
        comes_from,
        link=parts.link,
        markers=parts.markers,
        use_pep517=use_pep517,
        isolated=isolated,
        install_options=options.get("install_options", []) if options else [],
        global_options=options.get("global_options", []) if options else [],
        hash_options=options.get("hashes", {}) if options else {},
        constraint=constraint,
        extras=parts.extras,
        user_supplied=user_supplied,
    )


def install_req_from_req_string(
    req_string: str,
    comes_from: Optional[InstallRequirement] = None,
    isolated: bool = False,
    use_pep517: Optional[bool] = None,
    user_supplied: bool = False,
) -> InstallRequirement:
    try:
        req = get_requirement(req_string)
    except InvalidRequirement:
        raise InstallationError(f"Invalid requirement: '{req_string}'")

    domains_not_allowed = [
        PyPI.file_storage_domain,
        TestPyPI.file_storage_domain,
    ]
    if (
        req.url
        and comes_from
        and comes_from.link
        and comes_from.link.netloc in domains_not_allowed
    ):
        # Explicitly disallow pypi packages that depend on external urls
        raise InstallationError(
            "Packages installed from PyPI cannot depend on packages "
            "which are not also hosted on PyPI.\n"
            "{} depends on {} ".format(comes_from.name, req)
        )

    return InstallRequirement(
        req,
        comes_from,
        isolated=isolated,
        use_pep517=use_pep517,
        user_supplied=user_supplied,
    )


def install_req_from_parsed_requirement(
    parsed_req: ParsedRequirement,
    isolated: bool = False,
    use_pep517: Optional[bool] = None,
    user_supplied: bool = False,
) -> InstallRequirement:
    if parsed_req.is_editable:
        req = install_req_from_editable(
            parsed_req.requirement,
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            constraint=parsed_req.constraint,
            isolated=isolated,
            user_supplied=user_supplied,
        )

    else:
        req = install_req_from_line(
            parsed_req.requirement,
            comes_from=parsed_req.comes_from,
            use_pep517=use_pep517,
            isolated=isolated,
            options=parsed_req.options,
            constraint=parsed_req.constraint,
            line_source=parsed_req.line_source,
            user_supplied=user_supplied,
        )
    return req


def install_req_from_link_and_ireq(
    link: Link, ireq: InstallRequirement
) -> InstallRequirement:
    return InstallRequirement(
        req=ireq.req,
        comes_from=ireq.comes_from,
        editable=ireq.editable,
        link=link,
        markers=ireq.markers,
        use_pep517=ireq.use_pep517,
        isolated=ireq.isolated,
        install_options=ireq.install_options,
        global_options=ireq.global_options,
        hash_options=ireq.hash_options,
    )
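For orientation, here is a minimal usage sketch of the constructors defined in the file above, which its docstring says exist to build InstallRequirement instances for the rest of pip. The snippet is not part of the uploaded files; it assumes this vendored copy of pip's internals is importable, and the requirement strings, URL, and line_source label are made-up examples.

# Illustrative sketch only; not part of this commit.
# Assumes the vendored pip internals diffed above are importable.
from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
)

# A plain specifier line with an environment marker, as a requirements-file
# parser would hand it over (the string is a made-up example).
ireq = install_req_from_line(
    "requests>=2.0; python_version >= '3.7'", line_source="example.txt:1"
)
print(ireq.name, ireq.specifier, ireq.markers)

# An editable VCS requirement; parse_editable() needs the #egg= fragment so
# the requirement gets a name (the URL is a made-up example).
editable = install_req_from_editable("git+https://example.com/demo.git#egg=demo")
print(editable.name, editable.editable)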
scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/req_install.py
ADDED
@@ -0,0 +1,858 @@
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import functools
import logging
import os
import shutil
import sys
import uuid
import zipfile
from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union

from pip._vendor.packaging.markers import Marker
from pip._vendor.packaging.requirements import Requirement
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import Version
from pip._vendor.packaging.version import parse as parse_version
from pip._vendor.pep517.wrappers import Pep517HookCaller

from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
from pip._internal.exceptions import InstallationError, LegacyInstallFailure
from pip._internal.locations import get_scheme
from pip._internal.metadata import (
    BaseDistribution,
    get_default_environment,
    get_directory_distribution,
)
from pip._internal.models.link import Link
from pip._internal.operations.build.metadata import generate_metadata
from pip._internal.operations.build.metadata_editable import generate_editable_metadata
from pip._internal.operations.build.metadata_legacy import (
    generate_metadata as generate_metadata_legacy,
)
from pip._internal.operations.install.editable_legacy import (
    install_editable as install_editable_legacy,
)
from pip._internal.operations.install.legacy import install as install_legacy
from pip._internal.operations.install.wheel import install_wheel
from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
from pip._internal.req.req_uninstall import UninstallPathSet
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.direct_url_helpers import (
    direct_url_for_editable,
    direct_url_from_link,
)
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.misc import (
    ask_path_exists,
    backup_dir,
    display_path,
    hide_url,
    redact_auth_from_url,
)
from pip._internal.utils.packaging import safe_extra
from pip._internal.utils.subprocess import runner_with_spinner_message
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.virtualenv import running_under_virtualenv
from pip._internal.vcs import vcs

logger = logging.getLogger(__name__)


class InstallRequirement:
    """
    Represents something that may be installed later on, may have information
    about where to fetch the relevant requirement and also contains logic for
    installing the said requirement.
    """

    def __init__(
        self,
        req: Optional[Requirement],
        comes_from: Optional[Union[str, "InstallRequirement"]],
        editable: bool = False,
        link: Optional[Link] = None,
        markers: Optional[Marker] = None,
        use_pep517: Optional[bool] = None,
        isolated: bool = False,
        install_options: Optional[List[str]] = None,
        global_options: Optional[List[str]] = None,
        hash_options: Optional[Dict[str, List[str]]] = None,
        constraint: bool = False,
        extras: Collection[str] = (),
        user_supplied: bool = False,
        permit_editable_wheels: bool = False,
    ) -> None:
        assert req is None or isinstance(req, Requirement), req
        self.req = req
        self.comes_from = comes_from
        self.constraint = constraint
        self.editable = editable
        self.permit_editable_wheels = permit_editable_wheels
        self.legacy_install_reason: Optional[int] = None

        # source_dir is the local directory where the linked requirement is
        # located, or unpacked. In case unpacking is needed, creating and
        # populating source_dir is done by the RequirementPreparer. Note this
        # is not necessarily the directory where pyproject.toml or setup.py is
        # located - that one is obtained via unpacked_source_directory.
        self.source_dir: Optional[str] = None
        if self.editable:
            assert link
            if link.is_file:
                self.source_dir = os.path.normpath(os.path.abspath(link.file_path))

        if link is None and req and req.url:
            # PEP 508 URL requirement
            link = Link(req.url)
        self.link = self.original_link = link
        self.original_link_is_in_wheel_cache = False

        # Path to any downloaded or already-existing package.
        self.local_file_path: Optional[str] = None
        if self.link and self.link.is_file:
            self.local_file_path = self.link.file_path

        if extras:
            self.extras = extras
        elif req:
            self.extras = {safe_extra(extra) for extra in req.extras}
        else:
            self.extras = set()
        if markers is None and req:
            markers = req.marker
        self.markers = markers

        # This holds the Distribution object if this requirement is already installed.
        self.satisfied_by: Optional[BaseDistribution] = None
        # Whether the installation process should try to uninstall an existing
        # distribution before installing this requirement.
        self.should_reinstall = False
        # Temporary build location
        self._temp_build_dir: Optional[TempDirectory] = None
        # Set to True after successful installation
        self.install_succeeded: Optional[bool] = None
        # Supplied options
        self.install_options = install_options if install_options else []
        self.global_options = global_options if global_options else []
        self.hash_options = hash_options if hash_options else {}
        # Set to True after successful preparation of this requirement
        self.prepared = False
        # User supplied requirement are explicitly requested for installation
        # by the user via CLI arguments or requirements files, as opposed to,
        # e.g. dependencies, extras or constraints.
        self.user_supplied = user_supplied

        self.isolated = isolated
        self.build_env: BuildEnvironment = NoOpBuildEnvironment()

        # For PEP 517, the directory where we request the project metadata
        # gets stored. We need this to pass to build_wheel, so the backend
        # can ensure that the wheel matches the metadata (see the PEP for
        # details).
        self.metadata_directory: Optional[str] = None

        # The static build requirements (from pyproject.toml)
        self.pyproject_requires: Optional[List[str]] = None

        # Build requirements that we will check are available
        self.requirements_to_check: List[str] = []

        # The PEP 517 backend we should use to build the project
        self.pep517_backend: Optional[Pep517HookCaller] = None

        # Are we using PEP 517 for this requirement?
        # After pyproject.toml has been loaded, the only valid values are True
        # and False. Before loading, None is valid (meaning "use the default").
        # Setting an explicit value before loading pyproject.toml is supported,
        # but after loading this flag should be treated as read only.
        self.use_pep517 = use_pep517

        # This requirement needs more preparation before it can be built
        self.needs_more_preparation = False

    def __str__(self) -> str:
        if self.req:
            s = str(self.req)
            if self.link:
                s += " from {}".format(redact_auth_from_url(self.link.url))
        elif self.link:
            s = redact_auth_from_url(self.link.url)
        else:
            s = "<InstallRequirement>"
        if self.satisfied_by is not None:
            s += " in {}".format(display_path(self.satisfied_by.location))
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from: Optional[str] = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += f" (from {comes_from})"
        return s

    def __repr__(self) -> str:
        return "<{} object: {} editable={!r}>".format(
            self.__class__.__name__, str(self), self.editable
        )

    def format_debug(self) -> str:
        """An un-tested helper for getting state, for debugging."""
        attributes = vars(self)
        names = sorted(attributes)

        state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
        return "<{name} object: {{{state}}}>".format(
            name=self.__class__.__name__,
            state=", ".join(state),
        )

    # Things that are valid for all kinds of requirements?
    @property
    def name(self) -> Optional[str]:
        if self.req is None:
            return None
        return self.req.name

    @functools.lru_cache()  # use cached_property in python 3.8+
    def supports_pyproject_editable(self) -> bool:
        if not self.use_pep517:
            return False
        assert self.pep517_backend
        with self.build_env:
            runner = runner_with_spinner_message(
                "Checking if build backend supports build_editable"
            )
            with self.pep517_backend.subprocess_runner(runner):
                return "build_editable" in self.pep517_backend._supported_features()

    @property
    def specifier(self) -> SpecifierSet:
        return self.req.specifier

    @property
    def is_pinned(self) -> bool:
        """Return whether I am pinned to an exact version.

        For example, some-package==1.2 is pinned; some-package>1.2 is not.
        """
        specifiers = self.specifier
        return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}

    def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
        if not extras_requested:
            # Provide an extra to safely evaluate the markers
            # without matching any extra
            extras_requested = ("",)
        if self.markers is not None:
            return any(
                self.markers.evaluate({"extra": extra}) for extra in extras_requested
            )
        else:
            return True

    @property
    def has_hash_options(self) -> bool:
        """Return whether any known-good hashes are specified as options.

        These activate --require-hashes mode; hashes specified as part of a
        URL do not.

        """
        return bool(self.hash_options)

    def hashes(self, trust_internet: bool = True) -> Hashes:
        """Return a hash-comparer that considers my option- and URL-based
        hashes to be known-good.

        Hashes in URLs--ones embedded in the requirements file, not ones
        downloaded from an index server--are almost peers with ones from
        flags. They satisfy --require-hashes (whether it was implicitly or
        explicitly activated) but do not activate it. md5 and sha224 are not
        allowed in flags, which should nudge people toward good algos. We
        always OR all hashes together, even ones from URLs.

        :param trust_internet: Whether to trust URL-based (#md5=...) hashes
            downloaded from the internet, as by populate_link()

        """
        good_hashes = self.hash_options.copy()
        link = self.link if trust_internet else self.original_link
        if link and link.hash:
            good_hashes.setdefault(link.hash_name, []).append(link.hash)
        return Hashes(good_hashes)

    def from_path(self) -> Optional[str]:
        """Format a nice indicator to show where this "comes from" """
        if self.req is None:
            return None
        s = str(self.req)
        if self.comes_from:
            if isinstance(self.comes_from, str):
                comes_from = self.comes_from
            else:
                comes_from = self.comes_from.from_path()
            if comes_from:
                s += "->" + comes_from
        return s

    def ensure_build_location(
        self, build_dir: str, autodelete: bool, parallel_builds: bool
    ) -> str:
        assert build_dir is not None
        if self._temp_build_dir is not None:
            assert self._temp_build_dir.path
            return self._temp_build_dir.path
        if self.req is None:
            # Some systems have /tmp as a symlink which confuses custom
            # builds (such as numpy). Thus, we ensure that the real path
            # is returned.
            self._temp_build_dir = TempDirectory(
                kind=tempdir_kinds.REQ_BUILD, globally_managed=True
            )

            return self._temp_build_dir.path

        # This is the only remaining place where we manually determine the path
        # for the temporary directory. It is only needed for editables where
        # it is the value of the --src option.

        # When parallel builds are enabled, add a UUID to the build directory
        # name so multiple builds do not interfere with each other.
        dir_name: str = canonicalize_name(self.name)
        if parallel_builds:
            dir_name = f"{dir_name}_{uuid.uuid4().hex}"

        # FIXME: Is there a better place to create the build_dir? (hg and bzr
        # need this)
        if not os.path.exists(build_dir):
            logger.debug("Creating directory %s", build_dir)
            os.makedirs(build_dir)
        actual_build_dir = os.path.join(build_dir, dir_name)
        # `None` indicates that we respect the globally-configured deletion
        # settings, which is what we actually want when auto-deleting.
        delete_arg = None if autodelete else False
        return TempDirectory(
            path=actual_build_dir,
            delete=delete_arg,
            kind=tempdir_kinds.REQ_BUILD,
            globally_managed=True,
        ).path

    def _set_requirement(self) -> None:
        """Set requirement after generating metadata."""
        assert self.req is None
        assert self.metadata is not None
        assert self.source_dir is not None

        # Construct a Requirement object from the generated metadata
        if isinstance(parse_version(self.metadata["Version"]), Version):
            op = "=="
        else:
            op = "==="

        self.req = Requirement(
            "".join(
                [
                    self.metadata["Name"],
                    op,
                    self.metadata["Version"],
                ]
            )
        )

    def warn_on_mismatching_name(self) -> None:
        metadata_name = canonicalize_name(self.metadata["Name"])
        if canonicalize_name(self.req.name) == metadata_name:
            # Everything is fine.
            return

        # If we're here, there's a mismatch. Log a warning about it.
        logger.warning(
            "Generating metadata for package %s "
            "produced metadata for project name %s. Fix your "
            "#egg=%s fragments.",
            self.name,
            metadata_name,
            self.name,
        )
        self.req = Requirement(metadata_name)

    def check_if_exists(self, use_user_site: bool) -> None:
        """Find an installed distribution that satisfies or conflicts
        with this requirement, and set self.satisfied_by or
        self.should_reinstall appropriately.
        """
        if self.req is None:
            return
        existing_dist = get_default_environment().get_distribution(self.req.name)
        if not existing_dist:
            return

        version_compatible = self.req.specifier.contains(
            existing_dist.version,
            prereleases=True,
        )
        if not version_compatible:
            self.satisfied_by = None
            if use_user_site:
                if existing_dist.in_usersite:
                    self.should_reinstall = True
                elif running_under_virtualenv() and existing_dist.in_site_packages:
                    raise InstallationError(
                        f"Will not install to the user site because it will "
                        f"lack sys.path precedence to {existing_dist.raw_name} "
                        f"in {existing_dist.location}"
                    )
            else:
                self.should_reinstall = True
        else:
            if self.editable:
                self.should_reinstall = True
                # when installing editables, nothing pre-existing should ever
                # satisfy
                self.satisfied_by = None
            else:
                self.satisfied_by = existing_dist

    # Things valid for wheels
    @property
    def is_wheel(self) -> bool:
        if not self.link:
            return False
        return self.link.is_wheel

    # Things valid for sdists
    @property
    def unpacked_source_directory(self) -> str:
        return os.path.join(
            self.source_dir, self.link and self.link.subdirectory_fragment or ""
        )

    @property
    def setup_py_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_py = os.path.join(self.unpacked_source_directory, "setup.py")

        return setup_py

    @property
    def setup_cfg_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")

        return setup_cfg

    @property
    def pyproject_toml_path(self) -> str:
        assert self.source_dir, f"No source dir for {self}"
        return make_pyproject_path(self.unpacked_source_directory)

    def load_pyproject_toml(self) -> None:
        """Load the pyproject.toml file.

        After calling this routine, all of the attributes related to PEP 517
        processing for this requirement have been set. In particular, the
        use_pep517 attribute can be used to determine whether we should
        follow the PEP 517 or legacy (setup.py) code path.
        """
        pyproject_toml_data = load_pyproject_toml(
            self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
        )

        if pyproject_toml_data is None:
            self.use_pep517 = False
            return

        self.use_pep517 = True
        requires, backend, check, backend_path = pyproject_toml_data
        self.requirements_to_check = check
        self.pyproject_requires = requires
        self.pep517_backend = Pep517HookCaller(
            self.unpacked_source_directory,
            backend,
            backend_path=backend_path,
        )

    def isolated_editable_sanity_check(self) -> None:
        """Check that an editable requirement if valid for use with PEP 517/518.

        This verifies that an editable that has a pyproject.toml either supports PEP 660
        or as a setup.py or a setup.cfg
        """
        if (
            self.editable
            and self.use_pep517
            and not self.supports_pyproject_editable()
            and not os.path.isfile(self.setup_py_path)
            and not os.path.isfile(self.setup_cfg_path)
        ):
            raise InstallationError(
                f"Project {self} has a 'pyproject.toml' and its build "
                f"backend is missing the 'build_editable' hook. Since it does not "
                f"have a 'setup.py' nor a 'setup.cfg', "
                f"it cannot be installed in editable mode. "
                f"Consider using a build backend that supports PEP 660."
            )

    def prepare_metadata(self) -> None:
        """Ensure that project metadata is available.

        Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
        Under legacy processing, call setup.py egg-info.
        """
        assert self.source_dir
        details = self.name or f"from {self.link}"

        if self.use_pep517:
            assert self.pep517_backend is not None
            if (
                self.editable
                and self.permit_editable_wheels
                and self.supports_pyproject_editable()
            ):
                self.metadata_directory = generate_editable_metadata(
                    build_env=self.build_env,
                    backend=self.pep517_backend,
                    details=details,
                )
            else:
                self.metadata_directory = generate_metadata(
                    build_env=self.build_env,
                    backend=self.pep517_backend,
                    details=details,
                )
        else:
            self.metadata_directory = generate_metadata_legacy(
                build_env=self.build_env,
                setup_py_path=self.setup_py_path,
                source_dir=self.unpacked_source_directory,
                isolated=self.isolated,
                details=details,
            )

        # Act on the newly generated metadata, based on the name and version.
        if not self.name:
            self._set_requirement()
        else:
            self.warn_on_mismatching_name()

        self.assert_source_matches_version()

    @property
    def metadata(self) -> Any:
        if not hasattr(self, "_metadata"):
            self._metadata = self.get_dist().metadata

        return self._metadata

    def get_dist(self) -> BaseDistribution:
        return get_directory_distribution(self.metadata_directory)

    def assert_source_matches_version(self) -> None:
        assert self.source_dir
        version = self.metadata["version"]
        if self.req.specifier and version not in self.req.specifier:
            logger.warning(
                "Requested %s, but installing version %s",
                self,
                version,
            )
        else:
            logger.debug(
                "Source in %s has version %s, which satisfies requirement %s",
                display_path(self.source_dir),
                version,
                self,
            )

    # For both source distributions and editables
    def ensure_has_source_dir(
        self,
        parent_dir: str,
        autodelete: bool = False,
        parallel_builds: bool = False,
    ) -> None:
        """Ensure that a source_dir is set.

        This will create a temporary build dir if the name of the requirement
        isn't known yet.

        :param parent_dir: The ideal pip parent_dir for the source_dir.
            Generally src_dir for editables and build_dir for sdists.
        :return: self.source_dir
        """
        if self.source_dir is None:
            self.source_dir = self.ensure_build_location(
                parent_dir,
                autodelete=autodelete,
                parallel_builds=parallel_builds,
            )

    # For editable installations
    def update_editable(self) -> None:
        if not self.link:
            logger.debug(
                "Cannot update repository at %s; repository location is unknown",
                self.source_dir,
            )
            return
        assert self.editable
        assert self.source_dir
        if self.link.scheme == "file":
            # Static paths don't get updated
            return
        vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
        # Editable requirements are validated in Requirement constructors.
        # So here, if it's neither a path nor a valid VCS URL, it's a bug.
        assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
        hidden_url = hide_url(self.link.url)
        vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)

    # Top-level Actions
    def uninstall(
        self, auto_confirm: bool = False, verbose: bool = False
    ) -> Optional[UninstallPathSet]:
        """
        Uninstall the distribution currently satisfying this requirement.

        Prompts before removing or modifying files unless
        ``auto_confirm`` is True.

        Refuses to delete or modify files outside of ``sys.prefix`` -
        thus uninstallation within a virtual environment can only
        modify that virtual environment, even if the virtualenv is
        linked to global site-packages.

        """
        assert self.req
        dist = get_default_environment().get_distribution(self.req.name)
        if not dist:
            logger.warning("Skipping %s as it is not installed.", self.name)
            return None
        logger.info("Found existing installation: %s", dist)

        uninstalled_pathset = UninstallPathSet.from_dist(dist)
        uninstalled_pathset.remove(auto_confirm, verbose)
        return uninstalled_pathset

    def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
        def _clean_zip_name(name: str, prefix: str) -> str:
            assert name.startswith(
                prefix + os.path.sep
            ), f"name {name!r} doesn't start with prefix {prefix!r}"
            name = name[len(prefix) + 1 :]
            name = name.replace(os.path.sep, "/")
            return name

        path = os.path.join(parentdir, path)
        name = _clean_zip_name(path, rootdir)
        return self.name + "/" + name

    def archive(self, build_dir: Optional[str]) -> None:
        """Saves archive to provided build_dir.

        Used for saving downloaded VCS requirements as part of `pip download`.
        """
        assert self.source_dir
        if build_dir is None:
            return

        create_archive = True
        archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
        archive_path = os.path.join(build_dir, archive_name)

        if os.path.exists(archive_path):
            response = ask_path_exists(
                "The file {} exists. (i)gnore, (w)ipe, "
                "(b)ackup, (a)bort ".format(display_path(archive_path)),
                ("i", "w", "b", "a"),
            )
            if response == "i":
                create_archive = False
            elif response == "w":
                logger.warning("Deleting %s", display_path(archive_path))
                os.remove(archive_path)
            elif response == "b":
                dest_file = backup_dir(archive_path)
                logger.warning(
                    "Backing up %s to %s",
                    display_path(archive_path),
                    display_path(dest_file),
                )
                shutil.move(archive_path, dest_file)
            elif response == "a":
                sys.exit(-1)

        if not create_archive:
            return

        zip_output = zipfile.ZipFile(
            archive_path,
            "w",
            zipfile.ZIP_DEFLATED,
            allowZip64=True,
        )
        with zip_output:
            dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
            for dirpath, dirnames, filenames in os.walk(dir):
                for dirname in dirnames:
                    dir_arcname = self._get_archive_name(
                        dirname,
                        parentdir=dirpath,
                        rootdir=dir,
                    )
                    zipdir = zipfile.ZipInfo(dir_arcname + "/")
                    zipdir.external_attr = 0x1ED << 16  # 0o755
                    zip_output.writestr(zipdir, "")
                for filename in filenames:
                    file_arcname = self._get_archive_name(
                        filename,
                        parentdir=dirpath,
                        rootdir=dir,
                    )
                    filename = os.path.join(dirpath, filename)
                    zip_output.write(filename, file_arcname)

        logger.info("Saved %s", display_path(archive_path))

    def install(
        self,
        install_options: List[str],
        global_options: Optional[Sequence[str]] = None,
        root: Optional[str] = None,
        home: Optional[str] = None,
        prefix: Optional[str] = None,
        warn_script_location: bool = True,
        use_user_site: bool = False,
        pycompile: bool = True,
    ) -> None:
        scheme = get_scheme(
            self.name,
            user=use_user_site,
            home=home,
            root=root,
            isolated=self.isolated,
            prefix=prefix,
        )

        global_options = global_options if global_options is not None else []
        if self.editable and not self.is_wheel:
            install_editable_legacy(
                install_options,
                global_options,
                prefix=prefix,
                home=home,
                use_user_site=use_user_site,
                name=self.name,
                setup_py_path=self.setup_py_path,
                isolated=self.isolated,
                build_env=self.build_env,
                unpacked_source_directory=self.unpacked_source_directory,
            )
            self.install_succeeded = True
            return

        if self.is_wheel:
            assert self.local_file_path
            direct_url = None
            if self.editable:
                direct_url = direct_url_for_editable(self.unpacked_source_directory)
            elif self.original_link:
                direct_url = direct_url_from_link(
                    self.original_link,
                    self.source_dir,
                    self.original_link_is_in_wheel_cache,
                )
            install_wheel(
                self.name,
                self.local_file_path,
                scheme=scheme,
                req_description=str(self.req),
                pycompile=pycompile,
                warn_script_location=warn_script_location,
                direct_url=direct_url,
                requested=self.user_supplied,
|
778 |
+
)
|
779 |
+
self.install_succeeded = True
|
780 |
+
return
|
781 |
+
|
782 |
+
# TODO: Why don't we do this for editable installs?
|
783 |
+
|
784 |
+
# Extend the list of global and install options passed on to
|
785 |
+
# the setup.py call with the ones from the requirements file.
|
786 |
+
# Options specified in requirements file override those
|
787 |
+
# specified on the command line, since the last option given
|
788 |
+
# to setup.py is the one that is used.
|
789 |
+
global_options = list(global_options) + self.global_options
|
790 |
+
install_options = list(install_options) + self.install_options
|
791 |
+
|
792 |
+
try:
|
793 |
+
success = install_legacy(
|
794 |
+
install_options=install_options,
|
795 |
+
global_options=global_options,
|
796 |
+
root=root,
|
797 |
+
home=home,
|
798 |
+
prefix=prefix,
|
799 |
+
use_user_site=use_user_site,
|
800 |
+
pycompile=pycompile,
|
801 |
+
scheme=scheme,
|
802 |
+
setup_py_path=self.setup_py_path,
|
803 |
+
isolated=self.isolated,
|
804 |
+
req_name=self.name,
|
805 |
+
build_env=self.build_env,
|
806 |
+
unpacked_source_directory=self.unpacked_source_directory,
|
807 |
+
req_description=str(self.req),
|
808 |
+
)
|
809 |
+
except LegacyInstallFailure as exc:
|
810 |
+
self.install_succeeded = False
|
811 |
+
raise exc
|
812 |
+
except Exception:
|
813 |
+
self.install_succeeded = True
|
814 |
+
raise
|
815 |
+
|
816 |
+
self.install_succeeded = success
|
817 |
+
|
818 |
+
if success and self.legacy_install_reason == 8368:
|
819 |
+
deprecated(
|
820 |
+
reason=(
|
821 |
+
"{} was installed using the legacy 'setup.py install' "
|
822 |
+
"method, because a wheel could not be built for it.".format(
|
823 |
+
self.name
|
824 |
+
)
|
825 |
+
),
|
826 |
+
replacement="to fix the wheel build issue reported above",
|
827 |
+
gone_in=None,
|
828 |
+
issue=8368,
|
829 |
+
)
|
830 |
+
|
831 |
+
|
832 |
+
def check_invalid_constraint_type(req: InstallRequirement) -> str:
|
833 |
+
|
834 |
+
# Check for unsupported forms
|
835 |
+
problem = ""
|
836 |
+
if not req.name:
|
837 |
+
problem = "Unnamed requirements are not allowed as constraints"
|
838 |
+
elif req.editable:
|
839 |
+
problem = "Editable requirements are not allowed as constraints"
|
840 |
+
elif req.extras:
|
841 |
+
problem = "Constraints cannot have extras"
|
842 |
+
|
843 |
+
if problem:
|
844 |
+
deprecated(
|
845 |
+
reason=(
|
846 |
+
"Constraints are only allowed to take the form of a package "
|
847 |
+
"name and a version specifier. Other forms were originally "
|
848 |
+
"permitted as an accident of the implementation, but were "
|
849 |
+
"undocumented. The new implementation of the resolver no "
|
850 |
+
"longer supports these forms."
|
851 |
+
),
|
852 |
+
replacement="replacing the constraint with a requirement",
|
853 |
+
# No plan yet for when the new resolver becomes default
|
854 |
+
gone_in=None,
|
855 |
+
issue=8210,
|
856 |
+
)
|
857 |
+
|
858 |
+
return problem
|
scripts/myenv/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py
ADDED
@@ -0,0 +1,633 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import functools
|
2 |
+
import os
|
3 |
+
import sys
|
4 |
+
import sysconfig
|
5 |
+
from importlib.util import cache_from_source
|
6 |
+
from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple
|
7 |
+
|
8 |
+
from pip._internal.exceptions import UninstallationError
|
9 |
+
from pip._internal.locations import get_bin_prefix, get_bin_user
|
10 |
+
from pip._internal.metadata import BaseDistribution
|
11 |
+
from pip._internal.utils.compat import WINDOWS
|
12 |
+
from pip._internal.utils.egg_link import egg_link_path_from_location
|
13 |
+
from pip._internal.utils.logging import getLogger, indent_log
|
14 |
+
from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree
|
15 |
+
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
|
16 |
+
|
17 |
+
logger = getLogger(__name__)
|
18 |
+
|
19 |
+
|
20 |
+
def _script_names(bin_dir: str, script_name: str, is_gui: bool) -> Iterator[str]:
|
21 |
+
"""Create the fully qualified name of the files created by
|
22 |
+
{console,gui}_scripts for the given ``dist``.
|
23 |
+
Returns the list of file names
|
24 |
+
"""
|
25 |
+
exe_name = os.path.join(bin_dir, script_name)
|
26 |
+
yield exe_name
|
27 |
+
if not WINDOWS:
|
28 |
+
return
|
29 |
+
yield f"{exe_name}.exe"
|
30 |
+
yield f"{exe_name}.exe.manifest"
|
31 |
+
if is_gui:
|
32 |
+
yield f"{exe_name}-script.pyw"
|
33 |
+
else:
|
34 |
+
yield f"{exe_name}-script.py"
|
35 |
+
|
36 |
+
|
37 |
+
def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]:
|
38 |
+
@functools.wraps(fn)
|
39 |
+
def unique(*args: Any, **kw: Any) -> Iterator[Any]:
|
40 |
+
seen: Set[Any] = set()
|
41 |
+
for item in fn(*args, **kw):
|
42 |
+
if item not in seen:
|
43 |
+
seen.add(item)
|
44 |
+
yield item
|
45 |
+
|
46 |
+
return unique
|
47 |
+
|
48 |
+
|
49 |
+
@_unique
|
50 |
+
def uninstallation_paths(dist: BaseDistribution) -> Iterator[str]:
|
51 |
+
"""
|
52 |
+
Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
|
53 |
+
|
54 |
+
Yield paths to all the files in RECORD. For each .py file in RECORD, add
|
55 |
+
the .pyc and .pyo in the same directory.
|
56 |
+
|
57 |
+
UninstallPathSet.add() takes care of the __pycache__ .py[co].
|
58 |
+
|
59 |
+
If RECORD is not found, raises UninstallationError,
|
60 |
+
with possible information from the INSTALLER file.
|
61 |
+
|
62 |
+
https://packaging.python.org/specifications/recording-installed-packages/
|
63 |
+
"""
|
64 |
+
location = dist.location
|
65 |
+
assert location is not None, "not installed"
|
66 |
+
|
67 |
+
entries = dist.iter_declared_entries()
|
68 |
+
if entries is None:
|
69 |
+
msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
|
70 |
+
installer = dist.installer
|
71 |
+
if not installer or installer == "pip":
|
72 |
+
dep = "{}=={}".format(dist.raw_name, dist.version)
|
73 |
+
msg += (
|
74 |
+
" You might be able to recover from this via: "
|
75 |
+
"'pip install --force-reinstall --no-deps {}'.".format(dep)
|
76 |
+
)
|
77 |
+
else:
|
78 |
+
msg += " Hint: The package was installed by {}.".format(installer)
|
79 |
+
raise UninstallationError(msg)
|
80 |
+
|
81 |
+
for entry in entries:
|
82 |
+
path = os.path.join(location, entry)
|
83 |
+
yield path
|
84 |
+
if path.endswith(".py"):
|
85 |
+
dn, fn = os.path.split(path)
|
86 |
+
base = fn[:-3]
|
87 |
+
path = os.path.join(dn, base + ".pyc")
|
88 |
+
yield path
|
89 |
+
path = os.path.join(dn, base + ".pyo")
|
90 |
+
yield path
|
91 |
+
|
92 |
+
|
93 |
+
def compact(paths: Iterable[str]) -> Set[str]:
|
94 |
+
"""Compact a path set to contain the minimal number of paths
|
95 |
+
necessary to contain all paths in the set. If /a/path/ and
|
96 |
+
/a/path/to/a/file.txt are both in the set, leave only the
|
97 |
+
shorter path."""
|
98 |
+
|
99 |
+
sep = os.path.sep
|
100 |
+
short_paths: Set[str] = set()
|
101 |
+
for path in sorted(paths, key=len):
|
102 |
+
should_skip = any(
|
103 |
+
path.startswith(shortpath.rstrip("*"))
|
104 |
+
and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
|
105 |
+
for shortpath in short_paths
|
106 |
+
)
|
107 |
+
if not should_skip:
|
108 |
+
short_paths.add(path)
|
109 |
+
return short_paths
|
110 |
+
|
111 |
+
|
112 |
+
def compress_for_rename(paths: Iterable[str]) -> Set[str]:
|
113 |
+
"""Returns a set containing the paths that need to be renamed.
|
114 |
+
|
115 |
+
This set may include directories when the original sequence of paths
|
116 |
+
included every file on disk.
|
117 |
+
"""
|
118 |
+
case_map = {os.path.normcase(p): p for p in paths}
|
119 |
+
remaining = set(case_map)
|
120 |
+
unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
|
121 |
+
wildcards: Set[str] = set()
|
122 |
+
|
123 |
+
def norm_join(*a: str) -> str:
|
124 |
+
return os.path.normcase(os.path.join(*a))
|
125 |
+
|
126 |
+
for root in unchecked:
|
127 |
+
if any(os.path.normcase(root).startswith(w) for w in wildcards):
|
128 |
+
# This directory has already been handled.
|
129 |
+
continue
|
130 |
+
|
131 |
+
all_files: Set[str] = set()
|
132 |
+
all_subdirs: Set[str] = set()
|
133 |
+
for dirname, subdirs, files in os.walk(root):
|
134 |
+
all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
|
135 |
+
all_files.update(norm_join(root, dirname, f) for f in files)
|
136 |
+
# If all the files we found are in our remaining set of files to
|
137 |
+
# remove, then remove them from the latter set and add a wildcard
|
138 |
+
# for the directory.
|
139 |
+
if not (all_files - remaining):
|
140 |
+
remaining.difference_update(all_files)
|
141 |
+
wildcards.add(root + os.sep)
|
142 |
+
|
143 |
+
return set(map(case_map.__getitem__, remaining)) | wildcards
|
144 |
+
|
145 |
+
|
146 |
+
def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
|
147 |
+
"""Returns a tuple of 2 sets of which paths to display to user
|
148 |
+
|
149 |
+
The first set contains paths that would be deleted. Files of a package
|
150 |
+
are not added and the top-level directory of the package has a '*' added
|
151 |
+
at the end - to signify that all it's contents are removed.
|
152 |
+
|
153 |
+
The second set contains files that would have been skipped in the above
|
154 |
+
folders.
|
155 |
+
"""
|
156 |
+
|
157 |
+
will_remove = set(paths)
|
158 |
+
will_skip = set()
|
159 |
+
|
160 |
+
# Determine folders and files
|
161 |
+
folders = set()
|
162 |
+
files = set()
|
163 |
+
for path in will_remove:
|
164 |
+
if path.endswith(".pyc"):
|
165 |
+
continue
|
166 |
+
if path.endswith("__init__.py") or ".dist-info" in path:
|
167 |
+
folders.add(os.path.dirname(path))
|
168 |
+
files.add(path)
|
169 |
+
|
170 |
+
# probably this one https://github.com/python/mypy/issues/390
|
171 |
+
_normcased_files = set(map(os.path.normcase, files)) # type: ignore
|
172 |
+
|
173 |
+
folders = compact(folders)
|
174 |
+
|
175 |
+
# This walks the tree using os.walk to not miss extra folders
|
176 |
+
# that might get added.
|
177 |
+
for folder in folders:
|
178 |
+
for dirpath, _, dirfiles in os.walk(folder):
|
179 |
+
for fname in dirfiles:
|
180 |
+
if fname.endswith(".pyc"):
|
181 |
+
continue
|
182 |
+
|
183 |
+
file_ = os.path.join(dirpath, fname)
|
184 |
+
if (
|
185 |
+
os.path.isfile(file_)
|
186 |
+
and os.path.normcase(file_) not in _normcased_files
|
187 |
+
):
|
188 |
+
# We are skipping this file. Add it to the set.
|
189 |
+
will_skip.add(file_)
|
190 |
+
|
191 |
+
will_remove = files | {os.path.join(folder, "*") for folder in folders}
|
192 |
+
|
193 |
+
return will_remove, will_skip
|
194 |
+
|
195 |
+
|
196 |
+
class StashedUninstallPathSet:
|
197 |
+
"""A set of file rename operations to stash files while
|
198 |
+
tentatively uninstalling them."""
|
199 |
+
|
200 |
+
def __init__(self) -> None:
|
201 |
+
# Mapping from source file root to [Adjacent]TempDirectory
|
202 |
+
# for files under that directory.
|
203 |
+
self._save_dirs: Dict[str, TempDirectory] = {}
|
204 |
+
# (old path, new path) tuples for each move that may need
|
205 |
+
# to be undone.
|
206 |
+
self._moves: List[Tuple[str, str]] = []
|
207 |
+
|
208 |
+
def _get_directory_stash(self, path: str) -> str:
|
209 |
+
"""Stashes a directory.
|
210 |
+
|
211 |
+
Directories are stashed adjacent to their original location if
|
212 |
+
possible, or else moved/copied into the user's temp dir."""
|
213 |
+
|
214 |
+
try:
|
215 |
+
save_dir: TempDirectory = AdjacentTempDirectory(path)
|
216 |
+
except OSError:
|
217 |
+
save_dir = TempDirectory(kind="uninstall")
|
218 |
+
self._save_dirs[os.path.normcase(path)] = save_dir
|
219 |
+
|
220 |
+
return save_dir.path
|
221 |
+
|
222 |
+
def _get_file_stash(self, path: str) -> str:
|
223 |
+
"""Stashes a file.
|
224 |
+
|
225 |
+
If no root has been provided, one will be created for the directory
|
226 |
+
in the user's temp directory."""
|
227 |
+
path = os.path.normcase(path)
|
228 |
+
head, old_head = os.path.dirname(path), None
|
229 |
+
save_dir = None
|
230 |
+
|
231 |
+
while head != old_head:
|
232 |
+
try:
|
233 |
+
save_dir = self._save_dirs[head]
|
234 |
+
break
|
235 |
+
except KeyError:
|
236 |
+
pass
|
237 |
+
head, old_head = os.path.dirname(head), head
|
238 |
+
else:
|
239 |
+
# Did not find any suitable root
|
240 |
+
head = os.path.dirname(path)
|
241 |
+
save_dir = TempDirectory(kind="uninstall")
|
242 |
+
self._save_dirs[head] = save_dir
|
243 |
+
|
244 |
+
relpath = os.path.relpath(path, head)
|
245 |
+
if relpath and relpath != os.path.curdir:
|
246 |
+
return os.path.join(save_dir.path, relpath)
|
247 |
+
return save_dir.path
|
248 |
+
|
249 |
+
def stash(self, path: str) -> str:
|
250 |
+
"""Stashes the directory or file and returns its new location.
|
251 |
+
Handle symlinks as files to avoid modifying the symlink targets.
|
252 |
+
"""
|
253 |
+
path_is_dir = os.path.isdir(path) and not os.path.islink(path)
|
254 |
+
if path_is_dir:
|
255 |
+
new_path = self._get_directory_stash(path)
|
256 |
+
else:
|
257 |
+
new_path = self._get_file_stash(path)
|
258 |
+
|
259 |
+
self._moves.append((path, new_path))
|
260 |
+
if path_is_dir and os.path.isdir(new_path):
|
261 |
+
# If we're moving a directory, we need to
|
262 |
+
# remove the destination first or else it will be
|
263 |
+
# moved to inside the existing directory.
|
264 |
+
# We just created new_path ourselves, so it will
|
265 |
+
# be removable.
|
266 |
+
os.rmdir(new_path)
|
267 |
+
renames(path, new_path)
|
268 |
+
return new_path
|
269 |
+
|
270 |
+
def commit(self) -> None:
|
271 |
+
"""Commits the uninstall by removing stashed files."""
|
272 |
+
for _, save_dir in self._save_dirs.items():
|
273 |
+
save_dir.cleanup()
|
274 |
+
self._moves = []
|
275 |
+
self._save_dirs = {}
|
276 |
+
|
277 |
+
def rollback(self) -> None:
|
278 |
+
"""Undoes the uninstall by moving stashed files back."""
|
279 |
+
for p in self._moves:
|
280 |
+
logger.info("Moving to %s\n from %s", *p)
|
281 |
+
|
282 |
+
for new_path, path in self._moves:
|
283 |
+
try:
|
284 |
+
logger.debug("Replacing %s from %s", new_path, path)
|
285 |
+
if os.path.isfile(new_path) or os.path.islink(new_path):
|
286 |
+
os.unlink(new_path)
|
287 |
+
elif os.path.isdir(new_path):
|
288 |
+
rmtree(new_path)
|
289 |
+
renames(path, new_path)
|
290 |
+
except OSError as ex:
|
291 |
+
logger.error("Failed to restore %s", new_path)
|
292 |
+
logger.debug("Exception: %s", ex)
|
293 |
+
|
294 |
+
self.commit()
|
295 |
+
|
296 |
+
@property
|
297 |
+
def can_rollback(self) -> bool:
|
298 |
+
return bool(self._moves)
|
299 |
+
|
300 |
+
|
301 |
+
class UninstallPathSet:
|
302 |
+
"""A set of file paths to be removed in the uninstallation of a
|
303 |
+
requirement."""
|
304 |
+
|
305 |
+
def __init__(self, dist: BaseDistribution) -> None:
|
306 |
+
self._paths: Set[str] = set()
|
307 |
+
self._refuse: Set[str] = set()
|
308 |
+
self._pth: Dict[str, UninstallPthEntries] = {}
|
309 |
+
self._dist = dist
|
310 |
+
self._moved_paths = StashedUninstallPathSet()
|
311 |
+
|
312 |
+
def _permitted(self, path: str) -> bool:
|
313 |
+
"""
|
314 |
+
Return True if the given path is one we are permitted to
|
315 |
+
remove/modify, False otherwise.
|
316 |
+
|
317 |
+
"""
|
318 |
+
return is_local(path)
|
319 |
+
|
320 |
+
def add(self, path: str) -> None:
|
321 |
+
head, tail = os.path.split(path)
|
322 |
+
|
323 |
+
# we normalize the head to resolve parent directory symlinks, but not
|
324 |
+
# the tail, since we only want to uninstall symlinks, not their targets
|
325 |
+
path = os.path.join(normalize_path(head), os.path.normcase(tail))
|
326 |
+
|
327 |
+
if not os.path.exists(path):
|
328 |
+
return
|
329 |
+
if self._permitted(path):
|
330 |
+
self._paths.add(path)
|
331 |
+
else:
|
332 |
+
self._refuse.add(path)
|
333 |
+
|
334 |
+
# __pycache__ files can show up after 'installed-files.txt' is created,
|
335 |
+
# due to imports
|
336 |
+
if os.path.splitext(path)[1] == ".py":
|
337 |
+
self.add(cache_from_source(path))
|
338 |
+
|
339 |
+
def add_pth(self, pth_file: str, entry: str) -> None:
|
340 |
+
pth_file = normalize_path(pth_file)
|
341 |
+
if self._permitted(pth_file):
|
342 |
+
if pth_file not in self._pth:
|
343 |
+
self._pth[pth_file] = UninstallPthEntries(pth_file)
|
344 |
+
self._pth[pth_file].add(entry)
|
345 |
+
else:
|
346 |
+
self._refuse.add(pth_file)
|
347 |
+
|
348 |
+
def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
|
349 |
+
"""Remove paths in ``self._paths`` with confirmation (unless
|
350 |
+
``auto_confirm`` is True)."""
|
351 |
+
|
352 |
+
if not self._paths:
|
353 |
+
logger.info(
|
354 |
+
"Can't uninstall '%s'. No files were found to uninstall.",
|
355 |
+
self._dist.raw_name,
|
356 |
+
)
|
357 |
+
return
|
358 |
+
|
359 |
+
dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
|
360 |
+
logger.info("Uninstalling %s:", dist_name_version)
|
361 |
+
|
362 |
+
with indent_log():
|
363 |
+
if auto_confirm or self._allowed_to_proceed(verbose):
|
364 |
+
moved = self._moved_paths
|
365 |
+
|
366 |
+
for_rename = compress_for_rename(self._paths)
|
367 |
+
|
368 |
+
for path in sorted(compact(for_rename)):
|
369 |
+
moved.stash(path)
|
370 |
+
logger.verbose("Removing file or directory %s", path)
|
371 |
+
|
372 |
+
for pth in self._pth.values():
|
373 |
+
pth.remove()
|
374 |
+
|
375 |
+
logger.info("Successfully uninstalled %s", dist_name_version)
|
376 |
+
|
377 |
+
def _allowed_to_proceed(self, verbose: bool) -> bool:
|
378 |
+
"""Display which files would be deleted and prompt for confirmation"""
|
379 |
+
|
380 |
+
def _display(msg: str, paths: Iterable[str]) -> None:
|
381 |
+
if not paths:
|
382 |
+
return
|
383 |
+
|
384 |
+
logger.info(msg)
|
385 |
+
with indent_log():
|
386 |
+
for path in sorted(compact(paths)):
|
387 |
+
logger.info(path)
|
388 |
+
|
389 |
+
if not verbose:
|
390 |
+
will_remove, will_skip = compress_for_output_listing(self._paths)
|
391 |
+
else:
|
392 |
+
# In verbose mode, display all the files that are going to be
|
393 |
+
# deleted.
|
394 |
+
will_remove = set(self._paths)
|
395 |
+
will_skip = set()
|
396 |
+
|
397 |
+
_display("Would remove:", will_remove)
|
398 |
+
_display("Would not remove (might be manually added):", will_skip)
|
399 |
+
_display("Would not remove (outside of prefix):", self._refuse)
|
400 |
+
if verbose:
|
401 |
+
_display("Will actually move:", compress_for_rename(self._paths))
|
402 |
+
|
403 |
+
return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"
|
404 |
+
|
405 |
+
def rollback(self) -> None:
|
406 |
+
"""Rollback the changes previously made by remove()."""
|
407 |
+
if not self._moved_paths.can_rollback:
|
408 |
+
logger.error(
|
409 |
+
"Can't roll back %s; was not uninstalled",
|
410 |
+
self._dist.raw_name,
|
411 |
+
)
|
412 |
+
return
|
413 |
+
logger.info("Rolling back uninstall of %s", self._dist.raw_name)
|
414 |
+
self._moved_paths.rollback()
|
415 |
+
for pth in self._pth.values():
|
416 |
+
pth.rollback()
|
417 |
+
|
418 |
+
def commit(self) -> None:
|
419 |
+
"""Remove temporary save dir: rollback will no longer be possible."""
|
420 |
+
self._moved_paths.commit()
|
421 |
+
|
422 |
+
@classmethod
|
423 |
+
def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
|
424 |
+
dist_location = dist.location
|
425 |
+
info_location = dist.info_location
|
426 |
+
if dist_location is None:
|
427 |
+
logger.info(
|
428 |
+
"Not uninstalling %s since it is not installed",
|
429 |
+
dist.canonical_name,
|
430 |
+
)
|
431 |
+
return cls(dist)
|
432 |
+
|
433 |
+
normalized_dist_location = normalize_path(dist_location)
|
434 |
+
if not dist.local:
|
435 |
+
logger.info(
|
436 |
+
"Not uninstalling %s at %s, outside environment %s",
|
437 |
+
dist.canonical_name,
|
438 |
+
normalized_dist_location,
|
439 |
+
sys.prefix,
|
440 |
+
)
|
441 |
+
return cls(dist)
|
442 |
+
|
443 |
+
if normalized_dist_location in {
|
444 |
+
p
|
445 |
+
for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
|
446 |
+
if p
|
447 |
+
}:
|
448 |
+
logger.info(
|
449 |
+
"Not uninstalling %s at %s, as it is in the standard library.",
|
450 |
+
dist.canonical_name,
|
451 |
+
normalized_dist_location,
|
452 |
+
)
|
453 |
+
return cls(dist)
|
454 |
+
|
455 |
+
paths_to_remove = cls(dist)
|
456 |
+
develop_egg_link = egg_link_path_from_location(dist.raw_name)
|
457 |
+
|
458 |
+
# Distribution is installed with metadata in a "flat" .egg-info
|
459 |
+
# directory. This means it is not a modern .dist-info installation, an
|
460 |
+
# egg, or legacy editable.
|
461 |
+
setuptools_flat_installation = (
|
462 |
+
dist.installed_with_setuptools_egg_info
|
463 |
+
and info_location is not None
|
464 |
+
and os.path.exists(info_location)
|
465 |
+
# If dist is editable and the location points to a ``.egg-info``,
|
466 |
+
# we are in fact in the legacy editable case.
|
467 |
+
and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
|
468 |
+
)
|
469 |
+
|
470 |
+
# Uninstall cases order do matter as in the case of 2 installs of the
|
471 |
+
# same package, pip needs to uninstall the currently detected version
|
472 |
+
if setuptools_flat_installation:
|
473 |
+
if info_location is not None:
|
474 |
+
paths_to_remove.add(info_location)
|
475 |
+
installed_files = dist.iter_declared_entries()
|
476 |
+
if installed_files is not None:
|
477 |
+
for installed_file in installed_files:
|
478 |
+
paths_to_remove.add(os.path.join(dist_location, installed_file))
|
479 |
+
# FIXME: need a test for this elif block
|
480 |
+
# occurs with --single-version-externally-managed/--record outside
|
481 |
+
# of pip
|
482 |
+
elif dist.is_file("top_level.txt"):
|
483 |
+
try:
|
484 |
+
namespace_packages = dist.read_text("namespace_packages.txt")
|
485 |
+
except FileNotFoundError:
|
486 |
+
namespaces = []
|
487 |
+
else:
|
488 |
+
namespaces = namespace_packages.splitlines(keepends=False)
|
489 |
+
for top_level_pkg in [
|
490 |
+
p
|
491 |
+
for p in dist.read_text("top_level.txt").splitlines()
|
492 |
+
if p and p not in namespaces
|
493 |
+
]:
|
494 |
+
path = os.path.join(dist_location, top_level_pkg)
|
495 |
+
paths_to_remove.add(path)
|
496 |
+
paths_to_remove.add(f"{path}.py")
|
497 |
+
paths_to_remove.add(f"{path}.pyc")
|
498 |
+
paths_to_remove.add(f"{path}.pyo")
|
499 |
+
|
500 |
+
elif dist.installed_by_distutils:
|
501 |
+
raise UninstallationError(
|
502 |
+
"Cannot uninstall {!r}. It is a distutils installed project "
|
503 |
+
"and thus we cannot accurately determine which files belong "
|
504 |
+
"to it which would lead to only a partial uninstall.".format(
|
505 |
+
dist.raw_name,
|
506 |
+
)
|
507 |
+
)
|
508 |
+
|
509 |
+
elif dist.installed_as_egg:
|
510 |
+
# package installed by easy_install
|
511 |
+
# We cannot match on dist.egg_name because it can slightly vary
|
512 |
+
# i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
|
513 |
+
paths_to_remove.add(dist_location)
|
514 |
+
easy_install_egg = os.path.split(dist_location)[1]
|
515 |
+
easy_install_pth = os.path.join(
|
516 |
+
os.path.dirname(dist_location),
|
517 |
+
"easy-install.pth",
|
518 |
+
)
|
519 |
+
paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
|
520 |
+
|
521 |
+
elif dist.installed_with_dist_info:
|
522 |
+
for path in uninstallation_paths(dist):
|
523 |
+
paths_to_remove.add(path)
|
524 |
+
|
525 |
+
elif develop_egg_link:
|
526 |
+
# PEP 660 modern editable is handled in the ``.dist-info`` case
|
527 |
+
# above, so this only covers the setuptools-style editable.
|
528 |
+
with open(develop_egg_link) as fh:
|
529 |
+
link_pointer = os.path.normcase(fh.readline().strip())
|
530 |
+
assert link_pointer == dist_location, (
|
531 |
+
f"Egg-link {link_pointer} does not match installed location of "
|
532 |
+
f"{dist.raw_name} (at {dist_location})"
|
533 |
+
)
|
534 |
+
paths_to_remove.add(develop_egg_link)
|
535 |
+
easy_install_pth = os.path.join(
|
536 |
+
os.path.dirname(develop_egg_link), "easy-install.pth"
|
537 |
+
)
|
538 |
+
paths_to_remove.add_pth(easy_install_pth, dist_location)
|
539 |
+
|
540 |
+
else:
|
541 |
+
logger.debug(
|
542 |
+
"Not sure how to uninstall: %s - Check: %s",
|
543 |
+
dist,
|
544 |
+
dist_location,
|
545 |
+
)
|
546 |
+
|
547 |
+
if dist.in_usersite:
|
548 |
+
bin_dir = get_bin_user()
|
549 |
+
else:
|
550 |
+
bin_dir = get_bin_prefix()
|
551 |
+
|
552 |
+
# find distutils scripts= scripts
|
553 |
+
try:
|
554 |
+
for script in dist.iterdir("scripts"):
|
555 |
+
paths_to_remove.add(os.path.join(bin_dir, script.name))
|
556 |
+
if WINDOWS:
|
557 |
+
paths_to_remove.add(os.path.join(bin_dir, f"{script.name}.bat"))
|
558 |
+
except (FileNotFoundError, NotADirectoryError):
|
559 |
+
pass
|
560 |
+
|
561 |
+
# find console_scripts and gui_scripts
|
562 |
+
def iter_scripts_to_remove(
|
563 |
+
dist: BaseDistribution,
|
564 |
+
bin_dir: str,
|
565 |
+
) -> Iterator[str]:
|
566 |
+
for entry_point in dist.iter_entry_points():
|
567 |
+
if entry_point.group == "console_scripts":
|
568 |
+
yield from _script_names(bin_dir, entry_point.name, False)
|
569 |
+
elif entry_point.group == "gui_scripts":
|
570 |
+
yield from _script_names(bin_dir, entry_point.name, True)
|
571 |
+
|
572 |
+
for s in iter_scripts_to_remove(dist, bin_dir):
|
573 |
+
paths_to_remove.add(s)
|
574 |
+
|
575 |
+
return paths_to_remove
|
576 |
+
|
577 |
+
|
578 |
+
class UninstallPthEntries:
|
579 |
+
def __init__(self, pth_file: str) -> None:
|
580 |
+
self.file = pth_file
|
581 |
+
self.entries: Set[str] = set()
|
582 |
+
self._saved_lines: Optional[List[bytes]] = None
|
583 |
+
|
584 |
+
def add(self, entry: str) -> None:
|
585 |
+
entry = os.path.normcase(entry)
|
586 |
+
# On Windows, os.path.normcase converts the entry to use
|
587 |
+
# backslashes. This is correct for entries that describe absolute
|
588 |
+
# paths outside of site-packages, but all the others use forward
|
589 |
+
# slashes.
|
590 |
+
# os.path.splitdrive is used instead of os.path.isabs because isabs
|
591 |
+
# treats non-absolute paths with drive letter markings like c:foo\bar
|
592 |
+
# as absolute paths. It also does not recognize UNC paths if they don't
|
593 |
+
# have more than "\\sever\share". Valid examples: "\\server\share\" or
|
594 |
+
# "\\server\share\folder".
|
595 |
+
if WINDOWS and not os.path.splitdrive(entry)[0]:
|
596 |
+
entry = entry.replace("\\", "/")
|
597 |
+
self.entries.add(entry)
|
598 |
+
|
599 |
+
def remove(self) -> None:
|
600 |
+
logger.verbose("Removing pth entries from %s:", self.file)
|
601 |
+
|
602 |
+
# If the file doesn't exist, log a warning and return
|
603 |
+
if not os.path.isfile(self.file):
|
604 |
+
logger.warning("Cannot remove entries from nonexistent file %s", self.file)
|
605 |
+
return
|
606 |
+
with open(self.file, "rb") as fh:
|
607 |
+
# windows uses '\r\n' with py3k, but uses '\n' with py2.x
|
608 |
+
lines = fh.readlines()
|
609 |
+
self._saved_lines = lines
|
610 |
+
if any(b"\r\n" in line for line in lines):
|
611 |
+
endline = "\r\n"
|
612 |
+
else:
|
613 |
+
endline = "\n"
|
614 |
+
# handle missing trailing newline
|
615 |
+
if lines and not lines[-1].endswith(endline.encode("utf-8")):
|
616 |
+
lines[-1] = lines[-1] + endline.encode("utf-8")
|
617 |
+
for entry in self.entries:
|
618 |
+
try:
|
619 |
+
logger.verbose("Removing entry: %s", entry)
|
620 |
+
lines.remove((entry + endline).encode("utf-8"))
|
621 |
+
except ValueError:
|
622 |
+
pass
|
623 |
+
with open(self.file, "wb") as fh:
|
624 |
+
fh.writelines(lines)
|
625 |
+
|
626 |
+
def rollback(self) -> bool:
|
627 |
+
if self._saved_lines is None:
|
628 |
+
logger.error("Cannot roll back changes to %s, none were made", self.file)
|
629 |
+
return False
|
630 |
+
logger.debug("Rolling %s back to previous state", self.file)
|
631 |
+
with open(self.file, "wb") as fh:
|
632 |
+
fh.writelines(self._saved_lines)
|
633 |
+
return True
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__init__.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
pip._vendor is for vendoring dependencies of pip to prevent needing pip to
|
3 |
+
depend on something external.
|
4 |
+
|
5 |
+
Files inside of pip._vendor should be considered immutable and should only be
|
6 |
+
updated to versions from upstream.
|
7 |
+
"""
|
8 |
+
from __future__ import absolute_import
|
9 |
+
|
10 |
+
import glob
|
11 |
+
import os.path
|
12 |
+
import sys
|
13 |
+
|
14 |
+
# Downstream redistributors which have debundled our dependencies should also
|
15 |
+
# patch this value to be true. This will trigger the additional patching
|
16 |
+
# to cause things like "six" to be available as pip.
|
17 |
+
DEBUNDLED = False
|
18 |
+
|
19 |
+
# By default, look in this directory for a bunch of .whl files which we will
|
20 |
+
# add to the beginning of sys.path before attempting to import anything. This
|
21 |
+
# is done to support downstream re-distributors like Debian and Fedora who
|
22 |
+
# wish to create their own Wheels for our dependencies to aid in debundling.
|
23 |
+
WHEEL_DIR = os.path.abspath(os.path.dirname(__file__))
|
24 |
+
|
25 |
+
|
26 |
+
# Define a small helper function to alias our vendored modules to the real ones
|
27 |
+
# if the vendored ones do not exist. This idea of this was taken from
|
28 |
+
# https://github.com/kennethreitz/requests/pull/2567.
|
29 |
+
def vendored(modulename):
|
30 |
+
vendored_name = "{0}.{1}".format(__name__, modulename)
|
31 |
+
|
32 |
+
try:
|
33 |
+
__import__(modulename, globals(), locals(), level=0)
|
34 |
+
except ImportError:
|
35 |
+
# We can just silently allow import failures to pass here. If we
|
36 |
+
# got to this point it means that ``import pip._vendor.whatever``
|
37 |
+
# failed and so did ``import whatever``. Since we're importing this
|
38 |
+
# upfront in an attempt to alias imports, not erroring here will
|
39 |
+
# just mean we get a regular import error whenever pip *actually*
|
40 |
+
# tries to import one of these modules to use it, which actually
|
41 |
+
# gives us a better error message than we would have otherwise
|
42 |
+
# gotten.
|
43 |
+
pass
|
44 |
+
else:
|
45 |
+
sys.modules[vendored_name] = sys.modules[modulename]
|
46 |
+
base, head = vendored_name.rsplit(".", 1)
|
47 |
+
setattr(sys.modules[base], head, sys.modules[modulename])
|
48 |
+
|
49 |
+
|
50 |
+
# If we're operating in a debundled setup, then we want to go ahead and trigger
|
51 |
+
# the aliasing of our vendored libraries as well as looking for wheels to add
|
52 |
+
# to our sys.path. This will cause all of this code to be a no-op typically
|
53 |
+
# however downstream redistributors can enable it in a consistent way across
|
54 |
+
# all platforms.
|
55 |
+
if DEBUNDLED:
|
56 |
+
# Actually look inside of WHEEL_DIR to find .whl files and add them to the
|
57 |
+
# front of our sys.path.
|
58 |
+
sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path
|
59 |
+
|
60 |
+
# Actually alias all of our vendored dependencies.
|
61 |
+
vendored("cachecontrol")
|
62 |
+
vendored("certifi")
|
63 |
+
vendored("colorama")
|
64 |
+
vendored("distlib")
|
65 |
+
vendored("distro")
|
66 |
+
vendored("html5lib")
|
67 |
+
vendored("six")
|
68 |
+
vendored("six.moves")
|
69 |
+
vendored("six.moves.urllib")
|
70 |
+
vendored("six.moves.urllib.parse")
|
71 |
+
vendored("packaging")
|
72 |
+
vendored("packaging.version")
|
73 |
+
vendored("packaging.specifiers")
|
74 |
+
vendored("pep517")
|
75 |
+
vendored("pkg_resources")
|
76 |
+
vendored("platformdirs")
|
77 |
+
vendored("progress")
|
78 |
+
vendored("requests")
|
79 |
+
vendored("requests.exceptions")
|
80 |
+
vendored("requests.packages")
|
81 |
+
vendored("requests.packages.urllib3")
|
82 |
+
vendored("requests.packages.urllib3._collections")
|
83 |
+
vendored("requests.packages.urllib3.connection")
|
84 |
+
vendored("requests.packages.urllib3.connectionpool")
|
85 |
+
vendored("requests.packages.urllib3.contrib")
|
86 |
+
vendored("requests.packages.urllib3.contrib.ntlmpool")
|
87 |
+
vendored("requests.packages.urllib3.contrib.pyopenssl")
|
88 |
+
vendored("requests.packages.urllib3.exceptions")
|
89 |
+
vendored("requests.packages.urllib3.fields")
|
90 |
+
vendored("requests.packages.urllib3.filepost")
|
91 |
+
vendored("requests.packages.urllib3.packages")
|
92 |
+
vendored("requests.packages.urllib3.packages.ordered_dict")
|
93 |
+
vendored("requests.packages.urllib3.packages.six")
|
94 |
+
vendored("requests.packages.urllib3.packages.ssl_match_hostname")
|
95 |
+
vendored("requests.packages.urllib3.packages.ssl_match_hostname."
|
96 |
+
"_implementation")
|
97 |
+
vendored("requests.packages.urllib3.poolmanager")
|
98 |
+
vendored("requests.packages.urllib3.request")
|
99 |
+
vendored("requests.packages.urllib3.response")
|
100 |
+
vendored("requests.packages.urllib3.util")
|
101 |
+
vendored("requests.packages.urllib3.util.connection")
|
102 |
+
vendored("requests.packages.urllib3.util.request")
|
103 |
+
vendored("requests.packages.urllib3.util.response")
|
104 |
+
vendored("requests.packages.urllib3.util.retry")
|
105 |
+
vendored("requests.packages.urllib3.util.ssl_")
|
106 |
+
vendored("requests.packages.urllib3.util.timeout")
|
107 |
+
vendored("requests.packages.urllib3.util.url")
|
108 |
+
vendored("resolvelib")
|
109 |
+
vendored("tenacity")
|
110 |
+
vendored("tomli")
|
111 |
+
vendored("urllib3")
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc
ADDED
Binary file (66.6 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__init__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
2 |
+
from .initialise import init, deinit, reinit, colorama_text
|
3 |
+
from .ansi import Fore, Back, Style, Cursor
|
4 |
+
from .ansitowin32 import AnsiToWin32
|
5 |
+
|
6 |
+
__version__ = '0.4.4'
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (446 Bytes). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/ansi.cpython-310.pyc
ADDED
Binary file (3.01 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/initialise.cpython-310.pyc
ADDED
Binary file (1.69 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/win32.cpython-310.pyc
ADDED
Binary file (3.95 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/__pycache__/winterm.cpython-310.pyc
ADDED
Binary file (4.57 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/ansitowin32.py
ADDED
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
2 |
+
import re
|
3 |
+
import sys
|
4 |
+
import os
|
5 |
+
|
6 |
+
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style, BEL
|
7 |
+
from .winterm import WinTerm, WinColor, WinStyle
|
8 |
+
from .win32 import windll, winapi_test
|
9 |
+
|
10 |
+
|
11 |
+
winterm = None
|
12 |
+
if windll is not None:
|
13 |
+
winterm = WinTerm()
|
14 |
+
|
15 |
+
|
16 |
+
class StreamWrapper(object):
|
17 |
+
'''
|
18 |
+
Wraps a stream (such as stdout), acting as a transparent proxy for all
|
19 |
+
attribute access apart from method 'write()', which is delegated to our
|
20 |
+
Converter instance.
|
21 |
+
'''
|
22 |
+
def __init__(self, wrapped, converter):
|
23 |
+
# double-underscore everything to prevent clashes with names of
|
24 |
+
# attributes on the wrapped stream object.
|
25 |
+
self.__wrapped = wrapped
|
26 |
+
self.__convertor = converter
|
27 |
+
|
28 |
+
def __getattr__(self, name):
|
29 |
+
return getattr(self.__wrapped, name)
|
30 |
+
|
31 |
+
def __enter__(self, *args, **kwargs):
|
32 |
+
# special method lookup bypasses __getattr__/__getattribute__, see
|
33 |
+
# https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
|
34 |
+
# thus, contextlib magic methods are not proxied via __getattr__
|
35 |
+
return self.__wrapped.__enter__(*args, **kwargs)
|
36 |
+
|
37 |
+
def __exit__(self, *args, **kwargs):
|
38 |
+
return self.__wrapped.__exit__(*args, **kwargs)
|
39 |
+
|
40 |
+
def write(self, text):
|
41 |
+
self.__convertor.write(text)
|
42 |
+
|
43 |
+
def isatty(self):
|
44 |
+
stream = self.__wrapped
|
45 |
+
if 'PYCHARM_HOSTED' in os.environ:
|
46 |
+
if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
|
47 |
+
return True
|
48 |
+
try:
|
49 |
+
stream_isatty = stream.isatty
|
50 |
+
except AttributeError:
|
51 |
+
return False
|
52 |
+
else:
|
53 |
+
return stream_isatty()
|
54 |
+
|
55 |
+
@property
|
56 |
+
def closed(self):
|
57 |
+
stream = self.__wrapped
|
58 |
+
try:
|
59 |
+
return stream.closed
|
60 |
+
except AttributeError:
|
61 |
+
return True
|
62 |
+
|
63 |
+
|
64 |
+
class AnsiToWin32(object):
|
65 |
+
'''
|
66 |
+
Implements a 'write()' method which, on Windows, will strip ANSI character
|
67 |
+
sequences from the text, and if outputting to a tty, will convert them into
|
68 |
+
win32 function calls.
|
69 |
+
'''
|
70 |
+
ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
|
71 |
+
ANSI_OSC_RE = re.compile('\001?\033\\]([^\a]*)(\a)\002?') # Operating System Command
|
72 |
+
|
73 |
+
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
|
74 |
+
# The wrapped stream (normally sys.stdout or sys.stderr)
|
75 |
+
self.wrapped = wrapped
|
76 |
+
|
77 |
+
# should we reset colors to defaults after every .write()
|
78 |
+
self.autoreset = autoreset
|
79 |
+
|
80 |
+
# create the proxy wrapping our output stream
|
81 |
+
self.stream = StreamWrapper(wrapped, self)
|
82 |
+
|
83 |
+
on_windows = os.name == 'nt'
|
84 |
+
# We test if the WinAPI works, because even if we are on Windows
|
85 |
+
# we may be using a terminal that doesn't support the WinAPI
|
86 |
+
# (e.g. Cygwin Terminal). In this case it's up to the terminal
|
87 |
+
# to support the ANSI codes.
|
88 |
+
conversion_supported = on_windows and winapi_test()
|
89 |
+
|
90 |
+
# should we strip ANSI sequences from our output?
|
91 |
+
if strip is None:
|
92 |
+
strip = conversion_supported or (not self.stream.closed and not self.stream.isatty())
|
93 |
+
self.strip = strip
|
94 |
+
|
95 |
+
# should we should convert ANSI sequences into win32 calls?
|
96 |
+
if convert is None:
|
97 |
+
convert = conversion_supported and not self.stream.closed and self.stream.isatty()
|
98 |
+
self.convert = convert
|
99 |
+
|
100 |
+
# dict of ansi codes to win32 functions and parameters
|
101 |
+
self.win32_calls = self.get_win32_calls()
|
102 |
+
|
103 |
+
# are we wrapping stderr?
|
104 |
+
self.on_stderr = self.wrapped is sys.stderr
|
105 |
+
|
106 |
+
def should_wrap(self):
|
107 |
+
'''
|
108 |
+
True if this class is actually needed. If false, then the output
|
109 |
+
stream will not be affected, nor will win32 calls be issued, so
|
110 |
+
wrapping stdout is not actually required. This will generally be
|
111 |
+
False on non-Windows platforms, unless optional functionality like
|
112 |
+
autoreset has been requested using kwargs to init()
|
113 |
+
'''
|
114 |
+
return self.convert or self.strip or self.autoreset
|
115 |
+
|
116 |
+
def get_win32_calls(self):
|
117 |
+
if self.convert and winterm:
|
118 |
+
return {
|
119 |
+
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
|
120 |
+
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
|
121 |
+
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
|
122 |
+
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
|
123 |
+
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
|
124 |
+
AnsiFore.RED: (winterm.fore, WinColor.RED),
|
125 |
+
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
|
126 |
+
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
|
127 |
+
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
|
128 |
+
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
|
129 |
+
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
|
130 |
+
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
|
131 |
+
AnsiFore.RESET: (winterm.fore, ),
|
132 |
+
AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
|
133 |
+
AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
|
134 |
+
AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
|
135 |
+
AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
|
136 |
+
AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
|
137 |
+
AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
|
138 |
+
AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
|
139 |
+
AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
|
140 |
+
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
|
141 |
+
AnsiBack.RED: (winterm.back, WinColor.RED),
|
142 |
+
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
|
143 |
+
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
|
144 |
+
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
|
145 |
+
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
|
146 |
+
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
|
147 |
+
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
|
148 |
+
AnsiBack.RESET: (winterm.back, ),
|
149 |
+
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
|
150 |
+
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
|
151 |
+
AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
|
152 |
+
AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
|
153 |
+
AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
|
154 |
+
AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
|
155 |
+
AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
|
156 |
+
AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
|
157 |
+
}
|
158 |
+
return dict()
|
159 |
+
|
160 |
+
def write(self, text):
|
161 |
+
if self.strip or self.convert:
|
162 |
+
self.write_and_convert(text)
|
163 |
+
else:
|
164 |
+
self.wrapped.write(text)
|
165 |
+
self.wrapped.flush()
|
166 |
+
if self.autoreset:
|
167 |
+
self.reset_all()
|
168 |
+
|
169 |
+
|
170 |
+
def reset_all(self):
|
171 |
+
if self.convert:
|
172 |
+
self.call_win32('m', (0,))
|
173 |
+
elif not self.strip and not self.stream.closed:
|
174 |
+
self.wrapped.write(Style.RESET_ALL)
|
175 |
+
|
176 |
+
|
177 |
+
def write_and_convert(self, text):
|
178 |
+
'''
|
179 |
+
Write the given text to our wrapped stream, stripping any ANSI
|
180 |
+
sequences from the text, and optionally converting them into win32
|
181 |
+
calls.
|
182 |
+
'''
|
183 |
+
cursor = 0
|
184 |
+
text = self.convert_osc(text)
|
185 |
+
for match in self.ANSI_CSI_RE.finditer(text):
|
186 |
+
start, end = match.span()
|
187 |
+
self.write_plain_text(text, cursor, start)
|
188 |
+
self.convert_ansi(*match.groups())
|
189 |
+
cursor = end
|
190 |
+
self.write_plain_text(text, cursor, len(text))
|
191 |
+
|
192 |
+
|
193 |
+
def write_plain_text(self, text, start, end):
|
194 |
+
if start < end:
|
195 |
+
self.wrapped.write(text[start:end])
|
196 |
+
self.wrapped.flush()
|
197 |
+
|
198 |
+
|
199 |
+
def convert_ansi(self, paramstring, command):
|
200 |
+
if self.convert:
|
201 |
+
params = self.extract_params(command, paramstring)
|
202 |
+
self.call_win32(command, params)
|
203 |
+
|
204 |
+
|
205 |
+
def extract_params(self, command, paramstring):
|
206 |
+
if command in 'Hf':
|
207 |
+
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
|
208 |
+
while len(params) < 2:
|
209 |
+
# defaults:
|
210 |
+
params = params + (1,)
|
211 |
+
else:
|
212 |
+
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
|
213 |
+
if len(params) == 0:
|
214 |
+
# defaults:
|
215 |
+
if command in 'JKm':
|
216 |
+
params = (0,)
|
217 |
+
elif command in 'ABCD':
|
218 |
+
params = (1,)
|
219 |
+
|
220 |
+
return params
|
221 |
+
|
222 |
+
|
223 |
+
def call_win32(self, command, params):
|
224 |
+
if command == 'm':
|
225 |
+
for param in params:
|
226 |
+
if param in self.win32_calls:
|
227 |
+
func_args = self.win32_calls[param]
|
228 |
+
func = func_args[0]
|
229 |
+
args = func_args[1:]
|
230 |
+
kwargs = dict(on_stderr=self.on_stderr)
|
231 |
+
func(*args, **kwargs)
|
232 |
+
elif command in 'J':
|
233 |
+
winterm.erase_screen(params[0], on_stderr=self.on_stderr)
|
234 |
+
elif command in 'K':
|
235 |
+
winterm.erase_line(params[0], on_stderr=self.on_stderr)
|
236 |
+
elif command in 'Hf': # cursor position - absolute
|
237 |
+
winterm.set_cursor_position(params, on_stderr=self.on_stderr)
|
238 |
+
elif command in 'ABCD': # cursor position - relative
|
239 |
+
n = params[0]
|
240 |
+
# A - up, B - down, C - forward, D - back
|
241 |
+
x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
|
242 |
+
winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
|
243 |
+
|
244 |
+
|
245 |
+
def convert_osc(self, text):
|
246 |
+
for match in self.ANSI_OSC_RE.finditer(text):
|
247 |
+
start, end = match.span()
|
248 |
+
text = text[:start] + text[end:]
|
249 |
+
paramstring, command = match.groups()
|
250 |
+
if command == BEL:
|
251 |
+
if paramstring.count(";") == 1:
|
252 |
+
params = paramstring.split(";")
|
253 |
+
# 0 - change title and icon (we will only change title)
|
254 |
+
# 1 - change icon (we don't support this)
|
255 |
+
# 2 - change title
|
256 |
+
if params[0] in '02':
|
257 |
+
winterm.set_title(params[1])
|
258 |
+
return text
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/initialise.py
ADDED
@@ -0,0 +1,80 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
|
2 |
+
import atexit
|
3 |
+
import contextlib
|
4 |
+
import sys
|
5 |
+
|
6 |
+
from .ansitowin32 import AnsiToWin32
|
7 |
+
|
8 |
+
|
9 |
+
orig_stdout = None
|
10 |
+
orig_stderr = None
|
11 |
+
|
12 |
+
wrapped_stdout = None
|
13 |
+
wrapped_stderr = None
|
14 |
+
|
15 |
+
atexit_done = False
|
16 |
+
|
17 |
+
|
18 |
+
def reset_all():
|
19 |
+
if AnsiToWin32 is not None: # Issue #74: objects might become None at exit
|
20 |
+
AnsiToWin32(orig_stdout).reset_all()
|
21 |
+
|
22 |
+
|
23 |
+
def init(autoreset=False, convert=None, strip=None, wrap=True):
|
24 |
+
|
25 |
+
if not wrap and any([autoreset, convert, strip]):
|
26 |
+
raise ValueError('wrap=False conflicts with any other arg=True')
|
27 |
+
|
28 |
+
global wrapped_stdout, wrapped_stderr
|
29 |
+
global orig_stdout, orig_stderr
|
30 |
+
|
31 |
+
orig_stdout = sys.stdout
|
32 |
+
orig_stderr = sys.stderr
|
33 |
+
|
34 |
+
if sys.stdout is None:
|
35 |
+
wrapped_stdout = None
|
36 |
+
else:
|
37 |
+
sys.stdout = wrapped_stdout = \
|
38 |
+
wrap_stream(orig_stdout, convert, strip, autoreset, wrap)
|
39 |
+
if sys.stderr is None:
|
40 |
+
wrapped_stderr = None
|
41 |
+
else:
|
42 |
+
sys.stderr = wrapped_stderr = \
|
43 |
+
wrap_stream(orig_stderr, convert, strip, autoreset, wrap)
|
44 |
+
|
45 |
+
global atexit_done
|
46 |
+
if not atexit_done:
|
47 |
+
atexit.register(reset_all)
|
48 |
+
atexit_done = True
|
49 |
+
|
50 |
+
|
51 |
+
def deinit():
|
52 |
+
if orig_stdout is not None:
|
53 |
+
sys.stdout = orig_stdout
|
54 |
+
if orig_stderr is not None:
|
55 |
+
sys.stderr = orig_stderr
|
56 |
+
|
57 |
+
|
58 |
+
@contextlib.contextmanager
|
59 |
+
def colorama_text(*args, **kwargs):
|
60 |
+
init(*args, **kwargs)
|
61 |
+
try:
|
62 |
+
yield
|
63 |
+
finally:
|
64 |
+
deinit()
|
65 |
+
|
66 |
+
|
67 |
+
def reinit():
|
68 |
+
if wrapped_stdout is not None:
|
69 |
+
sys.stdout = wrapped_stdout
|
70 |
+
if wrapped_stderr is not None:
|
71 |
+
sys.stderr = wrapped_stderr
|
72 |
+
|
73 |
+
|
74 |
+
def wrap_stream(stream, convert, strip, autoreset, wrap):
|
75 |
+
if wrap:
|
76 |
+
wrapper = AnsiToWin32(stream,
|
77 |
+
convert=convert, strip=strip, autoreset=autoreset)
|
78 |
+
if wrapper.should_wrap():
|
79 |
+
stream = wrapper.stream
|
80 |
+
return stream
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/colorama/win32.py
ADDED
@@ -0,0 +1,152 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
+# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
+
+# from winbase.h
+STDOUT = -11
+STDERR = -12
+
+try:
+    import ctypes
+    from ctypes import LibraryLoader
+    windll = LibraryLoader(ctypes.WinDLL)
+    from ctypes import wintypes
+except (AttributeError, ImportError):
+    windll = None
+    SetConsoleTextAttribute = lambda *_: None
+    winapi_test = lambda *_: None
+else:
+    from ctypes import byref, Structure, c_char, POINTER
+
+    COORD = wintypes._COORD
+
+    class CONSOLE_SCREEN_BUFFER_INFO(Structure):
+        """struct in wincon.h."""
+        _fields_ = [
+            ("dwSize", COORD),
+            ("dwCursorPosition", COORD),
+            ("wAttributes", wintypes.WORD),
+            ("srWindow", wintypes.SMALL_RECT),
+            ("dwMaximumWindowSize", COORD),
+        ]
+        def __str__(self):
+            return '(%d,%d,%d,%d,%d,%d,%d,%d,%d,%d,%d)' % (
+                self.dwSize.Y, self.dwSize.X
+                , self.dwCursorPosition.Y, self.dwCursorPosition.X
+                , self.wAttributes
+                , self.srWindow.Top, self.srWindow.Left, self.srWindow.Bottom, self.srWindow.Right
+                , self.dwMaximumWindowSize.Y, self.dwMaximumWindowSize.X
+            )
+
+    _GetStdHandle = windll.kernel32.GetStdHandle
+    _GetStdHandle.argtypes = [
+        wintypes.DWORD,
+    ]
+    _GetStdHandle.restype = wintypes.HANDLE
+
+    _GetConsoleScreenBufferInfo = windll.kernel32.GetConsoleScreenBufferInfo
+    _GetConsoleScreenBufferInfo.argtypes = [
+        wintypes.HANDLE,
+        POINTER(CONSOLE_SCREEN_BUFFER_INFO),
+    ]
+    _GetConsoleScreenBufferInfo.restype = wintypes.BOOL
+
+    _SetConsoleTextAttribute = windll.kernel32.SetConsoleTextAttribute
+    _SetConsoleTextAttribute.argtypes = [
+        wintypes.HANDLE,
+        wintypes.WORD,
+    ]
+    _SetConsoleTextAttribute.restype = wintypes.BOOL
+
+    _SetConsoleCursorPosition = windll.kernel32.SetConsoleCursorPosition
+    _SetConsoleCursorPosition.argtypes = [
+        wintypes.HANDLE,
+        COORD,
+    ]
+    _SetConsoleCursorPosition.restype = wintypes.BOOL
+
+    _FillConsoleOutputCharacterA = windll.kernel32.FillConsoleOutputCharacterA
+    _FillConsoleOutputCharacterA.argtypes = [
+        wintypes.HANDLE,
+        c_char,
+        wintypes.DWORD,
+        COORD,
+        POINTER(wintypes.DWORD),
+    ]
+    _FillConsoleOutputCharacterA.restype = wintypes.BOOL
+
+    _FillConsoleOutputAttribute = windll.kernel32.FillConsoleOutputAttribute
+    _FillConsoleOutputAttribute.argtypes = [
+        wintypes.HANDLE,
+        wintypes.WORD,
+        wintypes.DWORD,
+        COORD,
+        POINTER(wintypes.DWORD),
+    ]
+    _FillConsoleOutputAttribute.restype = wintypes.BOOL
+
+    _SetConsoleTitleW = windll.kernel32.SetConsoleTitleW
+    _SetConsoleTitleW.argtypes = [
+        wintypes.LPCWSTR
+    ]
+    _SetConsoleTitleW.restype = wintypes.BOOL
+
+    def _winapi_test(handle):
+        csbi = CONSOLE_SCREEN_BUFFER_INFO()
+        success = _GetConsoleScreenBufferInfo(
+            handle, byref(csbi))
+        return bool(success)
+
+    def winapi_test():
+        return any(_winapi_test(h) for h in
+                   (_GetStdHandle(STDOUT), _GetStdHandle(STDERR)))
+
+    def GetConsoleScreenBufferInfo(stream_id=STDOUT):
+        handle = _GetStdHandle(stream_id)
+        csbi = CONSOLE_SCREEN_BUFFER_INFO()
+        success = _GetConsoleScreenBufferInfo(
+            handle, byref(csbi))
+        return csbi
+
+    def SetConsoleTextAttribute(stream_id, attrs):
+        handle = _GetStdHandle(stream_id)
+        return _SetConsoleTextAttribute(handle, attrs)
+
+    def SetConsoleCursorPosition(stream_id, position, adjust=True):
+        position = COORD(*position)
+        # If the position is out of range, do nothing.
+        if position.Y <= 0 or position.X <= 0:
+            return
+        # Adjust for Windows' SetConsoleCursorPosition:
+        #    1. being 0-based, while ANSI is 1-based.
+        #    2. expecting (x,y), while ANSI uses (y,x).
+        adjusted_position = COORD(position.Y - 1, position.X - 1)
+        if adjust:
+            # Adjust for viewport's scroll position
+            sr = GetConsoleScreenBufferInfo(STDOUT).srWindow
+            adjusted_position.Y += sr.Top
+            adjusted_position.X += sr.Left
+        # Resume normal processing
+        handle = _GetStdHandle(stream_id)
+        return _SetConsoleCursorPosition(handle, adjusted_position)
+
+    def FillConsoleOutputCharacter(stream_id, char, length, start):
+        handle = _GetStdHandle(stream_id)
+        char = c_char(char.encode())
+        length = wintypes.DWORD(length)
+        num_written = wintypes.DWORD(0)
+        # Note that this is hard-coded for ANSI (vs wide) bytes.
+        success = _FillConsoleOutputCharacterA(
+            handle, char, length, start, byref(num_written))
+        return num_written.value
+
+    def FillConsoleOutputAttribute(stream_id, attr, length, start):
+        ''' FillConsoleOutputAttribute( hConsole, csbi.wAttributes, dwConSize, coordScreen, &cCharsWritten )'''
+        handle = _GetStdHandle(stream_id)
+        attribute = wintypes.WORD(attr)
+        length = wintypes.DWORD(length)
+        num_written = wintypes.DWORD(0)
+        # Note that this is hard-coded for ANSI (vs wide) bytes.
+        return _FillConsoleOutputAttribute(
+            handle, attribute, length, start, byref(num_written))
+
+    def SetConsoleTitle(title):
+        return _SetConsoleTitleW(title)
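win32.py degrades gracefully off Windows: when ctypes.WinDLL is unavailable, windll stays None and winapi_test/SetConsoleTextAttribute become no-op lambdas. An illustrative sketch of probing for a usable console with the names defined above, assuming only that the vendored module is importable; the first branch is reached only when a real Windows console is attached:

    from pip._vendor.colorama import win32

    if win32.windll is not None and win32.winapi_test():
        # kernel32 wrappers are live: query the screen buffer and set a title.
        info = win32.GetConsoleScreenBufferInfo(win32.STDOUT)
        win32.SetConsoleTitle("colorama demo")
        print("console buffer:", info)
    else:
        # POSIX, or no attached console: ANSI sequences pass through untouched.
        print("no Win32 console available")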
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/distro.py
ADDED
@@ -0,0 +1,1386 @@
1 |
+
# Copyright 2015,2016,2017 Nir Cohen
|
2 |
+
#
|
3 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
4 |
+
# you may not use this file except in compliance with the License.
|
5 |
+
# You may obtain a copy of the License at
|
6 |
+
#
|
7 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
8 |
+
#
|
9 |
+
# Unless required by applicable law or agreed to in writing, software
|
10 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
11 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12 |
+
# See the License for the specific language governing permissions and
|
13 |
+
# limitations under the License.
|
14 |
+
|
15 |
+
"""
|
16 |
+
The ``distro`` package (``distro`` stands for Linux Distribution) provides
|
17 |
+
information about the Linux distribution it runs on, such as a reliable
|
18 |
+
machine-readable distro ID, or version information.
|
19 |
+
|
20 |
+
It is the recommended replacement for Python's original
|
21 |
+
:py:func:`platform.linux_distribution` function, but it provides much more
|
22 |
+
functionality. An alternative implementation became necessary because Python
|
23 |
+
3.5 deprecated this function, and Python 3.8 removed it altogether. Its
|
24 |
+
predecessor function :py:func:`platform.dist` was already deprecated since
|
25 |
+
Python 2.6 and removed in Python 3.8. Still, there are many cases in which
|
26 |
+
access to OS distribution information is needed. See `Python issue 1322
|
27 |
+
<https://bugs.python.org/issue1322>`_ for more information.
|
28 |
+
"""
|
29 |
+
|
30 |
+
import argparse
|
31 |
+
import json
|
32 |
+
import logging
|
33 |
+
import os
|
34 |
+
import re
|
35 |
+
import shlex
|
36 |
+
import subprocess
|
37 |
+
import sys
|
38 |
+
import warnings
|
39 |
+
|
40 |
+
__version__ = "1.6.0"
|
41 |
+
|
42 |
+
# Use `if False` to avoid an ImportError on Python 2. After dropping Python 2
|
43 |
+
# support, can use typing.TYPE_CHECKING instead. See:
|
44 |
+
# https://docs.python.org/3/library/typing.html#typing.TYPE_CHECKING
|
45 |
+
if False: # pragma: nocover
|
46 |
+
from typing import (
|
47 |
+
Any,
|
48 |
+
Callable,
|
49 |
+
Dict,
|
50 |
+
Iterable,
|
51 |
+
Optional,
|
52 |
+
Sequence,
|
53 |
+
TextIO,
|
54 |
+
Tuple,
|
55 |
+
Type,
|
56 |
+
TypedDict,
|
57 |
+
Union,
|
58 |
+
)
|
59 |
+
|
60 |
+
VersionDict = TypedDict(
|
61 |
+
"VersionDict", {"major": str, "minor": str, "build_number": str}
|
62 |
+
)
|
63 |
+
InfoDict = TypedDict(
|
64 |
+
"InfoDict",
|
65 |
+
{
|
66 |
+
"id": str,
|
67 |
+
"version": str,
|
68 |
+
"version_parts": VersionDict,
|
69 |
+
"like": str,
|
70 |
+
"codename": str,
|
71 |
+
},
|
72 |
+
)
|
73 |
+
|
74 |
+
|
75 |
+
_UNIXCONFDIR = os.environ.get("UNIXCONFDIR", "/etc")
|
76 |
+
_UNIXUSRLIBDIR = os.environ.get("UNIXUSRLIBDIR", "/usr/lib")
|
77 |
+
_OS_RELEASE_BASENAME = "os-release"
|
78 |
+
|
79 |
+
#: Translation table for normalizing the "ID" attribute defined in os-release
|
80 |
+
#: files, for use by the :func:`distro.id` method.
|
81 |
+
#:
|
82 |
+
#: * Key: Value as defined in the os-release file, translated to lower case,
|
83 |
+
#: with blanks translated to underscores.
|
84 |
+
#:
|
85 |
+
#: * Value: Normalized value.
|
86 |
+
NORMALIZED_OS_ID = {
|
87 |
+
"ol": "oracle", # Oracle Linux
|
88 |
+
}
|
89 |
+
|
90 |
+
#: Translation table for normalizing the "Distributor ID" attribute returned by
|
91 |
+
#: the lsb_release command, for use by the :func:`distro.id` method.
|
92 |
+
#:
|
93 |
+
#: * Key: Value as returned by the lsb_release command, translated to lower
|
94 |
+
#: case, with blanks translated to underscores.
|
95 |
+
#:
|
96 |
+
#: * Value: Normalized value.
|
97 |
+
NORMALIZED_LSB_ID = {
|
98 |
+
"enterpriseenterpriseas": "oracle", # Oracle Enterprise Linux 4
|
99 |
+
"enterpriseenterpriseserver": "oracle", # Oracle Linux 5
|
100 |
+
"redhatenterpriseworkstation": "rhel", # RHEL 6, 7 Workstation
|
101 |
+
"redhatenterpriseserver": "rhel", # RHEL 6, 7 Server
|
102 |
+
"redhatenterprisecomputenode": "rhel", # RHEL 6 ComputeNode
|
103 |
+
}
|
104 |
+
|
105 |
+
#: Translation table for normalizing the distro ID derived from the file name
|
106 |
+
#: of distro release files, for use by the :func:`distro.id` method.
|
107 |
+
#:
|
108 |
+
#: * Key: Value as derived from the file name of a distro release file,
|
109 |
+
#: translated to lower case, with blanks translated to underscores.
|
110 |
+
#:
|
111 |
+
#: * Value: Normalized value.
|
112 |
+
NORMALIZED_DISTRO_ID = {
|
113 |
+
"redhat": "rhel", # RHEL 6.x, 7.x
|
114 |
+
}
|
115 |
+
|
116 |
+
# Pattern for content of distro release file (reversed)
|
117 |
+
_DISTRO_RELEASE_CONTENT_REVERSED_PATTERN = re.compile(
|
118 |
+
r"(?:[^)]*\)(.*)\()? *(?:STL )?([\d.+\-a-z]*\d) *(?:esaeler *)?(.+)"
|
119 |
+
)
|
120 |
+
|
121 |
+
# Pattern for base file name of distro release file
|
122 |
+
_DISTRO_RELEASE_BASENAME_PATTERN = re.compile(r"(\w+)[-_](release|version)$")
|
123 |
+
|
124 |
+
# Base file names to be ignored when searching for distro release file
|
125 |
+
_DISTRO_RELEASE_IGNORE_BASENAMES = (
|
126 |
+
"debian_version",
|
127 |
+
"lsb-release",
|
128 |
+
"oem-release",
|
129 |
+
_OS_RELEASE_BASENAME,
|
130 |
+
"system-release",
|
131 |
+
"plesk-release",
|
132 |
+
"iredmail-release",
|
133 |
+
)
|
134 |
+
|
135 |
+
|
136 |
+
def linux_distribution(full_distribution_name=True):
|
137 |
+
# type: (bool) -> Tuple[str, str, str]
|
138 |
+
"""
|
139 |
+
.. deprecated:: 1.6.0
|
140 |
+
|
141 |
+
:func:`distro.linux_distribution()` is deprecated. It should only be
|
142 |
+
used as a compatibility shim with Python's
|
143 |
+
:py:func:`platform.linux_distribution()`. Please use :func:`distro.id`,
|
144 |
+
:func:`distro.version` and :func:`distro.name` instead.
|
145 |
+
|
146 |
+
Return information about the current OS distribution as a tuple
|
147 |
+
``(id_name, version, codename)`` with items as follows:
|
148 |
+
|
149 |
+
* ``id_name``: If *full_distribution_name* is false, the result of
|
150 |
+
:func:`distro.id`. Otherwise, the result of :func:`distro.name`.
|
151 |
+
|
152 |
+
* ``version``: The result of :func:`distro.version`.
|
153 |
+
|
154 |
+
* ``codename``: The result of :func:`distro.codename`.
|
155 |
+
|
156 |
+
The interface of this function is compatible with the original
|
157 |
+
:py:func:`platform.linux_distribution` function, supporting a subset of
|
158 |
+
its parameters.
|
159 |
+
|
160 |
+
The data it returns may not exactly be the same, because it uses more data
|
161 |
+
sources than the original function, and that may lead to different data if
|
162 |
+
the OS distribution is not consistent across multiple data sources it
|
163 |
+
provides (there are indeed such distributions ...).
|
164 |
+
|
165 |
+
Another reason for differences is the fact that the :func:`distro.id`
|
166 |
+
method normalizes the distro ID string to a reliable machine-readable value
|
167 |
+
for a number of popular OS distributions.
|
168 |
+
"""
|
169 |
+
warnings.warn(
|
170 |
+
"distro.linux_distribution() is deprecated. It should only be used as a "
|
171 |
+
"compatibility shim with Python's platform.linux_distribution(). Please use "
|
172 |
+
"distro.id(), distro.version() and distro.name() instead.",
|
173 |
+
DeprecationWarning,
|
174 |
+
stacklevel=2,
|
175 |
+
)
|
176 |
+
return _distro.linux_distribution(full_distribution_name)
|
177 |
+
|
178 |
+
|
179 |
+
def id():
|
180 |
+
# type: () -> str
|
181 |
+
"""
|
182 |
+
Return the distro ID of the current distribution, as a
|
183 |
+
machine-readable string.
|
184 |
+
|
185 |
+
For a number of OS distributions, the returned distro ID value is
|
186 |
+
*reliable*, in the sense that it is documented and that it does not change
|
187 |
+
across releases of the distribution.
|
188 |
+
|
189 |
+
This package maintains the following reliable distro ID values:
|
190 |
+
|
191 |
+
============== =========================================
|
192 |
+
Distro ID Distribution
|
193 |
+
============== =========================================
|
194 |
+
"ubuntu" Ubuntu
|
195 |
+
"debian" Debian
|
196 |
+
"rhel" RedHat Enterprise Linux
|
197 |
+
"centos" CentOS
|
198 |
+
"fedora" Fedora
|
199 |
+
"sles" SUSE Linux Enterprise Server
|
200 |
+
"opensuse" openSUSE
|
201 |
+
"amazon" Amazon Linux
|
202 |
+
"arch" Arch Linux
|
203 |
+
"cloudlinux" CloudLinux OS
|
204 |
+
"exherbo" Exherbo Linux
|
205 |
+
"gentoo" GenToo Linux
|
206 |
+
"ibm_powerkvm" IBM PowerKVM
|
207 |
+
"kvmibm" KVM for IBM z Systems
|
208 |
+
"linuxmint" Linux Mint
|
209 |
+
"mageia" Mageia
|
210 |
+
"mandriva" Mandriva Linux
|
211 |
+
"parallels" Parallels
|
212 |
+
"pidora" Pidora
|
213 |
+
"raspbian" Raspbian
|
214 |
+
"oracle" Oracle Linux (and Oracle Enterprise Linux)
|
215 |
+
"scientific" Scientific Linux
|
216 |
+
"slackware" Slackware
|
217 |
+
"xenserver" XenServer
|
218 |
+
"openbsd" OpenBSD
|
219 |
+
"netbsd" NetBSD
|
220 |
+
"freebsd" FreeBSD
|
221 |
+
"midnightbsd" MidnightBSD
|
222 |
+
============== =========================================
|
223 |
+
|
224 |
+
If you have a need to get distros for reliable IDs added into this set,
|
225 |
+
or if you find that the :func:`distro.id` function returns a different
|
226 |
+
distro ID for one of the listed distros, please create an issue in the
|
227 |
+
`distro issue tracker`_.
|
228 |
+
|
229 |
+
**Lookup hierarchy and transformations:**
|
230 |
+
|
231 |
+
First, the ID is obtained from the following sources, in the specified
|
232 |
+
order. The first available and non-empty value is used:
|
233 |
+
|
234 |
+
* the value of the "ID" attribute of the os-release file,
|
235 |
+
|
236 |
+
* the value of the "Distributor ID" attribute returned by the lsb_release
|
237 |
+
command,
|
238 |
+
|
239 |
+
* the first part of the file name of the distro release file,
|
240 |
+
|
241 |
+
The so determined ID value then passes the following transformations,
|
242 |
+
before it is returned by this method:
|
243 |
+
|
244 |
+
* it is translated to lower case,
|
245 |
+
|
246 |
+
* blanks (which should not be there anyway) are translated to underscores,
|
247 |
+
|
248 |
+
* a normalization of the ID is performed, based upon
|
249 |
+
`normalization tables`_. The purpose of this normalization is to ensure
|
250 |
+
that the ID is as reliable as possible, even across incompatible changes
|
251 |
+
in the OS distributions. A common reason for an incompatible change is
|
252 |
+
the addition of an os-release file, or the addition of the lsb_release
|
253 |
+
command, with ID values that differ from what was previously determined
|
254 |
+
from the distro release file name.
|
255 |
+
"""
|
256 |
+
return _distro.id()
|
257 |
+
|
258 |
+
|
259 |
+
def name(pretty=False):
|
260 |
+
# type: (bool) -> str
|
261 |
+
"""
|
262 |
+
Return the name of the current OS distribution, as a human-readable
|
263 |
+
string.
|
264 |
+
|
265 |
+
If *pretty* is false, the name is returned without version or codename.
|
266 |
+
(e.g. "CentOS Linux")
|
267 |
+
|
268 |
+
If *pretty* is true, the version and codename are appended.
|
269 |
+
(e.g. "CentOS Linux 7.1.1503 (Core)")
|
270 |
+
|
271 |
+
**Lookup hierarchy:**
|
272 |
+
|
273 |
+
The name is obtained from the following sources, in the specified order.
|
274 |
+
The first available and non-empty value is used:
|
275 |
+
|
276 |
+
* If *pretty* is false:
|
277 |
+
|
278 |
+
- the value of the "NAME" attribute of the os-release file,
|
279 |
+
|
280 |
+
- the value of the "Distributor ID" attribute returned by the lsb_release
|
281 |
+
command,
|
282 |
+
|
283 |
+
- the value of the "<name>" field of the distro release file.
|
284 |
+
|
285 |
+
* If *pretty* is true:
|
286 |
+
|
287 |
+
- the value of the "PRETTY_NAME" attribute of the os-release file,
|
288 |
+
|
289 |
+
- the value of the "Description" attribute returned by the lsb_release
|
290 |
+
command,
|
291 |
+
|
292 |
+
- the value of the "<name>" field of the distro release file, appended
|
293 |
+
with the value of the pretty version ("<version_id>" and "<codename>"
|
294 |
+
fields) of the distro release file, if available.
|
295 |
+
"""
|
296 |
+
return _distro.name(pretty)
|
297 |
+
|
298 |
+
|
299 |
+
def version(pretty=False, best=False):
|
300 |
+
# type: (bool, bool) -> str
|
301 |
+
"""
|
302 |
+
Return the version of the current OS distribution, as a human-readable
|
303 |
+
string.
|
304 |
+
|
305 |
+
If *pretty* is false, the version is returned without codename (e.g.
|
306 |
+
"7.0").
|
307 |
+
|
308 |
+
If *pretty* is true, the codename in parenthesis is appended, if the
|
309 |
+
codename is non-empty (e.g. "7.0 (Maipo)").
|
310 |
+
|
311 |
+
Some distributions provide version numbers with different precisions in
|
312 |
+
the different sources of distribution information. Examining the different
|
313 |
+
sources in a fixed priority order does not always yield the most precise
|
314 |
+
version (e.g. for Debian 8.2, or CentOS 7.1).
|
315 |
+
|
316 |
+
The *best* parameter can be used to control the approach for the returned
|
317 |
+
version:
|
318 |
+
|
319 |
+
If *best* is false, the first non-empty version number in priority order of
|
320 |
+
the examined sources is returned.
|
321 |
+
|
322 |
+
If *best* is true, the most precise version number out of all examined
|
323 |
+
sources is returned.
|
324 |
+
|
325 |
+
**Lookup hierarchy:**
|
326 |
+
|
327 |
+
In all cases, the version number is obtained from the following sources.
|
328 |
+
If *best* is false, this order represents the priority order:
|
329 |
+
|
330 |
+
* the value of the "VERSION_ID" attribute of the os-release file,
|
331 |
+
* the value of the "Release" attribute returned by the lsb_release
|
332 |
+
command,
|
333 |
+
* the version number parsed from the "<version_id>" field of the first line
|
334 |
+
of the distro release file,
|
335 |
+
* the version number parsed from the "PRETTY_NAME" attribute of the
|
336 |
+
os-release file, if it follows the format of the distro release files.
|
337 |
+
* the version number parsed from the "Description" attribute returned by
|
338 |
+
the lsb_release command, if it follows the format of the distro release
|
339 |
+
files.
|
340 |
+
"""
|
341 |
+
return _distro.version(pretty, best)
|
342 |
+
|
343 |
+
|
344 |
+
def version_parts(best=False):
|
345 |
+
# type: (bool) -> Tuple[str, str, str]
|
346 |
+
"""
|
347 |
+
Return the version of the current OS distribution as a tuple
|
348 |
+
``(major, minor, build_number)`` with items as follows:
|
349 |
+
|
350 |
+
* ``major``: The result of :func:`distro.major_version`.
|
351 |
+
|
352 |
+
* ``minor``: The result of :func:`distro.minor_version`.
|
353 |
+
|
354 |
+
* ``build_number``: The result of :func:`distro.build_number`.
|
355 |
+
|
356 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
357 |
+
method.
|
358 |
+
"""
|
359 |
+
return _distro.version_parts(best)
|
360 |
+
|
361 |
+
|
362 |
+
def major_version(best=False):
|
363 |
+
# type: (bool) -> str
|
364 |
+
"""
|
365 |
+
Return the major version of the current OS distribution, as a string,
|
366 |
+
if provided.
|
367 |
+
Otherwise, the empty string is returned. The major version is the first
|
368 |
+
part of the dot-separated version string.
|
369 |
+
|
370 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
371 |
+
method.
|
372 |
+
"""
|
373 |
+
return _distro.major_version(best)
|
374 |
+
|
375 |
+
|
376 |
+
def minor_version(best=False):
|
377 |
+
# type: (bool) -> str
|
378 |
+
"""
|
379 |
+
Return the minor version of the current OS distribution, as a string,
|
380 |
+
if provided.
|
381 |
+
Otherwise, the empty string is returned. The minor version is the second
|
382 |
+
part of the dot-separated version string.
|
383 |
+
|
384 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
385 |
+
method.
|
386 |
+
"""
|
387 |
+
return _distro.minor_version(best)
|
388 |
+
|
389 |
+
|
390 |
+
def build_number(best=False):
|
391 |
+
# type: (bool) -> str
|
392 |
+
"""
|
393 |
+
Return the build number of the current OS distribution, as a string,
|
394 |
+
if provided.
|
395 |
+
Otherwise, the empty string is returned. The build number is the third part
|
396 |
+
of the dot-separated version string.
|
397 |
+
|
398 |
+
For a description of the *best* parameter, see the :func:`distro.version`
|
399 |
+
method.
|
400 |
+
"""
|
401 |
+
return _distro.build_number(best)
|
402 |
+
|
403 |
+
|
404 |
+
def like():
|
405 |
+
# type: () -> str
|
406 |
+
"""
|
407 |
+
Return a space-separated list of distro IDs of distributions that are
|
408 |
+
closely related to the current OS distribution in regards to packaging
|
409 |
+
and programming interfaces, for example distributions the current
|
410 |
+
distribution is a derivative from.
|
411 |
+
|
412 |
+
**Lookup hierarchy:**
|
413 |
+
|
414 |
+
This information item is only provided by the os-release file.
|
415 |
+
For details, see the description of the "ID_LIKE" attribute in the
|
416 |
+
`os-release man page
|
417 |
+
<http://www.freedesktop.org/software/systemd/man/os-release.html>`_.
|
418 |
+
"""
|
419 |
+
return _distro.like()
|
420 |
+
|
421 |
+
|
422 |
+
def codename():
|
423 |
+
# type: () -> str
|
424 |
+
"""
|
425 |
+
Return the codename for the release of the current OS distribution,
|
426 |
+
as a string.
|
427 |
+
|
428 |
+
If the distribution does not have a codename, an empty string is returned.
|
429 |
+
|
430 |
+
Note that the returned codename is not always really a codename. For
|
431 |
+
example, openSUSE returns "x86_64". This function does not handle such
|
432 |
+
cases in any special way and just returns the string it finds, if any.
|
433 |
+
|
434 |
+
**Lookup hierarchy:**
|
435 |
+
|
436 |
+
* the codename within the "VERSION" attribute of the os-release file, if
|
437 |
+
provided,
|
438 |
+
|
439 |
+
* the value of the "Codename" attribute returned by the lsb_release
|
440 |
+
command,
|
441 |
+
|
442 |
+
* the value of the "<codename>" field of the distro release file.
|
443 |
+
"""
|
444 |
+
return _distro.codename()
|
445 |
+
|
446 |
+
|
447 |
+
def info(pretty=False, best=False):
|
448 |
+
# type: (bool, bool) -> InfoDict
|
449 |
+
"""
|
450 |
+
Return certain machine-readable information items about the current OS
|
451 |
+
distribution in a dictionary, as shown in the following example:
|
452 |
+
|
453 |
+
.. sourcecode:: python
|
454 |
+
|
455 |
+
{
|
456 |
+
'id': 'rhel',
|
457 |
+
'version': '7.0',
|
458 |
+
'version_parts': {
|
459 |
+
'major': '7',
|
460 |
+
'minor': '0',
|
461 |
+
'build_number': ''
|
462 |
+
},
|
463 |
+
'like': 'fedora',
|
464 |
+
'codename': 'Maipo'
|
465 |
+
}
|
466 |
+
|
467 |
+
The dictionary structure and keys are always the same, regardless of which
|
468 |
+
information items are available in the underlying data sources. The values
|
469 |
+
for the various keys are as follows:
|
470 |
+
|
471 |
+
* ``id``: The result of :func:`distro.id`.
|
472 |
+
|
473 |
+
* ``version``: The result of :func:`distro.version`.
|
474 |
+
|
475 |
+
* ``version_parts -> major``: The result of :func:`distro.major_version`.
|
476 |
+
|
477 |
+
* ``version_parts -> minor``: The result of :func:`distro.minor_version`.
|
478 |
+
|
479 |
+
* ``version_parts -> build_number``: The result of
|
480 |
+
:func:`distro.build_number`.
|
481 |
+
|
482 |
+
* ``like``: The result of :func:`distro.like`.
|
483 |
+
|
484 |
+
* ``codename``: The result of :func:`distro.codename`.
|
485 |
+
|
486 |
+
For a description of the *pretty* and *best* parameters, see the
|
487 |
+
:func:`distro.version` method.
|
488 |
+
"""
|
489 |
+
return _distro.info(pretty, best)
|
490 |
+
|
491 |
+
|
492 |
+
def os_release_info():
|
493 |
+
# type: () -> Dict[str, str]
|
494 |
+
"""
|
495 |
+
Return a dictionary containing key-value pairs for the information items
|
496 |
+
from the os-release file data source of the current OS distribution.
|
497 |
+
|
498 |
+
See `os-release file`_ for details about these information items.
|
499 |
+
"""
|
500 |
+
return _distro.os_release_info()
|
501 |
+
|
502 |
+
|
503 |
+
def lsb_release_info():
|
504 |
+
# type: () -> Dict[str, str]
|
505 |
+
"""
|
506 |
+
Return a dictionary containing key-value pairs for the information items
|
507 |
+
from the lsb_release command data source of the current OS distribution.
|
508 |
+
|
509 |
+
See `lsb_release command output`_ for details about these information
|
510 |
+
items.
|
511 |
+
"""
|
512 |
+
return _distro.lsb_release_info()
|
513 |
+
|
514 |
+
|
515 |
+
def distro_release_info():
|
516 |
+
# type: () -> Dict[str, str]
|
517 |
+
"""
|
518 |
+
Return a dictionary containing key-value pairs for the information items
|
519 |
+
from the distro release file data source of the current OS distribution.
|
520 |
+
|
521 |
+
See `distro release file`_ for details about these information items.
|
522 |
+
"""
|
523 |
+
return _distro.distro_release_info()
|
524 |
+
|
525 |
+
|
526 |
+
def uname_info():
|
527 |
+
# type: () -> Dict[str, str]
|
528 |
+
"""
|
529 |
+
Return a dictionary containing key-value pairs for the information items
|
530 |
+
from the distro release file data source of the current OS distribution.
|
531 |
+
"""
|
532 |
+
return _distro.uname_info()
|
533 |
+
|
534 |
+
|
535 |
+
def os_release_attr(attribute):
|
536 |
+
# type: (str) -> str
|
537 |
+
"""
|
538 |
+
Return a single named information item from the os-release file data source
|
539 |
+
of the current OS distribution.
|
540 |
+
|
541 |
+
Parameters:
|
542 |
+
|
543 |
+
* ``attribute`` (string): Key of the information item.
|
544 |
+
|
545 |
+
Returns:
|
546 |
+
|
547 |
+
* (string): Value of the information item, if the item exists.
|
548 |
+
The empty string, if the item does not exist.
|
549 |
+
|
550 |
+
See `os-release file`_ for details about these information items.
|
551 |
+
"""
|
552 |
+
return _distro.os_release_attr(attribute)
|
553 |
+
|
554 |
+
|
555 |
+
def lsb_release_attr(attribute):
|
556 |
+
# type: (str) -> str
|
557 |
+
"""
|
558 |
+
Return a single named information item from the lsb_release command output
|
559 |
+
data source of the current OS distribution.
|
560 |
+
|
561 |
+
Parameters:
|
562 |
+
|
563 |
+
* ``attribute`` (string): Key of the information item.
|
564 |
+
|
565 |
+
Returns:
|
566 |
+
|
567 |
+
* (string): Value of the information item, if the item exists.
|
568 |
+
The empty string, if the item does not exist.
|
569 |
+
|
570 |
+
See `lsb_release command output`_ for details about these information
|
571 |
+
items.
|
572 |
+
"""
|
573 |
+
return _distro.lsb_release_attr(attribute)
|
574 |
+
|
575 |
+
|
576 |
+
def distro_release_attr(attribute):
|
577 |
+
# type: (str) -> str
|
578 |
+
"""
|
579 |
+
Return a single named information item from the distro release file
|
580 |
+
data source of the current OS distribution.
|
581 |
+
|
582 |
+
Parameters:
|
583 |
+
|
584 |
+
* ``attribute`` (string): Key of the information item.
|
585 |
+
|
586 |
+
Returns:
|
587 |
+
|
588 |
+
* (string): Value of the information item, if the item exists.
|
589 |
+
The empty string, if the item does not exist.
|
590 |
+
|
591 |
+
See `distro release file`_ for details about these information items.
|
592 |
+
"""
|
593 |
+
return _distro.distro_release_attr(attribute)
|
594 |
+
|
595 |
+
|
596 |
+
def uname_attr(attribute):
|
597 |
+
# type: (str) -> str
|
598 |
+
"""
|
599 |
+
Return a single named information item from the distro release file
|
600 |
+
data source of the current OS distribution.
|
601 |
+
|
602 |
+
Parameters:
|
603 |
+
|
604 |
+
* ``attribute`` (string): Key of the information item.
|
605 |
+
|
606 |
+
Returns:
|
607 |
+
|
608 |
+
* (string): Value of the information item, if the item exists.
|
609 |
+
The empty string, if the item does not exist.
|
610 |
+
"""
|
611 |
+
return _distro.uname_attr(attribute)
|
612 |
+
|
613 |
+
|
614 |
+
try:
|
615 |
+
from functools import cached_property
|
616 |
+
except ImportError:
|
617 |
+
# Python < 3.8
|
618 |
+
class cached_property(object): # type: ignore
|
619 |
+
"""A version of @property which caches the value. On access, it calls the
|
620 |
+
underlying function and sets the value in `__dict__` so future accesses
|
621 |
+
will not re-call the property.
|
622 |
+
"""
|
623 |
+
|
624 |
+
def __init__(self, f):
|
625 |
+
# type: (Callable[[Any], Any]) -> None
|
626 |
+
self._fname = f.__name__
|
627 |
+
self._f = f
|
628 |
+
|
629 |
+
def __get__(self, obj, owner):
|
630 |
+
# type: (Any, Type[Any]) -> Any
|
631 |
+
assert obj is not None, "call {} on an instance".format(self._fname)
|
632 |
+
ret = obj.__dict__[self._fname] = self._f(obj)
|
633 |
+
return ret
|
634 |
+
|
635 |
+
|
636 |
+
class LinuxDistribution(object):
|
637 |
+
"""
|
638 |
+
Provides information about a OS distribution.
|
639 |
+
|
640 |
+
This package creates a private module-global instance of this class with
|
641 |
+
default initialization arguments, that is used by the
|
642 |
+
`consolidated accessor functions`_ and `single source accessor functions`_.
|
643 |
+
By using default initialization arguments, that module-global instance
|
644 |
+
returns data about the current OS distribution (i.e. the distro this
|
645 |
+
package runs on).
|
646 |
+
|
647 |
+
Normally, it is not necessary to create additional instances of this class.
|
648 |
+
However, in situations where control is needed over the exact data sources
|
649 |
+
that are used, instances of this class can be created with a specific
|
650 |
+
distro release file, or a specific os-release file, or without invoking the
|
651 |
+
lsb_release command.
|
652 |
+
"""
|
653 |
+
|
654 |
+
def __init__(
|
655 |
+
self,
|
656 |
+
include_lsb=True,
|
657 |
+
os_release_file="",
|
658 |
+
distro_release_file="",
|
659 |
+
include_uname=True,
|
660 |
+
root_dir=None,
|
661 |
+
):
|
662 |
+
# type: (bool, str, str, bool, Optional[str]) -> None
|
663 |
+
"""
|
664 |
+
The initialization method of this class gathers information from the
|
665 |
+
available data sources, and stores that in private instance attributes.
|
666 |
+
Subsequent access to the information items uses these private instance
|
667 |
+
attributes, so that the data sources are read only once.
|
668 |
+
|
669 |
+
Parameters:
|
670 |
+
|
671 |
+
* ``include_lsb`` (bool): Controls whether the
|
672 |
+
`lsb_release command output`_ is included as a data source.
|
673 |
+
|
674 |
+
If the lsb_release command is not available in the program execution
|
675 |
+
path, the data source for the lsb_release command will be empty.
|
676 |
+
|
677 |
+
* ``os_release_file`` (string): The path name of the
|
678 |
+
`os-release file`_ that is to be used as a data source.
|
679 |
+
|
680 |
+
An empty string (the default) will cause the default path name to
|
681 |
+
be used (see `os-release file`_ for details).
|
682 |
+
|
683 |
+
If the specified or defaulted os-release file does not exist, the
|
684 |
+
data source for the os-release file will be empty.
|
685 |
+
|
686 |
+
* ``distro_release_file`` (string): The path name of the
|
687 |
+
`distro release file`_ that is to be used as a data source.
|
688 |
+
|
689 |
+
An empty string (the default) will cause a default search algorithm
|
690 |
+
to be used (see `distro release file`_ for details).
|
691 |
+
|
692 |
+
If the specified distro release file does not exist, or if no default
|
693 |
+
distro release file can be found, the data source for the distro
|
694 |
+
release file will be empty.
|
695 |
+
|
696 |
+
* ``include_uname`` (bool): Controls whether uname command output is
|
697 |
+
included as a data source. If the uname command is not available in
|
698 |
+
the program execution path the data source for the uname command will
|
699 |
+
be empty.
|
700 |
+
|
701 |
+
* ``root_dir`` (string): The absolute path to the root directory to use
|
702 |
+
to find distro-related information files.
|
703 |
+
|
704 |
+
Public instance attributes:
|
705 |
+
|
706 |
+
* ``os_release_file`` (string): The path name of the
|
707 |
+
`os-release file`_ that is actually used as a data source. The
|
708 |
+
empty string if no distro release file is used as a data source.
|
709 |
+
|
710 |
+
* ``distro_release_file`` (string): The path name of the
|
711 |
+
`distro release file`_ that is actually used as a data source. The
|
712 |
+
empty string if no distro release file is used as a data source.
|
713 |
+
|
714 |
+
* ``include_lsb`` (bool): The result of the ``include_lsb`` parameter.
|
715 |
+
This controls whether the lsb information will be loaded.
|
716 |
+
|
717 |
+
* ``include_uname`` (bool): The result of the ``include_uname``
|
718 |
+
parameter. This controls whether the uname information will
|
719 |
+
be loaded.
|
720 |
+
|
721 |
+
Raises:
|
722 |
+
|
723 |
+
* :py:exc:`IOError`: Some I/O issue with an os-release file or distro
|
724 |
+
release file.
|
725 |
+
|
726 |
+
* :py:exc:`subprocess.CalledProcessError`: The lsb_release command had
|
727 |
+
some issue (other than not being available in the program execution
|
728 |
+
path).
|
729 |
+
|
730 |
+
* :py:exc:`UnicodeError`: A data source has unexpected characters or
|
731 |
+
uses an unexpected encoding.
|
732 |
+
"""
|
733 |
+
self.root_dir = root_dir
|
734 |
+
self.etc_dir = os.path.join(root_dir, "etc") if root_dir else _UNIXCONFDIR
|
735 |
+
self.usr_lib_dir = (
|
736 |
+
os.path.join(root_dir, "usr/lib") if root_dir else _UNIXUSRLIBDIR
|
737 |
+
)
|
738 |
+
|
739 |
+
if os_release_file:
|
740 |
+
self.os_release_file = os_release_file
|
741 |
+
else:
|
742 |
+
etc_dir_os_release_file = os.path.join(self.etc_dir, _OS_RELEASE_BASENAME)
|
743 |
+
usr_lib_os_release_file = os.path.join(
|
744 |
+
self.usr_lib_dir, _OS_RELEASE_BASENAME
|
745 |
+
)
|
746 |
+
|
747 |
+
# NOTE: The idea is to respect order **and** have it set
|
748 |
+
# at all times for API backwards compatibility.
|
749 |
+
if os.path.isfile(etc_dir_os_release_file) or not os.path.isfile(
|
750 |
+
usr_lib_os_release_file
|
751 |
+
):
|
752 |
+
self.os_release_file = etc_dir_os_release_file
|
753 |
+
else:
|
754 |
+
self.os_release_file = usr_lib_os_release_file
|
755 |
+
|
756 |
+
self.distro_release_file = distro_release_file or "" # updated later
|
757 |
+
self.include_lsb = include_lsb
|
758 |
+
self.include_uname = include_uname
|
759 |
+
|
760 |
+
def __repr__(self):
|
761 |
+
# type: () -> str
|
762 |
+
"""Return repr of all info"""
|
763 |
+
return (
|
764 |
+
"LinuxDistribution("
|
765 |
+
"os_release_file={self.os_release_file!r}, "
|
766 |
+
"distro_release_file={self.distro_release_file!r}, "
|
767 |
+
"include_lsb={self.include_lsb!r}, "
|
768 |
+
"include_uname={self.include_uname!r}, "
|
769 |
+
"_os_release_info={self._os_release_info!r}, "
|
770 |
+
"_lsb_release_info={self._lsb_release_info!r}, "
|
771 |
+
"_distro_release_info={self._distro_release_info!r}, "
|
772 |
+
"_uname_info={self._uname_info!r})".format(self=self)
|
773 |
+
)
|
774 |
+
|
775 |
+
def linux_distribution(self, full_distribution_name=True):
|
776 |
+
# type: (bool) -> Tuple[str, str, str]
|
777 |
+
"""
|
778 |
+
Return information about the OS distribution that is compatible
|
779 |
+
with Python's :func:`platform.linux_distribution`, supporting a subset
|
780 |
+
of its parameters.
|
781 |
+
|
782 |
+
For details, see :func:`distro.linux_distribution`.
|
783 |
+
"""
|
784 |
+
return (
|
785 |
+
self.name() if full_distribution_name else self.id(),
|
786 |
+
self.version(),
|
787 |
+
self.codename(),
|
788 |
+
)
|
789 |
+
|
790 |
+
def id(self):
|
791 |
+
# type: () -> str
|
792 |
+
"""Return the distro ID of the OS distribution, as a string.
|
793 |
+
|
794 |
+
For details, see :func:`distro.id`.
|
795 |
+
"""
|
796 |
+
|
797 |
+
def normalize(distro_id, table):
|
798 |
+
# type: (str, Dict[str, str]) -> str
|
799 |
+
distro_id = distro_id.lower().replace(" ", "_")
|
800 |
+
return table.get(distro_id, distro_id)
|
801 |
+
|
802 |
+
distro_id = self.os_release_attr("id")
|
803 |
+
if distro_id:
|
804 |
+
return normalize(distro_id, NORMALIZED_OS_ID)
|
805 |
+
|
806 |
+
distro_id = self.lsb_release_attr("distributor_id")
|
807 |
+
if distro_id:
|
808 |
+
return normalize(distro_id, NORMALIZED_LSB_ID)
|
809 |
+
|
810 |
+
distro_id = self.distro_release_attr("id")
|
811 |
+
if distro_id:
|
812 |
+
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
813 |
+
|
814 |
+
distro_id = self.uname_attr("id")
|
815 |
+
if distro_id:
|
816 |
+
return normalize(distro_id, NORMALIZED_DISTRO_ID)
|
817 |
+
|
818 |
+
return ""
|
819 |
+
|
820 |
+
def name(self, pretty=False):
|
821 |
+
# type: (bool) -> str
|
822 |
+
"""
|
823 |
+
Return the name of the OS distribution, as a string.
|
824 |
+
|
825 |
+
For details, see :func:`distro.name`.
|
826 |
+
"""
|
827 |
+
name = (
|
828 |
+
self.os_release_attr("name")
|
829 |
+
or self.lsb_release_attr("distributor_id")
|
830 |
+
or self.distro_release_attr("name")
|
831 |
+
or self.uname_attr("name")
|
832 |
+
)
|
833 |
+
if pretty:
|
834 |
+
name = self.os_release_attr("pretty_name") or self.lsb_release_attr(
|
835 |
+
"description"
|
836 |
+
)
|
837 |
+
if not name:
|
838 |
+
name = self.distro_release_attr("name") or self.uname_attr("name")
|
839 |
+
version = self.version(pretty=True)
|
840 |
+
if version:
|
841 |
+
name = name + " " + version
|
842 |
+
return name or ""
|
843 |
+
|
844 |
+
def version(self, pretty=False, best=False):
|
845 |
+
# type: (bool, bool) -> str
|
846 |
+
"""
|
847 |
+
Return the version of the OS distribution, as a string.
|
848 |
+
|
849 |
+
For details, see :func:`distro.version`.
|
850 |
+
"""
|
851 |
+
versions = [
|
852 |
+
self.os_release_attr("version_id"),
|
853 |
+
self.lsb_release_attr("release"),
|
854 |
+
self.distro_release_attr("version_id"),
|
855 |
+
self._parse_distro_release_content(self.os_release_attr("pretty_name")).get(
|
856 |
+
"version_id", ""
|
857 |
+
),
|
858 |
+
self._parse_distro_release_content(
|
859 |
+
self.lsb_release_attr("description")
|
860 |
+
).get("version_id", ""),
|
861 |
+
self.uname_attr("release"),
|
862 |
+
]
|
863 |
+
version = ""
|
864 |
+
if best:
|
865 |
+
# This algorithm uses the last version in priority order that has
|
866 |
+
# the best precision. If the versions are not in conflict, that
|
867 |
+
# does not matter; otherwise, using the last one instead of the
|
868 |
+
# first one might be considered a surprise.
|
869 |
+
for v in versions:
|
870 |
+
if v.count(".") > version.count(".") or version == "":
|
871 |
+
version = v
|
872 |
+
else:
|
873 |
+
for v in versions:
|
874 |
+
if v != "":
|
875 |
+
version = v
|
876 |
+
break
|
877 |
+
if pretty and version and self.codename():
|
878 |
+
version = "{0} ({1})".format(version, self.codename())
|
879 |
+
return version
|
880 |
+
|
881 |
+
def version_parts(self, best=False):
|
882 |
+
# type: (bool) -> Tuple[str, str, str]
|
883 |
+
"""
|
884 |
+
Return the version of the OS distribution, as a tuple of version
|
885 |
+
numbers.
|
886 |
+
|
887 |
+
For details, see :func:`distro.version_parts`.
|
888 |
+
"""
|
889 |
+
version_str = self.version(best=best)
|
890 |
+
if version_str:
|
891 |
+
version_regex = re.compile(r"(\d+)\.?(\d+)?\.?(\d+)?")
|
892 |
+
matches = version_regex.match(version_str)
|
893 |
+
if matches:
|
894 |
+
major, minor, build_number = matches.groups()
|
895 |
+
return major, minor or "", build_number or ""
|
896 |
+
return "", "", ""
|
897 |
+
|
898 |
+
def major_version(self, best=False):
|
899 |
+
# type: (bool) -> str
|
900 |
+
"""
|
901 |
+
Return the major version number of the current distribution.
|
902 |
+
|
903 |
+
For details, see :func:`distro.major_version`.
|
904 |
+
"""
|
905 |
+
return self.version_parts(best)[0]
|
906 |
+
|
907 |
+
def minor_version(self, best=False):
|
908 |
+
# type: (bool) -> str
|
909 |
+
"""
|
910 |
+
Return the minor version number of the current distribution.
|
911 |
+
|
912 |
+
For details, see :func:`distro.minor_version`.
|
913 |
+
"""
|
914 |
+
return self.version_parts(best)[1]
|
915 |
+
|
916 |
+
def build_number(self, best=False):
|
917 |
+
# type: (bool) -> str
|
918 |
+
"""
|
919 |
+
Return the build number of the current distribution.
|
920 |
+
|
921 |
+
For details, see :func:`distro.build_number`.
|
922 |
+
"""
|
923 |
+
return self.version_parts(best)[2]
|
924 |
+
|
925 |
+
def like(self):
|
926 |
+
# type: () -> str
|
927 |
+
"""
|
928 |
+
Return the IDs of distributions that are like the OS distribution.
|
929 |
+
|
930 |
+
For details, see :func:`distro.like`.
|
931 |
+
"""
|
932 |
+
return self.os_release_attr("id_like") or ""
|
933 |
+
|
934 |
+
def codename(self):
|
935 |
+
# type: () -> str
|
936 |
+
"""
|
937 |
+
Return the codename of the OS distribution.
|
938 |
+
|
939 |
+
For details, see :func:`distro.codename`.
|
940 |
+
"""
|
941 |
+
try:
|
942 |
+
# Handle os_release specially since distros might purposefully set
|
943 |
+
# this to empty string to have no codename
|
944 |
+
return self._os_release_info["codename"]
|
945 |
+
except KeyError:
|
946 |
+
return (
|
947 |
+
self.lsb_release_attr("codename")
|
948 |
+
or self.distro_release_attr("codename")
|
949 |
+
or ""
|
950 |
+
)
|
951 |
+
|
952 |
+
def info(self, pretty=False, best=False):
|
953 |
+
# type: (bool, bool) -> InfoDict
|
954 |
+
"""
|
955 |
+
Return certain machine-readable information about the OS
|
956 |
+
distribution.
|
957 |
+
|
958 |
+
For details, see :func:`distro.info`.
|
959 |
+
"""
|
960 |
+
return dict(
|
961 |
+
id=self.id(),
|
962 |
+
version=self.version(pretty, best),
|
963 |
+
version_parts=dict(
|
964 |
+
major=self.major_version(best),
|
965 |
+
minor=self.minor_version(best),
|
966 |
+
build_number=self.build_number(best),
|
967 |
+
),
|
968 |
+
like=self.like(),
|
969 |
+
codename=self.codename(),
|
970 |
+
)
|
971 |
+
|
972 |
+
def os_release_info(self):
|
973 |
+
# type: () -> Dict[str, str]
|
974 |
+
"""
|
975 |
+
Return a dictionary containing key-value pairs for the information
|
976 |
+
items from the os-release file data source of the OS distribution.
|
977 |
+
|
978 |
+
For details, see :func:`distro.os_release_info`.
|
979 |
+
"""
|
980 |
+
return self._os_release_info
|
981 |
+
|
982 |
+
def lsb_release_info(self):
|
983 |
+
# type: () -> Dict[str, str]
|
984 |
+
"""
|
985 |
+
Return a dictionary containing key-value pairs for the information
|
986 |
+
items from the lsb_release command data source of the OS
|
987 |
+
distribution.
|
988 |
+
|
989 |
+
For details, see :func:`distro.lsb_release_info`.
|
990 |
+
"""
|
991 |
+
return self._lsb_release_info
|
992 |
+
|
993 |
+
def distro_release_info(self):
|
994 |
+
# type: () -> Dict[str, str]
|
995 |
+
"""
|
996 |
+
Return a dictionary containing key-value pairs for the information
|
997 |
+
items from the distro release file data source of the OS
|
998 |
+
distribution.
|
999 |
+
|
1000 |
+
For details, see :func:`distro.distro_release_info`.
|
1001 |
+
"""
|
1002 |
+
return self._distro_release_info
|
1003 |
+
|
1004 |
+
def uname_info(self):
|
1005 |
+
# type: () -> Dict[str, str]
|
1006 |
+
"""
|
1007 |
+
Return a dictionary containing key-value pairs for the information
|
1008 |
+
items from the uname command data source of the OS distribution.
|
1009 |
+
|
1010 |
+
For details, see :func:`distro.uname_info`.
|
1011 |
+
"""
|
1012 |
+
return self._uname_info
|
1013 |
+
|
1014 |
+
def os_release_attr(self, attribute):
|
1015 |
+
# type: (str) -> str
|
1016 |
+
"""
|
1017 |
+
Return a single named information item from the os-release file data
|
1018 |
+
source of the OS distribution.
|
1019 |
+
|
1020 |
+
For details, see :func:`distro.os_release_attr`.
|
1021 |
+
"""
|
1022 |
+
return self._os_release_info.get(attribute, "")
|
1023 |
+
|
1024 |
+
def lsb_release_attr(self, attribute):
|
1025 |
+
# type: (str) -> str
|
1026 |
+
"""
|
1027 |
+
Return a single named information item from the lsb_release command
|
1028 |
+
output data source of the OS distribution.
|
1029 |
+
|
1030 |
+
For details, see :func:`distro.lsb_release_attr`.
|
1031 |
+
"""
|
1032 |
+
return self._lsb_release_info.get(attribute, "")
|
1033 |
+
|
1034 |
+
def distro_release_attr(self, attribute):
|
1035 |
+
# type: (str) -> str
|
1036 |
+
"""
|
1037 |
+
Return a single named information item from the distro release file
|
1038 |
+
data source of the OS distribution.
|
1039 |
+
|
1040 |
+
For details, see :func:`distro.distro_release_attr`.
|
1041 |
+
"""
|
1042 |
+
return self._distro_release_info.get(attribute, "")
|
1043 |
+
|
1044 |
+
def uname_attr(self, attribute):
|
1045 |
+
# type: (str) -> str
|
1046 |
+
"""
|
1047 |
+
Return a single named information item from the uname command
|
1048 |
+
output data source of the OS distribution.
|
1049 |
+
|
1050 |
+
For details, see :func:`distro.uname_attr`.
|
1051 |
+
"""
|
1052 |
+
return self._uname_info.get(attribute, "")
|
1053 |
+
|
1054 |
+
@cached_property
|
1055 |
+
def _os_release_info(self):
|
1056 |
+
# type: () -> Dict[str, str]
|
1057 |
+
"""
|
1058 |
+
Get the information items from the specified os-release file.
|
1059 |
+
|
1060 |
+
Returns:
|
1061 |
+
A dictionary containing all information items.
|
1062 |
+
"""
|
1063 |
+
if os.path.isfile(self.os_release_file):
|
1064 |
+
with open(self.os_release_file) as release_file:
|
1065 |
+
return self._parse_os_release_content(release_file)
|
1066 |
+
return {}
|
1067 |
+
|
1068 |
+
@staticmethod
|
1069 |
+
def _parse_os_release_content(lines):
|
1070 |
+
# type: (TextIO) -> Dict[str, str]
|
1071 |
+
"""
|
1072 |
+
Parse the lines of an os-release file.
|
1073 |
+
|
1074 |
+
Parameters:
|
1075 |
+
|
1076 |
+
* lines: Iterable through the lines in the os-release file.
|
1077 |
+
Each line must be a unicode string or a UTF-8 encoded byte
|
1078 |
+
string.
|
1079 |
+
|
1080 |
+
Returns:
|
1081 |
+
A dictionary containing all information items.
|
1082 |
+
"""
|
1083 |
+
props = {}
|
1084 |
+
lexer = shlex.shlex(lines, posix=True)
|
1085 |
+
lexer.whitespace_split = True
|
1086 |
+
|
1087 |
+
# The shlex module defines its `wordchars` variable using literals,
|
1088 |
+
# making it dependent on the encoding of the Python source file.
|
1089 |
+
# In Python 2.6 and 2.7, the shlex source file is encoded in
|
1090 |
+
# 'iso-8859-1', and the `wordchars` variable is defined as a byte
|
1091 |
+
# string. This causes a UnicodeDecodeError to be raised when the
|
1092 |
+
# parsed content is a unicode object. The following fix resolves that
|
1093 |
+
# (... but it should be fixed in shlex...):
|
1094 |
+
if sys.version_info[0] == 2 and isinstance(lexer.wordchars, bytes):
|
1095 |
+
lexer.wordchars = lexer.wordchars.decode("iso-8859-1")
|
1096 |
+
|
1097 |
+
tokens = list(lexer)
|
1098 |
+
for token in tokens:
|
1099 |
+
# At this point, all shell-like parsing has been done (i.e.
|
1100 |
+
# comments processed, quotes and backslash escape sequences
|
1101 |
+
# processed, multi-line values assembled, trailing newlines
|
1102 |
+
# stripped, etc.), so the tokens are now either:
|
1103 |
+
# * variable assignments: var=value
|
1104 |
+
# * commands or their arguments (not allowed in os-release)
|
1105 |
+
if "=" in token:
|
1106 |
+
k, v = token.split("=", 1)
|
1107 |
+
props[k.lower()] = v
|
1108 |
+
else:
|
1109 |
+
# Ignore any tokens that are not variable assignments
|
1110 |
+
pass
|
1111 |
+
|
1112 |
+
if "version_codename" in props:
|
1113 |
+
# os-release added a version_codename field. Use that in
|
1114 |
+
# preference to anything else Note that some distros purposefully
|
1115 |
+
# do not have code names. They should be setting
|
1116 |
+
# version_codename=""
|
1117 |
+
props["codename"] = props["version_codename"]
|
1118 |
+
elif "ubuntu_codename" in props:
|
1119 |
+
# Same as above but a non-standard field name used on older Ubuntus
|
1120 |
+
props["codename"] = props["ubuntu_codename"]
|
1121 |
+
elif "version" in props:
|
1122 |
+
# If there is no version_codename, parse it from the version
|
1123 |
+
match = re.search(r"(\(\D+\))|,(\s+)?\D+", props["version"])
|
1124 |
+
if match:
|
1125 |
+
codename = match.group()
|
1126 |
+
codename = codename.strip("()")
|
1127 |
+
codename = codename.strip(",")
|
1128 |
+
codename = codename.strip()
|
1129 |
+
# codename appears within paranthese.
|
1130 |
+
props["codename"] = codename
|
1131 |
+
|
1132 |
+
return props
|
1133 |
+
|
1134 |
+
@cached_property
|
1135 |
+
def _lsb_release_info(self):
|
1136 |
+
# type: () -> Dict[str, str]
|
1137 |
+
"""
|
1138 |
+
Get the information items from the lsb_release command output.
|
1139 |
+
|
1140 |
+
Returns:
|
1141 |
+
A dictionary containing all information items.
|
1142 |
+
"""
|
1143 |
+
if not self.include_lsb:
|
1144 |
+
return {}
|
1145 |
+
with open(os.devnull, "wb") as devnull:
|
1146 |
+
try:
|
1147 |
+
cmd = ("lsb_release", "-a")
|
1148 |
+
stdout = subprocess.check_output(cmd, stderr=devnull)
|
1149 |
+
# Command not found or lsb_release returned error
|
1150 |
+
except (OSError, subprocess.CalledProcessError):
|
1151 |
+
return {}
|
1152 |
+
content = self._to_str(stdout).splitlines()
|
1153 |
+
return self._parse_lsb_release_content(content)
|
1154 |
+
|
1155 |
+
@staticmethod
|
1156 |
+
def _parse_lsb_release_content(lines):
|
1157 |
+
# type: (Iterable[str]) -> Dict[str, str]
|
1158 |
+
"""
|
1159 |
+
Parse the output of the lsb_release command.
|
1160 |
+
|
1161 |
+
Parameters:
|
1162 |
+
|
1163 |
+
* lines: Iterable through the lines of the lsb_release output.
|
1164 |
+
Each line must be a unicode string or a UTF-8 encoded byte
|
1165 |
+
string.
|
1166 |
+
|
1167 |
+
Returns:
|
1168 |
+
A dictionary containing all information items.
|
1169 |
+
"""
|
1170 |
+
props = {}
|
1171 |
+
for line in lines:
|
1172 |
+
kv = line.strip("\n").split(":", 1)
|
1173 |
+
if len(kv) != 2:
|
1174 |
+
# Ignore lines without colon.
|
1175 |
+
continue
|
1176 |
+
k, v = kv
|
1177 |
+
props.update({k.replace(" ", "_").lower(): v.strip()})
|
1178 |
+
return props
|
1179 |
+
|
1180 |
+
@cached_property
|
1181 |
+
def _uname_info(self):
|
1182 |
+
# type: () -> Dict[str, str]
|
1183 |
+
with open(os.devnull, "wb") as devnull:
|
1184 |
+
try:
|
1185 |
+
cmd = ("uname", "-rs")
|
1186 |
+
stdout = subprocess.check_output(cmd, stderr=devnull)
|
1187 |
+
except OSError:
|
1188 |
+
return {}
|
1189 |
+
content = self._to_str(stdout).splitlines()
|
1190 |
+
return self._parse_uname_content(content)
|
1191 |
+
|
1192 |
+
@staticmethod
|
1193 |
+
def _parse_uname_content(lines):
|
1194 |
+
# type: (Sequence[str]) -> Dict[str, str]
|
1195 |
+
props = {}
|
1196 |
+
match = re.search(r"^([^\s]+)\s+([\d\.]+)", lines[0].strip())
|
1197 |
+
if match:
|
1198 |
+
name, version = match.groups()
|
1199 |
+
|
1200 |
+
# This is to prevent the Linux kernel version from
|
1201 |
+
# appearing as the 'best' version on otherwise
|
1202 |
+
# identifiable distributions.
|
1203 |
+
if name == "Linux":
|
1204 |
+
return {}
|
1205 |
+
props["id"] = name.lower()
|
1206 |
+
props["name"] = name
|
1207 |
+
props["release"] = version
|
1208 |
+
return props
|
1209 |
+
|
1210 |
+
@staticmethod
|
1211 |
+
def _to_str(text):
|
1212 |
+
# type: (Union[bytes, str]) -> str
|
1213 |
+
encoding = sys.getfilesystemencoding()
|
1214 |
+
encoding = "utf-8" if encoding == "ascii" else encoding
|
1215 |
+
|
1216 |
+
if sys.version_info[0] >= 3:
|
1217 |
+
if isinstance(text, bytes):
|
1218 |
+
return text.decode(encoding)
|
1219 |
+
else:
|
1220 |
+
if isinstance(text, unicode): # noqa
|
1221 |
+
return text.encode(encoding)
|
1222 |
+
|
1223 |
+
return text
|
1224 |
+
|
1225 |
+
@cached_property
|
1226 |
+
def _distro_release_info(self):
|
1227 |
+
# type: () -> Dict[str, str]
|
1228 |
+
"""
|
1229 |
+
Get the information items from the specified distro release file.
|
1230 |
+
|
1231 |
+
Returns:
|
1232 |
+
A dictionary containing all information items.
|
1233 |
+
"""
|
1234 |
+
if self.distro_release_file:
|
1235 |
+
# If it was specified, we use it and parse what we can, even if
|
1236 |
+
# its file name or content does not match the expected pattern.
|
1237 |
+
distro_info = self._parse_distro_release_file(self.distro_release_file)
|
1238 |
+
basename = os.path.basename(self.distro_release_file)
|
1239 |
+
# The file name pattern for user-specified distro release files
|
1240 |
+
# is somewhat more tolerant (compared to when searching for the
|
1241 |
+
# file), because we want to use what was specified as best as
|
1242 |
+
# possible.
|
1243 |
+
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
|
1244 |
+
if "name" in distro_info and "cloudlinux" in distro_info["name"].lower():
|
1245 |
+
distro_info["id"] = "cloudlinux"
|
1246 |
+
elif match:
|
1247 |
+
distro_info["id"] = match.group(1)
|
1248 |
+
return distro_info
|
1249 |
+
else:
|
1250 |
+
try:
|
1251 |
+
basenames = os.listdir(self.etc_dir)
|
1252 |
+
# We sort for repeatability in cases where there are multiple
|
1253 |
+
# distro specific files; e.g. CentOS, Oracle, Enterprise all
|
1254 |
+
# containing `redhat-release` on top of their own.
|
1255 |
+
basenames.sort()
|
1256 |
+
except OSError:
|
1257 |
+
# This may occur when /etc is not readable but we can't be
|
1258 |
+
# sure about the *-release files. Check common entries of
|
1259 |
+
# /etc for information. If they turn out to not be there the
|
1260 |
+
# error is handled in `_parse_distro_release_file()`.
|
1261 |
+
basenames = [
|
1262 |
+
"SuSE-release",
|
1263 |
+
"arch-release",
|
1264 |
+
"base-release",
|
1265 |
+
"centos-release",
|
1266 |
+
"fedora-release",
|
1267 |
+
"gentoo-release",
|
1268 |
+
"mageia-release",
|
1269 |
+
"mandrake-release",
|
1270 |
+
"mandriva-release",
|
1271 |
+
"mandrivalinux-release",
|
1272 |
+
"manjaro-release",
|
1273 |
+
"oracle-release",
|
1274 |
+
"redhat-release",
|
1275 |
+
"sl-release",
|
1276 |
+
"slackware-version",
|
1277 |
+
]
|
1278 |
+
for basename in basenames:
|
1279 |
+
if basename in _DISTRO_RELEASE_IGNORE_BASENAMES:
|
1280 |
+
continue
|
1281 |
+
match = _DISTRO_RELEASE_BASENAME_PATTERN.match(basename)
|
1282 |
+
if match:
|
1283 |
+
filepath = os.path.join(self.etc_dir, basename)
|
1284 |
+
distro_info = self._parse_distro_release_file(filepath)
|
1285 |
+
if "name" in distro_info:
|
1286 |
+
# The name is always present if the pattern matches
|
1287 |
+
self.distro_release_file = filepath
|
1288 |
+
distro_info["id"] = match.group(1)
|
1289 |
+
if "cloudlinux" in distro_info["name"].lower():
|
1290 |
+
distro_info["id"] = "cloudlinux"
|
1291 |
+
return distro_info
|
1292 |
+
return {}
|
1293 |
+
|
1294 |
+
def _parse_distro_release_file(self, filepath):
|
1295 |
+
# type: (str) -> Dict[str, str]
|
1296 |
+
"""
|
1297 |
+
Parse a distro release file.
|
1298 |
+
|
1299 |
+
Parameters:
|
1300 |
+
|
1301 |
+
* filepath: Path name of the distro release file.
|
1302 |
+
|
1303 |
+
Returns:
|
1304 |
+
A dictionary containing all information items.
|
1305 |
+
"""
|
1306 |
+
try:
|
1307 |
+
with open(filepath) as fp:
|
1308 |
+
# Only parse the first line. For instance, on SLES there
|
1309 |
+
# are multiple lines. We don't want them...
|
1310 |
+
return self._parse_distro_release_content(fp.readline())
|
1311 |
+
except (OSError, IOError):
|
1312 |
+
# Ignore not being able to read a specific, seemingly version
|
1313 |
+
# related file.
|
1314 |
+
# See https://github.com/python-distro/distro/issues/162
|
1315 |
+
return {}
|
1316 |
+
|
1317 |
+
@staticmethod
|
1318 |
+
def _parse_distro_release_content(line):
|
1319 |
+
# type: (str) -> Dict[str, str]
|
1320 |
+
"""
|
1321 |
+
Parse a line from a distro release file.
|
1322 |
+
|
1323 |
+
Parameters:
|
1324 |
+
* line: Line from the distro release file. Must be a unicode string
|
1325 |
+
or a UTF-8 encoded byte string.
|
1326 |
+
|
1327 |
+
Returns:
|
1328 |
+
A dictionary containing all information items.
|
1329 |
+
"""
|
1330 |
+
matches = _DISTRO_RELEASE_CONTENT_REVERSED_PATTERN.match(line.strip()[::-1])
|
1331 |
+
distro_info = {}
|
1332 |
+
if matches:
|
1333 |
+
# regexp ensures non-None
|
1334 |
+
distro_info["name"] = matches.group(3)[::-1]
|
1335 |
+
if matches.group(2):
|
1336 |
+
distro_info["version_id"] = matches.group(2)[::-1]
|
1337 |
+
if matches.group(1):
|
1338 |
+
distro_info["codename"] = matches.group(1)[::-1]
|
1339 |
+
elif line:
|
1340 |
+
distro_info["name"] = line.strip()
|
1341 |
+
return distro_info
|
1342 |
+
|
1343 |
+
|
1344 |
+
_distro = LinuxDistribution()
|
1345 |
+
|
1346 |
+
|
1347 |
+
def main():
|
1348 |
+
# type: () -> None
|
1349 |
+
logger = logging.getLogger(__name__)
|
1350 |
+
logger.setLevel(logging.DEBUG)
|
1351 |
+
logger.addHandler(logging.StreamHandler(sys.stdout))
|
1352 |
+
|
1353 |
+
parser = argparse.ArgumentParser(description="OS distro info tool")
|
1354 |
+
parser.add_argument(
|
1355 |
+
"--json", "-j", help="Output in machine readable format", action="store_true"
|
1356 |
+
)
|
1357 |
+
|
1358 |
+
parser.add_argument(
|
1359 |
+
"--root-dir",
|
1360 |
+
"-r",
|
1361 |
+
type=str,
|
1362 |
+
dest="root_dir",
|
1363 |
+
help="Path to the root filesystem directory (defaults to /)",
|
1364 |
+
)
|
1365 |
+
|
1366 |
+
args = parser.parse_args()
|
1367 |
+
|
1368 |
+
if args.root_dir:
|
1369 |
+
dist = LinuxDistribution(
|
1370 |
+
include_lsb=False, include_uname=False, root_dir=args.root_dir
|
1371 |
+
)
|
1372 |
+
else:
|
1373 |
+
dist = _distro
|
1374 |
+
|
1375 |
+
if args.json:
|
1376 |
+
logger.info(json.dumps(dist.info(), indent=4, sort_keys=True))
|
1377 |
+
else:
|
1378 |
+
logger.info("Name: %s", dist.name(pretty=True))
|
1379 |
+
distribution_version = dist.version(pretty=True)
|
1380 |
+
logger.info("Version: %s", distribution_version)
|
1381 |
+
distribution_codename = dist.codename()
|
1382 |
+
logger.info("Codename: %s", distribution_codename)
|
1383 |
+
|
1384 |
+
|
1385 |
+
if __name__ == "__main__":
|
1386 |
+
main()
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__init__.py
ADDED
@@ -0,0 +1,331 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
Utilities for determining application-specific dirs. See <https://github.com/platformdirs/platformdirs> for details and
|
3 |
+
usage.
|
4 |
+
"""
|
5 |
+
from __future__ import annotations
|
6 |
+
|
7 |
+
import importlib
|
8 |
+
import os
|
9 |
+
import sys
|
10 |
+
from pathlib import Path
|
11 |
+
from typing import TYPE_CHECKING
|
12 |
+
|
13 |
+
if TYPE_CHECKING:
|
14 |
+
from pip._vendor.typing_extensions import Literal # pragma: no cover
|
15 |
+
|
16 |
+
from .api import PlatformDirsABC
|
17 |
+
from .version import __version__, __version_info__
|
18 |
+
|
19 |
+
|
20 |
+
def _set_platform_dir_class() -> type[PlatformDirsABC]:
|
21 |
+
if os.getenv("ANDROID_DATA") == "/data" and os.getenv("ANDROID_ROOT") == "/system":
|
22 |
+
module, name = "pip._vendor.platformdirs.android", "Android"
|
23 |
+
elif sys.platform == "win32":
|
24 |
+
module, name = "pip._vendor.platformdirs.windows", "Windows"
|
25 |
+
elif sys.platform == "darwin":
|
26 |
+
module, name = "pip._vendor.platformdirs.macos", "MacOS"
|
27 |
+
else:
|
28 |
+
module, name = "pip._vendor.platformdirs.unix", "Unix"
|
29 |
+
result: type[PlatformDirsABC] = getattr(importlib.import_module(module), name)
|
30 |
+
return result
|
31 |
+
|
32 |
+
|
33 |
+
PlatformDirs = _set_platform_dir_class() #: Currently active platform
|
34 |
+
AppDirs = PlatformDirs #: Backwards compatibility with appdirs
|
35 |
+
|
36 |
+
|
37 |
+
def user_data_dir(
|
38 |
+
appname: str | None = None,
|
39 |
+
appauthor: str | None | Literal[False] = None,
|
40 |
+
version: str | None = None,
|
41 |
+
roaming: bool = False,
|
42 |
+
) -> str:
|
43 |
+
"""
|
44 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
45 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
46 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
47 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
48 |
+
:returns: data directory tied to the user
|
49 |
+
"""
|
50 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_dir
|
51 |
+
|
52 |
+
|
53 |
+
def site_data_dir(
|
54 |
+
appname: str | None = None,
|
55 |
+
appauthor: str | None | Literal[False] = None,
|
56 |
+
version: str | None = None,
|
57 |
+
multipath: bool = False,
|
58 |
+
) -> str:
|
59 |
+
"""
|
60 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
61 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
62 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
63 |
+
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
|
64 |
+
:returns: data directory shared by users
|
65 |
+
"""
|
66 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_dir
|
67 |
+
|
68 |
+
|
69 |
+
def user_config_dir(
|
70 |
+
appname: str | None = None,
|
71 |
+
appauthor: str | None | Literal[False] = None,
|
72 |
+
version: str | None = None,
|
73 |
+
roaming: bool = False,
|
74 |
+
) -> str:
|
75 |
+
"""
|
76 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
77 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
78 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
79 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
80 |
+
:returns: config directory tied to the user
|
81 |
+
"""
|
82 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_dir
|
83 |
+
|
84 |
+
|
85 |
+
def site_config_dir(
|
86 |
+
appname: str | None = None,
|
87 |
+
appauthor: str | None | Literal[False] = None,
|
88 |
+
version: str | None = None,
|
89 |
+
multipath: bool = False,
|
90 |
+
) -> str:
|
91 |
+
"""
|
92 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
93 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
94 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
95 |
+
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
|
96 |
+
:returns: config directory shared by the users
|
97 |
+
"""
|
98 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_dir
|
99 |
+
|
100 |
+
|
101 |
+
def user_cache_dir(
|
102 |
+
appname: str | None = None,
|
103 |
+
appauthor: str | None | Literal[False] = None,
|
104 |
+
version: str | None = None,
|
105 |
+
opinion: bool = True,
|
106 |
+
) -> str:
|
107 |
+
"""
|
108 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
109 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
110 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
111 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
112 |
+
:returns: cache directory tied to the user
|
113 |
+
"""
|
114 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_dir
|
115 |
+
|
116 |
+
|
117 |
+
def user_state_dir(
|
118 |
+
appname: str | None = None,
|
119 |
+
appauthor: str | None | Literal[False] = None,
|
120 |
+
version: str | None = None,
|
121 |
+
roaming: bool = False,
|
122 |
+
) -> str:
|
123 |
+
"""
|
124 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
125 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
126 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
127 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
128 |
+
:returns: state directory tied to the user
|
129 |
+
"""
|
130 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_dir
|
131 |
+
|
132 |
+
|
133 |
+
def user_log_dir(
|
134 |
+
appname: str | None = None,
|
135 |
+
appauthor: str | None | Literal[False] = None,
|
136 |
+
version: str | None = None,
|
137 |
+
opinion: bool = True,
|
138 |
+
) -> str:
|
139 |
+
"""
|
140 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
141 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
142 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
143 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
144 |
+
:returns: log directory tied to the user
|
145 |
+
"""
|
146 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_dir
|
147 |
+
|
148 |
+
|
149 |
+
def user_documents_dir() -> str:
|
150 |
+
"""
|
151 |
+
:returns: documents directory tied to the user
|
152 |
+
"""
|
153 |
+
return PlatformDirs().user_documents_dir
|
154 |
+
|
155 |
+
|
156 |
+
def user_runtime_dir(
|
157 |
+
appname: str | None = None,
|
158 |
+
appauthor: str | None | Literal[False] = None,
|
159 |
+
version: str | None = None,
|
160 |
+
opinion: bool = True,
|
161 |
+
) -> str:
|
162 |
+
"""
|
163 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
164 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
165 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
166 |
+
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
167 |
+
:returns: runtime directory tied to the user
|
168 |
+
"""
|
169 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_dir
|
170 |
+
|
171 |
+
|
172 |
+
def user_data_path(
|
173 |
+
appname: str | None = None,
|
174 |
+
appauthor: str | None | Literal[False] = None,
|
175 |
+
version: str | None = None,
|
176 |
+
roaming: bool = False,
|
177 |
+
) -> Path:
|
178 |
+
"""
|
179 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
180 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
181 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
182 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
183 |
+
:returns: data path tied to the user
|
184 |
+
"""
|
185 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_data_path
|
186 |
+
|
187 |
+
|
188 |
+
def site_data_path(
|
189 |
+
appname: str | None = None,
|
190 |
+
appauthor: str | None | Literal[False] = None,
|
191 |
+
version: str | None = None,
|
192 |
+
multipath: bool = False,
|
193 |
+
) -> Path:
|
194 |
+
"""
|
195 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
196 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
197 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
198 |
+
:param multipath: See `multipath <platformdirs.api.PlatformDirsABC.multipath>`.
|
199 |
+
:returns: data path shared by users
|
200 |
+
"""
|
201 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_data_path
|
202 |
+
|
203 |
+
|
204 |
+
def user_config_path(
|
205 |
+
appname: str | None = None,
|
206 |
+
appauthor: str | None | Literal[False] = None,
|
207 |
+
version: str | None = None,
|
208 |
+
roaming: bool = False,
|
209 |
+
) -> Path:
|
210 |
+
"""
|
211 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
212 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
213 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
214 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
215 |
+
:returns: config path tied to the user
|
216 |
+
"""
|
217 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_config_path
|
218 |
+
|
219 |
+
|
220 |
+
def site_config_path(
|
221 |
+
appname: str | None = None,
|
222 |
+
appauthor: str | None | Literal[False] = None,
|
223 |
+
version: str | None = None,
|
224 |
+
multipath: bool = False,
|
225 |
+
) -> Path:
|
226 |
+
"""
|
227 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
228 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
229 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
230 |
+
:param multipath: See `roaming <platformdirs.api.PlatformDirsABC.multipath>`.
|
231 |
+
:returns: config path shared by the users
|
232 |
+
"""
|
233 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, multipath=multipath).site_config_path
|
234 |
+
|
235 |
+
|
236 |
+
def user_cache_path(
|
237 |
+
appname: str | None = None,
|
238 |
+
appauthor: str | None | Literal[False] = None,
|
239 |
+
version: str | None = None,
|
240 |
+
opinion: bool = True,
|
241 |
+
) -> Path:
|
242 |
+
"""
|
243 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
244 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
245 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
246 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
247 |
+
:returns: cache path tied to the user
|
248 |
+
"""
|
249 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_cache_path
|
250 |
+
|
251 |
+
|
252 |
+
def user_state_path(
|
253 |
+
appname: str | None = None,
|
254 |
+
appauthor: str | None | Literal[False] = None,
|
255 |
+
version: str | None = None,
|
256 |
+
roaming: bool = False,
|
257 |
+
) -> Path:
|
258 |
+
"""
|
259 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
260 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
261 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
262 |
+
:param roaming: See `roaming <platformdirs.api.PlatformDirsABC.version>`.
|
263 |
+
:returns: state path tied to the user
|
264 |
+
"""
|
265 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, roaming=roaming).user_state_path
|
266 |
+
|
267 |
+
|
268 |
+
def user_log_path(
|
269 |
+
appname: str | None = None,
|
270 |
+
appauthor: str | None | Literal[False] = None,
|
271 |
+
version: str | None = None,
|
272 |
+
opinion: bool = True,
|
273 |
+
) -> Path:
|
274 |
+
"""
|
275 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
276 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
277 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
278 |
+
:param opinion: See `roaming <platformdirs.api.PlatformDirsABC.opinion>`.
|
279 |
+
:returns: log path tied to the user
|
280 |
+
"""
|
281 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_log_path
|
282 |
+
|
283 |
+
|
284 |
+
def user_documents_path() -> Path:
|
285 |
+
"""
|
286 |
+
:returns: documents path tied to the user
|
287 |
+
"""
|
288 |
+
return PlatformDirs().user_documents_path
|
289 |
+
|
290 |
+
|
291 |
+
def user_runtime_path(
|
292 |
+
appname: str | None = None,
|
293 |
+
appauthor: str | None | Literal[False] = None,
|
294 |
+
version: str | None = None,
|
295 |
+
opinion: bool = True,
|
296 |
+
) -> Path:
|
297 |
+
"""
|
298 |
+
:param appname: See `appname <platformdirs.api.PlatformDirsABC.appname>`.
|
299 |
+
:param appauthor: See `appauthor <platformdirs.api.PlatformDirsABC.appauthor>`.
|
300 |
+
:param version: See `version <platformdirs.api.PlatformDirsABC.version>`.
|
301 |
+
:param opinion: See `opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
302 |
+
:returns: runtime path tied to the user
|
303 |
+
"""
|
304 |
+
return PlatformDirs(appname=appname, appauthor=appauthor, version=version, opinion=opinion).user_runtime_path
|
305 |
+
|
306 |
+
|
307 |
+
__all__ = [
|
308 |
+
"__version__",
|
309 |
+
"__version_info__",
|
310 |
+
"PlatformDirs",
|
311 |
+
"AppDirs",
|
312 |
+
"PlatformDirsABC",
|
313 |
+
"user_data_dir",
|
314 |
+
"user_config_dir",
|
315 |
+
"user_cache_dir",
|
316 |
+
"user_state_dir",
|
317 |
+
"user_log_dir",
|
318 |
+
"user_documents_dir",
|
319 |
+
"user_runtime_dir",
|
320 |
+
"site_data_dir",
|
321 |
+
"site_config_dir",
|
322 |
+
"user_data_path",
|
323 |
+
"user_config_path",
|
324 |
+
"user_cache_path",
|
325 |
+
"user_state_path",
|
326 |
+
"user_log_path",
|
327 |
+
"user_documents_path",
|
328 |
+
"user_runtime_path",
|
329 |
+
"site_data_path",
|
330 |
+
"site_config_path",
|
331 |
+
]
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__main__.py
ADDED
@@ -0,0 +1,46 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
from pip._vendor.platformdirs import PlatformDirs, __version__
|
4 |
+
|
5 |
+
PROPS = (
|
6 |
+
"user_data_dir",
|
7 |
+
"user_config_dir",
|
8 |
+
"user_cache_dir",
|
9 |
+
"user_state_dir",
|
10 |
+
"user_log_dir",
|
11 |
+
"user_documents_dir",
|
12 |
+
"user_runtime_dir",
|
13 |
+
"site_data_dir",
|
14 |
+
"site_config_dir",
|
15 |
+
)
|
16 |
+
|
17 |
+
|
18 |
+
def main() -> None:
|
19 |
+
app_name = "MyApp"
|
20 |
+
app_author = "MyCompany"
|
21 |
+
|
22 |
+
print(f"-- platformdirs {__version__} --")
|
23 |
+
|
24 |
+
print("-- app dirs (with optional 'version')")
|
25 |
+
dirs = PlatformDirs(app_name, app_author, version="1.0")
|
26 |
+
for prop in PROPS:
|
27 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
28 |
+
|
29 |
+
print("\n-- app dirs (without optional 'version')")
|
30 |
+
dirs = PlatformDirs(app_name, app_author)
|
31 |
+
for prop in PROPS:
|
32 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
33 |
+
|
34 |
+
print("\n-- app dirs (without optional 'appauthor')")
|
35 |
+
dirs = PlatformDirs(app_name)
|
36 |
+
for prop in PROPS:
|
37 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
38 |
+
|
39 |
+
print("\n-- app dirs (with disabled 'appauthor')")
|
40 |
+
dirs = PlatformDirs(app_name, appauthor=False)
|
41 |
+
for prop in PROPS:
|
42 |
+
print(f"{prop}: {getattr(dirs, prop)}")
|
43 |
+
|
44 |
+
|
45 |
+
if __name__ == "__main__":
|
46 |
+
main()
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (10.5 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/__main__.cpython-310.pyc
ADDED
Binary file (1.24 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/android.cpython-310.pyc
ADDED
Binary file (4.27 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/api.cpython-310.pyc
ADDED
Binary file (5.21 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/macos.cpython-310.pyc
ADDED
Binary file (3.19 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/unix.cpython-310.pyc
ADDED
Binary file (6.89 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/version.cpython-310.pyc
ADDED
Binary file (298 Bytes). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/__pycache__/windows.cpython-310.pyc
ADDED
Binary file (6.44 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/android.py
ADDED
@@ -0,0 +1,119 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import re
|
5 |
+
import sys
|
6 |
+
from functools import lru_cache
|
7 |
+
|
8 |
+
from .api import PlatformDirsABC
|
9 |
+
|
10 |
+
|
11 |
+
class Android(PlatformDirsABC):
|
12 |
+
"""
|
13 |
+
Follows the guidance `from here <https://android.stackexchange.com/a/216132>`_. Makes use of the
|
14 |
+
`appname <platformdirs.api.PlatformDirsABC.appname>` and
|
15 |
+
`version <platformdirs.api.PlatformDirsABC.version>`.
|
16 |
+
"""
|
17 |
+
|
18 |
+
@property
|
19 |
+
def user_data_dir(self) -> str:
|
20 |
+
""":return: data directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/files/<AppName>``"""
|
21 |
+
return self._append_app_name_and_version(_android_folder(), "files")
|
22 |
+
|
23 |
+
@property
|
24 |
+
def site_data_dir(self) -> str:
|
25 |
+
""":return: data directory shared by users, same as `user_data_dir`"""
|
26 |
+
return self.user_data_dir
|
27 |
+
|
28 |
+
@property
|
29 |
+
def user_config_dir(self) -> str:
|
30 |
+
"""
|
31 |
+
:return: config directory tied to the user, e.g. ``/data/user/<userid>/<packagename>/shared_prefs/<AppName>``
|
32 |
+
"""
|
33 |
+
return self._append_app_name_and_version(_android_folder(), "shared_prefs")
|
34 |
+
|
35 |
+
@property
|
36 |
+
def site_config_dir(self) -> str:
|
37 |
+
""":return: config directory shared by the users, same as `user_config_dir`"""
|
38 |
+
return self.user_config_dir
|
39 |
+
|
40 |
+
@property
|
41 |
+
def user_cache_dir(self) -> str:
|
42 |
+
""":return: cache directory tied to the user, e.g. e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>``"""
|
43 |
+
return self._append_app_name_and_version(_android_folder(), "cache")
|
44 |
+
|
45 |
+
@property
|
46 |
+
def user_state_dir(self) -> str:
|
47 |
+
""":return: state directory tied to the user, same as `user_data_dir`"""
|
48 |
+
return self.user_data_dir
|
49 |
+
|
50 |
+
@property
|
51 |
+
def user_log_dir(self) -> str:
|
52 |
+
"""
|
53 |
+
:return: log directory tied to the user, same as `user_cache_dir` if not opinionated else ``log`` in it,
|
54 |
+
e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/log``
|
55 |
+
"""
|
56 |
+
path = self.user_cache_dir
|
57 |
+
if self.opinion:
|
58 |
+
path = os.path.join(path, "log")
|
59 |
+
return path
|
60 |
+
|
61 |
+
@property
|
62 |
+
def user_documents_dir(self) -> str:
|
63 |
+
"""
|
64 |
+
:return: documents directory tied to the user e.g. ``/storage/emulated/0/Documents``
|
65 |
+
"""
|
66 |
+
return _android_documents_folder()
|
67 |
+
|
68 |
+
@property
|
69 |
+
def user_runtime_dir(self) -> str:
|
70 |
+
"""
|
71 |
+
:return: runtime directory tied to the user, same as `user_cache_dir` if not opinionated else ``tmp`` in it,
|
72 |
+
e.g. ``/data/user/<userid>/<packagename>/cache/<AppName>/tmp``
|
73 |
+
"""
|
74 |
+
path = self.user_cache_dir
|
75 |
+
if self.opinion:
|
76 |
+
path = os.path.join(path, "tmp")
|
77 |
+
return path
|
78 |
+
|
79 |
+
|
80 |
+
@lru_cache(maxsize=1)
|
81 |
+
def _android_folder() -> str:
|
82 |
+
""":return: base folder for the Android OS"""
|
83 |
+
try:
|
84 |
+
# First try to get path to android app via pyjnius
|
85 |
+
from jnius import autoclass
|
86 |
+
|
87 |
+
Context = autoclass("android.content.Context") # noqa: N806
|
88 |
+
result: str = Context.getFilesDir().getParentFile().getAbsolutePath()
|
89 |
+
except Exception:
|
90 |
+
# if fails find an android folder looking path on the sys.path
|
91 |
+
pattern = re.compile(r"/data/(data|user/\d+)/(.+)/files")
|
92 |
+
for path in sys.path:
|
93 |
+
if pattern.match(path):
|
94 |
+
result = path.split("/files")[0]
|
95 |
+
break
|
96 |
+
else:
|
97 |
+
raise OSError("Cannot find path to android app folder")
|
98 |
+
return result
|
99 |
+
|
100 |
+
|
101 |
+
@lru_cache(maxsize=1)
|
102 |
+
def _android_documents_folder() -> str:
|
103 |
+
""":return: documents folder for the Android OS"""
|
104 |
+
# Get directories with pyjnius
|
105 |
+
try:
|
106 |
+
from jnius import autoclass
|
107 |
+
|
108 |
+
Context = autoclass("android.content.Context") # noqa: N806
|
109 |
+
Environment = autoclass("android.os.Environment") # noqa: N806
|
110 |
+
documents_dir: str = Context.getExternalFilesDir(Environment.DIRECTORY_DOCUMENTS).getAbsolutePath()
|
111 |
+
except Exception:
|
112 |
+
documents_dir = "/storage/emulated/0/Documents"
|
113 |
+
|
114 |
+
return documents_dir
|
115 |
+
|
116 |
+
|
117 |
+
__all__ = [
|
118 |
+
"Android",
|
119 |
+
]
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/api.py
ADDED
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
from abc import ABC, abstractmethod
|
6 |
+
from pathlib import Path
|
7 |
+
|
8 |
+
if sys.version_info >= (3, 8): # pragma: no branch
|
9 |
+
from typing import Literal # pragma: no cover
|
10 |
+
|
11 |
+
|
12 |
+
class PlatformDirsABC(ABC):
|
13 |
+
"""
|
14 |
+
Abstract base class for platform directories.
|
15 |
+
"""
|
16 |
+
|
17 |
+
def __init__(
|
18 |
+
self,
|
19 |
+
appname: str | None = None,
|
20 |
+
appauthor: str | None | Literal[False] = None,
|
21 |
+
version: str | None = None,
|
22 |
+
roaming: bool = False,
|
23 |
+
multipath: bool = False,
|
24 |
+
opinion: bool = True,
|
25 |
+
):
|
26 |
+
"""
|
27 |
+
Create a new platform directory.
|
28 |
+
|
29 |
+
:param appname: See `appname`.
|
30 |
+
:param appauthor: See `appauthor`.
|
31 |
+
:param version: See `version`.
|
32 |
+
:param roaming: See `roaming`.
|
33 |
+
:param multipath: See `multipath`.
|
34 |
+
:param opinion: See `opinion`.
|
35 |
+
"""
|
36 |
+
self.appname = appname #: The name of application.
|
37 |
+
self.appauthor = appauthor
|
38 |
+
"""
|
39 |
+
The name of the app author or distributing body for this application. Typically, it is the owning company name.
|
40 |
+
Defaults to `appname`. You may pass ``False`` to disable it.
|
41 |
+
"""
|
42 |
+
self.version = version
|
43 |
+
"""
|
44 |
+
An optional version path element to append to the path. You might want to use this if you want multiple versions
|
45 |
+
of your app to be able to run independently. If used, this would typically be ``<major>.<minor>``.
|
46 |
+
"""
|
47 |
+
self.roaming = roaming
|
48 |
+
"""
|
49 |
+
Whether to use the roaming appdata directory on Windows. That means that for users on a Windows network setup
|
50 |
+
for roaming profiles, this user data will be synced on login (see
|
51 |
+
`here <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>`_).
|
52 |
+
"""
|
53 |
+
self.multipath = multipath
|
54 |
+
"""
|
55 |
+
An optional parameter only applicable to Unix/Linux which indicates that the entire list of data dirs should be
|
56 |
+
returned. By default, the first item would only be returned.
|
57 |
+
"""
|
58 |
+
self.opinion = opinion #: A flag to indicating to use opinionated values.
|
59 |
+
|
60 |
+
def _append_app_name_and_version(self, *base: str) -> str:
|
61 |
+
params = list(base[1:])
|
62 |
+
if self.appname:
|
63 |
+
params.append(self.appname)
|
64 |
+
if self.version:
|
65 |
+
params.append(self.version)
|
66 |
+
return os.path.join(base[0], *params)
|
67 |
+
|
68 |
+
@property
|
69 |
+
@abstractmethod
|
70 |
+
def user_data_dir(self) -> str:
|
71 |
+
""":return: data directory tied to the user"""
|
72 |
+
|
73 |
+
@property
|
74 |
+
@abstractmethod
|
75 |
+
def site_data_dir(self) -> str:
|
76 |
+
""":return: data directory shared by users"""
|
77 |
+
|
78 |
+
@property
|
79 |
+
@abstractmethod
|
80 |
+
def user_config_dir(self) -> str:
|
81 |
+
""":return: config directory tied to the user"""
|
82 |
+
|
83 |
+
@property
|
84 |
+
@abstractmethod
|
85 |
+
def site_config_dir(self) -> str:
|
86 |
+
""":return: config directory shared by the users"""
|
87 |
+
|
88 |
+
@property
|
89 |
+
@abstractmethod
|
90 |
+
def user_cache_dir(self) -> str:
|
91 |
+
""":return: cache directory tied to the user"""
|
92 |
+
|
93 |
+
@property
|
94 |
+
@abstractmethod
|
95 |
+
def user_state_dir(self) -> str:
|
96 |
+
""":return: state directory tied to the user"""
|
97 |
+
|
98 |
+
@property
|
99 |
+
@abstractmethod
|
100 |
+
def user_log_dir(self) -> str:
|
101 |
+
""":return: log directory tied to the user"""
|
102 |
+
|
103 |
+
@property
|
104 |
+
@abstractmethod
|
105 |
+
def user_documents_dir(self) -> str:
|
106 |
+
""":return: documents directory tied to the user"""
|
107 |
+
|
108 |
+
@property
|
109 |
+
@abstractmethod
|
110 |
+
def user_runtime_dir(self) -> str:
|
111 |
+
""":return: runtime directory tied to the user"""
|
112 |
+
|
113 |
+
@property
|
114 |
+
def user_data_path(self) -> Path:
|
115 |
+
""":return: data path tied to the user"""
|
116 |
+
return Path(self.user_data_dir)
|
117 |
+
|
118 |
+
@property
|
119 |
+
def site_data_path(self) -> Path:
|
120 |
+
""":return: data path shared by users"""
|
121 |
+
return Path(self.site_data_dir)
|
122 |
+
|
123 |
+
@property
|
124 |
+
def user_config_path(self) -> Path:
|
125 |
+
""":return: config path tied to the user"""
|
126 |
+
return Path(self.user_config_dir)
|
127 |
+
|
128 |
+
@property
|
129 |
+
def site_config_path(self) -> Path:
|
130 |
+
""":return: config path shared by the users"""
|
131 |
+
return Path(self.site_config_dir)
|
132 |
+
|
133 |
+
@property
|
134 |
+
def user_cache_path(self) -> Path:
|
135 |
+
""":return: cache path tied to the user"""
|
136 |
+
return Path(self.user_cache_dir)
|
137 |
+
|
138 |
+
@property
|
139 |
+
def user_state_path(self) -> Path:
|
140 |
+
""":return: state path tied to the user"""
|
141 |
+
return Path(self.user_state_dir)
|
142 |
+
|
143 |
+
@property
|
144 |
+
def user_log_path(self) -> Path:
|
145 |
+
""":return: log path tied to the user"""
|
146 |
+
return Path(self.user_log_dir)
|
147 |
+
|
148 |
+
@property
|
149 |
+
def user_documents_path(self) -> Path:
|
150 |
+
""":return: documents path tied to the user"""
|
151 |
+
return Path(self.user_documents_dir)
|
152 |
+
|
153 |
+
@property
|
154 |
+
def user_runtime_path(self) -> Path:
|
155 |
+
""":return: runtime path tied to the user"""
|
156 |
+
return Path(self.user_runtime_dir)
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/macos.py
ADDED
@@ -0,0 +1,64 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
|
5 |
+
from .api import PlatformDirsABC
|
6 |
+
|
7 |
+
|
8 |
+
class MacOS(PlatformDirsABC):
|
9 |
+
"""
|
10 |
+
Platform directories for the macOS operating system. Follows the guidance from `Apple documentation
|
11 |
+
<https://developer.apple.com/library/archive/documentation/FileManagement/Conceptual/FileSystemProgrammingGuide/MacOSXDirectories/MacOSXDirectories.html>`_.
|
12 |
+
Makes use of the `appname <platformdirs.api.PlatformDirsABC.appname>` and
|
13 |
+
`version <platformdirs.api.PlatformDirsABC.version>`.
|
14 |
+
"""
|
15 |
+
|
16 |
+
@property
|
17 |
+
def user_data_dir(self) -> str:
|
18 |
+
""":return: data directory tied to the user, e.g. ``~/Library/Application Support/$appname/$version``"""
|
19 |
+
return self._append_app_name_and_version(os.path.expanduser("~/Library/Application Support/"))
|
20 |
+
|
21 |
+
@property
|
22 |
+
def site_data_dir(self) -> str:
|
23 |
+
""":return: data directory shared by users, e.g. ``/Library/Application Support/$appname/$version``"""
|
24 |
+
return self._append_app_name_and_version("/Library/Application Support")
|
25 |
+
|
26 |
+
@property
|
27 |
+
def user_config_dir(self) -> str:
|
28 |
+
""":return: config directory tied to the user, e.g. ``~/Library/Preferences/$appname/$version``"""
|
29 |
+
return self._append_app_name_and_version(os.path.expanduser("~/Library/Preferences/"))
|
30 |
+
|
31 |
+
@property
|
32 |
+
def site_config_dir(self) -> str:
|
33 |
+
""":return: config directory shared by the users, e.g. ``/Library/Preferences/$appname``"""
|
34 |
+
return self._append_app_name_and_version("/Library/Preferences")
|
35 |
+
|
36 |
+
@property
|
37 |
+
def user_cache_dir(self) -> str:
|
38 |
+
""":return: cache directory tied to the user, e.g. ``~/Library/Caches/$appname/$version``"""
|
39 |
+
return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches"))
|
40 |
+
|
41 |
+
@property
|
42 |
+
def user_state_dir(self) -> str:
|
43 |
+
""":return: state directory tied to the user, same as `user_data_dir`"""
|
44 |
+
return self.user_data_dir
|
45 |
+
|
46 |
+
@property
|
47 |
+
def user_log_dir(self) -> str:
|
48 |
+
""":return: log directory tied to the user, e.g. ``~/Library/Logs/$appname/$version``"""
|
49 |
+
return self._append_app_name_and_version(os.path.expanduser("~/Library/Logs"))
|
50 |
+
|
51 |
+
@property
|
52 |
+
def user_documents_dir(self) -> str:
|
53 |
+
""":return: documents directory tied to the user, e.g. ``~/Documents``"""
|
54 |
+
return os.path.expanduser("~/Documents")
|
55 |
+
|
56 |
+
@property
|
57 |
+
def user_runtime_dir(self) -> str:
|
58 |
+
""":return: runtime directory tied to the user, e.g. ``~/Library/Caches/TemporaryItems/$appname/$version``"""
|
59 |
+
return self._append_app_name_and_version(os.path.expanduser("~/Library/Caches/TemporaryItems"))
|
60 |
+
|
61 |
+
|
62 |
+
__all__ = [
|
63 |
+
"MacOS",
|
64 |
+
]
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/unix.py
ADDED
@@ -0,0 +1,181 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import os
|
4 |
+
import sys
|
5 |
+
from configparser import ConfigParser
|
6 |
+
from pathlib import Path
|
7 |
+
|
8 |
+
from .api import PlatformDirsABC
|
9 |
+
|
10 |
+
if sys.platform.startswith("linux"): # pragma: no branch # no op check, only to please the type checker
|
11 |
+
from os import getuid
|
12 |
+
else:
|
13 |
+
|
14 |
+
def getuid() -> int:
|
15 |
+
raise RuntimeError("should only be used on Linux")
|
16 |
+
|
17 |
+
|
18 |
+
class Unix(PlatformDirsABC):
|
19 |
+
"""
|
20 |
+
On Unix/Linux, we follow the
|
21 |
+
`XDG Basedir Spec <https://specifications.freedesktop.org/basedir-spec/basedir-spec-latest.html>`_. The spec allows
|
22 |
+
overriding directories with environment variables. The examples show are the default values, alongside the name of
|
23 |
+
the environment variable that overrides them. Makes use of the
|
24 |
+
`appname <platformdirs.api.PlatformDirsABC.appname>`,
|
25 |
+
`version <platformdirs.api.PlatformDirsABC.version>`,
|
26 |
+
`multipath <platformdirs.api.PlatformDirsABC.multipath>`,
|
27 |
+
`opinion <platformdirs.api.PlatformDirsABC.opinion>`.
|
28 |
+
"""
|
29 |
+
|
30 |
+
@property
|
31 |
+
def user_data_dir(self) -> str:
|
32 |
+
"""
|
33 |
+
:return: data directory tied to the user, e.g. ``~/.local/share/$appname/$version`` or
|
34 |
+
``$XDG_DATA_HOME/$appname/$version``
|
35 |
+
"""
|
36 |
+
path = os.environ.get("XDG_DATA_HOME", "")
|
37 |
+
if not path.strip():
|
38 |
+
path = os.path.expanduser("~/.local/share")
|
39 |
+
return self._append_app_name_and_version(path)
|
40 |
+
|
41 |
+
@property
|
42 |
+
def site_data_dir(self) -> str:
|
43 |
+
"""
|
44 |
+
:return: data directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>` is
|
45 |
+
enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
|
46 |
+
path separator), e.g. ``/usr/local/share/$appname/$version`` or ``/usr/share/$appname/$version``
|
47 |
+
"""
|
48 |
+
# XDG default for $XDG_DATA_DIRS; only first, if multipath is False
|
49 |
+
path = os.environ.get("XDG_DATA_DIRS", "")
|
50 |
+
if not path.strip():
|
51 |
+
path = f"/usr/local/share{os.pathsep}/usr/share"
|
52 |
+
return self._with_multi_path(path)
|
53 |
+
|
54 |
+
def _with_multi_path(self, path: str) -> str:
|
55 |
+
path_list = path.split(os.pathsep)
|
56 |
+
if not self.multipath:
|
57 |
+
path_list = path_list[0:1]
|
58 |
+
path_list = [self._append_app_name_and_version(os.path.expanduser(p)) for p in path_list]
|
59 |
+
return os.pathsep.join(path_list)
|
60 |
+
|
61 |
+
@property
|
62 |
+
def user_config_dir(self) -> str:
|
63 |
+
"""
|
64 |
+
:return: config directory tied to the user, e.g. ``~/.config/$appname/$version`` or
|
65 |
+
``$XDG_CONFIG_HOME/$appname/$version``
|
66 |
+
"""
|
67 |
+
path = os.environ.get("XDG_CONFIG_HOME", "")
|
68 |
+
if not path.strip():
|
69 |
+
path = os.path.expanduser("~/.config")
|
70 |
+
return self._append_app_name_and_version(path)
|
71 |
+
|
72 |
+
@property
|
73 |
+
def site_config_dir(self) -> str:
|
74 |
+
"""
|
75 |
+
:return: config directories shared by users (if `multipath <platformdirs.api.PlatformDirsABC.multipath>`
|
76 |
+
is enabled and ``XDG_DATA_DIR`` is set and a multi path the response is also a multi path separated by the OS
|
77 |
+
path separator), e.g. ``/etc/xdg/$appname/$version``
|
78 |
+
"""
|
79 |
+
# XDG default for $XDG_CONFIG_DIRS only first, if multipath is False
|
80 |
+
path = os.environ.get("XDG_CONFIG_DIRS", "")
|
81 |
+
if not path.strip():
|
82 |
+
path = "/etc/xdg"
|
83 |
+
return self._with_multi_path(path)
|
84 |
+
|
85 |
+
@property
|
86 |
+
def user_cache_dir(self) -> str:
|
87 |
+
"""
|
88 |
+
:return: cache directory tied to the user, e.g. ``~/.cache/$appname/$version`` or
|
89 |
+
``~/$XDG_CACHE_HOME/$appname/$version``
|
90 |
+
"""
|
91 |
+
path = os.environ.get("XDG_CACHE_HOME", "")
|
92 |
+
if not path.strip():
|
93 |
+
path = os.path.expanduser("~/.cache")
|
94 |
+
return self._append_app_name_and_version(path)
|
95 |
+
|
96 |
+
@property
|
97 |
+
def user_state_dir(self) -> str:
|
98 |
+
"""
|
99 |
+
:return: state directory tied to the user, e.g. ``~/.local/state/$appname/$version`` or
|
100 |
+
``$XDG_STATE_HOME/$appname/$version``
|
101 |
+
"""
|
102 |
+
path = os.environ.get("XDG_STATE_HOME", "")
|
103 |
+
if not path.strip():
|
104 |
+
path = os.path.expanduser("~/.local/state")
|
105 |
+
return self._append_app_name_and_version(path)
|
106 |
+
|
107 |
+
@property
|
108 |
+
def user_log_dir(self) -> str:
|
109 |
+
"""
|
110 |
+
:return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``log`` in it
|
111 |
+
"""
|
112 |
+
path = self.user_cache_dir
|
113 |
+
if self.opinion:
|
114 |
+
path = os.path.join(path, "log")
|
115 |
+
return path
|
116 |
+
|
117 |
+
@property
|
118 |
+
def user_documents_dir(self) -> str:
|
119 |
+
"""
|
120 |
+
:return: documents directory tied to the user, e.g. ``~/Documents``
|
121 |
+
"""
|
122 |
+
documents_dir = _get_user_dirs_folder("XDG_DOCUMENTS_DIR")
|
123 |
+
if documents_dir is None:
|
124 |
+
documents_dir = os.environ.get("XDG_DOCUMENTS_DIR", "").strip()
|
125 |
+
if not documents_dir:
|
126 |
+
documents_dir = os.path.expanduser("~/Documents")
|
127 |
+
|
128 |
+
return documents_dir
|
129 |
+
|
130 |
+
@property
|
131 |
+
def user_runtime_dir(self) -> str:
|
132 |
+
"""
|
133 |
+
:return: runtime directory tied to the user, e.g. ``/run/user/$(id -u)/$appname/$version`` or
|
134 |
+
``$XDG_RUNTIME_DIR/$appname/$version``
|
135 |
+
"""
|
136 |
+
path = os.environ.get("XDG_RUNTIME_DIR", "")
|
137 |
+
if not path.strip():
|
138 |
+
path = f"/run/user/{getuid()}"
|
139 |
+
return self._append_app_name_and_version(path)
|
140 |
+
|
141 |
+
@property
|
142 |
+
def site_data_path(self) -> Path:
|
143 |
+
""":return: data path shared by users. Only return first item, even if ``multipath`` is set to ``True``"""
|
144 |
+
return self._first_item_as_path_if_multipath(self.site_data_dir)
|
145 |
+
|
146 |
+
@property
|
147 |
+
def site_config_path(self) -> Path:
|
148 |
+
""":return: config path shared by the users. Only return first item, even if ``multipath`` is set to ``True``"""
|
149 |
+
return self._first_item_as_path_if_multipath(self.site_config_dir)
|
150 |
+
|
151 |
+
def _first_item_as_path_if_multipath(self, directory: str) -> Path:
|
152 |
+
if self.multipath:
|
153 |
+
# If multipath is True, the first path is returned.
|
154 |
+
directory = directory.split(os.pathsep)[0]
|
155 |
+
return Path(directory)
|
156 |
+
|
157 |
+
|
158 |
+
def _get_user_dirs_folder(key: str) -> str | None:
|
159 |
+
"""Return directory from user-dirs.dirs config file. See https://freedesktop.org/wiki/Software/xdg-user-dirs/"""
|
160 |
+
user_dirs_config_path = os.path.join(Unix().user_config_dir, "user-dirs.dirs")
|
161 |
+
if os.path.exists(user_dirs_config_path):
|
162 |
+
parser = ConfigParser()
|
163 |
+
|
164 |
+
with open(user_dirs_config_path) as stream:
|
165 |
+
# Add fake section header, so ConfigParser doesn't complain
|
166 |
+
parser.read_string(f"[top]\n{stream.read()}")
|
167 |
+
|
168 |
+
if key not in parser["top"]:
|
169 |
+
return None
|
170 |
+
|
171 |
+
path = parser["top"][key].strip('"')
|
172 |
+
# Handle relative home paths
|
173 |
+
path = path.replace("$HOME", os.path.expanduser("~"))
|
174 |
+
return path
|
175 |
+
|
176 |
+
return None
|
177 |
+
|
178 |
+
|
179 |
+
__all__ = [
|
180 |
+
"Unix",
|
181 |
+
]
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/version.py
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
""" Version information """
|
2 |
+
|
3 |
+
__version__ = "2.4.1"
|
4 |
+
__version_info__ = (2, 4, 1)
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/platformdirs/windows.py
ADDED
@@ -0,0 +1,182 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from __future__ import annotations
|
2 |
+
|
3 |
+
import ctypes
|
4 |
+
import os
|
5 |
+
from functools import lru_cache
|
6 |
+
from typing import Callable
|
7 |
+
|
8 |
+
from .api import PlatformDirsABC
|
9 |
+
|
10 |
+
|
11 |
+
class Windows(PlatformDirsABC):
|
12 |
+
"""`MSDN on where to store app data files
|
13 |
+
<http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120>`_.
|
14 |
+
Makes use of the
|
15 |
+
`appname <platformdirs.api.PlatformDirsABC.appname>`,
|
16 |
+
`appauthor <platformdirs.api.PlatformDirsABC.appauthor>`,
|
17 |
+
`version <platformdirs.api.PlatformDirsABC.version>`,
|
18 |
+
`roaming <platformdirs.api.PlatformDirsABC.roaming>`,
|
19 |
+
`opinion <platformdirs.api.PlatformDirsABC.opinion>`."""
|
20 |
+
|
21 |
+
@property
|
22 |
+
def user_data_dir(self) -> str:
|
23 |
+
"""
|
24 |
+
:return: data directory tied to the user, e.g.
|
25 |
+
``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname`` (not roaming) or
|
26 |
+
``%USERPROFILE%\\AppData\\Roaming\\$appauthor\\$appname`` (roaming)
|
27 |
+
"""
|
28 |
+
const = "CSIDL_APPDATA" if self.roaming else "CSIDL_LOCAL_APPDATA"
|
29 |
+
path = os.path.normpath(get_win_folder(const))
|
30 |
+
return self._append_parts(path)
|
31 |
+
|
32 |
+
def _append_parts(self, path: str, *, opinion_value: str | None = None) -> str:
|
33 |
+
params = []
|
34 |
+
if self.appname:
|
35 |
+
if self.appauthor is not False:
|
36 |
+
author = self.appauthor or self.appname
|
37 |
+
params.append(author)
|
38 |
+
params.append(self.appname)
|
39 |
+
if opinion_value is not None and self.opinion:
|
40 |
+
params.append(opinion_value)
|
41 |
+
if self.version:
|
42 |
+
params.append(self.version)
|
43 |
+
return os.path.join(path, *params)
|
44 |
+
|
45 |
+
@property
|
46 |
+
def site_data_dir(self) -> str:
|
47 |
+
""":return: data directory shared by users, e.g. ``C:\\ProgramData\\$appauthor\\$appname``"""
|
48 |
+
path = os.path.normpath(get_win_folder("CSIDL_COMMON_APPDATA"))
|
49 |
+
return self._append_parts(path)
|
50 |
+
|
51 |
+
    @property
    def user_config_dir(self) -> str:
        """:return: config directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def site_config_dir(self) -> str:
        """:return: config directory shared by the users, same as `site_data_dir`"""
        return self.site_data_dir

    @property
    def user_cache_dir(self) -> str:
        """
        :return: cache directory tied to the user (if opinionated with ``Cache`` folder within ``$appname``) e.g.
         ``%USERPROFILE%\\AppData\\Local\\$appauthor\\$appname\\Cache\\$version``
        """
        path = os.path.normpath(get_win_folder("CSIDL_LOCAL_APPDATA"))
        return self._append_parts(path, opinion_value="Cache")

    @property
    def user_state_dir(self) -> str:
        """:return: state directory tied to the user, same as `user_data_dir`"""
        return self.user_data_dir

    @property
    def user_log_dir(self) -> str:
        """
        :return: log directory tied to the user, same as `user_data_dir` if not opinionated else ``Logs`` in it
        """
        path = self.user_data_dir
        if self.opinion:
            path = os.path.join(path, "Logs")
        return path

    @property
    def user_documents_dir(self) -> str:
        """
        :return: documents directory tied to the user e.g. ``%USERPROFILE%\\Documents``
        """
        return os.path.normpath(get_win_folder("CSIDL_PERSONAL"))

    @property
    def user_runtime_dir(self) -> str:
        """
        :return: runtime directory tied to the user, e.g.
         ``%USERPROFILE%\\AppData\\Local\\Temp\\$appauthor\\$appname``
        """
        path = os.path.normpath(os.path.join(get_win_folder("CSIDL_LOCAL_APPDATA"), "Temp"))
        return self._append_parts(path)


def get_win_folder_from_env_vars(csidl_name: str) -> str:
    """Get folder from environment variables."""
    if csidl_name == "CSIDL_PERSONAL":  # does not have an environment name
        return os.path.join(os.path.normpath(os.environ["USERPROFILE"]), "Documents")

    env_var_name = {
        "CSIDL_APPDATA": "APPDATA",
        "CSIDL_COMMON_APPDATA": "ALLUSERSPROFILE",
        "CSIDL_LOCAL_APPDATA": "LOCALAPPDATA",
    }.get(csidl_name)
    if env_var_name is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")
    result = os.environ.get(env_var_name)
    if result is None:
        raise ValueError(f"Unset environment variable: {env_var_name}")
    return result


def get_win_folder_from_registry(csidl_name: str) -> str:
    """Get folder from the registry.

    This is a fallback technique at best. I'm not sure if using the
    registry for this guarantees us the correct answer for all CSIDL_*
    names.
    """
    shell_folder_name = {
        "CSIDL_APPDATA": "AppData",
        "CSIDL_COMMON_APPDATA": "Common AppData",
        "CSIDL_LOCAL_APPDATA": "Local AppData",
        "CSIDL_PERSONAL": "Personal",
    }.get(csidl_name)
    if shell_folder_name is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")

    import winreg

    key = winreg.OpenKey(winreg.HKEY_CURRENT_USER, r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders")
    directory, _ = winreg.QueryValueEx(key, shell_folder_name)
    return str(directory)


def get_win_folder_via_ctypes(csidl_name: str) -> str:
    """Get folder with ctypes."""
    csidl_const = {
        "CSIDL_APPDATA": 26,
        "CSIDL_COMMON_APPDATA": 35,
        "CSIDL_LOCAL_APPDATA": 28,
        "CSIDL_PERSONAL": 5,
    }.get(csidl_name)
    if csidl_const is None:
        raise ValueError(f"Unknown CSIDL name: {csidl_name}")

    buf = ctypes.create_unicode_buffer(1024)
    windll = getattr(ctypes, "windll")  # noqa: B009 # using getattr to avoid false positive with mypy type checker
    windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)

    # Downgrade to short path name if it has highbit chars.
    if any(ord(c) > 255 for c in buf):
        buf2 = ctypes.create_unicode_buffer(1024)
        if windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
            buf = buf2

    return buf.value


def _pick_get_win_folder() -> Callable[[str], str]:
    if hasattr(ctypes, "windll"):
        return get_win_folder_via_ctypes
    try:
        import winreg  # noqa: F401
    except ImportError:
        return get_win_folder_from_env_vars
    else:
        return get_win_folder_from_registry


get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder())

__all__ = [
    "Windows",
]
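Note: the helpers above resolve a Windows special folder by preferring the ctypes backend, falling back to the registry, then to environment variables, and the chosen backend is picked once and memoized with lru_cache. The following is a minimal sketch of that same select-once-and-memoize pattern, written to be runnable off Windows; the names _from_env, _pick_backend, and resolve are hypothetical stand-ins and are not part of platformdirs.

# Sketch of the backend-selection-and-memoize pattern, under the assumption
# that only an environment-variable backend is available on this host.
from functools import lru_cache
from typing import Callable
import os


def _from_env(name: str) -> str:
    # Mirrors get_win_folder_from_env_vars: map a symbolic name to an env var.
    env = {"HOME_DIR": "HOME", "TMP_DIR": "TMPDIR"}.get(name)
    if env is None:
        raise ValueError(f"Unknown name: {name}")
    value = os.environ.get(env)
    if value is None:
        raise ValueError(f"Unset environment variable: {env}")
    return value


def _pick_backend() -> Callable[[str], str]:
    # The real module prefers ctypes, then winreg, then env vars; this sketch
    # only has the env-var backend, so the chain is trivial.
    return _from_env


# Resolve the backend once at import time and memoize results per name,
# analogous to get_win_folder = lru_cache(maxsize=None)(_pick_get_win_folder()).
resolve = lru_cache(maxsize=None)(_pick_backend())

if __name__ == "__main__":
    print(resolve("HOME_DIR"))  # cached after the first call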
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py
ADDED
@@ -0,0 +1,154 @@
# -*- coding: utf-8 -*-

#   __
#  /__)  _  _     _   _ _/   _
# / (   (- (/ (/ (- _)  /  _)
#          /

"""
Requests HTTP Library
~~~~~~~~~~~~~~~~~~~~~

Requests is an HTTP library, written in Python, for human beings.
Basic GET usage:

   >>> import requests
   >>> r = requests.get('https://www.python.org')
   >>> r.status_code
   200
   >>> b'Python is a programming language' in r.content
   True

... or POST:

   >>> payload = dict(key1='value1', key2='value2')
   >>> r = requests.post('https://httpbin.org/post', data=payload)
   >>> print(r.text)
   {
     ...
     "form": {
       "key1": "value1",
       "key2": "value2"
     },
     ...
   }

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <https://requests.readthedocs.io>.

:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""

from pip._vendor import urllib3
import warnings
from .exceptions import RequestsDependencyWarning

charset_normalizer_version = None

try:
    from pip._vendor.chardet import __version__ as chardet_version
except ImportError:
    chardet_version = None


def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
    urllib3_version = urllib3_version.split('.')
    assert urllib3_version != ['dev']  # Verify urllib3 isn't installed from git.

    # Sometimes, urllib3 only reports its version as 16.1.
    if len(urllib3_version) == 2:
        urllib3_version.append('0')

    # Check urllib3 for compatibility.
    major, minor, patch = urllib3_version  # noqa: F811
    major, minor, patch = int(major), int(minor), int(patch)
    # urllib3 >= 1.21.1, <= 1.26
    assert major == 1
    assert minor >= 21
    assert minor <= 26

    # Check charset_normalizer for compatibility.
    if chardet_version:
        major, minor, patch = chardet_version.split('.')[:3]
        major, minor, patch = int(major), int(minor), int(patch)
        # chardet_version >= 3.0.2, < 5.0.0
        assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
    elif charset_normalizer_version:
        major, minor, patch = charset_normalizer_version.split('.')[:3]
        major, minor, patch = int(major), int(minor), int(patch)
        # charset_normalizer >= 2.0.0 < 3.0.0
        assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
    else:
        raise Exception("You need either charset_normalizer or chardet installed")


def _check_cryptography(cryptography_version):
    # cryptography < 1.3.4
    try:
        cryptography_version = list(map(int, cryptography_version.split('.')))
    except ValueError:
        return

    if cryptography_version < [1, 3, 4]:
        warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
        warnings.warn(warning, RequestsDependencyWarning)


# Check imported dependencies for compatibility.
try:
    check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
except (AssertionError, ValueError):
    warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
                  RequestsDependencyWarning)

# Attempt to enable urllib3's fallback for SNI support
# if the standard library doesn't support SNI or the
# 'ssl' library isn't available.
try:
    # Note: This logic prevents upgrading cryptography on Windows, if imported
    # as part of pip.
    from pip._internal.utils.compat import WINDOWS
    if not WINDOWS:
        raise ImportError("pip internals: don't import cryptography on Windows")
    try:
        import ssl
    except ImportError:
        ssl = None

    if not getattr(ssl, "HAS_SNI", False):
        from pip._vendor.urllib3.contrib import pyopenssl
        pyopenssl.inject_into_urllib3()

        # Check cryptography version
        from cryptography import __version__ as cryptography_version
        _check_cryptography(cryptography_version)
except ImportError:
    pass

# urllib3's DependencyWarnings should be silenced.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)

from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__, __cake__

from . import utils
from . import packages
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError,
    FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
)

# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler

logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
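Note: check_compatibility above parses dotted version strings into integers and asserts that each dependency falls inside a supported range, downgrading failures to a RequestsDependencyWarning at import time. The following is a minimal sketch of that same parse-and-range-check idea; the names DependencyVersionWarning and check_range and the version strings are hypothetical, not part of requests.

# Sketch of the dependency version-gate pattern, under assumed inputs.
import warnings


class DependencyVersionWarning(Warning):
    # Stand-in for RequestsDependencyWarning, which requests defines in .exceptions.
    pass


def check_range(version: str, low: tuple, high: tuple) -> bool:
    # Parse at most three dotted components into integers and compare as tuples.
    parts = tuple(int(p) for p in version.split(".")[:3])
    return low <= parts < high


# Accept roughly urllib3 >= 1.21.1, < 1.27 (mirroring the asserts above);
# anything outside the range only warns, it does not hard-fail.
for candidate in ("1.26.9", "2.0.1"):
    if not check_range(candidate, (1, 21, 1), (1, 27, 0)):
        warnings.warn(f"urllib3 {candidate} is outside the supported range",
                      DependencyVersionWarning)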
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (4.04 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc
ADDED
Binary file (558 Bytes)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc
ADDED
Binary file (1.31 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc
ADDED
Binary file (6.66 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc
ADDED
Binary file (8.1 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc
ADDED
Binary file (642 Bytes)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (1.68 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc
ADDED
Binary file (18.7 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc
ADDED
Binary file (2.91 kB)
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc
ADDED
Binary file (997 Bytes)