Add files using upload-large-folder tool
This view is limited to 50 files because the commit contains too many changes.
- ckpts/universal/global_step80/zero/16.attention.query_key_value.weight/fp32.pt +3 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__init__.py +6 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/dirtools.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/meta.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/build.py +127 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/check.py +207 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/colorlog.py +115 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/compat.py +51 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/dirtools.py +44 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/envbuild.py +171 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__init__.py +17 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/_in_process.py +363 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/meta.py +92 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pep517/wrappers.py +375 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/py31compat.py +23 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py +154 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/models.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/__version__.py +14 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/api.py +159 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/auth.py +305 -0
- venv/lib/python3.10/site-packages/pip/_vendor/requests/compat.py +77 -0
ckpts/universal/global_step80/zero/16.attention.query_key_value.weight/fp32.pt
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e0c6cc1dcac03335e53f8c0e75c0613384711f12dff233acf2bd178635468364
+size 50332749
venv/lib/python3.10/site-packages/pip/_vendor/pep517/__init__.py
ADDED
@@ -0,0 +1,6 @@
+"""Wrappers to build Python packages using PEP 517 hooks
+"""
+
+__version__ = '0.12.0'
+
+from .wrappers import *  # noqa: F401, F403
venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (309 Bytes)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/build.cpython-310.pyc
ADDED
Binary file (3.59 kB)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/check.cpython-310.pyc
ADDED
Binary file (4.56 kB)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/colorlog.cpython-310.pyc
ADDED
Binary file (2.96 kB)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (1.53 kB)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/dirtools.cpython-310.pyc
ADDED
Binary file (1.35 kB)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/envbuild.cpython-310.pyc
ADDED
Binary file (4.37 kB)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/meta.cpython-310.pyc
ADDED
Binary file (2.96 kB)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/__pycache__/wrappers.cpython-310.pyc
ADDED
Binary file (12.3 kB)
venv/lib/python3.10/site-packages/pip/_vendor/pep517/build.py
ADDED
@@ -0,0 +1,127 @@
+"""Build a project using PEP 517 hooks.
+"""
+import argparse
+import io
+import logging
+import os
+import shutil
+
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller
+from .dirtools import tempdir, mkdir_p
+from .compat import FileNotFoundError, toml_load
+
+log = logging.getLogger(__name__)
+
+
+def validate_system(system):
+    """
+    Ensure build system has the requisite fields.
+    """
+    required = {'requires', 'build-backend'}
+    if not (required <= set(system)):
+        message = "Missing required fields: {missing}".format(
+            missing=required-set(system),
+        )
+        raise ValueError(message)
+
+
+def load_system(source_dir):
+    """
+    Load the build system from a source dir (pyproject.toml).
+    """
+    pyproject = os.path.join(source_dir, 'pyproject.toml')
+    with io.open(pyproject, 'rb') as f:
+        pyproject_data = toml_load(f)
+    return pyproject_data['build-system']
+
+
+def compat_system(source_dir):
+    """
+    Given a source dir, attempt to get a build system backend
+    and requirements from pyproject.toml. Fallback to
+    setuptools but only if the file was not found or a build
+    system was not indicated.
+    """
+    try:
+        system = load_system(source_dir)
+    except (FileNotFoundError, KeyError):
+        system = {}
+    system.setdefault(
+        'build-backend',
+        'setuptools.build_meta:__legacy__',
+    )
+    system.setdefault('requires', ['setuptools', 'wheel'])
+    return system
+
+
+def _do_build(hooks, env, dist, dest):
+    get_requires_name = 'get_requires_for_build_{dist}'.format(**locals())
+    get_requires = getattr(hooks, get_requires_name)
+    reqs = get_requires({})
+    log.info('Got build requires: %s', reqs)
+
+    env.pip_install(reqs)
+    log.info('Installed dynamic build dependencies')
+
+    with tempdir() as td:
+        log.info('Trying to build %s in %s', dist, td)
+        build_name = 'build_{dist}'.format(**locals())
+        build = getattr(hooks, build_name)
+        filename = build(td, {})
+        source = os.path.join(td, filename)
+        shutil.move(source, os.path.join(dest, os.path.basename(filename)))
+
+
+def build(source_dir, dist, dest=None, system=None):
+    system = system or load_system(source_dir)
+    dest = os.path.join(source_dir, dest or 'dist')
+    mkdir_p(dest)
+
+    validate_system(system)
+    hooks = Pep517HookCaller(
+        source_dir, system['build-backend'], system.get('backend-path')
+    )
+
+    with BuildEnvironment() as env:
+        env.pip_install(system['requires'])
+        _do_build(hooks, env, dist, dest)
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    'source_dir',
+    help="A directory containing pyproject.toml",
+)
+parser.add_argument(
+    '--binary', '-b',
+    action='store_true',
+    default=False,
+)
+parser.add_argument(
+    '--source', '-s',
+    action='store_true',
+    default=False,
+)
+parser.add_argument(
+    '--out-dir', '-o',
+    help="Destination in which to save the builds relative to source dir",
+)
+
+
+def main(args):
+    log.warning('pep517.build is deprecated. '
+                'Consider switching to https://pypi.org/project/build/')
+
+    # determine which dists to build
+    dists = list(filter(None, (
+        'sdist' if args.source or not args.binary else None,
+        'wheel' if args.binary or not args.source else None,
+    )))
+
+    for dist in dists:
+        build(args.source_dir, dist, args.out_dir)
+
+
+if __name__ == '__main__':
+    main(parser.parse_args())
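As an aside, the build.py module added above can also be driven directly from Python: build() takes a source directory and a dist type ('sdist' or 'wheel') and drops the result into a 'dist' subdirectory by default. A minimal sketch, assuming the vendored import path and a hypothetical project location:

# Sketch only: calling the vendored helper directly; the project path is a placeholder.
from pip._vendor.pep517.build import build

# Builds a wheel into <project>/dist using the backend declared in pyproject.toml.
build(source_dir='/path/to/project', dist='wheel')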
venv/lib/python3.10/site-packages/pip/_vendor/pep517/check.py
ADDED
@@ -0,0 +1,207 @@
+"""Check a project and backend by attempting to build using PEP 517 hooks.
+"""
+import argparse
+import io
+import logging
+import os
+from os.path import isfile, join as pjoin
+import shutil
+from subprocess import CalledProcessError
+import sys
+import tarfile
+from tempfile import mkdtemp
+import zipfile
+
+from .colorlog import enable_colourful_output
+from .compat import TOMLDecodeError, toml_load
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller
+
+log = logging.getLogger(__name__)
+
+
+def check_build_sdist(hooks, build_sys_requires):
+    with BuildEnvironment() as env:
+        try:
+            env.pip_install(build_sys_requires)
+            log.info('Installed static build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install static build dependencies')
+            return False
+
+        try:
+            reqs = hooks.get_requires_for_build_sdist({})
+            log.info('Got build requires: %s', reqs)
+        except Exception:
+            log.error('Failure in get_requires_for_build_sdist', exc_info=True)
+            return False
+
+        try:
+            env.pip_install(reqs)
+            log.info('Installed dynamic build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install dynamic build dependencies')
+            return False
+
+        td = mkdtemp()
+        log.info('Trying to build sdist in %s', td)
+        try:
+            try:
+                filename = hooks.build_sdist(td, {})
+                log.info('build_sdist returned %r', filename)
+            except Exception:
+                log.info('Failure in build_sdist', exc_info=True)
+                return False
+
+            if not filename.endswith('.tar.gz'):
+                log.error(
+                    "Filename %s doesn't have .tar.gz extension", filename)
+                return False
+
+            path = pjoin(td, filename)
+            if isfile(path):
+                log.info("Output file %s exists", path)
+            else:
+                log.error("Output file %s does not exist", path)
+                return False
+
+            if tarfile.is_tarfile(path):
+                log.info("Output file is a tar file")
+            else:
+                log.error("Output file is not a tar file")
+                return False
+
+        finally:
+            shutil.rmtree(td)
+
+        return True
+
+
+def check_build_wheel(hooks, build_sys_requires):
+    with BuildEnvironment() as env:
+        try:
+            env.pip_install(build_sys_requires)
+            log.info('Installed static build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install static build dependencies')
+            return False
+
+        try:
+            reqs = hooks.get_requires_for_build_wheel({})
+            log.info('Got build requires: %s', reqs)
+        except Exception:
+            log.error('Failure in get_requires_for_build_sdist', exc_info=True)
+            return False
+
+        try:
+            env.pip_install(reqs)
+            log.info('Installed dynamic build dependencies')
+        except CalledProcessError:
+            log.error('Failed to install dynamic build dependencies')
+            return False
+
+        td = mkdtemp()
+        log.info('Trying to build wheel in %s', td)
+        try:
+            try:
+                filename = hooks.build_wheel(td, {})
+                log.info('build_wheel returned %r', filename)
+            except Exception:
+                log.info('Failure in build_wheel', exc_info=True)
+                return False
+
+            if not filename.endswith('.whl'):
+                log.error("Filename %s doesn't have .whl extension", filename)
+                return False
+
+            path = pjoin(td, filename)
+            if isfile(path):
+                log.info("Output file %s exists", path)
+            else:
+                log.error("Output file %s does not exist", path)
+                return False
+
+            if zipfile.is_zipfile(path):
+                log.info("Output file is a zip file")
+            else:
+                log.error("Output file is not a zip file")
+                return False
+
+        finally:
+            shutil.rmtree(td)
+
+        return True
+
+
+def check(source_dir):
+    pyproject = pjoin(source_dir, 'pyproject.toml')
+    if isfile(pyproject):
+        log.info('Found pyproject.toml')
+    else:
+        log.error('Missing pyproject.toml')
+        return False
+
+    try:
+        with io.open(pyproject, 'rb') as f:
+            pyproject_data = toml_load(f)
+        # Ensure the mandatory data can be loaded
+        buildsys = pyproject_data['build-system']
+        requires = buildsys['requires']
+        backend = buildsys['build-backend']
+        backend_path = buildsys.get('backend-path')
+        log.info('Loaded pyproject.toml')
+    except (TOMLDecodeError, KeyError):
+        log.error("Invalid pyproject.toml", exc_info=True)
+        return False
+
+    hooks = Pep517HookCaller(source_dir, backend, backend_path)
+
+    sdist_ok = check_build_sdist(hooks, requires)
+    wheel_ok = check_build_wheel(hooks, requires)
+
+    if not sdist_ok:
+        log.warning('Sdist checks failed; scroll up to see')
+    if not wheel_ok:
+        log.warning('Wheel checks failed')
+
+    return sdist_ok
+
+
+def main(argv=None):
+    log.warning('pep517.check is deprecated. '
+                'Consider switching to https://pypi.org/project/build/')
+
+    ap = argparse.ArgumentParser()
+    ap.add_argument(
+        'source_dir',
+        help="A directory containing pyproject.toml")
+    args = ap.parse_args(argv)
+
+    enable_colourful_output()
+
+    ok = check(args.source_dir)
+
+    if ok:
+        print(ansi('Checks passed', 'green'))
+    else:
+        print(ansi('Checks failed', 'red'))
+        sys.exit(1)
+
+
+ansi_codes = {
+    'reset': '\x1b[0m',
+    'bold': '\x1b[1m',
+    'red': '\x1b[31m',
+    'green': '\x1b[32m',
+}
+
+
+def ansi(s, attr):
+    if os.name != 'nt' and sys.stdout.isatty():
+        return ansi_codes[attr] + str(s) + ansi_codes['reset']
+    else:
+        return str(s)
+
+
+if __name__ == '__main__':
+    main()
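For reference, the check() function added above returns True only when the sdist check succeeds; it can be used programmatically as well as via the CLI. A short sketch, assuming the vendored import path and a hypothetical project directory:

# Sketch only: running the (deprecated) self-check against a project directory.
import logging
from pip._vendor.pep517.check import check

logging.basicConfig(level=logging.INFO)
ok = check('/path/to/project')  # hypothetical path
print('checks passed' if ok else 'checks failed')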
venv/lib/python3.10/site-packages/pip/_vendor/pep517/colorlog.py
ADDED
@@ -0,0 +1,115 @@
+"""Nicer log formatting with colours.
+
+Code copied from Tornado, Apache licensed.
+"""
+# Copyright 2012 Facebook
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import logging
+import sys
+
+try:
+    import curses
+except ImportError:
+    curses = None
+
+
+def _stderr_supports_color():
+    color = False
+    if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
+        try:
+            curses.setupterm()
+            if curses.tigetnum("colors") > 0:
+                color = True
+        except Exception:
+            pass
+    return color
+
+
+class LogFormatter(logging.Formatter):
+    """Log formatter with colour support
+    """
+    DEFAULT_COLORS = {
+        logging.INFO: 2,  # Green
+        logging.WARNING: 3,  # Yellow
+        logging.ERROR: 1,  # Red
+        logging.CRITICAL: 1,
+    }
+
+    def __init__(self, color=True, datefmt=None):
+        r"""
+        :arg bool color: Enables color support.
+        :arg string fmt: Log message format.
+        It will be applied to the attributes dict of log records. The
+        text between ``%(color)s`` and ``%(end_color)s`` will be colored
+        depending on the level if color support is on.
+        :arg dict colors: color mappings from logging level to terminal color
+        code
+        :arg string datefmt: Datetime format.
+        Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
+        .. versionchanged:: 3.2
+        Added ``fmt`` and ``datefmt`` arguments.
+        """
+        logging.Formatter.__init__(self, datefmt=datefmt)
+        self._colors = {}
+        if color and _stderr_supports_color():
+            # The curses module has some str/bytes confusion in
+            # python3. Until version 3.2.3, most methods return
+            # bytes, but only accept strings. In addition, we want to
+            # output these strings with the logging module, which
+            # works with unicode strings. The explicit calls to
+            # unicode() below are harmless in python2 but will do the
+            # right conversion in python 3.
+            fg_color = (curses.tigetstr("setaf") or
+                        curses.tigetstr("setf") or "")
+            if (3, 0) < sys.version_info < (3, 2, 3):
+                fg_color = str(fg_color, "ascii")
+
+            for levelno, code in self.DEFAULT_COLORS.items():
+                self._colors[levelno] = str(
+                    curses.tparm(fg_color, code), "ascii")
+            self._normal = str(curses.tigetstr("sgr0"), "ascii")
+
+            scr = curses.initscr()
+            self.termwidth = scr.getmaxyx()[1]
+            curses.endwin()
+        else:
+            self._normal = ''
+            # Default width is usually 80, but too wide is
+            # worse than too narrow
+            self.termwidth = 70
+
+    def formatMessage(self, record):
+        mlen = len(record.message)
+        right_text = '{initial}-{name}'.format(initial=record.levelname[0],
+                                               name=record.name)
+        if mlen + len(right_text) < self.termwidth:
+            space = ' ' * (self.termwidth - (mlen + len(right_text)))
+        else:
+            space = ' '
+
+        if record.levelno in self._colors:
+            start_color = self._colors[record.levelno]
+            end_color = self._normal
+        else:
+            start_color = end_color = ''
+
+        return record.message + space + start_color + right_text + end_color
+
+
+def enable_colourful_output(level=logging.INFO):
+    handler = logging.StreamHandler()
+    handler.setFormatter(LogFormatter())
+    logging.root.addHandler(handler)
+    logging.root.setLevel(level)
venv/lib/python3.10/site-packages/pip/_vendor/pep517/compat.py
ADDED
@@ -0,0 +1,51 @@
+"""Python 2/3 compatibility"""
+import io
+import json
+import sys
+
+
+# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
+
+if sys.version_info[0] >= 3:
+    # Python 3
+    def write_json(obj, path, **kwargs):
+        with open(path, 'w', encoding='utf-8') as f:
+            json.dump(obj, f, **kwargs)
+
+    def read_json(path):
+        with open(path, 'r', encoding='utf-8') as f:
+            return json.load(f)
+
+else:
+    # Python 2
+    def write_json(obj, path, **kwargs):
+        with open(path, 'wb') as f:
+            json.dump(obj, f, encoding='utf-8', **kwargs)
+
+    def read_json(path):
+        with open(path, 'rb') as f:
+            return json.load(f)
+
+
+# FileNotFoundError
+
+try:
+    FileNotFoundError = FileNotFoundError
+except NameError:
+    FileNotFoundError = IOError
+
+
+if sys.version_info < (3, 6):
+    from toml import load as _toml_load  # noqa: F401
+
+    def toml_load(f):
+        w = io.TextIOWrapper(f, encoding="utf8", newline="")
+        try:
+            return _toml_load(w)
+        finally:
+            w.detach()
+
+    from toml import TomlDecodeError as TOMLDecodeError  # noqa: F401
+else:
+    from pip._vendor.tomli import load as toml_load  # noqa: F401
+    from pip._vendor.tomli import TOMLDecodeError  # noqa: F401
venv/lib/python3.10/site-packages/pip/_vendor/pep517/dirtools.py
ADDED
@@ -0,0 +1,44 @@
+import os
+import io
+import contextlib
+import tempfile
+import shutil
+import errno
+import zipfile
+
+
+@contextlib.contextmanager
+def tempdir():
+    """Create a temporary directory in a context manager."""
+    td = tempfile.mkdtemp()
+    try:
+        yield td
+    finally:
+        shutil.rmtree(td)
+
+
+def mkdir_p(*args, **kwargs):
+    """Like `mkdir`, but does not raise an exception if the
+    directory already exists.
+    """
+    try:
+        return os.mkdir(*args, **kwargs)
+    except OSError as exc:
+        if exc.errno != errno.EEXIST:
+            raise
+
+
+def dir_to_zipfile(root):
+    """Construct an in-memory zip file for a directory."""
+    buffer = io.BytesIO()
+    zip_file = zipfile.ZipFile(buffer, 'w')
+    for root, dirs, files in os.walk(root):
+        for path in dirs:
+            fs_path = os.path.join(root, path)
+            rel_path = os.path.relpath(fs_path, root)
+            zip_file.writestr(rel_path + '/', '')
+        for path in files:
+            fs_path = os.path.join(root, path)
+            rel_path = os.path.relpath(fs_path, root)
+            zip_file.write(fs_path, rel_path)
+    return zip_file
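The dirtools helpers above are small utilities used by the other modules in this package; a brief usage sketch of the temporary-directory context manager and the in-memory zip helper (nothing here beyond what the file itself defines):

# Sketch only: exercising the two helpers on a throwaway directory.
from pip._vendor.pep517.dirtools import tempdir, dir_to_zipfile

with tempdir() as td:            # temporary directory, removed on exit
    zf = dir_to_zipfile(td)      # in-memory ZipFile of its (empty) contents
    print(zf.namelist())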
venv/lib/python3.10/site-packages/pip/_vendor/pep517/envbuild.py
ADDED
@@ -0,0 +1,171 @@
+"""Build wheels/sdists by installing build deps to a temporary environment.
+"""
+
+import io
+import os
+import logging
+import shutil
+from subprocess import check_call
+import sys
+from sysconfig import get_paths
+from tempfile import mkdtemp
+
+from .compat import toml_load
+from .wrappers import Pep517HookCaller, LoggerWrapper
+
+log = logging.getLogger(__name__)
+
+
+def _load_pyproject(source_dir):
+    with io.open(
+            os.path.join(source_dir, 'pyproject.toml'),
+            'rb',
+            ) as f:
+        pyproject_data = toml_load(f)
+    buildsys = pyproject_data['build-system']
+    return (
+        buildsys['requires'],
+        buildsys['build-backend'],
+        buildsys.get('backend-path'),
+    )
+
+
+class BuildEnvironment(object):
+    """Context manager to install build deps in a simple temporary environment
+
+    Based on code I wrote for pip, which is MIT licensed.
+    """
+    # Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
+    #
+    # Permission is hereby granted, free of charge, to any person obtaining
+    # a copy of this software and associated documentation files (the
+    # "Software"), to deal in the Software without restriction, including
+    # without limitation the rights to use, copy, modify, merge, publish,
+    # distribute, sublicense, and/or sell copies of the Software, and to
+    # permit persons to whom the Software is furnished to do so, subject to
+    # the following conditions:
+    #
+    # The above copyright notice and this permission notice shall be
+    # included in all copies or substantial portions of the Software.
+    #
+    # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+    # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+    # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+    # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+    # LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+    # OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+    # WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+    path = None
+
+    def __init__(self, cleanup=True):
+        self._cleanup = cleanup
+
+    def __enter__(self):
+        self.path = mkdtemp(prefix='pep517-build-env-')
+        log.info('Temporary build environment: %s', self.path)
+
+        self.save_path = os.environ.get('PATH', None)
+        self.save_pythonpath = os.environ.get('PYTHONPATH', None)
+
+        install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
+        install_dirs = get_paths(install_scheme, vars={
+            'base': self.path,
+            'platbase': self.path,
+        })
+
+        scripts = install_dirs['scripts']
+        if self.save_path:
+            os.environ['PATH'] = scripts + os.pathsep + self.save_path
+        else:
+            os.environ['PATH'] = scripts + os.pathsep + os.defpath
+
+        if install_dirs['purelib'] == install_dirs['platlib']:
+            lib_dirs = install_dirs['purelib']
+        else:
+            lib_dirs = install_dirs['purelib'] + os.pathsep + \
+                install_dirs['platlib']
+        if self.save_pythonpath:
+            os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
+                self.save_pythonpath
+        else:
+            os.environ['PYTHONPATH'] = lib_dirs
+
+        return self
+
+    def pip_install(self, reqs):
+        """Install dependencies into this env by calling pip in a subprocess"""
+        if not reqs:
+            return
+        log.info('Calling pip to install %s', reqs)
+        cmd = [
+            sys.executable, '-m', 'pip', 'install', '--ignore-installed',
+            '--prefix', self.path] + list(reqs)
+        check_call(
+            cmd,
+            stdout=LoggerWrapper(log, logging.INFO),
+            stderr=LoggerWrapper(log, logging.ERROR),
+        )
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        needs_cleanup = (
+            self._cleanup and
+            self.path is not None and
+            os.path.isdir(self.path)
+        )
+        if needs_cleanup:
+            shutil.rmtree(self.path)
+
+        if self.save_path is None:
+            os.environ.pop('PATH', None)
+        else:
+            os.environ['PATH'] = self.save_path
+
+        if self.save_pythonpath is None:
+            os.environ.pop('PYTHONPATH', None)
+        else:
+            os.environ['PYTHONPATH'] = self.save_pythonpath
+
+
+def build_wheel(source_dir, wheel_dir, config_settings=None):
+    """Build a wheel from a source directory using PEP 517 hooks.
+
+    :param str source_dir: Source directory containing pyproject.toml
+    :param str wheel_dir: Target directory to create wheel in
+    :param dict config_settings: Options to pass to build backend
+
+    This is a blocking function which will run pip in a subprocess to install
+    build requirements.
+    """
+    if config_settings is None:
+        config_settings = {}
+    requires, backend, backend_path = _load_pyproject(source_dir)
+    hooks = Pep517HookCaller(source_dir, backend, backend_path)
+
+    with BuildEnvironment() as env:
+        env.pip_install(requires)
+        reqs = hooks.get_requires_for_build_wheel(config_settings)
+        env.pip_install(reqs)
+        return hooks.build_wheel(wheel_dir, config_settings)
+
+
+def build_sdist(source_dir, sdist_dir, config_settings=None):
+    """Build an sdist from a source directory using PEP 517 hooks.
+
+    :param str source_dir: Source directory containing pyproject.toml
+    :param str sdist_dir: Target directory to place sdist in
+    :param dict config_settings: Options to pass to build backend
+
+    This is a blocking function which will run pip in a subprocess to install
+    build requirements.
+    """
+    if config_settings is None:
+        config_settings = {}
+    requires, backend, backend_path = _load_pyproject(source_dir)
+    hooks = Pep517HookCaller(source_dir, backend, backend_path)
+
+    with BuildEnvironment() as env:
+        env.pip_install(requires)
+        reqs = hooks.get_requires_for_build_sdist(config_settings)
+        env.pip_install(reqs)
+        return hooks.build_sdist(sdist_dir, config_settings)
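build_wheel() and build_sdist() above are the high-level entry points of envbuild.py: each installs the declared build requirements into a throwaway environment and then asks the backend to build. A minimal usage sketch, assuming the vendored import path; directory names are placeholders:

# Sketch only: building a wheel via the temporary-environment helper.
from pip._vendor.pep517.envbuild import build_wheel

# Returns the basename of the wheel the backend wrote into wheel_dir.
whl_name = build_wheel(source_dir='/path/to/project', wheel_dir='/tmp/wheels')
print(whl_name)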
venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__init__.py
ADDED
@@ -0,0 +1,17 @@
+"""This is a subpackage because the directory is on sys.path for _in_process.py
+
+The subpackage should stay as empty as possible to avoid shadowing modules that
+the backend might import.
+"""
+from os.path import dirname, abspath, join as pjoin
+from contextlib import contextmanager
+
+try:
+    import importlib.resources as resources
+
+    def _in_proc_script_path():
+        return resources.path(__package__, '_in_process.py')
+except ImportError:
+    @contextmanager
+    def _in_proc_script_path():
+        yield pjoin(dirname(abspath(__file__)), '_in_process.py')
venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (917 Bytes)

venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/__pycache__/_in_process.cpython-310.pyc
ADDED
Binary file (10.1 kB)
venv/lib/python3.10/site-packages/pip/_vendor/pep517/in_process/_in_process.py
ADDED
@@ -0,0 +1,363 @@
+"""This is invoked in a subprocess to call the build backend hooks.
+
+It expects:
+- Command line args: hook_name, control_dir
+- Environment variables:
+      PEP517_BUILD_BACKEND=entry.point:spec
+      PEP517_BACKEND_PATH=paths (separated with os.pathsep)
+- control_dir/input.json:
+  - {"kwargs": {...}}
+
+Results:
+- control_dir/output.json
+  - {"return_val": ...}
+"""
+from glob import glob
+from importlib import import_module
+import json
+import os
+import os.path
+from os.path import join as pjoin
+import re
+import shutil
+import sys
+import traceback
+
+# This file is run as a script, and `import compat` is not zip-safe, so we
+# include write_json() and read_json() from compat.py.
+#
+# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
+
+if sys.version_info[0] >= 3:
+    # Python 3
+    def write_json(obj, path, **kwargs):
+        with open(path, 'w', encoding='utf-8') as f:
+            json.dump(obj, f, **kwargs)
+
+    def read_json(path):
+        with open(path, 'r', encoding='utf-8') as f:
+            return json.load(f)
+
+else:
+    # Python 2
+    def write_json(obj, path, **kwargs):
+        with open(path, 'wb') as f:
+            json.dump(obj, f, encoding='utf-8', **kwargs)
+
+    def read_json(path):
+        with open(path, 'rb') as f:
+            return json.load(f)
+
+
+class BackendUnavailable(Exception):
+    """Raised if we cannot import the backend"""
+    def __init__(self, traceback):
+        self.traceback = traceback
+
+
+class BackendInvalid(Exception):
+    """Raised if the backend is invalid"""
+    def __init__(self, message):
+        self.message = message
+
+
+class HookMissing(Exception):
+    """Raised if a hook is missing and we are not executing the fallback"""
+    def __init__(self, hook_name=None):
+        super(HookMissing, self).__init__(hook_name)
+        self.hook_name = hook_name
+
+
+def contained_in(filename, directory):
+    """Test if a file is located within the given directory."""
+    filename = os.path.normcase(os.path.abspath(filename))
+    directory = os.path.normcase(os.path.abspath(directory))
+    return os.path.commonprefix([filename, directory]) == directory
+
+
+def _build_backend():
+    """Find and load the build backend"""
+    # Add in-tree backend directories to the front of sys.path.
+    backend_path = os.environ.get('PEP517_BACKEND_PATH')
+    if backend_path:
+        extra_pathitems = backend_path.split(os.pathsep)
+        sys.path[:0] = extra_pathitems
+
+    ep = os.environ['PEP517_BUILD_BACKEND']
+    mod_path, _, obj_path = ep.partition(':')
+    try:
+        obj = import_module(mod_path)
+    except ImportError:
+        raise BackendUnavailable(traceback.format_exc())
+
+    if backend_path:
+        if not any(
+            contained_in(obj.__file__, path)
+            for path in extra_pathitems
+        ):
+            raise BackendInvalid("Backend was not loaded from backend-path")
+
+    if obj_path:
+        for path_part in obj_path.split('.'):
+            obj = getattr(obj, path_part)
+    return obj
+
+
+def _supported_features():
+    """Return the list of options features supported by the backend.
+
+    Returns a list of strings.
+    The only possible value is 'build_editable'.
+    """
+    backend = _build_backend()
+    features = []
+    if hasattr(backend, "build_editable"):
+        features.append("build_editable")
+    return features
+
+
+def get_requires_for_build_wheel(config_settings):
+    """Invoke the optional get_requires_for_build_wheel hook
+
+    Returns [] if the hook is not defined.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.get_requires_for_build_wheel
+    except AttributeError:
+        return []
+    else:
+        return hook(config_settings)
+
+
+def get_requires_for_build_editable(config_settings):
+    """Invoke the optional get_requires_for_build_editable hook
+
+    Returns [] if the hook is not defined.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.get_requires_for_build_editable
+    except AttributeError:
+        return []
+    else:
+        return hook(config_settings)
+
+
+def prepare_metadata_for_build_wheel(
+        metadata_directory, config_settings, _allow_fallback):
+    """Invoke optional prepare_metadata_for_build_wheel
+
+    Implements a fallback by building a wheel if the hook isn't defined,
+    unless _allow_fallback is False in which case HookMissing is raised.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.prepare_metadata_for_build_wheel
+    except AttributeError:
+        if not _allow_fallback:
+            raise HookMissing()
+        whl_basename = backend.build_wheel(metadata_directory, config_settings)
+        return _get_wheel_metadata_from_wheel(whl_basename, metadata_directory,
+                                              config_settings)
+    else:
+        return hook(metadata_directory, config_settings)
+
+
+def prepare_metadata_for_build_editable(
+        metadata_directory, config_settings, _allow_fallback):
+    """Invoke optional prepare_metadata_for_build_editable
+
+    Implements a fallback by building an editable wheel if the hook isn't
+    defined, unless _allow_fallback is False in which case HookMissing is
+    raised.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.prepare_metadata_for_build_editable
+    except AttributeError:
+        if not _allow_fallback:
+            raise HookMissing()
+        try:
+            build_hook = backend.build_editable
+        except AttributeError:
+            raise HookMissing(hook_name='build_editable')
+        else:
+            whl_basename = build_hook(metadata_directory, config_settings)
+            return _get_wheel_metadata_from_wheel(whl_basename,
+                                                  metadata_directory,
+                                                  config_settings)
+    else:
+        return hook(metadata_directory, config_settings)
+
+
+WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL'
+
+
+def _dist_info_files(whl_zip):
+    """Identify the .dist-info folder inside a wheel ZipFile."""
+    res = []
+    for path in whl_zip.namelist():
+        m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
+        if m:
+            res.append(path)
+    if res:
+        return res
+    raise Exception("No .dist-info folder found in wheel")
+
+
+def _get_wheel_metadata_from_wheel(
+        whl_basename, metadata_directory, config_settings):
+    """Extract the metadata from a wheel.
+
+    Fallback for when the build backend does not
+    define the 'get_wheel_metadata' hook.
+    """
+    from zipfile import ZipFile
+    with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'):
+        pass  # Touch marker file
+
+    whl_file = os.path.join(metadata_directory, whl_basename)
+    with ZipFile(whl_file) as zipf:
+        dist_info = _dist_info_files(zipf)
+        zipf.extractall(path=metadata_directory, members=dist_info)
+    return dist_info[0].split('/')[0]
+
+
+def _find_already_built_wheel(metadata_directory):
+    """Check for a wheel already built during the get_wheel_metadata hook.
+    """
+    if not metadata_directory:
+        return None
+    metadata_parent = os.path.dirname(metadata_directory)
+    if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)):
+        return None
+
+    whl_files = glob(os.path.join(metadata_parent, '*.whl'))
+    if not whl_files:
+        print('Found wheel built marker, but no .whl files')
+        return None
+    if len(whl_files) > 1:
+        print('Found multiple .whl files; unspecified behaviour. '
+              'Will call build_wheel.')
+        return None
+
+    # Exactly one .whl file
+    return whl_files[0]
+
+
+def build_wheel(wheel_directory, config_settings, metadata_directory=None):
+    """Invoke the mandatory build_wheel hook.
+
+    If a wheel was already built in the
+    prepare_metadata_for_build_wheel fallback, this
+    will copy it rather than rebuilding the wheel.
+    """
+    prebuilt_whl = _find_already_built_wheel(metadata_directory)
+    if prebuilt_whl:
+        shutil.copy2(prebuilt_whl, wheel_directory)
+        return os.path.basename(prebuilt_whl)
+
+    return _build_backend().build_wheel(wheel_directory, config_settings,
+                                        metadata_directory)
+
+
+def build_editable(wheel_directory, config_settings, metadata_directory=None):
+    """Invoke the optional build_editable hook.
+
+    If a wheel was already built in the
+    prepare_metadata_for_build_editable fallback, this
+    will copy it rather than rebuilding the wheel.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.build_editable
+    except AttributeError:
+        raise HookMissing()
+    else:
+        prebuilt_whl = _find_already_built_wheel(metadata_directory)
+        if prebuilt_whl:
+            shutil.copy2(prebuilt_whl, wheel_directory)
+            return os.path.basename(prebuilt_whl)
+
+        return hook(wheel_directory, config_settings, metadata_directory)
+
+
+def get_requires_for_build_sdist(config_settings):
+    """Invoke the optional get_requires_for_build_wheel hook
+
+    Returns [] if the hook is not defined.
+    """
+    backend = _build_backend()
+    try:
+        hook = backend.get_requires_for_build_sdist
+    except AttributeError:
+        return []
+    else:
+        return hook(config_settings)
+
+
+class _DummyException(Exception):
+    """Nothing should ever raise this exception"""
+
+
+class GotUnsupportedOperation(Exception):
+    """For internal use when backend raises UnsupportedOperation"""
+    def __init__(self, traceback):
+        self.traceback = traceback
+
+
+def build_sdist(sdist_directory, config_settings):
+    """Invoke the mandatory build_sdist hook."""
+    backend = _build_backend()
+    try:
+        return backend.build_sdist(sdist_directory, config_settings)
+    except getattr(backend, 'UnsupportedOperation', _DummyException):
+        raise GotUnsupportedOperation(traceback.format_exc())
+
+
+HOOK_NAMES = {
+    'get_requires_for_build_wheel',
+    'prepare_metadata_for_build_wheel',
+    'build_wheel',
+    'get_requires_for_build_editable',
+    'prepare_metadata_for_build_editable',
+    'build_editable',
+    'get_requires_for_build_sdist',
+    'build_sdist',
+    '_supported_features',
+}
+
+
+def main():
+    if len(sys.argv) < 3:
+        sys.exit("Needs args: hook_name, control_dir")
+    hook_name = sys.argv[1]
+    control_dir = sys.argv[2]
+    if hook_name not in HOOK_NAMES:
+        sys.exit("Unknown hook: %s" % hook_name)
+    hook = globals()[hook_name]
+
+    hook_input = read_json(pjoin(control_dir, 'input.json'))
+
+    json_out = {'unsupported': False, 'return_val': None}
+    try:
+        json_out['return_val'] = hook(**hook_input['kwargs'])
+    except BackendUnavailable as e:
+        json_out['no_backend'] = True
+        json_out['traceback'] = e.traceback
+    except BackendInvalid as e:
+        json_out['backend_invalid'] = True
+        json_out['backend_error'] = e.message
+    except GotUnsupportedOperation as e:
+        json_out['unsupported'] = True
+        json_out['traceback'] = e.traceback
+    except HookMissing as e:
+        json_out['hook_missing'] = True
+        json_out['missing_hook_name'] = e.hook_name or hook_name
+
+    write_json(json_out, pjoin(control_dir, 'output.json'), indent=2)
+
+
+if __name__ == '__main__':
+    main()
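The module docstring of _in_process.py above defines a small JSON control-file protocol between the caller (wrappers.py) and this subprocess. Roughly, an exchange for a build_wheel call looks like the following sketch; the file contents and the wheel filename are illustrative only, not taken from the diff:

# control_dir/input.json, written by the parent process before the call:
#   {"kwargs": {"wheel_directory": "/tmp/out", "config_settings": {}, "metadata_directory": null}}
#
# Invocation (PEP517_BUILD_BACKEND names the backend from pyproject.toml):
#   PEP517_BUILD_BACKEND=setuptools.build_meta python _in_process.py build_wheel <control_dir>
#
# control_dir/output.json, written by main() above after the hook returns:
#   {"unsupported": false, "return_val": "example_pkg-0.1-py3-none-any.whl"}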
venv/lib/python3.10/site-packages/pip/_vendor/pep517/meta.py
ADDED
@@ -0,0 +1,92 @@
+"""Build metadata for a project using PEP 517 hooks.
+"""
+import argparse
+import logging
+import os
+import shutil
+import functools
+
+try:
+    import importlib.metadata as imp_meta
+except ImportError:
+    import importlib_metadata as imp_meta
+
+try:
+    from zipfile import Path
+except ImportError:
+    from zipp import Path
+
+from .envbuild import BuildEnvironment
+from .wrappers import Pep517HookCaller, quiet_subprocess_runner
+from .dirtools import tempdir, mkdir_p, dir_to_zipfile
+from .build import validate_system, load_system, compat_system
+
+log = logging.getLogger(__name__)
+
+
+def _prep_meta(hooks, env, dest):
+    reqs = hooks.get_requires_for_build_wheel({})
+    log.info('Got build requires: %s', reqs)
+
+    env.pip_install(reqs)
+    log.info('Installed dynamic build dependencies')
+
+    with tempdir() as td:
+        log.info('Trying to build metadata in %s', td)
+        filename = hooks.prepare_metadata_for_build_wheel(td, {})
+        source = os.path.join(td, filename)
+        shutil.move(source, os.path.join(dest, os.path.basename(filename)))
+
+
+def build(source_dir='.', dest=None, system=None):
+    system = system or load_system(source_dir)
+    dest = os.path.join(source_dir, dest or 'dist')
+    mkdir_p(dest)
+    validate_system(system)
+    hooks = Pep517HookCaller(
+        source_dir, system['build-backend'], system.get('backend-path')
+    )
+
+    with hooks.subprocess_runner(quiet_subprocess_runner):
+        with BuildEnvironment() as env:
+            env.pip_install(system['requires'])
+            _prep_meta(hooks, env, dest)
+
+
+def build_as_zip(builder=build):
+    with tempdir() as out_dir:
+        builder(dest=out_dir)
+        return dir_to_zipfile(out_dir)
+
+
+def load(root):
+    """
+    Given a source directory (root) of a package,
+    return an importlib.metadata.Distribution object
+    with metadata build from that package.
+    """
+    root = os.path.expanduser(root)
+    system = compat_system(root)
+    builder = functools.partial(build, source_dir=root, system=system)
+    path = Path(build_as_zip(builder))
+    return imp_meta.PathDistribution(path)
+
+
+parser = argparse.ArgumentParser()
+parser.add_argument(
+    'source_dir',
+    help="A directory containing pyproject.toml",
+)
+parser.add_argument(
+    '--out-dir', '-o',
+    help="Destination in which to save the builds relative to source dir",
+)
+
+
+def main():
+    args = parser.parse_args()
+    build(args.source_dir, args.out_dir)
+
+
+if __name__ == '__main__':
+    main()
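load() above builds a project's metadata in a temporary environment and wraps the result as an importlib.metadata Distribution. A small usage sketch, assuming the vendored import path; the project path is a placeholder:

# Sketch only: reading metadata for an unbuilt project.
from pip._vendor.pep517.meta import load

dist = load('/path/to/project')  # hypothetical source directory
print(dist.metadata['Name'], dist.version)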
venv/lib/python3.10/site-packages/pip/_vendor/pep517/wrappers.py
ADDED
@@ -0,0 +1,375 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
import threading
from contextlib import contextmanager
import os
from os.path import abspath, join as pjoin
import shutil
from subprocess import check_call, check_output, STDOUT
import sys
from tempfile import mkdtemp

from . import compat
from .in_process import _in_proc_script_path

__all__ = [
    'BackendUnavailable',
    'BackendInvalid',
    'HookMissing',
    'UnsupportedOperation',
    'default_subprocess_runner',
    'quiet_subprocess_runner',
    'Pep517HookCaller',
]


@contextmanager
def tempdir():
    td = mkdtemp()
    try:
        yield td
    finally:
        shutil.rmtree(td)


class BackendUnavailable(Exception):
    """Will be raised if the backend cannot be imported in the hook process."""
    def __init__(self, traceback):
        self.traceback = traceback


class BackendInvalid(Exception):
    """Will be raised if the backend is invalid."""
    def __init__(self, backend_name, backend_path, message):
        self.backend_name = backend_name
        self.backend_path = backend_path
        self.message = message


class HookMissing(Exception):
    """Will be raised on missing hooks."""
    def __init__(self, hook_name):
        super(HookMissing, self).__init__(hook_name)
        self.hook_name = hook_name


class UnsupportedOperation(Exception):
    """May be raised by build_sdist if the backend indicates that it can't."""
    def __init__(self, traceback):
        self.traceback = traceback


def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
    """The default method of calling the wrapper subprocess."""
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)

    check_call(cmd, cwd=cwd, env=env)


def quiet_subprocess_runner(cmd, cwd=None, extra_environ=None):
    """A method of calling the wrapper subprocess while suppressing output."""
    env = os.environ.copy()
    if extra_environ:
        env.update(extra_environ)

    check_output(cmd, cwd=cwd, env=env, stderr=STDOUT)


def norm_and_check(source_tree, requested):
    """Normalise and check a backend path.

    Ensure that the requested backend path is specified as a relative path,
    and resolves to a location under the given source tree.

    Return an absolute version of the requested path.
    """
    if os.path.isabs(requested):
        raise ValueError("paths must be relative")

    abs_source = os.path.abspath(source_tree)
    abs_requested = os.path.normpath(os.path.join(abs_source, requested))
    # We have to use commonprefix for Python 2.7 compatibility. So we
    # normalise case to avoid problems because commonprefix is a character
    # based comparison :-(
    norm_source = os.path.normcase(abs_source)
    norm_requested = os.path.normcase(abs_requested)
    if os.path.commonprefix([norm_source, norm_requested]) != norm_source:
        raise ValueError("paths must be inside source tree")

    return abs_requested


class Pep517HookCaller(object):
    """A wrapper around a source directory to be built with a PEP 517 backend.

    :param source_dir: The path to the source directory, containing
        pyproject.toml.
    :param build_backend: The build backend spec, as per PEP 517, from
        pyproject.toml.
    :param backend_path: The backend path, as per PEP 517, from pyproject.toml.
    :param runner: A callable that invokes the wrapper subprocess.
    :param python_executable: The Python executable used to invoke the backend

    The 'runner', if provided, must expect the following:

    - cmd: a list of strings representing the command and arguments to
      execute, as would be passed to e.g. 'subprocess.check_call'.
    - cwd: a string representing the working directory that must be
      used for the subprocess. Corresponds to the provided source_dir.
    - extra_environ: a dict mapping environment variable names to values
      which must be set for the subprocess execution.
    """
    def __init__(
            self,
            source_dir,
            build_backend,
            backend_path=None,
            runner=None,
            python_executable=None,
    ):
        if runner is None:
            runner = default_subprocess_runner

        self.source_dir = abspath(source_dir)
        self.build_backend = build_backend
        if backend_path:
            backend_path = [
                norm_and_check(self.source_dir, p) for p in backend_path
            ]
        self.backend_path = backend_path
        self._subprocess_runner = runner
        if not python_executable:
            python_executable = sys.executable
        self.python_executable = python_executable

    @contextmanager
    def subprocess_runner(self, runner):
        """A context manager for temporarily overriding the default subprocess
        runner.
        """
        prev = self._subprocess_runner
        self._subprocess_runner = runner
        try:
            yield
        finally:
            self._subprocess_runner = prev

    def _supported_features(self):
        """Return the list of optional features supported by the backend."""
        return self._call_hook('_supported_features', {})

    def get_requires_for_build_wheel(self, config_settings=None):
        """Identify packages required for building a wheel

        Returns a list of dependency specifications, e.g.::

            ["wheel >= 0.25", "setuptools"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_wheel', {
            'config_settings': config_settings
        })

    def prepare_metadata_for_build_wheel(
            self, metadata_directory, config_settings=None,
            _allow_fallback=True):
        """Prepare a ``*.dist-info`` folder with metadata for this project.

        Returns the name of the newly created folder.

        If the build backend defines a hook with this name, it will be called
        in a subprocess. If not, the backend will be asked to build a wheel,
        and the dist-info extracted from that (unless _allow_fallback is
        False).
        """
        return self._call_hook('prepare_metadata_for_build_wheel', {
            'metadata_directory': abspath(metadata_directory),
            'config_settings': config_settings,
            '_allow_fallback': _allow_fallback,
        })

    def build_wheel(
            self, wheel_directory, config_settings=None,
            metadata_directory=None):
        """Build a wheel from this project.

        Returns the name of the newly created file.

        In general, this will call the 'build_wheel' hook in the backend.
        However, if that was previously called by
        'prepare_metadata_for_build_wheel', and the same metadata_directory is
        used, the previously built wheel will be copied to wheel_directory.
        """
        if metadata_directory is not None:
            metadata_directory = abspath(metadata_directory)
        return self._call_hook('build_wheel', {
            'wheel_directory': abspath(wheel_directory),
            'config_settings': config_settings,
            'metadata_directory': metadata_directory,
        })

    def get_requires_for_build_editable(self, config_settings=None):
        """Identify packages required for building an editable wheel

        Returns a list of dependency specifications, e.g.::

            ["wheel >= 0.25", "setuptools"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_editable', {
            'config_settings': config_settings
        })

    def prepare_metadata_for_build_editable(
            self, metadata_directory, config_settings=None,
            _allow_fallback=True):
        """Prepare a ``*.dist-info`` folder with metadata for this project.

        Returns the name of the newly created folder.

        If the build backend defines a hook with this name, it will be called
        in a subprocess. If not, the backend will be asked to build an editable
        wheel, and the dist-info extracted from that (unless _allow_fallback is
        False).
        """
        return self._call_hook('prepare_metadata_for_build_editable', {
            'metadata_directory': abspath(metadata_directory),
            'config_settings': config_settings,
            '_allow_fallback': _allow_fallback,
        })

    def build_editable(
            self, wheel_directory, config_settings=None,
            metadata_directory=None):
        """Build an editable wheel from this project.

        Returns the name of the newly created file.

        In general, this will call the 'build_editable' hook in the backend.
        However, if that was previously called by
        'prepare_metadata_for_build_editable', and the same metadata_directory
        is used, the previously built wheel will be copied to wheel_directory.
        """
        if metadata_directory is not None:
            metadata_directory = abspath(metadata_directory)
        return self._call_hook('build_editable', {
            'wheel_directory': abspath(wheel_directory),
            'config_settings': config_settings,
            'metadata_directory': metadata_directory,
        })

    def get_requires_for_build_sdist(self, config_settings=None):
        """Identify packages required for building a wheel

        Returns a list of dependency specifications, e.g.::

            ["setuptools >= 26"]

        This does not include requirements specified in pyproject.toml.
        It returns the result of calling the equivalently named hook in a
        subprocess.
        """
        return self._call_hook('get_requires_for_build_sdist', {
            'config_settings': config_settings
        })

    def build_sdist(self, sdist_directory, config_settings=None):
        """Build an sdist from this project.

        Returns the name of the newly created file.

        This calls the 'build_sdist' backend hook in a subprocess.
        """
        return self._call_hook('build_sdist', {
            'sdist_directory': abspath(sdist_directory),
            'config_settings': config_settings,
        })

    def _call_hook(self, hook_name, kwargs):
        # On Python 2, pytoml returns Unicode values (which is correct) but the
        # environment passed to check_call needs to contain string values. We
        # convert here by encoding using ASCII (the backend can only contain
        # letters, digits and _, . and : characters, and will be used as a
        # Python identifier, so non-ASCII content is wrong on Python 2 in
        # any case).
        # For backend_path, we use sys.getfilesystemencoding.
        if sys.version_info[0] == 2:
            build_backend = self.build_backend.encode('ASCII')
        else:
            build_backend = self.build_backend
        extra_environ = {'PEP517_BUILD_BACKEND': build_backend}

        if self.backend_path:
            backend_path = os.pathsep.join(self.backend_path)
            if sys.version_info[0] == 2:
                backend_path = backend_path.encode(sys.getfilesystemencoding())
            extra_environ['PEP517_BACKEND_PATH'] = backend_path

        with tempdir() as td:
            hook_input = {'kwargs': kwargs}
            compat.write_json(hook_input, pjoin(td, 'input.json'),
                              indent=2)

            # Run the hook in a subprocess
            with _in_proc_script_path() as script:
                python = self.python_executable
                self._subprocess_runner(
                    [python, abspath(str(script)), hook_name, td],
                    cwd=self.source_dir,
                    extra_environ=extra_environ
                )

            data = compat.read_json(pjoin(td, 'output.json'))
            if data.get('unsupported'):
                raise UnsupportedOperation(data.get('traceback', ''))
            if data.get('no_backend'):
                raise BackendUnavailable(data.get('traceback', ''))
            if data.get('backend_invalid'):
                raise BackendInvalid(
                    backend_name=self.build_backend,
                    backend_path=self.backend_path,
                    message=data.get('backend_error', '')
                )
            if data.get('hook_missing'):
                raise HookMissing(data.get('missing_hook_name') or hook_name)
            return data['return_val']


class LoggerWrapper(threading.Thread):
    """
    Read messages from a pipe and redirect them
    to a logger (see python's logging module).
    """

    def __init__(self, logger, level):
        threading.Thread.__init__(self)
        self.daemon = True

        self.logger = logger
        self.level = level

        # create the pipe and reader
        self.fd_read, self.fd_write = os.pipe()
        self.reader = os.fdopen(self.fd_read)

        self.start()

    def fileno(self):
        return self.fd_write

    @staticmethod
    def remove_newline(msg):
        return msg[:-1] if msg.endswith(os.linesep) else msg

    def run(self):
        for line in self.reader:
            self._write(self.remove_newline(line))

    def _write(self, message):
        self.logger.log(self.level, message)
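Not part of the vendored file: a minimal sketch of how the Pep517HookCaller wrapper above is typically driven, assuming a project directory whose pyproject.toml names setuptools.build_meta as its backend.

import tempfile
from pip._vendor.pep517.wrappers import Pep517HookCaller, quiet_subprocess_runner

hooks = Pep517HookCaller('path/to/project', 'setuptools.build_meta')

with tempfile.TemporaryDirectory() as out:
    # Temporarily silence backend output while the hook subprocess runs.
    with hooks.subprocess_runner(quiet_subprocess_runner):
        reqs = hooks.get_requires_for_build_wheel()   # e.g. ['wheel']
        wheel_name = hooks.build_wheel(out)           # filename created inside `out`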
venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py
ADDED
The diff for this file is too large to render.
venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (99.9 kB).
venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc
ADDED
Binary file (656 Bytes).
venv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/py31compat.py
ADDED
@@ -0,0 +1,23 @@
import os
import errno
import sys

from pip._vendor import six


def _makedirs_31(path, exist_ok=False):
    try:
        os.makedirs(path)
    except OSError as exc:
        if not exist_ok or exc.errno != errno.EEXIST:
            raise


# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
needs_makedirs = (
    six.PY2 or
    (3, 4) <= sys.version_info < (3, 4, 1)
)
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
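Not part of the diff: the shim above simply selects a makedirs that tolerates pre-existing directories on old interpreters (on Python 3.10 it resolves to os.makedirs). A short illustration; the path is a placeholder.

from pip._vendor.pkg_resources import py31compat

py31compat.makedirs('/tmp/example/cache', exist_ok=True)  # creates the directory
py31compat.makedirs('/tmp/example/cache', exist_ok=True)  # second call is a no-op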
venv/lib/python3.10/site-packages/pip/_vendor/requests/__init__.py
ADDED
@@ -0,0 +1,154 @@
# -*- coding: utf-8 -*-

#   __
#  /__)  _  _     _   _ _/   _
# / (   (- (/ (/ (- _)  /  _)
#          /

"""
Requests HTTP Library
~~~~~~~~~~~~~~~~~~~~~

Requests is an HTTP library, written in Python, for human beings.
Basic GET usage:

   >>> import requests
   >>> r = requests.get('https://www.python.org')
   >>> r.status_code
   200
   >>> b'Python is a programming language' in r.content
   True

... or POST:

   >>> payload = dict(key1='value1', key2='value2')
   >>> r = requests.post('https://httpbin.org/post', data=payload)
   >>> print(r.text)
   {
     ...
     "form": {
       "key1": "value1",
       "key2": "value2"
     },
     ...
   }

The other HTTP methods are supported - see `requests.api`. Full documentation
is at <https://requests.readthedocs.io>.

:copyright: (c) 2017 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details.
"""

from pip._vendor import urllib3
import warnings
from .exceptions import RequestsDependencyWarning

charset_normalizer_version = None

try:
    from pip._vendor.chardet import __version__ as chardet_version
except ImportError:
    chardet_version = None


def check_compatibility(urllib3_version, chardet_version, charset_normalizer_version):
    urllib3_version = urllib3_version.split('.')
    assert urllib3_version != ['dev']  # Verify urllib3 isn't installed from git.

    # Sometimes, urllib3 only reports its version as 16.1.
    if len(urllib3_version) == 2:
        urllib3_version.append('0')

    # Check urllib3 for compatibility.
    major, minor, patch = urllib3_version  # noqa: F811
    major, minor, patch = int(major), int(minor), int(patch)
    # urllib3 >= 1.21.1, <= 1.26
    assert major == 1
    assert minor >= 21
    assert minor <= 26

    # Check charset_normalizer for compatibility.
    if chardet_version:
        major, minor, patch = chardet_version.split('.')[:3]
        major, minor, patch = int(major), int(minor), int(patch)
        # chardet_version >= 3.0.2, < 5.0.0
        assert (3, 0, 2) <= (major, minor, patch) < (5, 0, 0)
    elif charset_normalizer_version:
        major, minor, patch = charset_normalizer_version.split('.')[:3]
        major, minor, patch = int(major), int(minor), int(patch)
        # charset_normalizer >= 2.0.0 < 3.0.0
        assert (2, 0, 0) <= (major, minor, patch) < (3, 0, 0)
    else:
        raise Exception("You need either charset_normalizer or chardet installed")


def _check_cryptography(cryptography_version):
    # cryptography < 1.3.4
    try:
        cryptography_version = list(map(int, cryptography_version.split('.')))
    except ValueError:
        return

    if cryptography_version < [1, 3, 4]:
        warning = 'Old version of cryptography ({}) may cause slowdown.'.format(cryptography_version)
        warnings.warn(warning, RequestsDependencyWarning)


# Check imported dependencies for compatibility.
try:
    check_compatibility(urllib3.__version__, chardet_version, charset_normalizer_version)
except (AssertionError, ValueError):
    warnings.warn("urllib3 ({}) or chardet ({})/charset_normalizer ({}) doesn't match a supported "
                  "version!".format(urllib3.__version__, chardet_version, charset_normalizer_version),
                  RequestsDependencyWarning)

# Attempt to enable urllib3's fallback for SNI support
# if the standard library doesn't support SNI or the
# 'ssl' library isn't available.
try:
    # Note: This logic prevents upgrading cryptography on Windows, if imported
    #       as part of pip.
    from pip._internal.utils.compat import WINDOWS
    if not WINDOWS:
        raise ImportError("pip internals: don't import cryptography on Windows")
    try:
        import ssl
    except ImportError:
        ssl = None

    if not getattr(ssl, "HAS_SNI", False):
        from pip._vendor.urllib3.contrib import pyopenssl
        pyopenssl.inject_into_urllib3()

    # Check cryptography version
    from cryptography import __version__ as cryptography_version
    _check_cryptography(cryptography_version)
except ImportError:
    pass

# urllib3's DependencyWarnings should be silenced.
from pip._vendor.urllib3.exceptions import DependencyWarning
warnings.simplefilter('ignore', DependencyWarning)

from .__version__ import __title__, __description__, __url__, __version__
from .__version__ import __build__, __author__, __author_email__, __license__
from .__version__ import __copyright__, __cake__

from . import utils
from . import packages
from .models import Request, Response, PreparedRequest
from .api import request, get, head, post, patch, put, delete, options
from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
    RequestException, Timeout, URLRequired,
    TooManyRedirects, HTTPError, ConnectionError,
    FileModeWarning, ConnectTimeout, ReadTimeout, JSONDecodeError
)

# Set default logging handler to avoid "No handler found" warnings.
import logging
from logging import NullHandler

logging.getLogger(__name__).addHandler(NullHandler())

# FileModeWarnings go off per the default.
warnings.simplefilter('default', FileModeWarning, append=True)
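Not part of the vendored file: check_compatibility above only inspects version strings, so its behaviour can be exercised directly; the version numbers below are made-up inputs.

from pip._vendor.requests import check_compatibility

check_compatibility('1.26.8', '4.0.0', None)      # passes: urllib3 1.26.x with chardet 4.x
try:
    check_compatibility('2.0.0', None, '2.0.10')  # fails: major version 2 is rejected
except AssertionError:
    print('unsupported urllib3 version')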
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (4.03 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/__version__.cpython-310.pyc
ADDED
Binary file (551 Bytes).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/_internal_utils.cpython-310.pyc
ADDED
Binary file (1.3 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/adapters.cpython-310.pyc
ADDED
Binary file (17 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/api.cpython-310.pyc
ADDED
Binary file (6.65 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/auth.cpython-310.pyc
ADDED
Binary file (8.09 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/certs.cpython-310.pyc
ADDED
Binary file (635 Bytes).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (1.67 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/cookies.cpython-310.pyc
ADDED
Binary file (18.7 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/exceptions.cpython-310.pyc
ADDED
Binary file (5.24 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/help.cpython-310.pyc
ADDED
Binary file (2.9 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/hooks.cpython-310.pyc
ADDED
Binary file (990 Bytes).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/models.cpython-310.pyc
ADDED
Binary file (24.3 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/packages.cpython-310.pyc
ADDED
Binary file (504 Bytes).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/sessions.cpython-310.pyc
ADDED
Binary file (19.6 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/status_codes.cpython-310.pyc
ADDED
Binary file (4.67 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/structures.cpython-310.pyc
ADDED
Binary file (4.45 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__pycache__/utils.cpython-310.pyc
ADDED
Binary file (24.4 kB).
venv/lib/python3.10/site-packages/pip/_vendor/requests/__version__.py
ADDED
@@ -0,0 +1,14 @@
# .-. .-. .-. . . .-. .-. .-. .-.
# |(  |-  |.| | | |-  `-.  |  `-.
# ' ' `-' `-`.`-' `-' `-'  '  `-'

__title__ = 'requests'
__description__ = 'Python HTTP for Humans.'
__url__ = 'https://requests.readthedocs.io'
__version__ = '2.27.1'
__build__ = 0x022701
__author__ = 'Kenneth Reitz'
__author_email__ = '[email protected]'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2022 Kenneth Reitz'
__cake__ = u'\u2728 \U0001f370 \u2728'
venv/lib/python3.10/site-packages/pip/_vendor/requests/api.py
ADDED
@@ -0,0 +1,159 @@
# -*- coding: utf-8 -*-

"""
requests.api
~~~~~~~~~~~~

This module implements the Requests API.

:copyright: (c) 2012 by Kenneth Reitz.
:license: Apache2, see LICENSE for more details.
"""

from . import sessions


def request(method, url, **kwargs):
    """Constructs and sends a :class:`Request <Request>`.

    :param method: method for the new :class:`Request` object: ``GET``, ``OPTIONS``, ``HEAD``, ``POST``, ``PUT``, ``PATCH``, or ``DELETE``.
    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) A JSON serializable Python object to send in the body of the :class:`Request`.
    :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
    :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
    :param files: (optional) Dictionary of ``'name': file-like-objects`` (or ``{'name': file-tuple}``) for multipart encoding upload.
        ``file-tuple`` can be a 2-tuple ``('filename', fileobj)``, 3-tuple ``('filename', fileobj, 'content_type')``
        or a 4-tuple ``('filename', fileobj, 'content_type', custom_headers)``, where ``'content-type'`` is a string
        defining the content type of the given file and ``custom_headers`` a dict-like object containing additional headers
        to add for the file.
    :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
    :param timeout: (optional) How many seconds to wait for the server to send data
        before giving up, as a float, or a :ref:`(connect timeout, read
        timeout) <timeouts>` tuple.
    :type timeout: float or tuple
    :param allow_redirects: (optional) Boolean. Enable/disable GET/OPTIONS/POST/PUT/PATCH/DELETE/HEAD redirection. Defaults to ``True``.
    :type allow_redirects: bool
    :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
    :param verify: (optional) Either a boolean, in which case it controls whether we verify
        the server's TLS certificate, or a string, in which case it must be a path
        to a CA bundle to use. Defaults to ``True``.
    :param stream: (optional) if ``False``, the response content will be immediately downloaded.
    :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response

    Usage::

      >>> import requests
      >>> req = requests.request('GET', 'https://httpbin.org/get')
      >>> req
      <Response [200]>
    """

    # By using the 'with' statement we are sure the session is closed, thus we
    # avoid leaving sockets open which can trigger a ResourceWarning in some
    # cases, and look like a memory leak in others.
    with sessions.Session() as session:
        return session.request(method=method, url=url, **kwargs)


def get(url, params=None, **kwargs):
    r"""Sends a GET request.

    :param url: URL for the new :class:`Request` object.
    :param params: (optional) Dictionary, list of tuples or bytes to send
        in the query string for the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('get', url, params=params, **kwargs)


def options(url, **kwargs):
    r"""Sends an OPTIONS request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('options', url, **kwargs)


def head(url, **kwargs):
    r"""Sends a HEAD request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes. If
        `allow_redirects` is not provided, it will be set to `False` (as
        opposed to the default :meth:`request` behavior).
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    kwargs.setdefault('allow_redirects', False)
    return request('head', url, **kwargs)


def post(url, data=None, json=None, **kwargs):
    r"""Sends a POST request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('post', url, data=data, json=json, **kwargs)


def put(url, data=None, **kwargs):
    r"""Sends a PUT request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('put', url, data=data, **kwargs)


def patch(url, data=None, **kwargs):
    r"""Sends a PATCH request.

    :param url: URL for the new :class:`Request` object.
    :param data: (optional) Dictionary, list of tuples, bytes, or file-like
        object to send in the body of the :class:`Request`.
    :param json: (optional) json data to send in the body of the :class:`Request`.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('patch', url, data=data, **kwargs)


def delete(url, **kwargs):
    r"""Sends a DELETE request.

    :param url: URL for the new :class:`Request` object.
    :param \*\*kwargs: Optional arguments that ``request`` takes.
    :return: :class:`Response <Response>` object
    :rtype: requests.Response
    """

    return request('delete', url, **kwargs)
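Not part of the vendored file: a brief sketch of the module-level helpers defined above, using the same httpbin host that appears in the docstrings.

from pip._vendor import requests

r = requests.get('https://httpbin.org/get', params={'q': 'pip'}, timeout=10)
print(r.status_code)        # 200
print(r.json()['args'])     # {'q': 'pip'}

r = requests.post('https://httpbin.org/post', json={'key': 'value'}, timeout=10)
print(r.ok)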
venv/lib/python3.10/site-packages/pip/_vendor/requests/auth.py
ADDED
@@ -0,0 +1,305 @@
# -*- coding: utf-8 -*-

"""
requests.auth
~~~~~~~~~~~~~

This module contains the authentication handlers for Requests.
"""

import os
import re
import time
import hashlib
import threading
import warnings

from base64 import b64encode

from .compat import urlparse, str, basestring
from .cookies import extract_cookies_to_jar
from ._internal_utils import to_native_string
from .utils import parse_dict_header

CONTENT_TYPE_FORM_URLENCODED = 'application/x-www-form-urlencoded'
CONTENT_TYPE_MULTI_PART = 'multipart/form-data'


def _basic_auth_str(username, password):
    """Returns a Basic Auth string."""

    # "I want us to put a big-ol' comment on top of it that
    # says that this behaviour is dumb but we need to preserve
    # it because people are relying on it."
    #    - Lukasa
    #
    # These are here solely to maintain backwards compatibility
    # for things like ints. This will be removed in 3.0.0.
    if not isinstance(username, basestring):
        warnings.warn(
            "Non-string usernames will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(username),
            category=DeprecationWarning,
        )
        username = str(username)

    if not isinstance(password, basestring):
        warnings.warn(
            "Non-string passwords will no longer be supported in Requests "
            "3.0.0. Please convert the object you've passed in ({!r}) to "
            "a string or bytes object in the near future to avoid "
            "problems.".format(type(password)),
            category=DeprecationWarning,
        )
        password = str(password)
    # -- End Removal --

    if isinstance(username, str):
        username = username.encode('latin1')

    if isinstance(password, str):
        password = password.encode('latin1')

    authstr = 'Basic ' + to_native_string(
        b64encode(b':'.join((username, password))).strip()
    )

    return authstr


class AuthBase(object):
    """Base class that all auth implementations derive from"""

    def __call__(self, r):
        raise NotImplementedError('Auth hooks must be callable.')


class HTTPBasicAuth(AuthBase):
    """Attaches HTTP Basic Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password

    def __eq__(self, other):
        return all([
            self.username == getattr(other, 'username', None),
            self.password == getattr(other, 'password', None)
        ])

    def __ne__(self, other):
        return not self == other

    def __call__(self, r):
        r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
        return r


class HTTPProxyAuth(HTTPBasicAuth):
    """Attaches HTTP Proxy Authentication to a given Request object."""

    def __call__(self, r):
        r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
        return r


class HTTPDigestAuth(AuthBase):
    """Attaches HTTP Digest Authentication to the given Request object."""

    def __init__(self, username, password):
        self.username = username
        self.password = password
        # Keep state in per-thread local storage
        self._thread_local = threading.local()

    def init_per_thread_state(self):
        # Ensure state is initialized just once per-thread
        if not hasattr(self._thread_local, 'init'):
            self._thread_local.init = True
            self._thread_local.last_nonce = ''
            self._thread_local.nonce_count = 0
            self._thread_local.chal = {}
            self._thread_local.pos = None
            self._thread_local.num_401_calls = None

    def build_digest_header(self, method, url):
        """
        :rtype: str
        """

        realm = self._thread_local.chal['realm']
        nonce = self._thread_local.chal['nonce']
        qop = self._thread_local.chal.get('qop')
        algorithm = self._thread_local.chal.get('algorithm')
        opaque = self._thread_local.chal.get('opaque')
        hash_utf8 = None

        if algorithm is None:
            _algorithm = 'MD5'
        else:
            _algorithm = algorithm.upper()
        # lambdas assume digest modules are imported at the top level
        if _algorithm == 'MD5' or _algorithm == 'MD5-SESS':
            def md5_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.md5(x).hexdigest()
            hash_utf8 = md5_utf8
        elif _algorithm == 'SHA':
            def sha_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha1(x).hexdigest()
            hash_utf8 = sha_utf8
        elif _algorithm == 'SHA-256':
            def sha256_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha256(x).hexdigest()
            hash_utf8 = sha256_utf8
        elif _algorithm == 'SHA-512':
            def sha512_utf8(x):
                if isinstance(x, str):
                    x = x.encode('utf-8')
                return hashlib.sha512(x).hexdigest()
            hash_utf8 = sha512_utf8

        KD = lambda s, d: hash_utf8("%s:%s" % (s, d))

        if hash_utf8 is None:
            return None

        # XXX not implemented yet
        entdig = None
        p_parsed = urlparse(url)
        #: path is request-uri defined in RFC 2616 which should not be empty
        path = p_parsed.path or "/"
        if p_parsed.query:
            path += '?' + p_parsed.query

        A1 = '%s:%s:%s' % (self.username, realm, self.password)
        A2 = '%s:%s' % (method, path)

        HA1 = hash_utf8(A1)
        HA2 = hash_utf8(A2)

        if nonce == self._thread_local.last_nonce:
            self._thread_local.nonce_count += 1
        else:
            self._thread_local.nonce_count = 1
        ncvalue = '%08x' % self._thread_local.nonce_count
        s = str(self._thread_local.nonce_count).encode('utf-8')
        s += nonce.encode('utf-8')
        s += time.ctime().encode('utf-8')
        s += os.urandom(8)

        cnonce = (hashlib.sha1(s).hexdigest()[:16])
        if _algorithm == 'MD5-SESS':
            HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))

        if not qop:
            respdig = KD(HA1, "%s:%s" % (nonce, HA2))
        elif qop == 'auth' or 'auth' in qop.split(','):
            noncebit = "%s:%s:%s:%s:%s" % (
                nonce, ncvalue, cnonce, 'auth', HA2
            )
            respdig = KD(HA1, noncebit)
        else:
            # XXX handle auth-int.
            return None

        self._thread_local.last_nonce = nonce

        # XXX should the partial digests be encoded too?
        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (self.username, realm, nonce, path, respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if algorithm:
            base += ', algorithm="%s"' % algorithm
        if entdig:
            base += ', digest="%s"' % entdig
        if qop:
            base += ', qop="auth", nc=%s, cnonce="%s"' % (ncvalue, cnonce)

        return 'Digest %s' % (base)

    def handle_redirect(self, r, **kwargs):
        """Reset num_401_calls counter on redirects."""
        if r.is_redirect:
            self._thread_local.num_401_calls = 1

    def handle_401(self, r, **kwargs):
        """
        Takes the given response and tries digest-auth, if needed.

        :rtype: requests.Response
        """

        # If response is not 4xx, do not auth
        # See https://github.com/psf/requests/issues/3772
        if not 400 <= r.status_code < 500:
            self._thread_local.num_401_calls = 1
            return r

        if self._thread_local.pos is not None:
            # Rewind the file position indicator of the body to where
            # it was to resend the request.
            r.request.body.seek(self._thread_local.pos)
        s_auth = r.headers.get('www-authenticate', '')

        if 'digest' in s_auth.lower() and self._thread_local.num_401_calls < 2:

            self._thread_local.num_401_calls += 1
            pat = re.compile(r'digest ', flags=re.IGNORECASE)
            self._thread_local.chal = parse_dict_header(pat.sub('', s_auth, count=1))

            # Consume content and release the original connection
            # to allow our new request to reuse the same one.
            r.content
            r.close()
            prep = r.request.copy()
            extract_cookies_to_jar(prep._cookies, r.request, r.raw)
            prep.prepare_cookies(prep._cookies)

            prep.headers['Authorization'] = self.build_digest_header(
                prep.method, prep.url)
            _r = r.connection.send(prep, **kwargs)
            _r.history.append(r)
            _r.request = prep

            return _r

        self._thread_local.num_401_calls = 1
        return r

    def __call__(self, r):
        # Initialize per-thread state, if needed
        self.init_per_thread_state()
        # If we have a saved nonce, skip the 401
        if self._thread_local.last_nonce:
            r.headers['Authorization'] = self.build_digest_header(r.method, r.url)
        try:
            self._thread_local.pos = r.body.tell()
        except AttributeError:
            # In the case of HTTPDigestAuth being reused and the body of
            # the previous request was a file-like object, pos has the
            # file position of the previous body. Ensure it's set to
            # None.
            self._thread_local.pos = None
        r.register_hook('response', self.handle_401)
        r.register_hook('response', self.handle_redirect)
        self._thread_local.num_401_calls = 1

        return r

    def __eq__(self, other):
        return all([
            self.username == getattr(other, 'username', None),
            self.password == getattr(other, 'password', None)
        ])

    def __ne__(self, other):
        return not self == other
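Not part of the vendored file: both auth classes above are used by passing an instance via the auth= keyword; the basic handler attaches the Authorization header up front in __call__, while the digest handler reacts to the 401 challenge in handle_401. Credentials and URLs below are placeholders.

from pip._vendor import requests
from pip._vendor.requests.auth import HTTPBasicAuth, HTTPDigestAuth

# Basic auth: header is computed by _basic_auth_str and attached immediately.
r = requests.get('https://httpbin.org/basic-auth/user/pass',
                 auth=HTTPBasicAuth('user', 'pass'))

# Digest auth: the first 401 response is replayed with a Digest header.
r = requests.get('https://httpbin.org/digest-auth/auth/user/pass',
                 auth=HTTPDigestAuth('user', 'pass'))
print(r.status_code)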
venv/lib/python3.10/site-packages/pip/_vendor/requests/compat.py
ADDED
@@ -0,0 +1,77 @@
# -*- coding: utf-8 -*-

"""
requests.compat
~~~~~~~~~~~~~~~

This module handles import compatibility issues between Python 2 and
Python 3.
"""

from pip._vendor import chardet

import sys

# -------
# Pythons
# -------

# Syntax sugar.
_ver = sys.version_info

#: Python 2.x?
is_py2 = (_ver[0] == 2)

#: Python 3.x?
is_py3 = (_ver[0] == 3)

# Note: We've patched out simplejson support in pip because it prevents
#       upgrading simplejson on Windows.
# try:
#     import simplejson as json
# except (ImportError, SyntaxError):
#     # simplejson does not support Python 3.2, it throws a SyntaxError
#     # because of u'...' Unicode literals.
import json

# ---------
# Specifics
# ---------

if is_py2:
    from urllib import (
        quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
        proxy_bypass, proxy_bypass_environment, getproxies_environment)
    from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
    from urllib2 import parse_http_list
    import cookielib
    from Cookie import Morsel
    from StringIO import StringIO
    # Keep OrderedDict for backwards compatibility.
    from collections import Callable, Mapping, MutableMapping, OrderedDict

    builtin_str = str
    bytes = str
    str = unicode
    basestring = basestring
    numeric_types = (int, long, float)
    integer_types = (int, long)
    JSONDecodeError = ValueError

elif is_py3:
    from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
    from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
    from http import cookiejar as cookielib
    from http.cookies import Morsel
    from io import StringIO
    # Keep OrderedDict for backwards compatibility.
    from collections import OrderedDict
    from collections.abc import Callable, Mapping, MutableMapping
    from json import JSONDecodeError

    builtin_str = str
    str = str
    bytes = bytes
    basestring = (str, bytes)
    numeric_types = (int, float)
    integer_types = (int,)