applied-ai-018 commited on
Commit
438aed8
·
verified ·
1 Parent(s): 0811477

Add files using upload-large-folder tool

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. ckpts/universal/global_step80/zero/13.input_layernorm.weight/exp_avg_sq.pt +3 -0
  2. ckpts/universal/global_step80/zero/14.attention.dense.weight/exp_avg.pt +3 -0
  3. ckpts/universal/global_step80/zero/14.attention.dense.weight/exp_avg_sq.pt +3 -0
  4. ckpts/universal/global_step80/zero/14.attention.dense.weight/fp32.pt +3 -0
  5. ckpts/universal/global_step80/zero/7.attention.dense.weight/fp32.pt +3 -0
  6. venv/lib/python3.10/site-packages/pip/_internal/cli/__init__.py +4 -0
  7. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  8. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc +0 -0
  9. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc +0 -0
  10. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc +0 -0
  11. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc +0 -0
  12. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc +0 -0
  13. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc +0 -0
  14. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc +0 -0
  15. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc +0 -0
  16. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc +0 -0
  17. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc +0 -0
  18. venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc +0 -0
  19. venv/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py +171 -0
  20. venv/lib/python3.10/site-packages/pip/_internal/cli/base_command.py +220 -0
  21. venv/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py +1018 -0
  22. venv/lib/python3.10/site-packages/pip/_internal/cli/command_context.py +27 -0
  23. venv/lib/python3.10/site-packages/pip/_internal/cli/main.py +70 -0
  24. venv/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py +87 -0
  25. venv/lib/python3.10/site-packages/pip/_internal/cli/parser.py +292 -0
  26. venv/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py +321 -0
  27. venv/lib/python3.10/site-packages/pip/_internal/cli/req_command.py +506 -0
  28. venv/lib/python3.10/site-packages/pip/_internal/cli/spinners.py +157 -0
  29. venv/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py +6 -0
  30. venv/lib/python3.10/site-packages/pip/_internal/index/__init__.py +2 -0
  31. venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc +0 -0
  32. venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc +0 -0
  33. venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc +0 -0
  34. venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc +0 -0
  35. venv/lib/python3.10/site-packages/pip/_internal/index/collector.py +648 -0
  36. venv/lib/python3.10/site-packages/pip/_internal/index/package_finder.py +1004 -0
  37. venv/lib/python3.10/site-packages/pip/_internal/index/sources.py +224 -0
  38. venv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc +0 -0
  39. venv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc +0 -0
  40. venv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc +0 -0
  41. venv/lib/python3.10/site-packages/pip/_internal/network/cache.py +69 -0
  42. venv/lib/python3.10/site-packages/pip/_internal/network/lazy_wheel.py +210 -0
  43. venv/lib/python3.10/site-packages/pip/_internal/network/session.py +454 -0
  44. venv/lib/python3.10/site-packages/pip/_internal/operations/__init__.py +0 -0
  45. venv/lib/python3.10/site-packages/pip/_internal/operations/check.py +149 -0
  46. venv/lib/python3.10/site-packages/pip/_internal/operations/freeze.py +254 -0
  47. venv/lib/python3.10/site-packages/pip/_internal/operations/prepare.py +642 -0
  48. venv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  49. venv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc +0 -0
  50. venv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc +0 -0
ckpts/universal/global_step80/zero/13.input_layernorm.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9f32d6040e0c3afda460864c25cc0e3e92c7d50b996daea8497e1ce65cf56583
3
+ size 9387
ckpts/universal/global_step80/zero/14.attention.dense.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6f1aaff802c4060eb19735f60385faae6728438578ba55ca238ac45120791aa4
3
+ size 16778396
ckpts/universal/global_step80/zero/14.attention.dense.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:fdbe289d2b88096f8151aec68da59e8eedba50e341f15a7bf056a22853141955
3
+ size 16778411
ckpts/universal/global_step80/zero/14.attention.dense.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf1b704c32e5ec35fa2b5344a631505518f3b524488cc698b5516e8e0ba266e4
3
+ size 16778317
ckpts/universal/global_step80/zero/7.attention.dense.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e637a752a0b7d016bf23c6289e04c7389a5bb735bdb80ea4df7646ccbe29a253
3
+ size 16778317
venv/lib/python3.10/site-packages/pip/_internal/cli/__init__.py ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ """Subpackage containing all of pip's command line interface related code
2
+ """
3
+
4
+ # This file intentionally does not import submodules
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (268 Bytes). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc ADDED
Binary file (5.3 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc ADDED
Binary file (6.24 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc ADDED
Binary file (22.5 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc ADDED
Binary file (1.3 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc ADDED
Binary file (1.37 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc ADDED
Binary file (2.15 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc ADDED
Binary file (9.94 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc ADDED
Binary file (9.23 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc ADDED
Binary file (13.5 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc ADDED
Binary file (4.94 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc ADDED
Binary file (347 Bytes). View file
 
venv/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py ADDED
@@ -0,0 +1,171 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Logic that powers autocompletion installed by ``pip completion``.
2
+ """
3
+
4
+ import optparse
5
+ import os
6
+ import sys
7
+ from itertools import chain
8
+ from typing import Any, Iterable, List, Optional
9
+
10
+ from pip._internal.cli.main_parser import create_main_parser
11
+ from pip._internal.commands import commands_dict, create_command
12
+ from pip._internal.metadata import get_default_environment
13
+
14
+
15
+ def autocomplete() -> None:
16
+ """Entry Point for completion of main and subcommand options."""
17
+ # Don't complete if user hasn't sourced bash_completion file.
18
+ if "PIP_AUTO_COMPLETE" not in os.environ:
19
+ return
20
+ cwords = os.environ["COMP_WORDS"].split()[1:]
21
+ cword = int(os.environ["COMP_CWORD"])
22
+ try:
23
+ current = cwords[cword - 1]
24
+ except IndexError:
25
+ current = ""
26
+
27
+ parser = create_main_parser()
28
+ subcommands = list(commands_dict)
29
+ options = []
30
+
31
+ # subcommand
32
+ subcommand_name: Optional[str] = None
33
+ for word in cwords:
34
+ if word in subcommands:
35
+ subcommand_name = word
36
+ break
37
+ # subcommand options
38
+ if subcommand_name is not None:
39
+ # special case: 'help' subcommand has no options
40
+ if subcommand_name == "help":
41
+ sys.exit(1)
42
+ # special case: list locally installed dists for show and uninstall
43
+ should_list_installed = not current.startswith("-") and subcommand_name in [
44
+ "show",
45
+ "uninstall",
46
+ ]
47
+ if should_list_installed:
48
+ env = get_default_environment()
49
+ lc = current.lower()
50
+ installed = [
51
+ dist.canonical_name
52
+ for dist in env.iter_installed_distributions(local_only=True)
53
+ if dist.canonical_name.startswith(lc)
54
+ and dist.canonical_name not in cwords[1:]
55
+ ]
56
+ # if there are no dists installed, fall back to option completion
57
+ if installed:
58
+ for dist in installed:
59
+ print(dist)
60
+ sys.exit(1)
61
+
62
+ should_list_installables = (
63
+ not current.startswith("-") and subcommand_name == "install"
64
+ )
65
+ if should_list_installables:
66
+ for path in auto_complete_paths(current, "path"):
67
+ print(path)
68
+ sys.exit(1)
69
+
70
+ subcommand = create_command(subcommand_name)
71
+
72
+ for opt in subcommand.parser.option_list_all:
73
+ if opt.help != optparse.SUPPRESS_HELP:
74
+ for opt_str in opt._long_opts + opt._short_opts:
75
+ options.append((opt_str, opt.nargs))
76
+
77
+ # filter out previously specified options from available options
78
+ prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
79
+ options = [(x, v) for (x, v) in options if x not in prev_opts]
80
+ # filter options by current input
81
+ options = [(k, v) for k, v in options if k.startswith(current)]
82
+ # get completion type given cwords and available subcommand options
83
+ completion_type = get_path_completion_type(
84
+ cwords,
85
+ cword,
86
+ subcommand.parser.option_list_all,
87
+ )
88
+ # get completion files and directories if ``completion_type`` is
89
+ # ``<file>``, ``<dir>`` or ``<path>``
90
+ if completion_type:
91
+ paths = auto_complete_paths(current, completion_type)
92
+ options = [(path, 0) for path in paths]
93
+ for option in options:
94
+ opt_label = option[0]
95
+ # append '=' to options which require args
96
+ if option[1] and option[0][:2] == "--":
97
+ opt_label += "="
98
+ print(opt_label)
99
+ else:
100
+ # show main parser options only when necessary
101
+
102
+ opts = [i.option_list for i in parser.option_groups]
103
+ opts.append(parser.option_list)
104
+ flattened_opts = chain.from_iterable(opts)
105
+ if current.startswith("-"):
106
+ for opt in flattened_opts:
107
+ if opt.help != optparse.SUPPRESS_HELP:
108
+ subcommands += opt._long_opts + opt._short_opts
109
+ else:
110
+ # get completion type given cwords and all available options
111
+ completion_type = get_path_completion_type(cwords, cword, flattened_opts)
112
+ if completion_type:
113
+ subcommands = list(auto_complete_paths(current, completion_type))
114
+
115
+ print(" ".join([x for x in subcommands if x.startswith(current)]))
116
+ sys.exit(1)
117
+
118
+
119
+ def get_path_completion_type(
120
+ cwords: List[str], cword: int, opts: Iterable[Any]
121
+ ) -> Optional[str]:
122
+ """Get the type of path completion (``file``, ``dir``, ``path`` or None)
123
+
124
+ :param cwords: same as the environmental variable ``COMP_WORDS``
125
+ :param cword: same as the environmental variable ``COMP_CWORD``
126
+ :param opts: The available options to check
127
+ :return: path completion type (``file``, ``dir``, ``path`` or None)
128
+ """
129
+ if cword < 2 or not cwords[cword - 2].startswith("-"):
130
+ return None
131
+ for opt in opts:
132
+ if opt.help == optparse.SUPPRESS_HELP:
133
+ continue
134
+ for o in str(opt).split("/"):
135
+ if cwords[cword - 2].split("=")[0] == o:
136
+ if not opt.metavar or any(
137
+ x in ("path", "file", "dir") for x in opt.metavar.split("/")
138
+ ):
139
+ return opt.metavar
140
+ return None
141
+
142
+
143
+ def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
144
+ """If ``completion_type`` is ``file`` or ``path``, list all regular files
145
+ and directories starting with ``current``; otherwise only list directories
146
+ starting with ``current``.
147
+
148
+ :param current: The word to be completed
149
+ :param completion_type: path completion type(``file``, ``path`` or ``dir``)
150
+ :return: A generator of regular files and/or directories
151
+ """
152
+ directory, filename = os.path.split(current)
153
+ current_path = os.path.abspath(directory)
154
+ # Don't complete paths if they can't be accessed
155
+ if not os.access(current_path, os.R_OK):
156
+ return
157
+ filename = os.path.normcase(filename)
158
+ # list all files that start with ``filename``
159
+ file_list = (
160
+ x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
161
+ )
162
+ for f in file_list:
163
+ opt = os.path.join(current_path, f)
164
+ comp_file = os.path.normcase(os.path.join(directory, f))
165
+ # complete regular files when there is not ``<dir>`` after option
166
+ # complete directories when there is ``<file>``, ``<path>`` or
167
+ # ``<dir>``after option
168
+ if completion_type != "dir" and os.path.isfile(opt):
169
+ yield comp_file
170
+ elif os.path.isdir(opt):
171
+ yield os.path.join(comp_file, "")
venv/lib/python3.10/site-packages/pip/_internal/cli/base_command.py ADDED
@@ -0,0 +1,220 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Base Command class, and related routines"""
2
+
3
+ import functools
4
+ import logging
5
+ import logging.config
6
+ import optparse
7
+ import os
8
+ import sys
9
+ import traceback
10
+ from optparse import Values
11
+ from typing import Any, Callable, List, Optional, Tuple
12
+
13
+ from pip._internal.cli import cmdoptions
14
+ from pip._internal.cli.command_context import CommandContextMixIn
15
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
16
+ from pip._internal.cli.status_codes import (
17
+ ERROR,
18
+ PREVIOUS_BUILD_DIR_ERROR,
19
+ UNKNOWN_ERROR,
20
+ VIRTUALENV_NOT_FOUND,
21
+ )
22
+ from pip._internal.exceptions import (
23
+ BadCommand,
24
+ CommandError,
25
+ DiagnosticPipError,
26
+ InstallationError,
27
+ NetworkConnectionError,
28
+ PreviousBuildDirError,
29
+ UninstallationError,
30
+ )
31
+ from pip._internal.utils.filesystem import check_path_owner
32
+ from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
33
+ from pip._internal.utils.misc import get_prog, normalize_path
34
+ from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
35
+ from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
36
+ from pip._internal.utils.virtualenv import running_under_virtualenv
37
+
38
+ __all__ = ["Command"]
39
+
40
+ logger = logging.getLogger(__name__)
41
+
42
+
43
+ class Command(CommandContextMixIn):
44
+ usage: str = ""
45
+ ignore_require_venv: bool = False
46
+
47
+ def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
48
+ super().__init__()
49
+
50
+ self.name = name
51
+ self.summary = summary
52
+ self.parser = ConfigOptionParser(
53
+ usage=self.usage,
54
+ prog=f"{get_prog()} {name}",
55
+ formatter=UpdatingDefaultsHelpFormatter(),
56
+ add_help_option=False,
57
+ name=name,
58
+ description=self.__doc__,
59
+ isolated=isolated,
60
+ )
61
+
62
+ self.tempdir_registry: Optional[TempDirRegistry] = None
63
+
64
+ # Commands should add options to this option group
65
+ optgroup_name = f"{self.name.capitalize()} Options"
66
+ self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
67
+
68
+ # Add the general options
69
+ gen_opts = cmdoptions.make_option_group(
70
+ cmdoptions.general_group,
71
+ self.parser,
72
+ )
73
+ self.parser.add_option_group(gen_opts)
74
+
75
+ self.add_options()
76
+
77
+ def add_options(self) -> None:
78
+ pass
79
+
80
+ def handle_pip_version_check(self, options: Values) -> None:
81
+ """
82
+ This is a no-op so that commands by default do not do the pip version
83
+ check.
84
+ """
85
+ # Make sure we do the pip version check if the index_group options
86
+ # are present.
87
+ assert not hasattr(options, "no_index")
88
+
89
+ def run(self, options: Values, args: List[str]) -> int:
90
+ raise NotImplementedError
91
+
92
+ def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
93
+ # factored out for testability
94
+ return self.parser.parse_args(args)
95
+
96
+ def main(self, args: List[str]) -> int:
97
+ try:
98
+ with self.main_context():
99
+ return self._main(args)
100
+ finally:
101
+ logging.shutdown()
102
+
103
+ def _main(self, args: List[str]) -> int:
104
+ # We must initialize this before the tempdir manager, otherwise the
105
+ # configuration would not be accessible by the time we clean up the
106
+ # tempdir manager.
107
+ self.tempdir_registry = self.enter_context(tempdir_registry())
108
+ # Intentionally set as early as possible so globally-managed temporary
109
+ # directories are available to the rest of the code.
110
+ self.enter_context(global_tempdir_manager())
111
+
112
+ options, args = self.parse_args(args)
113
+
114
+ # Set verbosity so that it can be used elsewhere.
115
+ self.verbosity = options.verbose - options.quiet
116
+
117
+ level_number = setup_logging(
118
+ verbosity=self.verbosity,
119
+ no_color=options.no_color,
120
+ user_log_file=options.log,
121
+ )
122
+
123
+ # TODO: Try to get these passing down from the command?
124
+ # without resorting to os.environ to hold these.
125
+ # This also affects isolated builds and it should.
126
+
127
+ if options.no_input:
128
+ os.environ["PIP_NO_INPUT"] = "1"
129
+
130
+ if options.exists_action:
131
+ os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
132
+
133
+ if options.require_venv and not self.ignore_require_venv:
134
+ # If a venv is required check if it can really be found
135
+ if not running_under_virtualenv():
136
+ logger.critical("Could not find an activated virtualenv (required).")
137
+ sys.exit(VIRTUALENV_NOT_FOUND)
138
+
139
+ if options.cache_dir:
140
+ options.cache_dir = normalize_path(options.cache_dir)
141
+ if not check_path_owner(options.cache_dir):
142
+ logger.warning(
143
+ "The directory '%s' or its parent directory is not owned "
144
+ "or is not writable by the current user. The cache "
145
+ "has been disabled. Check the permissions and owner of "
146
+ "that directory. If executing pip with sudo, you should "
147
+ "use sudo's -H flag.",
148
+ options.cache_dir,
149
+ )
150
+ options.cache_dir = None
151
+
152
+ if "2020-resolver" in options.features_enabled:
153
+ logger.warning(
154
+ "--use-feature=2020-resolver no longer has any effect, "
155
+ "since it is now the default dependency resolver in pip. "
156
+ "This will become an error in pip 21.0."
157
+ )
158
+
159
+ def intercepts_unhandled_exc(
160
+ run_func: Callable[..., int]
161
+ ) -> Callable[..., int]:
162
+ @functools.wraps(run_func)
163
+ def exc_logging_wrapper(*args: Any) -> int:
164
+ try:
165
+ status = run_func(*args)
166
+ assert isinstance(status, int)
167
+ return status
168
+ except DiagnosticPipError as exc:
169
+ logger.error("[present-diagnostic] %s", exc)
170
+ logger.debug("Exception information:", exc_info=True)
171
+
172
+ return ERROR
173
+ except PreviousBuildDirError as exc:
174
+ logger.critical(str(exc))
175
+ logger.debug("Exception information:", exc_info=True)
176
+
177
+ return PREVIOUS_BUILD_DIR_ERROR
178
+ except (
179
+ InstallationError,
180
+ UninstallationError,
181
+ BadCommand,
182
+ NetworkConnectionError,
183
+ ) as exc:
184
+ logger.critical(str(exc))
185
+ logger.debug("Exception information:", exc_info=True)
186
+
187
+ return ERROR
188
+ except CommandError as exc:
189
+ logger.critical("%s", exc)
190
+ logger.debug("Exception information:", exc_info=True)
191
+
192
+ return ERROR
193
+ except BrokenStdoutLoggingError:
194
+ # Bypass our logger and write any remaining messages to
195
+ # stderr because stdout no longer works.
196
+ print("ERROR: Pipe to stdout was broken", file=sys.stderr)
197
+ if level_number <= logging.DEBUG:
198
+ traceback.print_exc(file=sys.stderr)
199
+
200
+ return ERROR
201
+ except KeyboardInterrupt:
202
+ logger.critical("Operation cancelled by user")
203
+ logger.debug("Exception information:", exc_info=True)
204
+
205
+ return ERROR
206
+ except BaseException:
207
+ logger.critical("Exception:", exc_info=True)
208
+
209
+ return UNKNOWN_ERROR
210
+
211
+ return exc_logging_wrapper
212
+
213
+ try:
214
+ if not options.debug_mode:
215
+ run = intercepts_unhandled_exc(self.run)
216
+ else:
217
+ run = self.run
218
+ return run(options, args)
219
+ finally:
220
+ self.handle_pip_version_check(options)
venv/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py ADDED
@@ -0,0 +1,1018 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ shared options and groups
3
+
4
+ The principle here is to define options once, but *not* instantiate them
5
+ globally. One reason being that options with action='append' can carry state
6
+ between parses. pip parses general options twice internally, and shouldn't
7
+ pass on state. To be consistent, all options will follow this design.
8
+ """
9
+
10
+ # The following comment should be removed at some point in the future.
11
+ # mypy: strict-optional=False
12
+
13
+ import logging
14
+ import os
15
+ import textwrap
16
+ from functools import partial
17
+ from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
18
+ from textwrap import dedent
19
+ from typing import Any, Callable, Dict, Optional, Tuple
20
+
21
+ from pip._vendor.packaging.utils import canonicalize_name
22
+
23
+ from pip._internal.cli.parser import ConfigOptionParser
24
+ from pip._internal.cli.progress_bars import BAR_TYPES
25
+ from pip._internal.exceptions import CommandError
26
+ from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
27
+ from pip._internal.models.format_control import FormatControl
28
+ from pip._internal.models.index import PyPI
29
+ from pip._internal.models.target_python import TargetPython
30
+ from pip._internal.utils.hashes import STRONG_HASHES
31
+ from pip._internal.utils.misc import strtobool
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
+ def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
37
+ """
38
+ Raise an option parsing error using parser.error().
39
+
40
+ Args:
41
+ parser: an OptionParser instance.
42
+ option: an Option instance.
43
+ msg: the error text.
44
+ """
45
+ msg = f"{option} error: {msg}"
46
+ msg = textwrap.fill(" ".join(msg.split()))
47
+ parser.error(msg)
48
+
49
+
50
+ def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
51
+ """
52
+ Return an OptionGroup object
53
+ group -- assumed to be dict with 'name' and 'options' keys
54
+ parser -- an optparse Parser
55
+ """
56
+ option_group = OptionGroup(parser, group["name"])
57
+ for option in group["options"]:
58
+ option_group.add_option(option())
59
+ return option_group
60
+
61
+
62
+ def check_install_build_global(
63
+ options: Values, check_options: Optional[Values] = None
64
+ ) -> None:
65
+ """Disable wheels if per-setup.py call options are set.
66
+
67
+ :param options: The OptionParser options to update.
68
+ :param check_options: The options to check, if not supplied defaults to
69
+ options.
70
+ """
71
+ if check_options is None:
72
+ check_options = options
73
+
74
+ def getname(n: str) -> Optional[Any]:
75
+ return getattr(check_options, n, None)
76
+
77
+ names = ["build_options", "global_options", "install_options"]
78
+ if any(map(getname, names)):
79
+ control = options.format_control
80
+ control.disallow_binaries()
81
+ logger.warning(
82
+ "Disabling all use of wheels due to the use of --build-option "
83
+ "/ --global-option / --install-option.",
84
+ )
85
+
86
+
87
+ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
88
+ """Function for determining if custom platform options are allowed.
89
+
90
+ :param options: The OptionParser options.
91
+ :param check_target: Whether or not to check if --target is being used.
92
+ """
93
+ dist_restriction_set = any(
94
+ [
95
+ options.python_version,
96
+ options.platforms,
97
+ options.abis,
98
+ options.implementation,
99
+ ]
100
+ )
101
+
102
+ binary_only = FormatControl(set(), {":all:"})
103
+ sdist_dependencies_allowed = (
104
+ options.format_control != binary_only and not options.ignore_dependencies
105
+ )
106
+
107
+ # Installations or downloads using dist restrictions must not combine
108
+ # source distributions and dist-specific wheels, as they are not
109
+ # guaranteed to be locally compatible.
110
+ if dist_restriction_set and sdist_dependencies_allowed:
111
+ raise CommandError(
112
+ "When restricting platform and interpreter constraints using "
113
+ "--python-version, --platform, --abi, or --implementation, "
114
+ "either --no-deps must be set, or --only-binary=:all: must be "
115
+ "set and --no-binary must not be set (or must be set to "
116
+ ":none:)."
117
+ )
118
+
119
+ if check_target:
120
+ if dist_restriction_set and not options.target_dir:
121
+ raise CommandError(
122
+ "Can not use any platform or abi specific options unless "
123
+ "installing via '--target'"
124
+ )
125
+
126
+
127
+ def _path_option_check(option: Option, opt: str, value: str) -> str:
128
+ return os.path.expanduser(value)
129
+
130
+
131
+ def _package_name_option_check(option: Option, opt: str, value: str) -> str:
132
+ return canonicalize_name(value)
133
+
134
+
135
+ class PipOption(Option):
136
+ TYPES = Option.TYPES + ("path", "package_name")
137
+ TYPE_CHECKER = Option.TYPE_CHECKER.copy()
138
+ TYPE_CHECKER["package_name"] = _package_name_option_check
139
+ TYPE_CHECKER["path"] = _path_option_check
140
+
141
+
142
+ ###########
143
+ # options #
144
+ ###########
145
+
146
+ help_: Callable[..., Option] = partial(
147
+ Option,
148
+ "-h",
149
+ "--help",
150
+ dest="help",
151
+ action="help",
152
+ help="Show help.",
153
+ )
154
+
155
+ debug_mode: Callable[..., Option] = partial(
156
+ Option,
157
+ "--debug",
158
+ dest="debug_mode",
159
+ action="store_true",
160
+ default=False,
161
+ help=(
162
+ "Let unhandled exceptions propagate outside the main subroutine, "
163
+ "instead of logging them to stderr."
164
+ ),
165
+ )
166
+
167
+ isolated_mode: Callable[..., Option] = partial(
168
+ Option,
169
+ "--isolated",
170
+ dest="isolated_mode",
171
+ action="store_true",
172
+ default=False,
173
+ help=(
174
+ "Run pip in an isolated mode, ignoring environment variables and user "
175
+ "configuration."
176
+ ),
177
+ )
178
+
179
+ require_virtualenv: Callable[..., Option] = partial(
180
+ Option,
181
+ "--require-virtualenv",
182
+ "--require-venv",
183
+ dest="require_venv",
184
+ action="store_true",
185
+ default=False,
186
+ help=(
187
+ "Allow pip to only run in a virtual environment; "
188
+ "exit with an error otherwise."
189
+ ),
190
+ )
191
+
192
+ verbose: Callable[..., Option] = partial(
193
+ Option,
194
+ "-v",
195
+ "--verbose",
196
+ dest="verbose",
197
+ action="count",
198
+ default=0,
199
+ help="Give more output. Option is additive, and can be used up to 3 times.",
200
+ )
201
+
202
+ no_color: Callable[..., Option] = partial(
203
+ Option,
204
+ "--no-color",
205
+ dest="no_color",
206
+ action="store_true",
207
+ default=False,
208
+ help="Suppress colored output.",
209
+ )
210
+
211
+ version: Callable[..., Option] = partial(
212
+ Option,
213
+ "-V",
214
+ "--version",
215
+ dest="version",
216
+ action="store_true",
217
+ help="Show version and exit.",
218
+ )
219
+
220
+ quiet: Callable[..., Option] = partial(
221
+ Option,
222
+ "-q",
223
+ "--quiet",
224
+ dest="quiet",
225
+ action="count",
226
+ default=0,
227
+ help=(
228
+ "Give less output. Option is additive, and can be used up to 3"
229
+ " times (corresponding to WARNING, ERROR, and CRITICAL logging"
230
+ " levels)."
231
+ ),
232
+ )
233
+
234
+ progress_bar: Callable[..., Option] = partial(
235
+ Option,
236
+ "--progress-bar",
237
+ dest="progress_bar",
238
+ type="choice",
239
+ choices=list(BAR_TYPES.keys()),
240
+ default="on",
241
+ help=(
242
+ "Specify type of progress to be displayed ["
243
+ + "|".join(BAR_TYPES.keys())
244
+ + "] (default: %default)"
245
+ ),
246
+ )
247
+
248
+ log: Callable[..., Option] = partial(
249
+ PipOption,
250
+ "--log",
251
+ "--log-file",
252
+ "--local-log",
253
+ dest="log",
254
+ metavar="path",
255
+ type="path",
256
+ help="Path to a verbose appending log.",
257
+ )
258
+
259
+ no_input: Callable[..., Option] = partial(
260
+ Option,
261
+ # Don't ask for input
262
+ "--no-input",
263
+ dest="no_input",
264
+ action="store_true",
265
+ default=False,
266
+ help="Disable prompting for input.",
267
+ )
268
+
269
+ proxy: Callable[..., Option] = partial(
270
+ Option,
271
+ "--proxy",
272
+ dest="proxy",
273
+ type="str",
274
+ default="",
275
+ help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
276
+ )
277
+
278
+ retries: Callable[..., Option] = partial(
279
+ Option,
280
+ "--retries",
281
+ dest="retries",
282
+ type="int",
283
+ default=5,
284
+ help="Maximum number of retries each connection should attempt "
285
+ "(default %default times).",
286
+ )
287
+
288
+ timeout: Callable[..., Option] = partial(
289
+ Option,
290
+ "--timeout",
291
+ "--default-timeout",
292
+ metavar="sec",
293
+ dest="timeout",
294
+ type="float",
295
+ default=15,
296
+ help="Set the socket timeout (default %default seconds).",
297
+ )
298
+
299
+
300
+ def exists_action() -> Option:
301
+ return Option(
302
+ # Option when path already exist
303
+ "--exists-action",
304
+ dest="exists_action",
305
+ type="choice",
306
+ choices=["s", "i", "w", "b", "a"],
307
+ default=[],
308
+ action="append",
309
+ metavar="action",
310
+ help="Default action when a path already exists: "
311
+ "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
312
+ )
313
+
314
+
315
+ cert: Callable[..., Option] = partial(
316
+ PipOption,
317
+ "--cert",
318
+ dest="cert",
319
+ type="path",
320
+ metavar="path",
321
+ help=(
322
+ "Path to PEM-encoded CA certificate bundle. "
323
+ "If provided, overrides the default. "
324
+ "See 'SSL Certificate Verification' in pip documentation "
325
+ "for more information."
326
+ ),
327
+ )
328
+
329
+ client_cert: Callable[..., Option] = partial(
330
+ PipOption,
331
+ "--client-cert",
332
+ dest="client_cert",
333
+ type="path",
334
+ default=None,
335
+ metavar="path",
336
+ help="Path to SSL client certificate, a single file containing the "
337
+ "private key and the certificate in PEM format.",
338
+ )
339
+
340
+ index_url: Callable[..., Option] = partial(
341
+ Option,
342
+ "-i",
343
+ "--index-url",
344
+ "--pypi-url",
345
+ dest="index_url",
346
+ metavar="URL",
347
+ default=PyPI.simple_url,
348
+ help="Base URL of the Python Package Index (default %default). "
349
+ "This should point to a repository compliant with PEP 503 "
350
+ "(the simple repository API) or a local directory laid out "
351
+ "in the same format.",
352
+ )
353
+
354
+
355
+ def extra_index_url() -> Option:
356
+ return Option(
357
+ "--extra-index-url",
358
+ dest="extra_index_urls",
359
+ metavar="URL",
360
+ action="append",
361
+ default=[],
362
+ help="Extra URLs of package indexes to use in addition to "
363
+ "--index-url. Should follow the same rules as "
364
+ "--index-url.",
365
+ )
366
+
367
+
368
+ no_index: Callable[..., Option] = partial(
369
+ Option,
370
+ "--no-index",
371
+ dest="no_index",
372
+ action="store_true",
373
+ default=False,
374
+ help="Ignore package index (only looking at --find-links URLs instead).",
375
+ )
376
+
377
+
378
+ def find_links() -> Option:
379
+ return Option(
380
+ "-f",
381
+ "--find-links",
382
+ dest="find_links",
383
+ action="append",
384
+ default=[],
385
+ metavar="url",
386
+ help="If a URL or path to an html file, then parse for links to "
387
+ "archives such as sdist (.tar.gz) or wheel (.whl) files. "
388
+ "If a local path or file:// URL that's a directory, "
389
+ "then look for archives in the directory listing. "
390
+ "Links to VCS project URLs are not supported.",
391
+ )
392
+
393
+
394
+ def trusted_host() -> Option:
395
+ return Option(
396
+ "--trusted-host",
397
+ dest="trusted_hosts",
398
+ action="append",
399
+ metavar="HOSTNAME",
400
+ default=[],
401
+ help="Mark this host or host:port pair as trusted, even though it "
402
+ "does not have valid or any HTTPS.",
403
+ )
404
+
405
+
406
+ def constraints() -> Option:
407
+ return Option(
408
+ "-c",
409
+ "--constraint",
410
+ dest="constraints",
411
+ action="append",
412
+ default=[],
413
+ metavar="file",
414
+ help="Constrain versions using the given constraints file. "
415
+ "This option can be used multiple times.",
416
+ )
417
+
418
+
419
+ def requirements() -> Option:
420
+ return Option(
421
+ "-r",
422
+ "--requirement",
423
+ dest="requirements",
424
+ action="append",
425
+ default=[],
426
+ metavar="file",
427
+ help="Install from the given requirements file. "
428
+ "This option can be used multiple times.",
429
+ )
430
+
431
+
432
+ def editable() -> Option:
433
+ return Option(
434
+ "-e",
435
+ "--editable",
436
+ dest="editables",
437
+ action="append",
438
+ default=[],
439
+ metavar="path/url",
440
+ help=(
441
+ "Install a project in editable mode (i.e. setuptools "
442
+ '"develop mode") from a local project path or a VCS url.'
443
+ ),
444
+ )
445
+
446
+
447
+ def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
448
+ value = os.path.abspath(value)
449
+ setattr(parser.values, option.dest, value)
450
+
451
+
452
+ src: Callable[..., Option] = partial(
453
+ PipOption,
454
+ "--src",
455
+ "--source",
456
+ "--source-dir",
457
+ "--source-directory",
458
+ dest="src_dir",
459
+ type="path",
460
+ metavar="dir",
461
+ default=get_src_prefix(),
462
+ action="callback",
463
+ callback=_handle_src,
464
+ help="Directory to check out editable projects into. "
465
+ 'The default in a virtualenv is "<venv path>/src". '
466
+ 'The default for global installs is "<current dir>/src".',
467
+ )
468
+
469
+
470
+ def _get_format_control(values: Values, option: Option) -> Any:
471
+ """Get a format_control object."""
472
+ return getattr(values, option.dest)
473
+
474
+
475
+ def _handle_no_binary(
476
+ option: Option, opt_str: str, value: str, parser: OptionParser
477
+ ) -> None:
478
+ existing = _get_format_control(parser.values, option)
479
+ FormatControl.handle_mutual_excludes(
480
+ value,
481
+ existing.no_binary,
482
+ existing.only_binary,
483
+ )
484
+
485
+
486
+ def _handle_only_binary(
487
+ option: Option, opt_str: str, value: str, parser: OptionParser
488
+ ) -> None:
489
+ existing = _get_format_control(parser.values, option)
490
+ FormatControl.handle_mutual_excludes(
491
+ value,
492
+ existing.only_binary,
493
+ existing.no_binary,
494
+ )
495
+
496
+
497
+ def no_binary() -> Option:
498
+ format_control = FormatControl(set(), set())
499
+ return Option(
500
+ "--no-binary",
501
+ dest="format_control",
502
+ action="callback",
503
+ callback=_handle_no_binary,
504
+ type="str",
505
+ default=format_control,
506
+ help="Do not use binary packages. Can be supplied multiple times, and "
507
+ 'each time adds to the existing value. Accepts either ":all:" to '
508
+ 'disable all binary packages, ":none:" to empty the set (notice '
509
+ "the colons), or one or more package names with commas between "
510
+ "them (no colons). Note that some packages are tricky to compile "
511
+ "and may fail to install when this option is used on them.",
512
+ )
513
+
514
+
515
+ def only_binary() -> Option:
516
+ format_control = FormatControl(set(), set())
517
+ return Option(
518
+ "--only-binary",
519
+ dest="format_control",
520
+ action="callback",
521
+ callback=_handle_only_binary,
522
+ type="str",
523
+ default=format_control,
524
+ help="Do not use source packages. Can be supplied multiple times, and "
525
+ 'each time adds to the existing value. Accepts either ":all:" to '
526
+ 'disable all source packages, ":none:" to empty the set, or one '
527
+ "or more package names with commas between them. Packages "
528
+ "without binary distributions will fail to install when this "
529
+ "option is used on them.",
530
+ )
531
+
532
+
533
+ platforms: Callable[..., Option] = partial(
534
+ Option,
535
+ "--platform",
536
+ dest="platforms",
537
+ metavar="platform",
538
+ action="append",
539
+ default=None,
540
+ help=(
541
+ "Only use wheels compatible with <platform>. Defaults to the "
542
+ "platform of the running system. Use this option multiple times to "
543
+ "specify multiple platforms supported by the target interpreter."
544
+ ),
545
+ )
546
+
547
+
548
+ # This was made a separate function for unit-testing purposes.
549
+ def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
550
+ """
551
+ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
552
+
553
+ :return: A 2-tuple (version_info, error_msg), where `error_msg` is
554
+ non-None if and only if there was a parsing error.
555
+ """
556
+ if not value:
557
+ # The empty string is the same as not providing a value.
558
+ return (None, None)
559
+
560
+ parts = value.split(".")
561
+ if len(parts) > 3:
562
+ return ((), "at most three version parts are allowed")
563
+
564
+ if len(parts) == 1:
565
+ # Then we are in the case of "3" or "37".
566
+ value = parts[0]
567
+ if len(value) > 1:
568
+ parts = [value[0], value[1:]]
569
+
570
+ try:
571
+ version_info = tuple(int(part) for part in parts)
572
+ except ValueError:
573
+ return ((), "each version part must be an integer")
574
+
575
+ return (version_info, None)
576
+
577
+
578
+ def _handle_python_version(
579
+ option: Option, opt_str: str, value: str, parser: OptionParser
580
+ ) -> None:
581
+ """
582
+ Handle a provided --python-version value.
583
+ """
584
+ version_info, error_msg = _convert_python_version(value)
585
+ if error_msg is not None:
586
+ msg = "invalid --python-version value: {!r}: {}".format(
587
+ value,
588
+ error_msg,
589
+ )
590
+ raise_option_error(parser, option=option, msg=msg)
591
+
592
+ parser.values.python_version = version_info
593
+
594
+
595
+ python_version: Callable[..., Option] = partial(
596
+ Option,
597
+ "--python-version",
598
+ dest="python_version",
599
+ metavar="python_version",
600
+ action="callback",
601
+ callback=_handle_python_version,
602
+ type="str",
603
+ default=None,
604
+ help=dedent(
605
+ """\
606
+ The Python interpreter version to use for wheel and "Requires-Python"
607
+ compatibility checks. Defaults to a version derived from the running
608
+ interpreter. The version can be specified using up to three dot-separated
609
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
610
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
611
+ """
612
+ ),
613
+ )
614
+
615
+
616
+ implementation: Callable[..., Option] = partial(
617
+ Option,
618
+ "--implementation",
619
+ dest="implementation",
620
+ metavar="implementation",
621
+ default=None,
622
+ help=(
623
+ "Only use wheels compatible with Python "
624
+ "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
625
+ " or 'ip'. If not specified, then the current "
626
+ "interpreter implementation is used. Use 'py' to force "
627
+ "implementation-agnostic wheels."
628
+ ),
629
+ )
630
+
631
+
632
+ abis: Callable[..., Option] = partial(
633
+ Option,
634
+ "--abi",
635
+ dest="abis",
636
+ metavar="abi",
637
+ action="append",
638
+ default=None,
639
+ help=(
640
+ "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
641
+ "If not specified, then the current interpreter abi tag is used. "
642
+ "Use this option multiple times to specify multiple abis supported "
643
+ "by the target interpreter. Generally you will need to specify "
644
+ "--implementation, --platform, and --python-version when using this "
645
+ "option."
646
+ ),
647
+ )
648
+
649
+
650
+ def add_target_python_options(cmd_opts: OptionGroup) -> None:
651
+ cmd_opts.add_option(platforms())
652
+ cmd_opts.add_option(python_version())
653
+ cmd_opts.add_option(implementation())
654
+ cmd_opts.add_option(abis())
655
+
656
+
657
+ def make_target_python(options: Values) -> TargetPython:
658
+ target_python = TargetPython(
659
+ platforms=options.platforms,
660
+ py_version_info=options.python_version,
661
+ abis=options.abis,
662
+ implementation=options.implementation,
663
+ )
664
+
665
+ return target_python
666
+
667
+
668
+ def prefer_binary() -> Option:
669
+ return Option(
670
+ "--prefer-binary",
671
+ dest="prefer_binary",
672
+ action="store_true",
673
+ default=False,
674
+ help="Prefer older binary packages over newer source packages.",
675
+ )
676
+
677
+
678
+ cache_dir: Callable[..., Option] = partial(
679
+ PipOption,
680
+ "--cache-dir",
681
+ dest="cache_dir",
682
+ default=USER_CACHE_DIR,
683
+ metavar="dir",
684
+ type="path",
685
+ help="Store the cache data in <dir>.",
686
+ )
687
+
688
+
689
+ def _handle_no_cache_dir(
690
+ option: Option, opt: str, value: str, parser: OptionParser
691
+ ) -> None:
692
+ """
693
+ Process a value provided for the --no-cache-dir option.
694
+
695
+ This is an optparse.Option callback for the --no-cache-dir option.
696
+ """
697
+ # The value argument will be None if --no-cache-dir is passed via the
698
+ # command-line, since the option doesn't accept arguments. However,
699
+ # the value can be non-None if the option is triggered e.g. by an
700
+ # environment variable, like PIP_NO_CACHE_DIR=true.
701
+ if value is not None:
702
+ # Then parse the string value to get argument error-checking.
703
+ try:
704
+ strtobool(value)
705
+ except ValueError as exc:
706
+ raise_option_error(parser, option=option, msg=str(exc))
707
+
708
+ # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
709
+ # converted to 0 (like "false" or "no") caused cache_dir to be disabled
710
+ # rather than enabled (logic would say the latter). Thus, we disable
711
+ # the cache directory not just on values that parse to True, but (for
712
+ # backwards compatibility reasons) also on values that parse to False.
713
+ # In other words, always set it to False if the option is provided in
714
+ # some (valid) form.
715
+ parser.values.cache_dir = False
716
+
717
+
718
+ no_cache: Callable[..., Option] = partial(
719
+ Option,
720
+ "--no-cache-dir",
721
+ dest="cache_dir",
722
+ action="callback",
723
+ callback=_handle_no_cache_dir,
724
+ help="Disable the cache.",
725
+ )
726
+
727
+ no_deps: Callable[..., Option] = partial(
728
+ Option,
729
+ "--no-deps",
730
+ "--no-dependencies",
731
+ dest="ignore_dependencies",
732
+ action="store_true",
733
+ default=False,
734
+ help="Don't install package dependencies.",
735
+ )
736
+
737
+ ignore_requires_python: Callable[..., Option] = partial(
738
+ Option,
739
+ "--ignore-requires-python",
740
+ dest="ignore_requires_python",
741
+ action="store_true",
742
+ help="Ignore the Requires-Python information.",
743
+ )
744
+
745
+ no_build_isolation: Callable[..., Option] = partial(
746
+ Option,
747
+ "--no-build-isolation",
748
+ dest="build_isolation",
749
+ action="store_false",
750
+ default=True,
751
+ help="Disable isolation when building a modern source distribution. "
752
+ "Build dependencies specified by PEP 518 must be already installed "
753
+ "if this option is used.",
754
+ )
755
+
756
+
757
+ def _handle_no_use_pep517(
758
+ option: Option, opt: str, value: str, parser: OptionParser
759
+ ) -> None:
760
+ """
761
+ Process a value provided for the --no-use-pep517 option.
762
+
763
+ This is an optparse.Option callback for the no_use_pep517 option.
764
+ """
765
+ # Since --no-use-pep517 doesn't accept arguments, the value argument
766
+ # will be None if --no-use-pep517 is passed via the command-line.
767
+ # However, the value can be non-None if the option is triggered e.g.
768
+ # by an environment variable, for example "PIP_NO_USE_PEP517=true".
769
+ if value is not None:
770
+ msg = """A value was passed for --no-use-pep517,
771
+ probably using either the PIP_NO_USE_PEP517 environment variable
772
+ or the "no-use-pep517" config file option. Use an appropriate value
773
+ of the PIP_USE_PEP517 environment variable or the "use-pep517"
774
+ config file option instead.
775
+ """
776
+ raise_option_error(parser, option=option, msg=msg)
777
+
778
+ # Otherwise, --no-use-pep517 was passed via the command-line.
779
+ parser.values.use_pep517 = False
780
+
781
+
782
+ use_pep517: Any = partial(
783
+ Option,
784
+ "--use-pep517",
785
+ dest="use_pep517",
786
+ action="store_true",
787
+ default=None,
788
+ help="Use PEP 517 for building source distributions "
789
+ "(use --no-use-pep517 to force legacy behaviour).",
790
+ )
791
+
792
+ no_use_pep517: Any = partial(
793
+ Option,
794
+ "--no-use-pep517",
795
+ dest="use_pep517",
796
+ action="callback",
797
+ callback=_handle_no_use_pep517,
798
+ default=None,
799
+ help=SUPPRESS_HELP,
800
+ )
801
+
802
+ install_options: Callable[..., Option] = partial(
803
+ Option,
804
+ "--install-option",
805
+ dest="install_options",
806
+ action="append",
807
+ metavar="options",
808
+ help="Extra arguments to be supplied to the setup.py install "
809
+ 'command (use like --install-option="--install-scripts=/usr/local/'
810
+ 'bin"). Use multiple --install-option options to pass multiple '
811
+ "options to setup.py install. If you are using an option with a "
812
+ "directory path, be sure to use absolute path.",
813
+ )
814
+
815
+ build_options: Callable[..., Option] = partial(
816
+ Option,
817
+ "--build-option",
818
+ dest="build_options",
819
+ metavar="options",
820
+ action="append",
821
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
822
+ )
823
+
824
+ global_options: Callable[..., Option] = partial(
825
+ Option,
826
+ "--global-option",
827
+ dest="global_options",
828
+ action="append",
829
+ metavar="options",
830
+ help="Extra global options to be supplied to the setup.py "
831
+ "call before the install or bdist_wheel command.",
832
+ )
833
+
834
+ no_clean: Callable[..., Option] = partial(
835
+ Option,
836
+ "--no-clean",
837
+ action="store_true",
838
+ default=False,
839
+ help="Don't clean up build directories.",
840
+ )
841
+
842
+ pre: Callable[..., Option] = partial(
843
+ Option,
844
+ "--pre",
845
+ action="store_true",
846
+ default=False,
847
+ help="Include pre-release and development versions. By default, "
848
+ "pip only finds stable versions.",
849
+ )
850
+
851
+ disable_pip_version_check: Callable[..., Option] = partial(
852
+ Option,
853
+ "--disable-pip-version-check",
854
+ dest="disable_pip_version_check",
855
+ action="store_true",
856
+ default=True,
857
+ help="Don't periodically check PyPI to determine whether a new version "
858
+ "of pip is available for download. Implied with --no-index.",
859
+ )
860
+
861
+
862
+ def _handle_merge_hash(
863
+ option: Option, opt_str: str, value: str, parser: OptionParser
864
+ ) -> None:
865
+ """Given a value spelled "algo:digest", append the digest to a list
866
+ pointed to in a dict by the algo name."""
867
+ if not parser.values.hashes:
868
+ parser.values.hashes = {}
869
+ try:
870
+ algo, digest = value.split(":", 1)
871
+ except ValueError:
872
+ parser.error(
873
+ "Arguments to {} must be a hash name " # noqa
874
+ "followed by a value, like --hash=sha256:"
875
+ "abcde...".format(opt_str)
876
+ )
877
+ if algo not in STRONG_HASHES:
878
+ parser.error(
879
+ "Allowed hash algorithms for {} are {}.".format( # noqa
880
+ opt_str, ", ".join(STRONG_HASHES)
881
+ )
882
+ )
883
+ parser.values.hashes.setdefault(algo, []).append(digest)
884
+
885
+
886
+ hash: Callable[..., Option] = partial(
887
+ Option,
888
+ "--hash",
889
+ # Hash values eventually end up in InstallRequirement.hashes due to
890
+ # __dict__ copying in process_line().
891
+ dest="hashes",
892
+ action="callback",
893
+ callback=_handle_merge_hash,
894
+ type="string",
895
+ help="Verify that the package's archive matches this "
896
+ "hash before installing. Example: --hash=sha256:abcdef...",
897
+ )
898
+
899
+
900
+ require_hashes: Callable[..., Option] = partial(
901
+ Option,
902
+ "--require-hashes",
903
+ dest="require_hashes",
904
+ action="store_true",
905
+ default=False,
906
+ help="Require a hash to check each requirement against, for "
907
+ "repeatable installs. This option is implied when any package in a "
908
+ "requirements file has a --hash option.",
909
+ )
910
+
911
+
912
+ list_path: Callable[..., Option] = partial(
913
+ PipOption,
914
+ "--path",
915
+ dest="path",
916
+ type="path",
917
+ action="append",
918
+ help="Restrict to the specified installation path for listing "
919
+ "packages (can be used multiple times).",
920
+ )
921
+
922
+
923
+ def check_list_path_option(options: Values) -> None:
924
+ if options.path and (options.user or options.local):
925
+ raise CommandError("Cannot combine '--path' with '--user' or '--local'")
926
+
927
+
928
+ list_exclude: Callable[..., Option] = partial(
929
+ PipOption,
930
+ "--exclude",
931
+ dest="excludes",
932
+ action="append",
933
+ metavar="package",
934
+ type="package_name",
935
+ help="Exclude specified package from the output",
936
+ )
937
+
938
+
939
+ no_python_version_warning: Callable[..., Option] = partial(
940
+ Option,
941
+ "--no-python-version-warning",
942
+ dest="no_python_version_warning",
943
+ action="store_true",
944
+ default=False,
945
+ help="Silence deprecation warnings for upcoming unsupported Pythons.",
946
+ )
947
+
948
+
949
+ use_new_feature: Callable[..., Option] = partial(
950
+ Option,
951
+ "--use-feature",
952
+ dest="features_enabled",
953
+ metavar="feature",
954
+ action="append",
955
+ default=[],
956
+ choices=["2020-resolver", "fast-deps", "in-tree-build"],
957
+ help="Enable new functionality, that may be backward incompatible.",
958
+ )
959
+
960
+ use_deprecated_feature: Callable[..., Option] = partial(
961
+ Option,
962
+ "--use-deprecated",
963
+ dest="deprecated_features_enabled",
964
+ metavar="feature",
965
+ action="append",
966
+ default=[],
967
+ choices=[
968
+ "legacy-resolver",
969
+ "out-of-tree-build",
970
+ "backtrack-on-build-failures",
971
+ "html5lib",
972
+ ],
973
+ help=("Enable deprecated functionality, that will be removed in the future."),
974
+ )
975
+
976
+
977
+ ##########
978
+ # groups #
979
+ ##########
980
+
981
+ general_group: Dict[str, Any] = {
982
+ "name": "General Options",
983
+ "options": [
984
+ help_,
985
+ debug_mode,
986
+ isolated_mode,
987
+ require_virtualenv,
988
+ verbose,
989
+ version,
990
+ quiet,
991
+ log,
992
+ no_input,
993
+ proxy,
994
+ retries,
995
+ timeout,
996
+ exists_action,
997
+ trusted_host,
998
+ cert,
999
+ client_cert,
1000
+ cache_dir,
1001
+ no_cache,
1002
+ disable_pip_version_check,
1003
+ no_color,
1004
+ no_python_version_warning,
1005
+ use_new_feature,
1006
+ use_deprecated_feature,
1007
+ ],
1008
+ }
1009
+
1010
+ index_group: Dict[str, Any] = {
1011
+ "name": "Package Index Options",
1012
+ "options": [
1013
+ index_url,
1014
+ extra_index_url,
1015
+ no_index,
1016
+ find_links,
1017
+ ],
1018
+ }
venv/lib/python3.10/site-packages/pip/_internal/cli/command_context.py ADDED
@@ -0,0 +1,27 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from contextlib import ExitStack, contextmanager
2
+ from typing import ContextManager, Iterator, TypeVar
3
+
4
+ _T = TypeVar("_T", covariant=True)
5
+
6
+
7
+ class CommandContextMixIn:
8
+ def __init__(self) -> None:
9
+ super().__init__()
10
+ self._in_main_context = False
11
+ self._main_context = ExitStack()
12
+
13
+ @contextmanager
14
+ def main_context(self) -> Iterator[None]:
15
+ assert not self._in_main_context
16
+
17
+ self._in_main_context = True
18
+ try:
19
+ with self._main_context:
20
+ yield
21
+ finally:
22
+ self._in_main_context = False
23
+
24
+ def enter_context(self, context_provider: ContextManager[_T]) -> _T:
25
+ assert self._in_main_context
26
+
27
+ return self._main_context.enter_context(context_provider)
venv/lib/python3.10/site-packages/pip/_internal/cli/main.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Primary application entrypoint.
2
+ """
3
+ import locale
4
+ import logging
5
+ import os
6
+ import sys
7
+ from typing import List, Optional
8
+
9
+ from pip._internal.cli.autocompletion import autocomplete
10
+ from pip._internal.cli.main_parser import parse_command
11
+ from pip._internal.commands import create_command
12
+ from pip._internal.exceptions import PipError
13
+ from pip._internal.utils import deprecation
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ # Do not import and use main() directly! Using it directly is actively
19
+ # discouraged by pip's maintainers. The name, location and behavior of
20
+ # this function is subject to change, so calling it directly is not
21
+ # portable across different pip versions.
22
+
23
+ # In addition, running pip in-process is unsupported and unsafe. This is
24
+ # elaborated in detail at
25
+ # https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
26
+ # That document also provides suggestions that should work for nearly
27
+ # all users that are considering importing and using main() directly.
28
+
29
+ # However, we know that certain users will still want to invoke pip
30
+ # in-process. If you understand and accept the implications of using pip
31
+ # in an unsupported manner, the best approach is to use runpy to avoid
32
+ # depending on the exact location of this entry point.
33
+
34
+ # The following example shows how to use runpy to invoke pip in that
35
+ # case:
36
+ #
37
+ # sys.argv = ["pip", your, args, here]
38
+ # runpy.run_module("pip", run_name="__main__")
39
+ #
40
+ # Note that this will exit the process after running, unlike a direct
41
+ # call to main. As it is not safe to do any processing after calling
42
+ # main, this should not be an issue in practice.
43
+
44
+
45
+ def main(args: Optional[List[str]] = None) -> int:
46
+ if args is None:
47
+ args = sys.argv[1:]
48
+
49
+ # Configure our deprecation warnings to be sent through loggers
50
+ deprecation.install_warning_logger()
51
+
52
+ autocomplete()
53
+
54
+ try:
55
+ cmd_name, cmd_args = parse_command(args)
56
+ except PipError as exc:
57
+ sys.stderr.write(f"ERROR: {exc}")
58
+ sys.stderr.write(os.linesep)
59
+ sys.exit(1)
60
+
61
+ # Needed for locale.getpreferredencoding(False) to work
62
+ # in pip._internal.utils.encoding.auto_decode
63
+ try:
64
+ locale.setlocale(locale.LC_ALL, "")
65
+ except locale.Error as e:
66
+ # setlocale can apparently crash if locale are uninitialized
67
+ logger.debug("Ignoring error %s when setting locale", e)
68
+ command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
69
+
70
+ return command.main(cmd_args)
venv/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py ADDED
@@ -0,0 +1,87 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """A single place for constructing and exposing the main parser
2
+ """
3
+
4
+ import os
5
+ import sys
6
+ from typing import List, Tuple
7
+
8
+ from pip._internal.cli import cmdoptions
9
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
10
+ from pip._internal.commands import commands_dict, get_similar_commands
11
+ from pip._internal.exceptions import CommandError
12
+ from pip._internal.utils.misc import get_pip_version, get_prog
13
+
14
+ __all__ = ["create_main_parser", "parse_command"]
15
+
16
+
17
+ def create_main_parser() -> ConfigOptionParser:
18
+ """Creates and returns the main parser for pip's CLI"""
19
+
20
+ parser = ConfigOptionParser(
21
+ usage="\n%prog <command> [options]",
22
+ add_help_option=False,
23
+ formatter=UpdatingDefaultsHelpFormatter(),
24
+ name="global",
25
+ prog=get_prog(),
26
+ )
27
+ parser.disable_interspersed_args()
28
+
29
+ parser.version = get_pip_version()
30
+
31
+ # add the general options
32
+ gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
33
+ parser.add_option_group(gen_opts)
34
+
35
+ # so the help formatter knows
36
+ parser.main = True # type: ignore
37
+
38
+ # create command listing for description
39
+ description = [""] + [
40
+ f"{name:27} {command_info.summary}"
41
+ for name, command_info in commands_dict.items()
42
+ ]
43
+ parser.description = "\n".join(description)
44
+
45
+ return parser
46
+
47
+
48
+ def parse_command(args: List[str]) -> Tuple[str, List[str]]:
49
+ parser = create_main_parser()
50
+
51
+ # Note: parser calls disable_interspersed_args(), so the result of this
52
+ # call is to split the initial args into the general options before the
53
+ # subcommand and everything else.
54
+ # For example:
55
+ # args: ['--timeout=5', 'install', '--user', 'INITools']
56
+ # general_options: ['--timeout==5']
57
+ # args_else: ['install', '--user', 'INITools']
58
+ general_options, args_else = parser.parse_args(args)
59
+
60
+ # --version
61
+ if general_options.version:
62
+ sys.stdout.write(parser.version)
63
+ sys.stdout.write(os.linesep)
64
+ sys.exit()
65
+
66
+ # pip || pip help -> print_help()
67
+ if not args_else or (args_else[0] == "help" and len(args_else) == 1):
68
+ parser.print_help()
69
+ sys.exit()
70
+
71
+ # the subcommand name
72
+ cmd_name = args_else[0]
73
+
74
+ if cmd_name not in commands_dict:
75
+ guess = get_similar_commands(cmd_name)
76
+
77
+ msg = [f'unknown command "{cmd_name}"']
78
+ if guess:
79
+ msg.append(f'maybe you meant "{guess}"')
80
+
81
+ raise CommandError(" - ".join(msg))
82
+
83
+ # all the args without the subcommand
84
+ cmd_args = args[:]
85
+ cmd_args.remove(cmd_name)
86
+
87
+ return cmd_name, cmd_args
venv/lib/python3.10/site-packages/pip/_internal/cli/parser.py ADDED
@@ -0,0 +1,292 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Base option parser setup"""
2
+
3
+ import logging
4
+ import optparse
5
+ import shutil
6
+ import sys
7
+ import textwrap
8
+ from contextlib import suppress
9
+ from typing import Any, Dict, Iterator, List, Tuple
10
+
11
+ from pip._internal.cli.status_codes import UNKNOWN_ERROR
12
+ from pip._internal.configuration import Configuration, ConfigurationError
13
+ from pip._internal.utils.misc import redact_auth_from_url, strtobool
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
19
+ """A prettier/less verbose help formatter for optparse."""
20
+
21
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
22
+ # help position must be aligned with __init__.parseopts.description
23
+ kwargs["max_help_position"] = 30
24
+ kwargs["indent_increment"] = 1
25
+ kwargs["width"] = shutil.get_terminal_size()[0] - 2
26
+ super().__init__(*args, **kwargs)
27
+
28
+ def format_option_strings(self, option: optparse.Option) -> str:
29
+ return self._format_option_strings(option)
30
+
31
+ def _format_option_strings(
32
+ self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
33
+ ) -> str:
34
+ """
35
+ Return a comma-separated list of option strings and metavars.
36
+
37
+ :param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
38
+ :param mvarfmt: metavar format string
39
+ :param optsep: separator
40
+ """
41
+ opts = []
42
+
43
+ if option._short_opts:
44
+ opts.append(option._short_opts[0])
45
+ if option._long_opts:
46
+ opts.append(option._long_opts[0])
47
+ if len(opts) > 1:
48
+ opts.insert(1, optsep)
49
+
50
+ if option.takes_value():
51
+ assert option.dest is not None
52
+ metavar = option.metavar or option.dest.lower()
53
+ opts.append(mvarfmt.format(metavar.lower()))
54
+
55
+ return "".join(opts)
56
+
57
+ def format_heading(self, heading: str) -> str:
58
+ if heading == "Options":
59
+ return ""
60
+ return heading + ":\n"
61
+
62
+ def format_usage(self, usage: str) -> str:
63
+ """
64
+ Ensure there is only one newline between usage and the first heading
65
+ if there is no description.
66
+ """
67
+ msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
68
+ return msg
69
+
70
+ def format_description(self, description: str) -> str:
71
+ # leave full control over description to us
72
+ if description:
73
+ if hasattr(self.parser, "main"):
74
+ label = "Commands"
75
+ else:
76
+ label = "Description"
77
+ # some doc strings have initial newlines, some don't
78
+ description = description.lstrip("\n")
79
+ # some doc strings have final newlines and spaces, some don't
80
+ description = description.rstrip()
81
+ # dedent, then reindent
82
+ description = self.indent_lines(textwrap.dedent(description), " ")
83
+ description = f"{label}:\n{description}\n"
84
+ return description
85
+ else:
86
+ return ""
87
+
88
+ def format_epilog(self, epilog: str) -> str:
89
+ # leave full control over epilog to us
90
+ if epilog:
91
+ return epilog
92
+ else:
93
+ return ""
94
+
95
+ def indent_lines(self, text: str, indent: str) -> str:
96
+ new_lines = [indent + line for line in text.split("\n")]
97
+ return "\n".join(new_lines)
98
+
99
+
100
+ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
101
+ """Custom help formatter for use in ConfigOptionParser.
102
+
103
+ This is updates the defaults before expanding them, allowing
104
+ them to show up correctly in the help listing.
105
+
106
+ Also redact auth from url type options
107
+ """
108
+
109
+ def expand_default(self, option: optparse.Option) -> str:
110
+ default_values = None
111
+ if self.parser is not None:
112
+ assert isinstance(self.parser, ConfigOptionParser)
113
+ self.parser._update_defaults(self.parser.defaults)
114
+ assert option.dest is not None
115
+ default_values = self.parser.defaults.get(option.dest)
116
+ help_text = super().expand_default(option)
117
+
118
+ if default_values and option.metavar == "URL":
119
+ if isinstance(default_values, str):
120
+ default_values = [default_values]
121
+
122
+ # If its not a list, we should abort and just return the help text
123
+ if not isinstance(default_values, list):
124
+ default_values = []
125
+
126
+ for val in default_values:
127
+ help_text = help_text.replace(val, redact_auth_from_url(val))
128
+
129
+ return help_text
130
+
131
+
132
+ class CustomOptionParser(optparse.OptionParser):
133
+ def insert_option_group(
134
+ self, idx: int, *args: Any, **kwargs: Any
135
+ ) -> optparse.OptionGroup:
136
+ """Insert an OptionGroup at a given position."""
137
+ group = self.add_option_group(*args, **kwargs)
138
+
139
+ self.option_groups.pop()
140
+ self.option_groups.insert(idx, group)
141
+
142
+ return group
143
+
144
+ @property
145
+ def option_list_all(self) -> List[optparse.Option]:
146
+ """Get a list of all options, including those in option groups."""
147
+ res = self.option_list[:]
148
+ for i in self.option_groups:
149
+ res.extend(i.option_list)
150
+
151
+ return res
152
+
153
+
154
+ class ConfigOptionParser(CustomOptionParser):
155
+ """Custom option parser which updates its defaults by checking the
156
+ configuration files and environmental variables"""
157
+
158
+ def __init__(
159
+ self,
160
+ *args: Any,
161
+ name: str,
162
+ isolated: bool = False,
163
+ **kwargs: Any,
164
+ ) -> None:
165
+ self.name = name
166
+ self.config = Configuration(isolated)
167
+
168
+ assert self.name
169
+ super().__init__(*args, **kwargs)
170
+
171
+ def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
172
+ try:
173
+ return option.check_value(key, val)
174
+ except optparse.OptionValueError as exc:
175
+ print(f"An error occurred during configuration: {exc}")
176
+ sys.exit(3)
177
+
178
+ def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]:
179
+ # Configuration gives keys in an unordered manner. Order them.
180
+ override_order = ["global", self.name, ":env:"]
181
+
182
+ # Pool the options into different groups
183
+ section_items: Dict[str, List[Tuple[str, Any]]] = {
184
+ name: [] for name in override_order
185
+ }
186
+ for section_key, val in self.config.items():
187
+ # ignore empty values
188
+ if not val:
189
+ logger.debug(
190
+ "Ignoring configuration key '%s' as it's value is empty.",
191
+ section_key,
192
+ )
193
+ continue
194
+
195
+ section, key = section_key.split(".", 1)
196
+ if section in override_order:
197
+ section_items[section].append((key, val))
198
+
199
+ # Yield each group in their override order
200
+ for section in override_order:
201
+ for key, val in section_items[section]:
202
+ yield key, val
203
+
204
+ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
205
+ """Updates the given defaults with values from the config files and
206
+ the environ. Does a little special handling for certain types of
207
+ options (lists)."""
208
+
209
+ # Accumulate complex default state.
210
+ self.values = optparse.Values(self.defaults)
211
+ late_eval = set()
212
+ # Then set the options with those values
213
+ for key, val in self._get_ordered_configuration_items():
214
+ # '--' because configuration supports only long names
215
+ option = self.get_option("--" + key)
216
+
217
+ # Ignore options not present in this parser. E.g. non-globals put
218
+ # in [global] by users that want them to apply to all applicable
219
+ # commands.
220
+ if option is None:
221
+ continue
222
+
223
+ assert option.dest is not None
224
+
225
+ if option.action in ("store_true", "store_false"):
226
+ try:
227
+ val = strtobool(val)
228
+ except ValueError:
229
+ self.error(
230
+ "{} is not a valid value for {} option, " # noqa
231
+ "please specify a boolean value like yes/no, "
232
+ "true/false or 1/0 instead.".format(val, key)
233
+ )
234
+ elif option.action == "count":
235
+ with suppress(ValueError):
236
+ val = strtobool(val)
237
+ with suppress(ValueError):
238
+ val = int(val)
239
+ if not isinstance(val, int) or val < 0:
240
+ self.error(
241
+ "{} is not a valid value for {} option, " # noqa
242
+ "please instead specify either a non-negative integer "
243
+ "or a boolean value like yes/no or false/true "
244
+ "which is equivalent to 1/0.".format(val, key)
245
+ )
246
+ elif option.action == "append":
247
+ val = val.split()
248
+ val = [self.check_default(option, key, v) for v in val]
249
+ elif option.action == "callback":
250
+ assert option.callback is not None
251
+ late_eval.add(option.dest)
252
+ opt_str = option.get_opt_string()
253
+ val = option.convert_value(opt_str, val)
254
+ # From take_action
255
+ args = option.callback_args or ()
256
+ kwargs = option.callback_kwargs or {}
257
+ option.callback(option, opt_str, val, self, *args, **kwargs)
258
+ else:
259
+ val = self.check_default(option, key, val)
260
+
261
+ defaults[option.dest] = val
262
+
263
+ for key in late_eval:
264
+ defaults[key] = getattr(self.values, key)
265
+ self.values = None
266
+ return defaults
267
+
268
+ def get_default_values(self) -> optparse.Values:
269
+ """Overriding to make updating the defaults after instantiation of
270
+ the option parser possible, _update_defaults() does the dirty work."""
271
+ if not self.process_default_values:
272
+ # Old, pre-Optik 1.5 behaviour.
273
+ return optparse.Values(self.defaults)
274
+
275
+ # Load the configuration, or error out in case of an error
276
+ try:
277
+ self.config.load()
278
+ except ConfigurationError as err:
279
+ self.exit(UNKNOWN_ERROR, str(err))
280
+
281
+ defaults = self._update_defaults(self.defaults.copy()) # ours
282
+ for option in self._get_all_options():
283
+ assert option.dest is not None
284
+ default = defaults.get(option.dest)
285
+ if isinstance(default, str):
286
+ opt_str = option.get_opt_string()
287
+ defaults[option.dest] = option.check_value(opt_str, default)
288
+ return optparse.Values(defaults)
289
+
290
+ def error(self, msg: str) -> None:
291
+ self.print_usage(sys.stderr)
292
+ self.exit(UNKNOWN_ERROR, f"{msg}\n")
venv/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py ADDED
@@ -0,0 +1,321 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import functools
2
+ import itertools
3
+ import sys
4
+ from signal import SIGINT, default_int_handler, signal
5
+ from typing import Any, Callable, Iterator, Optional, Tuple
6
+
7
+ from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
8
+ from pip._vendor.progress.spinner import Spinner
9
+ from pip._vendor.rich.progress import (
10
+ BarColumn,
11
+ DownloadColumn,
12
+ FileSizeColumn,
13
+ Progress,
14
+ ProgressColumn,
15
+ SpinnerColumn,
16
+ TextColumn,
17
+ TimeElapsedColumn,
18
+ TimeRemainingColumn,
19
+ TransferSpeedColumn,
20
+ )
21
+
22
+ from pip._internal.utils.compat import WINDOWS
23
+ from pip._internal.utils.logging import get_indentation
24
+ from pip._internal.utils.misc import format_size
25
+
26
+ try:
27
+ from pip._vendor import colorama
28
+ # Lots of different errors can come from this, including SystemError and
29
+ # ImportError.
30
+ except Exception:
31
+ colorama = None
32
+
33
+ DownloadProgressRenderer = Callable[[Iterator[bytes]], Iterator[bytes]]
34
+
35
+
36
+ def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar:
37
+ encoding = getattr(preferred.file, "encoding", None)
38
+
39
+ # If we don't know what encoding this file is in, then we'll just assume
40
+ # that it doesn't support unicode and use the ASCII bar.
41
+ if not encoding:
42
+ return fallback
43
+
44
+ # Collect all of the possible characters we want to use with the preferred
45
+ # bar.
46
+ characters = [
47
+ getattr(preferred, "empty_fill", ""),
48
+ getattr(preferred, "fill", ""),
49
+ ]
50
+ characters += list(getattr(preferred, "phases", []))
51
+
52
+ # Try to decode the characters we're using for the bar using the encoding
53
+ # of the given file, if this works then we'll assume that we can use the
54
+ # fancier bar and if not we'll fall back to the plaintext bar.
55
+ try:
56
+ "".join(characters).encode(encoding)
57
+ except UnicodeEncodeError:
58
+ return fallback
59
+ else:
60
+ return preferred
61
+
62
+
63
+ _BaseBar: Any = _select_progress_class(IncrementalBar, Bar)
64
+
65
+
66
+ class InterruptibleMixin:
67
+ """
68
+ Helper to ensure that self.finish() gets called on keyboard interrupt.
69
+
70
+ This allows downloads to be interrupted without leaving temporary state
71
+ (like hidden cursors) behind.
72
+
73
+ This class is similar to the progress library's existing SigIntMixin
74
+ helper, but as of version 1.2, that helper has the following problems:
75
+
76
+ 1. It calls sys.exit().
77
+ 2. It discards the existing SIGINT handler completely.
78
+ 3. It leaves its own handler in place even after an uninterrupted finish,
79
+ which will have unexpected delayed effects if the user triggers an
80
+ unrelated keyboard interrupt some time after a progress-displaying
81
+ download has already completed, for example.
82
+ """
83
+
84
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
85
+ """
86
+ Save the original SIGINT handler for later.
87
+ """
88
+ # https://github.com/python/mypy/issues/5887
89
+ super().__init__(*args, **kwargs) # type: ignore
90
+
91
+ self.original_handler = signal(SIGINT, self.handle_sigint)
92
+
93
+ # If signal() returns None, the previous handler was not installed from
94
+ # Python, and we cannot restore it. This probably should not happen,
95
+ # but if it does, we must restore something sensible instead, at least.
96
+ # The least bad option should be Python's default SIGINT handler, which
97
+ # just raises KeyboardInterrupt.
98
+ if self.original_handler is None:
99
+ self.original_handler = default_int_handler
100
+
101
+ def finish(self) -> None:
102
+ """
103
+ Restore the original SIGINT handler after finishing.
104
+
105
+ This should happen regardless of whether the progress display finishes
106
+ normally, or gets interrupted.
107
+ """
108
+ super().finish() # type: ignore
109
+ signal(SIGINT, self.original_handler)
110
+
111
+ def handle_sigint(self, signum, frame): # type: ignore
112
+ """
113
+ Call self.finish() before delegating to the original SIGINT handler.
114
+
115
+ This handler should only be in place while the progress display is
116
+ active.
117
+ """
118
+ self.finish()
119
+ self.original_handler(signum, frame)
120
+
121
+
122
+ class SilentBar(Bar):
123
+ def update(self) -> None:
124
+ pass
125
+
126
+
127
+ class BlueEmojiBar(IncrementalBar):
128
+
129
+ suffix = "%(percent)d%%"
130
+ bar_prefix = " "
131
+ bar_suffix = " "
132
+ phases = ("\U0001F539", "\U0001F537", "\U0001F535")
133
+
134
+
135
+ class DownloadProgressMixin:
136
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
137
+ # https://github.com/python/mypy/issues/5887
138
+ super().__init__(*args, **kwargs) # type: ignore
139
+ self.message: str = (" " * (get_indentation() + 2)) + self.message
140
+
141
+ @property
142
+ def downloaded(self) -> str:
143
+ return format_size(self.index) # type: ignore
144
+
145
+ @property
146
+ def download_speed(self) -> str:
147
+ # Avoid zero division errors...
148
+ if self.avg == 0.0: # type: ignore
149
+ return "..."
150
+ return format_size(1 / self.avg) + "/s" # type: ignore
151
+
152
+ @property
153
+ def pretty_eta(self) -> str:
154
+ if self.eta: # type: ignore
155
+ return f"eta {self.eta_td}" # type: ignore
156
+ return ""
157
+
158
+ def iter(self, it): # type: ignore
159
+ for x in it:
160
+ yield x
161
+ # B305 is incorrectly raised here
162
+ # https://github.com/PyCQA/flake8-bugbear/issues/59
163
+ self.next(len(x)) # noqa: B305
164
+ self.finish()
165
+
166
+
167
+ class WindowsMixin:
168
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
169
+ # The Windows terminal does not support the hide/show cursor ANSI codes
170
+ # even with colorama. So we'll ensure that hide_cursor is False on
171
+ # Windows.
172
+ # This call needs to go before the super() call, so that hide_cursor
173
+ # is set in time. The base progress bar class writes the "hide cursor"
174
+ # code to the terminal in its init, so if we don't set this soon
175
+ # enough, we get a "hide" with no corresponding "show"...
176
+ if WINDOWS and self.hide_cursor: # type: ignore
177
+ self.hide_cursor = False
178
+
179
+ # https://github.com/python/mypy/issues/5887
180
+ super().__init__(*args, **kwargs) # type: ignore
181
+
182
+ # Check if we are running on Windows and we have the colorama module,
183
+ # if we do then wrap our file with it.
184
+ if WINDOWS and colorama:
185
+ self.file = colorama.AnsiToWin32(self.file) # type: ignore
186
+ # The progress code expects to be able to call self.file.isatty()
187
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
188
+ # add it.
189
+ self.file.isatty = lambda: self.file.wrapped.isatty()
190
+ # The progress code expects to be able to call self.file.flush()
191
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
192
+ # add it.
193
+ self.file.flush = lambda: self.file.wrapped.flush()
194
+
195
+
196
+ class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
197
+
198
+ file = sys.stdout
199
+ message = "%(percent)d%%"
200
+ suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
201
+
202
+
203
+ class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
204
+ pass
205
+
206
+
207
+ class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
208
+ pass
209
+
210
+
211
+ class DownloadBar(BaseDownloadProgressBar, Bar):
212
+ pass
213
+
214
+
215
+ class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
216
+ pass
217
+
218
+
219
+ class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
220
+ pass
221
+
222
+
223
+ class DownloadProgressSpinner(
224
+ WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
225
+ ):
226
+
227
+ file = sys.stdout
228
+ suffix = "%(downloaded)s %(download_speed)s"
229
+
230
+ def next_phase(self) -> str:
231
+ if not hasattr(self, "_phaser"):
232
+ self._phaser = itertools.cycle(self.phases)
233
+ return next(self._phaser)
234
+
235
+ def update(self) -> None:
236
+ message = self.message % self
237
+ phase = self.next_phase()
238
+ suffix = self.suffix % self
239
+ line = "".join(
240
+ [
241
+ message,
242
+ " " if message else "",
243
+ phase,
244
+ " " if suffix else "",
245
+ suffix,
246
+ ]
247
+ )
248
+
249
+ self.writeln(line)
250
+
251
+
252
+ BAR_TYPES = {
253
+ "off": (DownloadSilentBar, DownloadSilentBar),
254
+ "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
255
+ "ascii": (DownloadBar, DownloadProgressSpinner),
256
+ "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
257
+ "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
258
+ }
259
+
260
+
261
+ def _legacy_progress_bar(
262
+ progress_bar: str, max: Optional[int]
263
+ ) -> DownloadProgressRenderer:
264
+ if max is None or max == 0:
265
+ return BAR_TYPES[progress_bar][1]().iter # type: ignore
266
+ else:
267
+ return BAR_TYPES[progress_bar][0](max=max).iter
268
+
269
+
270
+ #
271
+ # Modern replacement, for our legacy progress bars.
272
+ #
273
+ def _rich_progress_bar(
274
+ iterable: Iterator[bytes],
275
+ *,
276
+ bar_type: str,
277
+ size: int,
278
+ ) -> Iterator[bytes]:
279
+ assert bar_type == "on", "This should only be used in the default mode."
280
+
281
+ if not size:
282
+ total = float("inf")
283
+ columns: Tuple[ProgressColumn, ...] = (
284
+ TextColumn("[progress.description]{task.description}"),
285
+ SpinnerColumn("line", speed=1.5),
286
+ FileSizeColumn(),
287
+ TransferSpeedColumn(),
288
+ TimeElapsedColumn(),
289
+ )
290
+ else:
291
+ total = size
292
+ columns = (
293
+ TextColumn("[progress.description]{task.description}"),
294
+ BarColumn(),
295
+ DownloadColumn(),
296
+ TransferSpeedColumn(),
297
+ TextColumn("eta"),
298
+ TimeRemainingColumn(),
299
+ )
300
+
301
+ progress = Progress(*columns, refresh_per_second=30)
302
+ task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
303
+ with progress:
304
+ for chunk in iterable:
305
+ yield chunk
306
+ progress.update(task_id, advance=len(chunk))
307
+
308
+
309
+ def get_download_progress_renderer(
310
+ *, bar_type: str, size: Optional[int] = None
311
+ ) -> DownloadProgressRenderer:
312
+ """Get an object that can be used to render the download progress.
313
+
314
+ Returns a callable, that takes an iterable to "wrap".
315
+ """
316
+ if bar_type == "on":
317
+ return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
318
+ elif bar_type == "off":
319
+ return iter # no-op, when passed an iterator
320
+ else:
321
+ return _legacy_progress_bar(bar_type, size)
venv/lib/python3.10/site-packages/pip/_internal/cli/req_command.py ADDED
@@ -0,0 +1,506 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Contains the Command base classes that depend on PipSession.
2
+
3
+ The classes in this module are in a separate module so the commands not
4
+ needing download / PackageFinder capability don't unnecessarily import the
5
+ PackageFinder machinery and all its vendored dependencies, etc.
6
+ """
7
+
8
+ import logging
9
+ import os
10
+ import sys
11
+ from functools import partial
12
+ from optparse import Values
13
+ from typing import Any, List, Optional, Tuple
14
+
15
+ from pip._internal.cache import WheelCache
16
+ from pip._internal.cli import cmdoptions
17
+ from pip._internal.cli.base_command import Command
18
+ from pip._internal.cli.command_context import CommandContextMixIn
19
+ from pip._internal.exceptions import CommandError, PreviousBuildDirError
20
+ from pip._internal.index.collector import LinkCollector
21
+ from pip._internal.index.package_finder import PackageFinder
22
+ from pip._internal.models.selection_prefs import SelectionPreferences
23
+ from pip._internal.models.target_python import TargetPython
24
+ from pip._internal.network.session import PipSession
25
+ from pip._internal.operations.prepare import RequirementPreparer
26
+ from pip._internal.req.constructors import (
27
+ install_req_from_editable,
28
+ install_req_from_line,
29
+ install_req_from_parsed_requirement,
30
+ install_req_from_req_string,
31
+ )
32
+ from pip._internal.req.req_file import parse_requirements
33
+ from pip._internal.req.req_install import InstallRequirement
34
+ from pip._internal.req.req_tracker import RequirementTracker
35
+ from pip._internal.resolution.base import BaseResolver
36
+ from pip._internal.self_outdated_check import pip_self_version_check
37
+ from pip._internal.utils.deprecation import deprecated
38
+ from pip._internal.utils.temp_dir import (
39
+ TempDirectory,
40
+ TempDirectoryTypeRegistry,
41
+ tempdir_kinds,
42
+ )
43
+ from pip._internal.utils.virtualenv import running_under_virtualenv
44
+
45
+ logger = logging.getLogger(__name__)
46
+
47
+
48
+ class SessionCommandMixin(CommandContextMixIn):
49
+
50
+ """
51
+ A class mixin for command classes needing _build_session().
52
+ """
53
+
54
+ def __init__(self) -> None:
55
+ super().__init__()
56
+ self._session: Optional[PipSession] = None
57
+
58
+ @classmethod
59
+ def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
60
+ """Return a list of index urls from user-provided options."""
61
+ index_urls = []
62
+ if not getattr(options, "no_index", False):
63
+ url = getattr(options, "index_url", None)
64
+ if url:
65
+ index_urls.append(url)
66
+ urls = getattr(options, "extra_index_urls", None)
67
+ if urls:
68
+ index_urls.extend(urls)
69
+ # Return None rather than an empty list
70
+ return index_urls or None
71
+
72
+ def get_default_session(self, options: Values) -> PipSession:
73
+ """Get a default-managed session."""
74
+ if self._session is None:
75
+ self._session = self.enter_context(self._build_session(options))
76
+ # there's no type annotation on requests.Session, so it's
77
+ # automatically ContextManager[Any] and self._session becomes Any,
78
+ # then https://github.com/python/mypy/issues/7696 kicks in
79
+ assert self._session is not None
80
+ return self._session
81
+
82
+ def _build_session(
83
+ self,
84
+ options: Values,
85
+ retries: Optional[int] = None,
86
+ timeout: Optional[int] = None,
87
+ ) -> PipSession:
88
+ assert not options.cache_dir or os.path.isabs(options.cache_dir)
89
+ session = PipSession(
90
+ cache=(
91
+ os.path.join(options.cache_dir, "http") if options.cache_dir else None
92
+ ),
93
+ retries=retries if retries is not None else options.retries,
94
+ trusted_hosts=options.trusted_hosts,
95
+ index_urls=self._get_index_urls(options),
96
+ )
97
+
98
+ # Handle custom ca-bundles from the user
99
+ if options.cert:
100
+ session.verify = options.cert
101
+
102
+ # Handle SSL client certificate
103
+ if options.client_cert:
104
+ session.cert = options.client_cert
105
+
106
+ # Handle timeouts
107
+ if options.timeout or timeout:
108
+ session.timeout = timeout if timeout is not None else options.timeout
109
+
110
+ # Handle configured proxies
111
+ if options.proxy:
112
+ session.proxies = {
113
+ "http": options.proxy,
114
+ "https": options.proxy,
115
+ }
116
+
117
+ # Determine if we can prompt the user for authentication or not
118
+ session.auth.prompting = not options.no_input
119
+
120
+ return session
121
+
122
+
123
+ class IndexGroupCommand(Command, SessionCommandMixin):
124
+
125
+ """
126
+ Abstract base class for commands with the index_group options.
127
+
128
+ This also corresponds to the commands that permit the pip version check.
129
+ """
130
+
131
+ def handle_pip_version_check(self, options: Values) -> None:
132
+ """
133
+ Do the pip version check if not disabled.
134
+
135
+ This overrides the default behavior of not doing the check.
136
+ """
137
+ # Make sure the index_group options are present.
138
+ assert hasattr(options, "no_index")
139
+
140
+ if options.disable_pip_version_check or options.no_index:
141
+ return
142
+
143
+ # Otherwise, check if we're using the latest version of pip available.
144
+ session = self._build_session(
145
+ options, retries=0, timeout=min(5, options.timeout)
146
+ )
147
+ with session:
148
+ pip_self_version_check(session, options)
149
+
150
+
151
+ KEEPABLE_TEMPDIR_TYPES = [
152
+ tempdir_kinds.BUILD_ENV,
153
+ tempdir_kinds.EPHEM_WHEEL_CACHE,
154
+ tempdir_kinds.REQ_BUILD,
155
+ ]
156
+
157
+
158
+ def warn_if_run_as_root() -> None:
159
+ """Output a warning for sudo users on Unix.
160
+
161
+ In a virtual environment, sudo pip still writes to virtualenv.
162
+ On Windows, users may run pip as Administrator without issues.
163
+ This warning only applies to Unix root users outside of virtualenv.
164
+ """
165
+ if running_under_virtualenv():
166
+ return
167
+ if not hasattr(os, "getuid"):
168
+ return
169
+ # On Windows, there are no "system managed" Python packages. Installing as
170
+ # Administrator via pip is the correct way of updating system environments.
171
+ #
172
+ # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
173
+ # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
174
+ if sys.platform == "win32" or sys.platform == "cygwin":
175
+ return
176
+
177
+ if os.getuid() != 0:
178
+ return
179
+
180
+ logger.warning(
181
+ "Running pip as the 'root' user can result in broken permissions and "
182
+ "conflicting behaviour with the system package manager. "
183
+ "It is recommended to use a virtual environment instead: "
184
+ "https://pip.pypa.io/warnings/venv"
185
+ )
186
+
187
+
188
+ def with_cleanup(func: Any) -> Any:
189
+ """Decorator for common logic related to managing temporary
190
+ directories.
191
+ """
192
+
193
+ def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
194
+ for t in KEEPABLE_TEMPDIR_TYPES:
195
+ registry.set_delete(t, False)
196
+
197
+ def wrapper(
198
+ self: RequirementCommand, options: Values, args: List[Any]
199
+ ) -> Optional[int]:
200
+ assert self.tempdir_registry is not None
201
+ if options.no_clean:
202
+ configure_tempdir_registry(self.tempdir_registry)
203
+
204
+ try:
205
+ return func(self, options, args)
206
+ except PreviousBuildDirError:
207
+ # This kind of conflict can occur when the user passes an explicit
208
+ # build directory with a pre-existing folder. In that case we do
209
+ # not want to accidentally remove it.
210
+ configure_tempdir_registry(self.tempdir_registry)
211
+ raise
212
+
213
+ return wrapper
214
+
215
+
216
class RequirementCommand(IndexGroupCommand):
    """Base class for commands that resolve and prepare requirements.

    Adds the ``--no-clean`` option and provides shared helpers for building
    the requirement preparer, the resolver, the requirement list, and the
    package finder used by those commands.
    """

    def __init__(self, *args: Any, **kw: Any) -> None:
        super().__init__(*args, **kw)

        self.cmd_opts.add_option(cmdoptions.no_clean())

    @staticmethod
    def determine_resolver_variant(options: Values) -> str:
        """Determines which resolver should be used, based on the given options."""
        if "legacy-resolver" in options.deprecated_features_enabled:
            return "legacy"

        return "2020-resolver"

    @staticmethod
    def determine_build_failure_suppression(options: Values) -> bool:
        """Determines whether build failures should be suppressed and backtracked on."""
        if "backtrack-on-build-failures" not in options.deprecated_features_enabled:
            return False

        # Backtracking on build failures is incompatible with the legacy resolver.
        if "legacy-resolver" in options.deprecated_features_enabled:
            raise CommandError("Cannot backtrack with legacy resolver.")

        deprecated(
            reason=(
                "Backtracking on build failures can mask issues related to how "
                "a package generates metadata or builds a wheel. This flag will "
                "be removed in pip 22.2."
            ),
            gone_in=None,
            replacement=(
                "avoiding known-bad versions by explicitly telling pip to ignore them "
                "(either directly as requirements, or via a constraints file)"
            ),
            feature_flag=None,
            issue=10655,
        )
        return True

    @classmethod
    def make_requirement_preparer(
        cls,
        temp_build_dir: TempDirectory,
        options: Values,
        req_tracker: RequirementTracker,
        session: PipSession,
        finder: PackageFinder,
        use_user_site: bool,
        download_dir: Optional[str] = None,
        verbosity: int = 0,
    ) -> RequirementPreparer:
        """
        Create a RequirementPreparer instance for the given parameters.

        :param temp_build_dir: temporary directory used as the build dir;
            its ``path`` must already be materialized (asserted below).
        :param download_dir: where downloaded archives go, if any.
        :param verbosity: forwarded to the preparer for output control.
        """
        temp_build_dir_path = temp_build_dir.path
        assert temp_build_dir_path is not None

        resolver_variant = cls.determine_resolver_variant(options)
        # Lazy wheels (fast-deps) are only honored under the 2020 resolver;
        # with the legacy resolver the flag is ignored with a warning.
        if resolver_variant == "2020-resolver":
            lazy_wheel = "fast-deps" in options.features_enabled
            if lazy_wheel:
                logger.warning(
                    "pip is using lazily downloaded wheels using HTTP "
                    "range requests to obtain dependency information. "
                    "This experimental feature is enabled through "
                    "--use-feature=fast-deps and it is not ready for "
                    "production."
                )
        else:
            lazy_wheel = False
            if "fast-deps" in options.features_enabled:
                logger.warning(
                    "fast-deps has no effect when used with the legacy resolver."
                )

        # In-tree builds are the default; both toggles are deprecated.
        in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled
        if "in-tree-build" in options.features_enabled:
            deprecated(
                reason="In-tree builds are now the default.",
                replacement="to remove the --use-feature=in-tree-build flag",
                gone_in="22.1",
            )
        if "out-of-tree-build" in options.deprecated_features_enabled:
            deprecated(
                reason="Out-of-tree builds are deprecated.",
                replacement=None,
                gone_in="22.1",
            )

        if options.progress_bar not in {"on", "off"}:
            deprecated(
                reason="Custom progress bar styles are deprecated",
                replacement="to use the default progress bar style.",
                gone_in="22.1",
            )

        return RequirementPreparer(
            build_dir=temp_build_dir_path,
            src_dir=options.src_dir,
            download_dir=download_dir,
            build_isolation=options.build_isolation,
            req_tracker=req_tracker,
            session=session,
            progress_bar=options.progress_bar,
            finder=finder,
            require_hashes=options.require_hashes,
            use_user_site=use_user_site,
            lazy_wheel=lazy_wheel,
            verbosity=verbosity,
            in_tree_build=in_tree_build,
        )

    @classmethod
    def make_resolver(
        cls,
        preparer: RequirementPreparer,
        finder: PackageFinder,
        options: Values,
        wheel_cache: Optional[WheelCache] = None,
        use_user_site: bool = False,
        ignore_installed: bool = True,
        ignore_requires_python: bool = False,
        force_reinstall: bool = False,
        upgrade_strategy: str = "to-satisfy-only",
        use_pep517: Optional[bool] = None,
        py_version_info: Optional[Tuple[int, ...]] = None,
    ) -> BaseResolver:
        """
        Create a Resolver instance for the given parameters.
        """
        make_install_req = partial(
            install_req_from_req_string,
            isolated=options.isolated_mode,
            use_pep517=use_pep517,
        )
        suppress_build_failures = cls.determine_build_failure_suppression(options)
        resolver_variant = cls.determine_resolver_variant(options)
        # The long import name and duplicated invocation is needed to convince
        # Mypy into correctly typechecking. Otherwise it would complain the
        # "Resolver" class being redefined.
        if resolver_variant == "2020-resolver":
            import pip._internal.resolution.resolvelib.resolver

            return pip._internal.resolution.resolvelib.resolver.Resolver(
                preparer=preparer,
                finder=finder,
                wheel_cache=wheel_cache,
                make_install_req=make_install_req,
                use_user_site=use_user_site,
                ignore_dependencies=options.ignore_dependencies,
                ignore_installed=ignore_installed,
                ignore_requires_python=ignore_requires_python,
                force_reinstall=force_reinstall,
                upgrade_strategy=upgrade_strategy,
                py_version_info=py_version_info,
                suppress_build_failures=suppress_build_failures,
            )
        import pip._internal.resolution.legacy.resolver

        return pip._internal.resolution.legacy.resolver.Resolver(
            preparer=preparer,
            finder=finder,
            wheel_cache=wheel_cache,
            make_install_req=make_install_req,
            use_user_site=use_user_site,
            ignore_dependencies=options.ignore_dependencies,
            ignore_installed=ignore_installed,
            ignore_requires_python=ignore_requires_python,
            force_reinstall=force_reinstall,
            upgrade_strategy=upgrade_strategy,
            py_version_info=py_version_info,
        )

    def get_requirements(
        self,
        args: List[str],
        options: Values,
        finder: PackageFinder,
        session: PipSession,
    ) -> List[InstallRequirement]:
        """
        Parse command-line arguments into the corresponding requirements.

        Collects, in order: constraint-file entries, positional requirement
        specifiers, editables, then requirements-file entries. Raises
        CommandError if nothing at all was requested.
        """
        requirements: List[InstallRequirement] = []
        for filename in options.constraints:
            for parsed_req in parse_requirements(
                filename,
                constraint=True,
                finder=finder,
                options=options,
                session=session,
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    user_supplied=False,
                )
                requirements.append(req_to_add)

        for req in args:
            req_to_add = install_req_from_line(
                req,
                None,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
                user_supplied=True,
            )
            requirements.append(req_to_add)

        for req in options.editables:
            req_to_add = install_req_from_editable(
                req,
                user_supplied=True,
                isolated=options.isolated_mode,
                use_pep517=options.use_pep517,
            )
            requirements.append(req_to_add)

        # NOTE: options.require_hashes may be set if --require-hashes is True
        for filename in options.requirements:
            for parsed_req in parse_requirements(
                filename, finder=finder, options=options, session=session
            ):
                req_to_add = install_req_from_parsed_requirement(
                    parsed_req,
                    isolated=options.isolated_mode,
                    use_pep517=options.use_pep517,
                    user_supplied=True,
                )
                requirements.append(req_to_add)

        # If any requirement has hash options, enable hash checking.
        if any(req.has_hash_options for req in requirements):
            options.require_hashes = True

        if not (args or options.editables or options.requirements):
            opts = {"name": self.name}
            # Give a more helpful hint when the user passed only --find-links.
            if options.find_links:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(maybe you meant "pip {name} {links}"?)'.format(
                        **dict(opts, links=" ".join(options.find_links))
                    )
                )
            else:
                raise CommandError(
                    "You must give at least one requirement to {name} "
                    '(see "pip help {name}")'.format(**opts)
                )

        return requirements

    @staticmethod
    def trace_basic_info(finder: PackageFinder) -> None:
        """
        Trace basic information about the provided objects.
        """
        # Display where finder is looking for packages
        search_scope = finder.search_scope
        locations = search_scope.get_formatted_locations()
        if locations:
            logger.info(locations)

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to this requirement command.

        :param ignore_requires_python: Whether to ignore incompatible
            "Requires-Python" values in links. Defaults to False.
        """
        link_collector = LinkCollector.create(session, options=options)
        selection_prefs = SelectionPreferences(
            allow_yanked=True,
            format_control=options.format_control,
            allow_all_prereleases=options.pre,
            prefer_binary=options.prefer_binary,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
            use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
        )
venv/lib/python3.10/site-packages/pip/_internal/cli/spinners.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import contextlib
import itertools
import logging
import sys
import time
from typing import IO, Iterator, Optional

from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR

from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.logging import get_indentation
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
class SpinnerInterface:
    """Abstract interface for progress spinners."""

    def spin(self) -> None:
        """Advance the spinner by one frame."""
        raise NotImplementedError()

    def finish(self, final_status: str) -> None:
        """Stop spinning and display ``final_status``."""
        raise NotImplementedError()
22
+
23
+
24
class InteractiveSpinner(SpinnerInterface):
    """Spinner for interactive (tty) output.

    Writes directly to ``file`` (default: ``sys.stdout``), redrawing a single
    status character in place using backspaces.
    """

    def __init__(
        self,
        message: str,
        # Fix: the annotation was `IO[str] = None`; a None default requires an
        # explicit Optional annotation (PEP 484 — implicit Optional is invalid).
        file: Optional[IO[str]] = None,
        spin_chars: str = "-\\|/",
        # Empirically, 8 updates/second looks nice
        min_update_interval_seconds: float = 0.125,
    ) -> None:
        self._message = message
        if file is None:
            file = sys.stdout
        self._file = file
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._finished = False

        self._spin_cycle = itertools.cycle(spin_chars)

        self._file.write(" " * get_indentation() + self._message + " ... ")
        # Width of the most recently written status, used to erase it.
        self._width = 0

    def _write(self, status: str) -> None:
        assert not self._finished
        # Erase what we wrote before by backspacing to the beginning, writing
        # spaces to overwrite the old text, and then backspacing again
        backup = "\b" * self._width
        self._file.write(backup + " " * self._width + backup)
        # Now we have a blank slate to add our status
        self._file.write(status)
        self._width = len(status)
        self._file.flush()
        self._rate_limiter.reset()

    def spin(self) -> None:
        """Draw the next spin character, rate-limited; no-op once finished."""
        if self._finished:
            return
        if not self._rate_limiter.ready():
            return
        self._write(next(self._spin_cycle))

    def finish(self, final_status: str) -> None:
        """Replace the spin character with ``final_status`` and end the line."""
        if self._finished:
            return
        self._write(final_status)
        self._file.write("\n")
        self._file.flush()
        self._finished = True
71
+
72
+
73
+ # Used for dumb terminals, non-interactive installs (no tty), etc.
74
+ # We still print updates occasionally (once every 60 seconds by default) to
75
+ # act as a keep-alive for systems like Travis-CI that take lack-of-output as
76
+ # an indication that a task has frozen.
77
# Used for dumb terminals, non-interactive installs (no tty), etc.
# We still print updates occasionally (once every 60 seconds by default) to
# act as a keep-alive for systems like Travis-CI that take lack-of-output as
# an indication that a task has frozen.
class NonInteractiveSpinner(SpinnerInterface):
    """Spinner that emits periodic keep-alive log lines instead of animating."""

    def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
        self._message = message
        self._finished = False
        self._rate_limiter = RateLimiter(min_update_interval_seconds)
        self._update("started")

    def _update(self, status: str) -> None:
        assert not self._finished
        self._rate_limiter.reset()
        logger.info("%s: %s", self._message, status)

    def spin(self) -> None:
        """Log a keep-alive line if the minimum interval has elapsed."""
        if self._finished or not self._rate_limiter.ready():
            return
        self._update("still running...")

    def finish(self, final_status: str) -> None:
        """Log the final status once; subsequent calls are no-ops."""
        if not self._finished:
            self._update(f"finished with status '{final_status}'")
            self._finished = True
101
+
102
+
103
class RateLimiter:
    """Tracks whether a minimum interval has elapsed since the last reset."""

    def __init__(self, min_update_interval_seconds: float) -> None:
        self._min_update_interval_seconds = min_update_interval_seconds
        # 0 means "never updated", so the very first ready() check passes.
        self._last_update: float = 0

    def ready(self) -> bool:
        """Return True once the minimum interval has passed since reset()."""
        elapsed = time.time() - self._last_update
        return elapsed >= self._min_update_interval_seconds

    def reset(self) -> None:
        """Restart the interval from the current time."""
        self._last_update = time.time()
115
+
116
+
117
@contextlib.contextmanager
def open_spinner(message: str) -> Iterator[SpinnerInterface]:
    """Yield a spinner appropriate for the current output environment.

    The interactive spinner writes directly to sys.stdout rather than going
    through the logging system, but it acts as if it logged at level INFO:
    it is only used when the effective level is INFO or lower. Otherwise a
    non-interactive spinner is used, which goes through logging and so is
    always in sync with the logging configuration.
    """
    spinner: SpinnerInterface
    if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
        spinner = InteractiveSpinner(message)
    else:
        spinner = NonInteractiveSpinner(message)
    try:
        with hidden_cursor(sys.stdout):
            yield spinner
    except KeyboardInterrupt:
        spinner.finish("canceled")
        raise
    except Exception:
        spinner.finish("error")
        raise
    else:
        spinner.finish("done")
139
+
140
+
141
@contextlib.contextmanager
def hidden_cursor(file: IO[str]) -> Iterator[None]:
    """Hide the terminal cursor on *file* while the context is active.

    Does nothing on Windows (the terminal does not support the hide/show
    cursor ANSI codes, even via colorama), when *file* is not a tty, or when
    running quietly (effective log level above INFO) — we don't want control
    characters cluttering redirected output. See
    https://github.com/pypa/pip/issues/3418.
    """
    use_ansi_codes = (
        not WINDOWS
        and file.isatty()
        and logger.getEffectiveLevel() <= logging.INFO
    )
    if not use_ansi_codes:
        yield
        return
    file.write(HIDE_CURSOR)
    try:
        yield
    finally:
        file.write(SHOW_CURSOR)
venv/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
# Process exit codes used by pip commands.
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
# Returned when a pre-existing build directory conflicts with the build
# (corresponds to PreviousBuildDirError elsewhere in pip).
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23
venv/lib/python3.10/site-packages/pip/_internal/index/__init__.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ """Index interaction code
2
+ """
venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (222 Bytes). View file
 
venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc ADDED
Binary file (19.3 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc ADDED
Binary file (28.1 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc ADDED
Binary file (7.12 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/index/collector.py ADDED
@@ -0,0 +1,648 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ The main purpose of this module is to expose LinkCollector.collect_sources().
3
+ """
4
+
5
+ import cgi
6
+ import collections
7
+ import functools
8
+ import itertools
9
+ import logging
10
+ import os
11
+ import re
12
+ import urllib.parse
13
+ import urllib.request
14
+ import xml.etree.ElementTree
15
+ from html.parser import HTMLParser
16
+ from optparse import Values
17
+ from typing import (
18
+ TYPE_CHECKING,
19
+ Any,
20
+ Callable,
21
+ Dict,
22
+ Iterable,
23
+ List,
24
+ MutableMapping,
25
+ NamedTuple,
26
+ Optional,
27
+ Sequence,
28
+ Tuple,
29
+ Union,
30
+ )
31
+
32
+ from pip._vendor import html5lib, requests
33
+ from pip._vendor.requests import Response
34
+ from pip._vendor.requests.exceptions import RetryError, SSLError
35
+
36
+ from pip._internal.exceptions import NetworkConnectionError
37
+ from pip._internal.models.link import Link
38
+ from pip._internal.models.search_scope import SearchScope
39
+ from pip._internal.network.session import PipSession
40
+ from pip._internal.network.utils import raise_for_status
41
+ from pip._internal.utils.deprecation import deprecated
42
+ from pip._internal.utils.filetypes import is_archive_file
43
+ from pip._internal.utils.misc import pairwise, redact_auth_from_url
44
+ from pip._internal.vcs import vcs
45
+
46
+ from .sources import CandidatesFromPage, LinkSource, build_source
47
+
48
+ if TYPE_CHECKING:
49
+ from typing import Protocol
50
+ else:
51
+ Protocol = object
52
+
53
+ logger = logging.getLogger(__name__)
54
+
55
+ HTMLElement = xml.etree.ElementTree.Element
56
+ ResponseHeaders = MutableMapping[str, str]
57
+
58
+
59
def _match_vcs_scheme(url: str) -> Optional[str]:
    """Look for VCS schemes in the URL.

    Returns the matched VCS scheme, or None if there's no match.
    """
    lowered = url.lower()
    for scheme in vcs.schemes:
        # Use a one-character slice plus tuple membership instead of
        # `url[len(scheme)] in "+:"`: indexing raises IndexError when the URL
        # is exactly the scheme, and an empty slice tested against the string
        # "+:" would (wrongly) match as a substring.
        if lowered.startswith(scheme) and url[len(scheme) : len(scheme) + 1] in ("+", ":"):
            return scheme
    return None
68
+
69
+
70
class _NotHTML(Exception):
    """Raised when a response expected to be HTML has another Content-Type."""

    def __init__(self, content_type: str, request_desc: str) -> None:
        # Forward both values to Exception so they appear in args/str().
        super().__init__(content_type, request_desc)
        self.content_type = content_type
        self.request_desc = request_desc
75
+
76
+
77
def _ensure_html_header(response: Response) -> None:
    """Check the Content-Type header to ensure the response contains HTML.

    Raises `_NotHTML` if the content type is not text/html.
    """
    declared_type = response.headers.get("Content-Type", "")
    if declared_type.lower().startswith("text/html"):
        return
    raise _NotHTML(declared_type, response.request.method)
85
+
86
+
87
class _NotHTTP(Exception):
    """Raised when a URL is not http/https, so a HEAD content check is impossible."""

    pass
89
+
90
+
91
def _ensure_html_response(url: str, session: PipSession) -> None:
    """Send a HEAD request to the URL, and ensure the response contains HTML.

    Raises `_NotHTTP` if the URL is not available for a HEAD request, or
    `_NotHTML` if the content type is not text/html.
    """
    if urllib.parse.urlsplit(url).scheme not in {"http", "https"}:
        raise _NotHTTP()

    resp = session.head(url, allow_redirects=True)
    raise_for_status(resp)
    _ensure_html_header(resp)
105
+
106
+
107
def _get_html_response(url: str, session: PipSession) -> Response:
    """Access an HTML page with GET, and return the response.

    This consists of three parts:

    1. If the URL looks suspiciously like an archive, send a HEAD first to
       check the Content-Type is HTML, to avoid downloading a large file.
       Raise `_NotHTTP` if the content type cannot be determined, or
       `_NotHTML` if it is not HTML.
    2. Actually perform the request. Raise HTTP exceptions on network failures.
    3. Check the Content-Type header to make sure we got HTML, and raise
       `_NotHTML` otherwise.

    :param url: the URL of the page to fetch.
    :param session: the PipSession used for the request.
    """
    if is_archive_file(Link(url).filename):
        _ensure_html_response(url, session=session)

    logger.debug("Getting page %s", redact_auth_from_url(url))

    resp = session.get(
        url,
        headers={
            "Accept": "text/html",
            # We don't want to blindly returned cached data for
            # /simple/, because authors generally expecting that
            # twine upload && pip install will function, but if
            # they've done a pip install in the last ~10 minutes
            # it won't. Thus by setting this to zero we will not
            # blindly use any cached data, however the benefit of
            # using max-age=0 instead of no-cache, is that we will
            # still support conditional requests, so we will still
            # minimize traffic sent in cases where the page hasn't
            # changed at all, we will just always incur the round
            # trip for the conditional GET now instead of only
            # once per 10 minutes.
            # For more information, please see pypa/pip#5670.
            "Cache-Control": "max-age=0",
        },
    )
    raise_for_status(resp)

    # The check for archives above only works if the url ends with
    # something that looks like an archive. However that is not a
    # requirement of an url. Unless we issue a HEAD request on every
    # url we cannot know ahead of time for sure if something is HTML
    # or not. However we can check after we've downloaded it.
    _ensure_html_header(resp)

    return resp
155
+
156
+
157
def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
    """Determine if we have any encoding information in our headers."""
    if not headers or "Content-Type" not in headers:
        return None
    # cgi.parse_header splits e.g. "text/html; charset=utf-8" into the media
    # type and a dict of parameters.
    _media_type, params = cgi.parse_header(headers["Content-Type"])
    return params.get("charset")
164
+
165
+
166
+ def _determine_base_url(document: HTMLElement, page_url: str) -> str:
167
+ """Determine the HTML document's base URL.
168
+
169
+ This looks for a ``<base>`` tag in the HTML document. If present, its href
170
+ attribute denotes the base URL of anchor tags in the document. If there is
171
+ no such tag (or if it does not have a valid href attribute), the HTML
172
+ file's URL is used as the base URL.
173
+
174
+ :param document: An HTML document representation. The current
175
+ implementation expects the result of ``html5lib.parse()``.
176
+ :param page_url: The URL of the HTML document.
177
+
178
+ TODO: Remove when `html5lib` is dropped.
179
+ """
180
+ for base in document.findall(".//base"):
181
+ href = base.get("href")
182
+ if href is not None:
183
+ return href
184
+ return page_url
185
+
186
+
187
+ def _clean_url_path_part(part: str) -> str:
188
+ """
189
+ Clean a "part" of a URL path (i.e. after splitting on "@" characters).
190
+ """
191
+ # We unquote prior to quoting to make sure nothing is double quoted.
192
+ return urllib.parse.quote(urllib.parse.unquote(part))
193
+
194
+
195
+ def _clean_file_url_path(part: str) -> str:
196
+ """
197
+ Clean the first part of a URL path that corresponds to a local
198
+ filesystem path (i.e. the first part after splitting on "@" characters).
199
+ """
200
+ # We unquote prior to quoting to make sure nothing is double quoted.
201
+ # Also, on Windows the path part might contain a drive letter which
202
+ # should not be quoted. On Linux where drive letters do not
203
+ # exist, the colon should be quoted. We rely on urllib.request
204
+ # to do the right thing here.
205
+ return urllib.request.pathname2url(urllib.request.url2pathname(part))
206
+
207
+
208
# Splits a path on the separators that must survive cleaning untouched: a
# literal "@" or an already-percent-encoded "/" ("%2F", case-insensitive).
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
210
+
211
+
212
def _clean_url_path(path: str, is_local_path: bool) -> str:
    """
    Clean the path portion of a URL.
    """
    clean_func = _clean_file_url_path if is_local_path else _clean_url_path_part

    # Split on the reserved characters prior to cleaning so that
    # revision strings in VCS URLs are properly preserved.
    parts = _reserved_chars_re.split(path)

    cleaned: List[str] = []
    # NOTE(review): this relies on pip's `pairwise` (utils.misc), which
    # appears to yield non-overlapping (segment, separator) pairs — the
    # trailing "" pads the final segment; confirm against utils.misc.
    for segment, separator in pairwise(itertools.chain(parts, [""])):
        cleaned.append(clean_func(segment))
        # Normalize %xx escapes (e.g. %2f -> %2F)
        cleaned.append(separator.upper())

    return "".join(cleaned)
232
+
233
+
234
def _clean_link(url: str) -> str:
    """
    Make sure a link is fully quoted.
    For example, if ' ' occurs in the URL, it will be replaced with "%20",
    and without double-quoting other characters.
    """
    # Split the URL into parts according to the general structure
    # `scheme://netloc/path;parameters?query#fragment`.
    parsed = urllib.parse.urlparse(url)
    # An empty netloc means the URL refers to a local filesystem path.
    cleaned_path = _clean_url_path(parsed.path, is_local_path=not parsed.netloc)
    return urllib.parse.urlunparse(parsed._replace(path=cleaned_path))
247
+
248
+
249
def _create_link_from_element(
    element_attribs: Dict[str, Optional[str]],
    page_url: str,
    base_url: str,
) -> Optional[Link]:
    """
    Convert an anchor element's attributes in a simple repository page to a Link.

    Returns None when the anchor has no (non-empty) href.
    """
    href = element_attribs.get("href")
    if not href:
        return None

    return Link(
        _clean_link(urllib.parse.urljoin(base_url, href)),
        comes_from=page_url,
        requires_python=element_attribs.get("data-requires-python"),
        yanked_reason=element_attribs.get("data-yanked"),
    )
273
+
274
+
275
class CacheablePageContent:
    """Hashable wrapper keying a page by its URL, usable as an lru_cache key."""

    def __init__(self, page: "HTMLPage") -> None:
        # Only pages that opted into link-parse caching may be wrapped.
        assert page.cache_link_parsing
        self.page = page

    def __eq__(self, other: object) -> bool:
        if not isinstance(other, type(self)):
            return False
        return self.page.url == other.page.url

    def __hash__(self) -> int:
        return hash(self.page.url)
285
+
286
+
287
class ParseLinks(Protocol):
    """Callback protocol for functions that parse Links out of an HTMLPage."""

    def __call__(
        self, page: "HTMLPage", use_deprecated_html5lib: bool
    ) -> Iterable[Link]:
        ...
292
+
293
+
294
def with_cached_html_pages(fn: ParseLinks) -> ParseLinks:
    """
    Given a function that parses an Iterable[Link] from an HTMLPage, cache the
    function's result (keyed by CacheablePageContent), unless the HTMLPage
    `page` has `page.cache_link_parsing == False`.
    """

    @functools.lru_cache(maxsize=None)
    def cached(key: CacheablePageContent, use_deprecated_html5lib: bool) -> List[Link]:
        # Materialize the iterable so the cached value is reusable.
        return list(fn(key.page, use_deprecated_html5lib))

    @functools.wraps(fn)
    def wrapper(page: "HTMLPage", use_deprecated_html5lib: bool) -> List[Link]:
        if not page.cache_link_parsing:
            return list(fn(page, use_deprecated_html5lib))
        return cached(CacheablePageContent(page), use_deprecated_html5lib)

    return wrapper
314
+
315
+
316
def _parse_links_html5lib(page: "HTMLPage") -> Iterable[Link]:
    """
    Parse an HTML document, and yield its anchor elements as Link objects.

    TODO: Remove when `html5lib` is dropped.
    """
    document = html5lib.parse(
        page.content,
        transport_encoding=page.encoding,
        namespaceHTMLElements=False,
    )

    base_url = _determine_base_url(document, page.url)
    for anchor in document.findall(".//a"):
        link = _create_link_from_element(
            anchor.attrib,
            page_url=page.url,
            base_url=base_url,
        )
        if link is not None:
            yield link
339
+
340
+
341
@with_cached_html_pages
def parse_links(page: "HTMLPage", use_deprecated_html5lib: bool) -> Iterable[Link]:
    """
    Parse an HTML document, and yield its anchor elements as Link objects.

    Uses the stdlib ``html.parser``-based parser for valid HTML 5 documents,
    falling back (with a deprecation warning) to html5lib otherwise or when
    ``use_deprecated_html5lib`` is set.
    """
    encoding = page.encoding or "utf-8"

    # Check if the page starts with a valid doctype, to decide whether to use
    # http.parser or (deprecated) html5lib for parsing -- unless explicitly
    # requested to use html5lib.
    if not use_deprecated_html5lib:
        expected_doctype = "<!doctype html>".encode(encoding)
        actual_start = page.content[: len(expected_doctype)]
        if actual_start.decode(encoding).lower() != "<!doctype html>":
            deprecated(
                reason=(
                    f"The HTML index page being used ({page.url}) is not a proper "
                    "HTML 5 document. This is in violation of PEP 503 which requires "
                    "these pages to be well-formed HTML 5 documents. Please reach out "
                    "to the owners of this index page, and ask them to update this "
                    "index page to a valid HTML 5 document."
                ),
                replacement=None,
                gone_in="22.2",
                issue=10825,
            )
            use_deprecated_html5lib = True

    if use_deprecated_html5lib:
        yield from _parse_links_html5lib(page)
        return

    parser = HTMLLinkParser()
    parser.feed(page.content.decode(encoding))

    url = page.url
    # The first <base href> found by the parser wins; otherwise the page URL.
    base_url = parser.base_url or url
    for anchor in parser.anchors:
        link = _create_link_from_element(
            anchor,
            page_url=url,
            base_url=base_url,
        )
        if link is None:
            continue
        yield link
387
+
388
+
389
class HTMLPage:
    """Represents one page, along with its URL"""

    def __init__(
        self,
        content: bytes,
        encoding: Optional[str],
        url: str,
        cache_link_parsing: bool = True,
    ) -> None:
        """
        :param content: the raw (undecoded) bytes of the page.
        :param encoding: the encoding to decode the given content.
        :param url: the URL from which the HTML was downloaded.
        :param cache_link_parsing: whether links parsed from this page's url
                                   should be cached. PyPI index urls should
                                   have this set to False, for example.
        """
        self.content = content
        self.encoding = encoding
        self.url = url
        self.cache_link_parsing = cache_link_parsing

    def __str__(self) -> str:
        # Hide any credentials embedded in the URL when displaying the page.
        return redact_auth_from_url(self.url)
413
+
414
+
415
class HTMLLinkParser(HTMLParser):
    """
    HTMLParser that keeps the first base HREF and a list of all anchor
    elements' attributes.
    """

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        super().__init__(*args, **kwargs)
        self._seen_decl = False
        self.base_url: Optional[str] = None
        self.anchors: List[Dict[str, Optional[str]]] = []

    def handle_decl(self, decl: str) -> None:
        # The only acceptable declaration is the HTML 5 doctype.
        if decl.lower() != "doctype html":
            self._raise_error()
        self._seen_decl = True

    def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
        # A doctype must precede any tag.
        if not self._seen_decl:
            self._raise_error()

        if tag == "a":
            self.anchors.append(dict(attrs))
        elif tag == "base" and self.base_url is None:
            # Only the first <base href> is kept.
            href = self.get_href(attrs)
            if href is not None:
                self.base_url = href

    def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
        return next((value for name, value in attrs if name == "href"), None)

    def _raise_error(self) -> None:
        raise ValueError(
            "HTML doctype missing or incorrect. Expected <!DOCTYPE html>.\n\n"
            "If you believe this error to be incorrect, try passing the "
            "command line option --use-deprecated=html5lib and please leave "
            "a comment on the pip issue at https://github.com/pypa/pip/issues/10825."
        )
456
+
457
+
458
def _handle_get_page_fail(
    link: Link,
    reason: Union[str, Exception],
    meth: Optional[Callable[..., None]] = None,
) -> None:
    """Log that *link* could not be fetched, via *meth* (default: logger.debug)."""
    log = meth if meth is not None else logger.debug
    log("Could not fetch URL %s: %s - skipping", link, reason)
466
+
467
+
468
def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage:
    """Wrap a fetched *response* in an HTMLPage.

    The page encoding is derived from the response headers.

    :param cache_link_parsing: Forwarded to HTMLPage; pass False for pages
        (such as index URLs) whose parsed links should not be cached.
    """
    return HTMLPage(
        response.content,
        encoding=_get_encoding_from_headers(response.headers),
        url=response.url,
        cache_link_parsing=cache_link_parsing,
    )
476
+
477
+
478
def _get_html_page(
    link: Link, session: Optional[PipSession] = None
) -> Optional["HTMLPage"]:
    """Fetch and return the HTML page for *link*, or None if it is unusable.

    *session* is effectively required: passing None raises TypeError.
    Returns None (after logging) for VCS-scheme URLs, archive-looking or
    non-HTML responses, and any network-level failure. file:// URLs that
    point at a directory are redirected to their index.html.
    """
    if session is None:
        raise TypeError(
            "_get_html_page() missing 1 required keyword argument: 'session'"
        )

    # Drop any URL fragment (e.g. #egg=... or #sha256=...) before fetching.
    url = link.url.split("#", 1)[0]

    # Check for VCS schemes that do not support lookup as web pages.
    vcs_scheme = _match_vcs_scheme(url)
    if vcs_scheme:
        logger.warning(
            "Cannot look at %s URL %s because it does not support lookup as web pages.",
            vcs_scheme,
            link,
        )
        return None

    # Tack index.html onto file:// URLs that point to directories
    scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
    if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
        # add trailing slash if not present so urljoin doesn't trim
        # final segment
        if not url.endswith("/"):
            url += "/"
        url = urllib.parse.urljoin(url, "index.html")
        logger.debug(" file: URL is directory, getting %s", url)

    # Every failure mode below logs and falls through to `return None`;
    # only a successful fetch returns a page (in the `else` branch).
    try:
        resp = _get_html_response(url, session=session)
    except _NotHTTP:
        logger.warning(
            "Skipping page %s because it looks like an archive, and cannot "
            "be checked by a HTTP HEAD request.",
            link,
        )
    except _NotHTML as exc:
        # NOTE(review): there is no space between "%s." and "The only ..."
        # in this message, so the logged text runs the sentences together.
        # Fixing it would change the emitted log line; left as-is here.
        logger.warning(
            "Skipping page %s because the %s request got Content-Type: %s."
            "The only supported Content-Type is text/html",
            link,
            exc.request_desc,
            exc.content_type,
        )
    except NetworkConnectionError as exc:
        _handle_get_page_fail(link, exc)
    except RetryError as exc:
        _handle_get_page_fail(link, exc)
    except SSLError as exc:
        # SSL problems are surfaced at info level rather than debug.
        reason = "There was a problem confirming the ssl certificate: "
        reason += str(exc)
        _handle_get_page_fail(link, reason, meth=logger.info)
    except requests.ConnectionError as exc:
        _handle_get_page_fail(link, f"connection error: {exc}")
    except requests.Timeout:
        _handle_get_page_fail(link, "timed out")
    else:
        return _make_html_page(resp, cache_link_parsing=link.cache_link_parsing)
    return None
539
+
540
+
541
class CollectedSources(NamedTuple):
    """Link sources gathered by LinkCollector.collect_sources(), by origin.

    Entries may be None for configured locations that could not be turned
    into a usable LinkSource.
    """

    # Sources built from --find-links locations.
    find_links: Sequence[Optional[LinkSource]]
    # Sources built from the configured index URLs.
    index_urls: Sequence[Optional[LinkSource]]
544
+
545
+
546
class LinkCollector:

    """
    Responsible for collecting Link objects from all configured locations,
    making network requests as needed.

    The class's main method is its collect_sources() method.
    """

    def __init__(
        self,
        session: PipSession,
        search_scope: SearchScope,
    ) -> None:
        """
        :param session: The PipSession used for all network requests.
        :param search_scope: The index URLs and --find-links locations
            to collect from.
        """
        self.search_scope = search_scope
        self.session = session

    @classmethod
    def create(
        cls,
        session: PipSession,
        options: Values,
        suppress_no_index: bool = False,
    ) -> "LinkCollector":
        """
        Build a LinkCollector from parsed command-line options.

        :param session: The Session to use to make requests.
        :param suppress_no_index: Whether to ignore the --no-index option
            when constructing the SearchScope object.
        """
        index_urls = [options.index_url] + options.extra_index_urls
        if options.no_index and not suppress_no_index:
            # --no-index wipes the index URL list entirely (after logging
            # what is being ignored, with credentials redacted).
            logger.debug(
                "Ignoring indexes: %s",
                ",".join(redact_auth_from_url(url) for url in index_urls),
            )
            index_urls = []

        # Make sure find_links is a list before passing to create().
        find_links = options.find_links or []

        search_scope = SearchScope.create(
            find_links=find_links,
            index_urls=index_urls,
        )
        link_collector = LinkCollector(
            session=session,
            search_scope=search_scope,
        )
        return link_collector

    @property
    def find_links(self) -> List[str]:
        # Convenience passthrough to the underlying SearchScope.
        return self.search_scope.find_links

    def fetch_page(self, location: Link) -> Optional[HTMLPage]:
        """
        Fetch an HTML page containing package links.
        """
        return _get_html_page(location, session=self.session)

    def collect_sources(
        self,
        project_name: str,
        candidates_from_page: CandidatesFromPage,
    ) -> CollectedSources:
        """
        Build the LinkSource objects for *project_name* from the index URLs
        and --find-links locations, deduplicated by URL.
        """
        # The OrderedDict calls deduplicate sources by URL.
        # Index pages are not link-parsing cached (cache_link_parsing=False)
        # and are not treated as expandable directories.
        index_url_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=False,
                cache_link_parsing=False,
            )
            for loc in self.search_scope.get_index_urls_locations(project_name)
        ).values()
        find_links_sources = collections.OrderedDict(
            build_source(
                loc,
                candidates_from_page=candidates_from_page,
                page_validator=self.session.is_secure_origin,
                expand_dir=True,
                cache_link_parsing=True,
            )
            for loc in self.find_links
        ).values()

        # Guard the f-string construction so it only runs when DEBUG
        # logging is actually enabled.
        if logger.isEnabledFor(logging.DEBUG):
            lines = [
                f"* {s.link}"
                for s in itertools.chain(find_links_sources, index_url_sources)
                if s is not None and s.link is not None
            ]
            lines = [
                f"{len(lines)} location(s) to search "
                f"for versions of {project_name}:"
            ] + lines
            logger.debug("\n".join(lines))

        return CollectedSources(
            find_links=list(find_links_sources),
            index_urls=list(index_url_sources),
        )
venv/lib/python3.10/site-packages/pip/_internal/index/package_finder.py ADDED
@@ -0,0 +1,1004 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Routines related to PyPI, indexes"""
2
+
3
+ # The following comment should be removed at some point in the future.
4
+ # mypy: strict-optional=False
5
+
6
+ import functools
7
+ import itertools
8
+ import logging
9
+ import re
10
+ from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union
11
+
12
+ from pip._vendor.packaging import specifiers
13
+ from pip._vendor.packaging.tags import Tag
14
+ from pip._vendor.packaging.utils import canonicalize_name
15
+ from pip._vendor.packaging.version import _BaseVersion
16
+ from pip._vendor.packaging.version import parse as parse_version
17
+
18
+ from pip._internal.exceptions import (
19
+ BestVersionAlreadyInstalled,
20
+ DistributionNotFound,
21
+ InvalidWheelFilename,
22
+ UnsupportedWheel,
23
+ )
24
+ from pip._internal.index.collector import LinkCollector, parse_links
25
+ from pip._internal.models.candidate import InstallationCandidate
26
+ from pip._internal.models.format_control import FormatControl
27
+ from pip._internal.models.link import Link
28
+ from pip._internal.models.search_scope import SearchScope
29
+ from pip._internal.models.selection_prefs import SelectionPreferences
30
+ from pip._internal.models.target_python import TargetPython
31
+ from pip._internal.models.wheel import Wheel
32
+ from pip._internal.req import InstallRequirement
33
+ from pip._internal.utils._log import getLogger
34
+ from pip._internal.utils.filetypes import WHEEL_EXTENSION
35
+ from pip._internal.utils.hashes import Hashes
36
+ from pip._internal.utils.logging import indent_log
37
+ from pip._internal.utils.misc import build_netloc
38
+ from pip._internal.utils.packaging import check_requires_python
39
+ from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS
40
+
41
+ __all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]
42
+
43
+
44
+ logger = getLogger(__name__)
45
+
46
+ BuildTag = Union[Tuple[()], Tuple[int, str]]
47
+ CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]
48
+
49
+
50
def _check_link_requires_python(
    link: Link,
    version_info: Tuple[int, int, int],
    ignore_requires_python: bool = False,
) -> bool:
    """
    Decide whether *link* is usable for the given Python version, based on
    its "Requires-Python" metadata.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: When True, an incompatible
        "Requires-Python" value is logged but does not reject the link.
    :return: True if the link is acceptable, False otherwise.
    """
    try:
        is_compatible = check_requires_python(
            link.requires_python,
            version_info=version_info,
        )
    except specifiers.InvalidSpecifier:
        # An unparseable Requires-Python value is treated as no restriction.
        logger.debug(
            "Ignoring invalid Requires-Python (%r) for link: %s",
            link.requires_python,
            link,
        )
        return True

    if is_compatible:
        return True

    version = ".".join(map(str, version_info))
    if not ignore_requires_python:
        logger.verbose(
            "Link requires a different Python (%s not in: %r): %s",
            version,
            link.requires_python,
            link,
        )
        return False

    # Incompatible, but the caller asked us to ignore Requires-Python.
    logger.debug(
        "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
        version,
        link.requires_python,
        link,
    )
    return True
95
+
96
+
97
class LinkEvaluator:

    """
    Responsible for evaluating links for a particular project.
    """

    # Matches a trailing "-pyX[.Y]" marker in an extracted version string,
    # e.g. "1.0-py3.7".
    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        project_name: str,
        canonical_name: str,
        formats: FrozenSet[str],
        target_python: TargetPython,
        allow_yanked: bool,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        :param project_name: The user supplied package name.
        :param canonical_name: The canonical package name.
        :param formats: The formats allowed for this package. Should be a set
            with 'binary' or 'source' or both in it.
        :param target_python: The target Python interpreter to use when
            evaluating link compatibility. This is used, for example, to
            check wheel compatibility, as well as when checking the Python
            version, e.g. the Python version embedded in a link filename
            (or egg fragment) and against an HTML link's optional PEP 503
            "data-requires-python" attribute.
        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param ignore_requires_python: Whether to ignore incompatible
            PEP 503 "data-requires-python" values in HTML links. Defaults
            to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self._allow_yanked = allow_yanked
        self._canonical_name = canonical_name
        self._ignore_requires_python = ignore_requires_python
        self._formats = formats
        self._target_python = target_python

        self.project_name = project_name

    def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]:
        """
        Determine whether a link is a candidate for installation.

        :return: A tuple (is_candidate, result), where `result` is (1) a
            version string if `is_candidate` is True, and (2) if
            `is_candidate` is False, an optional string to log the reason
            the link fails to qualify.
        """
        version = None
        # Yanked links are rejected first, unless explicitly allowed.
        if link.is_yanked and not self._allow_yanked:
            reason = link.yanked_reason or "<none given>"
            return (False, f"yanked for reason: {reason}")

        if link.egg_fragment:
            # An #egg=... fragment names the project directly; the file
            # extension checks below are skipped for these links.
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                return (False, "not a file")
            if ext not in SUPPORTED_EXTENSIONS:
                return (False, f"unsupported archive format: {ext}")
            if "binary" not in self._formats and ext == WHEEL_EXTENSION:
                reason = "No binaries permitted for {}".format(self.project_name)
                return (False, reason)
            if "macosx10" in link.path and ext == ".zip":
                return (False, "macosx10 one")
            if ext == WHEEL_EXTENSION:
                try:
                    wheel = Wheel(link.filename)
                except InvalidWheelFilename:
                    return (False, "invalid wheel filename")
                if canonicalize_name(wheel.name) != self._canonical_name:
                    reason = "wrong project name (not {})".format(self.project_name)
                    return (False, reason)

                supported_tags = self._target_python.get_tags()
                if not wheel.supported(supported_tags):
                    # Include the wheel's tags in the reason string to
                    # simplify troubleshooting compatibility issues.
                    file_tags = wheel.get_formatted_file_tags()
                    reason = (
                        "none of the wheel's tags ({}) are compatible "
                        "(run pip debug --verbose to show compatible tags)".format(
                            ", ".join(file_tags)
                        )
                    )
                    return (False, reason)

                version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
            reason = f"No sources permitted for {self.project_name}"
            return (False, reason)

        # For non-wheel links the version comes from the filename/fragment.
        if not version:
            version = _extract_version_from_fragment(
                egg_info,
                self._canonical_name,
            )
        if not version:
            reason = f"Missing project version for {self.project_name}"
            return (False, reason)

        # Strip a trailing "-pyX.Y" marker and check it against the target.
        match = self._py_version_re.search(version)
        if match:
            version = version[: match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
                return (False, "Python version is incorrect")

        supports_python = _check_link_requires_python(
            link,
            version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
            # Return None for the reason text to suppress calling
            # _log_skipped_link().
            return (False, None)

        logger.debug("Found link %s, version: %s", link, version)

        return (True, version)
+ return (True, version)
232
+
233
+
234
def filter_unallowed_hashes(
    candidates: List[InstallationCandidate],
    hashes: Hashes,
    project_name: str,
) -> List[InstallationCandidate]:
    """
    Return a new list of candidates with hash-disallowed entries removed.

    If at least one candidate carries an allowed hash, the result contains
    every candidate whose hash is allowed plus every candidate with no hash
    at all; otherwise a copy of the input is returned unchanged.

    Keeping the hashless candidates when there is a match allows a warning
    to be logged if a more preferred candidate has no hash. Returning
    everything when nothing matches lets pip report the hash of the
    candidate that would otherwise have been installed (e.g. so the user
    can update their requirements file with the desired hash).
    """
    if not hashes:
        logger.debug(
            "Given no hashes to check %s links for project %r: "
            "discarding no candidates",
            len(candidates),
            project_name,
        )
        # Never hand the caller's own list object straight back.
        return list(candidates)

    # Candidates whose hash is allowed, plus those with no hash at all.
    kept = []
    # Candidates with a hash that is not allowed (for logging below).
    rejected = []
    allowed_count = 0
    for candidate in candidates:
        candidate_link = candidate.link
        if candidate_link.has_hash and not candidate_link.is_hash_allowed(
            hashes=hashes
        ):
            rejected.append(candidate)
            continue
        if candidate_link.has_hash:
            allowed_count += 1
        kept.append(candidate)

    if allowed_count:
        filtered = kept
    else:
        # Never hand the caller's own list object straight back.
        filtered = list(candidates)

    if len(filtered) == len(candidates):
        discard_message = "discarding no candidates"
    else:
        rejected_links = "\n ".join(str(candidate.link) for candidate in rejected)
        discard_message = (
            f"discarding {len(rejected)} non-matches:\n {rejected_links}"
        )

    logger.debug(
        "Checked %s links for project %r against %s hashes "
        "(%s matches, %s no digest): %s",
        len(candidates),
        project_name,
        hashes.digest_count,
        allowed_count,
        len(kept) - allowed_count,
        discard_message,
    )

    return filtered
306
+
307
+
308
class CandidatePreferences:

    """
    Encapsulates some of the preferences for filtering and sorting
    InstallationCandidate objects.
    """

    def __init__(
        self,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
    ) -> None:
        """
        :param prefer_binary: Whether to prefer binary (wheel) candidates
            over source candidates when sorting.
        :param allow_all_prereleases: Whether to allow all pre-releases.
        """
        self.allow_all_prereleases = allow_all_prereleases
        self.prefer_binary = prefer_binary
+
326
+
327
class BestCandidateResult:
    """The outcome of a best-candidate search over a set of candidates.

    Returned by `PackageFinder.find_best_candidate` and only meant to be
    constructed by CandidateEvaluator's `compute_best_candidate()`.
    """

    def __init__(
        self,
        candidates: List[InstallationCandidate],
        applicable_candidates: List[InstallationCandidate],
        best_candidate: Optional[InstallationCandidate],
    ) -> None:
        """
        :param candidates: Every candidate that was found.
        :param applicable_candidates: The subset of *candidates* that is
            actually applicable.
        :param best_candidate: The most preferred applicable candidate, or
            None when there are no applicable candidates.
        """
        # Sanity-check the invariants the caller is expected to uphold:
        # applicable ⊆ all, and best is applicable iff one exists.
        assert set(applicable_candidates) <= set(candidates)
        if best_candidate is None:
            assert not applicable_candidates
        else:
            assert best_candidate in applicable_candidates

        self._candidates = candidates
        self._applicable_candidates = applicable_candidates
        self.best_candidate = best_candidate

    def iter_all(self) -> Iterable[InstallationCandidate]:
        """Yield every candidate that was found."""
        return iter(self._candidates)

    def iter_applicable(self) -> Iterable[InstallationCandidate]:
        """Yield only the applicable candidates."""
        return iter(self._applicable_candidates)
+ return iter(self._applicable_candidates)
365
+
366
+
367
class CandidateEvaluator:

    """
    Responsible for filtering and sorting candidates for installation based
    on what tags are valid.
    """

    @classmethod
    def create(
        cls,
        project_name: str,
        target_python: Optional[TargetPython] = None,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> "CandidateEvaluator":
        """Create a CandidateEvaluator object.

        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.
        :param hashes: An optional collection of allowed hashes.
        """
        if target_python is None:
            target_python = TargetPython()
        if specifier is None:
            # No specifier means "accept any version".
            specifier = specifiers.SpecifierSet()

        supported_tags = target_python.get_tags()

        return cls(
            project_name=project_name,
            supported_tags=supported_tags,
            specifier=specifier,
            prefer_binary=prefer_binary,
            allow_all_prereleases=allow_all_prereleases,
            hashes=hashes,
        )

    def __init__(
        self,
        project_name: str,
        supported_tags: List[Tag],
        specifier: specifiers.BaseSpecifier,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        hashes: Optional[Hashes] = None,
    ) -> None:
        """
        :param supported_tags: The PEP 425 tags supported by the target
            Python in order of preference (most preferred first).
        """
        self._allow_all_prereleases = allow_all_prereleases
        self._hashes = hashes
        self._prefer_binary = prefer_binary
        self._project_name = project_name
        self._specifier = specifier
        self._supported_tags = supported_tags
        # Since the index of the tag in the _supported_tags list is used
        # as a priority, precompute a map from tag to index/priority to be
        # used in wheel.find_most_preferred_tag.
        self._wheel_tag_preferences = {
            tag: idx for idx, tag in enumerate(supported_tags)
        }

    def get_applicable_candidates(
        self,
        candidates: List[InstallationCandidate],
    ) -> List[InstallationCandidate]:
        """
        Return the applicable candidates from a list of candidates.
        """
        # Using None infers from the specifier instead.
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier
        versions = {
            str(v)
            for v in specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                (str(c.version) for c in candidates),
                prereleases=allow_prereleases,
            )
        }

        # Again, converting version to str to deal with debundling.
        applicable_candidates = [c for c in candidates if str(c.version) in versions]

        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,
            hashes=self._hashes,
            project_name=self._project_name,
        )

        # The returned list is sorted least-preferred first (see _sort_key).
        return sorted(filtered_applicable_candidates, key=self._sort_key)

    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
        """
        Function to pass as the `key` argument to a call to sorted() to sort
        InstallationCandidates by preference.

        Returns a tuple such that tuples sorting as greater using Python's
        default comparison operator are more preferred.

        The preference is as follows:

        First and foremost, candidates with allowed (matching) hashes are
        always preferred over candidates without matching hashes. This is
        because e.g. if the only candidate with an allowed hash is yanked,
        we still want to use that candidate.

        Second, excepting hash considerations, candidates that have been
        yanked (in the sense of PEP 592) are always less preferred than
        candidates that haven't been yanked. Then:

        If not finding wheels, they are sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.

        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        valid_tags = self._supported_tags
        support_num = len(valid_tags)
        build_tag: BuildTag = ()
        binary_preference = 0
        link = candidate.link
        if link.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            try:
                # Negated so that a more-preferred tag (smaller index)
                # yields a larger sort key.
                pri = -(
                    wheel.find_most_preferred_tag(
                        valid_tags, self._wheel_tag_preferences
                    )
                )
            except ValueError:
                raise UnsupportedWheel(
                    "{} is not a supported wheel for this platform. It "
                    "can't be sorted.".format(wheel.filename)
                )
            if self._prefer_binary:
                binary_preference = 1
            if wheel.build_tag is not None:
                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            # Sorts below every wheel (lowest possible tag priority).
            pri = -(support_num)
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash,
            yank_value,
            binary_preference,
            candidate.version,
            pri,
            build_tag,
        )

    def sort_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> Optional[InstallationCandidate]:
        """
        Return the best candidate per the instance's sort order, or None if
        no candidate is acceptable.
        """
        if not candidates:
            return None
        best_candidate = max(candidates, key=self._sort_key)
        return best_candidate

    def compute_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> BestCandidateResult:
        """
        Compute and return a `BestCandidateResult` instance.
        """
        applicable_candidates = self.get_applicable_candidates(candidates)

        best_candidate = self.sort_best_candidate(applicable_candidates)

        return BestCandidateResult(
            candidates,
            applicable_candidates=applicable_candidates,
            best_candidate=best_candidate,
        )
+ )
569
+
570
+
571
+ class PackageFinder:
572
+ """This finds packages.
573
+
574
+ This is meant to match easy_install's technique for looking for
575
+ packages, by reading pages and looking for appropriate links.
576
+ """
577
+
578
+ def __init__(
579
+ self,
580
+ link_collector: LinkCollector,
581
+ target_python: TargetPython,
582
+ allow_yanked: bool,
583
+ use_deprecated_html5lib: bool,
584
+ format_control: Optional[FormatControl] = None,
585
+ candidate_prefs: Optional[CandidatePreferences] = None,
586
+ ignore_requires_python: Optional[bool] = None,
587
+ ) -> None:
588
+ """
589
+ This constructor is primarily meant to be used by the create() class
590
+ method and from tests.
591
+
592
+ :param format_control: A FormatControl object, used to control
593
+ the selection of source packages / binary packages when consulting
594
+ the index and links.
595
+ :param candidate_prefs: Options to use when creating a
596
+ CandidateEvaluator object.
597
+ """
598
+ if candidate_prefs is None:
599
+ candidate_prefs = CandidatePreferences()
600
+
601
+ format_control = format_control or FormatControl(set(), set())
602
+
603
+ self._allow_yanked = allow_yanked
604
+ self._candidate_prefs = candidate_prefs
605
+ self._ignore_requires_python = ignore_requires_python
606
+ self._link_collector = link_collector
607
+ self._target_python = target_python
608
+ self._use_deprecated_html5lib = use_deprecated_html5lib
609
+
610
+ self.format_control = format_control
611
+
612
+ # These are boring links that have already been logged somehow.
613
+ self._logged_links: Set[Link] = set()
614
+
615
+ # Don't include an allow_yanked default value to make sure each call
616
+ # site considers whether yanked releases are allowed. This also causes
617
+ # that decision to be made explicit in the calling code, which helps
618
+ # people when reading the code.
619
+ @classmethod
620
+ def create(
621
+ cls,
622
+ link_collector: LinkCollector,
623
+ selection_prefs: SelectionPreferences,
624
+ target_python: Optional[TargetPython] = None,
625
+ *,
626
+ use_deprecated_html5lib: bool,
627
+ ) -> "PackageFinder":
628
+ """Create a PackageFinder.
629
+
630
+ :param selection_prefs: The candidate selection preferences, as a
631
+ SelectionPreferences object.
632
+ :param target_python: The target Python interpreter to use when
633
+ checking compatibility. If None (the default), a TargetPython
634
+ object will be constructed from the running Python.
635
+ """
636
+ if target_python is None:
637
+ target_python = TargetPython()
638
+
639
+ candidate_prefs = CandidatePreferences(
640
+ prefer_binary=selection_prefs.prefer_binary,
641
+ allow_all_prereleases=selection_prefs.allow_all_prereleases,
642
+ )
643
+
644
+ return cls(
645
+ candidate_prefs=candidate_prefs,
646
+ link_collector=link_collector,
647
+ target_python=target_python,
648
+ allow_yanked=selection_prefs.allow_yanked,
649
+ format_control=selection_prefs.format_control,
650
+ ignore_requires_python=selection_prefs.ignore_requires_python,
651
+ use_deprecated_html5lib=use_deprecated_html5lib,
652
+ )
653
+
654
+ @property
655
+ def target_python(self) -> TargetPython:
656
+ return self._target_python
657
+
658
+ @property
659
+ def search_scope(self) -> SearchScope:
660
+ return self._link_collector.search_scope
661
+
662
+ @search_scope.setter
663
+ def search_scope(self, search_scope: SearchScope) -> None:
664
+ self._link_collector.search_scope = search_scope
665
+
666
+ @property
667
+ def find_links(self) -> List[str]:
668
+ return self._link_collector.find_links
669
+
670
+ @property
671
+ def index_urls(self) -> List[str]:
672
+ return self.search_scope.index_urls
673
+
674
+ @property
675
+ def trusted_hosts(self) -> Iterable[str]:
676
+ for host_port in self._link_collector.session.pip_trusted_origins:
677
+ yield build_netloc(*host_port)
678
+
679
+ @property
680
+ def allow_all_prereleases(self) -> bool:
681
+ return self._candidate_prefs.allow_all_prereleases
682
+
683
+ def set_allow_all_prereleases(self) -> None:
684
+ self._candidate_prefs.allow_all_prereleases = True
685
+
686
+ @property
687
+ def prefer_binary(self) -> bool:
688
+ return self._candidate_prefs.prefer_binary
689
+
690
+ def set_prefer_binary(self) -> None:
691
+ self._candidate_prefs.prefer_binary = True
692
+
693
+ def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
694
+ canonical_name = canonicalize_name(project_name)
695
+ formats = self.format_control.get_allowed_formats(canonical_name)
696
+
697
+ return LinkEvaluator(
698
+ project_name=project_name,
699
+ canonical_name=canonical_name,
700
+ formats=formats,
701
+ target_python=self._target_python,
702
+ allow_yanked=self._allow_yanked,
703
+ ignore_requires_python=self._ignore_requires_python,
704
+ )
705
+
706
+ def _sort_links(self, links: Iterable[Link]) -> List[Link]:
707
+ """
708
+ Returns elements of links in order, non-egg links first, egg links
709
+ second, while eliminating duplicates
710
+ """
711
+ eggs, no_eggs = [], []
712
+ seen: Set[Link] = set()
713
+ for link in links:
714
+ if link not in seen:
715
+ seen.add(link)
716
+ if link.egg_fragment:
717
+ eggs.append(link)
718
+ else:
719
+ no_eggs.append(link)
720
+ return no_eggs + eggs
721
+
722
+ def _log_skipped_link(self, link: Link, reason: str) -> None:
723
+ if link not in self._logged_links:
724
+ # Put the link at the end so the reason is more visible and because
725
+ # the link string is usually very long.
726
+ logger.debug("Skipping link: %s: %s", reason, link)
727
+ self._logged_links.add(link)
728
+
729
+ def get_install_candidate(
730
+ self, link_evaluator: LinkEvaluator, link: Link
731
+ ) -> Optional[InstallationCandidate]:
732
+ """
733
+ If the link is a candidate for install, convert it to an
734
+ InstallationCandidate and return it. Otherwise, return None.
735
+ """
736
+ is_candidate, result = link_evaluator.evaluate_link(link)
737
+ if not is_candidate:
738
+ if result:
739
+ self._log_skipped_link(link, reason=result)
740
+ return None
741
+
742
+ return InstallationCandidate(
743
+ name=link_evaluator.project_name,
744
+ link=link,
745
+ version=result,
746
+ )
747
+
748
+ def evaluate_links(
749
+ self, link_evaluator: LinkEvaluator, links: Iterable[Link]
750
+ ) -> List[InstallationCandidate]:
751
+ """
752
+ Convert links that are candidates to InstallationCandidate objects.
753
+ """
754
+ candidates = []
755
+ for link in self._sort_links(links):
756
+ candidate = self.get_install_candidate(link_evaluator, link)
757
+ if candidate is not None:
758
+ candidates.append(candidate)
759
+
760
+ return candidates
761
+
762
+ def process_project_url(
763
+ self, project_url: Link, link_evaluator: LinkEvaluator
764
+ ) -> List[InstallationCandidate]:
765
+ logger.debug(
766
+ "Fetching project page and analyzing links: %s",
767
+ project_url,
768
+ )
769
+ html_page = self._link_collector.fetch_page(project_url)
770
+ if html_page is None:
771
+ return []
772
+
773
+ page_links = list(parse_links(html_page, self._use_deprecated_html5lib))
774
+
775
+ with indent_log():
776
+ package_links = self.evaluate_links(
777
+ link_evaluator,
778
+ links=page_links,
779
+ )
780
+
781
+ return package_links
782
+
783
+ @functools.lru_cache(maxsize=None)
784
+ def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
785
+ """Find all available InstallationCandidate for project_name
786
+
787
+ This checks index_urls and find_links.
788
+ All versions found are returned as an InstallationCandidate list.
789
+
790
+ See LinkEvaluator.evaluate_link() for details on which files
791
+ are accepted.
792
+ """
793
+ link_evaluator = self.make_link_evaluator(project_name)
794
+
795
+ collected_sources = self._link_collector.collect_sources(
796
+ project_name=project_name,
797
+ candidates_from_page=functools.partial(
798
+ self.process_project_url,
799
+ link_evaluator=link_evaluator,
800
+ ),
801
+ )
802
+
803
+ page_candidates_it = itertools.chain.from_iterable(
804
+ source.page_candidates()
805
+ for sources in collected_sources
806
+ for source in sources
807
+ if source is not None
808
+ )
809
+ page_candidates = list(page_candidates_it)
810
+
811
+ file_links_it = itertools.chain.from_iterable(
812
+ source.file_links()
813
+ for sources in collected_sources
814
+ for source in sources
815
+ if source is not None
816
+ )
817
+ file_candidates = self.evaluate_links(
818
+ link_evaluator,
819
+ sorted(file_links_it, reverse=True),
820
+ )
821
+
822
+ if logger.isEnabledFor(logging.DEBUG) and file_candidates:
823
+ paths = []
824
+ for candidate in file_candidates:
825
+ assert candidate.link.url # we need to have a URL
826
+ try:
827
+ paths.append(candidate.link.file_path)
828
+ except Exception:
829
+ paths.append(candidate.link.url) # it's not a local file
830
+
831
+ logger.debug("Local files found: %s", ", ".join(paths))
832
+
833
+ # This is an intentional priority ordering
834
+ return file_candidates + page_candidates
835
+
836
+ def make_candidate_evaluator(
837
+ self,
838
+ project_name: str,
839
+ specifier: Optional[specifiers.BaseSpecifier] = None,
840
+ hashes: Optional[Hashes] = None,
841
+ ) -> CandidateEvaluator:
842
+ """Create a CandidateEvaluator object to use."""
843
+ candidate_prefs = self._candidate_prefs
844
+ return CandidateEvaluator.create(
845
+ project_name=project_name,
846
+ target_python=self._target_python,
847
+ prefer_binary=candidate_prefs.prefer_binary,
848
+ allow_all_prereleases=candidate_prefs.allow_all_prereleases,
849
+ specifier=specifier,
850
+ hashes=hashes,
851
+ )
852
+
853
+ @functools.lru_cache(maxsize=None)
854
+ def find_best_candidate(
855
+ self,
856
+ project_name: str,
857
+ specifier: Optional[specifiers.BaseSpecifier] = None,
858
+ hashes: Optional[Hashes] = None,
859
+ ) -> BestCandidateResult:
860
+ """Find matches for the given project and specifier.
861
+
862
+ :param specifier: An optional object implementing `filter`
863
+ (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
864
+ versions.
865
+
866
+ :return: A `BestCandidateResult` instance.
867
+ """
868
+ candidates = self.find_all_candidates(project_name)
869
+ candidate_evaluator = self.make_candidate_evaluator(
870
+ project_name=project_name,
871
+ specifier=specifier,
872
+ hashes=hashes,
873
+ )
874
+ return candidate_evaluator.compute_best_candidate(candidates)
875
+
876
+ def find_requirement(
877
+ self, req: InstallRequirement, upgrade: bool
878
+ ) -> Optional[InstallationCandidate]:
879
+ """Try to find a Link matching req
880
+
881
+ Expects req, an InstallRequirement and upgrade, a boolean
882
+ Returns a InstallationCandidate if found,
883
+ Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
884
+ """
885
+ hashes = req.hashes(trust_internet=False)
886
+ best_candidate_result = self.find_best_candidate(
887
+ req.name,
888
+ specifier=req.specifier,
889
+ hashes=hashes,
890
+ )
891
+ best_candidate = best_candidate_result.best_candidate
892
+
893
+ installed_version: Optional[_BaseVersion] = None
894
+ if req.satisfied_by is not None:
895
+ installed_version = req.satisfied_by.version
896
+
897
+ def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
898
+ # This repeated parse_version and str() conversion is needed to
899
+ # handle different vendoring sources from pip and pkg_resources.
900
+ # If we stop using the pkg_resources provided specifier and start
901
+ # using our own, we can drop the cast to str().
902
+ return (
903
+ ", ".join(
904
+ sorted(
905
+ {str(c.version) for c in cand_iter},
906
+ key=parse_version,
907
+ )
908
+ )
909
+ or "none"
910
+ )
911
+
912
+ if installed_version is None and best_candidate is None:
913
+ logger.critical(
914
+ "Could not find a version that satisfies the requirement %s "
915
+ "(from versions: %s)",
916
+ req,
917
+ _format_versions(best_candidate_result.iter_all()),
918
+ )
919
+
920
+ raise DistributionNotFound(
921
+ "No matching distribution found for {}".format(req)
922
+ )
923
+
924
+ best_installed = False
925
+ if installed_version and (
926
+ best_candidate is None or best_candidate.version <= installed_version
927
+ ):
928
+ best_installed = True
929
+
930
+ if not upgrade and installed_version is not None:
931
+ if best_installed:
932
+ logger.debug(
933
+ "Existing installed version (%s) is most up-to-date and "
934
+ "satisfies requirement",
935
+ installed_version,
936
+ )
937
+ else:
938
+ logger.debug(
939
+ "Existing installed version (%s) satisfies requirement "
940
+ "(most up-to-date version is %s)",
941
+ installed_version,
942
+ best_candidate.version,
943
+ )
944
+ return None
945
+
946
+ if best_installed:
947
+ # We have an existing version, and its the best version
948
+ logger.debug(
949
+ "Installed version (%s) is most up-to-date (past versions: %s)",
950
+ installed_version,
951
+ _format_versions(best_candidate_result.iter_applicable()),
952
+ )
953
+ raise BestVersionAlreadyInstalled
954
+
955
+ logger.debug(
956
+ "Using version %s (newest of versions: %s)",
957
+ best_candidate.version,
958
+ _format_versions(best_candidate_result.iter_applicable()),
959
+ )
960
+ return best_candidate
961
+
962
+
963
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Return the index of the dash separating name and version in *fragment*.

    :param fragment: A ``<package>-<version>`` filename "fragment" (stem)
        or egg fragment.
    :param canonical_name: The package's canonical name.

    Needed because the canonicalized name does not necessarily have the
    same length as the egg info's name part.  An example::

        >>> fragment = 'foo__bar-1.0'
        >>> canonical_name = 'foo-bar'
        >>> _find_name_version_sep(fragment, canonical_name)
        8
    """
    # Name and version are separated by exactly one dash, but the name
    # itself may contain dashes.  Probe each dash: the first prefix that
    # canonicalizes to the expected name marks the separator.
    for index, char in enumerate(fragment):
        if char != "-":
            continue
        if canonicalize_name(fragment[:index]) == canonical_name:
            return index
    raise ValueError(f"{fragment} does not match {canonical_name}")
987
+
988
+
989
def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
    """Parse the version out of a ``<package>-<version>`` fragment.

    :param fragment: The string to parse, e.g. ``foo-2.1``.
    :param canonical_name: The canonicalized name of the owning package.
    :return: The version string, or None when *fragment* does not match
        *canonical_name* or carries no version at all.
    """
    try:
        sep_index = _find_name_version_sep(fragment, canonical_name)
    except ValueError:
        return None
    version = fragment[sep_index + 1 :]
    return version or None
venv/lib/python3.10/site-packages/pip/_internal/index/sources.py ADDED
@@ -0,0 +1,224 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import logging
2
+ import mimetypes
3
+ import os
4
+ import pathlib
5
+ from typing import Callable, Iterable, Optional, Tuple
6
+
7
+ from pip._internal.models.candidate import InstallationCandidate
8
+ from pip._internal.models.link import Link
9
+ from pip._internal.utils.urls import path_to_url, url_to_path
10
+ from pip._internal.vcs import is_url
11
+
12
logger = logging.getLogger(__name__)

# Type aliases describing how link sources interact with the finder.
FoundCandidates = Iterable[InstallationCandidate]
FoundLinks = Iterable[Link]
# Callback turning one page link into installation candidates.
CandidatesFromPage = Callable[[Link], Iterable[InstallationCandidate]]
# Predicate deciding whether a page link may be fetched at all.
PageValidator = Callable[[Link], bool]
18
+
19
+
20
class LinkSource:
    """Abstract producer of installation candidates and direct file links."""

    @property
    def link(self) -> Optional[Link]:
        """Returns the underlying link, if there's one."""
        raise NotImplementedError()

    def page_candidates(self) -> FoundCandidates:
        """Candidates found by parsing an archive listing HTML file."""
        raise NotImplementedError()

    def file_links(self) -> FoundLinks:
        """Links found by specifying archives directly."""
        raise NotImplementedError()
33
+
34
+
35
+ def _is_html_file(file_url: str) -> bool:
36
+ return mimetypes.guess_type(file_url, strict=False)[0] == "text/html"
37
+
38
+
39
class _FlatDirectorySource(LinkSource):
    """Link source specified by ``--find-links=<path-to-dir>``.

    Looks at the directory contents and returns:

    * ``page_candidates``: Links listed on each HTML file in the directory.
    * ``file_candidates``: Archives in the directory.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        path: str,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        # Resolve symlinks up front so every entry gets a stable URL.
        self._path = pathlib.Path(os.path.realpath(path))

    @property
    def link(self) -> Optional[Link]:
        # A directory has no single link representing it.
        return None

    def page_candidates(self) -> FoundCandidates:
        for entry in self._path.iterdir():
            entry_url = path_to_url(str(entry))
            if _is_html_file(entry_url):
                yield from self._candidates_from_page(Link(entry_url))

    def file_links(self) -> FoundLinks:
        for entry in self._path.iterdir():
            entry_url = path_to_url(str(entry))
            if not _is_html_file(entry_url):
                yield Link(entry_url)
73
+
74
+
75
class _LocalFileSource(LinkSource):
    """``--find-links=<path-or-url>`` or ``--[extra-]index-url=<path-or-url>``.

    A supplied URL must be a ``file:`` URL; a plain path is converted to a
    URL first.  Returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only an HTML file yields page candidates.
        if _is_html_file(self._link.url):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # A non-HTML file is itself the (single) file link.
        if not _is_html_file(self._link.url):
            yield self._link
106
+
107
+
108
class _RemoteFileSource(LinkSource):
    """``--find-links=<url>`` or ``--[extra-]index-url=<url>``.

    Returns:

    * ``page_candidates``: Links listed on an HTML file.
    * ``file_candidates``: The non-HTML file.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        page_validator: PageValidator,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._page_validator = page_validator
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        # Only fetch pages the collector's validator allows.
        if self._page_validator(self._link):
            yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        yield self._link
138
+
139
+
140
class _IndexDirectorySource(LinkSource):
    """``--[extra-]index-url=<path-to-directory>``.

    This is treated like a remote URL; ``candidates_from_page`` contains
    logic for this by appending ``index.html`` to the link.
    """

    def __init__(
        self,
        candidates_from_page: CandidatesFromPage,
        link: Link,
    ) -> None:
        self._candidates_from_page = candidates_from_page
        self._link = link

    @property
    def link(self) -> Optional[Link]:
        return self._link

    def page_candidates(self) -> FoundCandidates:
        yield from self._candidates_from_page(self._link)

    def file_links(self) -> FoundLinks:
        # An index directory never contributes direct file links.
        return ()
164
+
165
+
166
def build_source(
    location: str,
    *,
    candidates_from_page: CandidatesFromPage,
    page_validator: PageValidator,
    expand_dir: bool,
    cache_link_parsing: bool,
) -> Tuple[Optional[str], Optional[LinkSource]]:
    """Resolve a --find-links/--index-url *location* into a LinkSource.

    :param location: A local path, a ``file:`` URL, or a remote URL.
    :param expand_dir: When *location* is a directory, treat it as a flat
        find-links directory rather than an index root.
    :return: ``(url, source)``; either element is None when *location* is
        unusable (a warning is logged in that case).
    """
    # Normalize to a URL, remembering the local path when there is one.
    path: Optional[str] = None
    url: Optional[str] = None
    if os.path.exists(location):  # Is a local path.
        url = path_to_url(location)
        path = location
    elif location.startswith("file:"):  # A file: URL.
        url = location
        path = url_to_path(location)
    elif is_url(location):
        url = location

    if url is None:
        logger.warning(
            "Location '%s' is ignored: "
            "it is either a non-existing path or lacks a specific scheme.",
            location,
        )
        return (None, None)

    if path is None:
        # No local path at all: the location is remote.
        remote_source: LinkSource = _RemoteFileSource(
            candidates_from_page=candidates_from_page,
            page_validator=page_validator,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, remote_source)

    if os.path.isdir(path):
        if expand_dir:
            source: LinkSource = _FlatDirectorySource(
                candidates_from_page=candidates_from_page,
                path=path,
            )
        else:
            source = _IndexDirectorySource(
                candidates_from_page=candidates_from_page,
                link=Link(url, cache_link_parsing=cache_link_parsing),
            )
        return (url, source)

    if os.path.isfile(path):
        file_source: LinkSource = _LocalFileSource(
            candidates_from_page=candidates_from_page,
            link=Link(url, cache_link_parsing=cache_link_parsing),
        )
        return (url, file_source)

    logger.warning(
        "Location '%s' is ignored: it is neither a file nor a directory.",
        location,
    )
    return (url, None)
venv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (244 Bytes). View file
 
venv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/download.cpython-310.pyc ADDED
Binary file (5.49 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/network/__pycache__/utils.cpython-310.pyc ADDED
Binary file (1.44 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/network/cache.py ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """HTTP cache implementation.
2
+ """
3
+
4
+ import os
5
+ from contextlib import contextmanager
6
+ from typing import Iterator, Optional
7
+
8
+ from pip._vendor.cachecontrol.cache import BaseCache
9
+ from pip._vendor.cachecontrol.caches import FileCache
10
+ from pip._vendor.requests.models import Response
11
+
12
+ from pip._internal.utils.filesystem import adjacent_tmp_file, replace
13
+ from pip._internal.utils.misc import ensure_dir
14
+
15
+
16
def is_from_cache(response: Response) -> bool:
    """Return True when *response* was answered from the local HTTP cache.

    CacheControl sets a ``from_cache`` attribute on responses it serves
    itself; anything without the attribute came over the network.
    """
    return getattr(response, "from_cache", False)
18
+
19
+
20
@contextmanager
def suppressed_cache_errors() -> Iterator[None]:
    """Swallow OSError so an unusable cache simply disables caching.

    If we cannot read or write the cache directory, requests proceed as
    though caching were turned off; all other exceptions propagate.
    """
    try:
        yield
    except OSError:
        pass
29
+
30
+
31
class SafeFileCache(BaseCache):
    """A file based cache which is safe to use even when the target
    directory may not be accessible or writable.
    """

    def __init__(self, directory: str) -> None:
        assert directory is not None, "Cache directory must not be None."
        super().__init__()
        self.directory = directory

    def _get_cache_path(self, name: str) -> str:
        # Mirrors cachecontrol.caches.file_cache.FileCache._fn, brought in
        # for backwards compatibility and to avoid using a non-public
        # method.
        hashed = FileCache.encode(name)
        parts = list(hashed[:5]) + [hashed]
        return os.path.join(self.directory, *parts)

    def get(self, key: str) -> Optional[bytes]:
        """Read a cached value; None on a miss or any filesystem error."""
        with suppressed_cache_errors():
            with open(self._get_cache_path(key), "rb") as f:
                return f.read()

    def set(self, key: str, value: bytes, expires: Optional[int] = None) -> None:
        """Write *value* atomically, skipping silently on filesystem errors."""
        path = self._get_cache_path(key)
        with suppressed_cache_errors():
            ensure_dir(os.path.dirname(path))
            # Write a sibling temp file, then rename into place so readers
            # never observe a partially written entry.
            with adjacent_tmp_file(path) as f:
                f.write(value)
            replace(f.name, path)

    def delete(self, key: str) -> None:
        """Remove a cached entry, ignoring filesystem errors."""
        with suppressed_cache_errors():
            os.remove(self._get_cache_path(key))
venv/lib/python3.10/site-packages/pip/_internal/network/lazy_wheel.py ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Lazy ZIP over HTTP"""
2
+
3
+ __all__ = ["HTTPRangeRequestUnsupported", "dist_from_wheel_url"]
4
+
5
+ from bisect import bisect_left, bisect_right
6
+ from contextlib import contextmanager
7
+ from tempfile import NamedTemporaryFile
8
+ from typing import Any, Dict, Iterator, List, Optional, Tuple
9
+ from zipfile import BadZipfile, ZipFile
10
+
11
+ from pip._vendor.packaging.utils import canonicalize_name
12
+ from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response
13
+
14
+ from pip._internal.metadata import BaseDistribution, MemoryWheel, get_wheel_distribution
15
+ from pip._internal.network.session import PipSession
16
+ from pip._internal.network.utils import HEADERS, raise_for_status, response_chunks
17
+
18
+
19
class HTTPRangeRequestUnsupported(Exception):
    """Raised when the remote server does not honor HTTP range requests."""
21
+
22
+
23
def dist_from_wheel_url(name: str, url: str, session: PipSession) -> BaseDistribution:
    """Return a distribution object built from the wheel at *url*.

    HTTP range requests fetch only the portion of the wheel that holds
    the metadata — just enough for the object to be constructed.  Raises
    HTTPRangeRequestUnsupported when the server cannot serve ranges.
    """
    with LazyZipOverHTTP(url, session) as lazy_file:
        # For read-only ZIP files, ZipFile only needs methods read, seek,
        # seekable and tell, not the whole IO protocol.
        wheel = MemoryWheel(lazy_file.name, lazy_file)  # type: ignore
        # After the context manager exits, wheel.name points at an
        # intentionally invalid file.
        return get_wheel_distribution(wheel, canonicalize_name(name))
38
+
39
+
40
class LazyZipOverHTTP:
    """File-like object mapped onto a remote ZIP file over HTTP.

    Content is fetched lazily with HTTP range requests and spooled into a
    temporary file, which is what ultimately gets fed to ZipFile.  If the
    server does not support range requests, HTTPRangeRequestUnsupported
    is raised during initialization.
    """

    def __init__(
        self, url: str, session: PipSession, chunk_size: int = CONTENT_CHUNK_SIZE
    ) -> None:
        head = session.head(url, headers=HEADERS)
        raise_for_status(head)
        assert head.status_code == 200
        self._session, self._url, self._chunk_size = session, url, chunk_size
        self._length = int(head.headers["Content-Length"])
        self._file = NamedTemporaryFile()
        self.truncate(self._length)
        # Sorted, disjoint [left, right] intervals of already-downloaded
        # bytes, kept as two parallel lists for bisect lookups.
        self._left: List[int] = []
        self._right: List[int] = []
        if "bytes" not in head.headers.get("Accept-Ranges", "none"):
            raise HTTPRangeRequestUnsupported("range request is not supported")
        self._check_zip()

    @property
    def mode(self) -> str:
        """Opening mode, which is always rb."""
        return "rb"

    @property
    def name(self) -> str:
        """Path to the underlying temporary file."""
        return self._file.name

    def seekable(self) -> bool:
        """Return whether random access is supported, which is True."""
        return True

    def close(self) -> None:
        """Close the underlying file."""
        self._file.close()

    @property
    def closed(self) -> bool:
        """Whether the file is closed."""
        return self._file.closed

    def read(self, size: int = -1) -> bytes:
        """Read up to *size* bytes, downloading what is still missing.

        As with ordinary files, size -1 (the default) reads until EOF,
        and fewer than *size* bytes may be returned at EOF.
        """
        # Fetch at least one full chunk so small sequential reads do not
        # become many tiny range requests.
        download_size = max(size, self._chunk_size)
        start, length = self.tell(), self._length
        stop = length if size < 0 else min(start + download_size, length)
        start = max(0, stop - download_size)
        self._download(start, stop - 1)
        return self._file.read(size)

    def readable(self) -> bool:
        """Return whether the file is readable, which is True."""
        return True

    def seek(self, offset: int, whence: int = 0) -> int:
        """Change stream position and return the new absolute position.

        Seek to offset relative to the position indicated by whence:
        * 0: start of stream (the default); offset should be >= 0
        * 1: current position; offset may be negative
        * 2: end of stream; offset is usually negative
        """
        return self._file.seek(offset, whence)

    def tell(self) -> int:
        """Return the current position."""
        return self._file.tell()

    def truncate(self, size: Optional[int] = None) -> int:
        """Resize the stream to *size* bytes (current position if None).

        The current stream position is not changed; the new file size is
        returned.
        """
        return self._file.truncate(size)

    def writable(self) -> bool:
        """Return False: this file is read-only."""
        return False

    def __enter__(self) -> "LazyZipOverHTTP":
        self._file.__enter__()
        return self

    def __exit__(self, *exc: Any) -> Optional[bool]:
        return self._file.__exit__(*exc)

    @contextmanager
    def _stay(self) -> Iterator[None]:
        """Context manager that restores the stream position on exit."""
        saved_pos = self.tell()
        try:
            yield
        finally:
            self.seek(saved_pos)

    def _check_zip(self) -> None:
        """Download, from the end backwards, until ZipFile can parse us."""
        end = self._length - 1
        for start in reversed(range(0, end, self._chunk_size)):
            self._download(start, end)
            with self._stay():
                try:
                    # For read-only ZIP files, ZipFile only needs the
                    # methods read, seek, seekable and tell.
                    ZipFile(self)  # type: ignore
                except BadZipfile:
                    pass
                else:
                    break

    def _stream_response(
        self, start: int, end: int, base_headers: Dict[str, str] = HEADERS
    ) -> Response:
        """Return the streamed HTTP response for bytes start..end."""
        headers = base_headers.copy()
        headers["Range"] = f"bytes={start}-{end}"
        # TODO: Get range requests to be correctly cached
        headers["Cache-Control"] = "no-cache"
        return self._session.get(self._url, headers=headers, stream=True)

    def _merge(
        self, start: int, end: int, left: int, right: int
    ) -> Iterator[Tuple[int, int]]:
        """Yield the sub-intervals of start..end that still need fetching.

        Args:
            start (int): Start of needed interval
            end (int): End of needed interval
            left (int): Index of first overlapping downloaded data
            right (int): Index after last overlapping downloaded data

        As a side effect, the overlapping bookkeeping entries are
        collapsed into the single merged interval.
        """
        lslice, rslice = self._left[left:right], self._right[left:right]
        i = start = min([start] + lslice[:1])
        end = max([end] + rslice[-1:])
        for j, k in zip(lslice, rslice):
            if j > i:
                yield i, j - 1
            i = k + 1
        if i <= end:
            yield i, end
        self._left[left:right], self._right[left:right] = [start], [end]

    def _download(self, start: int, end: int) -> None:
        """Download bytes start..end inclusive into the backing file."""
        with self._stay():
            left = bisect_left(self._right, start)
            right = bisect_right(self._left, end)
            for fetch_start, fetch_end in self._merge(start, end, left, right):
                response = self._stream_response(fetch_start, fetch_end)
                response.raise_for_status()
                self.seek(fetch_start)
                for chunk in response_chunks(response, self._chunk_size):
                    self._file.write(chunk)
venv/lib/python3.10/site-packages/pip/_internal/network/session.py ADDED
@@ -0,0 +1,454 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """PipSession and supporting code, containing all pip-specific
2
+ network request configuration and behavior.
3
+ """
4
+
5
+ import email.utils
6
+ import io
7
+ import ipaddress
8
+ import json
9
+ import logging
10
+ import mimetypes
11
+ import os
12
+ import platform
13
+ import shutil
14
+ import subprocess
15
+ import sys
16
+ import urllib.parse
17
+ import warnings
18
+ from typing import Any, Dict, Iterator, List, Mapping, Optional, Sequence, Tuple, Union
19
+
20
+ from pip._vendor import requests, urllib3
21
+ from pip._vendor.cachecontrol import CacheControlAdapter
22
+ from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter
23
+ from pip._vendor.requests.models import PreparedRequest, Response
24
+ from pip._vendor.requests.structures import CaseInsensitiveDict
25
+ from pip._vendor.urllib3.connectionpool import ConnectionPool
26
+ from pip._vendor.urllib3.exceptions import InsecureRequestWarning
27
+
28
+ from pip import __version__
29
+ from pip._internal.metadata import get_default_environment
30
+ from pip._internal.models.link import Link
31
+ from pip._internal.network.auth import MultiDomainBasicAuth
32
+ from pip._internal.network.cache import SafeFileCache
33
+
34
+ # Import ssl from compat so the initial import occurs in only one place.
35
+ from pip._internal.utils.compat import has_tls
36
+ from pip._internal.utils.glibc import libc_ver
37
+ from pip._internal.utils.misc import build_url_from_netloc, parse_netloc
38
+ from pip._internal.utils.urls import url_to_path
39
+
40
logger = logging.getLogger(__name__)

# (protocol, host, port) triple describing an origin pip may trust.
SecureOrigin = Tuple[str, str, Optional[Union[int, str]]]


# Ignore warning raised when using --trusted-host.
warnings.filterwarnings("ignore", category=InsecureRequestWarning)


SECURE_ORIGINS: List[SecureOrigin] = [
    # protocol, hostname, port
    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
    ("https", "*", "*"),
    ("*", "localhost", "*"),
    ("*", "127.0.0.0/8", "*"),
    ("*", "::1/128", "*"),
    ("file", "*", None),
    # ssh is always secure.
    ("ssh", "*", "*"),
]


# These are environment variables present when running under various
# CI systems. For each variable, some CI systems that use the variable
# are indicated. The collection was chosen so that for each of a number
# of popular systems, at least one of the environment variables is used.
# This list is used to provide some indication of and lower bound for
# CI traffic to PyPI. Thus, it is okay if the list is not comprehensive.
# For more background, see: https://github.com/pypa/pip/issues/5499
CI_ENVIRONMENT_VARIABLES = (
    # Azure Pipelines
    "BUILD_BUILDID",
    # Jenkins
    "BUILD_ID",
    # AppVeyor, CircleCI, Codeship, Gitlab CI, Shippable, Travis CI
    "CI",
    # Explicit environment variable.
    "PIP_IS_CI",
)
79
+
80
+
81
+ def looks_like_ci() -> bool:
82
+ """
83
+ Return whether it looks like pip is running under CI.
84
+ """
85
+ # We don't use the method of checking for a tty (e.g. using isatty())
86
+ # because some CI systems mimic a tty (e.g. Travis CI). Thus that
87
+ # method doesn't provide definitive information in either direction.
88
+ return any(name in os.environ for name in CI_ENVIRONMENT_VARIABLES)
89
+
90
+
91
+ def user_agent() -> str:
92
+ """
93
+ Return a string representing the user agent.
94
+ """
95
+ data: Dict[str, Any] = {
96
+ "installer": {"name": "pip", "version": __version__},
97
+ "python": platform.python_version(),
98
+ "implementation": {
99
+ "name": platform.python_implementation(),
100
+ },
101
+ }
102
+
103
+ if data["implementation"]["name"] == "CPython":
104
+ data["implementation"]["version"] = platform.python_version()
105
+ elif data["implementation"]["name"] == "PyPy":
106
+ pypy_version_info = sys.pypy_version_info # type: ignore
107
+ if pypy_version_info.releaselevel == "final":
108
+ pypy_version_info = pypy_version_info[:3]
109
+ data["implementation"]["version"] = ".".join(
110
+ [str(x) for x in pypy_version_info]
111
+ )
112
+ elif data["implementation"]["name"] == "Jython":
113
+ # Complete Guess
114
+ data["implementation"]["version"] = platform.python_version()
115
+ elif data["implementation"]["name"] == "IronPython":
116
+ # Complete Guess
117
+ data["implementation"]["version"] = platform.python_version()
118
+
119
+ if sys.platform.startswith("linux"):
120
+ from pip._vendor import distro
121
+
122
+ linux_distribution = distro.name(), distro.version(), distro.codename()
123
+ distro_infos: Dict[str, Any] = dict(
124
+ filter(
125
+ lambda x: x[1],
126
+ zip(["name", "version", "id"], linux_distribution),
127
+ )
128
+ )
129
+ libc = dict(
130
+ filter(
131
+ lambda x: x[1],
132
+ zip(["lib", "version"], libc_ver()),
133
+ )
134
+ )
135
+ if libc:
136
+ distro_infos["libc"] = libc
137
+ if distro_infos:
138
+ data["distro"] = distro_infos
139
+
140
+ if sys.platform.startswith("darwin") and platform.mac_ver()[0]:
141
+ data["distro"] = {"name": "macOS", "version": platform.mac_ver()[0]}
142
+
143
+ if platform.system():
144
+ data.setdefault("system", {})["name"] = platform.system()
145
+
146
+ if platform.release():
147
+ data.setdefault("system", {})["release"] = platform.release()
148
+
149
+ if platform.machine():
150
+ data["cpu"] = platform.machine()
151
+
152
+ if has_tls():
153
+ import _ssl as ssl
154
+
155
+ data["openssl_version"] = ssl.OPENSSL_VERSION
156
+
157
+ setuptools_dist = get_default_environment().get_distribution("setuptools")
158
+ if setuptools_dist is not None:
159
+ data["setuptools_version"] = str(setuptools_dist.version)
160
+
161
+ if shutil.which("rustc") is not None:
162
+ # If for any reason `rustc --version` fails, silently ignore it
163
+ try:
164
+ rustc_output = subprocess.check_output(
165
+ ["rustc", "--version"], stderr=subprocess.STDOUT, timeout=0.5
166
+ )
167
+ except Exception:
168
+ pass
169
+ else:
170
+ if rustc_output.startswith(b"rustc "):
171
+ # The format of `rustc --version` is:
172
+ # `b'rustc 1.52.1 (9bc8c42bb 2021-05-09)\n'`
173
+ # We extract just the middle (1.52.1) part
174
+ data["rustc_version"] = rustc_output.split(b" ")[1].decode()
175
+
176
+ # Use None rather than False so as not to give the impression that
177
+ # pip knows it is not being run under CI. Rather, it is a null or
178
+ # inconclusive result. Also, we include some value rather than no
179
+ # value to make it easier to know that the check has been run.
180
+ data["ci"] = True if looks_like_ci() else None
181
+
182
+ user_data = os.environ.get("PIP_USER_AGENT_USER_DATA")
183
+ if user_data is not None:
184
+ data["user_data"] = user_data
185
+
186
+ return "{data[installer][name]}/{data[installer][version]} {json}".format(
187
+ data=data,
188
+ json=json.dumps(data, separators=(",", ":"), sort_keys=True),
189
+ )
190
+
191
+
192
+ class LocalFSAdapter(BaseAdapter):
193
+ def send(
194
+ self,
195
+ request: PreparedRequest,
196
+ stream: bool = False,
197
+ timeout: Optional[Union[float, Tuple[float, float]]] = None,
198
+ verify: Union[bool, str] = True,
199
+ cert: Optional[Union[str, Tuple[str, str]]] = None,
200
+ proxies: Optional[Mapping[str, str]] = None,
201
+ ) -> Response:
202
+ pathname = url_to_path(request.url)
203
+
204
+ resp = Response()
205
+ resp.status_code = 200
206
+ resp.url = request.url
207
+
208
+ try:
209
+ stats = os.stat(pathname)
210
+ except OSError as exc:
211
+ # format the exception raised as a io.BytesIO object,
212
+ # to return a better error message:
213
+ resp.status_code = 404
214
+ resp.reason = type(exc).__name__
215
+ resp.raw = io.BytesIO(f"{resp.reason}: {exc}".encode("utf8"))
216
+ else:
217
+ modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
218
+ content_type = mimetypes.guess_type(pathname)[0] or "text/plain"
219
+ resp.headers = CaseInsensitiveDict(
220
+ {
221
+ "Content-Type": content_type,
222
+ "Content-Length": stats.st_size,
223
+ "Last-Modified": modified,
224
+ }
225
+ )
226
+
227
+ resp.raw = open(pathname, "rb")
228
+ resp.close = resp.raw.close
229
+
230
+ return resp
231
+
232
+ def close(self) -> None:
233
+ pass
234
+
235
+
236
+ class InsecureHTTPAdapter(HTTPAdapter):
237
+ def cert_verify(
238
+ self,
239
+ conn: ConnectionPool,
240
+ url: str,
241
+ verify: Union[bool, str],
242
+ cert: Optional[Union[str, Tuple[str, str]]],
243
+ ) -> None:
244
+ super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
245
+
246
+
247
+ class InsecureCacheControlAdapter(CacheControlAdapter):
248
+ def cert_verify(
249
+ self,
250
+ conn: ConnectionPool,
251
+ url: str,
252
+ verify: Union[bool, str],
253
+ cert: Optional[Union[str, Tuple[str, str]]],
254
+ ) -> None:
255
+ super().cert_verify(conn=conn, url=url, verify=False, cert=cert)
256
+
257
+
258
+ class PipSession(requests.Session):
259
+
260
+ timeout: Optional[int] = None
261
+
262
+ def __init__(
263
+ self,
264
+ *args: Any,
265
+ retries: int = 0,
266
+ cache: Optional[str] = None,
267
+ trusted_hosts: Sequence[str] = (),
268
+ index_urls: Optional[List[str]] = None,
269
+ **kwargs: Any,
270
+ ) -> None:
271
+ """
272
+ :param trusted_hosts: Domains not to emit warnings for when not using
273
+ HTTPS.
274
+ """
275
+ super().__init__(*args, **kwargs)
276
+
277
+ # Namespace the attribute with "pip_" just in case to prevent
278
+ # possible conflicts with the base class.
279
+ self.pip_trusted_origins: List[Tuple[str, Optional[int]]] = []
280
+
281
+ # Attach our User Agent to the request
282
+ self.headers["User-Agent"] = user_agent()
283
+
284
+ # Attach our Authentication handler to the session
285
+ self.auth = MultiDomainBasicAuth(index_urls=index_urls)
286
+
287
+ # Create our urllib3.Retry instance which will allow us to customize
288
+ # how we handle retries.
289
+ retries = urllib3.Retry(
290
+ # Set the total number of retries that a particular request can
291
+ # have.
292
+ total=retries,
293
+ # A 503 error from PyPI typically means that the Fastly -> Origin
294
+ # connection got interrupted in some way. A 503 error in general
295
+ # is typically considered a transient error so we'll go ahead and
296
+ # retry it.
297
+ # A 500 may indicate transient error in Amazon S3
298
+ # A 520 or 527 - may indicate transient error in CloudFlare
299
+ status_forcelist=[500, 503, 520, 527],
300
+ # Add a small amount of back off between failed requests in
301
+ # order to prevent hammering the service.
302
+ backoff_factor=0.25,
303
+ ) # type: ignore
304
+
305
+ # Our Insecure HTTPAdapter disables HTTPS validation. It does not
306
+ # support caching so we'll use it for all http:// URLs.
307
+ # If caching is disabled, we will also use it for
308
+ # https:// hosts that we've marked as ignoring
309
+ # TLS errors for (trusted-hosts).
310
+ insecure_adapter = InsecureHTTPAdapter(max_retries=retries)
311
+
312
+ # We want to _only_ cache responses on securely fetched origins or when
313
+ # the host is specified as trusted. We do this because
314
+ # we can't validate the response of an insecurely/untrusted fetched
315
+ # origin, and we don't want someone to be able to poison the cache and
316
+ # require manual eviction from the cache to fix it.
317
+ if cache:
318
+ secure_adapter = CacheControlAdapter(
319
+ cache=SafeFileCache(cache),
320
+ max_retries=retries,
321
+ )
322
+ self._trusted_host_adapter = InsecureCacheControlAdapter(
323
+ cache=SafeFileCache(cache),
324
+ max_retries=retries,
325
+ )
326
+ else:
327
+ secure_adapter = HTTPAdapter(max_retries=retries)
328
+ self._trusted_host_adapter = insecure_adapter
329
+
330
+ self.mount("https://", secure_adapter)
331
+ self.mount("http://", insecure_adapter)
332
+
333
+ # Enable file:// urls
334
+ self.mount("file://", LocalFSAdapter())
335
+
336
+ for host in trusted_hosts:
337
+ self.add_trusted_host(host, suppress_logging=True)
338
+
339
+ def update_index_urls(self, new_index_urls: List[str]) -> None:
340
+ """
341
+ :param new_index_urls: New index urls to update the authentication
342
+ handler with.
343
+ """
344
+ self.auth.index_urls = new_index_urls
345
+
346
+ def add_trusted_host(
347
+ self, host: str, source: Optional[str] = None, suppress_logging: bool = False
348
+ ) -> None:
349
+ """
350
+ :param host: It is okay to provide a host that has previously been
351
+ added.
352
+ :param source: An optional source string, for logging where the host
353
+ string came from.
354
+ """
355
+ if not suppress_logging:
356
+ msg = f"adding trusted host: {host!r}"
357
+ if source is not None:
358
+ msg += f" (from {source})"
359
+ logger.info(msg)
360
+
361
+ host_port = parse_netloc(host)
362
+ if host_port not in self.pip_trusted_origins:
363
+ self.pip_trusted_origins.append(host_port)
364
+
365
+ self.mount(
366
+ build_url_from_netloc(host, scheme="http") + "/", self._trusted_host_adapter
367
+ )
368
+ self.mount(build_url_from_netloc(host) + "/", self._trusted_host_adapter)
369
+ if not host_port[1]:
370
+ self.mount(
371
+ build_url_from_netloc(host, scheme="http") + ":",
372
+ self._trusted_host_adapter,
373
+ )
374
+ # Mount wildcard ports for the same host.
375
+ self.mount(build_url_from_netloc(host) + ":", self._trusted_host_adapter)
376
+
377
+ def iter_secure_origins(self) -> Iterator[SecureOrigin]:
378
+ yield from SECURE_ORIGINS
379
+ for host, port in self.pip_trusted_origins:
380
+ yield ("*", host, "*" if port is None else port)
381
+
382
+ def is_secure_origin(self, location: Link) -> bool:
383
+ # Determine if this url used a secure transport mechanism
384
+ parsed = urllib.parse.urlparse(str(location))
385
+ origin_protocol, origin_host, origin_port = (
386
+ parsed.scheme,
387
+ parsed.hostname,
388
+ parsed.port,
389
+ )
390
+
391
+ # The protocol to use to see if the protocol matches.
392
+ # Don't count the repository type as part of the protocol: in
393
+ # cases such as "git+ssh", only use "ssh". (I.e., Only verify against
394
+ # the last scheme.)
395
+ origin_protocol = origin_protocol.rsplit("+", 1)[-1]
396
+
397
+ # Determine if our origin is a secure origin by looking through our
398
+ # hardcoded list of secure origins, as well as any additional ones
399
+ # configured on this PackageFinder instance.
400
+ for secure_origin in self.iter_secure_origins():
401
+ secure_protocol, secure_host, secure_port = secure_origin
402
+ if origin_protocol != secure_protocol and secure_protocol != "*":
403
+ continue
404
+
405
+ try:
406
+ addr = ipaddress.ip_address(origin_host)
407
+ network = ipaddress.ip_network(secure_host)
408
+ except ValueError:
409
+ # We don't have both a valid address or a valid network, so
410
+ # we'll check this origin against hostnames.
411
+ if (
412
+ origin_host
413
+ and origin_host.lower() != secure_host.lower()
414
+ and secure_host != "*"
415
+ ):
416
+ continue
417
+ else:
418
+ # We have a valid address and network, so see if the address
419
+ # is contained within the network.
420
+ if addr not in network:
421
+ continue
422
+
423
+ # Check to see if the port matches.
424
+ if (
425
+ origin_port != secure_port
426
+ and secure_port != "*"
427
+ and secure_port is not None
428
+ ):
429
+ continue
430
+
431
+ # If we've gotten here, then this origin matches the current
432
+ # secure origin and we should return True
433
+ return True
434
+
435
+ # If we've gotten to this point, then the origin isn't secure and we
436
+ # will not accept it as a valid location to search. We will however
437
+ # log a warning that we are ignoring it.
438
+ logger.warning(
439
+ "The repository located at %s is not a trusted or secure host and "
440
+ "is being ignored. If this repository is available via HTTPS we "
441
+ "recommend you use HTTPS instead, otherwise you may silence "
442
+ "this warning and allow it anyway with '--trusted-host %s'.",
443
+ origin_host,
444
+ origin_host,
445
+ )
446
+
447
+ return False
448
+
449
+ def request(self, method: str, url: str, *args: Any, **kwargs: Any) -> Response:
450
+ # Allow setting a default timeout on a session
451
+ kwargs.setdefault("timeout", self.timeout)
452
+
453
+ # Dispatch the actual request
454
+ return super().request(method, url, *args, **kwargs)
venv/lib/python3.10/site-packages/pip/_internal/operations/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pip/_internal/operations/check.py ADDED
@@ -0,0 +1,149 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Validation of dependencies of packages
2
+ """
3
+
4
+ import logging
5
+ from typing import Callable, Dict, List, NamedTuple, Optional, Set, Tuple
6
+
7
+ from pip._vendor.packaging.requirements import Requirement
8
+ from pip._vendor.packaging.utils import NormalizedName, canonicalize_name
9
+
10
+ from pip._internal.distributions import make_distribution_for_install_requirement
11
+ from pip._internal.metadata import get_default_environment
12
+ from pip._internal.metadata.base import DistributionVersion
13
+ from pip._internal.req.req_install import InstallRequirement
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ class PackageDetails(NamedTuple):
19
+ version: DistributionVersion
20
+ dependencies: List[Requirement]
21
+
22
+
23
+ # Shorthands
24
+ PackageSet = Dict[NormalizedName, PackageDetails]
25
+ Missing = Tuple[NormalizedName, Requirement]
26
+ Conflicting = Tuple[NormalizedName, DistributionVersion, Requirement]
27
+
28
+ MissingDict = Dict[NormalizedName, List[Missing]]
29
+ ConflictingDict = Dict[NormalizedName, List[Conflicting]]
30
+ CheckResult = Tuple[MissingDict, ConflictingDict]
31
+ ConflictDetails = Tuple[PackageSet, CheckResult]
32
+
33
+
34
+ def create_package_set_from_installed() -> Tuple[PackageSet, bool]:
35
+ """Converts a list of distributions into a PackageSet."""
36
+ package_set = {}
37
+ problems = False
38
+ env = get_default_environment()
39
+ for dist in env.iter_installed_distributions(local_only=False, skip=()):
40
+ name = dist.canonical_name
41
+ try:
42
+ dependencies = list(dist.iter_dependencies())
43
+ package_set[name] = PackageDetails(dist.version, dependencies)
44
+ except (OSError, ValueError) as e:
45
+ # Don't crash on unreadable or broken metadata.
46
+ logger.warning("Error parsing requirements for %s: %s", name, e)
47
+ problems = True
48
+ return package_set, problems
49
+
50
+
51
+ def check_package_set(
52
+ package_set: PackageSet, should_ignore: Optional[Callable[[str], bool]] = None
53
+ ) -> CheckResult:
54
+ """Check if a package set is consistent
55
+
56
+ If should_ignore is passed, it should be a callable that takes a
57
+ package name and returns a boolean.
58
+ """
59
+
60
+ missing = {}
61
+ conflicting = {}
62
+
63
+ for package_name, package_detail in package_set.items():
64
+ # Info about dependencies of package_name
65
+ missing_deps: Set[Missing] = set()
66
+ conflicting_deps: Set[Conflicting] = set()
67
+
68
+ if should_ignore and should_ignore(package_name):
69
+ continue
70
+
71
+ for req in package_detail.dependencies:
72
+ name = canonicalize_name(req.name)
73
+
74
+ # Check if it's missing
75
+ if name not in package_set:
76
+ missed = True
77
+ if req.marker is not None:
78
+ missed = req.marker.evaluate()
79
+ if missed:
80
+ missing_deps.add((name, req))
81
+ continue
82
+
83
+ # Check if there's a conflict
84
+ version = package_set[name].version
85
+ if not req.specifier.contains(version, prereleases=True):
86
+ conflicting_deps.add((name, version, req))
87
+
88
+ if missing_deps:
89
+ missing[package_name] = sorted(missing_deps, key=str)
90
+ if conflicting_deps:
91
+ conflicting[package_name] = sorted(conflicting_deps, key=str)
92
+
93
+ return missing, conflicting
94
+
95
+
96
+ def check_install_conflicts(to_install: List[InstallRequirement]) -> ConflictDetails:
97
+ """For checking if the dependency graph would be consistent after \
98
+ installing given requirements
99
+ """
100
+ # Start from the current state
101
+ package_set, _ = create_package_set_from_installed()
102
+ # Install packages
103
+ would_be_installed = _simulate_installation_of(to_install, package_set)
104
+
105
+ # Only warn about directly-dependent packages; create a whitelist of them
106
+ whitelist = _create_whitelist(would_be_installed, package_set)
107
+
108
+ return (
109
+ package_set,
110
+ check_package_set(
111
+ package_set, should_ignore=lambda name: name not in whitelist
112
+ ),
113
+ )
114
+
115
+
116
+ def _simulate_installation_of(
117
+ to_install: List[InstallRequirement], package_set: PackageSet
118
+ ) -> Set[NormalizedName]:
119
+ """Computes the version of packages after installing to_install."""
120
+ # Keep track of packages that were installed
121
+ installed = set()
122
+
123
+ # Modify it as installing requirement_set would (assuming no errors)
124
+ for inst_req in to_install:
125
+ abstract_dist = make_distribution_for_install_requirement(inst_req)
126
+ dist = abstract_dist.get_metadata_distribution()
127
+ name = dist.canonical_name
128
+ package_set[name] = PackageDetails(dist.version, list(dist.iter_dependencies()))
129
+
130
+ installed.add(name)
131
+
132
+ return installed
133
+
134
+
135
+ def _create_whitelist(
136
+ would_be_installed: Set[NormalizedName], package_set: PackageSet
137
+ ) -> Set[NormalizedName]:
138
+ packages_affected = set(would_be_installed)
139
+
140
+ for package_name in package_set:
141
+ if package_name in packages_affected:
142
+ continue
143
+
144
+ for req in package_set[package_name].dependencies:
145
+ if canonicalize_name(req.name) in packages_affected:
146
+ packages_affected.add(package_name)
147
+ break
148
+
149
+ return packages_affected
venv/lib/python3.10/site-packages/pip/_internal/operations/freeze.py ADDED
@@ -0,0 +1,254 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import collections
2
+ import logging
3
+ import os
4
+ from typing import Container, Dict, Iterable, Iterator, List, NamedTuple, Optional, Set
5
+
6
+ from pip._vendor.packaging.utils import canonicalize_name
7
+ from pip._vendor.packaging.version import Version
8
+
9
+ from pip._internal.exceptions import BadCommand, InstallationError
10
+ from pip._internal.metadata import BaseDistribution, get_environment
11
+ from pip._internal.req.constructors import (
12
+ install_req_from_editable,
13
+ install_req_from_line,
14
+ )
15
+ from pip._internal.req.req_file import COMMENT_RE
16
+ from pip._internal.utils.direct_url_helpers import direct_url_as_pep440_direct_reference
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ class _EditableInfo(NamedTuple):
22
+ requirement: str
23
+ comments: List[str]
24
+
25
+
26
+ def freeze(
27
+ requirement: Optional[List[str]] = None,
28
+ local_only: bool = False,
29
+ user_only: bool = False,
30
+ paths: Optional[List[str]] = None,
31
+ isolated: bool = False,
32
+ exclude_editable: bool = False,
33
+ skip: Container[str] = (),
34
+ ) -> Iterator[str]:
35
+ installations: Dict[str, FrozenRequirement] = {}
36
+
37
+ dists = get_environment(paths).iter_installed_distributions(
38
+ local_only=local_only,
39
+ skip=(),
40
+ user_only=user_only,
41
+ )
42
+ for dist in dists:
43
+ req = FrozenRequirement.from_dist(dist)
44
+ if exclude_editable and req.editable:
45
+ continue
46
+ installations[req.canonical_name] = req
47
+
48
+ if requirement:
49
+ # the options that don't get turned into an InstallRequirement
50
+ # should only be emitted once, even if the same option is in multiple
51
+ # requirements files, so we need to keep track of what has been emitted
52
+ # so that we don't emit it again if it's seen again
53
+ emitted_options: Set[str] = set()
54
+ # keep track of which files a requirement is in so that we can
55
+ # give an accurate warning if a requirement appears multiple times.
56
+ req_files: Dict[str, List[str]] = collections.defaultdict(list)
57
+ for req_file_path in requirement:
58
+ with open(req_file_path) as req_file:
59
+ for line in req_file:
60
+ if (
61
+ not line.strip()
62
+ or line.strip().startswith("#")
63
+ or line.startswith(
64
+ (
65
+ "-r",
66
+ "--requirement",
67
+ "-f",
68
+ "--find-links",
69
+ "-i",
70
+ "--index-url",
71
+ "--pre",
72
+ "--trusted-host",
73
+ "--process-dependency-links",
74
+ "--extra-index-url",
75
+ "--use-feature",
76
+ )
77
+ )
78
+ ):
79
+ line = line.rstrip()
80
+ if line not in emitted_options:
81
+ emitted_options.add(line)
82
+ yield line
83
+ continue
84
+
85
+ if line.startswith("-e") or line.startswith("--editable"):
86
+ if line.startswith("-e"):
87
+ line = line[2:].strip()
88
+ else:
89
+ line = line[len("--editable") :].strip().lstrip("=")
90
+ line_req = install_req_from_editable(
91
+ line,
92
+ isolated=isolated,
93
+ )
94
+ else:
95
+ line_req = install_req_from_line(
96
+ COMMENT_RE.sub("", line).strip(),
97
+ isolated=isolated,
98
+ )
99
+
100
+ if not line_req.name:
101
+ logger.info(
102
+ "Skipping line in requirement file [%s] because "
103
+ "it's not clear what it would install: %s",
104
+ req_file_path,
105
+ line.strip(),
106
+ )
107
+ logger.info(
108
+ " (add #egg=PackageName to the URL to avoid"
109
+ " this warning)"
110
+ )
111
+ else:
112
+ line_req_canonical_name = canonicalize_name(line_req.name)
113
+ if line_req_canonical_name not in installations:
114
+ # either it's not installed, or it is installed
115
+ # but has been processed already
116
+ if not req_files[line_req.name]:
117
+ logger.warning(
118
+ "Requirement file [%s] contains %s, but "
119
+ "package %r is not installed",
120
+ req_file_path,
121
+ COMMENT_RE.sub("", line).strip(),
122
+ line_req.name,
123
+ )
124
+ else:
125
+ req_files[line_req.name].append(req_file_path)
126
+ else:
127
+ yield str(installations[line_req_canonical_name]).rstrip()
128
+ del installations[line_req_canonical_name]
129
+ req_files[line_req.name].append(req_file_path)
130
+
131
+ # Warn about requirements that were included multiple times (in a
132
+ # single requirements file or in different requirements files).
133
+ for name, files in req_files.items():
134
+ if len(files) > 1:
135
+ logger.warning(
136
+ "Requirement %s included multiple times [%s]",
137
+ name,
138
+ ", ".join(sorted(set(files))),
139
+ )
140
+
141
+ yield ("## The following requirements were added by pip freeze:")
142
+ for installation in sorted(installations.values(), key=lambda x: x.name.lower()):
143
+ if installation.canonical_name not in skip:
144
+ yield str(installation).rstrip()
145
+
146
+
147
+ def _format_as_name_version(dist: BaseDistribution) -> str:
148
+ if isinstance(dist.version, Version):
149
+ return f"{dist.raw_name}=={dist.version}"
150
+ return f"{dist.raw_name}==={dist.version}"
151
+
152
+
153
+ def _get_editable_info(dist: BaseDistribution) -> _EditableInfo:
154
+ """
155
+ Compute and return values (req, comments) for use in
156
+ FrozenRequirement.from_dist().
157
+ """
158
+ editable_project_location = dist.editable_project_location
159
+ assert editable_project_location
160
+ location = os.path.normcase(os.path.abspath(editable_project_location))
161
+
162
+ from pip._internal.vcs import RemoteNotFoundError, RemoteNotValidError, vcs
163
+
164
+ vcs_backend = vcs.get_backend_for_dir(location)
165
+
166
+ if vcs_backend is None:
167
+ display = _format_as_name_version(dist)
168
+ logger.debug(
169
+ 'No VCS found for editable requirement "%s" in: %r',
170
+ display,
171
+ location,
172
+ )
173
+ return _EditableInfo(
174
+ requirement=location,
175
+ comments=[f"# Editable install with no version control ({display})"],
176
+ )
177
+
178
+ vcs_name = type(vcs_backend).__name__
179
+
180
+ try:
181
+ req = vcs_backend.get_src_requirement(location, dist.raw_name)
182
+ except RemoteNotFoundError:
183
+ display = _format_as_name_version(dist)
184
+ return _EditableInfo(
185
+ requirement=location,
186
+ comments=[f"# Editable {vcs_name} install with no remote ({display})"],
187
+ )
188
+ except RemoteNotValidError as ex:
189
+ display = _format_as_name_version(dist)
190
+ return _EditableInfo(
191
+ requirement=location,
192
+ comments=[
193
+ f"# Editable {vcs_name} install ({display}) with either a deleted "
194
+ f"local remote or invalid URI:",
195
+ f"# '{ex.url}'",
196
+ ],
197
+ )
198
+ except BadCommand:
199
+ logger.warning(
200
+ "cannot determine version of editable source in %s "
201
+ "(%s command not found in path)",
202
+ location,
203
+ vcs_backend.name,
204
+ )
205
+ return _EditableInfo(requirement=location, comments=[])
206
+ except InstallationError as exc:
207
+ logger.warning("Error when trying to get requirement for VCS system %s", exc)
208
+ else:
209
+ return _EditableInfo(requirement=req, comments=[])
210
+
211
+ logger.warning("Could not determine repository location of %s", location)
212
+
213
+ return _EditableInfo(
214
+ requirement=location,
215
+ comments=["## !! Could not determine repository location"],
216
+ )
217
+
218
+
219
+ class FrozenRequirement:
220
+ def __init__(
221
+ self,
222
+ name: str,
223
+ req: str,
224
+ editable: bool,
225
+ comments: Iterable[str] = (),
226
+ ) -> None:
227
+ self.name = name
228
+ self.canonical_name = canonicalize_name(name)
229
+ self.req = req
230
+ self.editable = editable
231
+ self.comments = comments
232
+
233
+ @classmethod
234
+ def from_dist(cls, dist: BaseDistribution) -> "FrozenRequirement":
235
+ editable = dist.editable
236
+ if editable:
237
+ req, comments = _get_editable_info(dist)
238
+ else:
239
+ comments = []
240
+ direct_url = dist.direct_url
241
+ if direct_url:
242
+ # if PEP 610 metadata is present, use it
243
+ req = direct_url_as_pep440_direct_reference(direct_url, dist.raw_name)
244
+ else:
245
+ # name==version requirement
246
+ req = _format_as_name_version(dist)
247
+
248
+ return cls(dist.raw_name, req, editable, comments=comments)
249
+
250
+ def __str__(self) -> str:
251
+ req = self.req
252
+ if self.editable:
253
+ req = f"-e {req}"
254
+ return "\n".join(list(self.comments) + [str(req)]) + "\n"
venv/lib/python3.10/site-packages/pip/_internal/operations/prepare.py ADDED
@@ -0,0 +1,642 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Prepares a distribution for installation
2
+ """
3
+
4
+ # The following comment should be removed at some point in the future.
5
+ # mypy: strict-optional=False
6
+
7
+ import logging
8
+ import mimetypes
9
+ import os
10
+ import shutil
11
+ from typing import Dict, Iterable, List, Optional
12
+
13
+ from pip._vendor.packaging.utils import canonicalize_name
14
+
15
+ from pip._internal.distributions import make_distribution_for_install_requirement
16
+ from pip._internal.distributions.installed import InstalledDistribution
17
+ from pip._internal.exceptions import (
18
+ DirectoryUrlHashUnsupported,
19
+ HashMismatch,
20
+ HashUnpinned,
21
+ InstallationError,
22
+ NetworkConnectionError,
23
+ PreviousBuildDirError,
24
+ VcsHashUnsupported,
25
+ )
26
+ from pip._internal.index.package_finder import PackageFinder
27
+ from pip._internal.metadata import BaseDistribution
28
+ from pip._internal.models.link import Link
29
+ from pip._internal.models.wheel import Wheel
30
+ from pip._internal.network.download import BatchDownloader, Downloader
31
+ from pip._internal.network.lazy_wheel import (
32
+ HTTPRangeRequestUnsupported,
33
+ dist_from_wheel_url,
34
+ )
35
+ from pip._internal.network.session import PipSession
36
+ from pip._internal.req.req_install import InstallRequirement
37
+ from pip._internal.req.req_tracker import RequirementTracker
38
+ from pip._internal.utils.filesystem import copy2_fixed
39
+ from pip._internal.utils.hashes import Hashes, MissingHashes
40
+ from pip._internal.utils.logging import indent_log
41
+ from pip._internal.utils.misc import display_path, hide_url, is_installable_dir, rmtree
42
+ from pip._internal.utils.temp_dir import TempDirectory
43
+ from pip._internal.utils.unpacking import unpack_file
44
+ from pip._internal.vcs import vcs
45
+
46
+ logger = logging.getLogger(__name__)
47
+
48
+
49
def _get_prepared_distribution(
    req: InstallRequirement,
    req_tracker: RequirementTracker,
    finder: PackageFinder,
    build_isolation: bool,
) -> BaseDistribution:
    """Prepare a distribution for installation.

    The requirement is registered with ``req_tracker`` for the duration of
    the metadata build so that recursive/cyclic builds can be detected.
    """
    dist_builder = make_distribution_for_install_requirement(req)
    with req_tracker.track(req):
        dist_builder.prepare_distribution_metadata(finder, build_isolation)
    return dist_builder.get_metadata_distribution()
60
+
61
+
62
def unpack_vcs_link(link: Link, location: str, verbosity: int) -> None:
    """Check out the VCS repository behind *link* into *location*."""
    backend = vcs.get_backend_for_scheme(link.scheme)
    assert backend is not None
    backend.unpack(location, url=hide_url(link.url), verbosity=verbosity)
66
+
67
+
68
class File:
    """A local file path paired with its (possibly guessed) content type."""

    def __init__(self, path: str, content_type: Optional[str]) -> None:
        self.path = path
        # When no explicit content type is supplied, fall back to guessing
        # one from the file name's extension.
        self.content_type = (
            content_type
            if content_type is not None
            else mimetypes.guess_type(path)[0]
        )
75
+
76
+
77
def get_http_url(
    link: Link,
    download: Downloader,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> File:
    """Obtain the file behind an HTTP(S) link.

    A file previously saved in *download_dir* is reused when present (and
    hash-valid); otherwise the link is downloaded into a managed temporary
    directory and checked against *hashes*.
    """
    temp_dir = TempDirectory(kind="unpack", globally_managed=True)

    # Reuse an earlier download when one is available and valid.
    cached_path = None
    if download_dir:
        cached_path = _check_download_dir(link, download_dir, hashes)
    if cached_path:
        return File(cached_path, None)

    # No usable cached file: download into the temporary directory.
    from_path, content_type = download(link, temp_dir.path)
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, content_type)
99
+
100
+
101
def _copy2_ignoring_special_files(src: str, dest: str) -> None:
    """Copy *src* to *dest*, warning instead of failing on special files.

    Copying special files is not supported, but as a convenience to users
    we skip errors copying them. This supports tools that may create e.g.
    socket files in the project source directory.
    """
    try:
        copy2_fixed(src, dest)
    except shutil.SpecialFileError as exc:
        # SpecialFileError may be raised due to either the source or the
        # destination. If the destination were the cause we would care, but
        # the destination directory is deleted prior to the copy, so we
        # assume every such error is caused by the source and ignore it.
        logger.warning(
            "Ignoring special file error '%s' encountered copying %s to %s.",
            str(exc),
            src,
            dest,
        )
119
+
120
+
121
def _copy_source_tree(source: str, target: str) -> None:
    """Recursively copy the *source* tree to *target*, skipping hazards."""
    target_abspath = os.path.abspath(target)
    target_basename = os.path.basename(target_abspath)
    target_dirname = os.path.dirname(target_abspath)

    def _ignore(d: str, names: List[str]) -> List[str]:
        excluded: List[str] = []
        if d == source:
            # Pulling in these directories can potentially be very slow;
            # exclude them when they appear in the top level dir (and only
            # there). See discussion at https://github.com/pypa/pip/pull/6770
            excluded += [".tox", ".nox"]
        if os.path.abspath(d) == target_dirname:
            # Prevent an infinite recursion if the target is inside source.
            # This can happen when TMPDIR is set to ${PWD}/... and we copy
            # PWD to TMPDIR.
            excluded += [target_basename]
        return excluded

    shutil.copytree(
        source,
        target,
        ignore=_ignore,
        symlinks=True,
        copy_function=_copy2_ignoring_special_files,
    )
148
+
149
+
150
def get_file_url(
    link: Link, download_dir: Optional[str] = None, hashes: Optional[Hashes] = None
) -> File:
    """Get file and optionally check its hash."""
    # Prefer a previously downloaded (and hash-valid) copy when available.
    cached_path = None
    if download_dir:
        cached_path = _check_download_dir(link, download_dir, hashes)
    from_path = cached_path if cached_path else link.file_path

    # If --require-hashes is off, `hashes` is either empty, the link's
    # embedded hash, or MissingHashes; it is required to match. If
    # --require-hashes is on, we are satisfied by any hash in `hashes`
    # matching: a URL-based or an option-based one; no internet-sourced
    # hash will be in `hashes`.
    if hashes:
        hashes.check_against_path(from_path)
    return File(from_path, None)
172
+
173
+
174
def unpack_url(
    link: Link,
    location: str,
    download: Downloader,
    verbosity: int,
    download_dir: Optional[str] = None,
    hashes: Optional[Hashes] = None,
) -> Optional[File]:
    """Unpack link into location, downloading if required.

    :param hashes: A Hashes object, one of whose embedded hashes must match,
        or HashMismatch will be raised. If the Hashes is empty, no matches are
        required, and unhashable types of requirements (like VCS ones, which
        would ordinarily raise HashUnsupported) are allowed.
    """
    # Non-editable VCS URLs: check out and stop; nothing to return.
    if link.is_vcs:
        unpack_vcs_link(link, location, verbosity=verbosity)
        return None

    # Once out-of-tree-builds are no longer supported, this condition could
    # potentially be replaced with `assert not link.is_existing_dir` -
    # unpack_url does not need to be called for in-tree-builds.
    #
    # As further cleanup, _copy_source_tree and accompanying tests can
    # be removed.
    #
    # TODO when use-deprecated=out-of-tree-build is removed
    if link.is_existing_dir():
        if os.path.isdir(location):
            rmtree(location)
        _copy_source_tree(link.file_path, location)
        return None

    if link.is_file:
        # file: URLs
        file = get_file_url(link, download_dir, hashes=hashes)
    else:
        # http(s): URLs
        file = get_http_url(link, download, download_dir, hashes=hashes)

    # Unpack the archive into the build dir location. Even when only
    # downloading archives, they have to be unpacked to parse dependencies
    # - except wheels, which are consumed packed.
    if not link.is_wheel:
        unpack_file(file.path, location, file.content_type)

    return file
227
+
228
+
229
+ def _check_download_dir(
230
+ link: Link, download_dir: str, hashes: Optional[Hashes]
231
+ ) -> Optional[str]:
232
+ """Check download_dir for previously downloaded file with correct hash
233
+ If a correct file is found return its path else None
234
+ """
235
+ download_path = os.path.join(download_dir, link.filename)
236
+
237
+ if not os.path.exists(download_path):
238
+ return None
239
+
240
+ # If already downloaded, does its hash match?
241
+ logger.info("File was already downloaded %s", download_path)
242
+ if hashes:
243
+ try:
244
+ hashes.check_against_path(download_path)
245
+ except HashMismatch:
246
+ logger.warning(
247
+ "Previously-downloaded file %s has bad hash. Re-downloading.",
248
+ download_path,
249
+ )
250
+ os.unlink(download_path)
251
+ return None
252
+ return download_path
253
+
254
+
255
class RequirementPreparer:
    """Prepares a Requirement"""

    def __init__(
        self,
        build_dir: str,
        download_dir: Optional[str],
        src_dir: str,
        build_isolation: bool,
        req_tracker: RequirementTracker,
        session: PipSession,
        progress_bar: str,
        finder: PackageFinder,
        require_hashes: bool,
        use_user_site: bool,
        lazy_wheel: bool,
        verbosity: int,
        in_tree_build: bool,
    ) -> None:
        super().__init__()

        self.src_dir = src_dir
        self.build_dir = build_dir
        self.req_tracker = req_tracker
        self._session = session
        self._download = Downloader(session, progress_bar)
        self._batch_download = BatchDownloader(session, progress_bar)
        self.finder = finder

        # Where still-packed archives should be written to. If None, they are
        # not saved, and are deleted immediately after unpacking.
        self.download_dir = download_dir

        # Is build isolation allowed?
        self.build_isolation = build_isolation

        # Should hash-checking be required?
        self.require_hashes = require_hashes

        # Should install in user site-packages?
        self.use_user_site = use_user_site

        # Should wheels be downloaded lazily?
        self.use_lazy_wheel = lazy_wheel

        # How verbose should underlying tooling be?
        self.verbosity = verbosity

        # Should in-tree builds be used for local paths?
        self.in_tree_build = in_tree_build

        # Memoized downloaded files, as mapping of url: path.
        self._downloaded: Dict[str, str] = {}

        # Previous "header" printed for a link-based InstallRequirement
        self._previous_requirement_header = ("", "")

    def _log_preparing_link(self, req: InstallRequirement) -> None:
        """Provide context for the requirement being prepared."""
        if req.link.is_file and not req.original_link_is_in_wheel_cache:
            message = "Processing %s"
            information = str(display_path(req.link.file_path))
        else:
            message = "Collecting %s"
            information = str(req.req or req)

        # Only print the "header" when it differs from the previous one, so
        # repeated preparations of the same requirement stay quiet.
        if (message, information) != self._previous_requirement_header:
            self._previous_requirement_header = (message, information)
            logger.info(message, information)

        if req.original_link_is_in_wheel_cache:
            with indent_log():
                logger.info("Using cached %s", req.link.filename)

    def _ensure_link_req_src_dir(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> None:
        """Ensure source_dir of a linked InstallRequirement."""
        # Since source_dir is only set for editable requirements.
        if req.link.is_wheel:
            # We don't need to unpack wheels, so no need for a source
            # directory.
            return
        assert req.source_dir is None
        if req.link.is_existing_dir() and self.in_tree_build:
            # build local directories in-tree
            req.source_dir = req.link.file_path
            return

        # We always delete unpacked sdists after pip runs.
        req.ensure_has_source_dir(
            self.build_dir,
            autodelete=True,
            parallel_builds=parallel_builds,
        )

        # If a checkout exists, it's unwise to keep going. version
        # inconsistencies are logged later, but do not fail the
        # installation.
        # FIXME: this won't upgrade when there's an existing
        # package unpacked in `req.source_dir`
        # TODO: this check is now probably dead code
        if is_installable_dir(req.source_dir):
            # Fixed: the first two fragments previously concatenated to
            # "due to apre-existing", and "failed ." had a stray space.
            raise PreviousBuildDirError(
                "pip can't proceed with requirements '{}' due to a "
                "pre-existing build directory ({}). This is likely "
                "due to a previous installation that failed. pip is "
                "being responsible and not assuming it can delete this. "
                "Please delete it and try again.".format(req, req.source_dir)
            )

    def _get_linked_req_hashes(self, req: InstallRequirement) -> Hashes:
        """Return the Hashes object the linked requirement must satisfy."""
        # By the time this is called, the requirement's link should have
        # been checked so we can tell what kind of requirements req is
        # and raise some more informative errors than otherwise.
        # (For example, we can raise VcsHashUnsupported for a VCS URL
        # rather than HashMissing.)
        if not self.require_hashes:
            return req.hashes(trust_internet=True)

        # We could check these first 2 conditions inside unpack_url
        # and save repetition of conditions, but then we would
        # report less-useful error messages for unhashable
        # requirements, complaining that there's no hash provided.
        if req.link.is_vcs:
            raise VcsHashUnsupported()
        if req.link.is_existing_dir():
            raise DirectoryUrlHashUnsupported()

        # Unpinned packages are asking for trouble when a new version
        # is uploaded. This isn't a security check, but it saves users
        # a surprising hash mismatch in the future.
        # file:/// URLs aren't pinnable, so don't complain about them
        # not being pinned.
        if req.original_link is None and not req.is_pinned:
            raise HashUnpinned()

        # If known-good hashes are missing for this requirement,
        # shim it with a facade object that will provoke hash
        # computation and then raise a HashMissing exception
        # showing the user what the hash should be.
        return req.hashes(trust_internet=False) or MissingHashes()

    def _fetch_metadata_using_lazy_wheel(
        self,
        link: Link,
    ) -> Optional[BaseDistribution]:
        """Fetch metadata using lazy wheel, if possible.

        Returns None whenever lazy fetching is disabled, unsupported for
        this link, or the server rejects range requests.
        """
        if not self.use_lazy_wheel:
            return None
        if self.require_hashes:
            logger.debug("Lazy wheel is not used as hash checking is required")
            return None
        if link.is_file or not link.is_wheel:
            # Fixed grammar: "does not points to" -> "does not point to".
            logger.debug(
                "Lazy wheel is not used as %r does not point to a remote wheel",
                link,
            )
            return None

        wheel = Wheel(link.filename)
        name = canonicalize_name(wheel.name)
        logger.info(
            "Obtaining dependency information from %s %s",
            name,
            wheel.version,
        )
        url = link.url.split("#", 1)[0]
        try:
            return dist_from_wheel_url(name, url, self._session)
        except HTTPRangeRequestUnsupported:
            logger.debug("%s does not support range requests", url)
            return None

    def _complete_partial_requirements(
        self,
        partially_downloaded_reqs: Iterable[InstallRequirement],
        parallel_builds: bool = False,
    ) -> None:
        """Download any requirements which were only fetched by metadata."""
        # Download to a temporary directory. These will be copied over as
        # needed for downstream 'download', 'wheel', and 'install' commands.
        temp_dir = TempDirectory(kind="unpack", globally_managed=True).path

        # Map each link to the requirement that owns it. This allows us to set
        # `req.local_file_path` on the appropriate requirement after passing
        # all the links at once into BatchDownloader.
        links_to_fully_download: Dict[Link, InstallRequirement] = {}
        for req in partially_downloaded_reqs:
            assert req.link
            links_to_fully_download[req.link] = req

        batch_download = self._batch_download(
            links_to_fully_download.keys(),
            temp_dir,
        )
        for link, (filepath, _) in batch_download:
            logger.debug("Downloading link %s to %s", link, filepath)
            req = links_to_fully_download[link]
            req.local_file_path = filepath

        # This step is necessary to ensure all lazy wheels are processed
        # successfully by the 'download', 'wheel', and 'install' commands.
        for req in partially_downloaded_reqs:
            self._prepare_linked_requirement(req, parallel_builds)

    def prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool = False
    ) -> BaseDistribution:
        """Prepare a requirement to be obtained from req.link."""
        assert req.link
        link = req.link
        self._log_preparing_link(req)
        with indent_log():
            # Check if the relevant file is already available
            # in the download directory
            file_path = None
            if self.download_dir is not None and link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(req.link, self.download_dir, hashes)

            if file_path is not None:
                # The file is already available, so mark it as downloaded
                self._downloaded[req.link.url] = file_path
            else:
                # The file is not available, attempt to fetch only metadata
                wheel_dist = self._fetch_metadata_using_lazy_wheel(link)
                if wheel_dist is not None:
                    req.needs_more_preparation = True
                    return wheel_dist

            # None of the optimizations worked, fully prepare the requirement
            return self._prepare_linked_requirement(req, parallel_builds)

    def prepare_linked_requirements_more(
        self, reqs: Iterable[InstallRequirement], parallel_builds: bool = False
    ) -> None:
        """Prepare linked requirements more, if needed."""
        reqs = [req for req in reqs if req.needs_more_preparation]
        for req in reqs:
            # Determine if any of these requirements were already downloaded.
            if self.download_dir is not None and req.link.is_wheel:
                hashes = self._get_linked_req_hashes(req)
                file_path = _check_download_dir(req.link, self.download_dir, hashes)
                if file_path is not None:
                    self._downloaded[req.link.url] = file_path
                    req.needs_more_preparation = False

        # Prepare requirements we found were already downloaded for some
        # reason. The other downloads will be completed separately.
        partially_downloaded_reqs: List[InstallRequirement] = []
        for req in reqs:
            if req.needs_more_preparation:
                partially_downloaded_reqs.append(req)
            else:
                self._prepare_linked_requirement(req, parallel_builds)

        # TODO: separate this part out from RequirementPreparer when the v1
        # resolver can be removed!
        self._complete_partial_requirements(
            partially_downloaded_reqs,
            parallel_builds=parallel_builds,
        )

    def _prepare_linked_requirement(
        self, req: InstallRequirement, parallel_builds: bool
    ) -> BaseDistribution:
        """Fully prepare a linked requirement: unpack, hash-check, build."""
        assert req.link
        link = req.link

        self._ensure_link_req_src_dir(req, parallel_builds)
        hashes = self._get_linked_req_hashes(req)

        if link.is_existing_dir() and self.in_tree_build:
            local_file = None
        elif link.url not in self._downloaded:
            try:
                local_file = unpack_url(
                    link,
                    req.source_dir,
                    self._download,
                    self.verbosity,
                    self.download_dir,
                    hashes,
                )
            except NetworkConnectionError as exc:
                raise InstallationError(
                    "Could not install requirement {} because of HTTP "
                    "error {} for URL {}".format(req, exc, link)
                )
        else:
            file_path = self._downloaded[link.url]
            if hashes:
                hashes.check_against_path(file_path)
            local_file = File(file_path, content_type=None)

        # For use in later processing,
        # preserve the file path on the requirement.
        if local_file:
            req.local_file_path = local_file.path

        dist = _get_prepared_distribution(
            req,
            self.req_tracker,
            self.finder,
            self.build_isolation,
        )
        return dist

    def save_linked_requirement(self, req: InstallRequirement) -> None:
        """Persist the requirement's archive into the download directory."""
        assert self.download_dir is not None
        assert req.link is not None
        link = req.link
        if link.is_vcs or (link.is_existing_dir() and req.editable):
            # Make a .zip of the source_dir we already created.
            req.archive(self.download_dir)
            return

        if link.is_existing_dir():
            logger.debug(
                "Not copying link to destination directory "
                "since it is a directory: %s",
                link,
            )
            return
        if req.local_file_path is None:
            # No distribution was downloaded for this requirement.
            return

        download_location = os.path.join(self.download_dir, link.filename)
        if not os.path.exists(download_location):
            shutil.copy(req.local_file_path, download_location)
            download_path = display_path(download_location)
            logger.info("Saved %s", download_path)

    def prepare_editable_requirement(
        self,
        req: InstallRequirement,
    ) -> BaseDistribution:
        """Prepare an editable requirement."""
        assert req.editable, "cannot prepare a non-editable req as editable"

        logger.info("Obtaining %s", req)

        with indent_log():
            if self.require_hashes:
                raise InstallationError(
                    "The editable requirement {} cannot be installed when "
                    "requiring hashes, because there is no single file to "
                    "hash.".format(req)
                )
            req.ensure_has_source_dir(self.src_dir)
            req.update_editable()

            dist = _get_prepared_distribution(
                req,
                self.req_tracker,
                self.finder,
                self.build_isolation,
            )

            req.check_if_exists(self.use_user_site)

        return dist

    def prepare_installed_requirement(
        self,
        req: InstallRequirement,
        skip_reason: str,
    ) -> BaseDistribution:
        """Prepare an already-installed requirement."""
        assert req.satisfied_by, "req should have been satisfied but isn't"
        assert skip_reason is not None, (
            "did not get skip reason skipped but req.satisfied_by "
            "is set to {}".format(req.satisfied_by)
        )
        logger.info(
            "Requirement %s: %s (%s)", skip_reason, req, req.satisfied_by.version
        )
        with indent_log():
            if self.require_hashes:
                logger.debug(
                    "Since it is already installed, we are trusting this "
                    "package without checking its hash. To ensure a "
                    "completely repeatable environment, install into an "
                    "empty virtualenv."
                )
            return InstalledDistribution(req).get_metadata_distribution()
venv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (187 Bytes). View file
 
venv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc ADDED
Binary file (1.52 kB). View file
 
venv/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc ADDED
Binary file (1.61 kB). View file