applied-ai-018 committed on
Commit 49e4a4c · verified · 1 Parent(s): 1021645

Add files using upload-large-folder tool
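The folder was pushed with the large-folder upload workflow from huggingface_hub (the tool named in the commit message above). As a minimal sketch of how such an upload can be reproduced — assuming huggingface_hub's HfApi.upload_large_folder API; the repo id and local path below are placeholders, not values taken from this commit:

from huggingface_hub import HfApi

api = HfApi()  # authenticates with the token stored by `huggingface-cli login`
api.upload_large_folder(
    repo_id="your-username/your-repo",   # placeholder target repository
    repo_type="model",                   # assumption: pushing to a model repo
    folder_path="path/to/llmeval-env",   # placeholder local folder; transfers are chunked and resumable
)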

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. llmeval-env/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc +0 -0
  2. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__init__.py +4 -0
  3. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  4. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc +0 -0
  5. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc +0 -0
  6. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc +0 -0
  7. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc +0 -0
  8. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc +0 -0
  9. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc +0 -0
  10. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc +0 -0
  11. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc +0 -0
  12. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc +0 -0
  13. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc +0 -0
  14. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc +0 -0
  15. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py +171 -0
  16. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/base_command.py +220 -0
  17. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py +1018 -0
  18. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/command_context.py +27 -0
  19. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/main.py +70 -0
  20. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py +87 -0
  21. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/parser.py +292 -0
  22. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py +321 -0
  23. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/req_command.py +506 -0
  24. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/spinners.py +157 -0
  25. llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py +6 -0
  26. llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc +0 -0
  27. llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc +0 -0
  28. llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc +0 -0
  29. llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc +0 -0
  30. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__init__.py +94 -0
  31. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc +0 -0
  32. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc +0 -0
  33. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc +0 -0
  34. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc +0 -0
  35. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc +0 -0
  36. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc +0 -0
  37. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc +0 -0
  38. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/constructors.py +490 -0
  39. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_file.py +536 -0
  40. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_install.py +858 -0
  41. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_set.py +189 -0
  42. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_tracker.py +124 -0
  43. llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py +633 -0
  44. llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__init__.py +0 -0
  45. llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc +0 -0
  46. llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc +0 -0
  47. llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc +0 -0
  48. llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-310.pyc +0 -0
  49. llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-310.pyc +0 -0
  50. llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc +0 -0
llmeval-env/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (753 Bytes).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__init__.py ADDED
@@ -0,0 +1,4 @@
+ """Subpackage containing all of pip's command line interface related code
+ """
+
+ # This file intentionally does not import submodules
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (273 Bytes).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc ADDED
Binary file (5.31 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/base_command.cpython-310.pyc ADDED
Binary file (6.25 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/cmdoptions.cpython-310.pyc ADDED
Binary file (22.5 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/command_context.cpython-310.pyc ADDED
Binary file (1.31 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc ADDED
Binary file (1.37 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main_parser.cpython-310.pyc ADDED
Binary file (2.16 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc ADDED
Binary file (9.94 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc ADDED
Binary file (9.23 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc ADDED
Binary file (13.5 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc ADDED
Binary file (4.95 kB).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc ADDED
Binary file (352 Bytes).
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/autocompletion.py ADDED
@@ -0,0 +1,171 @@
+ """Logic that powers autocompletion installed by ``pip completion``.
+ """
+
+ import optparse
+ import os
+ import sys
+ from itertools import chain
+ from typing import Any, Iterable, List, Optional
+
+ from pip._internal.cli.main_parser import create_main_parser
+ from pip._internal.commands import commands_dict, create_command
+ from pip._internal.metadata import get_default_environment
+
+
+ def autocomplete() -> None:
+     """Entry Point for completion of main and subcommand options."""
+     # Don't complete if user hasn't sourced bash_completion file.
+     if "PIP_AUTO_COMPLETE" not in os.environ:
+         return
+     cwords = os.environ["COMP_WORDS"].split()[1:]
+     cword = int(os.environ["COMP_CWORD"])
+     try:
+         current = cwords[cword - 1]
+     except IndexError:
+         current = ""
+
+     parser = create_main_parser()
+     subcommands = list(commands_dict)
+     options = []
+
+     # subcommand
+     subcommand_name: Optional[str] = None
+     for word in cwords:
+         if word in subcommands:
+             subcommand_name = word
+             break
+     # subcommand options
+     if subcommand_name is not None:
+         # special case: 'help' subcommand has no options
+         if subcommand_name == "help":
+             sys.exit(1)
+         # special case: list locally installed dists for show and uninstall
+         should_list_installed = not current.startswith("-") and subcommand_name in [
+             "show",
+             "uninstall",
+         ]
+         if should_list_installed:
+             env = get_default_environment()
+             lc = current.lower()
+             installed = [
+                 dist.canonical_name
+                 for dist in env.iter_installed_distributions(local_only=True)
+                 if dist.canonical_name.startswith(lc)
+                 and dist.canonical_name not in cwords[1:]
+             ]
+             # if there are no dists installed, fall back to option completion
+             if installed:
+                 for dist in installed:
+                     print(dist)
+                 sys.exit(1)
+
+         should_list_installables = (
+             not current.startswith("-") and subcommand_name == "install"
+         )
+         if should_list_installables:
+             for path in auto_complete_paths(current, "path"):
+                 print(path)
+             sys.exit(1)
+
+         subcommand = create_command(subcommand_name)
+
+         for opt in subcommand.parser.option_list_all:
+             if opt.help != optparse.SUPPRESS_HELP:
+                 for opt_str in opt._long_opts + opt._short_opts:
+                     options.append((opt_str, opt.nargs))
+
+         # filter out previously specified options from available options
+         prev_opts = [x.split("=")[0] for x in cwords[1 : cword - 1]]
+         options = [(x, v) for (x, v) in options if x not in prev_opts]
+         # filter options by current input
+         options = [(k, v) for k, v in options if k.startswith(current)]
+         # get completion type given cwords and available subcommand options
+         completion_type = get_path_completion_type(
+             cwords,
+             cword,
+             subcommand.parser.option_list_all,
+         )
+         # get completion files and directories if ``completion_type`` is
+         # ``<file>``, ``<dir>`` or ``<path>``
+         if completion_type:
+             paths = auto_complete_paths(current, completion_type)
+             options = [(path, 0) for path in paths]
+         for option in options:
+             opt_label = option[0]
+             # append '=' to options which require args
+             if option[1] and option[0][:2] == "--":
+                 opt_label += "="
+             print(opt_label)
+     else:
+         # show main parser options only when necessary
+
+         opts = [i.option_list for i in parser.option_groups]
+         opts.append(parser.option_list)
+         flattened_opts = chain.from_iterable(opts)
+         if current.startswith("-"):
+             for opt in flattened_opts:
+                 if opt.help != optparse.SUPPRESS_HELP:
+                     subcommands += opt._long_opts + opt._short_opts
+         else:
+             # get completion type given cwords and all available options
+             completion_type = get_path_completion_type(cwords, cword, flattened_opts)
+             if completion_type:
+                 subcommands = list(auto_complete_paths(current, completion_type))
+
+         print(" ".join([x for x in subcommands if x.startswith(current)]))
+     sys.exit(1)
+
+
+ def get_path_completion_type(
+     cwords: List[str], cword: int, opts: Iterable[Any]
+ ) -> Optional[str]:
+     """Get the type of path completion (``file``, ``dir``, ``path`` or None)
+
+     :param cwords: same as the environmental variable ``COMP_WORDS``
+     :param cword: same as the environmental variable ``COMP_CWORD``
+     :param opts: The available options to check
+     :return: path completion type (``file``, ``dir``, ``path`` or None)
+     """
+     if cword < 2 or not cwords[cword - 2].startswith("-"):
+         return None
+     for opt in opts:
+         if opt.help == optparse.SUPPRESS_HELP:
+             continue
+         for o in str(opt).split("/"):
+             if cwords[cword - 2].split("=")[0] == o:
+                 if not opt.metavar or any(
+                     x in ("path", "file", "dir") for x in opt.metavar.split("/")
+                 ):
+                     return opt.metavar
+     return None
+
+
+ def auto_complete_paths(current: str, completion_type: str) -> Iterable[str]:
+     """If ``completion_type`` is ``file`` or ``path``, list all regular files
+     and directories starting with ``current``; otherwise only list directories
+     starting with ``current``.
+
+     :param current: The word to be completed
+     :param completion_type: path completion type(``file``, ``path`` or ``dir``)
+     :return: A generator of regular files and/or directories
+     """
+     directory, filename = os.path.split(current)
+     current_path = os.path.abspath(directory)
+     # Don't complete paths if they can't be accessed
+     if not os.access(current_path, os.R_OK):
+         return
+     filename = os.path.normcase(filename)
+     # list all files that start with ``filename``
+     file_list = (
+         x for x in os.listdir(current_path) if os.path.normcase(x).startswith(filename)
+     )
+     for f in file_list:
+         opt = os.path.join(current_path, f)
+         comp_file = os.path.normcase(os.path.join(directory, f))
+         # complete regular files when there is not ``<dir>`` after option
+         # complete directories when there is ``<file>``, ``<path>`` or
+         # ``<dir>``after option
+         if completion_type != "dir" and os.path.isfile(opt):
+             yield comp_file
+         elif os.path.isdir(opt):
+             yield os.path.join(comp_file, "")
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/base_command.py ADDED
@@ -0,0 +1,220 @@
+ """Base Command class, and related routines"""
+
+ import functools
+ import logging
+ import logging.config
+ import optparse
+ import os
+ import sys
+ import traceback
+ from optparse import Values
+ from typing import Any, Callable, List, Optional, Tuple
+
+ from pip._internal.cli import cmdoptions
+ from pip._internal.cli.command_context import CommandContextMixIn
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
+ from pip._internal.cli.status_codes import (
+     ERROR,
+     PREVIOUS_BUILD_DIR_ERROR,
+     UNKNOWN_ERROR,
+     VIRTUALENV_NOT_FOUND,
+ )
+ from pip._internal.exceptions import (
+     BadCommand,
+     CommandError,
+     DiagnosticPipError,
+     InstallationError,
+     NetworkConnectionError,
+     PreviousBuildDirError,
+     UninstallationError,
+ )
+ from pip._internal.utils.filesystem import check_path_owner
+ from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
+ from pip._internal.utils.misc import get_prog, normalize_path
+ from pip._internal.utils.temp_dir import TempDirectoryTypeRegistry as TempDirRegistry
+ from pip._internal.utils.temp_dir import global_tempdir_manager, tempdir_registry
+ from pip._internal.utils.virtualenv import running_under_virtualenv
+
+ __all__ = ["Command"]
+
+ logger = logging.getLogger(__name__)
+
+
+ class Command(CommandContextMixIn):
+     usage: str = ""
+     ignore_require_venv: bool = False
+
+     def __init__(self, name: str, summary: str, isolated: bool = False) -> None:
+         super().__init__()
+
+         self.name = name
+         self.summary = summary
+         self.parser = ConfigOptionParser(
+             usage=self.usage,
+             prog=f"{get_prog()} {name}",
+             formatter=UpdatingDefaultsHelpFormatter(),
+             add_help_option=False,
+             name=name,
+             description=self.__doc__,
+             isolated=isolated,
+         )
+
+         self.tempdir_registry: Optional[TempDirRegistry] = None
+
+         # Commands should add options to this option group
+         optgroup_name = f"{self.name.capitalize()} Options"
+         self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
+
+         # Add the general options
+         gen_opts = cmdoptions.make_option_group(
+             cmdoptions.general_group,
+             self.parser,
+         )
+         self.parser.add_option_group(gen_opts)
+
+         self.add_options()
+
+     def add_options(self) -> None:
+         pass
+
+     def handle_pip_version_check(self, options: Values) -> None:
+         """
+         This is a no-op so that commands by default do not do the pip version
+         check.
+         """
+         # Make sure we do the pip version check if the index_group options
+         # are present.
+         assert not hasattr(options, "no_index")
+
+     def run(self, options: Values, args: List[str]) -> int:
+         raise NotImplementedError
+
+     def parse_args(self, args: List[str]) -> Tuple[Values, List[str]]:
+         # factored out for testability
+         return self.parser.parse_args(args)
+
+     def main(self, args: List[str]) -> int:
+         try:
+             with self.main_context():
+                 return self._main(args)
+         finally:
+             logging.shutdown()
+
+     def _main(self, args: List[str]) -> int:
+         # We must initialize this before the tempdir manager, otherwise the
+         # configuration would not be accessible by the time we clean up the
+         # tempdir manager.
+         self.tempdir_registry = self.enter_context(tempdir_registry())
+         # Intentionally set as early as possible so globally-managed temporary
+         # directories are available to the rest of the code.
+         self.enter_context(global_tempdir_manager())
+
+         options, args = self.parse_args(args)
+
+         # Set verbosity so that it can be used elsewhere.
+         self.verbosity = options.verbose - options.quiet
+
+         level_number = setup_logging(
+             verbosity=self.verbosity,
+             no_color=options.no_color,
+             user_log_file=options.log,
+         )
+
+         # TODO: Try to get these passing down from the command?
+         # without resorting to os.environ to hold these.
+         # This also affects isolated builds and it should.
+
+         if options.no_input:
+             os.environ["PIP_NO_INPUT"] = "1"
+
+         if options.exists_action:
+             os.environ["PIP_EXISTS_ACTION"] = " ".join(options.exists_action)
+
+         if options.require_venv and not self.ignore_require_venv:
+             # If a venv is required check if it can really be found
+             if not running_under_virtualenv():
+                 logger.critical("Could not find an activated virtualenv (required).")
+                 sys.exit(VIRTUALENV_NOT_FOUND)
+
+         if options.cache_dir:
+             options.cache_dir = normalize_path(options.cache_dir)
+             if not check_path_owner(options.cache_dir):
+                 logger.warning(
+                     "The directory '%s' or its parent directory is not owned "
+                     "or is not writable by the current user. The cache "
+                     "has been disabled. Check the permissions and owner of "
+                     "that directory. If executing pip with sudo, you should "
+                     "use sudo's -H flag.",
+                     options.cache_dir,
+                 )
+                 options.cache_dir = None
+
+         if "2020-resolver" in options.features_enabled:
+             logger.warning(
+                 "--use-feature=2020-resolver no longer has any effect, "
+                 "since it is now the default dependency resolver in pip. "
+                 "This will become an error in pip 21.0."
+             )
+
+         def intercepts_unhandled_exc(
+             run_func: Callable[..., int]
+         ) -> Callable[..., int]:
+             @functools.wraps(run_func)
+             def exc_logging_wrapper(*args: Any) -> int:
+                 try:
+                     status = run_func(*args)
+                     assert isinstance(status, int)
+                     return status
+                 except DiagnosticPipError as exc:
+                     logger.error("[present-diagnostic] %s", exc)
+                     logger.debug("Exception information:", exc_info=True)
+
+                     return ERROR
+                 except PreviousBuildDirError as exc:
+                     logger.critical(str(exc))
+                     logger.debug("Exception information:", exc_info=True)
+
+                     return PREVIOUS_BUILD_DIR_ERROR
+                 except (
+                     InstallationError,
+                     UninstallationError,
+                     BadCommand,
+                     NetworkConnectionError,
+                 ) as exc:
+                     logger.critical(str(exc))
+                     logger.debug("Exception information:", exc_info=True)
+
+                     return ERROR
+                 except CommandError as exc:
+                     logger.critical("%s", exc)
+                     logger.debug("Exception information:", exc_info=True)
+
+                     return ERROR
+                 except BrokenStdoutLoggingError:
+                     # Bypass our logger and write any remaining messages to
+                     # stderr because stdout no longer works.
+                     print("ERROR: Pipe to stdout was broken", file=sys.stderr)
+                     if level_number <= logging.DEBUG:
+                         traceback.print_exc(file=sys.stderr)
+
+                     return ERROR
+                 except KeyboardInterrupt:
+                     logger.critical("Operation cancelled by user")
+                     logger.debug("Exception information:", exc_info=True)
+
+                     return ERROR
+                 except BaseException:
+                     logger.critical("Exception:", exc_info=True)
+
+                     return UNKNOWN_ERROR
+
+             return exc_logging_wrapper
+
+         try:
+             if not options.debug_mode:
+                 run = intercepts_unhandled_exc(self.run)
+             else:
+                 run = self.run
+             return run(options, args)
+         finally:
+             self.handle_pip_version_check(options)
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py ADDED
@@ -0,0 +1,1018 @@
1
+ """
2
+ shared options and groups
3
+
4
+ The principle here is to define options once, but *not* instantiate them
5
+ globally. One reason being that options with action='append' can carry state
6
+ between parses. pip parses general options twice internally, and shouldn't
7
+ pass on state. To be consistent, all options will follow this design.
8
+ """
9
+
10
+ # The following comment should be removed at some point in the future.
11
+ # mypy: strict-optional=False
12
+
13
+ import logging
14
+ import os
15
+ import textwrap
16
+ from functools import partial
17
+ from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
18
+ from textwrap import dedent
19
+ from typing import Any, Callable, Dict, Optional, Tuple
20
+
21
+ from pip._vendor.packaging.utils import canonicalize_name
22
+
23
+ from pip._internal.cli.parser import ConfigOptionParser
24
+ from pip._internal.cli.progress_bars import BAR_TYPES
25
+ from pip._internal.exceptions import CommandError
26
+ from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
27
+ from pip._internal.models.format_control import FormatControl
28
+ from pip._internal.models.index import PyPI
29
+ from pip._internal.models.target_python import TargetPython
30
+ from pip._internal.utils.hashes import STRONG_HASHES
31
+ from pip._internal.utils.misc import strtobool
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
+ def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
37
+ """
38
+ Raise an option parsing error using parser.error().
39
+
40
+ Args:
41
+ parser: an OptionParser instance.
42
+ option: an Option instance.
43
+ msg: the error text.
44
+ """
45
+ msg = f"{option} error: {msg}"
46
+ msg = textwrap.fill(" ".join(msg.split()))
47
+ parser.error(msg)
48
+
49
+
50
+ def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
51
+ """
52
+ Return an OptionGroup object
53
+ group -- assumed to be dict with 'name' and 'options' keys
54
+ parser -- an optparse Parser
55
+ """
56
+ option_group = OptionGroup(parser, group["name"])
57
+ for option in group["options"]:
58
+ option_group.add_option(option())
59
+ return option_group
60
+
61
+
62
+ def check_install_build_global(
63
+ options: Values, check_options: Optional[Values] = None
64
+ ) -> None:
65
+ """Disable wheels if per-setup.py call options are set.
66
+
67
+ :param options: The OptionParser options to update.
68
+ :param check_options: The options to check, if not supplied defaults to
69
+ options.
70
+ """
71
+ if check_options is None:
72
+ check_options = options
73
+
74
+ def getname(n: str) -> Optional[Any]:
75
+ return getattr(check_options, n, None)
76
+
77
+ names = ["build_options", "global_options", "install_options"]
78
+ if any(map(getname, names)):
79
+ control = options.format_control
80
+ control.disallow_binaries()
81
+ logger.warning(
82
+ "Disabling all use of wheels due to the use of --build-option "
83
+ "/ --global-option / --install-option.",
84
+ )
85
+
86
+
87
+ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
88
+ """Function for determining if custom platform options are allowed.
89
+
90
+ :param options: The OptionParser options.
91
+ :param check_target: Whether or not to check if --target is being used.
92
+ """
93
+ dist_restriction_set = any(
94
+ [
95
+ options.python_version,
96
+ options.platforms,
97
+ options.abis,
98
+ options.implementation,
99
+ ]
100
+ )
101
+
102
+ binary_only = FormatControl(set(), {":all:"})
103
+ sdist_dependencies_allowed = (
104
+ options.format_control != binary_only and not options.ignore_dependencies
105
+ )
106
+
107
+ # Installations or downloads using dist restrictions must not combine
108
+ # source distributions and dist-specific wheels, as they are not
109
+ # guaranteed to be locally compatible.
110
+ if dist_restriction_set and sdist_dependencies_allowed:
111
+ raise CommandError(
112
+ "When restricting platform and interpreter constraints using "
113
+ "--python-version, --platform, --abi, or --implementation, "
114
+ "either --no-deps must be set, or --only-binary=:all: must be "
115
+ "set and --no-binary must not be set (or must be set to "
116
+ ":none:)."
117
+ )
118
+
119
+ if check_target:
120
+ if dist_restriction_set and not options.target_dir:
121
+ raise CommandError(
122
+ "Can not use any platform or abi specific options unless "
123
+ "installing via '--target'"
124
+ )
125
+
126
+
127
+ def _path_option_check(option: Option, opt: str, value: str) -> str:
128
+ return os.path.expanduser(value)
129
+
130
+
131
+ def _package_name_option_check(option: Option, opt: str, value: str) -> str:
132
+ return canonicalize_name(value)
133
+
134
+
135
+ class PipOption(Option):
136
+ TYPES = Option.TYPES + ("path", "package_name")
137
+ TYPE_CHECKER = Option.TYPE_CHECKER.copy()
138
+ TYPE_CHECKER["package_name"] = _package_name_option_check
139
+ TYPE_CHECKER["path"] = _path_option_check
140
+
141
+
142
+ ###########
143
+ # options #
144
+ ###########
145
+
146
+ help_: Callable[..., Option] = partial(
147
+ Option,
148
+ "-h",
149
+ "--help",
150
+ dest="help",
151
+ action="help",
152
+ help="Show help.",
153
+ )
154
+
155
+ debug_mode: Callable[..., Option] = partial(
156
+ Option,
157
+ "--debug",
158
+ dest="debug_mode",
159
+ action="store_true",
160
+ default=False,
161
+ help=(
162
+ "Let unhandled exceptions propagate outside the main subroutine, "
163
+ "instead of logging them to stderr."
164
+ ),
165
+ )
166
+
167
+ isolated_mode: Callable[..., Option] = partial(
168
+ Option,
169
+ "--isolated",
170
+ dest="isolated_mode",
171
+ action="store_true",
172
+ default=False,
173
+ help=(
174
+ "Run pip in an isolated mode, ignoring environment variables and user "
175
+ "configuration."
176
+ ),
177
+ )
178
+
179
+ require_virtualenv: Callable[..., Option] = partial(
180
+ Option,
181
+ "--require-virtualenv",
182
+ "--require-venv",
183
+ dest="require_venv",
184
+ action="store_true",
185
+ default=False,
186
+ help=(
187
+ "Allow pip to only run in a virtual environment; "
188
+ "exit with an error otherwise."
189
+ ),
190
+ )
191
+
192
+ verbose: Callable[..., Option] = partial(
193
+ Option,
194
+ "-v",
195
+ "--verbose",
196
+ dest="verbose",
197
+ action="count",
198
+ default=0,
199
+ help="Give more output. Option is additive, and can be used up to 3 times.",
200
+ )
201
+
202
+ no_color: Callable[..., Option] = partial(
203
+ Option,
204
+ "--no-color",
205
+ dest="no_color",
206
+ action="store_true",
207
+ default=False,
208
+ help="Suppress colored output.",
209
+ )
210
+
211
+ version: Callable[..., Option] = partial(
212
+ Option,
213
+ "-V",
214
+ "--version",
215
+ dest="version",
216
+ action="store_true",
217
+ help="Show version and exit.",
218
+ )
219
+
220
+ quiet: Callable[..., Option] = partial(
221
+ Option,
222
+ "-q",
223
+ "--quiet",
224
+ dest="quiet",
225
+ action="count",
226
+ default=0,
227
+ help=(
228
+ "Give less output. Option is additive, and can be used up to 3"
229
+ " times (corresponding to WARNING, ERROR, and CRITICAL logging"
230
+ " levels)."
231
+ ),
232
+ )
233
+
234
+ progress_bar: Callable[..., Option] = partial(
235
+ Option,
236
+ "--progress-bar",
237
+ dest="progress_bar",
238
+ type="choice",
239
+ choices=list(BAR_TYPES.keys()),
240
+ default="on",
241
+ help=(
242
+ "Specify type of progress to be displayed ["
243
+ + "|".join(BAR_TYPES.keys())
244
+ + "] (default: %default)"
245
+ ),
246
+ )
247
+
248
+ log: Callable[..., Option] = partial(
249
+ PipOption,
250
+ "--log",
251
+ "--log-file",
252
+ "--local-log",
253
+ dest="log",
254
+ metavar="path",
255
+ type="path",
256
+ help="Path to a verbose appending log.",
257
+ )
258
+
259
+ no_input: Callable[..., Option] = partial(
260
+ Option,
261
+ # Don't ask for input
262
+ "--no-input",
263
+ dest="no_input",
264
+ action="store_true",
265
+ default=False,
266
+ help="Disable prompting for input.",
267
+ )
268
+
269
+ proxy: Callable[..., Option] = partial(
270
+ Option,
271
+ "--proxy",
272
+ dest="proxy",
273
+ type="str",
274
+ default="",
275
+ help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
276
+ )
277
+
278
+ retries: Callable[..., Option] = partial(
279
+ Option,
280
+ "--retries",
281
+ dest="retries",
282
+ type="int",
283
+ default=5,
284
+ help="Maximum number of retries each connection should attempt "
285
+ "(default %default times).",
286
+ )
287
+
288
+ timeout: Callable[..., Option] = partial(
289
+ Option,
290
+ "--timeout",
291
+ "--default-timeout",
292
+ metavar="sec",
293
+ dest="timeout",
294
+ type="float",
295
+ default=15,
296
+ help="Set the socket timeout (default %default seconds).",
297
+ )
298
+
299
+
300
+ def exists_action() -> Option:
301
+ return Option(
302
+ # Option when path already exist
303
+ "--exists-action",
304
+ dest="exists_action",
305
+ type="choice",
306
+ choices=["s", "i", "w", "b", "a"],
307
+ default=[],
308
+ action="append",
309
+ metavar="action",
310
+ help="Default action when a path already exists: "
311
+ "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
312
+ )
313
+
314
+
315
+ cert: Callable[..., Option] = partial(
316
+ PipOption,
317
+ "--cert",
318
+ dest="cert",
319
+ type="path",
320
+ metavar="path",
321
+ help=(
322
+ "Path to PEM-encoded CA certificate bundle. "
323
+ "If provided, overrides the default. "
324
+ "See 'SSL Certificate Verification' in pip documentation "
325
+ "for more information."
326
+ ),
327
+ )
328
+
329
+ client_cert: Callable[..., Option] = partial(
330
+ PipOption,
331
+ "--client-cert",
332
+ dest="client_cert",
333
+ type="path",
334
+ default=None,
335
+ metavar="path",
336
+ help="Path to SSL client certificate, a single file containing the "
337
+ "private key and the certificate in PEM format.",
338
+ )
339
+
340
+ index_url: Callable[..., Option] = partial(
341
+ Option,
342
+ "-i",
343
+ "--index-url",
344
+ "--pypi-url",
345
+ dest="index_url",
346
+ metavar="URL",
347
+ default=PyPI.simple_url,
348
+ help="Base URL of the Python Package Index (default %default). "
349
+ "This should point to a repository compliant with PEP 503 "
350
+ "(the simple repository API) or a local directory laid out "
351
+ "in the same format.",
352
+ )
353
+
354
+
355
+ def extra_index_url() -> Option:
356
+ return Option(
357
+ "--extra-index-url",
358
+ dest="extra_index_urls",
359
+ metavar="URL",
360
+ action="append",
361
+ default=[],
362
+ help="Extra URLs of package indexes to use in addition to "
363
+ "--index-url. Should follow the same rules as "
364
+ "--index-url.",
365
+ )
366
+
367
+
368
+ no_index: Callable[..., Option] = partial(
369
+ Option,
370
+ "--no-index",
371
+ dest="no_index",
372
+ action="store_true",
373
+ default=False,
374
+ help="Ignore package index (only looking at --find-links URLs instead).",
375
+ )
376
+
377
+
378
+ def find_links() -> Option:
379
+ return Option(
380
+ "-f",
381
+ "--find-links",
382
+ dest="find_links",
383
+ action="append",
384
+ default=[],
385
+ metavar="url",
386
+ help="If a URL or path to an html file, then parse for links to "
387
+ "archives such as sdist (.tar.gz) or wheel (.whl) files. "
388
+ "If a local path or file:// URL that's a directory, "
389
+ "then look for archives in the directory listing. "
390
+ "Links to VCS project URLs are not supported.",
391
+ )
392
+
393
+
394
+ def trusted_host() -> Option:
395
+ return Option(
396
+ "--trusted-host",
397
+ dest="trusted_hosts",
398
+ action="append",
399
+ metavar="HOSTNAME",
400
+ default=[],
401
+ help="Mark this host or host:port pair as trusted, even though it "
402
+ "does not have valid or any HTTPS.",
403
+ )
404
+
405
+
406
+ def constraints() -> Option:
407
+ return Option(
408
+ "-c",
409
+ "--constraint",
410
+ dest="constraints",
411
+ action="append",
412
+ default=[],
413
+ metavar="file",
414
+ help="Constrain versions using the given constraints file. "
415
+ "This option can be used multiple times.",
416
+ )
417
+
418
+
419
+ def requirements() -> Option:
420
+ return Option(
421
+ "-r",
422
+ "--requirement",
423
+ dest="requirements",
424
+ action="append",
425
+ default=[],
426
+ metavar="file",
427
+ help="Install from the given requirements file. "
428
+ "This option can be used multiple times.",
429
+ )
430
+
431
+
432
+ def editable() -> Option:
433
+ return Option(
434
+ "-e",
435
+ "--editable",
436
+ dest="editables",
437
+ action="append",
438
+ default=[],
439
+ metavar="path/url",
440
+ help=(
441
+ "Install a project in editable mode (i.e. setuptools "
442
+ '"develop mode") from a local project path or a VCS url.'
443
+ ),
444
+ )
445
+
446
+
447
+ def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
448
+ value = os.path.abspath(value)
449
+ setattr(parser.values, option.dest, value)
450
+
451
+
452
+ src: Callable[..., Option] = partial(
453
+ PipOption,
454
+ "--src",
455
+ "--source",
456
+ "--source-dir",
457
+ "--source-directory",
458
+ dest="src_dir",
459
+ type="path",
460
+ metavar="dir",
461
+ default=get_src_prefix(),
462
+ action="callback",
463
+ callback=_handle_src,
464
+ help="Directory to check out editable projects into. "
465
+ 'The default in a virtualenv is "<venv path>/src". '
466
+ 'The default for global installs is "<current dir>/src".',
467
+ )
468
+
469
+
470
+ def _get_format_control(values: Values, option: Option) -> Any:
471
+ """Get a format_control object."""
472
+ return getattr(values, option.dest)
473
+
474
+
475
+ def _handle_no_binary(
476
+ option: Option, opt_str: str, value: str, parser: OptionParser
477
+ ) -> None:
478
+ existing = _get_format_control(parser.values, option)
479
+ FormatControl.handle_mutual_excludes(
480
+ value,
481
+ existing.no_binary,
482
+ existing.only_binary,
483
+ )
484
+
485
+
486
+ def _handle_only_binary(
487
+ option: Option, opt_str: str, value: str, parser: OptionParser
488
+ ) -> None:
489
+ existing = _get_format_control(parser.values, option)
490
+ FormatControl.handle_mutual_excludes(
491
+ value,
492
+ existing.only_binary,
493
+ existing.no_binary,
494
+ )
495
+
496
+
497
+ def no_binary() -> Option:
498
+ format_control = FormatControl(set(), set())
499
+ return Option(
500
+ "--no-binary",
501
+ dest="format_control",
502
+ action="callback",
503
+ callback=_handle_no_binary,
504
+ type="str",
505
+ default=format_control,
506
+ help="Do not use binary packages. Can be supplied multiple times, and "
507
+ 'each time adds to the existing value. Accepts either ":all:" to '
508
+ 'disable all binary packages, ":none:" to empty the set (notice '
509
+ "the colons), or one or more package names with commas between "
510
+ "them (no colons). Note that some packages are tricky to compile "
511
+ "and may fail to install when this option is used on them.",
512
+ )
513
+
514
+
515
+ def only_binary() -> Option:
516
+ format_control = FormatControl(set(), set())
517
+ return Option(
518
+ "--only-binary",
519
+ dest="format_control",
520
+ action="callback",
521
+ callback=_handle_only_binary,
522
+ type="str",
523
+ default=format_control,
524
+ help="Do not use source packages. Can be supplied multiple times, and "
525
+ 'each time adds to the existing value. Accepts either ":all:" to '
526
+ 'disable all source packages, ":none:" to empty the set, or one '
527
+ "or more package names with commas between them. Packages "
528
+ "without binary distributions will fail to install when this "
529
+ "option is used on them.",
530
+ )
531
+
532
+
533
+ platforms: Callable[..., Option] = partial(
534
+ Option,
535
+ "--platform",
536
+ dest="platforms",
537
+ metavar="platform",
538
+ action="append",
539
+ default=None,
540
+ help=(
541
+ "Only use wheels compatible with <platform>. Defaults to the "
542
+ "platform of the running system. Use this option multiple times to "
543
+ "specify multiple platforms supported by the target interpreter."
544
+ ),
545
+ )
546
+
547
+
548
+ # This was made a separate function for unit-testing purposes.
549
+ def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
550
+ """
551
+ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
552
+
553
+ :return: A 2-tuple (version_info, error_msg), where `error_msg` is
554
+ non-None if and only if there was a parsing error.
555
+ """
556
+ if not value:
557
+ # The empty string is the same as not providing a value.
558
+ return (None, None)
559
+
560
+ parts = value.split(".")
561
+ if len(parts) > 3:
562
+ return ((), "at most three version parts are allowed")
563
+
564
+ if len(parts) == 1:
565
+ # Then we are in the case of "3" or "37".
566
+ value = parts[0]
567
+ if len(value) > 1:
568
+ parts = [value[0], value[1:]]
569
+
570
+ try:
571
+ version_info = tuple(int(part) for part in parts)
572
+ except ValueError:
573
+ return ((), "each version part must be an integer")
574
+
575
+ return (version_info, None)
576
+
577
+
578
+ def _handle_python_version(
579
+ option: Option, opt_str: str, value: str, parser: OptionParser
580
+ ) -> None:
581
+ """
582
+ Handle a provided --python-version value.
583
+ """
584
+ version_info, error_msg = _convert_python_version(value)
585
+ if error_msg is not None:
586
+ msg = "invalid --python-version value: {!r}: {}".format(
587
+ value,
588
+ error_msg,
589
+ )
590
+ raise_option_error(parser, option=option, msg=msg)
591
+
592
+ parser.values.python_version = version_info
593
+
594
+
595
+ python_version: Callable[..., Option] = partial(
596
+ Option,
597
+ "--python-version",
598
+ dest="python_version",
599
+ metavar="python_version",
600
+ action="callback",
601
+ callback=_handle_python_version,
602
+ type="str",
603
+ default=None,
604
+ help=dedent(
605
+ """\
606
+ The Python interpreter version to use for wheel and "Requires-Python"
607
+ compatibility checks. Defaults to a version derived from the running
608
+ interpreter. The version can be specified using up to three dot-separated
609
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
610
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
611
+ """
612
+ ),
613
+ )
614
+
615
+
616
+ implementation: Callable[..., Option] = partial(
617
+ Option,
618
+ "--implementation",
619
+ dest="implementation",
620
+ metavar="implementation",
621
+ default=None,
622
+ help=(
623
+ "Only use wheels compatible with Python "
624
+ "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
625
+ " or 'ip'. If not specified, then the current "
626
+ "interpreter implementation is used. Use 'py' to force "
627
+ "implementation-agnostic wheels."
628
+ ),
629
+ )
630
+
631
+
632
+ abis: Callable[..., Option] = partial(
633
+ Option,
634
+ "--abi",
635
+ dest="abis",
636
+ metavar="abi",
637
+ action="append",
638
+ default=None,
639
+ help=(
640
+ "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
641
+ "If not specified, then the current interpreter abi tag is used. "
642
+ "Use this option multiple times to specify multiple abis supported "
643
+ "by the target interpreter. Generally you will need to specify "
644
+ "--implementation, --platform, and --python-version when using this "
645
+ "option."
646
+ ),
647
+ )
648
+
649
+
650
+ def add_target_python_options(cmd_opts: OptionGroup) -> None:
651
+ cmd_opts.add_option(platforms())
652
+ cmd_opts.add_option(python_version())
653
+ cmd_opts.add_option(implementation())
654
+ cmd_opts.add_option(abis())
655
+
656
+
657
+ def make_target_python(options: Values) -> TargetPython:
658
+ target_python = TargetPython(
659
+ platforms=options.platforms,
660
+ py_version_info=options.python_version,
661
+ abis=options.abis,
662
+ implementation=options.implementation,
663
+ )
664
+
665
+ return target_python
666
+
667
+
668
+ def prefer_binary() -> Option:
669
+ return Option(
670
+ "--prefer-binary",
671
+ dest="prefer_binary",
672
+ action="store_true",
673
+ default=False,
674
+ help="Prefer older binary packages over newer source packages.",
675
+ )
676
+
677
+
678
+ cache_dir: Callable[..., Option] = partial(
679
+ PipOption,
680
+ "--cache-dir",
681
+ dest="cache_dir",
682
+ default=USER_CACHE_DIR,
683
+ metavar="dir",
684
+ type="path",
685
+ help="Store the cache data in <dir>.",
686
+ )
687
+
688
+
689
+ def _handle_no_cache_dir(
690
+ option: Option, opt: str, value: str, parser: OptionParser
691
+ ) -> None:
692
+ """
693
+ Process a value provided for the --no-cache-dir option.
694
+
695
+ This is an optparse.Option callback for the --no-cache-dir option.
696
+ """
697
+ # The value argument will be None if --no-cache-dir is passed via the
698
+ # command-line, since the option doesn't accept arguments. However,
699
+ # the value can be non-None if the option is triggered e.g. by an
700
+ # environment variable, like PIP_NO_CACHE_DIR=true.
701
+ if value is not None:
702
+ # Then parse the string value to get argument error-checking.
703
+ try:
704
+ strtobool(value)
705
+ except ValueError as exc:
706
+ raise_option_error(parser, option=option, msg=str(exc))
707
+
708
+ # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
709
+ # converted to 0 (like "false" or "no") caused cache_dir to be disabled
710
+ # rather than enabled (logic would say the latter). Thus, we disable
711
+ # the cache directory not just on values that parse to True, but (for
712
+ # backwards compatibility reasons) also on values that parse to False.
713
+ # In other words, always set it to False if the option is provided in
714
+ # some (valid) form.
715
+ parser.values.cache_dir = False
716
+
717
+
718
+ no_cache: Callable[..., Option] = partial(
719
+ Option,
720
+ "--no-cache-dir",
721
+ dest="cache_dir",
722
+ action="callback",
723
+ callback=_handle_no_cache_dir,
724
+ help="Disable the cache.",
725
+ )
726
+
727
+ no_deps: Callable[..., Option] = partial(
728
+ Option,
729
+ "--no-deps",
730
+ "--no-dependencies",
731
+ dest="ignore_dependencies",
732
+ action="store_true",
733
+ default=False,
734
+ help="Don't install package dependencies.",
735
+ )
736
+
737
+ ignore_requires_python: Callable[..., Option] = partial(
738
+ Option,
739
+ "--ignore-requires-python",
740
+ dest="ignore_requires_python",
741
+ action="store_true",
742
+ help="Ignore the Requires-Python information.",
743
+ )
744
+
745
+ no_build_isolation: Callable[..., Option] = partial(
746
+ Option,
747
+ "--no-build-isolation",
748
+ dest="build_isolation",
749
+ action="store_false",
750
+ default=True,
751
+ help="Disable isolation when building a modern source distribution. "
752
+ "Build dependencies specified by PEP 518 must be already installed "
753
+ "if this option is used.",
754
+ )
755
+
756
+
757
+ def _handle_no_use_pep517(
758
+ option: Option, opt: str, value: str, parser: OptionParser
759
+ ) -> None:
760
+ """
761
+ Process a value provided for the --no-use-pep517 option.
762
+
763
+ This is an optparse.Option callback for the no_use_pep517 option.
764
+ """
765
+ # Since --no-use-pep517 doesn't accept arguments, the value argument
766
+ # will be None if --no-use-pep517 is passed via the command-line.
767
+ # However, the value can be non-None if the option is triggered e.g.
768
+ # by an environment variable, for example "PIP_NO_USE_PEP517=true".
769
+ if value is not None:
770
+ msg = """A value was passed for --no-use-pep517,
771
+ probably using either the PIP_NO_USE_PEP517 environment variable
772
+ or the "no-use-pep517" config file option. Use an appropriate value
773
+ of the PIP_USE_PEP517 environment variable or the "use-pep517"
774
+ config file option instead.
775
+ """
776
+ raise_option_error(parser, option=option, msg=msg)
777
+
778
+ # Otherwise, --no-use-pep517 was passed via the command-line.
779
+ parser.values.use_pep517 = False
780
+
781
+
782
+ use_pep517: Any = partial(
783
+ Option,
784
+ "--use-pep517",
785
+ dest="use_pep517",
786
+ action="store_true",
787
+ default=None,
788
+ help="Use PEP 517 for building source distributions "
789
+ "(use --no-use-pep517 to force legacy behaviour).",
790
+ )
791
+
792
+ no_use_pep517: Any = partial(
793
+ Option,
794
+ "--no-use-pep517",
795
+ dest="use_pep517",
796
+ action="callback",
797
+ callback=_handle_no_use_pep517,
798
+ default=None,
799
+ help=SUPPRESS_HELP,
800
+ )
801
+
802
+ install_options: Callable[..., Option] = partial(
803
+ Option,
804
+ "--install-option",
805
+ dest="install_options",
806
+ action="append",
807
+ metavar="options",
808
+ help="Extra arguments to be supplied to the setup.py install "
809
+ 'command (use like --install-option="--install-scripts=/usr/local/'
810
+ 'bin"). Use multiple --install-option options to pass multiple '
811
+ "options to setup.py install. If you are using an option with a "
812
+ "directory path, be sure to use absolute path.",
813
+ )
814
+
815
+ build_options: Callable[..., Option] = partial(
816
+ Option,
817
+ "--build-option",
818
+ dest="build_options",
819
+ metavar="options",
820
+ action="append",
821
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
822
+ )
823
+
824
+ global_options: Callable[..., Option] = partial(
825
+ Option,
826
+ "--global-option",
827
+ dest="global_options",
828
+ action="append",
829
+ metavar="options",
830
+ help="Extra global options to be supplied to the setup.py "
831
+ "call before the install or bdist_wheel command.",
832
+ )
833
+
834
+ no_clean: Callable[..., Option] = partial(
835
+ Option,
836
+ "--no-clean",
837
+ action="store_true",
838
+ default=False,
839
+ help="Don't clean up build directories.",
840
+ )
841
+
842
+ pre: Callable[..., Option] = partial(
843
+ Option,
844
+ "--pre",
845
+ action="store_true",
846
+ default=False,
847
+ help="Include pre-release and development versions. By default, "
848
+ "pip only finds stable versions.",
849
+ )
850
+
851
+ disable_pip_version_check: Callable[..., Option] = partial(
852
+ Option,
853
+ "--disable-pip-version-check",
854
+ dest="disable_pip_version_check",
855
+ action="store_true",
856
+ default=True,
857
+ help="Don't periodically check PyPI to determine whether a new version "
858
+ "of pip is available for download. Implied with --no-index.",
859
+ )
860
+
861
+
862
+ def _handle_merge_hash(
863
+ option: Option, opt_str: str, value: str, parser: OptionParser
864
+ ) -> None:
865
+ """Given a value spelled "algo:digest", append the digest to a list
866
+ pointed to in a dict by the algo name."""
867
+ if not parser.values.hashes:
868
+ parser.values.hashes = {}
869
+ try:
870
+ algo, digest = value.split(":", 1)
871
+ except ValueError:
872
+ parser.error(
873
+ "Arguments to {} must be a hash name " # noqa
874
+ "followed by a value, like --hash=sha256:"
875
+ "abcde...".format(opt_str)
876
+ )
877
+ if algo not in STRONG_HASHES:
878
+ parser.error(
879
+ "Allowed hash algorithms for {} are {}.".format( # noqa
880
+ opt_str, ", ".join(STRONG_HASHES)
881
+ )
882
+ )
883
+ parser.values.hashes.setdefault(algo, []).append(digest)
884
+
885
+
886
+ hash: Callable[..., Option] = partial(
887
+ Option,
888
+ "--hash",
889
+ # Hash values eventually end up in InstallRequirement.hashes due to
890
+ # __dict__ copying in process_line().
891
+ dest="hashes",
892
+ action="callback",
893
+ callback=_handle_merge_hash,
894
+ type="string",
895
+ help="Verify that the package's archive matches this "
896
+ "hash before installing. Example: --hash=sha256:abcdef...",
897
+ )
898
+
899
+
900
+ require_hashes: Callable[..., Option] = partial(
901
+ Option,
902
+ "--require-hashes",
903
+ dest="require_hashes",
904
+ action="store_true",
905
+ default=False,
906
+ help="Require a hash to check each requirement against, for "
907
+ "repeatable installs. This option is implied when any package in a "
908
+ "requirements file has a --hash option.",
909
+ )
910
+
911
+
912
+ list_path: Callable[..., Option] = partial(
913
+ PipOption,
914
+ "--path",
915
+ dest="path",
916
+ type="path",
917
+ action="append",
918
+ help="Restrict to the specified installation path for listing "
919
+ "packages (can be used multiple times).",
920
+ )
921
+
922
+
923
+ def check_list_path_option(options: Values) -> None:
924
+ if options.path and (options.user or options.local):
925
+ raise CommandError("Cannot combine '--path' with '--user' or '--local'")
926
+
927
+
928
+ list_exclude: Callable[..., Option] = partial(
929
+ PipOption,
930
+ "--exclude",
931
+ dest="excludes",
932
+ action="append",
933
+ metavar="package",
934
+ type="package_name",
935
+ help="Exclude specified package from the output",
936
+ )
937
+
938
+
939
+ no_python_version_warning: Callable[..., Option] = partial(
940
+ Option,
941
+ "--no-python-version-warning",
942
+ dest="no_python_version_warning",
943
+ action="store_true",
944
+ default=False,
945
+ help="Silence deprecation warnings for upcoming unsupported Pythons.",
946
+ )
947
+
948
+
949
+ use_new_feature: Callable[..., Option] = partial(
950
+ Option,
951
+ "--use-feature",
952
+ dest="features_enabled",
953
+ metavar="feature",
954
+ action="append",
955
+ default=[],
956
+ choices=["2020-resolver", "fast-deps", "in-tree-build"],
957
+ help="Enable new functionality, that may be backward incompatible.",
958
+ )
959
+
960
+ use_deprecated_feature: Callable[..., Option] = partial(
961
+ Option,
962
+ "--use-deprecated",
963
+ dest="deprecated_features_enabled",
964
+ metavar="feature",
965
+ action="append",
966
+ default=[],
967
+ choices=[
968
+ "legacy-resolver",
969
+ "out-of-tree-build",
970
+ "backtrack-on-build-failures",
971
+ "html5lib",
972
+ ],
973
+ help=("Enable deprecated functionality, that will be removed in the future."),
974
+ )
975
+
976
+
977
+ ##########
978
+ # groups #
979
+ ##########
980
+
981
+ general_group: Dict[str, Any] = {
982
+ "name": "General Options",
983
+ "options": [
984
+ help_,
985
+ debug_mode,
986
+ isolated_mode,
987
+ require_virtualenv,
988
+ verbose,
989
+ version,
990
+ quiet,
991
+ log,
992
+ no_input,
993
+ proxy,
994
+ retries,
995
+ timeout,
996
+ exists_action,
997
+ trusted_host,
998
+ cert,
999
+ client_cert,
1000
+ cache_dir,
1001
+ no_cache,
1002
+ disable_pip_version_check,
1003
+ no_color,
1004
+ no_python_version_warning,
1005
+ use_new_feature,
1006
+ use_deprecated_feature,
1007
+ ],
1008
+ }
1009
+
1010
+ index_group: Dict[str, Any] = {
1011
+ "name": "Package Index Options",
1012
+ "options": [
1013
+ index_url,
1014
+ extra_index_url,
1015
+ no_index,
1016
+ find_links,
1017
+ ],
1018
+ }
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/command_context.py ADDED
@@ -0,0 +1,27 @@
+ from contextlib import ExitStack, contextmanager
+ from typing import ContextManager, Iterator, TypeVar
+
+ _T = TypeVar("_T", covariant=True)
+
+
+ class CommandContextMixIn:
+     def __init__(self) -> None:
+         super().__init__()
+         self._in_main_context = False
+         self._main_context = ExitStack()
+
+     @contextmanager
+     def main_context(self) -> Iterator[None]:
+         assert not self._in_main_context
+
+         self._in_main_context = True
+         try:
+             with self._main_context:
+                 yield
+         finally:
+             self._in_main_context = False
+
+     def enter_context(self, context_provider: ContextManager[_T]) -> _T:
+         assert self._in_main_context
+
+         return self._main_context.enter_context(context_provider)
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/main.py ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Primary application entrypoint.
2
+ """
3
+ import locale
4
+ import logging
5
+ import os
6
+ import sys
7
+ from typing import List, Optional
8
+
9
+ from pip._internal.cli.autocompletion import autocomplete
10
+ from pip._internal.cli.main_parser import parse_command
11
+ from pip._internal.commands import create_command
12
+ from pip._internal.exceptions import PipError
13
+ from pip._internal.utils import deprecation
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ # Do not import and use main() directly! Using it directly is actively
19
+ # discouraged by pip's maintainers. The name, location and behavior of
20
+ # this function is subject to change, so calling it directly is not
21
+ # portable across different pip versions.
22
+
23
+ # In addition, running pip in-process is unsupported and unsafe. This is
24
+ # elaborated in detail at
25
+ # https://pip.pypa.io/en/stable/user_guide/#using-pip-from-your-program.
26
+ # That document also provides suggestions that should work for nearly
27
+ # all users that are considering importing and using main() directly.
28
+
29
+ # However, we know that certain users will still want to invoke pip
30
+ # in-process. If you understand and accept the implications of using pip
31
+ # in an unsupported manner, the best approach is to use runpy to avoid
32
+ # depending on the exact location of this entry point.
33
+
34
+ # The following example shows how to use runpy to invoke pip in that
35
+ # case:
36
+ #
37
+ # sys.argv = ["pip", your, args, here]
38
+ # runpy.run_module("pip", run_name="__main__")
39
+ #
40
+ # Note that this will exit the process after running, unlike a direct
41
+ # call to main. As it is not safe to do any processing after calling
42
+ # main, this should not be an issue in practice.
43
+
44
+
45
+ def main(args: Optional[List[str]] = None) -> int:
46
+ if args is None:
47
+ args = sys.argv[1:]
48
+
49
+ # Configure our deprecation warnings to be sent through loggers
50
+ deprecation.install_warning_logger()
51
+
52
+ autocomplete()
53
+
54
+ try:
55
+ cmd_name, cmd_args = parse_command(args)
56
+ except PipError as exc:
57
+ sys.stderr.write(f"ERROR: {exc}")
58
+ sys.stderr.write(os.linesep)
59
+ sys.exit(1)
60
+
61
+ # Needed for locale.getpreferredencoding(False) to work
62
+ # in pip._internal.utils.encoding.auto_decode
63
+ try:
64
+ locale.setlocale(locale.LC_ALL, "")
65
+ except locale.Error as e:
66
+ # setlocale can apparently crash if the locale is uninitialized
67
+ logger.debug("Ignoring error %s when setting locale", e)
68
+ command = create_command(cmd_name, isolated=("--isolated" in cmd_args))
69
+
70
+ return command.main(cmd_args)
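As the comment block above stresses, importing and calling main() in-process is unsupported; the documented alternative is to run pip in a subprocess. A small sketch of that approach ("requests" is just an arbitrary example package):

# Sketch: the supported way to drive pip from a program is a subprocess,
# not an in-process call to main().
import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "pip", "install", "--upgrade", "requests"],
    capture_output=True,
    text=True,
)
print(result.returncode)  # 0 on success (SUCCESS in cli/status_codes.py)
print(result.stdout)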
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py ADDED
@@ -0,0 +1,87 @@
1
+ """A single place for constructing and exposing the main parser
2
+ """
3
+
4
+ import os
5
+ import sys
6
+ from typing import List, Tuple
7
+
8
+ from pip._internal.cli import cmdoptions
9
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
10
+ from pip._internal.commands import commands_dict, get_similar_commands
11
+ from pip._internal.exceptions import CommandError
12
+ from pip._internal.utils.misc import get_pip_version, get_prog
13
+
14
+ __all__ = ["create_main_parser", "parse_command"]
15
+
16
+
17
+ def create_main_parser() -> ConfigOptionParser:
18
+ """Creates and returns the main parser for pip's CLI"""
19
+
20
+ parser = ConfigOptionParser(
21
+ usage="\n%prog <command> [options]",
22
+ add_help_option=False,
23
+ formatter=UpdatingDefaultsHelpFormatter(),
24
+ name="global",
25
+ prog=get_prog(),
26
+ )
27
+ parser.disable_interspersed_args()
28
+
29
+ parser.version = get_pip_version()
30
+
31
+ # add the general options
32
+ gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
33
+ parser.add_option_group(gen_opts)
34
+
35
+ # so the help formatter knows
36
+ parser.main = True # type: ignore
37
+
38
+ # create command listing for description
39
+ description = [""] + [
40
+ f"{name:27} {command_info.summary}"
41
+ for name, command_info in commands_dict.items()
42
+ ]
43
+ parser.description = "\n".join(description)
44
+
45
+ return parser
46
+
47
+
48
+ def parse_command(args: List[str]) -> Tuple[str, List[str]]:
49
+ parser = create_main_parser()
50
+
51
+ # Note: parser calls disable_interspersed_args(), so the result of this
52
+ # call is to split the initial args into the general options before the
53
+ # subcommand and everything else.
54
+ # For example:
55
+ # args: ['--timeout=5', 'install', '--user', 'INITools']
56
+ # general_options: ['--timeout==5']
57
+ # args_else: ['install', '--user', 'INITools']
58
+ general_options, args_else = parser.parse_args(args)
59
+
60
+ # --version
61
+ if general_options.version:
62
+ sys.stdout.write(parser.version)
63
+ sys.stdout.write(os.linesep)
64
+ sys.exit()
65
+
66
+ # pip || pip help -> print_help()
67
+ if not args_else or (args_else[0] == "help" and len(args_else) == 1):
68
+ parser.print_help()
69
+ sys.exit()
70
+
71
+ # the subcommand name
72
+ cmd_name = args_else[0]
73
+
74
+ if cmd_name not in commands_dict:
75
+ guess = get_similar_commands(cmd_name)
76
+
77
+ msg = [f'unknown command "{cmd_name}"']
78
+ if guess:
79
+ msg.append(f'maybe you meant "{guess}"')
80
+
81
+ raise CommandError(" - ".join(msg))
82
+
83
+ # all the args without the subcommand
84
+ cmd_args = args[:]
85
+ cmd_args.remove(cmd_name)
86
+
87
+ return cmd_name, cmd_args
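parse_command() therefore splits the leading general options from the subcommand, exactly as the worked example in the comment above describes. A quick sketch, assuming pip's internals are importable:

# Sketch: how parse_command() splits argv.
from pip._internal.cli.main_parser import parse_command

cmd_name, cmd_args = parse_command(["--timeout=5", "install", "--user", "INITools"])
print(cmd_name)  # 'install'
print(cmd_args)  # ['--timeout=5', '--user', 'INITools']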
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/parser.py ADDED
@@ -0,0 +1,292 @@
1
+ """Base option parser setup"""
2
+
3
+ import logging
4
+ import optparse
5
+ import shutil
6
+ import sys
7
+ import textwrap
8
+ from contextlib import suppress
9
+ from typing import Any, Dict, Iterator, List, Tuple
10
+
11
+ from pip._internal.cli.status_codes import UNKNOWN_ERROR
12
+ from pip._internal.configuration import Configuration, ConfigurationError
13
+ from pip._internal.utils.misc import redact_auth_from_url, strtobool
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
+ class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
19
+ """A prettier/less verbose help formatter for optparse."""
20
+
21
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
22
+ # help position must be aligned with __init__.parseopts.description
23
+ kwargs["max_help_position"] = 30
24
+ kwargs["indent_increment"] = 1
25
+ kwargs["width"] = shutil.get_terminal_size()[0] - 2
26
+ super().__init__(*args, **kwargs)
27
+
28
+ def format_option_strings(self, option: optparse.Option) -> str:
29
+ return self._format_option_strings(option)
30
+
31
+ def _format_option_strings(
32
+ self, option: optparse.Option, mvarfmt: str = " <{}>", optsep: str = ", "
33
+ ) -> str:
34
+ """
35
+ Return a comma-separated list of option strings and metavars.
36
+
37
+ :param option: tuple of (short opt, long opt), e.g.: ('-f', '--format')
38
+ :param mvarfmt: metavar format string
39
+ :param optsep: separator
40
+ """
41
+ opts = []
42
+
43
+ if option._short_opts:
44
+ opts.append(option._short_opts[0])
45
+ if option._long_opts:
46
+ opts.append(option._long_opts[0])
47
+ if len(opts) > 1:
48
+ opts.insert(1, optsep)
49
+
50
+ if option.takes_value():
51
+ assert option.dest is not None
52
+ metavar = option.metavar or option.dest.lower()
53
+ opts.append(mvarfmt.format(metavar.lower()))
54
+
55
+ return "".join(opts)
56
+
57
+ def format_heading(self, heading: str) -> str:
58
+ if heading == "Options":
59
+ return ""
60
+ return heading + ":\n"
61
+
62
+ def format_usage(self, usage: str) -> str:
63
+ """
64
+ Ensure there is only one newline between usage and the first heading
65
+ if there is no description.
66
+ """
67
+ msg = "\nUsage: {}\n".format(self.indent_lines(textwrap.dedent(usage), " "))
68
+ return msg
69
+
70
+ def format_description(self, description: str) -> str:
71
+ # leave full control over description to us
72
+ if description:
73
+ if hasattr(self.parser, "main"):
74
+ label = "Commands"
75
+ else:
76
+ label = "Description"
77
+ # some doc strings have initial newlines, some don't
78
+ description = description.lstrip("\n")
79
+ # some doc strings have final newlines and spaces, some don't
80
+ description = description.rstrip()
81
+ # dedent, then reindent
82
+ description = self.indent_lines(textwrap.dedent(description), " ")
83
+ description = f"{label}:\n{description}\n"
84
+ return description
85
+ else:
86
+ return ""
87
+
88
+ def format_epilog(self, epilog: str) -> str:
89
+ # leave full control over epilog to us
90
+ if epilog:
91
+ return epilog
92
+ else:
93
+ return ""
94
+
95
+ def indent_lines(self, text: str, indent: str) -> str:
96
+ new_lines = [indent + line for line in text.split("\n")]
97
+ return "\n".join(new_lines)
98
+
99
+
100
+ class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
101
+ """Custom help formatter for use in ConfigOptionParser.
102
+
103
+ This updates the defaults before expanding them, allowing
104
+ them to show up correctly in the help listing.
105
+
106
+ Also redacts auth from URL-type options.
107
+ """
108
+
109
+ def expand_default(self, option: optparse.Option) -> str:
110
+ default_values = None
111
+ if self.parser is not None:
112
+ assert isinstance(self.parser, ConfigOptionParser)
113
+ self.parser._update_defaults(self.parser.defaults)
114
+ assert option.dest is not None
115
+ default_values = self.parser.defaults.get(option.dest)
116
+ help_text = super().expand_default(option)
117
+
118
+ if default_values and option.metavar == "URL":
119
+ if isinstance(default_values, str):
120
+ default_values = [default_values]
121
+
122
+ # If it's not a list, we should abort and just return the help text
123
+ if not isinstance(default_values, list):
124
+ default_values = []
125
+
126
+ for val in default_values:
127
+ help_text = help_text.replace(val, redact_auth_from_url(val))
128
+
129
+ return help_text
130
+
131
+
132
+ class CustomOptionParser(optparse.OptionParser):
133
+ def insert_option_group(
134
+ self, idx: int, *args: Any, **kwargs: Any
135
+ ) -> optparse.OptionGroup:
136
+ """Insert an OptionGroup at a given position."""
137
+ group = self.add_option_group(*args, **kwargs)
138
+
139
+ self.option_groups.pop()
140
+ self.option_groups.insert(idx, group)
141
+
142
+ return group
143
+
144
+ @property
145
+ def option_list_all(self) -> List[optparse.Option]:
146
+ """Get a list of all options, including those in option groups."""
147
+ res = self.option_list[:]
148
+ for i in self.option_groups:
149
+ res.extend(i.option_list)
150
+
151
+ return res
152
+
153
+
154
+ class ConfigOptionParser(CustomOptionParser):
155
+ """Custom option parser which updates its defaults by checking the
156
+ configuration files and environmental variables"""
157
+
158
+ def __init__(
159
+ self,
160
+ *args: Any,
161
+ name: str,
162
+ isolated: bool = False,
163
+ **kwargs: Any,
164
+ ) -> None:
165
+ self.name = name
166
+ self.config = Configuration(isolated)
167
+
168
+ assert self.name
169
+ super().__init__(*args, **kwargs)
170
+
171
+ def check_default(self, option: optparse.Option, key: str, val: Any) -> Any:
172
+ try:
173
+ return option.check_value(key, val)
174
+ except optparse.OptionValueError as exc:
175
+ print(f"An error occurred during configuration: {exc}")
176
+ sys.exit(3)
177
+
178
+ def _get_ordered_configuration_items(self) -> Iterator[Tuple[str, Any]]:
179
+ # Configuration gives keys in an unordered manner. Order them.
180
+ override_order = ["global", self.name, ":env:"]
181
+
182
+ # Pool the options into different groups
183
+ section_items: Dict[str, List[Tuple[str, Any]]] = {
184
+ name: [] for name in override_order
185
+ }
186
+ for section_key, val in self.config.items():
187
+ # ignore empty values
188
+ if not val:
189
+ logger.debug(
190
+ "Ignoring configuration key '%s' as its value is empty.",
191
+ section_key,
192
+ )
193
+ continue
194
+
195
+ section, key = section_key.split(".", 1)
196
+ if section in override_order:
197
+ section_items[section].append((key, val))
198
+
199
+ # Yield each group in their override order
200
+ for section in override_order:
201
+ for key, val in section_items[section]:
202
+ yield key, val
203
+
204
+ def _update_defaults(self, defaults: Dict[str, Any]) -> Dict[str, Any]:
205
+ """Updates the given defaults with values from the config files and
206
+ the environ. Does a little special handling for certain types of
207
+ options (lists)."""
208
+
209
+ # Accumulate complex default state.
210
+ self.values = optparse.Values(self.defaults)
211
+ late_eval = set()
212
+ # Then set the options with those values
213
+ for key, val in self._get_ordered_configuration_items():
214
+ # '--' because configuration supports only long names
215
+ option = self.get_option("--" + key)
216
+
217
+ # Ignore options not present in this parser. E.g. non-globals put
218
+ # in [global] by users that want them to apply to all applicable
219
+ # commands.
220
+ if option is None:
221
+ continue
222
+
223
+ assert option.dest is not None
224
+
225
+ if option.action in ("store_true", "store_false"):
226
+ try:
227
+ val = strtobool(val)
228
+ except ValueError:
229
+ self.error(
230
+ "{} is not a valid value for {} option, " # noqa
231
+ "please specify a boolean value like yes/no, "
232
+ "true/false or 1/0 instead.".format(val, key)
233
+ )
234
+ elif option.action == "count":
235
+ with suppress(ValueError):
236
+ val = strtobool(val)
237
+ with suppress(ValueError):
238
+ val = int(val)
239
+ if not isinstance(val, int) or val < 0:
240
+ self.error(
241
+ "{} is not a valid value for {} option, " # noqa
242
+ "please instead specify either a non-negative integer "
243
+ "or a boolean value like yes/no or false/true "
244
+ "which is equivalent to 1/0.".format(val, key)
245
+ )
246
+ elif option.action == "append":
247
+ val = val.split()
248
+ val = [self.check_default(option, key, v) for v in val]
249
+ elif option.action == "callback":
250
+ assert option.callback is not None
251
+ late_eval.add(option.dest)
252
+ opt_str = option.get_opt_string()
253
+ val = option.convert_value(opt_str, val)
254
+ # From take_action
255
+ args = option.callback_args or ()
256
+ kwargs = option.callback_kwargs or {}
257
+ option.callback(option, opt_str, val, self, *args, **kwargs)
258
+ else:
259
+ val = self.check_default(option, key, val)
260
+
261
+ defaults[option.dest] = val
262
+
263
+ for key in late_eval:
264
+ defaults[key] = getattr(self.values, key)
265
+ self.values = None
266
+ return defaults
267
+
268
+ def get_default_values(self) -> optparse.Values:
269
+ """Overriding to make updating the defaults after instantiation of
270
+ the option parser possible, _update_defaults() does the dirty work."""
271
+ if not self.process_default_values:
272
+ # Old, pre-Optik 1.5 behaviour.
273
+ return optparse.Values(self.defaults)
274
+
275
+ # Load the configuration, or error out in case of an error
276
+ try:
277
+ self.config.load()
278
+ except ConfigurationError as err:
279
+ self.exit(UNKNOWN_ERROR, str(err))
280
+
281
+ defaults = self._update_defaults(self.defaults.copy()) # ours
282
+ for option in self._get_all_options():
283
+ assert option.dest is not None
284
+ default = defaults.get(option.dest)
285
+ if isinstance(default, str):
286
+ opt_str = option.get_opt_string()
287
+ defaults[option.dest] = option.check_value(opt_str, default)
288
+ return optparse.Values(defaults)
289
+
290
+ def error(self, msg: str) -> None:
291
+ self.print_usage(sys.stderr)
292
+ self.exit(UNKNOWN_ERROR, f"{msg}\n")
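_update_defaults() applies configuration values in the order 'global' section, then the command's own section, then environment variables, so later sources win. The following standalone sketch (toy data, not pip's code) reproduces just that override-order merge:

# Standalone sketch of the override order used by
# ConfigOptionParser._get_ordered_configuration_items(): later sections win.
from typing import Any, Dict, List, Tuple


def merge_in_override_order(
    items: List[Tuple[str, Any]], command_name: str
) -> Dict[str, Any]:
    override_order = ["global", command_name, ":env:"]
    merged: Dict[str, Any] = {}
    for section in override_order:
        for section_key, val in items:
            section_name, key = section_key.split(".", 1)
            if section_name == section and val:
                merged[key] = val
    return merged


items = [
    ("global.timeout", "15"),
    ("install.timeout", "60"),  # command section overrides 'global'
    (":env:.timeout", "5"),     # environment overrides both
]
print(merge_in_override_order(items, "install"))  # {'timeout': '5'}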
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py ADDED
@@ -0,0 +1,321 @@
1
+ import functools
2
+ import itertools
3
+ import sys
4
+ from signal import SIGINT, default_int_handler, signal
5
+ from typing import Any, Callable, Iterator, Optional, Tuple
6
+
7
+ from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
8
+ from pip._vendor.progress.spinner import Spinner
9
+ from pip._vendor.rich.progress import (
10
+ BarColumn,
11
+ DownloadColumn,
12
+ FileSizeColumn,
13
+ Progress,
14
+ ProgressColumn,
15
+ SpinnerColumn,
16
+ TextColumn,
17
+ TimeElapsedColumn,
18
+ TimeRemainingColumn,
19
+ TransferSpeedColumn,
20
+ )
21
+
22
+ from pip._internal.utils.compat import WINDOWS
23
+ from pip._internal.utils.logging import get_indentation
24
+ from pip._internal.utils.misc import format_size
25
+
26
+ try:
27
+ from pip._vendor import colorama
28
+ # Lots of different errors can come from this, including SystemError and
29
+ # ImportError.
30
+ except Exception:
31
+ colorama = None
32
+
33
+ DownloadProgressRenderer = Callable[[Iterator[bytes]], Iterator[bytes]]
34
+
35
+
36
+ def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar:
37
+ encoding = getattr(preferred.file, "encoding", None)
38
+
39
+ # If we don't know what encoding this file is in, then we'll just assume
40
+ # that it doesn't support unicode and use the ASCII bar.
41
+ if not encoding:
42
+ return fallback
43
+
44
+ # Collect all of the possible characters we want to use with the preferred
45
+ # bar.
46
+ characters = [
47
+ getattr(preferred, "empty_fill", ""),
48
+ getattr(preferred, "fill", ""),
49
+ ]
50
+ characters += list(getattr(preferred, "phases", []))
51
+
52
+ # Try to decode the characters we're using for the bar using the encoding
53
+ # of the given file, if this works then we'll assume that we can use the
54
+ # fancier bar and if not we'll fall back to the plaintext bar.
55
+ try:
56
+ "".join(characters).encode(encoding)
57
+ except UnicodeEncodeError:
58
+ return fallback
59
+ else:
60
+ return preferred
61
+
62
+
63
+ _BaseBar: Any = _select_progress_class(IncrementalBar, Bar)
64
+
65
+
66
+ class InterruptibleMixin:
67
+ """
68
+ Helper to ensure that self.finish() gets called on keyboard interrupt.
69
+
70
+ This allows downloads to be interrupted without leaving temporary state
71
+ (like hidden cursors) behind.
72
+
73
+ This class is similar to the progress library's existing SigIntMixin
74
+ helper, but as of version 1.2, that helper has the following problems:
75
+
76
+ 1. It calls sys.exit().
77
+ 2. It discards the existing SIGINT handler completely.
78
+ 3. It leaves its own handler in place even after an uninterrupted finish,
79
+ which will have unexpected delayed effects if the user triggers an
80
+ unrelated keyboard interrupt some time after a progress-displaying
81
+ download has already completed, for example.
82
+ """
83
+
84
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
85
+ """
86
+ Save the original SIGINT handler for later.
87
+ """
88
+ # https://github.com/python/mypy/issues/5887
89
+ super().__init__(*args, **kwargs) # type: ignore
90
+
91
+ self.original_handler = signal(SIGINT, self.handle_sigint)
92
+
93
+ # If signal() returns None, the previous handler was not installed from
94
+ # Python, and we cannot restore it. This probably should not happen,
95
+ # but if it does, we must restore something sensible instead, at least.
96
+ # The least bad option should be Python's default SIGINT handler, which
97
+ # just raises KeyboardInterrupt.
98
+ if self.original_handler is None:
99
+ self.original_handler = default_int_handler
100
+
101
+ def finish(self) -> None:
102
+ """
103
+ Restore the original SIGINT handler after finishing.
104
+
105
+ This should happen regardless of whether the progress display finishes
106
+ normally, or gets interrupted.
107
+ """
108
+ super().finish() # type: ignore
109
+ signal(SIGINT, self.original_handler)
110
+
111
+ def handle_sigint(self, signum, frame): # type: ignore
112
+ """
113
+ Call self.finish() before delegating to the original SIGINT handler.
114
+
115
+ This handler should only be in place while the progress display is
116
+ active.
117
+ """
118
+ self.finish()
119
+ self.original_handler(signum, frame)
120
+
121
+
122
+ class SilentBar(Bar):
123
+ def update(self) -> None:
124
+ pass
125
+
126
+
127
+ class BlueEmojiBar(IncrementalBar):
128
+
129
+ suffix = "%(percent)d%%"
130
+ bar_prefix = " "
131
+ bar_suffix = " "
132
+ phases = ("\U0001F539", "\U0001F537", "\U0001F535")
133
+
134
+
135
+ class DownloadProgressMixin:
136
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
137
+ # https://github.com/python/mypy/issues/5887
138
+ super().__init__(*args, **kwargs) # type: ignore
139
+ self.message: str = (" " * (get_indentation() + 2)) + self.message
140
+
141
+ @property
142
+ def downloaded(self) -> str:
143
+ return format_size(self.index) # type: ignore
144
+
145
+ @property
146
+ def download_speed(self) -> str:
147
+ # Avoid zero division errors...
148
+ if self.avg == 0.0: # type: ignore
149
+ return "..."
150
+ return format_size(1 / self.avg) + "/s" # type: ignore
151
+
152
+ @property
153
+ def pretty_eta(self) -> str:
154
+ if self.eta: # type: ignore
155
+ return f"eta {self.eta_td}" # type: ignore
156
+ return ""
157
+
158
+ def iter(self, it): # type: ignore
159
+ for x in it:
160
+ yield x
161
+ # B305 is incorrectly raised here
162
+ # https://github.com/PyCQA/flake8-bugbear/issues/59
163
+ self.next(len(x)) # noqa: B305
164
+ self.finish()
165
+
166
+
167
+ class WindowsMixin:
168
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
169
+ # The Windows terminal does not support the hide/show cursor ANSI codes
170
+ # even with colorama. So we'll ensure that hide_cursor is False on
171
+ # Windows.
172
+ # This call needs to go before the super() call, so that hide_cursor
173
+ # is set in time. The base progress bar class writes the "hide cursor"
174
+ # code to the terminal in its init, so if we don't set this soon
175
+ # enough, we get a "hide" with no corresponding "show"...
176
+ if WINDOWS and self.hide_cursor: # type: ignore
177
+ self.hide_cursor = False
178
+
179
+ # https://github.com/python/mypy/issues/5887
180
+ super().__init__(*args, **kwargs) # type: ignore
181
+
182
+ # Check if we are running on Windows and we have the colorama module,
183
+ # if we do then wrap our file with it.
184
+ if WINDOWS and colorama:
185
+ self.file = colorama.AnsiToWin32(self.file) # type: ignore
186
+ # The progress code expects to be able to call self.file.isatty()
187
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
188
+ # add it.
189
+ self.file.isatty = lambda: self.file.wrapped.isatty()
190
+ # The progress code expects to be able to call self.file.flush()
191
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
192
+ # add it.
193
+ self.file.flush = lambda: self.file.wrapped.flush()
194
+
195
+
196
+ class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
197
+
198
+ file = sys.stdout
199
+ message = "%(percent)d%%"
200
+ suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
201
+
202
+
203
+ class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
204
+ pass
205
+
206
+
207
+ class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
208
+ pass
209
+
210
+
211
+ class DownloadBar(BaseDownloadProgressBar, Bar):
212
+ pass
213
+
214
+
215
+ class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
216
+ pass
217
+
218
+
219
+ class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
220
+ pass
221
+
222
+
223
+ class DownloadProgressSpinner(
224
+ WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
225
+ ):
226
+
227
+ file = sys.stdout
228
+ suffix = "%(downloaded)s %(download_speed)s"
229
+
230
+ def next_phase(self) -> str:
231
+ if not hasattr(self, "_phaser"):
232
+ self._phaser = itertools.cycle(self.phases)
233
+ return next(self._phaser)
234
+
235
+ def update(self) -> None:
236
+ message = self.message % self
237
+ phase = self.next_phase()
238
+ suffix = self.suffix % self
239
+ line = "".join(
240
+ [
241
+ message,
242
+ " " if message else "",
243
+ phase,
244
+ " " if suffix else "",
245
+ suffix,
246
+ ]
247
+ )
248
+
249
+ self.writeln(line)
250
+
251
+
252
+ BAR_TYPES = {
253
+ "off": (DownloadSilentBar, DownloadSilentBar),
254
+ "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
255
+ "ascii": (DownloadBar, DownloadProgressSpinner),
256
+ "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
257
+ "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
258
+ }
259
+
260
+
261
+ def _legacy_progress_bar(
262
+ progress_bar: str, max: Optional[int]
263
+ ) -> DownloadProgressRenderer:
264
+ if max is None or max == 0:
265
+ return BAR_TYPES[progress_bar][1]().iter # type: ignore
266
+ else:
267
+ return BAR_TYPES[progress_bar][0](max=max).iter
268
+
269
+
270
+ #
271
+ # Modern replacement, for our legacy progress bars.
272
+ #
273
+ def _rich_progress_bar(
274
+ iterable: Iterator[bytes],
275
+ *,
276
+ bar_type: str,
277
+ size: int,
278
+ ) -> Iterator[bytes]:
279
+ assert bar_type == "on", "This should only be used in the default mode."
280
+
281
+ if not size:
282
+ total = float("inf")
283
+ columns: Tuple[ProgressColumn, ...] = (
284
+ TextColumn("[progress.description]{task.description}"),
285
+ SpinnerColumn("line", speed=1.5),
286
+ FileSizeColumn(),
287
+ TransferSpeedColumn(),
288
+ TimeElapsedColumn(),
289
+ )
290
+ else:
291
+ total = size
292
+ columns = (
293
+ TextColumn("[progress.description]{task.description}"),
294
+ BarColumn(),
295
+ DownloadColumn(),
296
+ TransferSpeedColumn(),
297
+ TextColumn("eta"),
298
+ TimeRemainingColumn(),
299
+ )
300
+
301
+ progress = Progress(*columns, refresh_per_second=30)
302
+ task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
303
+ with progress:
304
+ for chunk in iterable:
305
+ yield chunk
306
+ progress.update(task_id, advance=len(chunk))
307
+
308
+
309
+ def get_download_progress_renderer(
310
+ *, bar_type: str, size: Optional[int] = None
311
+ ) -> DownloadProgressRenderer:
312
+ """Get an object that can be used to render the download progress.
313
+
314
+ Returns a callable, that takes an iterable to "wrap".
315
+ """
316
+ if bar_type == "on":
317
+ return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
318
+ elif bar_type == "off":
319
+ return iter # no-op, when passed an iterator
320
+ else:
321
+ return _legacy_progress_bar(bar_type, size)
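get_download_progress_renderer() returns a callable that wraps an iterator of byte chunks and re-yields them while rendering progress. A small sketch with fabricated in-memory chunks (in pip the chunks come from the HTTP response body):

# Sketch: wrapping a chunk iterator with the renderer defined above.
from pip._internal.cli.progress_bars import get_download_progress_renderer

chunks = (b"x" * 1024 for _ in range(8))  # fabricated data for illustration
renderer = get_download_progress_renderer(bar_type="on", size=8 * 1024)

total = 0
for chunk in renderer(chunks):
    total += len(chunk)  # consume the data as it is "downloaded"
print(total)  # 8192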
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/req_command.py ADDED
@@ -0,0 +1,506 @@
1
+ """Contains the Command base classes that depend on PipSession.
2
+
3
+ These classes are kept in a separate module so that commands which do not
4
+ need download / PackageFinder capability don't unnecessarily import the
5
+ PackageFinder machinery and all its vendored dependencies, etc.
6
+ """
7
+
8
+ import logging
9
+ import os
10
+ import sys
11
+ from functools import partial
12
+ from optparse import Values
13
+ from typing import Any, List, Optional, Tuple
14
+
15
+ from pip._internal.cache import WheelCache
16
+ from pip._internal.cli import cmdoptions
17
+ from pip._internal.cli.base_command import Command
18
+ from pip._internal.cli.command_context import CommandContextMixIn
19
+ from pip._internal.exceptions import CommandError, PreviousBuildDirError
20
+ from pip._internal.index.collector import LinkCollector
21
+ from pip._internal.index.package_finder import PackageFinder
22
+ from pip._internal.models.selection_prefs import SelectionPreferences
23
+ from pip._internal.models.target_python import TargetPython
24
+ from pip._internal.network.session import PipSession
25
+ from pip._internal.operations.prepare import RequirementPreparer
26
+ from pip._internal.req.constructors import (
27
+ install_req_from_editable,
28
+ install_req_from_line,
29
+ install_req_from_parsed_requirement,
30
+ install_req_from_req_string,
31
+ )
32
+ from pip._internal.req.req_file import parse_requirements
33
+ from pip._internal.req.req_install import InstallRequirement
34
+ from pip._internal.req.req_tracker import RequirementTracker
35
+ from pip._internal.resolution.base import BaseResolver
36
+ from pip._internal.self_outdated_check import pip_self_version_check
37
+ from pip._internal.utils.deprecation import deprecated
38
+ from pip._internal.utils.temp_dir import (
39
+ TempDirectory,
40
+ TempDirectoryTypeRegistry,
41
+ tempdir_kinds,
42
+ )
43
+ from pip._internal.utils.virtualenv import running_under_virtualenv
44
+
45
+ logger = logging.getLogger(__name__)
46
+
47
+
48
+ class SessionCommandMixin(CommandContextMixIn):
49
+
50
+ """
51
+ A class mixin for command classes needing _build_session().
52
+ """
53
+
54
+ def __init__(self) -> None:
55
+ super().__init__()
56
+ self._session: Optional[PipSession] = None
57
+
58
+ @classmethod
59
+ def _get_index_urls(cls, options: Values) -> Optional[List[str]]:
60
+ """Return a list of index urls from user-provided options."""
61
+ index_urls = []
62
+ if not getattr(options, "no_index", False):
63
+ url = getattr(options, "index_url", None)
64
+ if url:
65
+ index_urls.append(url)
66
+ urls = getattr(options, "extra_index_urls", None)
67
+ if urls:
68
+ index_urls.extend(urls)
69
+ # Return None rather than an empty list
70
+ return index_urls or None
71
+
72
+ def get_default_session(self, options: Values) -> PipSession:
73
+ """Get a default-managed session."""
74
+ if self._session is None:
75
+ self._session = self.enter_context(self._build_session(options))
76
+ # there's no type annotation on requests.Session, so it's
77
+ # automatically ContextManager[Any] and self._session becomes Any,
78
+ # then https://github.com/python/mypy/issues/7696 kicks in
79
+ assert self._session is not None
80
+ return self._session
81
+
82
+ def _build_session(
83
+ self,
84
+ options: Values,
85
+ retries: Optional[int] = None,
86
+ timeout: Optional[int] = None,
87
+ ) -> PipSession:
88
+ assert not options.cache_dir or os.path.isabs(options.cache_dir)
89
+ session = PipSession(
90
+ cache=(
91
+ os.path.join(options.cache_dir, "http") if options.cache_dir else None
92
+ ),
93
+ retries=retries if retries is not None else options.retries,
94
+ trusted_hosts=options.trusted_hosts,
95
+ index_urls=self._get_index_urls(options),
96
+ )
97
+
98
+ # Handle custom ca-bundles from the user
99
+ if options.cert:
100
+ session.verify = options.cert
101
+
102
+ # Handle SSL client certificate
103
+ if options.client_cert:
104
+ session.cert = options.client_cert
105
+
106
+ # Handle timeouts
107
+ if options.timeout or timeout:
108
+ session.timeout = timeout if timeout is not None else options.timeout
109
+
110
+ # Handle configured proxies
111
+ if options.proxy:
112
+ session.proxies = {
113
+ "http": options.proxy,
114
+ "https": options.proxy,
115
+ }
116
+
117
+ # Determine if we can prompt the user for authentication or not
118
+ session.auth.prompting = not options.no_input
119
+
120
+ return session
121
+
122
+
123
+ class IndexGroupCommand(Command, SessionCommandMixin):
124
+
125
+ """
126
+ Abstract base class for commands with the index_group options.
127
+
128
+ This also corresponds to the commands that permit the pip version check.
129
+ """
130
+
131
+ def handle_pip_version_check(self, options: Values) -> None:
132
+ """
133
+ Do the pip version check if not disabled.
134
+
135
+ This overrides the default behavior of not doing the check.
136
+ """
137
+ # Make sure the index_group options are present.
138
+ assert hasattr(options, "no_index")
139
+
140
+ if options.disable_pip_version_check or options.no_index:
141
+ return
142
+
143
+ # Otherwise, check if we're using the latest version of pip available.
144
+ session = self._build_session(
145
+ options, retries=0, timeout=min(5, options.timeout)
146
+ )
147
+ with session:
148
+ pip_self_version_check(session, options)
149
+
150
+
151
+ KEEPABLE_TEMPDIR_TYPES = [
152
+ tempdir_kinds.BUILD_ENV,
153
+ tempdir_kinds.EPHEM_WHEEL_CACHE,
154
+ tempdir_kinds.REQ_BUILD,
155
+ ]
156
+
157
+
158
+ def warn_if_run_as_root() -> None:
159
+ """Output a warning for sudo users on Unix.
160
+
161
+ In a virtual environment, sudo pip still writes to virtualenv.
162
+ On Windows, users may run pip as Administrator without issues.
163
+ This warning only applies to Unix root users outside of virtualenv.
164
+ """
165
+ if running_under_virtualenv():
166
+ return
167
+ if not hasattr(os, "getuid"):
168
+ return
169
+ # On Windows, there are no "system managed" Python packages. Installing as
170
+ # Administrator via pip is the correct way of updating system environments.
171
+ #
172
+ # We choose sys.platform over utils.compat.WINDOWS here to enable Mypy platform
173
+ # checks: https://mypy.readthedocs.io/en/stable/common_issues.html
174
+ if sys.platform == "win32" or sys.platform == "cygwin":
175
+ return
176
+
177
+ if os.getuid() != 0:
178
+ return
179
+
180
+ logger.warning(
181
+ "Running pip as the 'root' user can result in broken permissions and "
182
+ "conflicting behaviour with the system package manager. "
183
+ "It is recommended to use a virtual environment instead: "
184
+ "https://pip.pypa.io/warnings/venv"
185
+ )
186
+
187
+
188
+ def with_cleanup(func: Any) -> Any:
189
+ """Decorator for common logic related to managing temporary
190
+ directories.
191
+ """
192
+
193
+ def configure_tempdir_registry(registry: TempDirectoryTypeRegistry) -> None:
194
+ for t in KEEPABLE_TEMPDIR_TYPES:
195
+ registry.set_delete(t, False)
196
+
197
+ def wrapper(
198
+ self: RequirementCommand, options: Values, args: List[Any]
199
+ ) -> Optional[int]:
200
+ assert self.tempdir_registry is not None
201
+ if options.no_clean:
202
+ configure_tempdir_registry(self.tempdir_registry)
203
+
204
+ try:
205
+ return func(self, options, args)
206
+ except PreviousBuildDirError:
207
+ # This kind of conflict can occur when the user passes an explicit
208
+ # build directory with a pre-existing folder. In that case we do
209
+ # not want to accidentally remove it.
210
+ configure_tempdir_registry(self.tempdir_registry)
211
+ raise
212
+
213
+ return wrapper
214
+
215
+
216
+ class RequirementCommand(IndexGroupCommand):
217
+ def __init__(self, *args: Any, **kw: Any) -> None:
218
+ super().__init__(*args, **kw)
219
+
220
+ self.cmd_opts.add_option(cmdoptions.no_clean())
221
+
222
+ @staticmethod
223
+ def determine_resolver_variant(options: Values) -> str:
224
+ """Determines which resolver should be used, based on the given options."""
225
+ if "legacy-resolver" in options.deprecated_features_enabled:
226
+ return "legacy"
227
+
228
+ return "2020-resolver"
229
+
230
+ @staticmethod
231
+ def determine_build_failure_suppression(options: Values) -> bool:
232
+ """Determines whether build failures should be suppressed and backtracked on."""
233
+ if "backtrack-on-build-failures" not in options.deprecated_features_enabled:
234
+ return False
235
+
236
+ if "legacy-resolver" in options.deprecated_features_enabled:
237
+ raise CommandError("Cannot backtrack with legacy resolver.")
238
+
239
+ deprecated(
240
+ reason=(
241
+ "Backtracking on build failures can mask issues related to how "
242
+ "a package generates metadata or builds a wheel. This flag will "
243
+ "be removed in pip 22.2."
244
+ ),
245
+ gone_in=None,
246
+ replacement=(
247
+ "avoiding known-bad versions by explicitly telling pip to ignore them "
248
+ "(either directly as requirements, or via a constraints file)"
249
+ ),
250
+ feature_flag=None,
251
+ issue=10655,
252
+ )
253
+ return True
254
+
255
+ @classmethod
256
+ def make_requirement_preparer(
257
+ cls,
258
+ temp_build_dir: TempDirectory,
259
+ options: Values,
260
+ req_tracker: RequirementTracker,
261
+ session: PipSession,
262
+ finder: PackageFinder,
263
+ use_user_site: bool,
264
+ download_dir: Optional[str] = None,
265
+ verbosity: int = 0,
266
+ ) -> RequirementPreparer:
267
+ """
268
+ Create a RequirementPreparer instance for the given parameters.
269
+ """
270
+ temp_build_dir_path = temp_build_dir.path
271
+ assert temp_build_dir_path is not None
272
+
273
+ resolver_variant = cls.determine_resolver_variant(options)
274
+ if resolver_variant == "2020-resolver":
275
+ lazy_wheel = "fast-deps" in options.features_enabled
276
+ if lazy_wheel:
277
+ logger.warning(
278
+ "pip is using lazily downloaded wheels using HTTP "
279
+ "range requests to obtain dependency information. "
280
+ "This experimental feature is enabled through "
281
+ "--use-feature=fast-deps and it is not ready for "
282
+ "production."
283
+ )
284
+ else:
285
+ lazy_wheel = False
286
+ if "fast-deps" in options.features_enabled:
287
+ logger.warning(
288
+ "fast-deps has no effect when used with the legacy resolver."
289
+ )
290
+
291
+ in_tree_build = "out-of-tree-build" not in options.deprecated_features_enabled
292
+ if "in-tree-build" in options.features_enabled:
293
+ deprecated(
294
+ reason="In-tree builds are now the default.",
295
+ replacement="to remove the --use-feature=in-tree-build flag",
296
+ gone_in="22.1",
297
+ )
298
+ if "out-of-tree-build" in options.deprecated_features_enabled:
299
+ deprecated(
300
+ reason="Out-of-tree builds are deprecated.",
301
+ replacement=None,
302
+ gone_in="22.1",
303
+ )
304
+
305
+ if options.progress_bar not in {"on", "off"}:
306
+ deprecated(
307
+ reason="Custom progress bar styles are deprecated",
308
+ replacement="to use the default progress bar style.",
309
+ gone_in="22.1",
310
+ )
311
+
312
+ return RequirementPreparer(
313
+ build_dir=temp_build_dir_path,
314
+ src_dir=options.src_dir,
315
+ download_dir=download_dir,
316
+ build_isolation=options.build_isolation,
317
+ req_tracker=req_tracker,
318
+ session=session,
319
+ progress_bar=options.progress_bar,
320
+ finder=finder,
321
+ require_hashes=options.require_hashes,
322
+ use_user_site=use_user_site,
323
+ lazy_wheel=lazy_wheel,
324
+ verbosity=verbosity,
325
+ in_tree_build=in_tree_build,
326
+ )
327
+
328
+ @classmethod
329
+ def make_resolver(
330
+ cls,
331
+ preparer: RequirementPreparer,
332
+ finder: PackageFinder,
333
+ options: Values,
334
+ wheel_cache: Optional[WheelCache] = None,
335
+ use_user_site: bool = False,
336
+ ignore_installed: bool = True,
337
+ ignore_requires_python: bool = False,
338
+ force_reinstall: bool = False,
339
+ upgrade_strategy: str = "to-satisfy-only",
340
+ use_pep517: Optional[bool] = None,
341
+ py_version_info: Optional[Tuple[int, ...]] = None,
342
+ ) -> BaseResolver:
343
+ """
344
+ Create a Resolver instance for the given parameters.
345
+ """
346
+ make_install_req = partial(
347
+ install_req_from_req_string,
348
+ isolated=options.isolated_mode,
349
+ use_pep517=use_pep517,
350
+ )
351
+ suppress_build_failures = cls.determine_build_failure_suppression(options)
352
+ resolver_variant = cls.determine_resolver_variant(options)
353
+ # The long import name and duplicated invocation is needed to convince
354
+ # Mypy into correctly typechecking. Otherwise it would complain the
355
+ # "Resolver" class being redefined.
356
+ if resolver_variant == "2020-resolver":
357
+ import pip._internal.resolution.resolvelib.resolver
358
+
359
+ return pip._internal.resolution.resolvelib.resolver.Resolver(
360
+ preparer=preparer,
361
+ finder=finder,
362
+ wheel_cache=wheel_cache,
363
+ make_install_req=make_install_req,
364
+ use_user_site=use_user_site,
365
+ ignore_dependencies=options.ignore_dependencies,
366
+ ignore_installed=ignore_installed,
367
+ ignore_requires_python=ignore_requires_python,
368
+ force_reinstall=force_reinstall,
369
+ upgrade_strategy=upgrade_strategy,
370
+ py_version_info=py_version_info,
371
+ suppress_build_failures=suppress_build_failures,
372
+ )
373
+ import pip._internal.resolution.legacy.resolver
374
+
375
+ return pip._internal.resolution.legacy.resolver.Resolver(
376
+ preparer=preparer,
377
+ finder=finder,
378
+ wheel_cache=wheel_cache,
379
+ make_install_req=make_install_req,
380
+ use_user_site=use_user_site,
381
+ ignore_dependencies=options.ignore_dependencies,
382
+ ignore_installed=ignore_installed,
383
+ ignore_requires_python=ignore_requires_python,
384
+ force_reinstall=force_reinstall,
385
+ upgrade_strategy=upgrade_strategy,
386
+ py_version_info=py_version_info,
387
+ )
388
+
389
+ def get_requirements(
390
+ self,
391
+ args: List[str],
392
+ options: Values,
393
+ finder: PackageFinder,
394
+ session: PipSession,
395
+ ) -> List[InstallRequirement]:
396
+ """
397
+ Parse command-line arguments into the corresponding requirements.
398
+ """
399
+ requirements: List[InstallRequirement] = []
400
+ for filename in options.constraints:
401
+ for parsed_req in parse_requirements(
402
+ filename,
403
+ constraint=True,
404
+ finder=finder,
405
+ options=options,
406
+ session=session,
407
+ ):
408
+ req_to_add = install_req_from_parsed_requirement(
409
+ parsed_req,
410
+ isolated=options.isolated_mode,
411
+ user_supplied=False,
412
+ )
413
+ requirements.append(req_to_add)
414
+
415
+ for req in args:
416
+ req_to_add = install_req_from_line(
417
+ req,
418
+ None,
419
+ isolated=options.isolated_mode,
420
+ use_pep517=options.use_pep517,
421
+ user_supplied=True,
422
+ )
423
+ requirements.append(req_to_add)
424
+
425
+ for req in options.editables:
426
+ req_to_add = install_req_from_editable(
427
+ req,
428
+ user_supplied=True,
429
+ isolated=options.isolated_mode,
430
+ use_pep517=options.use_pep517,
431
+ )
432
+ requirements.append(req_to_add)
433
+
434
+ # NOTE: options.require_hashes may be set if --require-hashes is True
435
+ for filename in options.requirements:
436
+ for parsed_req in parse_requirements(
437
+ filename, finder=finder, options=options, session=session
438
+ ):
439
+ req_to_add = install_req_from_parsed_requirement(
440
+ parsed_req,
441
+ isolated=options.isolated_mode,
442
+ use_pep517=options.use_pep517,
443
+ user_supplied=True,
444
+ )
445
+ requirements.append(req_to_add)
446
+
447
+ # If any requirement has hash options, enable hash checking.
448
+ if any(req.has_hash_options for req in requirements):
449
+ options.require_hashes = True
450
+
451
+ if not (args or options.editables or options.requirements):
452
+ opts = {"name": self.name}
453
+ if options.find_links:
454
+ raise CommandError(
455
+ "You must give at least one requirement to {name} "
456
+ '(maybe you meant "pip {name} {links}"?)'.format(
457
+ **dict(opts, links=" ".join(options.find_links))
458
+ )
459
+ )
460
+ else:
461
+ raise CommandError(
462
+ "You must give at least one requirement to {name} "
463
+ '(see "pip help {name}")'.format(**opts)
464
+ )
465
+
466
+ return requirements
467
+
468
+ @staticmethod
469
+ def trace_basic_info(finder: PackageFinder) -> None:
470
+ """
471
+ Trace basic information about the provided objects.
472
+ """
473
+ # Display where finder is looking for packages
474
+ search_scope = finder.search_scope
475
+ locations = search_scope.get_formatted_locations()
476
+ if locations:
477
+ logger.info(locations)
478
+
479
+ def _build_package_finder(
480
+ self,
481
+ options: Values,
482
+ session: PipSession,
483
+ target_python: Optional[TargetPython] = None,
484
+ ignore_requires_python: Optional[bool] = None,
485
+ ) -> PackageFinder:
486
+ """
487
+ Create a package finder appropriate to this requirement command.
488
+
489
+ :param ignore_requires_python: Whether to ignore incompatible
490
+ "Requires-Python" values in links. Defaults to False.
491
+ """
492
+ link_collector = LinkCollector.create(session, options=options)
493
+ selection_prefs = SelectionPreferences(
494
+ allow_yanked=True,
495
+ format_control=options.format_control,
496
+ allow_all_prereleases=options.pre,
497
+ prefer_binary=options.prefer_binary,
498
+ ignore_requires_python=ignore_requires_python,
499
+ )
500
+
501
+ return PackageFinder.create(
502
+ link_collector=link_collector,
503
+ selection_prefs=selection_prefs,
504
+ target_python=target_python,
505
+ use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
506
+ )
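determine_resolver_variant() above is a pure function of the parsed options, which makes it easy to probe in isolation. A sketch, assuming the internal API shown above is unchanged:

# Sketch: resolver selection as implemented by determine_resolver_variant().
from optparse import Values

from pip._internal.cli.req_command import RequirementCommand

opts = Values()
opts.deprecated_features_enabled = []  # default: new resolver
print(RequirementCommand.determine_resolver_variant(opts))  # '2020-resolver'

opts.deprecated_features_enabled = ["legacy-resolver"]  # opted back in
print(RequirementCommand.determine_resolver_variant(opts))  # 'legacy'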
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/spinners.py ADDED
@@ -0,0 +1,157 @@
1
+ import contextlib
2
+ import itertools
3
+ import logging
4
+ import sys
5
+ import time
6
+ from typing import IO, Iterator, Optional
7
+
8
+ from pip._vendor.progress import HIDE_CURSOR, SHOW_CURSOR
9
+
10
+ from pip._internal.utils.compat import WINDOWS
11
+ from pip._internal.utils.logging import get_indentation
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
+ class SpinnerInterface:
17
+ def spin(self) -> None:
18
+ raise NotImplementedError()
19
+
20
+ def finish(self, final_status: str) -> None:
21
+ raise NotImplementedError()
22
+
23
+
24
+ class InteractiveSpinner(SpinnerInterface):
25
+ def __init__(
26
+ self,
27
+ message: str,
28
+ file: Optional[IO[str]] = None,
29
+ spin_chars: str = "-\\|/",
30
+ # Empirically, 8 updates/second looks nice
31
+ min_update_interval_seconds: float = 0.125,
32
+ ):
33
+ self._message = message
34
+ if file is None:
35
+ file = sys.stdout
36
+ self._file = file
37
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
38
+ self._finished = False
39
+
40
+ self._spin_cycle = itertools.cycle(spin_chars)
41
+
42
+ self._file.write(" " * get_indentation() + self._message + " ... ")
43
+ self._width = 0
44
+
45
+ def _write(self, status: str) -> None:
46
+ assert not self._finished
47
+ # Erase what we wrote before by backspacing to the beginning, writing
48
+ # spaces to overwrite the old text, and then backspacing again
49
+ backup = "\b" * self._width
50
+ self._file.write(backup + " " * self._width + backup)
51
+ # Now we have a blank slate to add our status
52
+ self._file.write(status)
53
+ self._width = len(status)
54
+ self._file.flush()
55
+ self._rate_limiter.reset()
56
+
57
+ def spin(self) -> None:
58
+ if self._finished:
59
+ return
60
+ if not self._rate_limiter.ready():
61
+ return
62
+ self._write(next(self._spin_cycle))
63
+
64
+ def finish(self, final_status: str) -> None:
65
+ if self._finished:
66
+ return
67
+ self._write(final_status)
68
+ self._file.write("\n")
69
+ self._file.flush()
70
+ self._finished = True
71
+
72
+
73
+ # Used for dumb terminals, non-interactive installs (no tty), etc.
74
+ # We still print updates occasionally (once every 60 seconds by default) to
75
+ # act as a keep-alive for systems like Travis-CI that take lack-of-output as
76
+ # an indication that a task has frozen.
77
+ class NonInteractiveSpinner(SpinnerInterface):
78
+ def __init__(self, message: str, min_update_interval_seconds: float = 60.0) -> None:
79
+ self._message = message
80
+ self._finished = False
81
+ self._rate_limiter = RateLimiter(min_update_interval_seconds)
82
+ self._update("started")
83
+
84
+ def _update(self, status: str) -> None:
85
+ assert not self._finished
86
+ self._rate_limiter.reset()
87
+ logger.info("%s: %s", self._message, status)
88
+
89
+ def spin(self) -> None:
90
+ if self._finished:
91
+ return
92
+ if not self._rate_limiter.ready():
93
+ return
94
+ self._update("still running...")
95
+
96
+ def finish(self, final_status: str) -> None:
97
+ if self._finished:
98
+ return
99
+ self._update(f"finished with status '{final_status}'")
100
+ self._finished = True
101
+
102
+
103
+ class RateLimiter:
104
+ def __init__(self, min_update_interval_seconds: float) -> None:
105
+ self._min_update_interval_seconds = min_update_interval_seconds
106
+ self._last_update: float = 0
107
+
108
+ def ready(self) -> bool:
109
+ now = time.time()
110
+ delta = now - self._last_update
111
+ return delta >= self._min_update_interval_seconds
112
+
113
+ def reset(self) -> None:
114
+ self._last_update = time.time()
115
+
116
+
117
+ @contextlib.contextmanager
118
+ def open_spinner(message: str) -> Iterator[SpinnerInterface]:
119
+ # Interactive spinner goes directly to sys.stdout rather than being routed
120
+ # through the logging system, but it acts like it has level INFO,
121
+ # i.e. it's only displayed if we're at level INFO or better.
122
+ # Non-interactive spinner goes through the logging system, so it is always
123
+ # in sync with logging configuration.
124
+ if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO:
125
+ spinner: SpinnerInterface = InteractiveSpinner(message)
126
+ else:
127
+ spinner = NonInteractiveSpinner(message)
128
+ try:
129
+ with hidden_cursor(sys.stdout):
130
+ yield spinner
131
+ except KeyboardInterrupt:
132
+ spinner.finish("canceled")
133
+ raise
134
+ except Exception:
135
+ spinner.finish("error")
136
+ raise
137
+ else:
138
+ spinner.finish("done")
139
+
140
+
141
+ @contextlib.contextmanager
142
+ def hidden_cursor(file: IO[str]) -> Iterator[None]:
143
+ # The Windows terminal does not support the hide/show cursor ANSI codes,
144
+ # even via colorama. So don't even try.
145
+ if WINDOWS:
146
+ yield
147
+ # We don't want to clutter the output with control characters if we're
148
+ # writing to a file, or if the user is running with --quiet.
149
+ # See https://github.com/pypa/pip/issues/3418
150
+ elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO:
151
+ yield
152
+ else:
153
+ file.write(HIDE_CURSOR)
154
+ try:
155
+ yield
156
+ finally:
157
+ file.write(SHOW_CURSOR)
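open_spinner() is the public entry point of this module: it picks an interactive or logging-based spinner, hides the cursor where appropriate, and reports 'done', 'error', or 'canceled' when the block exits. A minimal usage sketch (time.sleep stands in for real work):

# Sketch: wrapping a long-running task in open_spinner(); spin() is rate-limited.
import time

from pip._internal.cli.spinners import open_spinner

with open_spinner("Doing slow work") as spinner:
    for _ in range(10):
        time.sleep(0.2)  # stand-in for real work
        spinner.spin()
# Ends with "Doing slow work ... done" (or "error"/"canceled" on failure).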
llmeval-env/lib/python3.10/site-packages/pip/_internal/cli/status_codes.py ADDED
@@ -0,0 +1,6 @@
1
+ SUCCESS = 0
2
+ ERROR = 1
3
+ UNKNOWN_ERROR = 2
4
+ VIRTUALENV_NOT_FOUND = 3
5
+ PREVIOUS_BUILD_DIR_ERROR = 4
6
+ NO_MATCHES_FOUND = 23
llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (227 Bytes). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc ADDED
Binary file (19.3 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc ADDED
Binary file (28.1 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc ADDED
Binary file (7.12 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__init__.py ADDED
@@ -0,0 +1,94 @@
1
+ import collections
2
+ import logging
3
+ from typing import Iterator, List, Optional, Sequence, Tuple
4
+
5
+ from pip._internal.utils.logging import indent_log
6
+
7
+ from .req_file import parse_requirements
8
+ from .req_install import InstallRequirement
9
+ from .req_set import RequirementSet
10
+
11
+ __all__ = [
12
+ "RequirementSet",
13
+ "InstallRequirement",
14
+ "parse_requirements",
15
+ "install_given_reqs",
16
+ ]
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ class InstallationResult:
22
+ def __init__(self, name: str) -> None:
23
+ self.name = name
24
+
25
+ def __repr__(self) -> str:
26
+ return f"InstallationResult(name={self.name!r})"
27
+
28
+
29
+ def _validate_requirements(
30
+ requirements: List[InstallRequirement],
31
+ ) -> Iterator[Tuple[str, InstallRequirement]]:
32
+ for req in requirements:
33
+ assert req.name, f"invalid to-be-installed requirement: {req}"
34
+ yield req.name, req
35
+
36
+
37
+ def install_given_reqs(
38
+ requirements: List[InstallRequirement],
39
+ install_options: List[str],
40
+ global_options: Sequence[str],
41
+ root: Optional[str],
42
+ home: Optional[str],
43
+ prefix: Optional[str],
44
+ warn_script_location: bool,
45
+ use_user_site: bool,
46
+ pycompile: bool,
47
+ ) -> List[InstallationResult]:
48
+ """
49
+ Install everything in the given list.
50
+
51
+ (to be called after having downloaded and unpacked the packages)
52
+ """
53
+ to_install = collections.OrderedDict(_validate_requirements(requirements))
54
+
55
+ if to_install:
56
+ logger.info(
57
+ "Installing collected packages: %s",
58
+ ", ".join(to_install.keys()),
59
+ )
60
+
61
+ installed = []
62
+
63
+ with indent_log():
64
+ for req_name, requirement in to_install.items():
65
+ if requirement.should_reinstall:
66
+ logger.info("Attempting uninstall: %s", req_name)
67
+ with indent_log():
68
+ uninstalled_pathset = requirement.uninstall(auto_confirm=True)
69
+ else:
70
+ uninstalled_pathset = None
71
+
72
+ try:
73
+ requirement.install(
74
+ install_options,
75
+ global_options,
76
+ root=root,
77
+ home=home,
78
+ prefix=prefix,
79
+ warn_script_location=warn_script_location,
80
+ use_user_site=use_user_site,
81
+ pycompile=pycompile,
82
+ )
83
+ except Exception:
84
+ # if install did not succeed, rollback previous uninstall
85
+ if uninstalled_pathset and not requirement.install_succeeded:
86
+ uninstalled_pathset.rollback()
87
+ raise
88
+ else:
89
+ if uninstalled_pathset and requirement.install_succeeded:
90
+ uninstalled_pathset.commit()
91
+
92
+ installed.append(InstallationResult(req_name))
93
+
94
+ return installed
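The try/except/else block above implements an "uninstall first, roll back on failure, commit on success" transaction around each install. The same pattern in isolation, with hypothetical stand-ins rather than pip's real uninstall machinery:

# Standalone sketch of the rollback/commit pattern used by install_given_reqs().
# "Rollback" and "upgrade" are hypothetical stand-ins for illustration.
class Rollback:
    def __init__(self, name: str) -> None:
        self.name = name

    def rollback(self) -> None:
        print(f"restoring previous {self.name}")

    def commit(self) -> None:
        print(f"removing previous {self.name}")


def upgrade(name: str, install_ok: bool) -> None:
    previous = Rollback(name)  # analogous to requirement.uninstall(auto_confirm=True)
    try:
        if not install_ok:
            raise RuntimeError("install failed")
    except Exception:
        previous.rollback()  # failed install: restore the old files
        raise
    else:
        previous.commit()  # successful install: drop the old files


upgrade("example-package", install_ok=True)  # removing previous example-package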
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (2.6 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc ADDED
Binary file (12.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc ADDED
Binary file (13.5 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc ADDED
Binary file (22.2 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc ADDED
Binary file (5.84 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc ADDED
Binary file (4.31 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc ADDED
Binary file (18.9 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/constructors.py ADDED
@@ -0,0 +1,490 @@
1
+ """Backing implementation for InstallRequirement's various constructors
2
+
3
+ The idea here is that these formed a major chunk of InstallRequirement's size
4
+ so moving them, and the support code dedicated to them, outside of that class
5
+ makes the rest of the code easier to understand.
6
+
7
+ These are meant to be used elsewhere within pip to create instances of
8
+ InstallRequirement.
9
+ """
10
+
11
+ import logging
12
+ import os
13
+ import re
14
+ from typing import Any, Dict, Optional, Set, Tuple, Union
15
+
16
+ from pip._vendor.packaging.markers import Marker
17
+ from pip._vendor.packaging.requirements import InvalidRequirement, Requirement
18
+ from pip._vendor.packaging.specifiers import Specifier
19
+
20
+ from pip._internal.exceptions import InstallationError
21
+ from pip._internal.models.index import PyPI, TestPyPI
22
+ from pip._internal.models.link import Link
23
+ from pip._internal.models.wheel import Wheel
24
+ from pip._internal.req.req_file import ParsedRequirement
25
+ from pip._internal.req.req_install import InstallRequirement
26
+ from pip._internal.utils.filetypes import is_archive_file
27
+ from pip._internal.utils.misc import is_installable_dir
28
+ from pip._internal.utils.packaging import get_requirement
29
+ from pip._internal.utils.urls import path_to_url
30
+ from pip._internal.vcs import is_url, vcs
31
+
32
+ __all__ = [
33
+ "install_req_from_editable",
34
+ "install_req_from_line",
35
+ "parse_editable",
36
+ ]
37
+
38
+ logger = logging.getLogger(__name__)
39
+ operators = Specifier._operators.keys()
40
+
41
+
42
+ def _strip_extras(path: str) -> Tuple[str, Optional[str]]:
43
+ m = re.match(r"^(.+)(\[[^\]]+\])$", path)
44
+ extras = None
45
+ if m:
46
+ path_no_extras = m.group(1)
47
+ extras = m.group(2)
48
+ else:
49
+ path_no_extras = path
50
+
51
+ return path_no_extras, extras
52
+
53
+
54
+ def convert_extras(extras: Optional[str]) -> Set[str]:
55
+ if not extras:
56
+ return set()
57
+ return get_requirement("placeholder" + extras.lower()).extras
58
+
59
+
60
+ def parse_editable(editable_req: str) -> Tuple[Optional[str], str, Set[str]]:
61
+ """Parses an editable requirement into:
62
+ - a requirement name
63
+ - a URL
64
+ - extras
65
+ - editable options
66
+ Accepted requirements:
67
+ svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir
68
+ .[some_extra]
69
+ """
70
+
71
+ url = editable_req
72
+
73
+ # If a file path is specified with extras, strip off the extras.
74
+ url_no_extras, extras = _strip_extras(url)
75
+
76
+ if os.path.isdir(url_no_extras):
77
+ # Treating it as code that has already been checked out
78
+ url_no_extras = path_to_url(url_no_extras)
79
+
80
+ if url_no_extras.lower().startswith("file:"):
81
+ package_name = Link(url_no_extras).egg_fragment
82
+ if extras:
83
+ return (
84
+ package_name,
85
+ url_no_extras,
86
+ get_requirement("placeholder" + extras.lower()).extras,
87
+ )
88
+ else:
89
+ return package_name, url_no_extras, set()
90
+
91
+ for version_control in vcs:
92
+ if url.lower().startswith(f"{version_control}:"):
93
+ url = f"{version_control}+{url}"
94
+ break
95
+
96
+ link = Link(url)
97
+
98
+ if not link.is_vcs:
99
+ backends = ", ".join(vcs.all_schemes)
100
+ raise InstallationError(
101
+ f"{editable_req} is not a valid editable requirement. "
102
+ f"It should either be a path to a local project or a VCS URL "
103
+ f"(beginning with {backends})."
104
+ )
105
+
106
+ package_name = link.egg_fragment
107
+ if not package_name:
108
+ raise InstallationError(
109
+ "Could not detect requirement name for '{}', please specify one "
110
+ "with #egg=your_package_name".format(editable_req)
111
+ )
112
+ return package_name, url, set()
113
+
114
+
115
+ def check_first_requirement_in_file(filename: str) -> None:
116
+ """Check if file is parsable as a requirements file.
117
+
118
+ This is heavily based on ``pkg_resources.parse_requirements``, but
119
+ simplified to just check the first meaningful line.
120
+
121
+ :raises InvalidRequirement: If the first meaningful line cannot be parsed
122
+ as a requirement.
123
+ """
124
+ with open(filename, encoding="utf-8", errors="ignore") as f:
125
+ # Create a steppable iterator, so we can handle \-continuations.
126
+ lines = (
127
+ line
128
+ for line in (line.strip() for line in f)
129
+ if line and not line.startswith("#") # Skip blank lines/comments.
130
+ )
131
+
132
+ for line in lines:
133
+ # Drop comments -- a hash without a space may be in a URL.
134
+ if " #" in line:
135
+ line = line[: line.find(" #")]
136
+ # If there is a line continuation, drop it, and append the next line.
137
+ if line.endswith("\\"):
138
+ line = line[:-2].strip() + next(lines, "")
139
+ Requirement(line)
140
+ return
141
+
142
+
143
+ def deduce_helpful_msg(req: str) -> str:
144
+ """Returns helpful msg in case requirements file does not exist,
145
+ or cannot be parsed.
146
+
147
+ :param req: Requirements file path
148
+ """
149
+ if not os.path.exists(req):
150
+ return f" File '{req}' does not exist."
151
+ msg = " The path does exist. "
152
+ # Try to parse and check if it is a requirements file.
153
+ try:
154
+ check_first_requirement_in_file(req)
155
+ except InvalidRequirement:
156
+ logger.debug("Cannot parse '%s' as requirements file", req)
157
+ else:
158
+ msg += (
159
+ f"The argument you provided "
160
+ f"({req}) appears to be a"
161
+ f" requirements file. If that is the"
162
+ f" case, use the '-r' flag to install"
163
+ f" the packages specified within it."
164
+ )
165
+ return msg
166
+
167
+
168
+ class RequirementParts:
169
+ def __init__(
170
+ self,
171
+ requirement: Optional[Requirement],
172
+ link: Optional[Link],
173
+ markers: Optional[Marker],
174
+ extras: Set[str],
175
+ ):
176
+ self.requirement = requirement
177
+ self.link = link
178
+ self.markers = markers
179
+ self.extras = extras
180
+
181
+
182
+ def parse_req_from_editable(editable_req: str) -> RequirementParts:
183
+ name, url, extras_override = parse_editable(editable_req)
184
+
185
+ if name is not None:
186
+ try:
187
+ req: Optional[Requirement] = Requirement(name)
188
+ except InvalidRequirement:
189
+ raise InstallationError(f"Invalid requirement: '{name}'")
190
+ else:
191
+ req = None
192
+
193
+ link = Link(url)
194
+
195
+ return RequirementParts(req, link, None, extras_override)
196
+
197
+
198
+ # ---- The actual constructors follow ----
199
+
200
+
201
+ def install_req_from_editable(
202
+ editable_req: str,
203
+ comes_from: Optional[Union[InstallRequirement, str]] = None,
204
+ use_pep517: Optional[bool] = None,
205
+ isolated: bool = False,
206
+ options: Optional[Dict[str, Any]] = None,
207
+ constraint: bool = False,
208
+ user_supplied: bool = False,
209
+ permit_editable_wheels: bool = False,
210
+ ) -> InstallRequirement:
211
+
212
+ parts = parse_req_from_editable(editable_req)
213
+
214
+ return InstallRequirement(
215
+ parts.requirement,
216
+ comes_from=comes_from,
217
+ user_supplied=user_supplied,
218
+ editable=True,
219
+ permit_editable_wheels=permit_editable_wheels,
220
+ link=parts.link,
221
+ constraint=constraint,
222
+ use_pep517=use_pep517,
223
+ isolated=isolated,
224
+ install_options=options.get("install_options", []) if options else [],
225
+ global_options=options.get("global_options", []) if options else [],
226
+ hash_options=options.get("hashes", {}) if options else {},
227
+ extras=parts.extras,
228
+ )
229
+
230
+
231
+ def _looks_like_path(name: str) -> bool:
232
+ """Checks whether the string "looks like" a path on the filesystem.
233
+
234
+ This does not check whether the target actually exists, only judge from the
235
+ appearance.
236
+
237
+ Returns true if any of the following conditions is true:
238
+ * a path separator is found (either os.path.sep or os.path.altsep);
239
+ * a dot is found (which represents the current directory).
240
+ """
241
+ if os.path.sep in name:
242
+ return True
243
+ if os.path.altsep is not None and os.path.altsep in name:
244
+ return True
245
+ if name.startswith("."):
246
+ return True
247
+ return False
248
+
249
+
250
+ def _get_url_from_path(path: str, name: str) -> Optional[str]:
251
+ """
252
+ First, it checks whether a provided path is an installable directory. If it
253
+ is, returns the path converted to a file URL.
254
+
255
+ If it is not a directory, check whether the path is an archive file (such as a .whl);
256
+ if it is not, return None. If the archive file exists, return its URL. Otherwise, if the name has
257
+ an @ before something that does not look like a path, return None so it can be treated as a PEP 440 URL requirement.
258
+ """
259
+ if _looks_like_path(name) and os.path.isdir(path):
260
+ if is_installable_dir(path):
261
+ return path_to_url(path)
262
+ # TODO: The is_installable_dir test here might not be necessary
263
+ # now that it is done in load_pyproject_toml too.
264
+ raise InstallationError(
265
+ f"Directory {name!r} is not installable. Neither 'setup.py' "
266
+ "nor 'pyproject.toml' found."
267
+ )
268
+ if not is_archive_file(path):
269
+ return None
270
+ if os.path.isfile(path):
271
+ return path_to_url(path)
272
+ urlreq_parts = name.split("@", 1)
273
+ if len(urlreq_parts) >= 2 and not _looks_like_path(urlreq_parts[0]):
274
+ # If the path contains '@' and the part before it does not look
275
+ # like a path, try to treat it as a PEP 440 URL req instead.
276
+ return None
277
+ logger.warning(
278
+ "Requirement %r looks like a filename, but the file does not exist",
279
+ name,
280
+ )
281
+ return path_to_url(path)
282
+
283
+
284
+ def parse_req_from_line(name: str, line_source: Optional[str]) -> RequirementParts:
285
+ if is_url(name):
286
+ marker_sep = "; "
287
+ else:
288
+ marker_sep = ";"
289
+ if marker_sep in name:
290
+ name, markers_as_string = name.split(marker_sep, 1)
291
+ markers_as_string = markers_as_string.strip()
292
+ if not markers_as_string:
293
+ markers = None
294
+ else:
295
+ markers = Marker(markers_as_string)
296
+ else:
297
+ markers = None
298
+ name = name.strip()
299
+ req_as_string = None
300
+ path = os.path.normpath(os.path.abspath(name))
301
+ link = None
302
+ extras_as_string = None
303
+
304
+ if is_url(name):
305
+ link = Link(name)
306
+ else:
307
+ p, extras_as_string = _strip_extras(path)
308
+ url = _get_url_from_path(p, name)
309
+ if url is not None:
310
+ link = Link(url)
311
+
312
+ # it's a local file, dir, or url
313
+ if link:
314
+ # Handle relative file URLs
315
+ if link.scheme == "file" and re.search(r"\.\./", link.url):
316
+ link = Link(path_to_url(os.path.normpath(os.path.abspath(link.path))))
317
+ # wheel file
318
+ if link.is_wheel:
319
+ wheel = Wheel(link.filename) # can raise InvalidWheelFilename
320
+ req_as_string = f"{wheel.name}=={wheel.version}"
321
+ else:
322
+ # set the req to the egg fragment. when it's not there, this
323
+ # will become an 'unnamed' requirement
324
+ req_as_string = link.egg_fragment
325
+
326
+ # a requirement specifier
327
+ else:
328
+ req_as_string = name
329
+
330
+ extras = convert_extras(extras_as_string)
331
+
332
+ def with_source(text: str) -> str:
333
+ if not line_source:
334
+ return text
335
+ return f"{text} (from {line_source})"
336
+
337
+ def _parse_req_string(req_as_string: str) -> Requirement:
338
+ try:
339
+ req = get_requirement(req_as_string)
340
+ except InvalidRequirement:
341
+ if os.path.sep in req_as_string:
342
+ add_msg = "It looks like a path."
343
+ add_msg += deduce_helpful_msg(req_as_string)
344
+ elif "=" in req_as_string and not any(
345
+ op in req_as_string for op in operators
346
+ ):
347
+ add_msg = "= is not a valid operator. Did you mean == ?"
348
+ else:
349
+ add_msg = ""
350
+ msg = with_source(f"Invalid requirement: {req_as_string!r}")
351
+ if add_msg:
352
+ msg += f"\nHint: {add_msg}"
353
+ raise InstallationError(msg)
354
+ else:
355
+ # Deprecate extras after specifiers: "name>=1.0[extras]"
356
+ # This currently works by accident because _strip_extras() parses
357
+ # any extras in the end of the string and those are saved in
358
+ # RequirementParts
359
+ for spec in req.specifier:
360
+ spec_str = str(spec)
361
+ if spec_str.endswith("]"):
362
+ msg = f"Extras after version '{spec_str}'."
363
+ raise InstallationError(msg)
364
+ return req
365
+
366
+ if req_as_string is not None:
367
+ req: Optional[Requirement] = _parse_req_string(req_as_string)
368
+ else:
369
+ req = None
370
+
371
+ return RequirementParts(req, link, markers, extras)
372
+
373
+
374
+ def install_req_from_line(
375
+ name: str,
376
+ comes_from: Optional[Union[str, InstallRequirement]] = None,
377
+ use_pep517: Optional[bool] = None,
378
+ isolated: bool = False,
379
+ options: Optional[Dict[str, Any]] = None,
380
+ constraint: bool = False,
381
+ line_source: Optional[str] = None,
382
+ user_supplied: bool = False,
383
+ ) -> InstallRequirement:
384
+ """Creates an InstallRequirement from a name, which might be a
385
+ requirement, directory containing 'setup.py', filename, or URL.
386
+
387
+ :param line_source: An optional string describing where the line is from,
388
+ for logging purposes in case of an error.
389
+ """
390
+ parts = parse_req_from_line(name, line_source)
391
+
392
+ return InstallRequirement(
393
+ parts.requirement,
394
+ comes_from,
395
+ link=parts.link,
396
+ markers=parts.markers,
397
+ use_pep517=use_pep517,
398
+ isolated=isolated,
399
+ install_options=options.get("install_options", []) if options else [],
400
+ global_options=options.get("global_options", []) if options else [],
401
+ hash_options=options.get("hashes", {}) if options else {},
402
+ constraint=constraint,
403
+ extras=parts.extras,
404
+ user_supplied=user_supplied,
405
+ )
406
+
407
+
408
+ def install_req_from_req_string(
409
+ req_string: str,
410
+ comes_from: Optional[InstallRequirement] = None,
411
+ isolated: bool = False,
412
+ use_pep517: Optional[bool] = None,
413
+ user_supplied: bool = False,
414
+ ) -> InstallRequirement:
415
+ try:
416
+ req = get_requirement(req_string)
417
+ except InvalidRequirement:
418
+ raise InstallationError(f"Invalid requirement: '{req_string}'")
419
+
420
+ domains_not_allowed = [
421
+ PyPI.file_storage_domain,
422
+ TestPyPI.file_storage_domain,
423
+ ]
424
+ if (
425
+ req.url
426
+ and comes_from
427
+ and comes_from.link
428
+ and comes_from.link.netloc in domains_not_allowed
429
+ ):
430
+ # Explicitly disallow pypi packages that depend on external urls
431
+ raise InstallationError(
432
+ "Packages installed from PyPI cannot depend on packages "
433
+ "which are not also hosted on PyPI.\n"
434
+ "{} depends on {} ".format(comes_from.name, req)
435
+ )
436
+
437
+ return InstallRequirement(
438
+ req,
439
+ comes_from,
440
+ isolated=isolated,
441
+ use_pep517=use_pep517,
442
+ user_supplied=user_supplied,
443
+ )
444
+
445
+
446
+ def install_req_from_parsed_requirement(
447
+ parsed_req: ParsedRequirement,
448
+ isolated: bool = False,
449
+ use_pep517: Optional[bool] = None,
450
+ user_supplied: bool = False,
451
+ ) -> InstallRequirement:
452
+ if parsed_req.is_editable:
453
+ req = install_req_from_editable(
454
+ parsed_req.requirement,
455
+ comes_from=parsed_req.comes_from,
456
+ use_pep517=use_pep517,
457
+ constraint=parsed_req.constraint,
458
+ isolated=isolated,
459
+ user_supplied=user_supplied,
460
+ )
461
+
462
+ else:
463
+ req = install_req_from_line(
464
+ parsed_req.requirement,
465
+ comes_from=parsed_req.comes_from,
466
+ use_pep517=use_pep517,
467
+ isolated=isolated,
468
+ options=parsed_req.options,
469
+ constraint=parsed_req.constraint,
470
+ line_source=parsed_req.line_source,
471
+ user_supplied=user_supplied,
472
+ )
473
+ return req
474
+
475
+
476
+ def install_req_from_link_and_ireq(
477
+ link: Link, ireq: InstallRequirement
478
+ ) -> InstallRequirement:
479
+ return InstallRequirement(
480
+ req=ireq.req,
481
+ comes_from=ireq.comes_from,
482
+ editable=ireq.editable,
483
+ link=link,
484
+ markers=ireq.markers,
485
+ use_pep517=ireq.use_pep517,
486
+ isolated=ireq.isolated,
487
+ install_options=ireq.install_options,
488
+ global_options=ireq.global_options,
489
+ hash_options=ireq.hash_options,
490
+ )
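The constructors in this file are what turn a requirement string into an InstallRequirement. A rough usage sketch, based only on the signatures shown in the diff above; these are pip-internal APIs rather than a supported public interface, so they may differ in other pip versions:

from pip._internal.req.constructors import (
    install_req_from_editable,
    install_req_from_line,
)

# A plain specifier, as it would appear on a command line or in a requirements file.
req = install_req_from_line("requests>=2.28", line_source="example")
print(req.name, req.specifier)

# An editable VCS requirement; parse_editable() requires the #egg= fragment here.
editable = install_req_from_editable(
    "git+https://github.com/pypa/sampleproject#egg=sampleproject"
)
print(editable.editable, editable.link)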
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_file.py ADDED
@@ -0,0 +1,536 @@
1
+ """
2
+ Requirements file parsing
3
+ """
4
+
5
+ import optparse
6
+ import os
7
+ import re
8
+ import shlex
9
+ import urllib.parse
10
+ from optparse import Values
11
+ from typing import (
12
+ TYPE_CHECKING,
13
+ Any,
14
+ Callable,
15
+ Dict,
16
+ Iterable,
17
+ Iterator,
18
+ List,
19
+ Optional,
20
+ Tuple,
21
+ )
22
+
23
+ from pip._internal.cli import cmdoptions
24
+ from pip._internal.exceptions import InstallationError, RequirementsFileParseError
25
+ from pip._internal.models.search_scope import SearchScope
26
+ from pip._internal.network.session import PipSession
27
+ from pip._internal.network.utils import raise_for_status
28
+ from pip._internal.utils.encoding import auto_decode
29
+ from pip._internal.utils.urls import get_url_scheme
30
+
31
+ if TYPE_CHECKING:
32
+ # NoReturn introduced in 3.6.2; imported only for type checking to maintain
33
+ # pip compatibility with older patch versions of Python 3.6
34
+ from typing import NoReturn
35
+
36
+ from pip._internal.index.package_finder import PackageFinder
37
+
38
+ __all__ = ["parse_requirements"]
39
+
40
+ ReqFileLines = Iterable[Tuple[int, str]]
41
+
42
+ LineParser = Callable[[str], Tuple[str, Values]]
43
+
44
+ SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
45
+ COMMENT_RE = re.compile(r"(^|\s+)#.*$")
46
+
47
+ # Matches environment variable-style values in '${MY_VARIABLE_1}' with the
48
+ # variable name consisting of only uppercase letters, digits or the '_'
49
+ # (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
50
+ # 2013 Edition.
51
+ ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
52
+
53
+ SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
54
+ cmdoptions.index_url,
55
+ cmdoptions.extra_index_url,
56
+ cmdoptions.no_index,
57
+ cmdoptions.constraints,
58
+ cmdoptions.requirements,
59
+ cmdoptions.editable,
60
+ cmdoptions.find_links,
61
+ cmdoptions.no_binary,
62
+ cmdoptions.only_binary,
63
+ cmdoptions.prefer_binary,
64
+ cmdoptions.require_hashes,
65
+ cmdoptions.pre,
66
+ cmdoptions.trusted_host,
67
+ cmdoptions.use_new_feature,
68
+ ]
69
+
70
+ # options to be passed to requirements
71
+ SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
72
+ cmdoptions.install_options,
73
+ cmdoptions.global_options,
74
+ cmdoptions.hash,
75
+ ]
76
+
77
+ # the 'dest' string values
78
+ SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
79
+
80
+
81
+ class ParsedRequirement:
82
+ def __init__(
83
+ self,
84
+ requirement: str,
85
+ is_editable: bool,
86
+ comes_from: str,
87
+ constraint: bool,
88
+ options: Optional[Dict[str, Any]] = None,
89
+ line_source: Optional[str] = None,
90
+ ) -> None:
91
+ self.requirement = requirement
92
+ self.is_editable = is_editable
93
+ self.comes_from = comes_from
94
+ self.options = options
95
+ self.constraint = constraint
96
+ self.line_source = line_source
97
+
98
+
99
+ class ParsedLine:
100
+ def __init__(
101
+ self,
102
+ filename: str,
103
+ lineno: int,
104
+ args: str,
105
+ opts: Values,
106
+ constraint: bool,
107
+ ) -> None:
108
+ self.filename = filename
109
+ self.lineno = lineno
110
+ self.opts = opts
111
+ self.constraint = constraint
112
+
113
+ if args:
114
+ self.is_requirement = True
115
+ self.is_editable = False
116
+ self.requirement = args
117
+ elif opts.editables:
118
+ self.is_requirement = True
119
+ self.is_editable = True
120
+ # We don't support multiple -e on one line
121
+ self.requirement = opts.editables[0]
122
+ else:
123
+ self.is_requirement = False
124
+
125
+
126
+ def parse_requirements(
127
+ filename: str,
128
+ session: PipSession,
129
+ finder: Optional["PackageFinder"] = None,
130
+ options: Optional[optparse.Values] = None,
131
+ constraint: bool = False,
132
+ ) -> Iterator[ParsedRequirement]:
133
+ """Parse a requirements file and yield ParsedRequirement instances.
134
+
135
+ :param filename: Path or url of requirements file.
136
+ :param session: PipSession instance.
137
+ :param finder: Instance of pip.index.PackageFinder.
138
+ :param options: cli options.
139
+ :param constraint: If true, parsing a constraint file rather than
140
+ requirements file.
141
+ """
142
+ line_parser = get_line_parser(finder)
143
+ parser = RequirementsFileParser(session, line_parser)
144
+
145
+ for parsed_line in parser.parse(filename, constraint):
146
+ parsed_req = handle_line(
147
+ parsed_line, options=options, finder=finder, session=session
148
+ )
149
+ if parsed_req is not None:
150
+ yield parsed_req
151
+
152
+
153
+ def preprocess(content: str) -> ReqFileLines:
154
+ """Split, filter, and join lines, and return a line iterator
155
+
156
+ :param content: the content of the requirements file
157
+ """
158
+ lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1)
159
+ lines_enum = join_lines(lines_enum)
160
+ lines_enum = ignore_comments(lines_enum)
161
+ lines_enum = expand_env_variables(lines_enum)
162
+ return lines_enum
163
+
164
+
165
+ def handle_requirement_line(
166
+ line: ParsedLine,
167
+ options: Optional[optparse.Values] = None,
168
+ ) -> ParsedRequirement:
169
+
170
+ # preserve for the nested code path
171
+ line_comes_from = "{} {} (line {})".format(
172
+ "-c" if line.constraint else "-r",
173
+ line.filename,
174
+ line.lineno,
175
+ )
176
+
177
+ assert line.is_requirement
178
+
179
+ if line.is_editable:
180
+ # For editable requirements, we don't support per-requirement
181
+ # options, so just return the parsed requirement.
182
+ return ParsedRequirement(
183
+ requirement=line.requirement,
184
+ is_editable=line.is_editable,
185
+ comes_from=line_comes_from,
186
+ constraint=line.constraint,
187
+ )
188
+ else:
189
+ if options:
190
+ # Disable wheels if the user has specified build options
191
+ cmdoptions.check_install_build_global(options, line.opts)
192
+
193
+ # get the options that apply to requirements
194
+ req_options = {}
195
+ for dest in SUPPORTED_OPTIONS_REQ_DEST:
196
+ if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
197
+ req_options[dest] = line.opts.__dict__[dest]
198
+
199
+ line_source = f"line {line.lineno} of {line.filename}"
200
+ return ParsedRequirement(
201
+ requirement=line.requirement,
202
+ is_editable=line.is_editable,
203
+ comes_from=line_comes_from,
204
+ constraint=line.constraint,
205
+ options=req_options,
206
+ line_source=line_source,
207
+ )
208
+
209
+
210
+ def handle_option_line(
211
+ opts: Values,
212
+ filename: str,
213
+ lineno: int,
214
+ finder: Optional["PackageFinder"] = None,
215
+ options: Optional[optparse.Values] = None,
216
+ session: Optional[PipSession] = None,
217
+ ) -> None:
218
+
219
+ if options:
220
+ # percolate options upward
221
+ if opts.require_hashes:
222
+ options.require_hashes = opts.require_hashes
223
+ if opts.features_enabled:
224
+ options.features_enabled.extend(
225
+ f for f in opts.features_enabled if f not in options.features_enabled
226
+ )
227
+
228
+ # set finder options
229
+ if finder:
230
+ find_links = finder.find_links
231
+ index_urls = finder.index_urls
232
+ if opts.index_url:
233
+ index_urls = [opts.index_url]
234
+ if opts.no_index is True:
235
+ index_urls = []
236
+ if opts.extra_index_urls:
237
+ index_urls.extend(opts.extra_index_urls)
238
+ if opts.find_links:
239
+ # FIXME: it would be nice to keep track of the source
240
+ # of the find_links: support a find-links local path
241
+ # relative to a requirements file.
242
+ value = opts.find_links[0]
243
+ req_dir = os.path.dirname(os.path.abspath(filename))
244
+ relative_to_reqs_file = os.path.join(req_dir, value)
245
+ if os.path.exists(relative_to_reqs_file):
246
+ value = relative_to_reqs_file
247
+ find_links.append(value)
248
+
249
+ if session:
250
+ # We need to update the auth urls in session
251
+ session.update_index_urls(index_urls)
252
+
253
+ search_scope = SearchScope(
254
+ find_links=find_links,
255
+ index_urls=index_urls,
256
+ )
257
+ finder.search_scope = search_scope
258
+
259
+ if opts.pre:
260
+ finder.set_allow_all_prereleases()
261
+
262
+ if opts.prefer_binary:
263
+ finder.set_prefer_binary()
264
+
265
+ if session:
266
+ for host in opts.trusted_hosts or []:
267
+ source = f"line {lineno} of {filename}"
268
+ session.add_trusted_host(host, source=source)
269
+
270
+
271
+ def handle_line(
272
+ line: ParsedLine,
273
+ options: Optional[optparse.Values] = None,
274
+ finder: Optional["PackageFinder"] = None,
275
+ session: Optional[PipSession] = None,
276
+ ) -> Optional[ParsedRequirement]:
277
+ """Handle a single parsed requirements line; This can result in
278
+ creating/yielding requirements, or updating the finder.
279
+
280
+ :param line: The parsed line to be processed.
281
+ :param options: CLI options.
282
+ :param finder: The finder - updated by non-requirement lines.
283
+ :param session: The session - updated by non-requirement lines.
284
+
285
+ Returns a ParsedRequirement object if the line is a requirement line,
286
+ otherwise returns None.
287
+
288
+ For lines that contain requirements, the only options that have an effect
289
+ are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
290
+ requirement. Other options from SUPPORTED_OPTIONS may be present, but are
291
+ ignored.
292
+
293
+ For lines that do not contain requirements, the only options that have an
294
+ effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
295
+ be present, but are ignored. These lines may contain multiple options
296
+ (although our docs imply only one is supported), and all our parsed and
297
+ affect the finder.
298
+ """
299
+
300
+ if line.is_requirement:
301
+ parsed_req = handle_requirement_line(line, options)
302
+ return parsed_req
303
+ else:
304
+ handle_option_line(
305
+ line.opts,
306
+ line.filename,
307
+ line.lineno,
308
+ finder,
309
+ options,
310
+ session,
311
+ )
312
+ return None
313
+
314
+
315
+ class RequirementsFileParser:
316
+ def __init__(
317
+ self,
318
+ session: PipSession,
319
+ line_parser: LineParser,
320
+ ) -> None:
321
+ self._session = session
322
+ self._line_parser = line_parser
323
+
324
+ def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
325
+ """Parse a given file, yielding parsed lines."""
326
+ yield from self._parse_and_recurse(filename, constraint)
327
+
328
+ def _parse_and_recurse(
329
+ self, filename: str, constraint: bool
330
+ ) -> Iterator[ParsedLine]:
331
+ for line in self._parse_file(filename, constraint):
332
+ if not line.is_requirement and (
333
+ line.opts.requirements or line.opts.constraints
334
+ ):
335
+ # parse a nested requirements file
336
+ if line.opts.requirements:
337
+ req_path = line.opts.requirements[0]
338
+ nested_constraint = False
339
+ else:
340
+ req_path = line.opts.constraints[0]
341
+ nested_constraint = True
342
+
343
+ # original file is over http
344
+ if SCHEME_RE.search(filename):
345
+ # do a url join so relative paths work
346
+ req_path = urllib.parse.urljoin(filename, req_path)
347
+ # original file and nested file are paths
348
+ elif not SCHEME_RE.search(req_path):
349
+ # do a join so relative paths work
350
+ req_path = os.path.join(
351
+ os.path.dirname(filename),
352
+ req_path,
353
+ )
354
+
355
+ yield from self._parse_and_recurse(req_path, nested_constraint)
356
+ else:
357
+ yield line
358
+
359
+ def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
360
+ _, content = get_file_content(filename, self._session)
361
+
362
+ lines_enum = preprocess(content)
363
+
364
+ for line_number, line in lines_enum:
365
+ try:
366
+ args_str, opts = self._line_parser(line)
367
+ except OptionParsingError as e:
368
+ # add offending line
369
+ msg = f"Invalid requirement: {line}\n{e.msg}"
370
+ raise RequirementsFileParseError(msg)
371
+
372
+ yield ParsedLine(
373
+ filename,
374
+ line_number,
375
+ args_str,
376
+ opts,
377
+ constraint,
378
+ )
379
+
380
+
381
+ def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
382
+ def parse_line(line: str) -> Tuple[str, Values]:
383
+ # Build new parser for each line since it accumulates appendable
384
+ # options.
385
+ parser = build_parser()
386
+ defaults = parser.get_default_values()
387
+ defaults.index_url = None
388
+ if finder:
389
+ defaults.format_control = finder.format_control
390
+
391
+ args_str, options_str = break_args_options(line)
392
+
393
+ opts, _ = parser.parse_args(shlex.split(options_str), defaults)
394
+
395
+ return args_str, opts
396
+
397
+ return parse_line
398
+
399
+
400
+ def break_args_options(line: str) -> Tuple[str, str]:
401
+ """Break up the line into an args and options string. We only want to shlex
402
+ (and then optparse) the options, not the args. args can contain markers
403
+ which are corrupted by shlex.
404
+ """
405
+ tokens = line.split(" ")
406
+ args = []
407
+ options = tokens[:]
408
+ for token in tokens:
409
+ if token.startswith("-") or token.startswith("--"):
410
+ break
411
+ else:
412
+ args.append(token)
413
+ options.pop(0)
414
+ return " ".join(args), " ".join(options)
415
+
416
+
417
+ class OptionParsingError(Exception):
418
+ def __init__(self, msg: str) -> None:
419
+ self.msg = msg
420
+
421
+
422
+ def build_parser() -> optparse.OptionParser:
423
+ """
424
+ Return a parser for parsing requirement lines
425
+ """
426
+ parser = optparse.OptionParser(add_help_option=False)
427
+
428
+ option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
429
+ for option_factory in option_factories:
430
+ option = option_factory()
431
+ parser.add_option(option)
432
+
433
+ # By default optparse sys.exits on parsing errors. We want to wrap
434
+ # that in our own exception.
435
+ def parser_exit(self: Any, msg: str) -> "NoReturn":
436
+ raise OptionParsingError(msg)
437
+
438
+ # NOTE: mypy disallows assigning to a method
439
+ # https://github.com/python/mypy/issues/2427
440
+ parser.exit = parser_exit # type: ignore
441
+
442
+ return parser
443
+
444
+
445
+ def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
446
+ """Joins a line ending in '\' with the previous line (except when following
447
+ comments). The joined line takes on the index of the first line.
448
+ """
449
+ primary_line_number = None
450
+ new_line: List[str] = []
451
+ for line_number, line in lines_enum:
452
+ if not line.endswith("\\") or COMMENT_RE.match(line):
453
+ if COMMENT_RE.match(line):
454
+ # this ensures comments are always matched later
455
+ line = " " + line
456
+ if new_line:
457
+ new_line.append(line)
458
+ assert primary_line_number is not None
459
+ yield primary_line_number, "".join(new_line)
460
+ new_line = []
461
+ else:
462
+ yield line_number, line
463
+ else:
464
+ if not new_line:
465
+ primary_line_number = line_number
466
+ new_line.append(line.strip("\\"))
467
+
468
+ # last line contains \
469
+ if new_line:
470
+ assert primary_line_number is not None
471
+ yield primary_line_number, "".join(new_line)
472
+
473
+ # TODO: handle space after '\'.
474
+
475
+
476
+ def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
477
+ """
478
+ Strips comments and filter empty lines.
479
+ """
480
+ for line_number, line in lines_enum:
481
+ line = COMMENT_RE.sub("", line)
482
+ line = line.strip()
483
+ if line:
484
+ yield line_number, line
485
+
486
+
487
+ def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
488
+ """Replace all environment variables that can be retrieved via `os.getenv`.
489
+
490
+ The only allowed format for environment variables defined in the
491
+ requirement file is `${MY_VARIABLE_1}` to ensure two things:
492
+
493
+ 1. Strings that contain a `$` aren't accidentally (partially) expanded.
494
+ 2. Ensure consistency across platforms for requirement files.
495
+
496
+ These points are the result of a discussion on the `github pull
497
+ request #3514 <https://github.com/pypa/pip/pull/3514>`_.
498
+
499
+ Valid characters in variable names follow the `POSIX standard
500
+ <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
501
+ to uppercase letter, digits and the `_` (underscore).
502
+ """
503
+ for line_number, line in lines_enum:
504
+ for env_var, var_name in ENV_VAR_RE.findall(line):
505
+ value = os.getenv(var_name)
506
+ if not value:
507
+ continue
508
+
509
+ line = line.replace(env_var, value)
510
+
511
+ yield line_number, line
512
+
513
+
514
+ def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
515
+ """Gets the content of a file; it may be a filename, file: URL, or
516
+ http: URL. Returns (location, content). Content is unicode.
517
+ Respects # -*- coding: declarations on the retrieved files.
518
+
519
+ :param url: File path or url.
520
+ :param session: PipSession instance.
521
+ """
522
+ scheme = get_url_scheme(url)
523
+
524
+ # Pip has special support for file:// URLs (LocalFSAdapter).
525
+ if scheme in ["http", "https", "file"]:
526
+ resp = session.get(url)
527
+ raise_for_status(resp)
528
+ return resp.url, resp.text
529
+
530
+ # Assume this is a bare path.
531
+ try:
532
+ with open(url, "rb") as f:
533
+ content = auto_decode(f.read())
534
+ except OSError as exc:
535
+ raise InstallationError(f"Could not open requirements file: {exc}")
536
+ return url, content
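parse_requirements() yields ParsedRequirement objects rather than ready-to-install requirements; install_req_from_parsed_requirement() from constructors.py above is what bridges the two. A hedged sketch of that pairing, again relying on pip-internal APIs that can change between releases and assuming a requirements.txt exists in the working directory:

from pip._internal.network.session import PipSession
from pip._internal.req.constructors import install_req_from_parsed_requirement
from pip._internal.req.req_file import parse_requirements

session = PipSession()
for parsed in parse_requirements("requirements.txt", session=session):
    # parsed.options carries per-requirement flags such as --hash values.
    ireq = install_req_from_parsed_requirement(parsed)
    print(parsed.comes_from, "->", ireq)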
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_install.py ADDED
@@ -0,0 +1,858 @@
1
+ # The following comment should be removed at some point in the future.
2
+ # mypy: strict-optional=False
3
+
4
+ import functools
5
+ import logging
6
+ import os
7
+ import shutil
8
+ import sys
9
+ import uuid
10
+ import zipfile
11
+ from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
12
+
13
+ from pip._vendor.packaging.markers import Marker
14
+ from pip._vendor.packaging.requirements import Requirement
15
+ from pip._vendor.packaging.specifiers import SpecifierSet
16
+ from pip._vendor.packaging.utils import canonicalize_name
17
+ from pip._vendor.packaging.version import Version
18
+ from pip._vendor.packaging.version import parse as parse_version
19
+ from pip._vendor.pep517.wrappers import Pep517HookCaller
20
+
21
+ from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
22
+ from pip._internal.exceptions import InstallationError, LegacyInstallFailure
23
+ from pip._internal.locations import get_scheme
24
+ from pip._internal.metadata import (
25
+ BaseDistribution,
26
+ get_default_environment,
27
+ get_directory_distribution,
28
+ )
29
+ from pip._internal.models.link import Link
30
+ from pip._internal.operations.build.metadata import generate_metadata
31
+ from pip._internal.operations.build.metadata_editable import generate_editable_metadata
32
+ from pip._internal.operations.build.metadata_legacy import (
33
+ generate_metadata as generate_metadata_legacy,
34
+ )
35
+ from pip._internal.operations.install.editable_legacy import (
36
+ install_editable as install_editable_legacy,
37
+ )
38
+ from pip._internal.operations.install.legacy import install as install_legacy
39
+ from pip._internal.operations.install.wheel import install_wheel
40
+ from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
41
+ from pip._internal.req.req_uninstall import UninstallPathSet
42
+ from pip._internal.utils.deprecation import deprecated
43
+ from pip._internal.utils.direct_url_helpers import (
44
+ direct_url_for_editable,
45
+ direct_url_from_link,
46
+ )
47
+ from pip._internal.utils.hashes import Hashes
48
+ from pip._internal.utils.misc import (
49
+ ask_path_exists,
50
+ backup_dir,
51
+ display_path,
52
+ hide_url,
53
+ redact_auth_from_url,
54
+ )
55
+ from pip._internal.utils.packaging import safe_extra
56
+ from pip._internal.utils.subprocess import runner_with_spinner_message
57
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
58
+ from pip._internal.utils.virtualenv import running_under_virtualenv
59
+ from pip._internal.vcs import vcs
60
+
61
+ logger = logging.getLogger(__name__)
62
+
63
+
64
+ class InstallRequirement:
65
+ """
66
+ Represents something that may be installed later on, may have information
67
+ about where to fetch the relevant requirement and also contains logic for
68
+ installing the said requirement.
69
+ """
70
+
71
+ def __init__(
72
+ self,
73
+ req: Optional[Requirement],
74
+ comes_from: Optional[Union[str, "InstallRequirement"]],
75
+ editable: bool = False,
76
+ link: Optional[Link] = None,
77
+ markers: Optional[Marker] = None,
78
+ use_pep517: Optional[bool] = None,
79
+ isolated: bool = False,
80
+ install_options: Optional[List[str]] = None,
81
+ global_options: Optional[List[str]] = None,
82
+ hash_options: Optional[Dict[str, List[str]]] = None,
83
+ constraint: bool = False,
84
+ extras: Collection[str] = (),
85
+ user_supplied: bool = False,
86
+ permit_editable_wheels: bool = False,
87
+ ) -> None:
88
+ assert req is None or isinstance(req, Requirement), req
89
+ self.req = req
90
+ self.comes_from = comes_from
91
+ self.constraint = constraint
92
+ self.editable = editable
93
+ self.permit_editable_wheels = permit_editable_wheels
94
+ self.legacy_install_reason: Optional[int] = None
95
+
96
+ # source_dir is the local directory where the linked requirement is
97
+ # located, or unpacked. In case unpacking is needed, creating and
98
+ # populating source_dir is done by the RequirementPreparer. Note this
99
+ # is not necessarily the directory where pyproject.toml or setup.py is
100
+ # located - that one is obtained via unpacked_source_directory.
101
+ self.source_dir: Optional[str] = None
102
+ if self.editable:
103
+ assert link
104
+ if link.is_file:
105
+ self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
106
+
107
+ if link is None and req and req.url:
108
+ # PEP 508 URL requirement
109
+ link = Link(req.url)
110
+ self.link = self.original_link = link
111
+ self.original_link_is_in_wheel_cache = False
112
+
113
+ # Path to any downloaded or already-existing package.
114
+ self.local_file_path: Optional[str] = None
115
+ if self.link and self.link.is_file:
116
+ self.local_file_path = self.link.file_path
117
+
118
+ if extras:
119
+ self.extras = extras
120
+ elif req:
121
+ self.extras = {safe_extra(extra) for extra in req.extras}
122
+ else:
123
+ self.extras = set()
124
+ if markers is None and req:
125
+ markers = req.marker
126
+ self.markers = markers
127
+
128
+ # This holds the Distribution object if this requirement is already installed.
129
+ self.satisfied_by: Optional[BaseDistribution] = None
130
+ # Whether the installation process should try to uninstall an existing
131
+ # distribution before installing this requirement.
132
+ self.should_reinstall = False
133
+ # Temporary build location
134
+ self._temp_build_dir: Optional[TempDirectory] = None
135
+ # Set to True after successful installation
136
+ self.install_succeeded: Optional[bool] = None
137
+ # Supplied options
138
+ self.install_options = install_options if install_options else []
139
+ self.global_options = global_options if global_options else []
140
+ self.hash_options = hash_options if hash_options else {}
141
+ # Set to True after successful preparation of this requirement
142
+ self.prepared = False
143
+ # User supplied requirement are explicitly requested for installation
144
+ # by the user via CLI arguments or requirements files, as opposed to,
145
+ # e.g. dependencies, extras or constraints.
146
+ self.user_supplied = user_supplied
147
+
148
+ self.isolated = isolated
149
+ self.build_env: BuildEnvironment = NoOpBuildEnvironment()
150
+
151
+ # For PEP 517, the directory where we request the project metadata
152
+ # gets stored. We need this to pass to build_wheel, so the backend
153
+ # can ensure that the wheel matches the metadata (see the PEP for
154
+ # details).
155
+ self.metadata_directory: Optional[str] = None
156
+
157
+ # The static build requirements (from pyproject.toml)
158
+ self.pyproject_requires: Optional[List[str]] = None
159
+
160
+ # Build requirements that we will check are available
161
+ self.requirements_to_check: List[str] = []
162
+
163
+ # The PEP 517 backend we should use to build the project
164
+ self.pep517_backend: Optional[Pep517HookCaller] = None
165
+
166
+ # Are we using PEP 517 for this requirement?
167
+ # After pyproject.toml has been loaded, the only valid values are True
168
+ # and False. Before loading, None is valid (meaning "use the default").
169
+ # Setting an explicit value before loading pyproject.toml is supported,
170
+ # but after loading this flag should be treated as read only.
171
+ self.use_pep517 = use_pep517
172
+
173
+ # This requirement needs more preparation before it can be built
174
+ self.needs_more_preparation = False
175
+
176
+ def __str__(self) -> str:
177
+ if self.req:
178
+ s = str(self.req)
179
+ if self.link:
180
+ s += " from {}".format(redact_auth_from_url(self.link.url))
181
+ elif self.link:
182
+ s = redact_auth_from_url(self.link.url)
183
+ else:
184
+ s = "<InstallRequirement>"
185
+ if self.satisfied_by is not None:
186
+ s += " in {}".format(display_path(self.satisfied_by.location))
187
+ if self.comes_from:
188
+ if isinstance(self.comes_from, str):
189
+ comes_from: Optional[str] = self.comes_from
190
+ else:
191
+ comes_from = self.comes_from.from_path()
192
+ if comes_from:
193
+ s += f" (from {comes_from})"
194
+ return s
195
+
196
+ def __repr__(self) -> str:
197
+ return "<{} object: {} editable={!r}>".format(
198
+ self.__class__.__name__, str(self), self.editable
199
+ )
200
+
201
+ def format_debug(self) -> str:
202
+ """An un-tested helper for getting state, for debugging."""
203
+ attributes = vars(self)
204
+ names = sorted(attributes)
205
+
206
+ state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
207
+ return "<{name} object: {{{state}}}>".format(
208
+ name=self.__class__.__name__,
209
+ state=", ".join(state),
210
+ )
211
+
212
+ # Things that are valid for all kinds of requirements?
213
+ @property
214
+ def name(self) -> Optional[str]:
215
+ if self.req is None:
216
+ return None
217
+ return self.req.name
218
+
219
+ @functools.lru_cache() # use cached_property in python 3.8+
220
+ def supports_pyproject_editable(self) -> bool:
221
+ if not self.use_pep517:
222
+ return False
223
+ assert self.pep517_backend
224
+ with self.build_env:
225
+ runner = runner_with_spinner_message(
226
+ "Checking if build backend supports build_editable"
227
+ )
228
+ with self.pep517_backend.subprocess_runner(runner):
229
+ return "build_editable" in self.pep517_backend._supported_features()
230
+
231
+ @property
232
+ def specifier(self) -> SpecifierSet:
233
+ return self.req.specifier
234
+
235
+ @property
236
+ def is_pinned(self) -> bool:
237
+ """Return whether I am pinned to an exact version.
238
+
239
+ For example, some-package==1.2 is pinned; some-package>1.2 is not.
240
+ """
241
+ specifiers = self.specifier
242
+ return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
243
+
244
+ def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
245
+ if not extras_requested:
246
+ # Provide an extra to safely evaluate the markers
247
+ # without matching any extra
248
+ extras_requested = ("",)
249
+ if self.markers is not None:
250
+ return any(
251
+ self.markers.evaluate({"extra": extra}) for extra in extras_requested
252
+ )
253
+ else:
254
+ return True
255
+
256
+ @property
257
+ def has_hash_options(self) -> bool:
258
+ """Return whether any known-good hashes are specified as options.
259
+
260
+ These activate --require-hashes mode; hashes specified as part of a
261
+ URL do not.
262
+
263
+ """
264
+ return bool(self.hash_options)
265
+
266
+ def hashes(self, trust_internet: bool = True) -> Hashes:
267
+ """Return a hash-comparer that considers my option- and URL-based
268
+ hashes to be known-good.
269
+
270
+ Hashes in URLs--ones embedded in the requirements file, not ones
271
+ downloaded from an index server--are almost peers with ones from
272
+ flags. They satisfy --require-hashes (whether it was implicitly or
273
+ explicitly activated) but do not activate it. md5 and sha224 are not
274
+ allowed in flags, which should nudge people toward good algos. We
275
+ always OR all hashes together, even ones from URLs.
276
+
277
+ :param trust_internet: Whether to trust URL-based (#md5=...) hashes
278
+ downloaded from the internet, as by populate_link()
279
+
280
+ """
281
+ good_hashes = self.hash_options.copy()
282
+ link = self.link if trust_internet else self.original_link
283
+ if link and link.hash:
284
+ good_hashes.setdefault(link.hash_name, []).append(link.hash)
285
+ return Hashes(good_hashes)
286
+
287
+ def from_path(self) -> Optional[str]:
288
+ """Format a nice indicator to show where this "comes from" """
289
+ if self.req is None:
290
+ return None
291
+ s = str(self.req)
292
+ if self.comes_from:
293
+ if isinstance(self.comes_from, str):
294
+ comes_from = self.comes_from
295
+ else:
296
+ comes_from = self.comes_from.from_path()
297
+ if comes_from:
298
+ s += "->" + comes_from
299
+ return s
300
+
301
+ def ensure_build_location(
302
+ self, build_dir: str, autodelete: bool, parallel_builds: bool
303
+ ) -> str:
304
+ assert build_dir is not None
305
+ if self._temp_build_dir is not None:
306
+ assert self._temp_build_dir.path
307
+ return self._temp_build_dir.path
308
+ if self.req is None:
309
+ # Some systems have /tmp as a symlink which confuses custom
310
+ # builds (such as numpy). Thus, we ensure that the real path
311
+ # is returned.
312
+ self._temp_build_dir = TempDirectory(
313
+ kind=tempdir_kinds.REQ_BUILD, globally_managed=True
314
+ )
315
+
316
+ return self._temp_build_dir.path
317
+
318
+ # This is the only remaining place where we manually determine the path
319
+ # for the temporary directory. It is only needed for editables where
320
+ # it is the value of the --src option.
321
+
322
+ # When parallel builds are enabled, add a UUID to the build directory
323
+ # name so multiple builds do not interfere with each other.
324
+ dir_name: str = canonicalize_name(self.name)
325
+ if parallel_builds:
326
+ dir_name = f"{dir_name}_{uuid.uuid4().hex}"
327
+
328
+ # FIXME: Is there a better place to create the build_dir? (hg and bzr
329
+ # need this)
330
+ if not os.path.exists(build_dir):
331
+ logger.debug("Creating directory %s", build_dir)
332
+ os.makedirs(build_dir)
333
+ actual_build_dir = os.path.join(build_dir, dir_name)
334
+ # `None` indicates that we respect the globally-configured deletion
335
+ # settings, which is what we actually want when auto-deleting.
336
+ delete_arg = None if autodelete else False
337
+ return TempDirectory(
338
+ path=actual_build_dir,
339
+ delete=delete_arg,
340
+ kind=tempdir_kinds.REQ_BUILD,
341
+ globally_managed=True,
342
+ ).path
343
+
344
+ def _set_requirement(self) -> None:
345
+ """Set requirement after generating metadata."""
346
+ assert self.req is None
347
+ assert self.metadata is not None
348
+ assert self.source_dir is not None
349
+
350
+ # Construct a Requirement object from the generated metadata
351
+ if isinstance(parse_version(self.metadata["Version"]), Version):
352
+ op = "=="
353
+ else:
354
+ op = "==="
355
+
356
+ self.req = Requirement(
357
+ "".join(
358
+ [
359
+ self.metadata["Name"],
360
+ op,
361
+ self.metadata["Version"],
362
+ ]
363
+ )
364
+ )
365
+
366
+ def warn_on_mismatching_name(self) -> None:
367
+ metadata_name = canonicalize_name(self.metadata["Name"])
368
+ if canonicalize_name(self.req.name) == metadata_name:
369
+ # Everything is fine.
370
+ return
371
+
372
+ # If we're here, there's a mismatch. Log a warning about it.
373
+ logger.warning(
374
+ "Generating metadata for package %s "
375
+ "produced metadata for project name %s. Fix your "
376
+ "#egg=%s fragments.",
377
+ self.name,
378
+ metadata_name,
379
+ self.name,
380
+ )
381
+ self.req = Requirement(metadata_name)
382
+
383
+ def check_if_exists(self, use_user_site: bool) -> None:
384
+ """Find an installed distribution that satisfies or conflicts
385
+ with this requirement, and set self.satisfied_by or
386
+ self.should_reinstall appropriately.
387
+ """
388
+ if self.req is None:
389
+ return
390
+ existing_dist = get_default_environment().get_distribution(self.req.name)
391
+ if not existing_dist:
392
+ return
393
+
394
+ version_compatible = self.req.specifier.contains(
395
+ existing_dist.version,
396
+ prereleases=True,
397
+ )
398
+ if not version_compatible:
399
+ self.satisfied_by = None
400
+ if use_user_site:
401
+ if existing_dist.in_usersite:
402
+ self.should_reinstall = True
403
+ elif running_under_virtualenv() and existing_dist.in_site_packages:
404
+ raise InstallationError(
405
+ f"Will not install to the user site because it will "
406
+ f"lack sys.path precedence to {existing_dist.raw_name} "
407
+ f"in {existing_dist.location}"
408
+ )
409
+ else:
410
+ self.should_reinstall = True
411
+ else:
412
+ if self.editable:
413
+ self.should_reinstall = True
414
+ # when installing editables, nothing pre-existing should ever
415
+ # satisfy
416
+ self.satisfied_by = None
417
+ else:
418
+ self.satisfied_by = existing_dist
419
+
420
+ # Things valid for wheels
421
+ @property
422
+ def is_wheel(self) -> bool:
423
+ if not self.link:
424
+ return False
425
+ return self.link.is_wheel
426
+
427
+ # Things valid for sdists
428
+ @property
429
+ def unpacked_source_directory(self) -> str:
430
+ return os.path.join(
431
+ self.source_dir, self.link and self.link.subdirectory_fragment or ""
432
+ )
433
+
434
+ @property
435
+ def setup_py_path(self) -> str:
436
+ assert self.source_dir, f"No source dir for {self}"
437
+ setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
438
+
439
+ return setup_py
440
+
441
+ @property
442
+ def setup_cfg_path(self) -> str:
443
+ assert self.source_dir, f"No source dir for {self}"
444
+ setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
445
+
446
+ return setup_cfg
447
+
448
+ @property
449
+ def pyproject_toml_path(self) -> str:
450
+ assert self.source_dir, f"No source dir for {self}"
451
+ return make_pyproject_path(self.unpacked_source_directory)
452
+
453
+ def load_pyproject_toml(self) -> None:
454
+ """Load the pyproject.toml file.
455
+
456
+ After calling this routine, all of the attributes related to PEP 517
457
+ processing for this requirement have been set. In particular, the
458
+ use_pep517 attribute can be used to determine whether we should
459
+ follow the PEP 517 or legacy (setup.py) code path.
460
+ """
461
+ pyproject_toml_data = load_pyproject_toml(
462
+ self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
463
+ )
464
+
465
+ if pyproject_toml_data is None:
466
+ self.use_pep517 = False
467
+ return
468
+
469
+ self.use_pep517 = True
470
+ requires, backend, check, backend_path = pyproject_toml_data
471
+ self.requirements_to_check = check
472
+ self.pyproject_requires = requires
473
+ self.pep517_backend = Pep517HookCaller(
474
+ self.unpacked_source_directory,
475
+ backend,
476
+ backend_path=backend_path,
477
+ )
478
+
479
+ def isolated_editable_sanity_check(self) -> None:
480
+ """Check that an editable requirement is valid for use with PEP 517/518.
481
+
482
+ This verifies that an editable that has a pyproject.toml either supports PEP 660
483
+ or has a setup.py or a setup.cfg.
484
+ """
485
+ if (
486
+ self.editable
487
+ and self.use_pep517
488
+ and not self.supports_pyproject_editable()
489
+ and not os.path.isfile(self.setup_py_path)
490
+ and not os.path.isfile(self.setup_cfg_path)
491
+ ):
492
+ raise InstallationError(
493
+ f"Project {self} has a 'pyproject.toml' and its build "
494
+ f"backend is missing the 'build_editable' hook. Since it does not "
495
+ f"have a 'setup.py' nor a 'setup.cfg', "
496
+ f"it cannot be installed in editable mode. "
497
+ f"Consider using a build backend that supports PEP 660."
498
+ )
499
+
500
+ def prepare_metadata(self) -> None:
501
+ """Ensure that project metadata is available.
502
+
503
+ Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
504
+ Under legacy processing, call setup.py egg-info.
505
+ """
506
+ assert self.source_dir
507
+ details = self.name or f"from {self.link}"
508
+
509
+ if self.use_pep517:
510
+ assert self.pep517_backend is not None
511
+ if (
512
+ self.editable
513
+ and self.permit_editable_wheels
514
+ and self.supports_pyproject_editable()
515
+ ):
516
+ self.metadata_directory = generate_editable_metadata(
517
+ build_env=self.build_env,
518
+ backend=self.pep517_backend,
519
+ details=details,
520
+ )
521
+ else:
522
+ self.metadata_directory = generate_metadata(
523
+ build_env=self.build_env,
524
+ backend=self.pep517_backend,
525
+ details=details,
526
+ )
527
+ else:
528
+ self.metadata_directory = generate_metadata_legacy(
529
+ build_env=self.build_env,
530
+ setup_py_path=self.setup_py_path,
531
+ source_dir=self.unpacked_source_directory,
532
+ isolated=self.isolated,
533
+ details=details,
534
+ )
535
+
536
+ # Act on the newly generated metadata, based on the name and version.
537
+ if not self.name:
538
+ self._set_requirement()
539
+ else:
540
+ self.warn_on_mismatching_name()
541
+
542
+ self.assert_source_matches_version()
543
+
544
+ @property
545
+ def metadata(self) -> Any:
546
+ if not hasattr(self, "_metadata"):
547
+ self._metadata = self.get_dist().metadata
548
+
549
+ return self._metadata
550
+
551
+ def get_dist(self) -> BaseDistribution:
552
+ return get_directory_distribution(self.metadata_directory)
553
+
554
+ def assert_source_matches_version(self) -> None:
555
+ assert self.source_dir
556
+ version = self.metadata["version"]
557
+ if self.req.specifier and version not in self.req.specifier:
558
+ logger.warning(
559
+ "Requested %s, but installing version %s",
560
+ self,
561
+ version,
562
+ )
563
+ else:
564
+ logger.debug(
565
+ "Source in %s has version %s, which satisfies requirement %s",
566
+ display_path(self.source_dir),
567
+ version,
568
+ self,
569
+ )
570
+
571
+ # For both source distributions and editables
572
+ def ensure_has_source_dir(
573
+ self,
574
+ parent_dir: str,
575
+ autodelete: bool = False,
576
+ parallel_builds: bool = False,
577
+ ) -> None:
578
+ """Ensure that a source_dir is set.
579
+
580
+ This will create a temporary build dir if the name of the requirement
581
+ isn't known yet.
582
+
583
+ :param parent_dir: The ideal pip parent_dir for the source_dir.
584
+ Generally src_dir for editables and build_dir for sdists.
585
+ :return: None; ``self.source_dir`` is set as a side effect.
586
+ """
587
+ if self.source_dir is None:
588
+ self.source_dir = self.ensure_build_location(
589
+ parent_dir,
590
+ autodelete=autodelete,
591
+ parallel_builds=parallel_builds,
592
+ )
593
+
594
+ # For editable installations
595
+ def update_editable(self) -> None:
596
+ if not self.link:
597
+ logger.debug(
598
+ "Cannot update repository at %s; repository location is unknown",
599
+ self.source_dir,
600
+ )
601
+ return
602
+ assert self.editable
603
+ assert self.source_dir
604
+ if self.link.scheme == "file":
605
+ # Static paths don't get updated
606
+ return
607
+ vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
608
+ # Editable requirements are validated in Requirement constructors.
609
+ # So here, if it's neither a path nor a valid VCS URL, it's a bug.
610
+ assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
611
+ hidden_url = hide_url(self.link.url)
612
+ vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
613
+
614
+ # Top-level Actions
615
+ def uninstall(
616
+ self, auto_confirm: bool = False, verbose: bool = False
617
+ ) -> Optional[UninstallPathSet]:
618
+ """
619
+ Uninstall the distribution currently satisfying this requirement.
620
+
621
+ Prompts before removing or modifying files unless
622
+ ``auto_confirm`` is True.
623
+
624
+ Refuses to delete or modify files outside of ``sys.prefix`` -
625
+ thus uninstallation within a virtual environment can only
626
+ modify that virtual environment, even if the virtualenv is
627
+ linked to global site-packages.
628
+
629
+ """
630
+ assert self.req
631
+ dist = get_default_environment().get_distribution(self.req.name)
632
+ if not dist:
633
+ logger.warning("Skipping %s as it is not installed.", self.name)
634
+ return None
635
+ logger.info("Found existing installation: %s", dist)
636
+
637
+ uninstalled_pathset = UninstallPathSet.from_dist(dist)
638
+ uninstalled_pathset.remove(auto_confirm, verbose)
639
+ return uninstalled_pathset
640
+
641
+ def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
642
+ def _clean_zip_name(name: str, prefix: str) -> str:
643
+ assert name.startswith(
644
+ prefix + os.path.sep
645
+ ), f"name {name!r} doesn't start with prefix {prefix!r}"
646
+ name = name[len(prefix) + 1 :]
647
+ name = name.replace(os.path.sep, "/")
648
+ return name
649
+
650
+ path = os.path.join(parentdir, path)
651
+ name = _clean_zip_name(path, rootdir)
652
+ return self.name + "/" + name
653
+
654
+ def archive(self, build_dir: Optional[str]) -> None:
655
+ """Saves archive to provided build_dir.
656
+
657
+ Used for saving downloaded VCS requirements as part of `pip download`.
658
+ """
659
+ assert self.source_dir
660
+ if build_dir is None:
661
+ return
662
+
663
+ create_archive = True
664
+ archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
665
+ archive_path = os.path.join(build_dir, archive_name)
666
+
667
+ if os.path.exists(archive_path):
668
+ response = ask_path_exists(
669
+ "The file {} exists. (i)gnore, (w)ipe, "
670
+ "(b)ackup, (a)bort ".format(display_path(archive_path)),
671
+ ("i", "w", "b", "a"),
672
+ )
673
+ if response == "i":
674
+ create_archive = False
675
+ elif response == "w":
676
+ logger.warning("Deleting %s", display_path(archive_path))
677
+ os.remove(archive_path)
678
+ elif response == "b":
679
+ dest_file = backup_dir(archive_path)
680
+ logger.warning(
681
+ "Backing up %s to %s",
682
+ display_path(archive_path),
683
+ display_path(dest_file),
684
+ )
685
+ shutil.move(archive_path, dest_file)
686
+ elif response == "a":
687
+ sys.exit(-1)
688
+
689
+ if not create_archive:
690
+ return
691
+
692
+ zip_output = zipfile.ZipFile(
693
+ archive_path,
694
+ "w",
695
+ zipfile.ZIP_DEFLATED,
696
+ allowZip64=True,
697
+ )
698
+ with zip_output:
699
+ dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
700
+ for dirpath, dirnames, filenames in os.walk(dir):
701
+ for dirname in dirnames:
702
+ dir_arcname = self._get_archive_name(
703
+ dirname,
704
+ parentdir=dirpath,
705
+ rootdir=dir,
706
+ )
707
+ zipdir = zipfile.ZipInfo(dir_arcname + "/")
708
+ zipdir.external_attr = 0x1ED << 16 # 0o755
709
+ zip_output.writestr(zipdir, "")
710
+ for filename in filenames:
711
+ file_arcname = self._get_archive_name(
712
+ filename,
713
+ parentdir=dirpath,
714
+ rootdir=dir,
715
+ )
716
+ filename = os.path.join(dirpath, filename)
717
+ zip_output.write(filename, file_arcname)
718
+
719
+ logger.info("Saved %s", display_path(archive_path))
720
+
721
+ def install(
722
+ self,
723
+ install_options: List[str],
724
+ global_options: Optional[Sequence[str]] = None,
725
+ root: Optional[str] = None,
726
+ home: Optional[str] = None,
727
+ prefix: Optional[str] = None,
728
+ warn_script_location: bool = True,
729
+ use_user_site: bool = False,
730
+ pycompile: bool = True,
731
+ ) -> None:
732
+ scheme = get_scheme(
733
+ self.name,
734
+ user=use_user_site,
735
+ home=home,
736
+ root=root,
737
+ isolated=self.isolated,
738
+ prefix=prefix,
739
+ )
740
+
741
+ global_options = global_options if global_options is not None else []
742
+ if self.editable and not self.is_wheel:
743
+ install_editable_legacy(
744
+ install_options,
745
+ global_options,
746
+ prefix=prefix,
747
+ home=home,
748
+ use_user_site=use_user_site,
749
+ name=self.name,
750
+ setup_py_path=self.setup_py_path,
751
+ isolated=self.isolated,
752
+ build_env=self.build_env,
753
+ unpacked_source_directory=self.unpacked_source_directory,
754
+ )
755
+ self.install_succeeded = True
756
+ return
757
+
758
+ if self.is_wheel:
759
+ assert self.local_file_path
760
+ direct_url = None
761
+ if self.editable:
762
+ direct_url = direct_url_for_editable(self.unpacked_source_directory)
763
+ elif self.original_link:
764
+ direct_url = direct_url_from_link(
765
+ self.original_link,
766
+ self.source_dir,
767
+ self.original_link_is_in_wheel_cache,
768
+ )
769
+ install_wheel(
770
+ self.name,
771
+ self.local_file_path,
772
+ scheme=scheme,
773
+ req_description=str(self.req),
774
+ pycompile=pycompile,
775
+ warn_script_location=warn_script_location,
776
+ direct_url=direct_url,
777
+ requested=self.user_supplied,
778
+ )
779
+ self.install_succeeded = True
780
+ return
781
+
782
+ # TODO: Why don't we do this for editable installs?
783
+
784
+ # Extend the list of global and install options passed on to
785
+ # the setup.py call with the ones from the requirements file.
786
+ # Options specified in requirements file override those
787
+ # specified on the command line, since the last option given
788
+ # to setup.py is the one that is used.
789
+ global_options = list(global_options) + self.global_options
790
+ install_options = list(install_options) + self.install_options
791
+
792
+ try:
793
+ success = install_legacy(
794
+ install_options=install_options,
795
+ global_options=global_options,
796
+ root=root,
797
+ home=home,
798
+ prefix=prefix,
799
+ use_user_site=use_user_site,
800
+ pycompile=pycompile,
801
+ scheme=scheme,
802
+ setup_py_path=self.setup_py_path,
803
+ isolated=self.isolated,
804
+ req_name=self.name,
805
+ build_env=self.build_env,
806
+ unpacked_source_directory=self.unpacked_source_directory,
807
+ req_description=str(self.req),
808
+ )
809
+ except LegacyInstallFailure as exc:
810
+ self.install_succeeded = False
811
+ raise exc
812
+ except Exception:
813
+ self.install_succeeded = True
814
+ raise
815
+
816
+ self.install_succeeded = success
817
+
818
+ if success and self.legacy_install_reason == 8368:
819
+ deprecated(
820
+ reason=(
821
+ "{} was installed using the legacy 'setup.py install' "
822
+ "method, because a wheel could not be built for it.".format(
823
+ self.name
824
+ )
825
+ ),
826
+ replacement="to fix the wheel build issue reported above",
827
+ gone_in=None,
828
+ issue=8368,
829
+ )
830
+
831
+
832
+ def check_invalid_constraint_type(req: InstallRequirement) -> str:
833
+
834
+ # Check for unsupported forms
835
+ problem = ""
836
+ if not req.name:
837
+ problem = "Unnamed requirements are not allowed as constraints"
838
+ elif req.editable:
839
+ problem = "Editable requirements are not allowed as constraints"
840
+ elif req.extras:
841
+ problem = "Constraints cannot have extras"
842
+
843
+ if problem:
844
+ deprecated(
845
+ reason=(
846
+ "Constraints are only allowed to take the form of a package "
847
+ "name and a version specifier. Other forms were originally "
848
+ "permitted as an accident of the implementation, but were "
849
+ "undocumented. The new implementation of the resolver no "
850
+ "longer supports these forms."
851
+ ),
852
+ replacement="replacing the constraint with a requirement",
853
+ # No plan yet for when the new resolver becomes default
854
+ gone_in=None,
855
+ issue=8210,
856
+ )
857
+
858
+ return problem
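The constraint screening above is easiest to see end to end. A minimal sketch, assuming pip's internal `install_req_from_line` constructor (these are internal APIs, not a stable interface; the package name and version are purely illustrative):

    from pip._internal.req.constructors import install_req_from_line
    from pip._internal.req.req_install import check_invalid_constraint_type

    # A constraint line carrying extras is one of the rejected forms.
    req = install_req_from_line("requests[socks]==2.31.0")
    problem = check_invalid_constraint_type(req)
    if problem:
        print(problem)  # "Constraints cannot have extras"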
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_set.py ADDED
@@ -0,0 +1,189 @@
1
+ import logging
2
+ from collections import OrderedDict
3
+ from typing import Dict, Iterable, List, Optional, Tuple
4
+
5
+ from pip._vendor.packaging.utils import canonicalize_name
6
+
7
+ from pip._internal.exceptions import InstallationError
8
+ from pip._internal.models.wheel import Wheel
9
+ from pip._internal.req.req_install import InstallRequirement
10
+ from pip._internal.utils import compatibility_tags
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
+ class RequirementSet:
16
+ def __init__(self, check_supported_wheels: bool = True) -> None:
17
+ """Create a RequirementSet."""
18
+
19
+ self.requirements: Dict[str, InstallRequirement] = OrderedDict()
20
+ self.check_supported_wheels = check_supported_wheels
21
+
22
+ self.unnamed_requirements: List[InstallRequirement] = []
23
+
24
+ def __str__(self) -> str:
25
+ requirements = sorted(
26
+ (req for req in self.requirements.values() if not req.comes_from),
27
+ key=lambda req: canonicalize_name(req.name or ""),
28
+ )
29
+ return " ".join(str(req.req) for req in requirements)
30
+
31
+ def __repr__(self) -> str:
32
+ requirements = sorted(
33
+ self.requirements.values(),
34
+ key=lambda req: canonicalize_name(req.name or ""),
35
+ )
36
+
37
+ format_string = "<{classname} object; {count} requirement(s): {reqs}>"
38
+ return format_string.format(
39
+ classname=self.__class__.__name__,
40
+ count=len(requirements),
41
+ reqs=", ".join(str(req.req) for req in requirements),
42
+ )
43
+
44
+ def add_unnamed_requirement(self, install_req: InstallRequirement) -> None:
45
+ assert not install_req.name
46
+ self.unnamed_requirements.append(install_req)
47
+
48
+ def add_named_requirement(self, install_req: InstallRequirement) -> None:
49
+ assert install_req.name
50
+
51
+ project_name = canonicalize_name(install_req.name)
52
+ self.requirements[project_name] = install_req
53
+
54
+ def add_requirement(
55
+ self,
56
+ install_req: InstallRequirement,
57
+ parent_req_name: Optional[str] = None,
58
+ extras_requested: Optional[Iterable[str]] = None,
59
+ ) -> Tuple[List[InstallRequirement], Optional[InstallRequirement]]:
60
+ """Add install_req as a requirement to install.
61
+
62
+ :param parent_req_name: The name of the requirement that needed this
63
+ added. The name is used because when multiple unnamed requirements
64
+ resolve to the same name, we could otherwise end up with dependency
65
+ links that point outside the Requirements set. parent_req must
66
+ already be added. Note that None implies that this is a user
67
+ supplied requirement, vs an inferred one.
68
+ :param extras_requested: an iterable of extras used to evaluate the
69
+ environment markers.
70
+ :return: Additional requirements to scan. That is either [] if
71
+ the requirement is not applicable, or [install_req] if the
72
+ requirement is applicable and has just been added.
73
+ """
74
+ # If the markers do not match, ignore this requirement.
75
+ if not install_req.match_markers(extras_requested):
76
+ logger.info(
77
+ "Ignoring %s: markers '%s' don't match your environment",
78
+ install_req.name,
79
+ install_req.markers,
80
+ )
81
+ return [], None
82
+
83
+ # If the wheel is not supported, raise an error.
84
+ # Should check this after filtering out based on environment markers to
85
+ # allow specifying different wheels based on the environment/OS, in a
86
+ # single requirements file.
87
+ if install_req.link and install_req.link.is_wheel:
88
+ wheel = Wheel(install_req.link.filename)
89
+ tags = compatibility_tags.get_supported()
90
+ if self.check_supported_wheels and not wheel.supported(tags):
91
+ raise InstallationError(
92
+ "{} is not a supported wheel on this platform.".format(
93
+ wheel.filename
94
+ )
95
+ )
96
+
97
+ # This next bit is really a sanity check.
98
+ assert (
99
+ not install_req.user_supplied or parent_req_name is None
100
+ ), "a user supplied req shouldn't have a parent"
101
+
102
+ # Unnamed requirements are scanned again and the requirement won't be
103
+ # added as a dependency until after scanning.
104
+ if not install_req.name:
105
+ self.add_unnamed_requirement(install_req)
106
+ return [install_req], None
107
+
108
+ try:
109
+ existing_req: Optional[InstallRequirement] = self.get_requirement(
110
+ install_req.name
111
+ )
112
+ except KeyError:
113
+ existing_req = None
114
+
115
+ has_conflicting_requirement = (
116
+ parent_req_name is None
117
+ and existing_req
118
+ and not existing_req.constraint
119
+ and existing_req.extras == install_req.extras
120
+ and existing_req.req
121
+ and install_req.req
122
+ and existing_req.req.specifier != install_req.req.specifier
123
+ )
124
+ if has_conflicting_requirement:
125
+ raise InstallationError(
126
+ "Double requirement given: {} (already in {}, name={!r})".format(
127
+ install_req, existing_req, install_req.name
128
+ )
129
+ )
130
+
131
+ # When no existing requirement exists, add the requirement as a
132
+ # dependency and it will be scanned again after.
133
+ if not existing_req:
134
+ self.add_named_requirement(install_req)
135
+ # We'd want to rescan this requirement later
136
+ return [install_req], install_req
137
+
138
+ # Assume there's no need to scan, and that we've already
139
+ # encountered this for scanning.
140
+ if install_req.constraint or not existing_req.constraint:
141
+ return [], existing_req
142
+
143
+ does_not_satisfy_constraint = install_req.link and not (
144
+ existing_req.link and install_req.link.path == existing_req.link.path
145
+ )
146
+ if does_not_satisfy_constraint:
147
+ raise InstallationError(
148
+ "Could not satisfy constraints for '{}': "
149
+ "installation from path or url cannot be "
150
+ "constrained to a version".format(install_req.name)
151
+ )
152
+ # If we're now installing a constraint, mark the existing
153
+ # object for real installation.
154
+ existing_req.constraint = False
155
+ # If we're now installing a user supplied requirement,
156
+ # mark the existing object as such.
157
+ if install_req.user_supplied:
158
+ existing_req.user_supplied = True
159
+ existing_req.extras = tuple(
160
+ sorted(set(existing_req.extras) | set(install_req.extras))
161
+ )
162
+ logger.debug(
163
+ "Setting %s extras to: %s",
164
+ existing_req,
165
+ existing_req.extras,
166
+ )
167
+ # Return the existing requirement for addition to the parent and
168
+ # scanning again.
169
+ return [existing_req], existing_req
170
+
171
+ def has_requirement(self, name: str) -> bool:
172
+ project_name = canonicalize_name(name)
173
+
174
+ return (
175
+ project_name in self.requirements
176
+ and not self.requirements[project_name].constraint
177
+ )
178
+
179
+ def get_requirement(self, name: str) -> InstallRequirement:
180
+ project_name = canonicalize_name(name)
181
+
182
+ if project_name in self.requirements:
183
+ return self.requirements[project_name]
184
+
185
+ raise KeyError(f"No project with the name {name!r}")
186
+
187
+ @property
188
+ def all_requirements(self) -> List[InstallRequirement]:
189
+ return self.unnamed_requirements + list(self.requirements.values())
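A short usage sketch of the bookkeeping above, again leaning on pip's internal `install_req_from_line` helper purely for illustration (name and specifier are made up; internal APIs change between releases):

    from pip._internal.req.constructors import install_req_from_line
    from pip._internal.req.req_set import RequirementSet

    req_set = RequirementSet(check_supported_wheels=True)
    to_scan, added = req_set.add_requirement(install_req_from_line("idna>=3"))
    print(req_set.has_requirement("IDNA"))      # True -- names are canonicalized
    print(req_set.get_requirement("idna").req)  # idna>=3
    print(len(req_set.all_requirements))        # 1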
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_tracker.py ADDED
@@ -0,0 +1,124 @@
1
+ import contextlib
2
+ import hashlib
3
+ import logging
4
+ import os
5
+ from types import TracebackType
6
+ from typing import Dict, Iterator, Optional, Set, Type, Union
7
+
8
+ from pip._internal.models.link import Link
9
+ from pip._internal.req.req_install import InstallRequirement
10
+ from pip._internal.utils.temp_dir import TempDirectory
11
+
12
+ logger = logging.getLogger(__name__)
13
+
14
+
15
+ @contextlib.contextmanager
16
+ def update_env_context_manager(**changes: str) -> Iterator[None]:
17
+ target = os.environ
18
+
19
+ # Save values from the target and change them.
20
+ non_existent_marker = object()
21
+ saved_values: Dict[str, Union[object, str]] = {}
22
+ for name, new_value in changes.items():
23
+ try:
24
+ saved_values[name] = target[name]
25
+ except KeyError:
26
+ saved_values[name] = non_existent_marker
27
+ target[name] = new_value
28
+
29
+ try:
30
+ yield
31
+ finally:
32
+ # Restore original values in the target.
33
+ for name, original_value in saved_values.items():
34
+ if original_value is non_existent_marker:
35
+ del target[name]
36
+ else:
37
+ assert isinstance(original_value, str) # for mypy
38
+ target[name] = original_value
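A minimal sketch of how `update_env_context_manager` behaves; the variable name and value are invented for the example, and any pre-existing value would be restored rather than removed:

    import os
    from pip._internal.req.req_tracker import update_env_context_manager

    with update_env_context_manager(MY_FLAG="1"):  # hypothetical variable
        assert os.environ["MY_FLAG"] == "1"
    assert "MY_FLAG" not in os.environ  # removed (or restored) on exit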
39
+
40
+
41
+ @contextlib.contextmanager
42
+ def get_requirement_tracker() -> Iterator["RequirementTracker"]:
43
+ root = os.environ.get("PIP_REQ_TRACKER")
44
+ with contextlib.ExitStack() as ctx:
45
+ if root is None:
46
+ root = ctx.enter_context(TempDirectory(kind="req-tracker")).path
47
+ ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
48
+ logger.debug("Initialized build tracking at %s", root)
49
+
50
+ with RequirementTracker(root) as tracker:
51
+ yield tracker
52
+
53
+
54
+ class RequirementTracker:
55
+ def __init__(self, root: str) -> None:
56
+ self._root = root
57
+ self._entries: Set[InstallRequirement] = set()
58
+ logger.debug("Created build tracker: %s", self._root)
59
+
60
+ def __enter__(self) -> "RequirementTracker":
61
+ logger.debug("Entered build tracker: %s", self._root)
62
+ return self
63
+
64
+ def __exit__(
65
+ self,
66
+ exc_type: Optional[Type[BaseException]],
67
+ exc_val: Optional[BaseException],
68
+ exc_tb: Optional[TracebackType],
69
+ ) -> None:
70
+ self.cleanup()
71
+
72
+ def _entry_path(self, link: Link) -> str:
73
+ hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
74
+ return os.path.join(self._root, hashed)
75
+
76
+ def add(self, req: InstallRequirement) -> None:
77
+ """Add an InstallRequirement to build tracking."""
78
+
79
+ assert req.link
80
+ # Get the file to write information about this requirement.
81
+ entry_path = self._entry_path(req.link)
82
+
83
+ # Try reading from the file. If it exists and can be read from, a build
84
+ # is already in progress, so a LookupError is raised.
85
+ try:
86
+ with open(entry_path) as fp:
87
+ contents = fp.read()
88
+ except FileNotFoundError:
89
+ pass
90
+ else:
91
+ message = "{} is already being built: {}".format(req.link, contents)
92
+ raise LookupError(message)
93
+
94
+ # If we're here, req should really not be building already.
95
+ assert req not in self._entries
96
+
97
+ # Start tracking this requirement.
98
+ with open(entry_path, "w", encoding="utf-8") as fp:
99
+ fp.write(str(req))
100
+ self._entries.add(req)
101
+
102
+ logger.debug("Added %s to build tracker %r", req, self._root)
103
+
104
+ def remove(self, req: InstallRequirement) -> None:
105
+ """Remove an InstallRequirement from build tracking."""
106
+
107
+ assert req.link
108
+ # Delete the created file and the corresponding entries.
109
+ os.unlink(self._entry_path(req.link))
110
+ self._entries.remove(req)
111
+
112
+ logger.debug("Removed %s from build tracker %r", req, self._root)
113
+
114
+ def cleanup(self) -> None:
115
+ for req in set(self._entries):
116
+ self.remove(req)
117
+
118
+ logger.debug("Removed build tracker: %r", self._root)
119
+
120
+ @contextlib.contextmanager
121
+ def track(self, req: InstallRequirement) -> Iterator[None]:
122
+ self.add(req)
123
+ yield
124
+ self.remove(req)
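Taken together, the intended usage looks roughly like the sketch below. The requirement must have a `link`; the name and URL here are made up, and these internal pip APIs are not a stable interface:

    from pip._internal.req.constructors import install_req_from_line
    from pip._internal.req.req_tracker import get_requirement_tracker

    req = install_req_from_line("demo @ https://example.com/demo-1.0.tar.gz")  # made-up URL
    with get_requirement_tracker() as tracker:
        with tracker.track(req):  # a concurrent track() of the same link raises LookupError
            pass  # run the build for this requirement here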
llmeval-env/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py ADDED
@@ -0,0 +1,633 @@
1
+ import functools
2
+ import os
3
+ import sys
4
+ import sysconfig
5
+ from importlib.util import cache_from_source
6
+ from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple
7
+
8
+ from pip._internal.exceptions import UninstallationError
9
+ from pip._internal.locations import get_bin_prefix, get_bin_user
10
+ from pip._internal.metadata import BaseDistribution
11
+ from pip._internal.utils.compat import WINDOWS
12
+ from pip._internal.utils.egg_link import egg_link_path_from_location
13
+ from pip._internal.utils.logging import getLogger, indent_log
14
+ from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree
15
+ from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
16
+
17
+ logger = getLogger(__name__)
18
+
19
+
20
+ def _script_names(bin_dir: str, script_name: str, is_gui: bool) -> Iterator[str]:
21
+ """Create the fully qualified name of the files created by
22
+ {console,gui}_scripts for the given ``dist``.
23
+ Returns the list of file names
24
+ """
25
+ exe_name = os.path.join(bin_dir, script_name)
26
+ yield exe_name
27
+ if not WINDOWS:
28
+ return
29
+ yield f"{exe_name}.exe"
30
+ yield f"{exe_name}.exe.manifest"
31
+ if is_gui:
32
+ yield f"{exe_name}-script.pyw"
33
+ else:
34
+ yield f"{exe_name}-script.py"
35
+
36
+
37
+ def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]:
38
+ @functools.wraps(fn)
39
+ def unique(*args: Any, **kw: Any) -> Iterator[Any]:
40
+ seen: Set[Any] = set()
41
+ for item in fn(*args, **kw):
42
+ if item not in seen:
43
+ seen.add(item)
44
+ yield item
45
+
46
+ return unique
47
+
48
+
49
+ @_unique
50
+ def uninstallation_paths(dist: BaseDistribution) -> Iterator[str]:
51
+ """
52
+ Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
53
+
54
+ Yield paths to all the files in RECORD. For each .py file in RECORD, add
55
+ the .pyc and .pyo in the same directory.
56
+
57
+ UninstallPathSet.add() takes care of the __pycache__ .py[co].
58
+
59
+ If RECORD is not found, raises UninstallationError,
60
+ with possible information from the INSTALLER file.
61
+
62
+ https://packaging.python.org/specifications/recording-installed-packages/
63
+ """
64
+ location = dist.location
65
+ assert location is not None, "not installed"
66
+
67
+ entries = dist.iter_declared_entries()
68
+ if entries is None:
69
+ msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
70
+ installer = dist.installer
71
+ if not installer or installer == "pip":
72
+ dep = "{}=={}".format(dist.raw_name, dist.version)
73
+ msg += (
74
+ " You might be able to recover from this via: "
75
+ "'pip install --force-reinstall --no-deps {}'.".format(dep)
76
+ )
77
+ else:
78
+ msg += " Hint: The package was installed by {}.".format(installer)
79
+ raise UninstallationError(msg)
80
+
81
+ for entry in entries:
82
+ path = os.path.join(location, entry)
83
+ yield path
84
+ if path.endswith(".py"):
85
+ dn, fn = os.path.split(path)
86
+ base = fn[:-3]
87
+ path = os.path.join(dn, base + ".pyc")
88
+ yield path
89
+ path = os.path.join(dn, base + ".pyo")
90
+ yield path
91
+
92
+
93
+ def compact(paths: Iterable[str]) -> Set[str]:
94
+ """Compact a path set to contain the minimal number of paths
95
+ necessary to contain all paths in the set. If /a/path/ and
96
+ /a/path/to/a/file.txt are both in the set, leave only the
97
+ shorter path."""
98
+
99
+ sep = os.path.sep
100
+ short_paths: Set[str] = set()
101
+ for path in sorted(paths, key=len):
102
+ should_skip = any(
103
+ path.startswith(shortpath.rstrip("*"))
104
+ and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
105
+ for shortpath in short_paths
106
+ )
107
+ if not should_skip:
108
+ short_paths.add(path)
109
+ return short_paths
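For intuition, `compact` collapses any path that lives under another path already in the set (POSIX-style paths shown; the comparison follows `os.path.sep`):

    from pip._internal.req.req_uninstall import compact

    paths = {"/a/path/", "/a/path/to/a/file.txt", "/b/other.txt"}
    print(compact(paths))  # {'/a/path/', '/b/other.txt'}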
110
+
111
+
112
+ def compress_for_rename(paths: Iterable[str]) -> Set[str]:
113
+ """Returns a set containing the paths that need to be renamed.
114
+
115
+ This set may include directories when the original sequence of paths
116
+ included every file on disk.
117
+ """
118
+ case_map = {os.path.normcase(p): p for p in paths}
119
+ remaining = set(case_map)
120
+ unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
121
+ wildcards: Set[str] = set()
122
+
123
+ def norm_join(*a: str) -> str:
124
+ return os.path.normcase(os.path.join(*a))
125
+
126
+ for root in unchecked:
127
+ if any(os.path.normcase(root).startswith(w) for w in wildcards):
128
+ # This directory has already been handled.
129
+ continue
130
+
131
+ all_files: Set[str] = set()
132
+ all_subdirs: Set[str] = set()
133
+ for dirname, subdirs, files in os.walk(root):
134
+ all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
135
+ all_files.update(norm_join(root, dirname, f) for f in files)
136
+ # If all the files we found are in our remaining set of files to
137
+ # remove, then remove them from the latter set and add a wildcard
138
+ # for the directory.
139
+ if not (all_files - remaining):
140
+ remaining.difference_update(all_files)
141
+ wildcards.add(root + os.sep)
142
+
143
+ return set(map(case_map.__getitem__, remaining)) | wildcards
144
+
145
+
146
+ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
147
+ """Returns a tuple of 2 sets of which paths to display to user
148
+
149
+ The first set contains paths that would be deleted. Files of a package
150
+ are not added and the top-level directory of the package has a '*' added
151
+ at the end - to signify that all its contents are removed.
152
+
153
+ The second set contains files that would have been skipped in the above
154
+ folders.
155
+ """
156
+
157
+ will_remove = set(paths)
158
+ will_skip = set()
159
+
160
+ # Determine folders and files
161
+ folders = set()
162
+ files = set()
163
+ for path in will_remove:
164
+ if path.endswith(".pyc"):
165
+ continue
166
+ if path.endswith("__init__.py") or ".dist-info" in path:
167
+ folders.add(os.path.dirname(path))
168
+ files.add(path)
169
+
170
+ # probably this one https://github.com/python/mypy/issues/390
171
+ _normcased_files = set(map(os.path.normcase, files)) # type: ignore
172
+
173
+ folders = compact(folders)
174
+
175
+ # This walks the tree using os.walk to not miss extra folders
176
+ # that might get added.
177
+ for folder in folders:
178
+ for dirpath, _, dirfiles in os.walk(folder):
179
+ for fname in dirfiles:
180
+ if fname.endswith(".pyc"):
181
+ continue
182
+
183
+ file_ = os.path.join(dirpath, fname)
184
+ if (
185
+ os.path.isfile(file_)
186
+ and os.path.normcase(file_) not in _normcased_files
187
+ ):
188
+ # We are skipping this file. Add it to the set.
189
+ will_skip.add(file_)
190
+
191
+ will_remove = files | {os.path.join(folder, "*") for folder in folders}
192
+
193
+ return will_remove, will_skip
194
+
195
+
196
+ class StashedUninstallPathSet:
197
+ """A set of file rename operations to stash files while
198
+ tentatively uninstalling them."""
199
+
200
+ def __init__(self) -> None:
201
+ # Mapping from source file root to [Adjacent]TempDirectory
202
+ # for files under that directory.
203
+ self._save_dirs: Dict[str, TempDirectory] = {}
204
+ # (old path, new path) tuples for each move that may need
205
+ # to be undone.
206
+ self._moves: List[Tuple[str, str]] = []
207
+
208
+ def _get_directory_stash(self, path: str) -> str:
209
+ """Stashes a directory.
210
+
211
+ Directories are stashed adjacent to their original location if
212
+ possible, or else moved/copied into the user's temp dir."""
213
+
214
+ try:
215
+ save_dir: TempDirectory = AdjacentTempDirectory(path)
216
+ except OSError:
217
+ save_dir = TempDirectory(kind="uninstall")
218
+ self._save_dirs[os.path.normcase(path)] = save_dir
219
+
220
+ return save_dir.path
221
+
222
+ def _get_file_stash(self, path: str) -> str:
223
+ """Stashes a file.
224
+
225
+ If no root has been provided, one will be created for the directory
226
+ in the user's temp directory."""
227
+ path = os.path.normcase(path)
228
+ head, old_head = os.path.dirname(path), None
229
+ save_dir = None
230
+
231
+ while head != old_head:
232
+ try:
233
+ save_dir = self._save_dirs[head]
234
+ break
235
+ except KeyError:
236
+ pass
237
+ head, old_head = os.path.dirname(head), head
238
+ else:
239
+ # Did not find any suitable root
240
+ head = os.path.dirname(path)
241
+ save_dir = TempDirectory(kind="uninstall")
242
+ self._save_dirs[head] = save_dir
243
+
244
+ relpath = os.path.relpath(path, head)
245
+ if relpath and relpath != os.path.curdir:
246
+ return os.path.join(save_dir.path, relpath)
247
+ return save_dir.path
248
+
249
+ def stash(self, path: str) -> str:
250
+ """Stashes the directory or file and returns its new location.
251
+ Handle symlinks as files to avoid modifying the symlink targets.
252
+ """
253
+ path_is_dir = os.path.isdir(path) and not os.path.islink(path)
254
+ if path_is_dir:
255
+ new_path = self._get_directory_stash(path)
256
+ else:
257
+ new_path = self._get_file_stash(path)
258
+
259
+ self._moves.append((path, new_path))
260
+ if path_is_dir and os.path.isdir(new_path):
261
+ # If we're moving a directory, we need to
262
+ # remove the destination first or else it will be
263
+ # moved to inside the existing directory.
264
+ # We just created new_path ourselves, so it will
265
+ # be removable.
266
+ os.rmdir(new_path)
267
+ renames(path, new_path)
268
+ return new_path
269
+
270
+ def commit(self) -> None:
271
+ """Commits the uninstall by removing stashed files."""
272
+ for _, save_dir in self._save_dirs.items():
273
+ save_dir.cleanup()
274
+ self._moves = []
275
+ self._save_dirs = {}
276
+
277
+ def rollback(self) -> None:
278
+ """Undoes the uninstall by moving stashed files back."""
279
+ for p in self._moves:
280
+ logger.info("Moving to %s\n from %s", *p)
281
+
282
+ for new_path, path in self._moves:
283
+ try:
284
+ logger.debug("Replacing %s from %s", new_path, path)
285
+ if os.path.isfile(new_path) or os.path.islink(new_path):
286
+ os.unlink(new_path)
287
+ elif os.path.isdir(new_path):
288
+ rmtree(new_path)
289
+ renames(path, new_path)
290
+ except OSError as ex:
291
+ logger.error("Failed to restore %s", new_path)
292
+ logger.debug("Exception: %s", ex)
293
+
294
+ self.commit()
295
+
296
+ @property
297
+ def can_rollback(self) -> bool:
298
+ return bool(self._moves)
299
+
300
+
301
+ class UninstallPathSet:
302
+ """A set of file paths to be removed in the uninstallation of a
303
+ requirement."""
304
+
305
+ def __init__(self, dist: BaseDistribution) -> None:
306
+ self._paths: Set[str] = set()
307
+ self._refuse: Set[str] = set()
308
+ self._pth: Dict[str, UninstallPthEntries] = {}
309
+ self._dist = dist
310
+ self._moved_paths = StashedUninstallPathSet()
311
+
312
+ def _permitted(self, path: str) -> bool:
313
+ """
314
+ Return True if the given path is one we are permitted to
315
+ remove/modify, False otherwise.
316
+
317
+ """
318
+ return is_local(path)
319
+
320
+ def add(self, path: str) -> None:
321
+ head, tail = os.path.split(path)
322
+
323
+ # we normalize the head to resolve parent directory symlinks, but not
324
+ # the tail, since we only want to uninstall symlinks, not their targets
325
+ path = os.path.join(normalize_path(head), os.path.normcase(tail))
326
+
327
+ if not os.path.exists(path):
328
+ return
329
+ if self._permitted(path):
330
+ self._paths.add(path)
331
+ else:
332
+ self._refuse.add(path)
333
+
334
+ # __pycache__ files can show up after 'installed-files.txt' is created,
335
+ # due to imports
336
+ if os.path.splitext(path)[1] == ".py":
337
+ self.add(cache_from_source(path))
338
+
339
+ def add_pth(self, pth_file: str, entry: str) -> None:
340
+ pth_file = normalize_path(pth_file)
341
+ if self._permitted(pth_file):
342
+ if pth_file not in self._pth:
343
+ self._pth[pth_file] = UninstallPthEntries(pth_file)
344
+ self._pth[pth_file].add(entry)
345
+ else:
346
+ self._refuse.add(pth_file)
347
+
348
+ def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
349
+ """Remove paths in ``self._paths`` with confirmation (unless
350
+ ``auto_confirm`` is True)."""
351
+
352
+ if not self._paths:
353
+ logger.info(
354
+ "Can't uninstall '%s'. No files were found to uninstall.",
355
+ self._dist.raw_name,
356
+ )
357
+ return
358
+
359
+ dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
360
+ logger.info("Uninstalling %s:", dist_name_version)
361
+
362
+ with indent_log():
363
+ if auto_confirm or self._allowed_to_proceed(verbose):
364
+ moved = self._moved_paths
365
+
366
+ for_rename = compress_for_rename(self._paths)
367
+
368
+ for path in sorted(compact(for_rename)):
369
+ moved.stash(path)
370
+ logger.verbose("Removing file or directory %s", path)
371
+
372
+ for pth in self._pth.values():
373
+ pth.remove()
374
+
375
+ logger.info("Successfully uninstalled %s", dist_name_version)
376
+
377
+ def _allowed_to_proceed(self, verbose: bool) -> bool:
378
+ """Display which files would be deleted and prompt for confirmation"""
379
+
380
+ def _display(msg: str, paths: Iterable[str]) -> None:
381
+ if not paths:
382
+ return
383
+
384
+ logger.info(msg)
385
+ with indent_log():
386
+ for path in sorted(compact(paths)):
387
+ logger.info(path)
388
+
389
+ if not verbose:
390
+ will_remove, will_skip = compress_for_output_listing(self._paths)
391
+ else:
392
+ # In verbose mode, display all the files that are going to be
393
+ # deleted.
394
+ will_remove = set(self._paths)
395
+ will_skip = set()
396
+
397
+ _display("Would remove:", will_remove)
398
+ _display("Would not remove (might be manually added):", will_skip)
399
+ _display("Would not remove (outside of prefix):", self._refuse)
400
+ if verbose:
401
+ _display("Will actually move:", compress_for_rename(self._paths))
402
+
403
+ return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"
404
+
405
+ def rollback(self) -> None:
406
+ """Rollback the changes previously made by remove()."""
407
+ if not self._moved_paths.can_rollback:
408
+ logger.error(
409
+ "Can't roll back %s; was not uninstalled",
410
+ self._dist.raw_name,
411
+ )
412
+ return
413
+ logger.info("Rolling back uninstall of %s", self._dist.raw_name)
414
+ self._moved_paths.rollback()
415
+ for pth in self._pth.values():
416
+ pth.rollback()
417
+
418
+ def commit(self) -> None:
419
+ """Remove temporary save dir: rollback will no longer be possible."""
420
+ self._moved_paths.commit()
421
+
422
+ @classmethod
423
+ def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
424
+ dist_location = dist.location
425
+ info_location = dist.info_location
426
+ if dist_location is None:
427
+ logger.info(
428
+ "Not uninstalling %s since it is not installed",
429
+ dist.canonical_name,
430
+ )
431
+ return cls(dist)
432
+
433
+ normalized_dist_location = normalize_path(dist_location)
434
+ if not dist.local:
435
+ logger.info(
436
+ "Not uninstalling %s at %s, outside environment %s",
437
+ dist.canonical_name,
438
+ normalized_dist_location,
439
+ sys.prefix,
440
+ )
441
+ return cls(dist)
442
+
443
+ if normalized_dist_location in {
444
+ p
445
+ for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
446
+ if p
447
+ }:
448
+ logger.info(
449
+ "Not uninstalling %s at %s, as it is in the standard library.",
450
+ dist.canonical_name,
451
+ normalized_dist_location,
452
+ )
453
+ return cls(dist)
454
+
455
+ paths_to_remove = cls(dist)
456
+ develop_egg_link = egg_link_path_from_location(dist.raw_name)
457
+
458
+ # Distribution is installed with metadata in a "flat" .egg-info
459
+ # directory. This means it is not a modern .dist-info installation, an
460
+ # egg, or legacy editable.
461
+ setuptools_flat_installation = (
462
+ dist.installed_with_setuptools_egg_info
463
+ and info_location is not None
464
+ and os.path.exists(info_location)
465
+ # If dist is editable and the location points to a ``.egg-info``,
466
+ # we are in fact in the legacy editable case.
467
+ and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
468
+ )
469
+
470
+ # The order of these uninstall cases matters: when the same package has
471
+ # been installed twice, pip needs to uninstall the currently detected version.
472
+ if setuptools_flat_installation:
473
+ if info_location is not None:
474
+ paths_to_remove.add(info_location)
475
+ installed_files = dist.iter_declared_entries()
476
+ if installed_files is not None:
477
+ for installed_file in installed_files:
478
+ paths_to_remove.add(os.path.join(dist_location, installed_file))
479
+ # FIXME: need a test for this elif block
480
+ # occurs with --single-version-externally-managed/--record outside
481
+ # of pip
482
+ elif dist.is_file("top_level.txt"):
483
+ try:
484
+ namespace_packages = dist.read_text("namespace_packages.txt")
485
+ except FileNotFoundError:
486
+ namespaces = []
487
+ else:
488
+ namespaces = namespace_packages.splitlines(keepends=False)
489
+ for top_level_pkg in [
490
+ p
491
+ for p in dist.read_text("top_level.txt").splitlines()
492
+ if p and p not in namespaces
493
+ ]:
494
+ path = os.path.join(dist_location, top_level_pkg)
495
+ paths_to_remove.add(path)
496
+ paths_to_remove.add(f"{path}.py")
497
+ paths_to_remove.add(f"{path}.pyc")
498
+ paths_to_remove.add(f"{path}.pyo")
499
+
500
+ elif dist.installed_by_distutils:
501
+ raise UninstallationError(
502
+ "Cannot uninstall {!r}. It is a distutils installed project "
503
+ "and thus we cannot accurately determine which files belong "
504
+ "to it which would lead to only a partial uninstall.".format(
505
+ dist.raw_name,
506
+ )
507
+ )
508
+
509
+ elif dist.installed_as_egg:
510
+ # package installed by easy_install
511
+ # We cannot match on dist.egg_name because it can slightly vary
512
+ # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
513
+ paths_to_remove.add(dist_location)
514
+ easy_install_egg = os.path.split(dist_location)[1]
515
+ easy_install_pth = os.path.join(
516
+ os.path.dirname(dist_location),
517
+ "easy-install.pth",
518
+ )
519
+ paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
520
+
521
+ elif dist.installed_with_dist_info:
522
+ for path in uninstallation_paths(dist):
523
+ paths_to_remove.add(path)
524
+
525
+ elif develop_egg_link:
526
+ # PEP 660 modern editable is handled in the ``.dist-info`` case
527
+ # above, so this only covers the setuptools-style editable.
528
+ with open(develop_egg_link) as fh:
529
+ link_pointer = os.path.normcase(fh.readline().strip())
530
+ assert link_pointer == dist_location, (
531
+ f"Egg-link {link_pointer} does not match installed location of "
532
+ f"{dist.raw_name} (at {dist_location})"
533
+ )
534
+ paths_to_remove.add(develop_egg_link)
535
+ easy_install_pth = os.path.join(
536
+ os.path.dirname(develop_egg_link), "easy-install.pth"
537
+ )
538
+ paths_to_remove.add_pth(easy_install_pth, dist_location)
539
+
540
+ else:
541
+ logger.debug(
542
+ "Not sure how to uninstall: %s - Check: %s",
543
+ dist,
544
+ dist_location,
545
+ )
546
+
547
+ if dist.in_usersite:
548
+ bin_dir = get_bin_user()
549
+ else:
550
+ bin_dir = get_bin_prefix()
551
+
552
+ # find distutils scripts= scripts
553
+ try:
554
+ for script in dist.iterdir("scripts"):
555
+ paths_to_remove.add(os.path.join(bin_dir, script.name))
556
+ if WINDOWS:
557
+ paths_to_remove.add(os.path.join(bin_dir, f"{script.name}.bat"))
558
+ except (FileNotFoundError, NotADirectoryError):
559
+ pass
560
+
561
+ # find console_scripts and gui_scripts
562
+ def iter_scripts_to_remove(
563
+ dist: BaseDistribution,
564
+ bin_dir: str,
565
+ ) -> Iterator[str]:
566
+ for entry_point in dist.iter_entry_points():
567
+ if entry_point.group == "console_scripts":
568
+ yield from _script_names(bin_dir, entry_point.name, False)
569
+ elif entry_point.group == "gui_scripts":
570
+ yield from _script_names(bin_dir, entry_point.name, True)
571
+
572
+ for s in iter_scripts_to_remove(dist, bin_dir):
573
+ paths_to_remove.add(s)
574
+
575
+ return paths_to_remove
576
+
577
+
578
+ class UninstallPthEntries:
579
+ def __init__(self, pth_file: str) -> None:
580
+ self.file = pth_file
581
+ self.entries: Set[str] = set()
582
+ self._saved_lines: Optional[List[bytes]] = None
583
+
584
+ def add(self, entry: str) -> None:
585
+ entry = os.path.normcase(entry)
586
+ # On Windows, os.path.normcase converts the entry to use
587
+ # backslashes. This is correct for entries that describe absolute
588
+ # paths outside of site-packages, but all the others use forward
589
+ # slashes.
590
+ # os.path.splitdrive is used instead of os.path.isabs because isabs
591
+ # treats non-absolute paths with drive letter markings like c:foo\bar
592
+ # as absolute paths. It also does not recognize UNC paths if they don't
593
+ # have more than "\\server\share". Valid examples: "\\server\share\" or
594
+ # "\\server\share\folder".
595
+ if WINDOWS and not os.path.splitdrive(entry)[0]:
596
+ entry = entry.replace("\\", "/")
597
+ self.entries.add(entry)
598
+
599
+ def remove(self) -> None:
600
+ logger.verbose("Removing pth entries from %s:", self.file)
601
+
602
+ # If the file doesn't exist, log a warning and return
603
+ if not os.path.isfile(self.file):
604
+ logger.warning("Cannot remove entries from nonexistent file %s", self.file)
605
+ return
606
+ with open(self.file, "rb") as fh:
607
+ # .pth files written on Windows may use '\r\n' line endings, others '\n';
608
+ lines = fh.readlines()
609
+ self._saved_lines = lines
610
+ if any(b"\r\n" in line for line in lines):
611
+ endline = "\r\n"
612
+ else:
613
+ endline = "\n"
614
+ # handle missing trailing newline
615
+ if lines and not lines[-1].endswith(endline.encode("utf-8")):
616
+ lines[-1] = lines[-1] + endline.encode("utf-8")
617
+ for entry in self.entries:
618
+ try:
619
+ logger.verbose("Removing entry: %s", entry)
620
+ lines.remove((entry + endline).encode("utf-8"))
621
+ except ValueError:
622
+ pass
623
+ with open(self.file, "wb") as fh:
624
+ fh.writelines(lines)
625
+
626
+ def rollback(self) -> bool:
627
+ if self._saved_lines is None:
628
+ logger.error("Cannot roll back changes to %s, none were made", self.file)
629
+ return False
630
+ logger.debug("Rolling %s back to previous state", self.file)
631
+ with open(self.file, "wb") as fh:
632
+ fh.writelines(self._saved_lines)
633
+ return True
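End to end, the class above is driven much as `InstallRequirement.uninstall()` does earlier in this changeset; a hedged sketch with a made-up distribution name (internal APIs, shown only for illustration):

    from pip._internal.metadata import get_default_environment
    from pip._internal.req.req_uninstall import UninstallPathSet

    dist = get_default_environment().get_distribution("example-package")  # hypothetical
    if dist is not None:
        pathset = UninstallPathSet.from_dist(dist)
        pathset.remove(auto_confirm=True)
        pathset.commit()  # or pathset.rollback() to restore the stashed files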
llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__init__.py ADDED
File without changes
llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/_log.cpython-310.pyc ADDED
Binary file (1.52 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc ADDED
Binary file (1.51 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/deprecation.cpython-310.pyc ADDED
Binary file (3.31 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/direct_url_helpers.cpython-310.pyc ADDED
Binary file (2.08 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-310.pyc ADDED
Binary file (1.31 kB). View file
 
llmeval-env/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc ADDED
Binary file (1.3 kB). View file