applied-ai-018 committed on
Commit dca2a1a · verified · 1 Parent(s): 297b47b

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See raw diff
Files changed (50)
  1. .gitattributes +1 -0
  2. env-llmeval/lib/python3.10/site-packages/numpy.libs/libopenblas64_p-r0-0cf96a72.3.23.dev.so +3 -0
  3. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc +0 -0
  4. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc +0 -0
  5. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc +0 -0
  6. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc +0 -0
  7. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc +0 -0
  8. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc +0 -0
  9. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc +0 -0
  10. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py +1018 -0
  11. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py +87 -0
  12. env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py +321 -0
  13. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py +21 -0
  14. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc +0 -0
  15. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc +0 -0
  16. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc +0 -0
  17. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc +0 -0
  18. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc +0 -0
  19. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/base.py +36 -0
  20. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/installed.py +20 -0
  21. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py +127 -0
  22. env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py +31 -0
  23. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__init__.py +94 -0
  24. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc +0 -0
  25. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc +0 -0
  26. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc +0 -0
  27. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc +0 -0
  28. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc +0 -0
  29. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc +0 -0
  30. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc +0 -0
  31. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_file.py +536 -0
  32. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_install.py +858 -0
  33. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_tracker.py +124 -0
  34. env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py +633 -0
  35. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__init__.py +0 -0
  36. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  37. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc +0 -0
  38. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc +0 -0
  39. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-310.pyc +0 -0
  40. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-310.pyc +0 -0
  41. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-310.pyc +0 -0
  42. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-310.pyc +0 -0
  43. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc +0 -0
  44. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-310.pyc +0 -0
  45. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-310.pyc +0 -0
  46. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/logging.cpython-310.pyc +0 -0
  47. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/misc.cpython-310.pyc +0 -0
  48. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-310.pyc +0 -0
  49. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-310.pyc +0 -0
  50. env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-310.pyc +0 -0
.gitattributes CHANGED
@@ -217,3 +217,4 @@ env-llmeval/lib/python3.10/site-packages/torch/lib/libtorch_cuda.so filter=lfs diff=lfs merge=lfs -text
217
  env-llmeval/lib/python3.10/site-packages/sklearn/_loss/_loss.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
218
  env-llmeval/lib/python3.10/site-packages/aiohttp/_http_parser.cpython-310-x86_64-linux-gnu.so filter=lfs diff=lfs merge=lfs -text
219
  env-llmeval/lib/python3.10/site-packages/numpy.libs/libgfortran-040039e1.so.5.0.0 filter=lfs diff=lfs merge=lfs -text
220
+ env-llmeval/lib/python3.10/site-packages/numpy.libs/libopenblas64_p-r0-0cf96a72.3.23.dev.so filter=lfs diff=lfs merge=lfs -text
env-llmeval/lib/python3.10/site-packages/numpy.libs/libopenblas64_p-r0-0cf96a72.3.23.dev.so ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9254d0854dd7615e11de28d771ae408878ca8123a7ac204f21e4cc7a376cc2e5
3
+ size 35123345
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/autocompletion.cpython-310.pyc ADDED
Binary file (5.3 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/main.cpython-310.pyc ADDED
Binary file (1.36 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/parser.cpython-310.pyc ADDED
Binary file (9.94 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/progress_bars.cpython-310.pyc ADDED
Binary file (9.23 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/req_command.cpython-310.pyc ADDED
Binary file (13.5 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/spinners.cpython-310.pyc ADDED
Binary file (4.94 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/__pycache__/status_codes.cpython-310.pyc ADDED
Binary file (344 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/cmdoptions.py ADDED
@@ -0,0 +1,1018 @@
1
+ """
2
+ shared options and groups
3
+
4
+ The principle here is to define options once, but *not* instantiate them
5
+ globally. One reason being that options with action='append' can carry state
6
+ between parses. pip parses general options twice internally, and shouldn't
7
+ pass on state. To be consistent, all options will follow this design.
8
+ """
9
+
10
+ # The following comment should be removed at some point in the future.
11
+ # mypy: strict-optional=False
12
+
13
+ import logging
14
+ import os
15
+ import textwrap
16
+ from functools import partial
17
+ from optparse import SUPPRESS_HELP, Option, OptionGroup, OptionParser, Values
18
+ from textwrap import dedent
19
+ from typing import Any, Callable, Dict, Optional, Tuple
20
+
21
+ from pip._vendor.packaging.utils import canonicalize_name
22
+
23
+ from pip._internal.cli.parser import ConfigOptionParser
24
+ from pip._internal.cli.progress_bars import BAR_TYPES
25
+ from pip._internal.exceptions import CommandError
26
+ from pip._internal.locations import USER_CACHE_DIR, get_src_prefix
27
+ from pip._internal.models.format_control import FormatControl
28
+ from pip._internal.models.index import PyPI
29
+ from pip._internal.models.target_python import TargetPython
30
+ from pip._internal.utils.hashes import STRONG_HASHES
31
+ from pip._internal.utils.misc import strtobool
32
+
33
+ logger = logging.getLogger(__name__)
34
+
35
+
36
+ def raise_option_error(parser: OptionParser, option: Option, msg: str) -> None:
37
+ """
38
+ Raise an option parsing error using parser.error().
39
+
40
+ Args:
41
+ parser: an OptionParser instance.
42
+ option: an Option instance.
43
+ msg: the error text.
44
+ """
45
+ msg = f"{option} error: {msg}"
46
+ msg = textwrap.fill(" ".join(msg.split()))
47
+ parser.error(msg)
48
+
49
+
50
+ def make_option_group(group: Dict[str, Any], parser: ConfigOptionParser) -> OptionGroup:
51
+ """
52
+ Return an OptionGroup object
53
+ group -- assumed to be dict with 'name' and 'options' keys
54
+ parser -- an optparse Parser
55
+ """
56
+ option_group = OptionGroup(parser, group["name"])
57
+ for option in group["options"]:
58
+ option_group.add_option(option())
59
+ return option_group
60
+
61
+
62
+ def check_install_build_global(
63
+ options: Values, check_options: Optional[Values] = None
64
+ ) -> None:
65
+ """Disable wheels if per-setup.py call options are set.
66
+
67
+ :param options: The OptionParser options to update.
68
+ :param check_options: The options to check, if not supplied defaults to
69
+ options.
70
+ """
71
+ if check_options is None:
72
+ check_options = options
73
+
74
+ def getname(n: str) -> Optional[Any]:
75
+ return getattr(check_options, n, None)
76
+
77
+ names = ["build_options", "global_options", "install_options"]
78
+ if any(map(getname, names)):
79
+ control = options.format_control
80
+ control.disallow_binaries()
81
+ logger.warning(
82
+ "Disabling all use of wheels due to the use of --build-option "
83
+ "/ --global-option / --install-option.",
84
+ )
85
+
86
+
87
+ def check_dist_restriction(options: Values, check_target: bool = False) -> None:
88
+ """Function for determining if custom platform options are allowed.
89
+
90
+ :param options: The OptionParser options.
91
+ :param check_target: Whether or not to check if --target is being used.
92
+ """
93
+ dist_restriction_set = any(
94
+ [
95
+ options.python_version,
96
+ options.platforms,
97
+ options.abis,
98
+ options.implementation,
99
+ ]
100
+ )
101
+
102
+ binary_only = FormatControl(set(), {":all:"})
103
+ sdist_dependencies_allowed = (
104
+ options.format_control != binary_only and not options.ignore_dependencies
105
+ )
106
+
107
+ # Installations or downloads using dist restrictions must not combine
108
+ # source distributions and dist-specific wheels, as they are not
109
+ # guaranteed to be locally compatible.
110
+ if dist_restriction_set and sdist_dependencies_allowed:
111
+ raise CommandError(
112
+ "When restricting platform and interpreter constraints using "
113
+ "--python-version, --platform, --abi, or --implementation, "
114
+ "either --no-deps must be set, or --only-binary=:all: must be "
115
+ "set and --no-binary must not be set (or must be set to "
116
+ ":none:)."
117
+ )
118
+
119
+ if check_target:
120
+ if dist_restriction_set and not options.target_dir:
121
+ raise CommandError(
122
+ "Can not use any platform or abi specific options unless "
123
+ "installing via '--target'"
124
+ )
125
+
126
+
127
+ def _path_option_check(option: Option, opt: str, value: str) -> str:
128
+ return os.path.expanduser(value)
129
+
130
+
131
+ def _package_name_option_check(option: Option, opt: str, value: str) -> str:
132
+ return canonicalize_name(value)
133
+
134
+
135
+ class PipOption(Option):
136
+ TYPES = Option.TYPES + ("path", "package_name")
137
+ TYPE_CHECKER = Option.TYPE_CHECKER.copy()
138
+ TYPE_CHECKER["package_name"] = _package_name_option_check
139
+ TYPE_CHECKER["path"] = _path_option_check
140
+
141
+
142
+ ###########
143
+ # options #
144
+ ###########
145
+
146
+ help_: Callable[..., Option] = partial(
147
+ Option,
148
+ "-h",
149
+ "--help",
150
+ dest="help",
151
+ action="help",
152
+ help="Show help.",
153
+ )
154
+
155
+ debug_mode: Callable[..., Option] = partial(
156
+ Option,
157
+ "--debug",
158
+ dest="debug_mode",
159
+ action="store_true",
160
+ default=False,
161
+ help=(
162
+ "Let unhandled exceptions propagate outside the main subroutine, "
163
+ "instead of logging them to stderr."
164
+ ),
165
+ )
166
+
167
+ isolated_mode: Callable[..., Option] = partial(
168
+ Option,
169
+ "--isolated",
170
+ dest="isolated_mode",
171
+ action="store_true",
172
+ default=False,
173
+ help=(
174
+ "Run pip in an isolated mode, ignoring environment variables and user "
175
+ "configuration."
176
+ ),
177
+ )
178
+
179
+ require_virtualenv: Callable[..., Option] = partial(
180
+ Option,
181
+ "--require-virtualenv",
182
+ "--require-venv",
183
+ dest="require_venv",
184
+ action="store_true",
185
+ default=False,
186
+ help=(
187
+ "Allow pip to only run in a virtual environment; "
188
+ "exit with an error otherwise."
189
+ ),
190
+ )
191
+
192
+ verbose: Callable[..., Option] = partial(
193
+ Option,
194
+ "-v",
195
+ "--verbose",
196
+ dest="verbose",
197
+ action="count",
198
+ default=0,
199
+ help="Give more output. Option is additive, and can be used up to 3 times.",
200
+ )
201
+
202
+ no_color: Callable[..., Option] = partial(
203
+ Option,
204
+ "--no-color",
205
+ dest="no_color",
206
+ action="store_true",
207
+ default=False,
208
+ help="Suppress colored output.",
209
+ )
210
+
211
+ version: Callable[..., Option] = partial(
212
+ Option,
213
+ "-V",
214
+ "--version",
215
+ dest="version",
216
+ action="store_true",
217
+ help="Show version and exit.",
218
+ )
219
+
220
+ quiet: Callable[..., Option] = partial(
221
+ Option,
222
+ "-q",
223
+ "--quiet",
224
+ dest="quiet",
225
+ action="count",
226
+ default=0,
227
+ help=(
228
+ "Give less output. Option is additive, and can be used up to 3"
229
+ " times (corresponding to WARNING, ERROR, and CRITICAL logging"
230
+ " levels)."
231
+ ),
232
+ )
233
+
234
+ progress_bar: Callable[..., Option] = partial(
235
+ Option,
236
+ "--progress-bar",
237
+ dest="progress_bar",
238
+ type="choice",
239
+ choices=list(BAR_TYPES.keys()),
240
+ default="on",
241
+ help=(
242
+ "Specify type of progress to be displayed ["
243
+ + "|".join(BAR_TYPES.keys())
244
+ + "] (default: %default)"
245
+ ),
246
+ )
247
+
248
+ log: Callable[..., Option] = partial(
249
+ PipOption,
250
+ "--log",
251
+ "--log-file",
252
+ "--local-log",
253
+ dest="log",
254
+ metavar="path",
255
+ type="path",
256
+ help="Path to a verbose appending log.",
257
+ )
258
+
259
+ no_input: Callable[..., Option] = partial(
260
+ Option,
261
+ # Don't ask for input
262
+ "--no-input",
263
+ dest="no_input",
264
+ action="store_true",
265
+ default=False,
266
+ help="Disable prompting for input.",
267
+ )
268
+
269
+ proxy: Callable[..., Option] = partial(
270
+ Option,
271
+ "--proxy",
272
+ dest="proxy",
273
+ type="str",
274
+ default="",
275
+ help="Specify a proxy in the form [user:passwd@]proxy.server:port.",
276
+ )
277
+
278
+ retries: Callable[..., Option] = partial(
279
+ Option,
280
+ "--retries",
281
+ dest="retries",
282
+ type="int",
283
+ default=5,
284
+ help="Maximum number of retries each connection should attempt "
285
+ "(default %default times).",
286
+ )
287
+
288
+ timeout: Callable[..., Option] = partial(
289
+ Option,
290
+ "--timeout",
291
+ "--default-timeout",
292
+ metavar="sec",
293
+ dest="timeout",
294
+ type="float",
295
+ default=15,
296
+ help="Set the socket timeout (default %default seconds).",
297
+ )
298
+
299
+
300
+ def exists_action() -> Option:
301
+ return Option(
302
+ # Option when path already exist
303
+ "--exists-action",
304
+ dest="exists_action",
305
+ type="choice",
306
+ choices=["s", "i", "w", "b", "a"],
307
+ default=[],
308
+ action="append",
309
+ metavar="action",
310
+ help="Default action when a path already exists: "
311
+ "(s)witch, (i)gnore, (w)ipe, (b)ackup, (a)bort.",
312
+ )
313
+
314
+
315
+ cert: Callable[..., Option] = partial(
316
+ PipOption,
317
+ "--cert",
318
+ dest="cert",
319
+ type="path",
320
+ metavar="path",
321
+ help=(
322
+ "Path to PEM-encoded CA certificate bundle. "
323
+ "If provided, overrides the default. "
324
+ "See 'SSL Certificate Verification' in pip documentation "
325
+ "for more information."
326
+ ),
327
+ )
328
+
329
+ client_cert: Callable[..., Option] = partial(
330
+ PipOption,
331
+ "--client-cert",
332
+ dest="client_cert",
333
+ type="path",
334
+ default=None,
335
+ metavar="path",
336
+ help="Path to SSL client certificate, a single file containing the "
337
+ "private key and the certificate in PEM format.",
338
+ )
339
+
340
+ index_url: Callable[..., Option] = partial(
341
+ Option,
342
+ "-i",
343
+ "--index-url",
344
+ "--pypi-url",
345
+ dest="index_url",
346
+ metavar="URL",
347
+ default=PyPI.simple_url,
348
+ help="Base URL of the Python Package Index (default %default). "
349
+ "This should point to a repository compliant with PEP 503 "
350
+ "(the simple repository API) or a local directory laid out "
351
+ "in the same format.",
352
+ )
353
+
354
+
355
+ def extra_index_url() -> Option:
356
+ return Option(
357
+ "--extra-index-url",
358
+ dest="extra_index_urls",
359
+ metavar="URL",
360
+ action="append",
361
+ default=[],
362
+ help="Extra URLs of package indexes to use in addition to "
363
+ "--index-url. Should follow the same rules as "
364
+ "--index-url.",
365
+ )
366
+
367
+
368
+ no_index: Callable[..., Option] = partial(
369
+ Option,
370
+ "--no-index",
371
+ dest="no_index",
372
+ action="store_true",
373
+ default=False,
374
+ help="Ignore package index (only looking at --find-links URLs instead).",
375
+ )
376
+
377
+
378
+ def find_links() -> Option:
379
+ return Option(
380
+ "-f",
381
+ "--find-links",
382
+ dest="find_links",
383
+ action="append",
384
+ default=[],
385
+ metavar="url",
386
+ help="If a URL or path to an html file, then parse for links to "
387
+ "archives such as sdist (.tar.gz) or wheel (.whl) files. "
388
+ "If a local path or file:// URL that's a directory, "
389
+ "then look for archives in the directory listing. "
390
+ "Links to VCS project URLs are not supported.",
391
+ )
392
+
393
+
394
+ def trusted_host() -> Option:
395
+ return Option(
396
+ "--trusted-host",
397
+ dest="trusted_hosts",
398
+ action="append",
399
+ metavar="HOSTNAME",
400
+ default=[],
401
+ help="Mark this host or host:port pair as trusted, even though it "
402
+ "does not have valid or any HTTPS.",
403
+ )
404
+
405
+
406
+ def constraints() -> Option:
407
+ return Option(
408
+ "-c",
409
+ "--constraint",
410
+ dest="constraints",
411
+ action="append",
412
+ default=[],
413
+ metavar="file",
414
+ help="Constrain versions using the given constraints file. "
415
+ "This option can be used multiple times.",
416
+ )
417
+
418
+
419
+ def requirements() -> Option:
420
+ return Option(
421
+ "-r",
422
+ "--requirement",
423
+ dest="requirements",
424
+ action="append",
425
+ default=[],
426
+ metavar="file",
427
+ help="Install from the given requirements file. "
428
+ "This option can be used multiple times.",
429
+ )
430
+
431
+
432
+ def editable() -> Option:
433
+ return Option(
434
+ "-e",
435
+ "--editable",
436
+ dest="editables",
437
+ action="append",
438
+ default=[],
439
+ metavar="path/url",
440
+ help=(
441
+ "Install a project in editable mode (i.e. setuptools "
442
+ '"develop mode") from a local project path or a VCS url.'
443
+ ),
444
+ )
445
+
446
+
447
+ def _handle_src(option: Option, opt_str: str, value: str, parser: OptionParser) -> None:
448
+ value = os.path.abspath(value)
449
+ setattr(parser.values, option.dest, value)
450
+
451
+
452
+ src: Callable[..., Option] = partial(
453
+ PipOption,
454
+ "--src",
455
+ "--source",
456
+ "--source-dir",
457
+ "--source-directory",
458
+ dest="src_dir",
459
+ type="path",
460
+ metavar="dir",
461
+ default=get_src_prefix(),
462
+ action="callback",
463
+ callback=_handle_src,
464
+ help="Directory to check out editable projects into. "
465
+ 'The default in a virtualenv is "<venv path>/src". '
466
+ 'The default for global installs is "<current dir>/src".',
467
+ )
468
+
469
+
470
+ def _get_format_control(values: Values, option: Option) -> Any:
471
+ """Get a format_control object."""
472
+ return getattr(values, option.dest)
473
+
474
+
475
+ def _handle_no_binary(
476
+ option: Option, opt_str: str, value: str, parser: OptionParser
477
+ ) -> None:
478
+ existing = _get_format_control(parser.values, option)
479
+ FormatControl.handle_mutual_excludes(
480
+ value,
481
+ existing.no_binary,
482
+ existing.only_binary,
483
+ )
484
+
485
+
486
+ def _handle_only_binary(
487
+ option: Option, opt_str: str, value: str, parser: OptionParser
488
+ ) -> None:
489
+ existing = _get_format_control(parser.values, option)
490
+ FormatControl.handle_mutual_excludes(
491
+ value,
492
+ existing.only_binary,
493
+ existing.no_binary,
494
+ )
495
+
496
+
497
+ def no_binary() -> Option:
498
+ format_control = FormatControl(set(), set())
499
+ return Option(
500
+ "--no-binary",
501
+ dest="format_control",
502
+ action="callback",
503
+ callback=_handle_no_binary,
504
+ type="str",
505
+ default=format_control,
506
+ help="Do not use binary packages. Can be supplied multiple times, and "
507
+ 'each time adds to the existing value. Accepts either ":all:" to '
508
+ 'disable all binary packages, ":none:" to empty the set (notice '
509
+ "the colons), or one or more package names with commas between "
510
+ "them (no colons). Note that some packages are tricky to compile "
511
+ "and may fail to install when this option is used on them.",
512
+ )
513
+
514
+
515
+ def only_binary() -> Option:
516
+ format_control = FormatControl(set(), set())
517
+ return Option(
518
+ "--only-binary",
519
+ dest="format_control",
520
+ action="callback",
521
+ callback=_handle_only_binary,
522
+ type="str",
523
+ default=format_control,
524
+ help="Do not use source packages. Can be supplied multiple times, and "
525
+ 'each time adds to the existing value. Accepts either ":all:" to '
526
+ 'disable all source packages, ":none:" to empty the set, or one '
527
+ "or more package names with commas between them. Packages "
528
+ "without binary distributions will fail to install when this "
529
+ "option is used on them.",
530
+ )
531
+
532
+
533
+ platforms: Callable[..., Option] = partial(
534
+ Option,
535
+ "--platform",
536
+ dest="platforms",
537
+ metavar="platform",
538
+ action="append",
539
+ default=None,
540
+ help=(
541
+ "Only use wheels compatible with <platform>. Defaults to the "
542
+ "platform of the running system. Use this option multiple times to "
543
+ "specify multiple platforms supported by the target interpreter."
544
+ ),
545
+ )
546
+
547
+
548
+ # This was made a separate function for unit-testing purposes.
549
+ def _convert_python_version(value: str) -> Tuple[Tuple[int, ...], Optional[str]]:
550
+ """
551
+ Convert a version string like "3", "37", or "3.7.3" into a tuple of ints.
552
+
553
+ :return: A 2-tuple (version_info, error_msg), where `error_msg` is
554
+ non-None if and only if there was a parsing error.
555
+ """
556
+ if not value:
557
+ # The empty string is the same as not providing a value.
558
+ return (None, None)
559
+
560
+ parts = value.split(".")
561
+ if len(parts) > 3:
562
+ return ((), "at most three version parts are allowed")
563
+
564
+ if len(parts) == 1:
565
+ # Then we are in the case of "3" or "37".
566
+ value = parts[0]
567
+ if len(value) > 1:
568
+ parts = [value[0], value[1:]]
569
+
570
+ try:
571
+ version_info = tuple(int(part) for part in parts)
572
+ except ValueError:
573
+ return ((), "each version part must be an integer")
574
+
575
+ return (version_info, None)
576
+
577
+
578
+ def _handle_python_version(
579
+ option: Option, opt_str: str, value: str, parser: OptionParser
580
+ ) -> None:
581
+ """
582
+ Handle a provided --python-version value.
583
+ """
584
+ version_info, error_msg = _convert_python_version(value)
585
+ if error_msg is not None:
586
+ msg = "invalid --python-version value: {!r}: {}".format(
587
+ value,
588
+ error_msg,
589
+ )
590
+ raise_option_error(parser, option=option, msg=msg)
591
+
592
+ parser.values.python_version = version_info
593
+
594
+
595
+ python_version: Callable[..., Option] = partial(
596
+ Option,
597
+ "--python-version",
598
+ dest="python_version",
599
+ metavar="python_version",
600
+ action="callback",
601
+ callback=_handle_python_version,
602
+ type="str",
603
+ default=None,
604
+ help=dedent(
605
+ """\
606
+ The Python interpreter version to use for wheel and "Requires-Python"
607
+ compatibility checks. Defaults to a version derived from the running
608
+ interpreter. The version can be specified using up to three dot-separated
609
+ integers (e.g. "3" for 3.0.0, "3.7" for 3.7.0, or "3.7.3"). A major-minor
610
+ version can also be given as a string without dots (e.g. "37" for 3.7.0).
611
+ """
612
+ ),
613
+ )
614
+
615
+
616
+ implementation: Callable[..., Option] = partial(
617
+ Option,
618
+ "--implementation",
619
+ dest="implementation",
620
+ metavar="implementation",
621
+ default=None,
622
+ help=(
623
+ "Only use wheels compatible with Python "
624
+ "implementation <implementation>, e.g. 'pp', 'jy', 'cp', "
625
+ " or 'ip'. If not specified, then the current "
626
+ "interpreter implementation is used. Use 'py' to force "
627
+ "implementation-agnostic wheels."
628
+ ),
629
+ )
630
+
631
+
632
+ abis: Callable[..., Option] = partial(
633
+ Option,
634
+ "--abi",
635
+ dest="abis",
636
+ metavar="abi",
637
+ action="append",
638
+ default=None,
639
+ help=(
640
+ "Only use wheels compatible with Python abi <abi>, e.g. 'pypy_41'. "
641
+ "If not specified, then the current interpreter abi tag is used. "
642
+ "Use this option multiple times to specify multiple abis supported "
643
+ "by the target interpreter. Generally you will need to specify "
644
+ "--implementation, --platform, and --python-version when using this "
645
+ "option."
646
+ ),
647
+ )
648
+
649
+
650
+ def add_target_python_options(cmd_opts: OptionGroup) -> None:
651
+ cmd_opts.add_option(platforms())
652
+ cmd_opts.add_option(python_version())
653
+ cmd_opts.add_option(implementation())
654
+ cmd_opts.add_option(abis())
655
+
656
+
657
+ def make_target_python(options: Values) -> TargetPython:
658
+ target_python = TargetPython(
659
+ platforms=options.platforms,
660
+ py_version_info=options.python_version,
661
+ abis=options.abis,
662
+ implementation=options.implementation,
663
+ )
664
+
665
+ return target_python
666
+
667
+
668
+ def prefer_binary() -> Option:
669
+ return Option(
670
+ "--prefer-binary",
671
+ dest="prefer_binary",
672
+ action="store_true",
673
+ default=False,
674
+ help="Prefer older binary packages over newer source packages.",
675
+ )
676
+
677
+
678
+ cache_dir: Callable[..., Option] = partial(
679
+ PipOption,
680
+ "--cache-dir",
681
+ dest="cache_dir",
682
+ default=USER_CACHE_DIR,
683
+ metavar="dir",
684
+ type="path",
685
+ help="Store the cache data in <dir>.",
686
+ )
687
+
688
+
689
+ def _handle_no_cache_dir(
690
+ option: Option, opt: str, value: str, parser: OptionParser
691
+ ) -> None:
692
+ """
693
+ Process a value provided for the --no-cache-dir option.
694
+
695
+ This is an optparse.Option callback for the --no-cache-dir option.
696
+ """
697
+ # The value argument will be None if --no-cache-dir is passed via the
698
+ # command-line, since the option doesn't accept arguments. However,
699
+ # the value can be non-None if the option is triggered e.g. by an
700
+ # environment variable, like PIP_NO_CACHE_DIR=true.
701
+ if value is not None:
702
+ # Then parse the string value to get argument error-checking.
703
+ try:
704
+ strtobool(value)
705
+ except ValueError as exc:
706
+ raise_option_error(parser, option=option, msg=str(exc))
707
+
708
+ # Originally, setting PIP_NO_CACHE_DIR to a value that strtobool()
709
+ # converted to 0 (like "false" or "no") caused cache_dir to be disabled
710
+ # rather than enabled (logic would say the latter). Thus, we disable
711
+ # the cache directory not just on values that parse to True, but (for
712
+ # backwards compatibility reasons) also on values that parse to False.
713
+ # In other words, always set it to False if the option is provided in
714
+ # some (valid) form.
715
+ parser.values.cache_dir = False
716
+
717
+
718
+ no_cache: Callable[..., Option] = partial(
719
+ Option,
720
+ "--no-cache-dir",
721
+ dest="cache_dir",
722
+ action="callback",
723
+ callback=_handle_no_cache_dir,
724
+ help="Disable the cache.",
725
+ )
726
+
727
+ no_deps: Callable[..., Option] = partial(
728
+ Option,
729
+ "--no-deps",
730
+ "--no-dependencies",
731
+ dest="ignore_dependencies",
732
+ action="store_true",
733
+ default=False,
734
+ help="Don't install package dependencies.",
735
+ )
736
+
737
+ ignore_requires_python: Callable[..., Option] = partial(
738
+ Option,
739
+ "--ignore-requires-python",
740
+ dest="ignore_requires_python",
741
+ action="store_true",
742
+ help="Ignore the Requires-Python information.",
743
+ )
744
+
745
+ no_build_isolation: Callable[..., Option] = partial(
746
+ Option,
747
+ "--no-build-isolation",
748
+ dest="build_isolation",
749
+ action="store_false",
750
+ default=True,
751
+ help="Disable isolation when building a modern source distribution. "
752
+ "Build dependencies specified by PEP 518 must be already installed "
753
+ "if this option is used.",
754
+ )
755
+
756
+
757
+ def _handle_no_use_pep517(
758
+ option: Option, opt: str, value: str, parser: OptionParser
759
+ ) -> None:
760
+ """
761
+ Process a value provided for the --no-use-pep517 option.
762
+
763
+ This is an optparse.Option callback for the no_use_pep517 option.
764
+ """
765
+ # Since --no-use-pep517 doesn't accept arguments, the value argument
766
+ # will be None if --no-use-pep517 is passed via the command-line.
767
+ # However, the value can be non-None if the option is triggered e.g.
768
+ # by an environment variable, for example "PIP_NO_USE_PEP517=true".
769
+ if value is not None:
770
+ msg = """A value was passed for --no-use-pep517,
771
+ probably using either the PIP_NO_USE_PEP517 environment variable
772
+ or the "no-use-pep517" config file option. Use an appropriate value
773
+ of the PIP_USE_PEP517 environment variable or the "use-pep517"
774
+ config file option instead.
775
+ """
776
+ raise_option_error(parser, option=option, msg=msg)
777
+
778
+ # Otherwise, --no-use-pep517 was passed via the command-line.
779
+ parser.values.use_pep517 = False
780
+
781
+
782
+ use_pep517: Any = partial(
783
+ Option,
784
+ "--use-pep517",
785
+ dest="use_pep517",
786
+ action="store_true",
787
+ default=None,
788
+ help="Use PEP 517 for building source distributions "
789
+ "(use --no-use-pep517 to force legacy behaviour).",
790
+ )
791
+
792
+ no_use_pep517: Any = partial(
793
+ Option,
794
+ "--no-use-pep517",
795
+ dest="use_pep517",
796
+ action="callback",
797
+ callback=_handle_no_use_pep517,
798
+ default=None,
799
+ help=SUPPRESS_HELP,
800
+ )
801
+
802
+ install_options: Callable[..., Option] = partial(
803
+ Option,
804
+ "--install-option",
805
+ dest="install_options",
806
+ action="append",
807
+ metavar="options",
808
+ help="Extra arguments to be supplied to the setup.py install "
809
+ 'command (use like --install-option="--install-scripts=/usr/local/'
810
+ 'bin"). Use multiple --install-option options to pass multiple '
811
+ "options to setup.py install. If you are using an option with a "
812
+ "directory path, be sure to use absolute path.",
813
+ )
814
+
815
+ build_options: Callable[..., Option] = partial(
816
+ Option,
817
+ "--build-option",
818
+ dest="build_options",
819
+ metavar="options",
820
+ action="append",
821
+ help="Extra arguments to be supplied to 'setup.py bdist_wheel'.",
822
+ )
823
+
824
+ global_options: Callable[..., Option] = partial(
825
+ Option,
826
+ "--global-option",
827
+ dest="global_options",
828
+ action="append",
829
+ metavar="options",
830
+ help="Extra global options to be supplied to the setup.py "
831
+ "call before the install or bdist_wheel command.",
832
+ )
833
+
834
+ no_clean: Callable[..., Option] = partial(
835
+ Option,
836
+ "--no-clean",
837
+ action="store_true",
838
+ default=False,
839
+ help="Don't clean up build directories.",
840
+ )
841
+
842
+ pre: Callable[..., Option] = partial(
843
+ Option,
844
+ "--pre",
845
+ action="store_true",
846
+ default=False,
847
+ help="Include pre-release and development versions. By default, "
848
+ "pip only finds stable versions.",
849
+ )
850
+
851
+ disable_pip_version_check: Callable[..., Option] = partial(
852
+ Option,
853
+ "--disable-pip-version-check",
854
+ dest="disable_pip_version_check",
855
+ action="store_true",
856
+ default=True,
857
+ help="Don't periodically check PyPI to determine whether a new version "
858
+ "of pip is available for download. Implied with --no-index.",
859
+ )
860
+
861
+
862
+ def _handle_merge_hash(
863
+ option: Option, opt_str: str, value: str, parser: OptionParser
864
+ ) -> None:
865
+ """Given a value spelled "algo:digest", append the digest to a list
866
+ pointed to in a dict by the algo name."""
867
+ if not parser.values.hashes:
868
+ parser.values.hashes = {}
869
+ try:
870
+ algo, digest = value.split(":", 1)
871
+ except ValueError:
872
+ parser.error(
873
+ "Arguments to {} must be a hash name " # noqa
874
+ "followed by a value, like --hash=sha256:"
875
+ "abcde...".format(opt_str)
876
+ )
877
+ if algo not in STRONG_HASHES:
878
+ parser.error(
879
+ "Allowed hash algorithms for {} are {}.".format( # noqa
880
+ opt_str, ", ".join(STRONG_HASHES)
881
+ )
882
+ )
883
+ parser.values.hashes.setdefault(algo, []).append(digest)
884
+
885
+
886
+ hash: Callable[..., Option] = partial(
887
+ Option,
888
+ "--hash",
889
+ # Hash values eventually end up in InstallRequirement.hashes due to
890
+ # __dict__ copying in process_line().
891
+ dest="hashes",
892
+ action="callback",
893
+ callback=_handle_merge_hash,
894
+ type="string",
895
+ help="Verify that the package's archive matches this "
896
+ "hash before installing. Example: --hash=sha256:abcdef...",
897
+ )
898
+
899
+
900
+ require_hashes: Callable[..., Option] = partial(
901
+ Option,
902
+ "--require-hashes",
903
+ dest="require_hashes",
904
+ action="store_true",
905
+ default=False,
906
+ help="Require a hash to check each requirement against, for "
907
+ "repeatable installs. This option is implied when any package in a "
908
+ "requirements file has a --hash option.",
909
+ )
910
+
911
+
912
+ list_path: Callable[..., Option] = partial(
913
+ PipOption,
914
+ "--path",
915
+ dest="path",
916
+ type="path",
917
+ action="append",
918
+ help="Restrict to the specified installation path for listing "
919
+ "packages (can be used multiple times).",
920
+ )
921
+
922
+
923
+ def check_list_path_option(options: Values) -> None:
924
+ if options.path and (options.user or options.local):
925
+ raise CommandError("Cannot combine '--path' with '--user' or '--local'")
926
+
927
+
928
+ list_exclude: Callable[..., Option] = partial(
929
+ PipOption,
930
+ "--exclude",
931
+ dest="excludes",
932
+ action="append",
933
+ metavar="package",
934
+ type="package_name",
935
+ help="Exclude specified package from the output",
936
+ )
937
+
938
+
939
+ no_python_version_warning: Callable[..., Option] = partial(
940
+ Option,
941
+ "--no-python-version-warning",
942
+ dest="no_python_version_warning",
943
+ action="store_true",
944
+ default=False,
945
+ help="Silence deprecation warnings for upcoming unsupported Pythons.",
946
+ )
947
+
948
+
949
+ use_new_feature: Callable[..., Option] = partial(
950
+ Option,
951
+ "--use-feature",
952
+ dest="features_enabled",
953
+ metavar="feature",
954
+ action="append",
955
+ default=[],
956
+ choices=["2020-resolver", "fast-deps", "in-tree-build"],
957
+ help="Enable new functionality, that may be backward incompatible.",
958
+ )
959
+
960
+ use_deprecated_feature: Callable[..., Option] = partial(
961
+ Option,
962
+ "--use-deprecated",
963
+ dest="deprecated_features_enabled",
964
+ metavar="feature",
965
+ action="append",
966
+ default=[],
967
+ choices=[
968
+ "legacy-resolver",
969
+ "out-of-tree-build",
970
+ "backtrack-on-build-failures",
971
+ "html5lib",
972
+ ],
973
+ help=("Enable deprecated functionality, that will be removed in the future."),
974
+ )
975
+
976
+
977
+ ##########
978
+ # groups #
979
+ ##########
980
+
981
+ general_group: Dict[str, Any] = {
982
+ "name": "General Options",
983
+ "options": [
984
+ help_,
985
+ debug_mode,
986
+ isolated_mode,
987
+ require_virtualenv,
988
+ verbose,
989
+ version,
990
+ quiet,
991
+ log,
992
+ no_input,
993
+ proxy,
994
+ retries,
995
+ timeout,
996
+ exists_action,
997
+ trusted_host,
998
+ cert,
999
+ client_cert,
1000
+ cache_dir,
1001
+ no_cache,
1002
+ disable_pip_version_check,
1003
+ no_color,
1004
+ no_python_version_warning,
1005
+ use_new_feature,
1006
+ use_deprecated_feature,
1007
+ ],
1008
+ }
1009
+
1010
+ index_group: Dict[str, Any] = {
1011
+ "name": "Package Index Options",
1012
+ "options": [
1013
+ index_url,
1014
+ extra_index_url,
1015
+ no_index,
1016
+ find_links,
1017
+ ],
1018
+ }
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/main_parser.py ADDED
@@ -0,0 +1,87 @@
1
+ """A single place for constructing and exposing the main parser
2
+ """
3
+
4
+ import os
5
+ import sys
6
+ from typing import List, Tuple
7
+
8
+ from pip._internal.cli import cmdoptions
9
+ from pip._internal.cli.parser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
10
+ from pip._internal.commands import commands_dict, get_similar_commands
11
+ from pip._internal.exceptions import CommandError
12
+ from pip._internal.utils.misc import get_pip_version, get_prog
13
+
14
+ __all__ = ["create_main_parser", "parse_command"]
15
+
16
+
17
+ def create_main_parser() -> ConfigOptionParser:
18
+ """Creates and returns the main parser for pip's CLI"""
19
+
20
+ parser = ConfigOptionParser(
21
+ usage="\n%prog <command> [options]",
22
+ add_help_option=False,
23
+ formatter=UpdatingDefaultsHelpFormatter(),
24
+ name="global",
25
+ prog=get_prog(),
26
+ )
27
+ parser.disable_interspersed_args()
28
+
29
+ parser.version = get_pip_version()
30
+
31
+ # add the general options
32
+ gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
33
+ parser.add_option_group(gen_opts)
34
+
35
+ # so the help formatter knows
36
+ parser.main = True # type: ignore
37
+
38
+ # create command listing for description
39
+ description = [""] + [
40
+ f"{name:27} {command_info.summary}"
41
+ for name, command_info in commands_dict.items()
42
+ ]
43
+ parser.description = "\n".join(description)
44
+
45
+ return parser
46
+
47
+
48
+ def parse_command(args: List[str]) -> Tuple[str, List[str]]:
49
+ parser = create_main_parser()
50
+
51
+ # Note: parser calls disable_interspersed_args(), so the result of this
52
+ # call is to split the initial args into the general options before the
53
+ # subcommand and everything else.
54
+ # For example:
55
+ # args: ['--timeout=5', 'install', '--user', 'INITools']
56
+ # general_options: ['--timeout=5']
57
+ # args_else: ['install', '--user', 'INITools']
58
+ general_options, args_else = parser.parse_args(args)
59
+
60
+ # --version
61
+ if general_options.version:
62
+ sys.stdout.write(parser.version)
63
+ sys.stdout.write(os.linesep)
64
+ sys.exit()
65
+
66
+ # pip || pip help -> print_help()
67
+ if not args_else or (args_else[0] == "help" and len(args_else) == 1):
68
+ parser.print_help()
69
+ sys.exit()
70
+
71
+ # the subcommand name
72
+ cmd_name = args_else[0]
73
+
74
+ if cmd_name not in commands_dict:
75
+ guess = get_similar_commands(cmd_name)
76
+
77
+ msg = [f'unknown command "{cmd_name}"']
78
+ if guess:
79
+ msg.append(f'maybe you meant "{guess}"')
80
+
81
+ raise CommandError(" - ".join(msg))
82
+
83
+ # all the args without the subcommand
84
+ cmd_args = args[:]
85
+ cmd_args.remove(cmd_name)
86
+
87
+ return cmd_name, cmd_args
env-llmeval/lib/python3.10/site-packages/pip/_internal/cli/progress_bars.py ADDED
@@ -0,0 +1,321 @@
1
+ import functools
2
+ import itertools
3
+ import sys
4
+ from signal import SIGINT, default_int_handler, signal
5
+ from typing import Any, Callable, Iterator, Optional, Tuple
6
+
7
+ from pip._vendor.progress.bar import Bar, FillingCirclesBar, IncrementalBar
8
+ from pip._vendor.progress.spinner import Spinner
9
+ from pip._vendor.rich.progress import (
10
+ BarColumn,
11
+ DownloadColumn,
12
+ FileSizeColumn,
13
+ Progress,
14
+ ProgressColumn,
15
+ SpinnerColumn,
16
+ TextColumn,
17
+ TimeElapsedColumn,
18
+ TimeRemainingColumn,
19
+ TransferSpeedColumn,
20
+ )
21
+
22
+ from pip._internal.utils.compat import WINDOWS
23
+ from pip._internal.utils.logging import get_indentation
24
+ from pip._internal.utils.misc import format_size
25
+
26
+ try:
27
+ from pip._vendor import colorama
28
+ # Lots of different errors can come from this, including SystemError and
29
+ # ImportError.
30
+ except Exception:
31
+ colorama = None
32
+
33
+ DownloadProgressRenderer = Callable[[Iterator[bytes]], Iterator[bytes]]
34
+
35
+
36
+ def _select_progress_class(preferred: Bar, fallback: Bar) -> Bar:
37
+ encoding = getattr(preferred.file, "encoding", None)
38
+
39
+ # If we don't know what encoding this file is in, then we'll just assume
40
+ # that it doesn't support unicode and use the ASCII bar.
41
+ if not encoding:
42
+ return fallback
43
+
44
+ # Collect all of the possible characters we want to use with the preferred
45
+ # bar.
46
+ characters = [
47
+ getattr(preferred, "empty_fill", ""),
48
+ getattr(preferred, "fill", ""),
49
+ ]
50
+ characters += list(getattr(preferred, "phases", []))
51
+
52
+ # Try to decode the characters we're using for the bar using the encoding
53
+ # of the given file, if this works then we'll assume that we can use the
54
+ # fancier bar and if not we'll fall back to the plaintext bar.
55
+ try:
56
+ "".join(characters).encode(encoding)
57
+ except UnicodeEncodeError:
58
+ return fallback
59
+ else:
60
+ return preferred
61
+
62
+
63
+ _BaseBar: Any = _select_progress_class(IncrementalBar, Bar)
64
+
65
+
66
+ class InterruptibleMixin:
67
+ """
68
+ Helper to ensure that self.finish() gets called on keyboard interrupt.
69
+
70
+ This allows downloads to be interrupted without leaving temporary state
71
+ (like hidden cursors) behind.
72
+
73
+ This class is similar to the progress library's existing SigIntMixin
74
+ helper, but as of version 1.2, that helper has the following problems:
75
+
76
+ 1. It calls sys.exit().
77
+ 2. It discards the existing SIGINT handler completely.
78
+ 3. It leaves its own handler in place even after an uninterrupted finish,
79
+ which will have unexpected delayed effects if the user triggers an
80
+ unrelated keyboard interrupt some time after a progress-displaying
81
+ download has already completed, for example.
82
+ """
83
+
84
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
85
+ """
86
+ Save the original SIGINT handler for later.
87
+ """
88
+ # https://github.com/python/mypy/issues/5887
89
+ super().__init__(*args, **kwargs) # type: ignore
90
+
91
+ self.original_handler = signal(SIGINT, self.handle_sigint)
92
+
93
+ # If signal() returns None, the previous handler was not installed from
94
+ # Python, and we cannot restore it. This probably should not happen,
95
+ # but if it does, we must restore something sensible instead, at least.
96
+ # The least bad option should be Python's default SIGINT handler, which
97
+ # just raises KeyboardInterrupt.
98
+ if self.original_handler is None:
99
+ self.original_handler = default_int_handler
100
+
101
+ def finish(self) -> None:
102
+ """
103
+ Restore the original SIGINT handler after finishing.
104
+
105
+ This should happen regardless of whether the progress display finishes
106
+ normally, or gets interrupted.
107
+ """
108
+ super().finish() # type: ignore
109
+ signal(SIGINT, self.original_handler)
110
+
111
+ def handle_sigint(self, signum, frame): # type: ignore
112
+ """
113
+ Call self.finish() before delegating to the original SIGINT handler.
114
+
115
+ This handler should only be in place while the progress display is
116
+ active.
117
+ """
118
+ self.finish()
119
+ self.original_handler(signum, frame)
120
+
121
+
122
+ class SilentBar(Bar):
123
+ def update(self) -> None:
124
+ pass
125
+
126
+
127
+ class BlueEmojiBar(IncrementalBar):
128
+
129
+ suffix = "%(percent)d%%"
130
+ bar_prefix = " "
131
+ bar_suffix = " "
132
+ phases = ("\U0001F539", "\U0001F537", "\U0001F535")
133
+
134
+
135
+ class DownloadProgressMixin:
136
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
137
+ # https://github.com/python/mypy/issues/5887
138
+ super().__init__(*args, **kwargs) # type: ignore
139
+ self.message: str = (" " * (get_indentation() + 2)) + self.message
140
+
141
+ @property
142
+ def downloaded(self) -> str:
143
+ return format_size(self.index) # type: ignore
144
+
145
+ @property
146
+ def download_speed(self) -> str:
147
+ # Avoid zero division errors...
148
+ if self.avg == 0.0: # type: ignore
149
+ return "..."
150
+ return format_size(1 / self.avg) + "/s" # type: ignore
151
+
152
+ @property
153
+ def pretty_eta(self) -> str:
154
+ if self.eta: # type: ignore
155
+ return f"eta {self.eta_td}" # type: ignore
156
+ return ""
157
+
158
+ def iter(self, it): # type: ignore
159
+ for x in it:
160
+ yield x
161
+ # B305 is incorrectly raised here
162
+ # https://github.com/PyCQA/flake8-bugbear/issues/59
163
+ self.next(len(x)) # noqa: B305
164
+ self.finish()
165
+
166
+
167
+ class WindowsMixin:
168
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
169
+ # The Windows terminal does not support the hide/show cursor ANSI codes
170
+ # even with colorama. So we'll ensure that hide_cursor is False on
171
+ # Windows.
172
+ # This call needs to go before the super() call, so that hide_cursor
173
+ # is set in time. The base progress bar class writes the "hide cursor"
174
+ # code to the terminal in its init, so if we don't set this soon
175
+ # enough, we get a "hide" with no corresponding "show"...
176
+ if WINDOWS and self.hide_cursor: # type: ignore
177
+ self.hide_cursor = False
178
+
179
+ # https://github.com/python/mypy/issues/5887
180
+ super().__init__(*args, **kwargs) # type: ignore
181
+
182
+ # Check if we are running on Windows and we have the colorama module,
183
+ # if we do then wrap our file with it.
184
+ if WINDOWS and colorama:
185
+ self.file = colorama.AnsiToWin32(self.file) # type: ignore
186
+ # The progress code expects to be able to call self.file.isatty()
187
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
188
+ # add it.
189
+ self.file.isatty = lambda: self.file.wrapped.isatty()
190
+ # The progress code expects to be able to call self.file.flush()
191
+ # but the colorama.AnsiToWin32() object doesn't have that, so we'll
192
+ # add it.
193
+ self.file.flush = lambda: self.file.wrapped.flush()
194
+
195
+
196
+ class BaseDownloadProgressBar(WindowsMixin, InterruptibleMixin, DownloadProgressMixin):
197
+
198
+ file = sys.stdout
199
+ message = "%(percent)d%%"
200
+ suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s"
201
+
202
+
203
+ class DefaultDownloadProgressBar(BaseDownloadProgressBar, _BaseBar):
204
+ pass
205
+
206
+
207
+ class DownloadSilentBar(BaseDownloadProgressBar, SilentBar):
208
+ pass
209
+
210
+
211
+ class DownloadBar(BaseDownloadProgressBar, Bar):
212
+ pass
213
+
214
+
215
+ class DownloadFillingCirclesBar(BaseDownloadProgressBar, FillingCirclesBar):
216
+ pass
217
+
218
+
219
+ class DownloadBlueEmojiProgressBar(BaseDownloadProgressBar, BlueEmojiBar):
220
+ pass
221
+
222
+
223
+ class DownloadProgressSpinner(
224
+ WindowsMixin, InterruptibleMixin, DownloadProgressMixin, Spinner
225
+ ):
226
+
227
+ file = sys.stdout
228
+ suffix = "%(downloaded)s %(download_speed)s"
229
+
230
+ def next_phase(self) -> str:
231
+ if not hasattr(self, "_phaser"):
232
+ self._phaser = itertools.cycle(self.phases)
233
+ return next(self._phaser)
234
+
235
+ def update(self) -> None:
236
+ message = self.message % self
237
+ phase = self.next_phase()
238
+ suffix = self.suffix % self
239
+ line = "".join(
240
+ [
241
+ message,
242
+ " " if message else "",
243
+ phase,
244
+ " " if suffix else "",
245
+ suffix,
246
+ ]
247
+ )
248
+
249
+ self.writeln(line)
250
+
251
+
252
+ BAR_TYPES = {
253
+ "off": (DownloadSilentBar, DownloadSilentBar),
254
+ "on": (DefaultDownloadProgressBar, DownloadProgressSpinner),
255
+ "ascii": (DownloadBar, DownloadProgressSpinner),
256
+ "pretty": (DownloadFillingCirclesBar, DownloadProgressSpinner),
257
+ "emoji": (DownloadBlueEmojiProgressBar, DownloadProgressSpinner),
258
+ }
259
+
260
+
261
+ def _legacy_progress_bar(
262
+ progress_bar: str, max: Optional[int]
263
+ ) -> DownloadProgressRenderer:
264
+ if max is None or max == 0:
265
+ return BAR_TYPES[progress_bar][1]().iter # type: ignore
266
+ else:
267
+ return BAR_TYPES[progress_bar][0](max=max).iter
268
+
269
+
270
+ #
271
+ # Modern replacement, for our legacy progress bars.
272
+ #
273
+ def _rich_progress_bar(
274
+ iterable: Iterator[bytes],
275
+ *,
276
+ bar_type: str,
277
+ size: int,
278
+ ) -> Iterator[bytes]:
279
+ assert bar_type == "on", "This should only be used in the default mode."
280
+
281
+ if not size:
282
+ total = float("inf")
283
+ columns: Tuple[ProgressColumn, ...] = (
284
+ TextColumn("[progress.description]{task.description}"),
285
+ SpinnerColumn("line", speed=1.5),
286
+ FileSizeColumn(),
287
+ TransferSpeedColumn(),
288
+ TimeElapsedColumn(),
289
+ )
290
+ else:
291
+ total = size
292
+ columns = (
293
+ TextColumn("[progress.description]{task.description}"),
294
+ BarColumn(),
295
+ DownloadColumn(),
296
+ TransferSpeedColumn(),
297
+ TextColumn("eta"),
298
+ TimeRemainingColumn(),
299
+ )
300
+
301
+ progress = Progress(*columns, refresh_per_second=30)
302
+ task_id = progress.add_task(" " * (get_indentation() + 2), total=total)
303
+ with progress:
304
+ for chunk in iterable:
305
+ yield chunk
306
+ progress.update(task_id, advance=len(chunk))
307
+
308
+
309
+ def get_download_progress_renderer(
310
+ *, bar_type: str, size: Optional[int] = None
311
+ ) -> DownloadProgressRenderer:
312
+ """Get an object that can be used to render the download progress.
313
+
314
+ Returns a callable, that takes an iterable to "wrap".
315
+ """
316
+ if bar_type == "on":
317
+ return functools.partial(_rich_progress_bar, bar_type=bar_type, size=size)
318
+ elif bar_type == "off":
319
+ return iter # no-op, when passed an iterator
320
+ else:
321
+ return _legacy_progress_bar(bar_type, size)
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__init__.py ADDED
@@ -0,0 +1,21 @@
1
+ from pip._internal.distributions.base import AbstractDistribution
2
+ from pip._internal.distributions.sdist import SourceDistribution
3
+ from pip._internal.distributions.wheel import WheelDistribution
4
+ from pip._internal.req.req_install import InstallRequirement
5
+
6
+
7
+ def make_distribution_for_install_requirement(
8
+ install_req: InstallRequirement,
9
+ ) -> AbstractDistribution:
10
+ """Returns a Distribution for the given InstallRequirement"""
11
+ # Editable requirements will always be source distributions. They use the
12
+ # legacy logic until we create a modern standard for them.
13
+ if install_req.editable:
14
+ return SourceDistribution(install_req)
15
+
16
+ # If it's a wheel, it's a WheelDistribution
17
+ if install_req.is_wheel:
18
+ return WheelDistribution(install_req)
19
+
20
+ # Otherwise, a SourceDistribution
21
+ return SourceDistribution(install_req)
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (792 Bytes). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/base.cpython-310.pyc ADDED
Binary file (1.85 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/installed.cpython-310.pyc ADDED
Binary file (1.23 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/sdist.cpython-310.pyc ADDED
Binary file (4.44 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/__pycache__/wheel.cpython-310.pyc ADDED
Binary file (1.59 kB). View file
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/base.py ADDED
@@ -0,0 +1,36 @@
+import abc
+
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata.base import BaseDistribution
+from pip._internal.req import InstallRequirement
+
+
+class AbstractDistribution(metaclass=abc.ABCMeta):
+    """A base class for handling installable artifacts.
+
+    The requirements for anything installable are as follows:
+
+    - we must be able to determine the requirement name
+      (or we can't correctly handle the non-upgrade case).
+
+    - for packages with setup requirements, we must also be able
+      to determine their requirements without installing additional
+      packages (for the same reason as run-time dependencies)
+
+    - we must be able to create a Distribution object exposing the
+      above metadata.
+    """
+
+    def __init__(self, req: InstallRequirement) -> None:
+        super().__init__()
+        self.req = req
+
+    @abc.abstractmethod
+    def get_metadata_distribution(self) -> BaseDistribution:
+        raise NotImplementedError()
+
+    @abc.abstractmethod
+    def prepare_distribution_metadata(
+        self, finder: PackageFinder, build_isolation: bool
+    ) -> None:
+        raise NotImplementedError()
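Since both hooks are abstract, the base class cannot be instantiated directly; a quick illustrative check (not part of the commit):

    # Illustrative: instantiating the ABC raises TypeError because the two
    # abstract methods are unimplemented.
    try:
        AbstractDistribution(req=None)  # type: ignore[abstract]
    except TypeError as exc:
        print(exc)  # "Can't instantiate abstract class AbstractDistribution ..."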
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/installed.py ADDED
@@ -0,0 +1,20 @@
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+
+
+class InstalledDistribution(AbstractDistribution):
+    """Represents an installed package.
+
+    This does not need any preparation as the required information has already
+    been computed.
+    """
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        assert self.req.satisfied_by is not None, "not actually installed"
+        return self.req.satisfied_by
+
+    def prepare_distribution_metadata(
+        self, finder: PackageFinder, build_isolation: bool
+    ) -> None:
+        pass
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/sdist.py ADDED
@@ -0,0 +1,127 @@
+import logging
+from typing import Iterable, Set, Tuple
+
+from pip._internal.build_env import BuildEnvironment
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.exceptions import InstallationError
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import BaseDistribution
+from pip._internal.utils.subprocess import runner_with_spinner_message
+
+logger = logging.getLogger(__name__)
+
+
+class SourceDistribution(AbstractDistribution):
+    """Represents a source distribution.
+
+    The preparation step for these needs metadata for the packages to be
+    generated, either using PEP 517 or using the legacy `setup.py egg_info`.
+    """
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        return self.req.get_dist()
+
+    def prepare_distribution_metadata(
+        self, finder: PackageFinder, build_isolation: bool
+    ) -> None:
+        # Load pyproject.toml, to determine whether PEP 517 is to be used
+        self.req.load_pyproject_toml()
+
+        # Set up the build isolation, if this requirement should be isolated
+        should_isolate = self.req.use_pep517 and build_isolation
+        if should_isolate:
+            # Setup an isolated environment and install the build backend static
+            # requirements in it.
+            self._prepare_build_backend(finder)
+            # Check that if the requirement is editable, it either supports PEP 660 or
+            # has a setup.py or a setup.cfg. This cannot be done earlier because we need
+            # to setup the build backend to verify it supports build_editable, nor can
+            # it be done later, because we want to avoid installing build requirements
+            # needlessly. Doing it here also works around setuptools generating
+            # UNKNOWN.egg-info when running get_requires_for_build_wheel on a directory
+            # without setup.py nor setup.cfg.
+            self.req.isolated_editable_sanity_check()
+            # Install the dynamic build requirements.
+            self._install_build_reqs(finder)
+
+        self.req.prepare_metadata()
+
+    def _prepare_build_backend(self, finder: PackageFinder) -> None:
+        # Isolate in a BuildEnvironment and install the build-time
+        # requirements.
+        pyproject_requires = self.req.pyproject_requires
+        assert pyproject_requires is not None
+
+        self.req.build_env = BuildEnvironment()
+        self.req.build_env.install_requirements(
+            finder, pyproject_requires, "overlay", kind="build dependencies"
+        )
+        conflicting, missing = self.req.build_env.check_requirements(
+            self.req.requirements_to_check
+        )
+        if conflicting:
+            self._raise_conflicts("PEP 517/518 supported requirements", conflicting)
+        if missing:
+            logger.warning(
+                "Missing build requirements in pyproject.toml for %s.",
+                self.req,
+            )
+            logger.warning(
+                "The project does not specify a build backend, and "
+                "pip cannot fall back to setuptools without %s.",
+                " and ".join(map(repr, sorted(missing))),
+            )
+
+    def _get_build_requires_wheel(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message("Getting requirements to build wheel")
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_wheel()
+
+    def _get_build_requires_editable(self) -> Iterable[str]:
+        with self.req.build_env:
+            runner = runner_with_spinner_message(
+                "Getting requirements to build editable"
+            )
+            backend = self.req.pep517_backend
+            assert backend is not None
+            with backend.subprocess_runner(runner):
+                return backend.get_requires_for_build_editable()
+
+    def _install_build_reqs(self, finder: PackageFinder) -> None:
+        # Install any extra build dependencies that the backend requests.
+        # This must be done in a second pass, as the pyproject.toml
+        # dependencies must be installed before we can call the backend.
+        if (
+            self.req.editable
+            and self.req.permit_editable_wheels
+            and self.req.supports_pyproject_editable()
+        ):
+            build_reqs = self._get_build_requires_editable()
+        else:
+            build_reqs = self._get_build_requires_wheel()
+        conflicting, missing = self.req.build_env.check_requirements(build_reqs)
+        if conflicting:
+            self._raise_conflicts("the backend dependencies", conflicting)
+        self.req.build_env.install_requirements(
+            finder, missing, "normal", kind="backend dependencies"
+        )
+
+    def _raise_conflicts(
+        self, conflicting_with: str, conflicting_reqs: Set[Tuple[str, str]]
+    ) -> None:
+        format_string = (
+            "Some build dependencies for {requirement} "
+            "conflict with {conflicting_with}: {description}."
+        )
+        error_message = format_string.format(
+            requirement=self.req,
+            conflicting_with=conflicting_with,
+            description=", ".join(
+                f"{installed} is incompatible with {wanted}"
+                for installed, wanted in sorted(conflicting_reqs)
+            ),
+        )
+        raise InstallationError(error_message)
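To make the conflict message shape concrete, the same formatting applied to made-up data (package names are purely illustrative):

    # Illustrative only: reproduce the error-message formatting with fake conflicts.
    conflicts = {("setuptools 58.0", "setuptools>=61"), ("wheel 0.34", "wheel>=0.37")}
    description = ", ".join(
        f"{installed} is incompatible with {wanted}"
        for installed, wanted in sorted(conflicts)
    )
    print(
        "Some build dependencies for demo-pkg "
        f"conflict with the backend dependencies: {description}."
    )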
env-llmeval/lib/python3.10/site-packages/pip/_internal/distributions/wheel.py ADDED
@@ -0,0 +1,31 @@
+from pip._vendor.packaging.utils import canonicalize_name
+
+from pip._internal.distributions.base import AbstractDistribution
+from pip._internal.index.package_finder import PackageFinder
+from pip._internal.metadata import (
+    BaseDistribution,
+    FilesystemWheel,
+    get_wheel_distribution,
+)
+
+
+class WheelDistribution(AbstractDistribution):
+    """Represents a wheel distribution.
+
+    This does not need any preparation as wheels can be directly unpacked.
+    """
+
+    def get_metadata_distribution(self) -> BaseDistribution:
+        """Loads the metadata from the wheel file into memory and returns a
+        Distribution that uses it, not relying on the wheel file or
+        requirement.
+        """
+        assert self.req.local_file_path, "Set as part of preparation during download"
+        assert self.req.name, "Wheels are never unnamed"
+        wheel = FilesystemWheel(self.req.local_file_path)
+        return get_wheel_distribution(wheel, canonicalize_name(self.req.name))
+
+    def prepare_distribution_metadata(
+        self, finder: PackageFinder, build_isolation: bool
+    ) -> None:
+        pass
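A hedged sketch of loading wheel metadata the same way outside of a WheelDistribution; the wheel path and project name are invented, and the helpers are the ones imported above:

    # Sketch only: read metadata from a local wheel file, assuming it exists.
    wheel = FilesystemWheel("/tmp/example_pkg-1.0-py3-none-any.whl")  # hypothetical path
    dist = get_wheel_distribution(wheel, canonicalize_name("example-pkg"))
    print(dist.version)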
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__init__.py ADDED
@@ -0,0 +1,94 @@
1
+ import collections
2
+ import logging
3
+ from typing import Iterator, List, Optional, Sequence, Tuple
4
+
5
+ from pip._internal.utils.logging import indent_log
6
+
7
+ from .req_file import parse_requirements
8
+ from .req_install import InstallRequirement
9
+ from .req_set import RequirementSet
10
+
11
+ __all__ = [
12
+ "RequirementSet",
13
+ "InstallRequirement",
14
+ "parse_requirements",
15
+ "install_given_reqs",
16
+ ]
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+
21
+ class InstallationResult:
22
+ def __init__(self, name: str) -> None:
23
+ self.name = name
24
+
25
+ def __repr__(self) -> str:
26
+ return f"InstallationResult(name={self.name!r})"
27
+
28
+
29
+ def _validate_requirements(
30
+ requirements: List[InstallRequirement],
31
+ ) -> Iterator[Tuple[str, InstallRequirement]]:
32
+ for req in requirements:
33
+ assert req.name, f"invalid to-be-installed requirement: {req}"
34
+ yield req.name, req
35
+
36
+
37
+ def install_given_reqs(
38
+ requirements: List[InstallRequirement],
39
+ install_options: List[str],
40
+ global_options: Sequence[str],
41
+ root: Optional[str],
42
+ home: Optional[str],
43
+ prefix: Optional[str],
44
+ warn_script_location: bool,
45
+ use_user_site: bool,
46
+ pycompile: bool,
47
+ ) -> List[InstallationResult]:
48
+ """
49
+ Install everything in the given list.
50
+
51
+ (to be called after having downloaded and unpacked the packages)
52
+ """
53
+ to_install = collections.OrderedDict(_validate_requirements(requirements))
54
+
55
+ if to_install:
56
+ logger.info(
57
+ "Installing collected packages: %s",
58
+ ", ".join(to_install.keys()),
59
+ )
60
+
61
+ installed = []
62
+
63
+ with indent_log():
64
+ for req_name, requirement in to_install.items():
65
+ if requirement.should_reinstall:
66
+ logger.info("Attempting uninstall: %s", req_name)
67
+ with indent_log():
68
+ uninstalled_pathset = requirement.uninstall(auto_confirm=True)
69
+ else:
70
+ uninstalled_pathset = None
71
+
72
+ try:
73
+ requirement.install(
74
+ install_options,
75
+ global_options,
76
+ root=root,
77
+ home=home,
78
+ prefix=prefix,
79
+ warn_script_location=warn_script_location,
80
+ use_user_site=use_user_site,
81
+ pycompile=pycompile,
82
+ )
83
+ except Exception:
84
+ # if install did not succeed, rollback previous uninstall
85
+ if uninstalled_pathset and not requirement.install_succeeded:
86
+ uninstalled_pathset.rollback()
87
+ raise
88
+ else:
89
+ if uninstalled_pathset and requirement.install_succeeded:
90
+ uninstalled_pathset.commit()
91
+
92
+ installed.append(InstallationResult(req_name))
93
+
94
+ return installed
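install_given_reqs keys the batch by requirement name in an OrderedDict, so a later duplicate replaces an earlier one while insertion order is kept. A tiny illustration with plain strings standing in for requirement objects:

    import collections

    # Stand-ins for the (name, requirement) pairs _validate_requirements yields.
    pairs = [("requests", "req-1"), ("idna", "req-2"), ("requests", "req-3")]
    to_install = collections.OrderedDict(pairs)
    print(list(to_install.items()))
    # [('requests', 'req-3'), ('idna', 'req-2')] -- last duplicate wins, order preserved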
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (2.59 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/constructors.cpython-310.pyc ADDED
Binary file (12.2 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_file.cpython-310.pyc ADDED
Binary file (13.5 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_install.cpython-310.pyc ADDED
Binary file (22.2 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_set.cpython-310.pyc ADDED
Binary file (5.83 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_tracker.cpython-310.pyc ADDED
Binary file (4.3 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/__pycache__/req_uninstall.cpython-310.pyc ADDED
Binary file (18.9 kB).
 
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_file.py ADDED
@@ -0,0 +1,536 @@
1
+ """
2
+ Requirements file parsing
3
+ """
4
+
5
+ import optparse
6
+ import os
7
+ import re
8
+ import shlex
9
+ import urllib.parse
10
+ from optparse import Values
11
+ from typing import (
12
+ TYPE_CHECKING,
13
+ Any,
14
+ Callable,
15
+ Dict,
16
+ Iterable,
17
+ Iterator,
18
+ List,
19
+ Optional,
20
+ Tuple,
21
+ )
22
+
23
+ from pip._internal.cli import cmdoptions
24
+ from pip._internal.exceptions import InstallationError, RequirementsFileParseError
25
+ from pip._internal.models.search_scope import SearchScope
26
+ from pip._internal.network.session import PipSession
27
+ from pip._internal.network.utils import raise_for_status
28
+ from pip._internal.utils.encoding import auto_decode
29
+ from pip._internal.utils.urls import get_url_scheme
30
+
31
+ if TYPE_CHECKING:
32
+ # NoReturn introduced in 3.6.2; imported only for type checking to maintain
33
+ # pip compatibility with older patch versions of Python 3.6
34
+ from typing import NoReturn
35
+
36
+ from pip._internal.index.package_finder import PackageFinder
37
+
38
+ __all__ = ["parse_requirements"]
39
+
40
+ ReqFileLines = Iterable[Tuple[int, str]]
41
+
42
+ LineParser = Callable[[str], Tuple[str, Values]]
43
+
44
+ SCHEME_RE = re.compile(r"^(http|https|file):", re.I)
45
+ COMMENT_RE = re.compile(r"(^|\s+)#.*$")
46
+
47
+ # Matches environment variable-style values in '${MY_VARIABLE_1}' with the
48
+ # variable name consisting of only uppercase letters, digits or the '_'
49
+ # (underscore). This follows the POSIX standard defined in IEEE Std 1003.1,
50
+ # 2013 Edition.
51
+ ENV_VAR_RE = re.compile(r"(?P<var>\$\{(?P<name>[A-Z0-9_]+)\})")
52
+
53
+ SUPPORTED_OPTIONS: List[Callable[..., optparse.Option]] = [
54
+ cmdoptions.index_url,
55
+ cmdoptions.extra_index_url,
56
+ cmdoptions.no_index,
57
+ cmdoptions.constraints,
58
+ cmdoptions.requirements,
59
+ cmdoptions.editable,
60
+ cmdoptions.find_links,
61
+ cmdoptions.no_binary,
62
+ cmdoptions.only_binary,
63
+ cmdoptions.prefer_binary,
64
+ cmdoptions.require_hashes,
65
+ cmdoptions.pre,
66
+ cmdoptions.trusted_host,
67
+ cmdoptions.use_new_feature,
68
+ ]
69
+
70
+ # options to be passed to requirements
71
+ SUPPORTED_OPTIONS_REQ: List[Callable[..., optparse.Option]] = [
72
+ cmdoptions.install_options,
73
+ cmdoptions.global_options,
74
+ cmdoptions.hash,
75
+ ]
76
+
77
+ # the 'dest' string values
78
+ SUPPORTED_OPTIONS_REQ_DEST = [str(o().dest) for o in SUPPORTED_OPTIONS_REQ]
79
+
80
+
81
+ class ParsedRequirement:
82
+ def __init__(
83
+ self,
84
+ requirement: str,
85
+ is_editable: bool,
86
+ comes_from: str,
87
+ constraint: bool,
88
+ options: Optional[Dict[str, Any]] = None,
89
+ line_source: Optional[str] = None,
90
+ ) -> None:
91
+ self.requirement = requirement
92
+ self.is_editable = is_editable
93
+ self.comes_from = comes_from
94
+ self.options = options
95
+ self.constraint = constraint
96
+ self.line_source = line_source
97
+
98
+
99
+ class ParsedLine:
100
+ def __init__(
101
+ self,
102
+ filename: str,
103
+ lineno: int,
104
+ args: str,
105
+ opts: Values,
106
+ constraint: bool,
107
+ ) -> None:
108
+ self.filename = filename
109
+ self.lineno = lineno
110
+ self.opts = opts
111
+ self.constraint = constraint
112
+
113
+ if args:
114
+ self.is_requirement = True
115
+ self.is_editable = False
116
+ self.requirement = args
117
+ elif opts.editables:
118
+ self.is_requirement = True
119
+ self.is_editable = True
120
+ # We don't support multiple -e on one line
121
+ self.requirement = opts.editables[0]
122
+ else:
123
+ self.is_requirement = False
124
+
125
+
126
+ def parse_requirements(
127
+ filename: str,
128
+ session: PipSession,
129
+ finder: Optional["PackageFinder"] = None,
130
+ options: Optional[optparse.Values] = None,
131
+ constraint: bool = False,
132
+ ) -> Iterator[ParsedRequirement]:
133
+ """Parse a requirements file and yield ParsedRequirement instances.
134
+
135
+ :param filename: Path or url of requirements file.
136
+ :param session: PipSession instance.
137
+ :param finder: Instance of pip.index.PackageFinder.
138
+ :param options: cli options.
139
+ :param constraint: If true, parsing a constraint file rather than
140
+ requirements file.
141
+ """
142
+ line_parser = get_line_parser(finder)
143
+ parser = RequirementsFileParser(session, line_parser)
144
+
145
+ for parsed_line in parser.parse(filename, constraint):
146
+ parsed_req = handle_line(
147
+ parsed_line, options=options, finder=finder, session=session
148
+ )
149
+ if parsed_req is not None:
150
+ yield parsed_req
151
+
152
+
153
+ def preprocess(content: str) -> ReqFileLines:
154
+ """Split, filter, and join lines, and return a line iterator
155
+
156
+ :param content: the content of the requirements file
157
+ """
158
+ lines_enum: ReqFileLines = enumerate(content.splitlines(), start=1)
159
+ lines_enum = join_lines(lines_enum)
160
+ lines_enum = ignore_comments(lines_enum)
161
+ lines_enum = expand_env_variables(lines_enum)
162
+ return lines_enum
163
+
164
+
165
+ def handle_requirement_line(
166
+ line: ParsedLine,
167
+ options: Optional[optparse.Values] = None,
168
+ ) -> ParsedRequirement:
169
+
170
+ # preserve for the nested code path
171
+ line_comes_from = "{} {} (line {})".format(
172
+ "-c" if line.constraint else "-r",
173
+ line.filename,
174
+ line.lineno,
175
+ )
176
+
177
+ assert line.is_requirement
178
+
179
+ if line.is_editable:
180
+ # For editable requirements, we don't support per-requirement
181
+ # options, so just return the parsed requirement.
182
+ return ParsedRequirement(
183
+ requirement=line.requirement,
184
+ is_editable=line.is_editable,
185
+ comes_from=line_comes_from,
186
+ constraint=line.constraint,
187
+ )
188
+ else:
189
+ if options:
190
+ # Disable wheels if the user has specified build options
191
+ cmdoptions.check_install_build_global(options, line.opts)
192
+
193
+ # get the options that apply to requirements
194
+ req_options = {}
195
+ for dest in SUPPORTED_OPTIONS_REQ_DEST:
196
+ if dest in line.opts.__dict__ and line.opts.__dict__[dest]:
197
+ req_options[dest] = line.opts.__dict__[dest]
198
+
199
+ line_source = f"line {line.lineno} of {line.filename}"
200
+ return ParsedRequirement(
201
+ requirement=line.requirement,
202
+ is_editable=line.is_editable,
203
+ comes_from=line_comes_from,
204
+ constraint=line.constraint,
205
+ options=req_options,
206
+ line_source=line_source,
207
+ )
208
+
209
+
210
+ def handle_option_line(
211
+ opts: Values,
212
+ filename: str,
213
+ lineno: int,
214
+ finder: Optional["PackageFinder"] = None,
215
+ options: Optional[optparse.Values] = None,
216
+ session: Optional[PipSession] = None,
217
+ ) -> None:
218
+
219
+ if options:
220
+ # percolate options upward
221
+ if opts.require_hashes:
222
+ options.require_hashes = opts.require_hashes
223
+ if opts.features_enabled:
224
+ options.features_enabled.extend(
225
+ f for f in opts.features_enabled if f not in options.features_enabled
226
+ )
227
+
228
+ # set finder options
229
+ if finder:
230
+ find_links = finder.find_links
231
+ index_urls = finder.index_urls
232
+ if opts.index_url:
233
+ index_urls = [opts.index_url]
234
+ if opts.no_index is True:
235
+ index_urls = []
236
+ if opts.extra_index_urls:
237
+ index_urls.extend(opts.extra_index_urls)
238
+ if opts.find_links:
239
+ # FIXME: it would be nice to keep track of the source
240
+ # of the find_links: support a find-links local path
241
+ # relative to a requirements file.
242
+ value = opts.find_links[0]
243
+ req_dir = os.path.dirname(os.path.abspath(filename))
244
+ relative_to_reqs_file = os.path.join(req_dir, value)
245
+ if os.path.exists(relative_to_reqs_file):
246
+ value = relative_to_reqs_file
247
+ find_links.append(value)
248
+
249
+ if session:
250
+ # We need to update the auth urls in session
251
+ session.update_index_urls(index_urls)
252
+
253
+ search_scope = SearchScope(
254
+ find_links=find_links,
255
+ index_urls=index_urls,
256
+ )
257
+ finder.search_scope = search_scope
258
+
259
+ if opts.pre:
260
+ finder.set_allow_all_prereleases()
261
+
262
+ if opts.prefer_binary:
263
+ finder.set_prefer_binary()
264
+
265
+ if session:
266
+ for host in opts.trusted_hosts or []:
267
+ source = f"line {lineno} of {filename}"
268
+ session.add_trusted_host(host, source=source)
269
+
270
+
271
+ def handle_line(
272
+ line: ParsedLine,
273
+ options: Optional[optparse.Values] = None,
274
+ finder: Optional["PackageFinder"] = None,
275
+ session: Optional[PipSession] = None,
276
+ ) -> Optional[ParsedRequirement]:
277
+ """Handle a single parsed requirements line; This can result in
278
+ creating/yielding requirements, or updating the finder.
279
+
280
+ :param line: The parsed line to be processed.
281
+ :param options: CLI options.
282
+ :param finder: The finder - updated by non-requirement lines.
283
+ :param session: The session - updated by non-requirement lines.
284
+
285
+ Returns a ParsedRequirement object if the line is a requirement line,
286
+ otherwise returns None.
287
+
288
+ For lines that contain requirements, the only options that have an effect
289
+ are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
290
+ requirement. Other options from SUPPORTED_OPTIONS may be present, but are
291
+ ignored.
292
+
293
+ For lines that do not contain requirements, the only options that have an
294
+ effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
295
+ be present, but are ignored. These lines may contain multiple options
296
+ (although our docs imply only one is supported), and all our parsed and
297
+ affect the finder.
298
+ """
299
+
300
+ if line.is_requirement:
301
+ parsed_req = handle_requirement_line(line, options)
302
+ return parsed_req
303
+ else:
304
+ handle_option_line(
305
+ line.opts,
306
+ line.filename,
307
+ line.lineno,
308
+ finder,
309
+ options,
310
+ session,
311
+ )
312
+ return None
313
+
314
+
315
+ class RequirementsFileParser:
316
+ def __init__(
317
+ self,
318
+ session: PipSession,
319
+ line_parser: LineParser,
320
+ ) -> None:
321
+ self._session = session
322
+ self._line_parser = line_parser
323
+
324
+ def parse(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
325
+ """Parse a given file, yielding parsed lines."""
326
+ yield from self._parse_and_recurse(filename, constraint)
327
+
328
+ def _parse_and_recurse(
329
+ self, filename: str, constraint: bool
330
+ ) -> Iterator[ParsedLine]:
331
+ for line in self._parse_file(filename, constraint):
332
+ if not line.is_requirement and (
333
+ line.opts.requirements or line.opts.constraints
334
+ ):
335
+ # parse a nested requirements file
336
+ if line.opts.requirements:
337
+ req_path = line.opts.requirements[0]
338
+ nested_constraint = False
339
+ else:
340
+ req_path = line.opts.constraints[0]
341
+ nested_constraint = True
342
+
343
+ # original file is over http
344
+ if SCHEME_RE.search(filename):
345
+ # do a url join so relative paths work
346
+ req_path = urllib.parse.urljoin(filename, req_path)
347
+ # original file and nested file are paths
348
+ elif not SCHEME_RE.search(req_path):
349
+ # do a join so relative paths work
350
+ req_path = os.path.join(
351
+ os.path.dirname(filename),
352
+ req_path,
353
+ )
354
+
355
+ yield from self._parse_and_recurse(req_path, nested_constraint)
356
+ else:
357
+ yield line
358
+
359
+ def _parse_file(self, filename: str, constraint: bool) -> Iterator[ParsedLine]:
360
+ _, content = get_file_content(filename, self._session)
361
+
362
+ lines_enum = preprocess(content)
363
+
364
+ for line_number, line in lines_enum:
365
+ try:
366
+ args_str, opts = self._line_parser(line)
367
+ except OptionParsingError as e:
368
+ # add offending line
369
+ msg = f"Invalid requirement: {line}\n{e.msg}"
370
+ raise RequirementsFileParseError(msg)
371
+
372
+ yield ParsedLine(
373
+ filename,
374
+ line_number,
375
+ args_str,
376
+ opts,
377
+ constraint,
378
+ )
379
+
380
+
381
+ def get_line_parser(finder: Optional["PackageFinder"]) -> LineParser:
382
+ def parse_line(line: str) -> Tuple[str, Values]:
383
+ # Build new parser for each line since it accumulates appendable
384
+ # options.
385
+ parser = build_parser()
386
+ defaults = parser.get_default_values()
387
+ defaults.index_url = None
388
+ if finder:
389
+ defaults.format_control = finder.format_control
390
+
391
+ args_str, options_str = break_args_options(line)
392
+
393
+ opts, _ = parser.parse_args(shlex.split(options_str), defaults)
394
+
395
+ return args_str, opts
396
+
397
+ return parse_line
398
+
399
+
400
+ def break_args_options(line: str) -> Tuple[str, str]:
401
+ """Break up the line into an args and options string. We only want to shlex
402
+ (and then optparse) the options, not the args. args can contain markers
403
+ which are corrupted by shlex.
404
+ """
405
+ tokens = line.split(" ")
406
+ args = []
407
+ options = tokens[:]
408
+ for token in tokens:
409
+ if token.startswith("-") or token.startswith("--"):
410
+ break
411
+ else:
412
+ args.append(token)
413
+ options.pop(0)
414
+ return " ".join(args), " ".join(options)
415
+
416
+
417
+ class OptionParsingError(Exception):
418
+ def __init__(self, msg: str) -> None:
419
+ self.msg = msg
420
+
421
+
422
+ def build_parser() -> optparse.OptionParser:
423
+ """
424
+ Return a parser for parsing requirement lines
425
+ """
426
+ parser = optparse.OptionParser(add_help_option=False)
427
+
428
+ option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
429
+ for option_factory in option_factories:
430
+ option = option_factory()
431
+ parser.add_option(option)
432
+
433
+ # By default optparse sys.exits on parsing errors. We want to wrap
434
+ # that in our own exception.
435
+ def parser_exit(self: Any, msg: str) -> "NoReturn":
436
+ raise OptionParsingError(msg)
437
+
438
+ # NOTE: mypy disallows assigning to a method
439
+ # https://github.com/python/mypy/issues/2427
440
+ parser.exit = parser_exit # type: ignore
441
+
442
+ return parser
443
+
444
+
445
+ def join_lines(lines_enum: ReqFileLines) -> ReqFileLines:
446
+ """Joins a line ending in '\' with the previous line (except when following
447
+ comments). The joined line takes on the index of the first line.
448
+ """
449
+ primary_line_number = None
450
+ new_line: List[str] = []
451
+ for line_number, line in lines_enum:
452
+ if not line.endswith("\\") or COMMENT_RE.match(line):
453
+ if COMMENT_RE.match(line):
454
+ # this ensures comments are always matched later
455
+ line = " " + line
456
+ if new_line:
457
+ new_line.append(line)
458
+ assert primary_line_number is not None
459
+ yield primary_line_number, "".join(new_line)
460
+ new_line = []
461
+ else:
462
+ yield line_number, line
463
+ else:
464
+ if not new_line:
465
+ primary_line_number = line_number
466
+ new_line.append(line.strip("\\"))
467
+
468
+ # last line contains \
469
+ if new_line:
470
+ assert primary_line_number is not None
471
+ yield primary_line_number, "".join(new_line)
472
+
473
+ # TODO: handle space after '\'.
474
+
475
+
476
+ def ignore_comments(lines_enum: ReqFileLines) -> ReqFileLines:
477
+ """
478
+ Strips comments and filter empty lines.
479
+ """
480
+ for line_number, line in lines_enum:
481
+ line = COMMENT_RE.sub("", line)
482
+ line = line.strip()
483
+ if line:
484
+ yield line_number, line
485
+
486
+
487
+ def expand_env_variables(lines_enum: ReqFileLines) -> ReqFileLines:
488
+ """Replace all environment variables that can be retrieved via `os.getenv`.
489
+
490
+ The only allowed format for environment variables defined in the
491
+ requirement file is `${MY_VARIABLE_1}` to ensure two things:
492
+
493
+ 1. Strings that contain a `$` aren't accidentally (partially) expanded.
494
+ 2. Ensure consistency across platforms for requirement files.
495
+
496
+ These points are the result of a discussion on the `github pull
497
+ request #3514 <https://github.com/pypa/pip/pull/3514>`_.
498
+
499
+ Valid characters in variable names follow the `POSIX standard
500
+ <http://pubs.opengroup.org/onlinepubs/9699919799/>`_ and are limited
501
+ to uppercase letter, digits and the `_` (underscore).
502
+ """
503
+ for line_number, line in lines_enum:
504
+ for env_var, var_name in ENV_VAR_RE.findall(line):
505
+ value = os.getenv(var_name)
506
+ if not value:
507
+ continue
508
+
509
+ line = line.replace(env_var, value)
510
+
511
+ yield line_number, line
512
+
513
+
514
+ def get_file_content(url: str, session: PipSession) -> Tuple[str, str]:
515
+ """Gets the content of a file; it may be a filename, file: URL, or
516
+ http: URL. Returns (location, content). Content is unicode.
517
+ Respects # -*- coding: declarations on the retrieved files.
518
+
519
+ :param url: File path or url.
520
+ :param session: PipSession instance.
521
+ """
522
+ scheme = get_url_scheme(url)
523
+
524
+ # Pip has special support for file:// URLs (LocalFSAdapter).
525
+ if scheme in ["http", "https", "file"]:
526
+ resp = session.get(url)
527
+ raise_for_status(resp)
528
+ return resp.url, resp.text
529
+
530
+ # Assume this is a bare path.
531
+ try:
532
+ with open(url, "rb") as f:
533
+ content = auto_decode(f.read())
534
+ except OSError as exc:
535
+ raise InstallationError(f"Could not open requirements file: {exc}")
536
+ return url, content
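A hedged sketch of driving this parser directly; the requirements file path is invented, and in normal use pip calls parse_requirements itself:

    # Sketch only: parse a local requirements file with the function defined above.
    from pip._internal.network.session import PipSession
    from pip._internal.req.req_file import parse_requirements

    session = PipSession()  # fine for a local file; also handles http(s) URLs
    for parsed in parse_requirements("requirements.txt", session=session):
        # ParsedRequirement exposes the raw requirement string and where it came from.
        print(parsed.requirement, parsed.is_editable, parsed.comes_from)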
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_install.py ADDED
@@ -0,0 +1,858 @@
1
+ # The following comment should be removed at some point in the future.
2
+ # mypy: strict-optional=False
3
+
4
+ import functools
5
+ import logging
6
+ import os
7
+ import shutil
8
+ import sys
9
+ import uuid
10
+ import zipfile
11
+ from typing import Any, Collection, Dict, Iterable, List, Optional, Sequence, Union
12
+
13
+ from pip._vendor.packaging.markers import Marker
14
+ from pip._vendor.packaging.requirements import Requirement
15
+ from pip._vendor.packaging.specifiers import SpecifierSet
16
+ from pip._vendor.packaging.utils import canonicalize_name
17
+ from pip._vendor.packaging.version import Version
18
+ from pip._vendor.packaging.version import parse as parse_version
19
+ from pip._vendor.pep517.wrappers import Pep517HookCaller
20
+
21
+ from pip._internal.build_env import BuildEnvironment, NoOpBuildEnvironment
22
+ from pip._internal.exceptions import InstallationError, LegacyInstallFailure
23
+ from pip._internal.locations import get_scheme
24
+ from pip._internal.metadata import (
25
+ BaseDistribution,
26
+ get_default_environment,
27
+ get_directory_distribution,
28
+ )
29
+ from pip._internal.models.link import Link
30
+ from pip._internal.operations.build.metadata import generate_metadata
31
+ from pip._internal.operations.build.metadata_editable import generate_editable_metadata
32
+ from pip._internal.operations.build.metadata_legacy import (
33
+ generate_metadata as generate_metadata_legacy,
34
+ )
35
+ from pip._internal.operations.install.editable_legacy import (
36
+ install_editable as install_editable_legacy,
37
+ )
38
+ from pip._internal.operations.install.legacy import install as install_legacy
39
+ from pip._internal.operations.install.wheel import install_wheel
40
+ from pip._internal.pyproject import load_pyproject_toml, make_pyproject_path
41
+ from pip._internal.req.req_uninstall import UninstallPathSet
42
+ from pip._internal.utils.deprecation import deprecated
43
+ from pip._internal.utils.direct_url_helpers import (
44
+ direct_url_for_editable,
45
+ direct_url_from_link,
46
+ )
47
+ from pip._internal.utils.hashes import Hashes
48
+ from pip._internal.utils.misc import (
49
+ ask_path_exists,
50
+ backup_dir,
51
+ display_path,
52
+ hide_url,
53
+ redact_auth_from_url,
54
+ )
55
+ from pip._internal.utils.packaging import safe_extra
56
+ from pip._internal.utils.subprocess import runner_with_spinner_message
57
+ from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
58
+ from pip._internal.utils.virtualenv import running_under_virtualenv
59
+ from pip._internal.vcs import vcs
60
+
61
+ logger = logging.getLogger(__name__)
62
+
63
+
64
+ class InstallRequirement:
65
+ """
66
+ Represents something that may be installed later on, may have information
67
+ about where to fetch the relevant requirement and also contains logic for
68
+ installing the said requirement.
69
+ """
70
+
71
+ def __init__(
72
+ self,
73
+ req: Optional[Requirement],
74
+ comes_from: Optional[Union[str, "InstallRequirement"]],
75
+ editable: bool = False,
76
+ link: Optional[Link] = None,
77
+ markers: Optional[Marker] = None,
78
+ use_pep517: Optional[bool] = None,
79
+ isolated: bool = False,
80
+ install_options: Optional[List[str]] = None,
81
+ global_options: Optional[List[str]] = None,
82
+ hash_options: Optional[Dict[str, List[str]]] = None,
83
+ constraint: bool = False,
84
+ extras: Collection[str] = (),
85
+ user_supplied: bool = False,
86
+ permit_editable_wheels: bool = False,
87
+ ) -> None:
88
+ assert req is None or isinstance(req, Requirement), req
89
+ self.req = req
90
+ self.comes_from = comes_from
91
+ self.constraint = constraint
92
+ self.editable = editable
93
+ self.permit_editable_wheels = permit_editable_wheels
94
+ self.legacy_install_reason: Optional[int] = None
95
+
96
+ # source_dir is the local directory where the linked requirement is
97
+ # located, or unpacked. In case unpacking is needed, creating and
98
+ # populating source_dir is done by the RequirementPreparer. Note this
99
+ # is not necessarily the directory where pyproject.toml or setup.py is
100
+ # located - that one is obtained via unpacked_source_directory.
101
+ self.source_dir: Optional[str] = None
102
+ if self.editable:
103
+ assert link
104
+ if link.is_file:
105
+ self.source_dir = os.path.normpath(os.path.abspath(link.file_path))
106
+
107
+ if link is None and req and req.url:
108
+ # PEP 508 URL requirement
109
+ link = Link(req.url)
110
+ self.link = self.original_link = link
111
+ self.original_link_is_in_wheel_cache = False
112
+
113
+ # Path to any downloaded or already-existing package.
114
+ self.local_file_path: Optional[str] = None
115
+ if self.link and self.link.is_file:
116
+ self.local_file_path = self.link.file_path
117
+
118
+ if extras:
119
+ self.extras = extras
120
+ elif req:
121
+ self.extras = {safe_extra(extra) for extra in req.extras}
122
+ else:
123
+ self.extras = set()
124
+ if markers is None and req:
125
+ markers = req.marker
126
+ self.markers = markers
127
+
128
+ # This holds the Distribution object if this requirement is already installed.
129
+ self.satisfied_by: Optional[BaseDistribution] = None
130
+ # Whether the installation process should try to uninstall an existing
131
+ # distribution before installing this requirement.
132
+ self.should_reinstall = False
133
+ # Temporary build location
134
+ self._temp_build_dir: Optional[TempDirectory] = None
135
+ # Set to True after successful installation
136
+ self.install_succeeded: Optional[bool] = None
137
+ # Supplied options
138
+ self.install_options = install_options if install_options else []
139
+ self.global_options = global_options if global_options else []
140
+ self.hash_options = hash_options if hash_options else {}
141
+ # Set to True after successful preparation of this requirement
142
+ self.prepared = False
143
+ # User supplied requirement are explicitly requested for installation
144
+ # by the user via CLI arguments or requirements files, as opposed to,
145
+ # e.g. dependencies, extras or constraints.
146
+ self.user_supplied = user_supplied
147
+
148
+ self.isolated = isolated
149
+ self.build_env: BuildEnvironment = NoOpBuildEnvironment()
150
+
151
+ # For PEP 517, the directory where we request the project metadata
152
+ # gets stored. We need this to pass to build_wheel, so the backend
153
+ # can ensure that the wheel matches the metadata (see the PEP for
154
+ # details).
155
+ self.metadata_directory: Optional[str] = None
156
+
157
+ # The static build requirements (from pyproject.toml)
158
+ self.pyproject_requires: Optional[List[str]] = None
159
+
160
+ # Build requirements that we will check are available
161
+ self.requirements_to_check: List[str] = []
162
+
163
+ # The PEP 517 backend we should use to build the project
164
+ self.pep517_backend: Optional[Pep517HookCaller] = None
165
+
166
+ # Are we using PEP 517 for this requirement?
167
+ # After pyproject.toml has been loaded, the only valid values are True
168
+ # and False. Before loading, None is valid (meaning "use the default").
169
+ # Setting an explicit value before loading pyproject.toml is supported,
170
+ # but after loading this flag should be treated as read only.
171
+ self.use_pep517 = use_pep517
172
+
173
+ # This requirement needs more preparation before it can be built
174
+ self.needs_more_preparation = False
175
+
176
+ def __str__(self) -> str:
177
+ if self.req:
178
+ s = str(self.req)
179
+ if self.link:
180
+ s += " from {}".format(redact_auth_from_url(self.link.url))
181
+ elif self.link:
182
+ s = redact_auth_from_url(self.link.url)
183
+ else:
184
+ s = "<InstallRequirement>"
185
+ if self.satisfied_by is not None:
186
+ s += " in {}".format(display_path(self.satisfied_by.location))
187
+ if self.comes_from:
188
+ if isinstance(self.comes_from, str):
189
+ comes_from: Optional[str] = self.comes_from
190
+ else:
191
+ comes_from = self.comes_from.from_path()
192
+ if comes_from:
193
+ s += f" (from {comes_from})"
194
+ return s
195
+
196
+ def __repr__(self) -> str:
197
+ return "<{} object: {} editable={!r}>".format(
198
+ self.__class__.__name__, str(self), self.editable
199
+ )
200
+
201
+ def format_debug(self) -> str:
202
+ """An un-tested helper for getting state, for debugging."""
203
+ attributes = vars(self)
204
+ names = sorted(attributes)
205
+
206
+ state = ("{}={!r}".format(attr, attributes[attr]) for attr in sorted(names))
207
+ return "<{name} object: {{{state}}}>".format(
208
+ name=self.__class__.__name__,
209
+ state=", ".join(state),
210
+ )
211
+
212
+ # Things that are valid for all kinds of requirements?
213
+ @property
214
+ def name(self) -> Optional[str]:
215
+ if self.req is None:
216
+ return None
217
+ return self.req.name
218
+
219
+ @functools.lru_cache() # use cached_property in python 3.8+
220
+ def supports_pyproject_editable(self) -> bool:
221
+ if not self.use_pep517:
222
+ return False
223
+ assert self.pep517_backend
224
+ with self.build_env:
225
+ runner = runner_with_spinner_message(
226
+ "Checking if build backend supports build_editable"
227
+ )
228
+ with self.pep517_backend.subprocess_runner(runner):
229
+ return "build_editable" in self.pep517_backend._supported_features()
230
+
231
+ @property
232
+ def specifier(self) -> SpecifierSet:
233
+ return self.req.specifier
234
+
235
+ @property
236
+ def is_pinned(self) -> bool:
237
+ """Return whether I am pinned to an exact version.
238
+
239
+ For example, some-package==1.2 is pinned; some-package>1.2 is not.
240
+ """
241
+ specifiers = self.specifier
242
+ return len(specifiers) == 1 and next(iter(specifiers)).operator in {"==", "==="}
243
+
244
+ def match_markers(self, extras_requested: Optional[Iterable[str]] = None) -> bool:
245
+ if not extras_requested:
246
+ # Provide an extra to safely evaluate the markers
247
+ # without matching any extra
248
+ extras_requested = ("",)
249
+ if self.markers is not None:
250
+ return any(
251
+ self.markers.evaluate({"extra": extra}) for extra in extras_requested
252
+ )
253
+ else:
254
+ return True
255
+
256
+ @property
257
+ def has_hash_options(self) -> bool:
258
+ """Return whether any known-good hashes are specified as options.
259
+
260
+ These activate --require-hashes mode; hashes specified as part of a
261
+ URL do not.
262
+
263
+ """
264
+ return bool(self.hash_options)
265
+
266
+ def hashes(self, trust_internet: bool = True) -> Hashes:
267
+ """Return a hash-comparer that considers my option- and URL-based
268
+ hashes to be known-good.
269
+
270
+ Hashes in URLs--ones embedded in the requirements file, not ones
271
+ downloaded from an index server--are almost peers with ones from
272
+ flags. They satisfy --require-hashes (whether it was implicitly or
273
+ explicitly activated) but do not activate it. md5 and sha224 are not
274
+ allowed in flags, which should nudge people toward good algos. We
275
+ always OR all hashes together, even ones from URLs.
276
+
277
+ :param trust_internet: Whether to trust URL-based (#md5=...) hashes
278
+ downloaded from the internet, as by populate_link()
279
+
280
+ """
281
+ good_hashes = self.hash_options.copy()
282
+ link = self.link if trust_internet else self.original_link
283
+ if link and link.hash:
284
+ good_hashes.setdefault(link.hash_name, []).append(link.hash)
285
+ return Hashes(good_hashes)
286
+
287
+ def from_path(self) -> Optional[str]:
288
+ """Format a nice indicator to show where this "comes from" """
289
+ if self.req is None:
290
+ return None
291
+ s = str(self.req)
292
+ if self.comes_from:
293
+ if isinstance(self.comes_from, str):
294
+ comes_from = self.comes_from
295
+ else:
296
+ comes_from = self.comes_from.from_path()
297
+ if comes_from:
298
+ s += "->" + comes_from
299
+ return s
300
+
301
+ def ensure_build_location(
302
+ self, build_dir: str, autodelete: bool, parallel_builds: bool
303
+ ) -> str:
304
+ assert build_dir is not None
305
+ if self._temp_build_dir is not None:
306
+ assert self._temp_build_dir.path
307
+ return self._temp_build_dir.path
308
+ if self.req is None:
309
+ # Some systems have /tmp as a symlink which confuses custom
310
+ # builds (such as numpy). Thus, we ensure that the real path
311
+ # is returned.
312
+ self._temp_build_dir = TempDirectory(
313
+ kind=tempdir_kinds.REQ_BUILD, globally_managed=True
314
+ )
315
+
316
+ return self._temp_build_dir.path
317
+
318
+ # This is the only remaining place where we manually determine the path
319
+ # for the temporary directory. It is only needed for editables where
320
+ # it is the value of the --src option.
321
+
322
+ # When parallel builds are enabled, add a UUID to the build directory
323
+ # name so multiple builds do not interfere with each other.
324
+ dir_name: str = canonicalize_name(self.name)
325
+ if parallel_builds:
326
+ dir_name = f"{dir_name}_{uuid.uuid4().hex}"
327
+
328
+ # FIXME: Is there a better place to create the build_dir? (hg and bzr
329
+ # need this)
330
+ if not os.path.exists(build_dir):
331
+ logger.debug("Creating directory %s", build_dir)
332
+ os.makedirs(build_dir)
333
+ actual_build_dir = os.path.join(build_dir, dir_name)
334
+ # `None` indicates that we respect the globally-configured deletion
335
+ # settings, which is what we actually want when auto-deleting.
336
+ delete_arg = None if autodelete else False
337
+ return TempDirectory(
338
+ path=actual_build_dir,
339
+ delete=delete_arg,
340
+ kind=tempdir_kinds.REQ_BUILD,
341
+ globally_managed=True,
342
+ ).path
343
+
344
+ def _set_requirement(self) -> None:
345
+ """Set requirement after generating metadata."""
346
+ assert self.req is None
347
+ assert self.metadata is not None
348
+ assert self.source_dir is not None
349
+
350
+ # Construct a Requirement object from the generated metadata
351
+ if isinstance(parse_version(self.metadata["Version"]), Version):
352
+ op = "=="
353
+ else:
354
+ op = "==="
355
+
356
+ self.req = Requirement(
357
+ "".join(
358
+ [
359
+ self.metadata["Name"],
360
+ op,
361
+ self.metadata["Version"],
362
+ ]
363
+ )
364
+ )
365
+
366
+ def warn_on_mismatching_name(self) -> None:
367
+ metadata_name = canonicalize_name(self.metadata["Name"])
368
+ if canonicalize_name(self.req.name) == metadata_name:
369
+ # Everything is fine.
370
+ return
371
+
372
+ # If we're here, there's a mismatch. Log a warning about it.
373
+ logger.warning(
374
+ "Generating metadata for package %s "
375
+ "produced metadata for project name %s. Fix your "
376
+ "#egg=%s fragments.",
377
+ self.name,
378
+ metadata_name,
379
+ self.name,
380
+ )
381
+ self.req = Requirement(metadata_name)
382
+
383
+ def check_if_exists(self, use_user_site: bool) -> None:
384
+ """Find an installed distribution that satisfies or conflicts
385
+ with this requirement, and set self.satisfied_by or
386
+ self.should_reinstall appropriately.
387
+ """
388
+ if self.req is None:
389
+ return
390
+ existing_dist = get_default_environment().get_distribution(self.req.name)
391
+ if not existing_dist:
392
+ return
393
+
394
+ version_compatible = self.req.specifier.contains(
395
+ existing_dist.version,
396
+ prereleases=True,
397
+ )
398
+ if not version_compatible:
399
+ self.satisfied_by = None
400
+ if use_user_site:
401
+ if existing_dist.in_usersite:
402
+ self.should_reinstall = True
403
+ elif running_under_virtualenv() and existing_dist.in_site_packages:
404
+ raise InstallationError(
405
+ f"Will not install to the user site because it will "
406
+ f"lack sys.path precedence to {existing_dist.raw_name} "
407
+ f"in {existing_dist.location}"
408
+ )
409
+ else:
410
+ self.should_reinstall = True
411
+ else:
412
+ if self.editable:
413
+ self.should_reinstall = True
414
+ # when installing editables, nothing pre-existing should ever
415
+ # satisfy
416
+ self.satisfied_by = None
417
+ else:
418
+ self.satisfied_by = existing_dist
419
+
420
+ # Things valid for wheels
421
+ @property
422
+ def is_wheel(self) -> bool:
423
+ if not self.link:
424
+ return False
425
+ return self.link.is_wheel
426
+
427
+ # Things valid for sdists
428
+ @property
429
+ def unpacked_source_directory(self) -> str:
430
+ return os.path.join(
431
+ self.source_dir, self.link and self.link.subdirectory_fragment or ""
432
+ )
433
+
434
+ @property
435
+ def setup_py_path(self) -> str:
436
+ assert self.source_dir, f"No source dir for {self}"
437
+ setup_py = os.path.join(self.unpacked_source_directory, "setup.py")
438
+
439
+ return setup_py
440
+
441
+ @property
442
+ def setup_cfg_path(self) -> str:
443
+ assert self.source_dir, f"No source dir for {self}"
444
+ setup_cfg = os.path.join(self.unpacked_source_directory, "setup.cfg")
445
+
446
+ return setup_cfg
447
+
448
+ @property
449
+ def pyproject_toml_path(self) -> str:
450
+ assert self.source_dir, f"No source dir for {self}"
451
+ return make_pyproject_path(self.unpacked_source_directory)
452
+
453
+ def load_pyproject_toml(self) -> None:
454
+ """Load the pyproject.toml file.
455
+
456
+ After calling this routine, all of the attributes related to PEP 517
457
+ processing for this requirement have been set. In particular, the
458
+ use_pep517 attribute can be used to determine whether we should
459
+ follow the PEP 517 or legacy (setup.py) code path.
460
+ """
461
+ pyproject_toml_data = load_pyproject_toml(
462
+ self.use_pep517, self.pyproject_toml_path, self.setup_py_path, str(self)
463
+ )
464
+
465
+ if pyproject_toml_data is None:
466
+ self.use_pep517 = False
467
+ return
468
+
469
+ self.use_pep517 = True
470
+ requires, backend, check, backend_path = pyproject_toml_data
471
+ self.requirements_to_check = check
472
+ self.pyproject_requires = requires
473
+ self.pep517_backend = Pep517HookCaller(
474
+ self.unpacked_source_directory,
475
+ backend,
476
+ backend_path=backend_path,
477
+ )
478
+
479
+ def isolated_editable_sanity_check(self) -> None:
480
+ """Check that an editable requirement if valid for use with PEP 517/518.
481
+
482
+ This verifies that an editable that has a pyproject.toml either supports PEP 660
483
+ or as a setup.py or a setup.cfg
484
+ """
485
+ if (
486
+ self.editable
487
+ and self.use_pep517
488
+ and not self.supports_pyproject_editable()
489
+ and not os.path.isfile(self.setup_py_path)
490
+ and not os.path.isfile(self.setup_cfg_path)
491
+ ):
492
+ raise InstallationError(
493
+ f"Project {self} has a 'pyproject.toml' and its build "
494
+ f"backend is missing the 'build_editable' hook. Since it does not "
495
+ f"have a 'setup.py' nor a 'setup.cfg', "
496
+ f"it cannot be installed in editable mode. "
497
+ f"Consider using a build backend that supports PEP 660."
498
+ )
499
+
500
+ def prepare_metadata(self) -> None:
501
+ """Ensure that project metadata is available.
502
+
503
+ Under PEP 517 and PEP 660, call the backend hook to prepare the metadata.
504
+ Under legacy processing, call setup.py egg-info.
505
+ """
506
+ assert self.source_dir
507
+ details = self.name or f"from {self.link}"
508
+
509
+ if self.use_pep517:
510
+ assert self.pep517_backend is not None
511
+ if (
512
+ self.editable
513
+ and self.permit_editable_wheels
514
+ and self.supports_pyproject_editable()
515
+ ):
516
+ self.metadata_directory = generate_editable_metadata(
517
+ build_env=self.build_env,
518
+ backend=self.pep517_backend,
519
+ details=details,
520
+ )
521
+ else:
522
+ self.metadata_directory = generate_metadata(
523
+ build_env=self.build_env,
524
+ backend=self.pep517_backend,
525
+ details=details,
526
+ )
527
+ else:
528
+ self.metadata_directory = generate_metadata_legacy(
529
+ build_env=self.build_env,
530
+ setup_py_path=self.setup_py_path,
531
+ source_dir=self.unpacked_source_directory,
532
+ isolated=self.isolated,
533
+ details=details,
534
+ )
535
+
536
+ # Act on the newly generated metadata, based on the name and version.
537
+ if not self.name:
538
+ self._set_requirement()
539
+ else:
540
+ self.warn_on_mismatching_name()
541
+
542
+ self.assert_source_matches_version()
543
+
544
+ @property
545
+ def metadata(self) -> Any:
546
+ if not hasattr(self, "_metadata"):
547
+ self._metadata = self.get_dist().metadata
548
+
549
+ return self._metadata
550
+
551
+ def get_dist(self) -> BaseDistribution:
552
+ return get_directory_distribution(self.metadata_directory)
553
+
554
+ def assert_source_matches_version(self) -> None:
555
+ assert self.source_dir
556
+ version = self.metadata["version"]
557
+ if self.req.specifier and version not in self.req.specifier:
558
+ logger.warning(
559
+ "Requested %s, but installing version %s",
560
+ self,
561
+ version,
562
+ )
563
+ else:
564
+ logger.debug(
565
+ "Source in %s has version %s, which satisfies requirement %s",
566
+ display_path(self.source_dir),
567
+ version,
568
+ self,
569
+ )
570
+
571
+     # For both source distributions and editables
+     def ensure_has_source_dir(
+         self,
+         parent_dir: str,
+         autodelete: bool = False,
+         parallel_builds: bool = False,
+     ) -> None:
+         """Ensure that a source_dir is set.
+
+         This will create a temporary build dir if the name of the requirement
+         isn't known yet.
+
+         :param parent_dir: The ideal pip parent_dir for the source_dir.
+             Generally src_dir for editables and build_dir for sdists.
+         :return: self.source_dir
+         """
+         if self.source_dir is None:
+             self.source_dir = self.ensure_build_location(
+                 parent_dir,
+                 autodelete=autodelete,
+                 parallel_builds=parallel_builds,
+             )
+
+     # For editable installations
+     def update_editable(self) -> None:
+         if not self.link:
+             logger.debug(
+                 "Cannot update repository at %s; repository location is unknown",
+                 self.source_dir,
+             )
+             return
+         assert self.editable
+         assert self.source_dir
+         if self.link.scheme == "file":
+             # Static paths don't get updated
+             return
+         vcs_backend = vcs.get_backend_for_scheme(self.link.scheme)
+         # Editable requirements are validated in Requirement constructors.
+         # So here, if it's neither a path nor a valid VCS URL, it's a bug.
+         assert vcs_backend, f"Unsupported VCS URL {self.link.url}"
+         hidden_url = hide_url(self.link.url)
+         vcs_backend.obtain(self.source_dir, url=hidden_url, verbosity=0)
+
+     # Top-level Actions
+     def uninstall(
+         self, auto_confirm: bool = False, verbose: bool = False
+     ) -> Optional[UninstallPathSet]:
+         """
+         Uninstall the distribution currently satisfying this requirement.
+
+         Prompts before removing or modifying files unless
+         ``auto_confirm`` is True.
+
+         Refuses to delete or modify files outside of ``sys.prefix`` -
+         thus uninstallation within a virtual environment can only
+         modify that virtual environment, even if the virtualenv is
+         linked to global site-packages.
+
+         """
+         assert self.req
+         dist = get_default_environment().get_distribution(self.req.name)
+         if not dist:
+             logger.warning("Skipping %s as it is not installed.", self.name)
+             return None
+         logger.info("Found existing installation: %s", dist)
+
+         uninstalled_pathset = UninstallPathSet.from_dist(dist)
+         uninstalled_pathset.remove(auto_confirm, verbose)
+         return uninstalled_pathset
+
+     def _get_archive_name(self, path: str, parentdir: str, rootdir: str) -> str:
+         def _clean_zip_name(name: str, prefix: str) -> str:
+             assert name.startswith(
+                 prefix + os.path.sep
+             ), f"name {name!r} doesn't start with prefix {prefix!r}"
+             name = name[len(prefix) + 1 :]
+             name = name.replace(os.path.sep, "/")
+             return name
+
+         path = os.path.join(parentdir, path)
+         name = _clean_zip_name(path, rootdir)
+         return self.name + "/" + name
+
+     def archive(self, build_dir: Optional[str]) -> None:
+         """Saves archive to provided build_dir.
+
+         Used for saving downloaded VCS requirements as part of `pip download`.
+         """
+         assert self.source_dir
+         if build_dir is None:
+             return
+
+         create_archive = True
+         archive_name = "{}-{}.zip".format(self.name, self.metadata["version"])
+         archive_path = os.path.join(build_dir, archive_name)
+
+         if os.path.exists(archive_path):
+             response = ask_path_exists(
+                 "The file {} exists. (i)gnore, (w)ipe, "
+                 "(b)ackup, (a)bort ".format(display_path(archive_path)),
+                 ("i", "w", "b", "a"),
+             )
+             if response == "i":
+                 create_archive = False
+             elif response == "w":
+                 logger.warning("Deleting %s", display_path(archive_path))
+                 os.remove(archive_path)
+             elif response == "b":
+                 dest_file = backup_dir(archive_path)
+                 logger.warning(
+                     "Backing up %s to %s",
+                     display_path(archive_path),
+                     display_path(dest_file),
+                 )
+                 shutil.move(archive_path, dest_file)
+             elif response == "a":
+                 sys.exit(-1)
+
+         if not create_archive:
+             return
+
+         zip_output = zipfile.ZipFile(
+             archive_path,
+             "w",
+             zipfile.ZIP_DEFLATED,
+             allowZip64=True,
+         )
+         with zip_output:
+             dir = os.path.normcase(os.path.abspath(self.unpacked_source_directory))
+             for dirpath, dirnames, filenames in os.walk(dir):
+                 for dirname in dirnames:
+                     dir_arcname = self._get_archive_name(
+                         dirname,
+                         parentdir=dirpath,
+                         rootdir=dir,
+                     )
+                     zipdir = zipfile.ZipInfo(dir_arcname + "/")
+                     zipdir.external_attr = 0x1ED << 16  # 0o755
+                     zip_output.writestr(zipdir, "")
+                 for filename in filenames:
+                     file_arcname = self._get_archive_name(
+                         filename,
+                         parentdir=dirpath,
+                         rootdir=dir,
+                     )
+                     filename = os.path.join(dirpath, filename)
+                     zip_output.write(filename, file_arcname)
+
+         logger.info("Saved %s", display_path(archive_path))
+
+     def install(
+         self,
+         install_options: List[str],
+         global_options: Optional[Sequence[str]] = None,
+         root: Optional[str] = None,
+         home: Optional[str] = None,
+         prefix: Optional[str] = None,
+         warn_script_location: bool = True,
+         use_user_site: bool = False,
+         pycompile: bool = True,
+     ) -> None:
+         scheme = get_scheme(
+             self.name,
+             user=use_user_site,
+             home=home,
+             root=root,
+             isolated=self.isolated,
+             prefix=prefix,
+         )
+
+         global_options = global_options if global_options is not None else []
+         if self.editable and not self.is_wheel:
+             install_editable_legacy(
+                 install_options,
+                 global_options,
+                 prefix=prefix,
+                 home=home,
+                 use_user_site=use_user_site,
+                 name=self.name,
+                 setup_py_path=self.setup_py_path,
+                 isolated=self.isolated,
+                 build_env=self.build_env,
+                 unpacked_source_directory=self.unpacked_source_directory,
+             )
+             self.install_succeeded = True
+             return
+
+         if self.is_wheel:
+             assert self.local_file_path
+             direct_url = None
+             if self.editable:
+                 direct_url = direct_url_for_editable(self.unpacked_source_directory)
+             elif self.original_link:
+                 direct_url = direct_url_from_link(
+                     self.original_link,
+                     self.source_dir,
+                     self.original_link_is_in_wheel_cache,
+                 )
+             install_wheel(
+                 self.name,
+                 self.local_file_path,
+                 scheme=scheme,
+                 req_description=str(self.req),
+                 pycompile=pycompile,
+                 warn_script_location=warn_script_location,
+                 direct_url=direct_url,
+                 requested=self.user_supplied,
+             )
+             self.install_succeeded = True
+             return
+
+         # TODO: Why don't we do this for editable installs?
+
+         # Extend the list of global and install options passed on to
+         # the setup.py call with the ones from the requirements file.
+         # Options specified in requirements file override those
+         # specified on the command line, since the last option given
+         # to setup.py is the one that is used.
+         global_options = list(global_options) + self.global_options
+         install_options = list(install_options) + self.install_options
+
+         try:
+             success = install_legacy(
+                 install_options=install_options,
+                 global_options=global_options,
+                 root=root,
+                 home=home,
+                 prefix=prefix,
+                 use_user_site=use_user_site,
+                 pycompile=pycompile,
+                 scheme=scheme,
+                 setup_py_path=self.setup_py_path,
+                 isolated=self.isolated,
+                 req_name=self.name,
+                 build_env=self.build_env,
+                 unpacked_source_directory=self.unpacked_source_directory,
+                 req_description=str(self.req),
+             )
+         except LegacyInstallFailure as exc:
+             self.install_succeeded = False
+             raise exc
+         except Exception:
+             self.install_succeeded = True
+             raise
+
+         self.install_succeeded = success
+
+         if success and self.legacy_install_reason == 8368:
+             deprecated(
+                 reason=(
+                     "{} was installed using the legacy 'setup.py install' "
+                     "method, because a wheel could not be built for it.".format(
+                         self.name
+                     )
+                 ),
+                 replacement="to fix the wheel build issue reported above",
+                 gone_in=None,
+                 issue=8368,
+             )
+
+
+ def check_invalid_constraint_type(req: InstallRequirement) -> str:
+
+     # Check for unsupported forms
+     problem = ""
+     if not req.name:
+         problem = "Unnamed requirements are not allowed as constraints"
+     elif req.editable:
+         problem = "Editable requirements are not allowed as constraints"
+     elif req.extras:
+         problem = "Constraints cannot have extras"
+
+     if problem:
+         deprecated(
+             reason=(
+                 "Constraints are only allowed to take the form of a package "
+                 "name and a version specifier. Other forms were originally "
+                 "permitted as an accident of the implementation, but were "
+                 "undocumented. The new implementation of the resolver no "
+                 "longer supports these forms."
+             ),
+             replacement="replacing the constraint with a requirement",
+             # No plan yet for when the new resolver becomes default
+             gone_in=None,
+             issue=8210,
+         )
+
+     return problem
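Editor's note: the archive naming above is easy to lose in the diff: `_clean_zip_name` strips the unpacked source root from each on-disk path and switches to "/" separators, and `_get_archive_name` then prefixes the requirement name, so every member of the `pip download` zip lives under `<name>/...`. A minimal standalone sketch of that mapping, assuming POSIX-style paths; `demo_archive_member` and the example paths are invented for illustration and are not part of pip:

import os

def demo_archive_member(req_name: str, path: str, rootdir: str) -> str:
    # Mirror of the logic above: drop the root prefix, use "/" separators,
    # and nest everything under the requirement name inside the zip.
    assert path.startswith(rootdir + os.path.sep)
    member = path[len(rootdir) + 1 :].replace(os.path.sep, "/")
    return req_name + "/" + member

# Hypothetical example values, shown only to illustrate the mapping.
root = os.path.join(os.sep, "tmp", "build", "demo")
src = os.path.join(root, "pkg", "module.py")
print(demo_archive_member("demo", src, root))  # -> "demo/pkg/module.py"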
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_tracker.py ADDED
@@ -0,0 +1,124 @@
+ import contextlib
+ import hashlib
+ import logging
+ import os
+ from types import TracebackType
+ from typing import Dict, Iterator, Optional, Set, Type, Union
+
+ from pip._internal.models.link import Link
+ from pip._internal.req.req_install import InstallRequirement
+ from pip._internal.utils.temp_dir import TempDirectory
+
+ logger = logging.getLogger(__name__)
+
+
+ @contextlib.contextmanager
+ def update_env_context_manager(**changes: str) -> Iterator[None]:
+     target = os.environ
+
+     # Save values from the target and change them.
+     non_existent_marker = object()
+     saved_values: Dict[str, Union[object, str]] = {}
+     for name, new_value in changes.items():
+         try:
+             saved_values[name] = target[name]
+         except KeyError:
+             saved_values[name] = non_existent_marker
+         target[name] = new_value
+
+     try:
+         yield
+     finally:
+         # Restore original values in the target.
+         for name, original_value in saved_values.items():
+             if original_value is non_existent_marker:
+                 del target[name]
+             else:
+                 assert isinstance(original_value, str)  # for mypy
+                 target[name] = original_value
+
+
+ @contextlib.contextmanager
+ def get_requirement_tracker() -> Iterator["RequirementTracker"]:
+     root = os.environ.get("PIP_REQ_TRACKER")
+     with contextlib.ExitStack() as ctx:
+         if root is None:
+             root = ctx.enter_context(TempDirectory(kind="req-tracker")).path
+             ctx.enter_context(update_env_context_manager(PIP_REQ_TRACKER=root))
+             logger.debug("Initialized build tracking at %s", root)
+
+         with RequirementTracker(root) as tracker:
+             yield tracker
+
+
+ class RequirementTracker:
+     def __init__(self, root: str) -> None:
+         self._root = root
+         self._entries: Set[InstallRequirement] = set()
+         logger.debug("Created build tracker: %s", self._root)
+
+     def __enter__(self) -> "RequirementTracker":
+         logger.debug("Entered build tracker: %s", self._root)
+         return self
+
+     def __exit__(
+         self,
+         exc_type: Optional[Type[BaseException]],
+         exc_val: Optional[BaseException],
+         exc_tb: Optional[TracebackType],
+     ) -> None:
+         self.cleanup()
+
+     def _entry_path(self, link: Link) -> str:
+         hashed = hashlib.sha224(link.url_without_fragment.encode()).hexdigest()
+         return os.path.join(self._root, hashed)
+
+     def add(self, req: InstallRequirement) -> None:
+         """Add an InstallRequirement to build tracking."""
+
+         assert req.link
+         # Get the file to write information about this requirement.
+         entry_path = self._entry_path(req.link)
+
+         # Try reading from the file. If it exists and can be read from, a build
+         # is already in progress, so a LookupError is raised.
+         try:
+             with open(entry_path) as fp:
+                 contents = fp.read()
+         except FileNotFoundError:
+             pass
+         else:
+             message = "{} is already being built: {}".format(req.link, contents)
+             raise LookupError(message)
+
+         # If we're here, req should really not be building already.
+         assert req not in self._entries
+
+         # Start tracking this requirement.
+         with open(entry_path, "w", encoding="utf-8") as fp:
+             fp.write(str(req))
+         self._entries.add(req)
+
+         logger.debug("Added %s to build tracker %r", req, self._root)
+
+     def remove(self, req: InstallRequirement) -> None:
+         """Remove an InstallRequirement from build tracking."""
+
+         assert req.link
+         # Delete the created file and the corresponding entries.
+         os.unlink(self._entry_path(req.link))
+         self._entries.remove(req)
+
+         logger.debug("Removed %s from build tracker %r", req, self._root)
+
+     def cleanup(self) -> None:
+         for req in set(self._entries):
+             self.remove(req)
+
+         logger.debug("Removed build tracker: %r", self._root)
+
+     @contextlib.contextmanager
+     def track(self, req: InstallRequirement) -> Iterator[None]:
+         self.add(req)
+         yield
+         self.remove(req)
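Editor's note: the tracker above coordinates concurrent builds through the `PIP_REQ_TRACKER` environment variable and one marker file per link (named after the SHA-224 of the URL without its fragment), and it relies on `update_env_context_manager` to set and cleanly restore that variable. A rough standalone sketch of the same save-and-restore pattern on `os.environ`; the function name `set_env_temporarily` and the variable name `TEMP_KEY` are made up for the demo and assume `TEMP_KEY` is not already set:

import contextlib
import os
from typing import Dict, Iterator, Union

@contextlib.contextmanager
def set_env_temporarily(**changes: str) -> Iterator[None]:
    # Same idea as update_env_context_manager above: remember what was there
    # (or that nothing was there), apply the change, and undo it on exit.
    missing = object()
    saved: Dict[str, Union[object, str]] = {}
    for name, value in changes.items():
        saved[name] = os.environ.get(name, missing)
        os.environ[name] = value
    try:
        yield
    finally:
        for name, original in saved.items():
            if original is missing:
                os.environ.pop(name, None)
            else:
                assert isinstance(original, str)
                os.environ[name] = original

# Illustrative usage with a hypothetical variable name.
with set_env_temporarily(TEMP_KEY="/tmp/req-tracker-demo"):
    assert os.environ["TEMP_KEY"] == "/tmp/req-tracker-demo"
assert "TEMP_KEY" not in os.environ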
env-llmeval/lib/python3.10/site-packages/pip/_internal/req/req_uninstall.py ADDED
@@ -0,0 +1,633 @@
+ import functools
+ import os
+ import sys
+ import sysconfig
+ from importlib.util import cache_from_source
+ from typing import Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple
+
+ from pip._internal.exceptions import UninstallationError
+ from pip._internal.locations import get_bin_prefix, get_bin_user
+ from pip._internal.metadata import BaseDistribution
+ from pip._internal.utils.compat import WINDOWS
+ from pip._internal.utils.egg_link import egg_link_path_from_location
+ from pip._internal.utils.logging import getLogger, indent_log
+ from pip._internal.utils.misc import ask, is_local, normalize_path, renames, rmtree
+ from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
+
+ logger = getLogger(__name__)
+
+
+ def _script_names(bin_dir: str, script_name: str, is_gui: bool) -> Iterator[str]:
+     """Create the fully qualified name of the files created by
+     {console,gui}_scripts for the given ``dist``.
+     Returns the list of file names
+     """
+     exe_name = os.path.join(bin_dir, script_name)
+     yield exe_name
+     if not WINDOWS:
+         return
+     yield f"{exe_name}.exe"
+     yield f"{exe_name}.exe.manifest"
+     if is_gui:
+         yield f"{exe_name}-script.pyw"
+     else:
+         yield f"{exe_name}-script.py"
+
+
+ def _unique(fn: Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]:
+     @functools.wraps(fn)
+     def unique(*args: Any, **kw: Any) -> Iterator[Any]:
+         seen: Set[Any] = set()
+         for item in fn(*args, **kw):
+             if item not in seen:
+                 seen.add(item)
+                 yield item
+
+     return unique
+
+
+ @_unique
+ def uninstallation_paths(dist: BaseDistribution) -> Iterator[str]:
+     """
+     Yield all the uninstallation paths for dist based on RECORD-without-.py[co]
+
+     Yield paths to all the files in RECORD. For each .py file in RECORD, add
+     the .pyc and .pyo in the same directory.
+
+     UninstallPathSet.add() takes care of the __pycache__ .py[co].
+
+     If RECORD is not found, raises UninstallationError,
+     with possible information from the INSTALLER file.
+
+     https://packaging.python.org/specifications/recording-installed-packages/
+     """
+     location = dist.location
+     assert location is not None, "not installed"
+
+     entries = dist.iter_declared_entries()
+     if entries is None:
+         msg = "Cannot uninstall {dist}, RECORD file not found.".format(dist=dist)
+         installer = dist.installer
+         if not installer or installer == "pip":
+             dep = "{}=={}".format(dist.raw_name, dist.version)
+             msg += (
+                 " You might be able to recover from this via: "
+                 "'pip install --force-reinstall --no-deps {}'.".format(dep)
+             )
+         else:
+             msg += " Hint: The package was installed by {}.".format(installer)
+         raise UninstallationError(msg)
+
+     for entry in entries:
+         path = os.path.join(location, entry)
+         yield path
+         if path.endswith(".py"):
+             dn, fn = os.path.split(path)
+             base = fn[:-3]
+             path = os.path.join(dn, base + ".pyc")
+             yield path
+             path = os.path.join(dn, base + ".pyo")
+             yield path
+
+
+ def compact(paths: Iterable[str]) -> Set[str]:
+     """Compact a path set to contain the minimal number of paths
+     necessary to contain all paths in the set. If /a/path/ and
+     /a/path/to/a/file.txt are both in the set, leave only the
+     shorter path."""
+
+     sep = os.path.sep
+     short_paths: Set[str] = set()
+     for path in sorted(paths, key=len):
+         should_skip = any(
+             path.startswith(shortpath.rstrip("*"))
+             and path[len(shortpath.rstrip("*").rstrip(sep))] == sep
+             for shortpath in short_paths
+         )
+         if not should_skip:
+             short_paths.add(path)
+     return short_paths
+
+
+ def compress_for_rename(paths: Iterable[str]) -> Set[str]:
+     """Returns a set containing the paths that need to be renamed.
+
+     This set may include directories when the original sequence of paths
+     included every file on disk.
+     """
+     case_map = {os.path.normcase(p): p for p in paths}
+     remaining = set(case_map)
+     unchecked = sorted({os.path.split(p)[0] for p in case_map.values()}, key=len)
+     wildcards: Set[str] = set()
+
+     def norm_join(*a: str) -> str:
+         return os.path.normcase(os.path.join(*a))
+
+     for root in unchecked:
+         if any(os.path.normcase(root).startswith(w) for w in wildcards):
+             # This directory has already been handled.
+             continue
+
+         all_files: Set[str] = set()
+         all_subdirs: Set[str] = set()
+         for dirname, subdirs, files in os.walk(root):
+             all_subdirs.update(norm_join(root, dirname, d) for d in subdirs)
+             all_files.update(norm_join(root, dirname, f) for f in files)
+         # If all the files we found are in our remaining set of files to
+         # remove, then remove them from the latter set and add a wildcard
+         # for the directory.
+         if not (all_files - remaining):
+             remaining.difference_update(all_files)
+             wildcards.add(root + os.sep)
+
+     return set(map(case_map.__getitem__, remaining)) | wildcards
+
+
+ def compress_for_output_listing(paths: Iterable[str]) -> Tuple[Set[str], Set[str]]:
+     """Returns a tuple of 2 sets of which paths to display to user
+
+     The first set contains paths that would be deleted. Files of a package
+     are not added and the top-level directory of the package has a '*' added
+     at the end - to signify that all it's contents are removed.
+
+     The second set contains files that would have been skipped in the above
+     folders.
+     """
+
+     will_remove = set(paths)
+     will_skip = set()
+
+     # Determine folders and files
+     folders = set()
+     files = set()
+     for path in will_remove:
+         if path.endswith(".pyc"):
+             continue
+         if path.endswith("__init__.py") or ".dist-info" in path:
+             folders.add(os.path.dirname(path))
+         files.add(path)
+
+     # probably this one https://github.com/python/mypy/issues/390
+     _normcased_files = set(map(os.path.normcase, files))  # type: ignore
+
+     folders = compact(folders)
+
+     # This walks the tree using os.walk to not miss extra folders
+     # that might get added.
+     for folder in folders:
+         for dirpath, _, dirfiles in os.walk(folder):
+             for fname in dirfiles:
+                 if fname.endswith(".pyc"):
+                     continue
+
+                 file_ = os.path.join(dirpath, fname)
+                 if (
+                     os.path.isfile(file_)
+                     and os.path.normcase(file_) not in _normcased_files
+                 ):
+                     # We are skipping this file. Add it to the set.
+                     will_skip.add(file_)
+
+     will_remove = files | {os.path.join(folder, "*") for folder in folders}
+
+     return will_remove, will_skip
+
+
+ class StashedUninstallPathSet:
+     """A set of file rename operations to stash files while
+     tentatively uninstalling them."""
+
+     def __init__(self) -> None:
+         # Mapping from source file root to [Adjacent]TempDirectory
+         # for files under that directory.
+         self._save_dirs: Dict[str, TempDirectory] = {}
+         # (old path, new path) tuples for each move that may need
+         # to be undone.
+         self._moves: List[Tuple[str, str]] = []
+
+     def _get_directory_stash(self, path: str) -> str:
+         """Stashes a directory.
+
+         Directories are stashed adjacent to their original location if
+         possible, or else moved/copied into the user's temp dir."""
+
+         try:
+             save_dir: TempDirectory = AdjacentTempDirectory(path)
+         except OSError:
+             save_dir = TempDirectory(kind="uninstall")
+         self._save_dirs[os.path.normcase(path)] = save_dir
+
+         return save_dir.path
+
+     def _get_file_stash(self, path: str) -> str:
+         """Stashes a file.
+
+         If no root has been provided, one will be created for the directory
+         in the user's temp directory."""
+         path = os.path.normcase(path)
+         head, old_head = os.path.dirname(path), None
+         save_dir = None
+
+         while head != old_head:
+             try:
+                 save_dir = self._save_dirs[head]
+                 break
+             except KeyError:
+                 pass
+             head, old_head = os.path.dirname(head), head
+         else:
+             # Did not find any suitable root
+             head = os.path.dirname(path)
+             save_dir = TempDirectory(kind="uninstall")
+             self._save_dirs[head] = save_dir
+
+         relpath = os.path.relpath(path, head)
+         if relpath and relpath != os.path.curdir:
+             return os.path.join(save_dir.path, relpath)
+         return save_dir.path
+
+     def stash(self, path: str) -> str:
+         """Stashes the directory or file and returns its new location.
+         Handle symlinks as files to avoid modifying the symlink targets.
+         """
+         path_is_dir = os.path.isdir(path) and not os.path.islink(path)
+         if path_is_dir:
+             new_path = self._get_directory_stash(path)
+         else:
+             new_path = self._get_file_stash(path)
+
+         self._moves.append((path, new_path))
+         if path_is_dir and os.path.isdir(new_path):
+             # If we're moving a directory, we need to
+             # remove the destination first or else it will be
+             # moved to inside the existing directory.
+             # We just created new_path ourselves, so it will
+             # be removable.
+             os.rmdir(new_path)
+         renames(path, new_path)
+         return new_path
+
+     def commit(self) -> None:
+         """Commits the uninstall by removing stashed files."""
+         for _, save_dir in self._save_dirs.items():
+             save_dir.cleanup()
+         self._moves = []
+         self._save_dirs = {}
+
+     def rollback(self) -> None:
+         """Undoes the uninstall by moving stashed files back."""
+         for p in self._moves:
+             logger.info("Moving to %s\n from %s", *p)
+
+         for new_path, path in self._moves:
+             try:
+                 logger.debug("Replacing %s from %s", new_path, path)
+                 if os.path.isfile(new_path) or os.path.islink(new_path):
+                     os.unlink(new_path)
+                 elif os.path.isdir(new_path):
+                     rmtree(new_path)
+                 renames(path, new_path)
+             except OSError as ex:
+                 logger.error("Failed to restore %s", new_path)
+                 logger.debug("Exception: %s", ex)
+
+         self.commit()
+
+     @property
+     def can_rollback(self) -> bool:
+         return bool(self._moves)
+
+
+ class UninstallPathSet:
+     """A set of file paths to be removed in the uninstallation of a
+     requirement."""
+
+     def __init__(self, dist: BaseDistribution) -> None:
+         self._paths: Set[str] = set()
+         self._refuse: Set[str] = set()
+         self._pth: Dict[str, UninstallPthEntries] = {}
+         self._dist = dist
+         self._moved_paths = StashedUninstallPathSet()
+
+     def _permitted(self, path: str) -> bool:
+         """
+         Return True if the given path is one we are permitted to
+         remove/modify, False otherwise.
+
+         """
+         return is_local(path)
+
+     def add(self, path: str) -> None:
+         head, tail = os.path.split(path)
+
+         # we normalize the head to resolve parent directory symlinks, but not
+         # the tail, since we only want to uninstall symlinks, not their targets
+         path = os.path.join(normalize_path(head), os.path.normcase(tail))
+
+         if not os.path.exists(path):
+             return
+         if self._permitted(path):
+             self._paths.add(path)
+         else:
+             self._refuse.add(path)
+
+         # __pycache__ files can show up after 'installed-files.txt' is created,
+         # due to imports
+         if os.path.splitext(path)[1] == ".py":
+             self.add(cache_from_source(path))
+
+     def add_pth(self, pth_file: str, entry: str) -> None:
+         pth_file = normalize_path(pth_file)
+         if self._permitted(pth_file):
+             if pth_file not in self._pth:
+                 self._pth[pth_file] = UninstallPthEntries(pth_file)
+             self._pth[pth_file].add(entry)
+         else:
+             self._refuse.add(pth_file)
+
+     def remove(self, auto_confirm: bool = False, verbose: bool = False) -> None:
+         """Remove paths in ``self._paths`` with confirmation (unless
+         ``auto_confirm`` is True)."""
+
+         if not self._paths:
+             logger.info(
+                 "Can't uninstall '%s'. No files were found to uninstall.",
+                 self._dist.raw_name,
+             )
+             return
+
+         dist_name_version = f"{self._dist.raw_name}-{self._dist.version}"
+         logger.info("Uninstalling %s:", dist_name_version)
+
+         with indent_log():
+             if auto_confirm or self._allowed_to_proceed(verbose):
+                 moved = self._moved_paths
+
+                 for_rename = compress_for_rename(self._paths)
+
+                 for path in sorted(compact(for_rename)):
+                     moved.stash(path)
+                     logger.verbose("Removing file or directory %s", path)
+
+                 for pth in self._pth.values():
+                     pth.remove()
+
+                 logger.info("Successfully uninstalled %s", dist_name_version)
+
+     def _allowed_to_proceed(self, verbose: bool) -> bool:
+         """Display which files would be deleted and prompt for confirmation"""
+
+         def _display(msg: str, paths: Iterable[str]) -> None:
+             if not paths:
+                 return
+
+             logger.info(msg)
+             with indent_log():
+                 for path in sorted(compact(paths)):
+                     logger.info(path)
+
+         if not verbose:
+             will_remove, will_skip = compress_for_output_listing(self._paths)
+         else:
+             # In verbose mode, display all the files that are going to be
+             # deleted.
+             will_remove = set(self._paths)
+             will_skip = set()
+
+         _display("Would remove:", will_remove)
+         _display("Would not remove (might be manually added):", will_skip)
+         _display("Would not remove (outside of prefix):", self._refuse)
+         if verbose:
+             _display("Will actually move:", compress_for_rename(self._paths))
+
+         return ask("Proceed (Y/n)? ", ("y", "n", "")) != "n"
+
+     def rollback(self) -> None:
+         """Rollback the changes previously made by remove()."""
+         if not self._moved_paths.can_rollback:
+             logger.error(
+                 "Can't roll back %s; was not uninstalled",
+                 self._dist.raw_name,
+             )
+             return
+         logger.info("Rolling back uninstall of %s", self._dist.raw_name)
+         self._moved_paths.rollback()
+         for pth in self._pth.values():
+             pth.rollback()
+
+     def commit(self) -> None:
+         """Remove temporary save dir: rollback will no longer be possible."""
+         self._moved_paths.commit()
+
+     @classmethod
+     def from_dist(cls, dist: BaseDistribution) -> "UninstallPathSet":
+         dist_location = dist.location
+         info_location = dist.info_location
+         if dist_location is None:
+             logger.info(
+                 "Not uninstalling %s since it is not installed",
+                 dist.canonical_name,
+             )
+             return cls(dist)
+
+         normalized_dist_location = normalize_path(dist_location)
+         if not dist.local:
+             logger.info(
+                 "Not uninstalling %s at %s, outside environment %s",
+                 dist.canonical_name,
+                 normalized_dist_location,
+                 sys.prefix,
+             )
+             return cls(dist)
+
+         if normalized_dist_location in {
+             p
+             for p in {sysconfig.get_path("stdlib"), sysconfig.get_path("platstdlib")}
+             if p
+         }:
+             logger.info(
+                 "Not uninstalling %s at %s, as it is in the standard library.",
+                 dist.canonical_name,
+                 normalized_dist_location,
+             )
+             return cls(dist)
+
+         paths_to_remove = cls(dist)
+         develop_egg_link = egg_link_path_from_location(dist.raw_name)
+
+         # Distribution is installed with metadata in a "flat" .egg-info
+         # directory. This means it is not a modern .dist-info installation, an
+         # egg, or legacy editable.
+         setuptools_flat_installation = (
+             dist.installed_with_setuptools_egg_info
+             and info_location is not None
+             and os.path.exists(info_location)
+             # If dist is editable and the location points to a ``.egg-info``,
+             # we are in fact in the legacy editable case.
+             and not info_location.endswith(f"{dist.setuptools_filename}.egg-info")
+         )
+
+         # Uninstall cases order do matter as in the case of 2 installs of the
+         # same package, pip needs to uninstall the currently detected version
+         if setuptools_flat_installation:
+             if info_location is not None:
+                 paths_to_remove.add(info_location)
+             installed_files = dist.iter_declared_entries()
+             if installed_files is not None:
+                 for installed_file in installed_files:
+                     paths_to_remove.add(os.path.join(dist_location, installed_file))
+             # FIXME: need a test for this elif block
+             # occurs with --single-version-externally-managed/--record outside
+             # of pip
+             elif dist.is_file("top_level.txt"):
+                 try:
+                     namespace_packages = dist.read_text("namespace_packages.txt")
+                 except FileNotFoundError:
+                     namespaces = []
+                 else:
+                     namespaces = namespace_packages.splitlines(keepends=False)
+                 for top_level_pkg in [
+                     p
+                     for p in dist.read_text("top_level.txt").splitlines()
+                     if p and p not in namespaces
+                 ]:
+                     path = os.path.join(dist_location, top_level_pkg)
+                     paths_to_remove.add(path)
+                     paths_to_remove.add(f"{path}.py")
+                     paths_to_remove.add(f"{path}.pyc")
+                     paths_to_remove.add(f"{path}.pyo")
+
+         elif dist.installed_by_distutils:
+             raise UninstallationError(
+                 "Cannot uninstall {!r}. It is a distutils installed project "
+                 "and thus we cannot accurately determine which files belong "
+                 "to it which would lead to only a partial uninstall.".format(
+                     dist.raw_name,
+                 )
+             )
+
+         elif dist.installed_as_egg:
+             # package installed by easy_install
+             # We cannot match on dist.egg_name because it can slightly vary
+             # i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
+             paths_to_remove.add(dist_location)
+             easy_install_egg = os.path.split(dist_location)[1]
+             easy_install_pth = os.path.join(
+                 os.path.dirname(dist_location),
+                 "easy-install.pth",
+             )
+             paths_to_remove.add_pth(easy_install_pth, "./" + easy_install_egg)
+
+         elif dist.installed_with_dist_info:
+             for path in uninstallation_paths(dist):
+                 paths_to_remove.add(path)
+
+         elif develop_egg_link:
+             # PEP 660 modern editable is handled in the ``.dist-info`` case
+             # above, so this only covers the setuptools-style editable.
+             with open(develop_egg_link) as fh:
+                 link_pointer = os.path.normcase(fh.readline().strip())
+             assert link_pointer == dist_location, (
+                 f"Egg-link {link_pointer} does not match installed location of "
+                 f"{dist.raw_name} (at {dist_location})"
+             )
+             paths_to_remove.add(develop_egg_link)
+             easy_install_pth = os.path.join(
+                 os.path.dirname(develop_egg_link), "easy-install.pth"
+             )
+             paths_to_remove.add_pth(easy_install_pth, dist_location)
+
+         else:
+             logger.debug(
+                 "Not sure how to uninstall: %s - Check: %s",
+                 dist,
+                 dist_location,
+             )
+
+         if dist.in_usersite:
+             bin_dir = get_bin_user()
+         else:
+             bin_dir = get_bin_prefix()
+
+         # find distutils scripts= scripts
+         try:
+             for script in dist.iterdir("scripts"):
+                 paths_to_remove.add(os.path.join(bin_dir, script.name))
+                 if WINDOWS:
+                     paths_to_remove.add(os.path.join(bin_dir, f"{script.name}.bat"))
+         except (FileNotFoundError, NotADirectoryError):
+             pass
+
+         # find console_scripts and gui_scripts
+         def iter_scripts_to_remove(
+             dist: BaseDistribution,
+             bin_dir: str,
+         ) -> Iterator[str]:
+             for entry_point in dist.iter_entry_points():
+                 if entry_point.group == "console_scripts":
+                     yield from _script_names(bin_dir, entry_point.name, False)
+                 elif entry_point.group == "gui_scripts":
+                     yield from _script_names(bin_dir, entry_point.name, True)
+
+         for s in iter_scripts_to_remove(dist, bin_dir):
+             paths_to_remove.add(s)
+
+         return paths_to_remove
+
+
+ class UninstallPthEntries:
+     def __init__(self, pth_file: str) -> None:
+         self.file = pth_file
+         self.entries: Set[str] = set()
+         self._saved_lines: Optional[List[bytes]] = None
+
+     def add(self, entry: str) -> None:
+         entry = os.path.normcase(entry)
+         # On Windows, os.path.normcase converts the entry to use
+         # backslashes. This is correct for entries that describe absolute
+         # paths outside of site-packages, but all the others use forward
+         # slashes.
+         # os.path.splitdrive is used instead of os.path.isabs because isabs
+         # treats non-absolute paths with drive letter markings like c:foo\bar
+         # as absolute paths. It also does not recognize UNC paths if they don't
+         # have more than "\\sever\share". Valid examples: "\\server\share\" or
+         # "\\server\share\folder".
+         if WINDOWS and not os.path.splitdrive(entry)[0]:
+             entry = entry.replace("\\", "/")
+         self.entries.add(entry)
+
+     def remove(self) -> None:
+         logger.verbose("Removing pth entries from %s:", self.file)
+
+         # If the file doesn't exist, log a warning and return
+         if not os.path.isfile(self.file):
+             logger.warning("Cannot remove entries from nonexistent file %s", self.file)
+             return
+         with open(self.file, "rb") as fh:
+             # windows uses '\r\n' with py3k, but uses '\n' with py2.x
+             lines = fh.readlines()
+             self._saved_lines = lines
+         if any(b"\r\n" in line for line in lines):
+             endline = "\r\n"
+         else:
+             endline = "\n"
+         # handle missing trailing newline
+         if lines and not lines[-1].endswith(endline.encode("utf-8")):
+             lines[-1] = lines[-1] + endline.encode("utf-8")
+         for entry in self.entries:
+             try:
+                 logger.verbose("Removing entry: %s", entry)
+                 lines.remove((entry + endline).encode("utf-8"))
+             except ValueError:
+                 pass
+         with open(self.file, "wb") as fh:
+             fh.writelines(lines)
+
+     def rollback(self) -> bool:
+         if self._saved_lines is None:
+             logger.error("Cannot roll back changes to %s, none were made", self.file)
+             return False
+         logger.debug("Rolling %s back to previous state", self.file)
+         with open(self.file, "wb") as fh:
+             fh.writelines(self._saved_lines)
+         return True
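Editor's note: of the helpers above, `compact` is the one whose behaviour is easiest to misread: it keeps only the shortest paths that already cover everything else in the set, which is what lets the uninstall listing show `package/*` instead of every file. A self-contained sketch of the same idea, assuming POSIX-style separators; the function name `compact_paths` and the example paths are invented for illustration (the real helper lives in this module as `compact`):

import os
from typing import Iterable, Set

def compact_paths(paths: Iterable[str]) -> Set[str]:
    # Keep the shortest paths; drop any path already covered by a kept prefix,
    # mirroring compact() above (including its handling of trailing "*").
    sep = os.path.sep
    kept: Set[str] = set()
    for path in sorted(paths, key=len):
        covered = any(
            path.startswith(prefix.rstrip("*"))
            and path[len(prefix.rstrip("*").rstrip(sep))] == sep
            for prefix in kept
        )
        if not covered:
            kept.add(path)
    return kept

# Invented example: the nested file is dropped because its parent is kept.
demo = {"/srv/app/pkg", "/srv/app/pkg/sub/mod.py", "/srv/app/other.py"}
print(sorted(compact_paths(demo)))
# -> ['/srv/app/other.py', '/srv/app/pkg']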
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__init__.py ADDED
File without changes
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (184 Bytes).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/appdirs.cpython-310.pyc ADDED
Binary file (1.61 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/compat.cpython-310.pyc ADDED
Binary file (1.5 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/datetime.cpython-310.pyc ADDED
Binary file (507 Bytes).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/distutils_args.cpython-310.pyc ADDED
Binary file (1.09 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/egg_link.cpython-310.pyc ADDED
Binary file (2.14 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/encoding.cpython-310.pyc ADDED
Binary file (1.3 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/entrypoints.cpython-310.pyc ADDED
Binary file (1.29 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/glibc.cpython-310.pyc ADDED
Binary file (1.66 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/hashes.cpython-310.pyc ADDED
Binary file (5.19 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/logging.cpython-310.pyc ADDED
Binary file (9.62 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/misc.cpython-310.pyc ADDED
Binary file (19.4 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/packaging.cpython-310.pyc ADDED
Binary file (2.07 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/temp_dir.cpython-310.pyc ADDED
Binary file (7.29 kB).
env-llmeval/lib/python3.10/site-packages/pip/_internal/utils/__pycache__/unpacking.cpython-310.pyc ADDED
Binary file (6.64 kB).