Add files using upload-large-folder tool
This view is limited to 50 files because it contains too many changes.
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/build_env.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/cache.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/configuration.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/exceptions.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/main.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/pyproject.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__init__.py +127 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/cache.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/check.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/completion.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/debug.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/download.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/hash.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/help.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/index.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/install.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/list.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/search.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/show.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/cache.py +223 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/check.py +53 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/completion.py +96 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/configuration.py +266 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/debug.py +202 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/download.py +140 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/freeze.py +97 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/hash.py +59 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/help.py +41 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/index.py +139 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/install.py +771 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/list.py +363 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/search.py +174 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/show.py +178 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py +105 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/wheel.py +178 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__init__.py +2 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc +0 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/index/collector.py +648 -0
- env-llmeval/lib/python3.10/site-packages/pip/_internal/index/package_finder.py +1004 -0
env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/__init__.cpython-310.pyc
ADDED: Binary file (745 Bytes)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/build_env.cpython-310.pyc
ADDED: Binary file (9.59 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/cache.cpython-310.pyc
ADDED: Binary file (8.37 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/configuration.cpython-310.pyc
ADDED: Binary file (11.1 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/exceptions.cpython-310.pyc
ADDED: Binary file (23.1 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/main.cpython-310.pyc
ADDED: Binary file (610 Bytes)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/pyproject.cpython-310.pyc
ADDED: Binary file (3.53 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/self_outdated_check.cpython-310.pyc
ADDED: Binary file (4.57 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/__pycache__/wheel_builder.cpython-310.pyc
ADDED: Binary file (9.13 kB)
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__init__.py
ADDED
@@ -0,0 +1,127 @@
"""
Package containing all pip commands
"""

import importlib
from collections import namedtuple
from typing import Any, Dict, Optional

from pip._internal.cli.base_command import Command

CommandInfo = namedtuple("CommandInfo", "module_path, class_name, summary")

# This dictionary does a bunch of heavy lifting for help output:
# - Enables avoiding additional (costly) imports for presenting `--help`.
# - The ordering matters for help display.
#
# Even though the module path starts with the same "pip._internal.commands"
# prefix, the full path makes testing easier (specifically when modifying
# `commands_dict` in test setup / teardown).
commands_dict: Dict[str, CommandInfo] = {
    "install": CommandInfo(
        "pip._internal.commands.install",
        "InstallCommand",
        "Install packages.",
    ),
    "download": CommandInfo(
        "pip._internal.commands.download",
        "DownloadCommand",
        "Download packages.",
    ),
    "uninstall": CommandInfo(
        "pip._internal.commands.uninstall",
        "UninstallCommand",
        "Uninstall packages.",
    ),
    "freeze": CommandInfo(
        "pip._internal.commands.freeze",
        "FreezeCommand",
        "Output installed packages in requirements format.",
    ),
    "list": CommandInfo(
        "pip._internal.commands.list",
        "ListCommand",
        "List installed packages.",
    ),
    "show": CommandInfo(
        "pip._internal.commands.show",
        "ShowCommand",
        "Show information about installed packages.",
    ),
    "check": CommandInfo(
        "pip._internal.commands.check",
        "CheckCommand",
        "Verify installed packages have compatible dependencies.",
    ),
    "config": CommandInfo(
        "pip._internal.commands.configuration",
        "ConfigurationCommand",
        "Manage local and global configuration.",
    ),
    "search": CommandInfo(
        "pip._internal.commands.search",
        "SearchCommand",
        "Search PyPI for packages.",
    ),
    "cache": CommandInfo(
        "pip._internal.commands.cache",
        "CacheCommand",
        "Inspect and manage pip's wheel cache.",
    ),
    "index": CommandInfo(
        "pip._internal.commands.index",
        "IndexCommand",
        "Inspect information available from package indexes.",
    ),
    "wheel": CommandInfo(
        "pip._internal.commands.wheel",
        "WheelCommand",
        "Build wheels from your requirements.",
    ),
    "hash": CommandInfo(
        "pip._internal.commands.hash",
        "HashCommand",
        "Compute hashes of package archives.",
    ),
    "completion": CommandInfo(
        "pip._internal.commands.completion",
        "CompletionCommand",
        "A helper command used for command completion.",
    ),
    "debug": CommandInfo(
        "pip._internal.commands.debug",
        "DebugCommand",
        "Show information useful for debugging.",
    ),
    "help": CommandInfo(
        "pip._internal.commands.help",
        "HelpCommand",
        "Show help for commands.",
    ),
}


def create_command(name: str, **kwargs: Any) -> Command:
    """
    Create an instance of the Command class with the given name.
    """
    module_path, class_name, summary = commands_dict[name]
    module = importlib.import_module(module_path)
    command_class = getattr(module, class_name)
    command = command_class(name=name, summary=summary, **kwargs)

    return command


def get_similar_commands(name: str) -> Optional[str]:
    """Command name auto-correct."""
    from difflib import get_close_matches

    name = name.lower()

    close_commands = get_close_matches(name, commands_dict.keys())

    if close_commands:
        return close_commands[0]
    else:
        return None
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/__init__.cpython-310.pyc
ADDED: Binary file (3.13 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/cache.cpython-310.pyc
ADDED: Binary file (6.17 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/check.cpython-310.pyc
ADDED: Binary file (1.56 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/completion.cpython-310.pyc
ADDED: Binary file (3.13 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/configuration.cpython-310.pyc
ADDED: Binary file (8.31 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/debug.cpython-310.pyc
ADDED: Binary file (6.67 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/download.cpython-310.pyc
ADDED: Binary file (3.98 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/freeze.cpython-310.pyc
ADDED: Binary file (2.64 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/hash.cpython-310.pyc
ADDED: Binary file (2.14 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/help.cpython-310.pyc
ADDED: Binary file (1.3 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/index.cpython-310.pyc
ADDED: Binary file (4.63 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/install.cpython-310.pyc
ADDED: Binary file (17.8 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/list.cpython-310.pyc
ADDED: Binary file (10.4 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/search.cpython-310.pyc
ADDED: Binary file (5.36 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/show.cpython-310.pyc
ADDED: Binary file (6.11 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/uninstall.cpython-310.pyc
ADDED: Binary file (3.1 kB)

env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/__pycache__/wheel.cpython-310.pyc
ADDED: Binary file (4.83 kB)
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/cache.py
ADDED
@@ -0,0 +1,223 @@
import os
import textwrap
from optparse import Values
from typing import Any, List

import pip._internal.utils.filesystem as filesystem
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.exceptions import CommandError, PipError
from pip._internal.utils.logging import getLogger

logger = getLogger(__name__)


class CacheCommand(Command):
    """
    Inspect and manage pip's wheel cache.

    Subcommands:

    - dir: Show the cache directory.
    - info: Show information about the cache.
    - list: List filenames of packages stored in the cache.
    - remove: Remove one or more package from the cache.
    - purge: Remove all items from the cache.

    ``<pattern>`` can be a glob expression or a package name.
    """

    ignore_require_venv = True
    usage = """
        %prog dir
        %prog info
        %prog list [<pattern>] [--format=[human, abspath]]
        %prog remove <pattern>
        %prog purge
    """

    def add_options(self) -> None:

        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="human",
            choices=("human", "abspath"),
            help="Select the output format among: human (default) or abspath",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        handlers = {
            "dir": self.get_cache_dir,
            "info": self.get_cache_info,
            "list": self.list_cache_items,
            "remove": self.remove_cache_items,
            "purge": self.purge_cache,
        }

        if not options.cache_dir:
            logger.error("pip cache commands can not function since cache is disabled.")
            return ERROR

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def get_cache_dir(self, options: Values, args: List[Any]) -> None:
        if args:
            raise CommandError("Too many arguments")

        logger.info(options.cache_dir)

    def get_cache_info(self, options: Values, args: List[Any]) -> None:
        if args:
            raise CommandError("Too many arguments")

        num_http_files = len(self._find_http_files(options))
        num_packages = len(self._find_wheels(options, "*"))

        http_cache_location = self._cache_dir(options, "http")
        wheels_cache_location = self._cache_dir(options, "wheels")
        http_cache_size = filesystem.format_directory_size(http_cache_location)
        wheels_cache_size = filesystem.format_directory_size(wheels_cache_location)

        message = (
            textwrap.dedent(
                """
                    Package index page cache location: {http_cache_location}
                    Package index page cache size: {http_cache_size}
                    Number of HTTP files: {num_http_files}
                    Wheels location: {wheels_cache_location}
                    Wheels size: {wheels_cache_size}
                    Number of wheels: {package_count}
                """
            )
            .format(
                http_cache_location=http_cache_location,
                http_cache_size=http_cache_size,
                num_http_files=num_http_files,
                wheels_cache_location=wheels_cache_location,
                package_count=num_packages,
                wheels_cache_size=wheels_cache_size,
            )
            .strip()
        )

        logger.info(message)

    def list_cache_items(self, options: Values, args: List[Any]) -> None:
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if args:
            pattern = args[0]
        else:
            pattern = "*"

        files = self._find_wheels(options, pattern)
        if options.list_format == "human":
            self.format_for_human(files)
        else:
            self.format_for_abspath(files)

    def format_for_human(self, files: List[str]) -> None:
        if not files:
            logger.info("Nothing cached.")
            return

        results = []
        for filename in files:
            wheel = os.path.basename(filename)
            size = filesystem.format_file_size(filename)
            results.append(f" - {wheel} ({size})")
        logger.info("Cache contents:\n")
        logger.info("\n".join(sorted(results)))

    def format_for_abspath(self, files: List[str]) -> None:
        if not files:
            return

        results = []
        for filename in files:
            results.append(filename)

        logger.info("\n".join(sorted(results)))

    def remove_cache_items(self, options: Values, args: List[Any]) -> None:
        if len(args) > 1:
            raise CommandError("Too many arguments")

        if not args:
            raise CommandError("Please provide a pattern")

        files = self._find_wheels(options, args[0])

        no_matching_msg = "No matching packages"
        if args[0] == "*":
            # Only fetch http files if no specific pattern given
            files += self._find_http_files(options)
        else:
            # Add the pattern to the log message
            no_matching_msg += ' for pattern "{}"'.format(args[0])

        if not files:
            logger.warning(no_matching_msg)

        for filename in files:
            os.unlink(filename)
            logger.verbose("Removed %s", filename)
        logger.info("Files removed: %s", len(files))

    def purge_cache(self, options: Values, args: List[Any]) -> None:
        if args:
            raise CommandError("Too many arguments")

        return self.remove_cache_items(options, ["*"])

    def _cache_dir(self, options: Values, subdir: str) -> str:
        return os.path.join(options.cache_dir, subdir)

    def _find_http_files(self, options: Values) -> List[str]:
        http_dir = self._cache_dir(options, "http")
        return filesystem.find_files(http_dir, "*")

    def _find_wheels(self, options: Values, pattern: str) -> List[str]:
        wheel_dir = self._cache_dir(options, "wheels")

        # The wheel filename format, as specified in PEP 427, is:
        #     {distribution}-{version}(-{build})?-{python}-{abi}-{platform}.whl
        #
        # Additionally, non-alphanumeric values in the distribution are
        # normalized to underscores (_), meaning hyphens can never occur
        # before `-{version}`.
        #
        # Given that information:
        # - If the pattern we're given contains a hyphen (-), the user is
        #   providing at least the version. Thus, we can just append `*.whl`
        #   to match the rest of it.
        # - If the pattern we're given doesn't contain a hyphen (-), the
        #   user is only providing the name. Thus, we append `-*.whl` to
        #   match the hyphen before the version, followed by anything else.
        #
        # PEP 427: https://www.python.org/dev/peps/pep-0427/
        pattern = pattern + ("*.whl" if "-" in pattern else "-*.whl")

        return filesystem.find_files(wheel_dir, pattern)
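A small standalone check of the glob construction described in the _find_wheels() comment above; the cached filenames below are made up for illustration:

from fnmatch import fnmatch

cached = [
    "requests-2.26.0-py2.py3-none-any.whl",
    "requests_toolbelt-0.9.1-py2.py3-none-any.whl",
]

for user_pattern in ("requests", "requests-2.26.0"):
    glob = user_pattern + ("*.whl" if "-" in user_pattern else "-*.whl")
    matches = [name for name in cached if fnmatch(name, glob)]
    print(glob, "->", matches)

# "requests-*.whl" matches only the requests wheel, not requests_toolbelt,
# because normalized distribution names never contain a hyphen.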
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/check.py
ADDED
@@ -0,0 +1,53 @@
import logging
from optparse import Values
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
    check_package_set,
    create_package_set_from_installed,
)
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class CheckCommand(Command):
    """Verify installed packages have compatible dependencies."""

    usage = """
      %prog [options]"""

    def run(self, options: Values, args: List[str]) -> int:

        package_set, parsing_probs = create_package_set_from_installed()
        missing, conflicting = check_package_set(package_set)

        for project_name in missing:
            version = package_set[project_name].version
            for dependency in missing[project_name]:
                write_output(
                    "%s %s requires %s, which is not installed.",
                    project_name,
                    version,
                    dependency[0],
                )

        for project_name in conflicting:
            version = package_set[project_name].version
            for dep_name, dep_version, req in conflicting[project_name]:
                write_output(
                    "%s %s has requirement %s, but you have %s %s.",
                    project_name,
                    version,
                    req,
                    dep_name,
                    dep_version,
                )

        if missing or conflicting or parsing_probs:
            return ERROR
        else:
            write_output("No broken requirements found.")
            return SUCCESS
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/completion.py
ADDED
@@ -0,0 +1,96 @@
import sys
import textwrap
from optparse import Values
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.utils.misc import get_prog

BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""

COMPLETION_SCRIPTS = {
    "bash": """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """,
    "zsh": """
        function _pip_completion {{
          local words cword
          read -Ac words
          read -cn cword
          reply=( $( COMP_WORDS="$words[*]" \\
                     COMP_CWORD=$(( cword-1 )) \\
                     PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
        }}
        compctl -K _pip_completion {prog}
    """,
    "fish": """
        function __fish_complete_pip
            set -lx COMP_WORDS (commandline -o) ""
            set -lx COMP_CWORD ( \\
                math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
            )
            set -lx PIP_AUTO_COMPLETE 1
            string split \\  -- (eval $COMP_WORDS[1])
        end
        complete -fa "(__fish_complete_pip)" -c {prog}
    """,
}


class CompletionCommand(Command):
    """A helper command to be used for command completion."""

    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "--bash",
            "-b",
            action="store_const",
            const="bash",
            dest="shell",
            help="Emit completion code for bash",
        )
        self.cmd_opts.add_option(
            "--zsh",
            "-z",
            action="store_const",
            const="zsh",
            dest="shell",
            help="Emit completion code for zsh",
        )
        self.cmd_opts.add_option(
            "--fish",
            "-f",
            action="store_const",
            const="fish",
            dest="shell",
            help="Emit completion code for fish",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        """Prints the completion code of the given shell"""
        shells = COMPLETION_SCRIPTS.keys()
        shell_options = ["--" + shell for shell in sorted(shells)]
        if options.shell in shells:
            script = textwrap.dedent(
                COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
            )
            print(BASE_COMPLETION.format(script=script, shell=options.shell))
            return SUCCESS
        else:
            sys.stderr.write(
                "ERROR: You must pass {}\n".format(" or ".join(shell_options))
            )
            return SUCCESS
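The completion scripts above go through two formatting passes: the per-shell template (with literal braces escaped as {{ }}) is filled with the program name and dedented, then wrapped in the BASE_COMPLETION banner. A minimal standalone sketch of that flow, using a trimmed copy of the bash template:

import textwrap

BASE = "\n# pip {shell} completion start{script}# pip {shell} completion end\n"
BASH_TEMPLATE = """
    _pip_completion()
    {{
        COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                       COMP_CWORD=$COMP_CWORD \\
                       PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
    }}
    complete -o default -F _pip_completion {prog}
"""

# First pass fills {prog} and unescapes the braces; second pass adds the banner.
script = textwrap.dedent(BASH_TEMPLATE.format(prog="pip"))
print(BASE.format(shell="bash", script=script))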
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/configuration.py
ADDED
@@ -0,0 +1,266 @@
import logging
import os
import subprocess
from optparse import Values
from typing import Any, List, Optional

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.configuration import (
    Configuration,
    Kind,
    get_configuration_files,
    kinds,
)
from pip._internal.exceptions import PipError
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import get_prog, write_output

logger = logging.getLogger(__name__)


class ConfigurationCommand(Command):
    """
    Manage local and global configuration.

    Subcommands:

    - list: List the active configuration (or from the file specified)
    - edit: Edit the configuration file in an editor
    - get: Get the value associated with name
    - set: Set the name=value
    - unset: Unset the value associated with name
    - debug: List the configuration files and values defined under them

    If none of --user, --global and --site are passed, a virtual
    environment configuration file is used if one is active and the file
    exists. Otherwise, all modifications happen to the user file by
    default.
    """

    ignore_require_venv = True
    usage = """
        %prog [<file-option>] list
        %prog [<file-option>] [--editor <editor-path>] edit

        %prog [<file-option>] get name
        %prog [<file-option>] set name value
        %prog [<file-option>] unset name
        %prog [<file-option>] debug
    """

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "--editor",
            dest="editor",
            action="store",
            default=None,
            help=(
                "Editor to use to edit the file. Uses VISUAL or EDITOR "
                "environment variables if not provided."
            ),
        )

        self.cmd_opts.add_option(
            "--global",
            dest="global_file",
            action="store_true",
            default=False,
            help="Use the system-wide configuration file only",
        )

        self.cmd_opts.add_option(
            "--user",
            dest="user_file",
            action="store_true",
            default=False,
            help="Use the user configuration file only",
        )

        self.cmd_opts.add_option(
            "--site",
            dest="site_file",
            action="store_true",
            default=False,
            help="Use the current environment configuration file only",
        )

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        handlers = {
            "list": self.list_values,
            "edit": self.open_in_editor,
            "get": self.get_name,
            "set": self.set_name_value,
            "unset": self.unset_name,
            "debug": self.list_config_values,
        }

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Determine which configuration files are to be loaded
        #    Depends on whether the command is modifying.
        try:
            load_only = self._determine_file(
                options, need_value=(action in ["get", "set", "unset", "edit"])
            )
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        # Load a new configuration
        self.configuration = Configuration(
            isolated=options.isolated_mode, load_only=load_only
        )
        self.configuration.load()

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _determine_file(self, options: Values, need_value: bool) -> Optional[Kind]:
        file_options = [
            key
            for key, value in (
                (kinds.USER, options.user_file),
                (kinds.GLOBAL, options.global_file),
                (kinds.SITE, options.site_file),
            )
            if value
        ]

        if not file_options:
            if not need_value:
                return None
            # Default to user, unless there's a site file.
            elif any(
                os.path.exists(site_config_file)
                for site_config_file in get_configuration_files()[kinds.SITE]
            ):
                return kinds.SITE
            else:
                return kinds.USER
        elif len(file_options) == 1:
            return file_options[0]

        raise PipError(
            "Need exactly one file to operate upon "
            "(--user, --site, --global) to perform."
        )

    def list_values(self, options: Values, args: List[str]) -> None:
        self._get_n_args(args, "list", n=0)

        for key, value in sorted(self.configuration.items()):
            write_output("%s=%r", key, value)

    def get_name(self, options: Values, args: List[str]) -> None:
        key = self._get_n_args(args, "get [name]", n=1)
        value = self.configuration.get_value(key)

        write_output("%s", value)

    def set_name_value(self, options: Values, args: List[str]) -> None:
        key, value = self._get_n_args(args, "set [name] [value]", n=2)
        self.configuration.set_value(key, value)

        self._save_configuration()

    def unset_name(self, options: Values, args: List[str]) -> None:
        key = self._get_n_args(args, "unset [name]", n=1)
        self.configuration.unset_value(key)

        self._save_configuration()

    def list_config_values(self, options: Values, args: List[str]) -> None:
        """List config key-value pairs across different config files"""
        self._get_n_args(args, "debug", n=0)

        self.print_env_var_values()
        # Iterate over config files and print if they exist, and the
        # key-value pairs present in them if they do
        for variant, files in sorted(self.configuration.iter_config_files()):
            write_output("%s:", variant)
            for fname in files:
                with indent_log():
                    file_exists = os.path.exists(fname)
                    write_output("%s, exists: %r", fname, file_exists)
                    if file_exists:
                        self.print_config_file_values(variant)

    def print_config_file_values(self, variant: Kind) -> None:
        """Get key-value pairs from the file of a variant"""
        for name, value in self.configuration.get_values_in_config(variant).items():
            with indent_log():
                write_output("%s: %s", name, value)

    def print_env_var_values(self) -> None:
        """Get key-values pairs present as environment variables"""
        write_output("%s:", "env_var")
        with indent_log():
            for key, value in sorted(self.configuration.get_environ_vars()):
                env_var = f"PIP_{key.upper()}"
                write_output("%s=%r", env_var, value)

    def open_in_editor(self, options: Values, args: List[str]) -> None:
        editor = self._determine_editor(options)

        fname = self.configuration.get_file_to_edit()
        if fname is None:
            raise PipError("Could not determine appropriate file.")

        try:
            subprocess.check_call([editor, fname])
        except subprocess.CalledProcessError as e:
            raise PipError(
                "Editor Subprocess exited with exit code {}".format(e.returncode)
            )

    def _get_n_args(self, args: List[str], example: str, n: int) -> Any:
        """Helper to make sure the command got the right number of arguments"""
        if len(args) != n:
            msg = (
                "Got unexpected number of arguments, expected {}. "
                '(example: "{} config {}")'
            ).format(n, get_prog(), example)
            raise PipError(msg)

        if n == 1:
            return args[0]
        else:
            return args

    def _save_configuration(self) -> None:
        # We successfully ran a modifying command. Need to save the
        # configuration.
        try:
            self.configuration.save()
        except Exception:
            logger.exception(
                "Unable to save configuration. Please report this as a bug."
            )
            raise PipError("Internal Error.")

    def _determine_editor(self, options: Values) -> str:
        if options.editor is not None:
            return options.editor
        elif "VISUAL" in os.environ:
            return os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            return os.environ["EDITOR"]
        else:
            raise PipError("Could not determine editor to use.")
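The file-selection rule in _determine_file() above can be summarised as: at most one of --user/--global/--site may be given; with none given, a read-only action loads everything, while a modifying action falls back to the site file if one exists and otherwise to the user file. A standalone sketch of that decision (the function and flag names here are illustrative, not pip's API):

from typing import Optional

def determine_file(user: bool, global_: bool, site: bool,
                   need_value: bool, site_file_exists: bool) -> Optional[str]:
    chosen = [k for k, v in (("user", user), ("global", global_), ("site", site)) if v]
    if not chosen:
        if not need_value:
            return None  # load all files, modify nothing
        return "site" if site_file_exists else "user"
    if len(chosen) == 1:
        return chosen[0]
    raise ValueError("Need exactly one of --user, --site, --global")

print(determine_file(False, False, False, need_value=True, site_file_exists=False))  # user
print(determine_file(False, True, False, need_value=True, site_file_exists=True))    # global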
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/debug.py
ADDED
@@ -0,0 +1,202 @@
import locale
import logging
import os
import sys
from optparse import Values
from types import ModuleType
from typing import Any, Dict, List, Optional

import pip._vendor
from pip._vendor.certifi import where
from pip._vendor.packaging.version import parse as parse_version

from pip import __file__ as pip_location
from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.configuration import Configuration
from pip._internal.metadata import get_environment
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import get_pip_version

logger = logging.getLogger(__name__)


def show_value(name: str, value: Any) -> None:
    logger.info("%s: %s", name, value)


def show_sys_implementation() -> None:
    logger.info("sys.implementation:")
    implementation_name = sys.implementation.name
    with indent_log():
        show_value("name", implementation_name)


def create_vendor_txt_map() -> Dict[str, str]:
    vendor_txt_path = os.path.join(
        os.path.dirname(pip_location), "_vendor", "vendor.txt"
    )

    with open(vendor_txt_path) as f:
        # Purge non version specifying lines.
        # Also, remove any space prefix or suffixes (including comments).
        lines = [
            line.strip().split(" ", 1)[0] for line in f.readlines() if "==" in line
        ]

    # Transform into "module" -> version dict.
    return dict(line.split("==", 1) for line in lines)  # type: ignore


def get_module_from_module_name(module_name: str) -> ModuleType:
    # Module name can be uppercase in vendor.txt for some reason...
    module_name = module_name.lower()
    # PATCH: setuptools is actually only pkg_resources.
    if module_name == "setuptools":
        module_name = "pkg_resources"

    __import__(f"pip._vendor.{module_name}", globals(), locals(), level=0)
    return getattr(pip._vendor, module_name)


def get_vendor_version_from_module(module_name: str) -> Optional[str]:
    module = get_module_from_module_name(module_name)
    version = getattr(module, "__version__", None)

    if not version:
        # Try to find version in debundled module info.
        env = get_environment([os.path.dirname(module.__file__)])
        dist = env.get_distribution(module_name)
        if dist:
            version = str(dist.version)

    return version


def show_actual_vendor_versions(vendor_txt_versions: Dict[str, str]) -> None:
    """Log the actual version and print extra info if there is
    a conflict or if the actual version could not be imported.
    """
    for module_name, expected_version in vendor_txt_versions.items():
        extra_message = ""
        actual_version = get_vendor_version_from_module(module_name)
        if not actual_version:
            extra_message = (
                " (Unable to locate actual module version, using"
                " vendor.txt specified version)"
            )
            actual_version = expected_version
        elif parse_version(actual_version) != parse_version(expected_version):
            extra_message = (
                " (CONFLICT: vendor.txt suggests version should"
                " be {})".format(expected_version)
            )
        logger.info("%s==%s%s", module_name, actual_version, extra_message)


def show_vendor_versions() -> None:
    logger.info("vendored library versions:")

    vendor_txt_versions = create_vendor_txt_map()
    with indent_log():
        show_actual_vendor_versions(vendor_txt_versions)


def show_tags(options: Values) -> None:
    tag_limit = 10

    target_python = make_target_python(options)
    tags = target_python.get_tags()

    # Display the target options that were explicitly provided.
    formatted_target = target_python.format_given()
    suffix = ""
    if formatted_target:
        suffix = f" (target: {formatted_target})"

    msg = "Compatible tags: {}{}".format(len(tags), suffix)
    logger.info(msg)

    if options.verbose < 1 and len(tags) > tag_limit:
        tags_limited = True
        tags = tags[:tag_limit]
    else:
        tags_limited = False

    with indent_log():
        for tag in tags:
            logger.info(str(tag))

        if tags_limited:
            msg = (
                "...\n[First {tag_limit} tags shown. Pass --verbose to show all.]"
            ).format(tag_limit=tag_limit)
            logger.info(msg)


def ca_bundle_info(config: Configuration) -> str:
    levels = set()
    for key, _ in config.items():
        levels.add(key.split(".")[0])

    if not levels:
        return "Not specified"

    levels_that_override_global = ["install", "wheel", "download"]
    global_overriding_level = [
        level for level in levels if level in levels_that_override_global
    ]
    if not global_overriding_level:
        return "global"

    if "global" in levels:
        levels.remove("global")
    return ", ".join(levels)


class DebugCommand(Command):
    """
    Display debug information.
    """

    usage = """
      %prog <options>"""
    ignore_require_venv = True

    def add_options(self) -> None:
        cmdoptions.add_target_python_options(self.cmd_opts)
        self.parser.insert_option_group(0, self.cmd_opts)
        self.parser.config.load()

    def run(self, options: Values, args: List[str]) -> int:
        logger.warning(
            "This command is only meant for debugging. "
            "Do not use this with automation for parsing and getting these "
            "details, since the output and options of this command may "
            "change without notice."
        )
        show_value("pip version", get_pip_version())
        show_value("sys.version", sys.version)
        show_value("sys.executable", sys.executable)
        show_value("sys.getdefaultencoding", sys.getdefaultencoding())
        show_value("sys.getfilesystemencoding", sys.getfilesystemencoding())
        show_value(
            "locale.getpreferredencoding",
            locale.getpreferredencoding(),
        )
        show_value("sys.platform", sys.platform)
        show_sys_implementation()

        show_value("'cert' config value", ca_bundle_info(self.parser.config))
        show_value("REQUESTS_CA_BUNDLE", os.environ.get("REQUESTS_CA_BUNDLE"))
        show_value("CURL_CA_BUNDLE", os.environ.get("CURL_CA_BUNDLE"))
        show_value("pip._vendor.certifi.where()", where())
        show_value("pip._vendor.DEBUNDLED", pip._vendor.DEBUNDLED)

        show_vendor_versions()

        show_tags(options)

        return SUCCESS
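create_vendor_txt_map() above reduces _vendor/vendor.txt to a name-to-version dict by keeping only lines that contain "==" and cutting each line at the first space. A standalone sketch of the same parsing, run on a made-up vendor.txt snippet (the pins below are illustrative, not pip's actual file):

sample_vendor_txt = """\
CacheControl==0.12.6  # comment after the pin
colorama==0.4.4
distlib==0.3.3
"""

# Keep only pinned lines, drop trailing comments, then split into name/version.
lines = [
    line.strip().split(" ", 1)[0]
    for line in sample_vendor_txt.splitlines()
    if "==" in line
]
versions = dict(line.split("==", 1) for line in lines)
print(versions)  # {'CacheControl': '0.12.6', 'colorama': '0.4.4', 'distlib': '0.3.3'}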
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/download.py
ADDED
@@ -0,0 +1,140 @@
import logging
import os
from optparse import Values
from typing import List

from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory

logger = logging.getLogger(__name__)


class DownloadCommand(RequirementCommand):
    """
    Download packages from:

    - PyPI (and other indexes) using requirement specifiers.
    - VCS project urls.
    - Local project directories.
    - Local or remote source archives.

    pip also supports downloading from "requirements files", which provide
    an easy way to specify a whole environment to be downloaded.
    """

    usage = """
      %prog [options] <requirement specifier> [package-index-options] ...
      %prog [options] -r <requirements file> [package-index-options] ...
      %prog [options] <vcs project url> ...
      %prog [options] <local project path> ...
      %prog [options] <archive url/path> ..."""

    def add_options(self) -> None:
        self.cmd_opts.add_option(cmdoptions.constraints())
        self.cmd_opts.add_option(cmdoptions.requirements())
        self.cmd_opts.add_option(cmdoptions.no_deps())
        self.cmd_opts.add_option(cmdoptions.global_options())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())
        self.cmd_opts.add_option(cmdoptions.prefer_binary())
        self.cmd_opts.add_option(cmdoptions.src())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.require_hashes())
        self.cmd_opts.add_option(cmdoptions.progress_bar())
        self.cmd_opts.add_option(cmdoptions.no_build_isolation())
        self.cmd_opts.add_option(cmdoptions.use_pep517())
        self.cmd_opts.add_option(cmdoptions.no_use_pep517())
        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())

        self.cmd_opts.add_option(
            "-d",
            "--dest",
            "--destination-dir",
            "--destination-directory",
            dest="download_dir",
            metavar="dir",
            default=os.curdir,
            help="Download packages into <dir>.",
        )

        cmdoptions.add_target_python_options(self.cmd_opts)

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    @with_cleanup
    def run(self, options: Values, args: List[str]) -> int:

        options.ignore_installed = True
        # editable doesn't really make sense for `pip download`, but the bowels
        # of the RequirementSet code require that property.
        options.editables = []

        cmdoptions.check_dist_restriction(options)

        options.download_dir = normalize_path(options.download_dir)
        ensure_dir(options.download_dir)

        session = self.get_default_session(options)

        target_python = make_target_python(options)
        finder = self._build_package_finder(
            options=options,
            session=session,
            target_python=target_python,
            ignore_requires_python=options.ignore_requires_python,
        )

        req_tracker = self.enter_context(get_requirement_tracker())

        directory = TempDirectory(
            delete=not options.no_clean,
            kind="download",
            globally_managed=True,
        )

        reqs = self.get_requirements(args, options, finder, session)

        preparer = self.make_requirement_preparer(
            temp_build_dir=directory,
            options=options,
            req_tracker=req_tracker,
            session=session,
            finder=finder,
            download_dir=options.download_dir,
            use_user_site=False,
            verbosity=self.verbosity,
        )

        resolver = self.make_resolver(
            preparer=preparer,
            finder=finder,
            options=options,
            ignore_requires_python=options.ignore_requires_python,
            py_version_info=options.python_version,
        )

        self.trace_basic_info(finder)

        requirement_set = resolver.resolve(reqs, check_supported_wheels=True)

        downloaded: List[str] = []
        for req in requirement_set.requirements.values():
            if req.satisfied_by is None:
                assert req.name is not None
                preparer.save_linked_requirement(req)
                downloaded.append(req.name)
        if downloaded:
            write_output("Successfully downloaded %s", " ".join(downloaded))

        return SUCCESS
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/freeze.py
ADDED
@@ -0,0 +1,97 @@
import sys
from optparse import Values
from typing import List

from pip._internal.cli import cmdoptions
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.operations.freeze import freeze
from pip._internal.utils.compat import stdlib_pkgs

DEV_PKGS = {"pip", "setuptools", "distribute", "wheel", "pkg-resources"}


class FreezeCommand(Command):
    """
    Output installed packages in requirements format.

    packages are listed in a case-insensitive sorted order.
    """

    usage = """
      %prog [options]"""
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-r",
            "--requirement",
            dest="requirements",
            action="append",
            default=[],
            metavar="file",
            help=(
                "Use the order in the given requirements file and its "
                "comments when generating output. This option can be "
                "used multiple times."
            ),
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            dest="local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not output "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--all",
            dest="freeze_all",
            action="store_true",
            help=(
                "Do not skip these packages in the output:"
                " {}".format(", ".join(DEV_PKGS))
            ),
        )
        self.cmd_opts.add_option(
            "--exclude-editable",
            dest="exclude_editable",
            action="store_true",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())

        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        skip = set(stdlib_pkgs)
        if not options.freeze_all:
            skip.update(DEV_PKGS)

        if options.excludes:
            skip.update(options.excludes)

        cmdoptions.check_list_path_option(options)

        for line in freeze(
            requirement=options.requirements,
            local_only=options.local,
            user_only=options.user,
            paths=options.path,
            isolated=options.isolated_mode,
            skip=skip,
            exclude_editable=options.exclude_editable,
        ):
            sys.stdout.write(line + "\n")
        return SUCCESS
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/hash.py
ADDED
@@ -0,0 +1,59 @@
import hashlib
import logging
import sys
from optparse import Values
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.utils.hashes import FAVORITE_HASH, STRONG_HASHES
from pip._internal.utils.misc import read_chunks, write_output

logger = logging.getLogger(__name__)


class HashCommand(Command):
    """
    Compute a hash of a local package archive.

    These can be used with --hash in a requirements file to do repeatable
    installs.
    """

    usage = "%prog [options] <file> ..."
    ignore_require_venv = True

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-a",
            "--algorithm",
            dest="algorithm",
            choices=STRONG_HASHES,
            action="store",
            default=FAVORITE_HASH,
            help="The hash algorithm to use: one of {}".format(
                ", ".join(STRONG_HASHES)
            ),
        )
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        if not args:
            self.parser.print_usage(sys.stderr)
            return ERROR

        algorithm = options.algorithm
        for path in args:
            write_output(
                "%s:\n--hash=%s:%s", path, algorithm, _hash_of_file(path, algorithm)
            )
        return SUCCESS


def _hash_of_file(path: str, algorithm: str) -> str:
    """Return the hash digest of a file."""
    with open(path, "rb") as archive:
        hash = hashlib.new(algorithm)
        for chunk in read_chunks(archive):
            hash.update(chunk)
    return hash.hexdigest()
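The "--hash=<algorithm>:<digest>" lines that HashCommand prints use the same syntax a requirements file accepts. A minimal stand-alone sketch that produces an equivalent line with only the standard library, assuming sha256 as the algorithm, could be:

# Illustrative sketch: compute the same "--hash=<algo>:<digest>" line that
# `pip hash` emits, using only hashlib. sha256 is assumed as the algorithm.
import hashlib


def hash_line(path: str, algorithm: str = "sha256") -> str:
    digest = hashlib.new(algorithm)
    with open(path, "rb") as archive:
        # Read in fixed-size chunks so large archives are not loaded into memory.
        for chunk in iter(lambda: archive.read(8192), b""):
            digest.update(chunk)
    return f"{path}:\n--hash={algorithm}:{digest.hexdigest()}"


if __name__ == "__main__":
    import sys

    for file_path in sys.argv[1:]:
        print(hash_line(file_path))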
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/help.py
ADDED
@@ -0,0 +1,41 @@
from optparse import Values
from typing import List

from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError


class HelpCommand(Command):
    """Show help for commands"""

    usage = """
      %prog <command>"""
    ignore_require_venv = True

    def run(self, options: Values, args: List[str]) -> int:
        from pip._internal.commands import (
            commands_dict,
            create_command,
            get_similar_commands,
        )

        try:
            # 'pip help' with no args is handled by pip.__init__.parseopt()
            cmd_name = args[0]  # the command we need help for
        except IndexError:
            return SUCCESS

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)

            msg = [f'unknown command "{cmd_name}"']
            if guess:
                msg.append(f'maybe you meant "{guess}"')

            raise CommandError(" - ".join(msg))

        command = create_command(cmd_name)
        command.parser.print_help()

        return SUCCESS
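For an unknown command name, HelpCommand raises a CommandError that combines the bad name with the closest match from get_similar_commands. A hedged way to observe that from the command line (the exact "ERROR:" prefix in the output is how pip normally renders a CommandError, which is an assumption here):

# Illustrative sketch: run `pip help` with a misspelled command name and show
# how the error text built in HelpCommand.run surfaces on stderr.
import subprocess
import sys

proc = subprocess.run(
    [sys.executable, "-m", "pip", "help", "instal"],  # deliberate typo
    capture_output=True,
    text=True,
)
# Expected to contain something like:
#   unknown command "instal" - maybe you meant "install"
print(proc.returncode)
print(proc.stderr.strip())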
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/index.py
ADDED
@@ -0,0 +1,139 @@
import logging
from optparse import Values
from typing import Any, Iterable, List, Optional, Union

from pip._vendor.packaging.version import LegacyVersion, Version

from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.commands.search import print_dist_installation_info
from pip._internal.exceptions import CommandError, DistributionNotFound, PipError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.models.target_python import TargetPython
from pip._internal.network.session import PipSession
from pip._internal.utils.misc import write_output

logger = logging.getLogger(__name__)


class IndexCommand(IndexGroupCommand):
    """
    Inspect information available from package indexes.
    """

    usage = """
        %prog versions <package>
    """

    def add_options(self) -> None:
        cmdoptions.add_target_python_options(self.cmd_opts)

        self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
        self.cmd_opts.add_option(cmdoptions.pre())
        self.cmd_opts.add_option(cmdoptions.no_binary())
        self.cmd_opts.add_option(cmdoptions.only_binary())

        index_opts = cmdoptions.make_option_group(
            cmdoptions.index_group,
            self.parser,
        )

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def run(self, options: Values, args: List[str]) -> int:
        handlers = {
            "versions": self.get_available_package_versions,
        }

        logger.warning(
            "pip index is currently an experimental command. "
            "It may be removed/changed in a future release "
            "without prior warning."
        )

        # Determine action
        if not args or args[0] not in handlers:
            logger.error(
                "Need an action (%s) to perform.",
                ", ".join(sorted(handlers)),
            )
            return ERROR

        action = args[0]

        # Error handling happens here, not in the action-handlers.
        try:
            handlers[action](options, args[1:])
        except PipError as e:
            logger.error(e.args[0])
            return ERROR

        return SUCCESS

    def _build_package_finder(
        self,
        options: Values,
        session: PipSession,
        target_python: Optional[TargetPython] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> PackageFinder:
        """
        Create a package finder appropriate to the index command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
            ignore_requires_python=ignore_requires_python,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            target_python=target_python,
            use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
        )

    def get_available_package_versions(self, options: Values, args: List[Any]) -> None:
        if len(args) != 1:
            raise CommandError("You need to specify exactly one argument")

        target_python = cmdoptions.make_target_python(options)
        query = args[0]

        with self._build_session(options) as session:
            finder = self._build_package_finder(
                options=options,
                session=session,
                target_python=target_python,
                ignore_requires_python=options.ignore_requires_python,
            )

            versions: Iterable[Union[LegacyVersion, Version]] = (
                candidate.version for candidate in finder.find_all_candidates(query)
            )

            if not options.pre:
                # Remove prereleases
                versions = (
                    version for version in versions if not version.is_prerelease
                )
            versions = set(versions)

            if not versions:
                raise DistributionNotFound(
                    "No matching distribution found for {}".format(query)
                )

            formatted_versions = [str(ver) for ver in sorted(versions, reverse=True)]
            latest = formatted_versions[0]

        write_output("{} ({})".format(query, latest))
        write_output("Available versions: {}".format(", ".join(formatted_versions)))
        print_dist_installation_info(query, latest)
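get_available_package_versions writes two lines, "<name> (<latest>)" followed by "Available versions: ...". A hedged sketch of scripting around that output (the command is experimental, as the warning above says, so the format may change):

# Illustrative sketch: call the experimental `pip index versions` command and
# extract the version list from the "Available versions: ..." line it prints.
import subprocess
import sys
from typing import List


def available_versions(package: str) -> List[str]:
    result = subprocess.run(
        [sys.executable, "-m", "pip", "index", "versions", package],
        check=True,
        capture_output=True,
        text=True,
    )
    prefix = "Available versions: "
    for line in result.stdout.splitlines():
        if line.startswith(prefix):
            return [v.strip() for v in line[len(prefix):].split(",")]
    return []


if __name__ == "__main__":
    print(available_versions("requests"))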
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/install.py
ADDED
@@ -0,0 +1,771 @@
1 |
+
import errno
|
2 |
+
import operator
|
3 |
+
import os
|
4 |
+
import shutil
|
5 |
+
import site
|
6 |
+
from optparse import SUPPRESS_HELP, Values
|
7 |
+
from typing import Iterable, List, Optional
|
8 |
+
|
9 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
10 |
+
|
11 |
+
from pip._internal.cache import WheelCache
|
12 |
+
from pip._internal.cli import cmdoptions
|
13 |
+
from pip._internal.cli.cmdoptions import make_target_python
|
14 |
+
from pip._internal.cli.req_command import (
|
15 |
+
RequirementCommand,
|
16 |
+
warn_if_run_as_root,
|
17 |
+
with_cleanup,
|
18 |
+
)
|
19 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
20 |
+
from pip._internal.exceptions import CommandError, InstallationError
|
21 |
+
from pip._internal.locations import get_scheme
|
22 |
+
from pip._internal.metadata import get_environment
|
23 |
+
from pip._internal.models.format_control import FormatControl
|
24 |
+
from pip._internal.operations.check import ConflictDetails, check_install_conflicts
|
25 |
+
from pip._internal.req import install_given_reqs
|
26 |
+
from pip._internal.req.req_install import InstallRequirement
|
27 |
+
from pip._internal.req.req_tracker import get_requirement_tracker
|
28 |
+
from pip._internal.utils.compat import WINDOWS
|
29 |
+
from pip._internal.utils.distutils_args import parse_distutils_args
|
30 |
+
from pip._internal.utils.filesystem import test_writable_dir
|
31 |
+
from pip._internal.utils.logging import getLogger
|
32 |
+
from pip._internal.utils.misc import (
|
33 |
+
ensure_dir,
|
34 |
+
get_pip_version,
|
35 |
+
protect_pip_from_modification_on_windows,
|
36 |
+
write_output,
|
37 |
+
)
|
38 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
39 |
+
from pip._internal.utils.virtualenv import (
|
40 |
+
running_under_virtualenv,
|
41 |
+
virtualenv_no_global,
|
42 |
+
)
|
43 |
+
from pip._internal.wheel_builder import (
|
44 |
+
BinaryAllowedPredicate,
|
45 |
+
build,
|
46 |
+
should_build_for_install_command,
|
47 |
+
)
|
48 |
+
|
49 |
+
logger = getLogger(__name__)
|
50 |
+
|
51 |
+
|
52 |
+
def get_check_binary_allowed(format_control: FormatControl) -> BinaryAllowedPredicate:
|
53 |
+
def check_binary_allowed(req: InstallRequirement) -> bool:
|
54 |
+
canonical_name = canonicalize_name(req.name or "")
|
55 |
+
allowed_formats = format_control.get_allowed_formats(canonical_name)
|
56 |
+
return "binary" in allowed_formats
|
57 |
+
|
58 |
+
return check_binary_allowed
|
59 |
+
|
60 |
+
|
61 |
+
class InstallCommand(RequirementCommand):
|
62 |
+
"""
|
63 |
+
Install packages from:
|
64 |
+
|
65 |
+
- PyPI (and other indexes) using requirement specifiers.
|
66 |
+
- VCS project urls.
|
67 |
+
- Local project directories.
|
68 |
+
- Local or remote source archives.
|
69 |
+
|
70 |
+
pip also supports installing from "requirements files", which provide
|
71 |
+
an easy way to specify a whole environment to be installed.
|
72 |
+
"""
|
73 |
+
|
74 |
+
usage = """
|
75 |
+
%prog [options] <requirement specifier> [package-index-options] ...
|
76 |
+
%prog [options] -r <requirements file> [package-index-options] ...
|
77 |
+
%prog [options] [-e] <vcs project url> ...
|
78 |
+
%prog [options] [-e] <local project path> ...
|
79 |
+
%prog [options] <archive url/path> ..."""
|
80 |
+
|
81 |
+
def add_options(self) -> None:
|
82 |
+
self.cmd_opts.add_option(cmdoptions.requirements())
|
83 |
+
self.cmd_opts.add_option(cmdoptions.constraints())
|
84 |
+
self.cmd_opts.add_option(cmdoptions.no_deps())
|
85 |
+
self.cmd_opts.add_option(cmdoptions.pre())
|
86 |
+
|
87 |
+
self.cmd_opts.add_option(cmdoptions.editable())
|
88 |
+
self.cmd_opts.add_option(
|
89 |
+
"-t",
|
90 |
+
"--target",
|
91 |
+
dest="target_dir",
|
92 |
+
metavar="dir",
|
93 |
+
default=None,
|
94 |
+
help=(
|
95 |
+
"Install packages into <dir>. "
|
96 |
+
"By default this will not replace existing files/folders in "
|
97 |
+
"<dir>. Use --upgrade to replace existing packages in <dir> "
|
98 |
+
"with new versions."
|
99 |
+
),
|
100 |
+
)
|
101 |
+
cmdoptions.add_target_python_options(self.cmd_opts)
|
102 |
+
|
103 |
+
self.cmd_opts.add_option(
|
104 |
+
"--user",
|
105 |
+
dest="use_user_site",
|
106 |
+
action="store_true",
|
107 |
+
help=(
|
108 |
+
"Install to the Python user install directory for your "
|
109 |
+
"platform. Typically ~/.local/, or %APPDATA%\\Python on "
|
110 |
+
"Windows. (See the Python documentation for site.USER_BASE "
|
111 |
+
"for full details.)"
|
112 |
+
),
|
113 |
+
)
|
114 |
+
self.cmd_opts.add_option(
|
115 |
+
"--no-user",
|
116 |
+
dest="use_user_site",
|
117 |
+
action="store_false",
|
118 |
+
help=SUPPRESS_HELP,
|
119 |
+
)
|
120 |
+
self.cmd_opts.add_option(
|
121 |
+
"--root",
|
122 |
+
dest="root_path",
|
123 |
+
metavar="dir",
|
124 |
+
default=None,
|
125 |
+
help="Install everything relative to this alternate root directory.",
|
126 |
+
)
|
127 |
+
self.cmd_opts.add_option(
|
128 |
+
"--prefix",
|
129 |
+
dest="prefix_path",
|
130 |
+
metavar="dir",
|
131 |
+
default=None,
|
132 |
+
help=(
|
133 |
+
"Installation prefix where lib, bin and other top-level "
|
134 |
+
"folders are placed"
|
135 |
+
),
|
136 |
+
)
|
137 |
+
|
138 |
+
self.cmd_opts.add_option(cmdoptions.src())
|
139 |
+
|
140 |
+
self.cmd_opts.add_option(
|
141 |
+
"-U",
|
142 |
+
"--upgrade",
|
143 |
+
dest="upgrade",
|
144 |
+
action="store_true",
|
145 |
+
help=(
|
146 |
+
"Upgrade all specified packages to the newest available "
|
147 |
+
"version. The handling of dependencies depends on the "
|
148 |
+
"upgrade-strategy used."
|
149 |
+
),
|
150 |
+
)
|
151 |
+
|
152 |
+
self.cmd_opts.add_option(
|
153 |
+
"--upgrade-strategy",
|
154 |
+
dest="upgrade_strategy",
|
155 |
+
default="only-if-needed",
|
156 |
+
choices=["only-if-needed", "eager"],
|
157 |
+
help=(
|
158 |
+
"Determines how dependency upgrading should be handled "
|
159 |
+
"[default: %default]. "
|
160 |
+
'"eager" - dependencies are upgraded regardless of '
|
161 |
+
"whether the currently installed version satisfies the "
|
162 |
+
"requirements of the upgraded package(s). "
|
163 |
+
'"only-if-needed" - are upgraded only when they do not '
|
164 |
+
"satisfy the requirements of the upgraded package(s)."
|
165 |
+
),
|
166 |
+
)
|
167 |
+
|
168 |
+
self.cmd_opts.add_option(
|
169 |
+
"--force-reinstall",
|
170 |
+
dest="force_reinstall",
|
171 |
+
action="store_true",
|
172 |
+
help="Reinstall all packages even if they are already up-to-date.",
|
173 |
+
)
|
174 |
+
|
175 |
+
self.cmd_opts.add_option(
|
176 |
+
"-I",
|
177 |
+
"--ignore-installed",
|
178 |
+
dest="ignore_installed",
|
179 |
+
action="store_true",
|
180 |
+
help=(
|
181 |
+
"Ignore the installed packages, overwriting them. "
|
182 |
+
"This can break your system if the existing package "
|
183 |
+
"is of a different version or was installed "
|
184 |
+
"with a different package manager!"
|
185 |
+
),
|
186 |
+
)
|
187 |
+
|
188 |
+
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
|
189 |
+
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
|
190 |
+
self.cmd_opts.add_option(cmdoptions.use_pep517())
|
191 |
+
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
|
192 |
+
|
193 |
+
self.cmd_opts.add_option(cmdoptions.install_options())
|
194 |
+
self.cmd_opts.add_option(cmdoptions.global_options())
|
195 |
+
|
196 |
+
self.cmd_opts.add_option(
|
197 |
+
"--compile",
|
198 |
+
action="store_true",
|
199 |
+
dest="compile",
|
200 |
+
default=True,
|
201 |
+
help="Compile Python source files to bytecode",
|
202 |
+
)
|
203 |
+
|
204 |
+
self.cmd_opts.add_option(
|
205 |
+
"--no-compile",
|
206 |
+
action="store_false",
|
207 |
+
dest="compile",
|
208 |
+
help="Do not compile Python source files to bytecode",
|
209 |
+
)
|
210 |
+
|
211 |
+
self.cmd_opts.add_option(
|
212 |
+
"--no-warn-script-location",
|
213 |
+
action="store_false",
|
214 |
+
dest="warn_script_location",
|
215 |
+
default=True,
|
216 |
+
help="Do not warn when installing scripts outside PATH",
|
217 |
+
)
|
218 |
+
self.cmd_opts.add_option(
|
219 |
+
"--no-warn-conflicts",
|
220 |
+
action="store_false",
|
221 |
+
dest="warn_about_conflicts",
|
222 |
+
default=True,
|
223 |
+
help="Do not warn about broken dependencies",
|
224 |
+
)
|
225 |
+
|
226 |
+
self.cmd_opts.add_option(cmdoptions.no_binary())
|
227 |
+
self.cmd_opts.add_option(cmdoptions.only_binary())
|
228 |
+
self.cmd_opts.add_option(cmdoptions.prefer_binary())
|
229 |
+
self.cmd_opts.add_option(cmdoptions.require_hashes())
|
230 |
+
self.cmd_opts.add_option(cmdoptions.progress_bar())
|
231 |
+
|
232 |
+
index_opts = cmdoptions.make_option_group(
|
233 |
+
cmdoptions.index_group,
|
234 |
+
self.parser,
|
235 |
+
)
|
236 |
+
|
237 |
+
self.parser.insert_option_group(0, index_opts)
|
238 |
+
self.parser.insert_option_group(0, self.cmd_opts)
|
239 |
+
|
240 |
+
@with_cleanup
|
241 |
+
def run(self, options: Values, args: List[str]) -> int:
|
242 |
+
if options.use_user_site and options.target_dir is not None:
|
243 |
+
raise CommandError("Can not combine '--user' and '--target'")
|
244 |
+
|
245 |
+
cmdoptions.check_install_build_global(options)
|
246 |
+
upgrade_strategy = "to-satisfy-only"
|
247 |
+
if options.upgrade:
|
248 |
+
upgrade_strategy = options.upgrade_strategy
|
249 |
+
|
250 |
+
cmdoptions.check_dist_restriction(options, check_target=True)
|
251 |
+
|
252 |
+
install_options = options.install_options or []
|
253 |
+
|
254 |
+
logger.verbose("Using %s", get_pip_version())
|
255 |
+
options.use_user_site = decide_user_install(
|
256 |
+
options.use_user_site,
|
257 |
+
prefix_path=options.prefix_path,
|
258 |
+
target_dir=options.target_dir,
|
259 |
+
root_path=options.root_path,
|
260 |
+
isolated_mode=options.isolated_mode,
|
261 |
+
)
|
262 |
+
|
263 |
+
target_temp_dir: Optional[TempDirectory] = None
|
264 |
+
target_temp_dir_path: Optional[str] = None
|
265 |
+
if options.target_dir:
|
266 |
+
options.ignore_installed = True
|
267 |
+
options.target_dir = os.path.abspath(options.target_dir)
|
268 |
+
if (
|
269 |
+
# fmt: off
|
270 |
+
os.path.exists(options.target_dir) and
|
271 |
+
not os.path.isdir(options.target_dir)
|
272 |
+
# fmt: on
|
273 |
+
):
|
274 |
+
raise CommandError(
|
275 |
+
"Target path exists but is not a directory, will not continue."
|
276 |
+
)
|
277 |
+
|
278 |
+
# Create a target directory for using with the target option
|
279 |
+
target_temp_dir = TempDirectory(kind="target")
|
280 |
+
target_temp_dir_path = target_temp_dir.path
|
281 |
+
self.enter_context(target_temp_dir)
|
282 |
+
|
283 |
+
global_options = options.global_options or []
|
284 |
+
|
285 |
+
session = self.get_default_session(options)
|
286 |
+
|
287 |
+
target_python = make_target_python(options)
|
288 |
+
finder = self._build_package_finder(
|
289 |
+
options=options,
|
290 |
+
session=session,
|
291 |
+
target_python=target_python,
|
292 |
+
ignore_requires_python=options.ignore_requires_python,
|
293 |
+
)
|
294 |
+
wheel_cache = WheelCache(options.cache_dir, options.format_control)
|
295 |
+
|
296 |
+
req_tracker = self.enter_context(get_requirement_tracker())
|
297 |
+
|
298 |
+
directory = TempDirectory(
|
299 |
+
delete=not options.no_clean,
|
300 |
+
kind="install",
|
301 |
+
globally_managed=True,
|
302 |
+
)
|
303 |
+
|
304 |
+
try:
|
305 |
+
reqs = self.get_requirements(args, options, finder, session)
|
306 |
+
|
307 |
+
# Only when installing is it permitted to use PEP 660.
|
308 |
+
# In other circumstances (pip wheel, pip download) we generate
|
309 |
+
# regular (i.e. non editable) metadata and wheels.
|
310 |
+
for req in reqs:
|
311 |
+
req.permit_editable_wheels = True
|
312 |
+
|
313 |
+
reject_location_related_install_options(reqs, options.install_options)
|
314 |
+
|
315 |
+
preparer = self.make_requirement_preparer(
|
316 |
+
temp_build_dir=directory,
|
317 |
+
options=options,
|
318 |
+
req_tracker=req_tracker,
|
319 |
+
session=session,
|
320 |
+
finder=finder,
|
321 |
+
use_user_site=options.use_user_site,
|
322 |
+
verbosity=self.verbosity,
|
323 |
+
)
|
324 |
+
resolver = self.make_resolver(
|
325 |
+
preparer=preparer,
|
326 |
+
finder=finder,
|
327 |
+
options=options,
|
328 |
+
wheel_cache=wheel_cache,
|
329 |
+
use_user_site=options.use_user_site,
|
330 |
+
ignore_installed=options.ignore_installed,
|
331 |
+
ignore_requires_python=options.ignore_requires_python,
|
332 |
+
force_reinstall=options.force_reinstall,
|
333 |
+
upgrade_strategy=upgrade_strategy,
|
334 |
+
use_pep517=options.use_pep517,
|
335 |
+
)
|
336 |
+
|
337 |
+
self.trace_basic_info(finder)
|
338 |
+
|
339 |
+
requirement_set = resolver.resolve(
|
340 |
+
reqs, check_supported_wheels=not options.target_dir
|
341 |
+
)
|
342 |
+
|
343 |
+
try:
|
344 |
+
pip_req = requirement_set.get_requirement("pip")
|
345 |
+
except KeyError:
|
346 |
+
modifying_pip = False
|
347 |
+
else:
|
348 |
+
# If we're not replacing an already installed pip,
|
349 |
+
# we're not modifying it.
|
350 |
+
modifying_pip = pip_req.satisfied_by is None
|
351 |
+
protect_pip_from_modification_on_windows(modifying_pip=modifying_pip)
|
352 |
+
|
353 |
+
check_binary_allowed = get_check_binary_allowed(finder.format_control)
|
354 |
+
|
355 |
+
reqs_to_build = [
|
356 |
+
r
|
357 |
+
for r in requirement_set.requirements.values()
|
358 |
+
if should_build_for_install_command(r, check_binary_allowed)
|
359 |
+
]
|
360 |
+
|
361 |
+
_, build_failures = build(
|
362 |
+
reqs_to_build,
|
363 |
+
wheel_cache=wheel_cache,
|
364 |
+
verify=True,
|
365 |
+
build_options=[],
|
366 |
+
global_options=[],
|
367 |
+
)
|
368 |
+
|
369 |
+
# If we're using PEP 517, we cannot do a legacy setup.py install
|
370 |
+
# so we fail here.
|
371 |
+
pep517_build_failure_names: List[str] = [
|
372 |
+
r.name for r in build_failures if r.use_pep517 # type: ignore
|
373 |
+
]
|
374 |
+
if pep517_build_failure_names:
|
375 |
+
raise InstallationError(
|
376 |
+
"Could not build wheels for {}, which is required to "
|
377 |
+
"install pyproject.toml-based projects".format(
|
378 |
+
", ".join(pep517_build_failure_names)
|
379 |
+
)
|
380 |
+
)
|
381 |
+
|
382 |
+
# For now, we just warn about failures building legacy
|
383 |
+
# requirements, as we'll fall through to a setup.py install for
|
384 |
+
# those.
|
385 |
+
for r in build_failures:
|
386 |
+
if not r.use_pep517:
|
387 |
+
r.legacy_install_reason = 8368
|
388 |
+
|
389 |
+
to_install = resolver.get_installation_order(requirement_set)
|
390 |
+
|
391 |
+
# Check for conflicts in the package set we're installing.
|
392 |
+
conflicts: Optional[ConflictDetails] = None
|
393 |
+
should_warn_about_conflicts = (
|
394 |
+
not options.ignore_dependencies and options.warn_about_conflicts
|
395 |
+
)
|
396 |
+
if should_warn_about_conflicts:
|
397 |
+
conflicts = self._determine_conflicts(to_install)
|
398 |
+
|
399 |
+
# Don't warn about script install locations if
|
400 |
+
# --target or --prefix has been specified
|
401 |
+
warn_script_location = options.warn_script_location
|
402 |
+
if options.target_dir or options.prefix_path:
|
403 |
+
warn_script_location = False
|
404 |
+
|
405 |
+
installed = install_given_reqs(
|
406 |
+
to_install,
|
407 |
+
install_options,
|
408 |
+
global_options,
|
409 |
+
root=options.root_path,
|
410 |
+
home=target_temp_dir_path,
|
411 |
+
prefix=options.prefix_path,
|
412 |
+
warn_script_location=warn_script_location,
|
413 |
+
use_user_site=options.use_user_site,
|
414 |
+
pycompile=options.compile,
|
415 |
+
)
|
416 |
+
|
417 |
+
lib_locations = get_lib_location_guesses(
|
418 |
+
user=options.use_user_site,
|
419 |
+
home=target_temp_dir_path,
|
420 |
+
root=options.root_path,
|
421 |
+
prefix=options.prefix_path,
|
422 |
+
isolated=options.isolated_mode,
|
423 |
+
)
|
424 |
+
env = get_environment(lib_locations)
|
425 |
+
|
426 |
+
installed.sort(key=operator.attrgetter("name"))
|
427 |
+
items = []
|
428 |
+
for result in installed:
|
429 |
+
item = result.name
|
430 |
+
try:
|
431 |
+
installed_dist = env.get_distribution(item)
|
432 |
+
if installed_dist is not None:
|
433 |
+
item = f"{item}-{installed_dist.version}"
|
434 |
+
except Exception:
|
435 |
+
pass
|
436 |
+
items.append(item)
|
437 |
+
|
438 |
+
if conflicts is not None:
|
439 |
+
self._warn_about_conflicts(
|
440 |
+
conflicts,
|
441 |
+
resolver_variant=self.determine_resolver_variant(options),
|
442 |
+
)
|
443 |
+
|
444 |
+
installed_desc = " ".join(items)
|
445 |
+
if installed_desc:
|
446 |
+
write_output(
|
447 |
+
"Successfully installed %s",
|
448 |
+
installed_desc,
|
449 |
+
)
|
450 |
+
except OSError as error:
|
451 |
+
show_traceback = self.verbosity >= 1
|
452 |
+
|
453 |
+
message = create_os_error_message(
|
454 |
+
error,
|
455 |
+
show_traceback,
|
456 |
+
options.use_user_site,
|
457 |
+
)
|
458 |
+
logger.error(message, exc_info=show_traceback) # noqa
|
459 |
+
|
460 |
+
return ERROR
|
461 |
+
|
462 |
+
if options.target_dir:
|
463 |
+
assert target_temp_dir
|
464 |
+
self._handle_target_dir(
|
465 |
+
options.target_dir, target_temp_dir, options.upgrade
|
466 |
+
)
|
467 |
+
|
468 |
+
warn_if_run_as_root()
|
469 |
+
return SUCCESS
|
470 |
+
|
471 |
+
def _handle_target_dir(
|
472 |
+
self, target_dir: str, target_temp_dir: TempDirectory, upgrade: bool
|
473 |
+
) -> None:
|
474 |
+
ensure_dir(target_dir)
|
475 |
+
|
476 |
+
# Checking both purelib and platlib directories for installed
|
477 |
+
# packages to be moved to target directory
|
478 |
+
lib_dir_list = []
|
479 |
+
|
480 |
+
# Checking both purelib and platlib directories for installed
|
481 |
+
# packages to be moved to target directory
|
482 |
+
scheme = get_scheme("", home=target_temp_dir.path)
|
483 |
+
purelib_dir = scheme.purelib
|
484 |
+
platlib_dir = scheme.platlib
|
485 |
+
data_dir = scheme.data
|
486 |
+
|
487 |
+
if os.path.exists(purelib_dir):
|
488 |
+
lib_dir_list.append(purelib_dir)
|
489 |
+
if os.path.exists(platlib_dir) and platlib_dir != purelib_dir:
|
490 |
+
lib_dir_list.append(platlib_dir)
|
491 |
+
if os.path.exists(data_dir):
|
492 |
+
lib_dir_list.append(data_dir)
|
493 |
+
|
494 |
+
for lib_dir in lib_dir_list:
|
495 |
+
for item in os.listdir(lib_dir):
|
496 |
+
if lib_dir == data_dir:
|
497 |
+
ddir = os.path.join(data_dir, item)
|
498 |
+
if any(s.startswith(ddir) for s in lib_dir_list[:-1]):
|
499 |
+
continue
|
500 |
+
target_item_dir = os.path.join(target_dir, item)
|
501 |
+
if os.path.exists(target_item_dir):
|
502 |
+
if not upgrade:
|
503 |
+
logger.warning(
|
504 |
+
"Target directory %s already exists. Specify "
|
505 |
+
"--upgrade to force replacement.",
|
506 |
+
target_item_dir,
|
507 |
+
)
|
508 |
+
continue
|
509 |
+
if os.path.islink(target_item_dir):
|
510 |
+
logger.warning(
|
511 |
+
"Target directory %s already exists and is "
|
512 |
+
"a link. pip will not automatically replace "
|
513 |
+
"links, please remove if replacement is "
|
514 |
+
"desired.",
|
515 |
+
target_item_dir,
|
516 |
+
)
|
517 |
+
continue
|
518 |
+
if os.path.isdir(target_item_dir):
|
519 |
+
shutil.rmtree(target_item_dir)
|
520 |
+
else:
|
521 |
+
os.remove(target_item_dir)
|
522 |
+
|
523 |
+
shutil.move(os.path.join(lib_dir, item), target_item_dir)
|
524 |
+
|
525 |
+
def _determine_conflicts(
|
526 |
+
self, to_install: List[InstallRequirement]
|
527 |
+
) -> Optional[ConflictDetails]:
|
528 |
+
try:
|
529 |
+
return check_install_conflicts(to_install)
|
530 |
+
except Exception:
|
531 |
+
logger.exception(
|
532 |
+
"Error while checking for conflicts. Please file an issue on "
|
533 |
+
"pip's issue tracker: https://github.com/pypa/pip/issues/new"
|
534 |
+
)
|
535 |
+
return None
|
536 |
+
|
537 |
+
def _warn_about_conflicts(
|
538 |
+
self, conflict_details: ConflictDetails, resolver_variant: str
|
539 |
+
) -> None:
|
540 |
+
package_set, (missing, conflicting) = conflict_details
|
541 |
+
if not missing and not conflicting:
|
542 |
+
return
|
543 |
+
|
544 |
+
parts: List[str] = []
|
545 |
+
if resolver_variant == "legacy":
|
546 |
+
parts.append(
|
547 |
+
"pip's legacy dependency resolver does not consider dependency "
|
548 |
+
"conflicts when selecting packages. This behaviour is the "
|
549 |
+
"source of the following dependency conflicts."
|
550 |
+
)
|
551 |
+
else:
|
552 |
+
assert resolver_variant == "2020-resolver"
|
553 |
+
parts.append(
|
554 |
+
"pip's dependency resolver does not currently take into account "
|
555 |
+
"all the packages that are installed. This behaviour is the "
|
556 |
+
"source of the following dependency conflicts."
|
557 |
+
)
|
558 |
+
|
559 |
+
# NOTE: There is some duplication here, with commands/check.py
|
560 |
+
for project_name in missing:
|
561 |
+
version = package_set[project_name][0]
|
562 |
+
for dependency in missing[project_name]:
|
563 |
+
message = (
|
564 |
+
"{name} {version} requires {requirement}, "
|
565 |
+
"which is not installed."
|
566 |
+
).format(
|
567 |
+
name=project_name,
|
568 |
+
version=version,
|
569 |
+
requirement=dependency[1],
|
570 |
+
)
|
571 |
+
parts.append(message)
|
572 |
+
|
573 |
+
for project_name in conflicting:
|
574 |
+
version = package_set[project_name][0]
|
575 |
+
for dep_name, dep_version, req in conflicting[project_name]:
|
576 |
+
message = (
|
577 |
+
"{name} {version} requires {requirement}, but {you} have "
|
578 |
+
"{dep_name} {dep_version} which is incompatible."
|
579 |
+
).format(
|
580 |
+
name=project_name,
|
581 |
+
version=version,
|
582 |
+
requirement=req,
|
583 |
+
dep_name=dep_name,
|
584 |
+
dep_version=dep_version,
|
585 |
+
you=("you" if resolver_variant == "2020-resolver" else "you'll"),
|
586 |
+
)
|
587 |
+
parts.append(message)
|
588 |
+
|
589 |
+
logger.critical("\n".join(parts))
|
590 |
+
|
591 |
+
|
592 |
+
def get_lib_location_guesses(
|
593 |
+
user: bool = False,
|
594 |
+
home: Optional[str] = None,
|
595 |
+
root: Optional[str] = None,
|
596 |
+
isolated: bool = False,
|
597 |
+
prefix: Optional[str] = None,
|
598 |
+
) -> List[str]:
|
599 |
+
scheme = get_scheme(
|
600 |
+
"",
|
601 |
+
user=user,
|
602 |
+
home=home,
|
603 |
+
root=root,
|
604 |
+
isolated=isolated,
|
605 |
+
prefix=prefix,
|
606 |
+
)
|
607 |
+
return [scheme.purelib, scheme.platlib]
|
608 |
+
|
609 |
+
|
610 |
+
def site_packages_writable(root: Optional[str], isolated: bool) -> bool:
|
611 |
+
return all(
|
612 |
+
test_writable_dir(d)
|
613 |
+
for d in set(get_lib_location_guesses(root=root, isolated=isolated))
|
614 |
+
)
|
615 |
+
|
616 |
+
|
617 |
+
def decide_user_install(
|
618 |
+
use_user_site: Optional[bool],
|
619 |
+
prefix_path: Optional[str] = None,
|
620 |
+
target_dir: Optional[str] = None,
|
621 |
+
root_path: Optional[str] = None,
|
622 |
+
isolated_mode: bool = False,
|
623 |
+
) -> bool:
|
624 |
+
"""Determine whether to do a user install based on the input options.
|
625 |
+
|
626 |
+
If use_user_site is False, no additional checks are done.
|
627 |
+
If use_user_site is True, it is checked for compatibility with other
|
628 |
+
options.
|
629 |
+
If use_user_site is None, the default behaviour depends on the environment,
|
630 |
+
which is provided by the other arguments.
|
631 |
+
"""
|
632 |
+
# In some cases (config from tox), use_user_site can be set to an integer
|
633 |
+
# rather than a bool, which 'use_user_site is False' wouldn't catch.
|
634 |
+
if (use_user_site is not None) and (not use_user_site):
|
635 |
+
logger.debug("Non-user install by explicit request")
|
636 |
+
return False
|
637 |
+
|
638 |
+
if use_user_site:
|
639 |
+
if prefix_path:
|
640 |
+
raise CommandError(
|
641 |
+
"Can not combine '--user' and '--prefix' as they imply "
|
642 |
+
"different installation locations"
|
643 |
+
)
|
644 |
+
if virtualenv_no_global():
|
645 |
+
raise InstallationError(
|
646 |
+
"Can not perform a '--user' install. User site-packages "
|
647 |
+
"are not visible in this virtualenv."
|
648 |
+
)
|
649 |
+
logger.debug("User install by explicit request")
|
650 |
+
return True
|
651 |
+
|
652 |
+
# If we are here, user installs have not been explicitly requested/avoided
|
653 |
+
assert use_user_site is None
|
654 |
+
|
655 |
+
# user install incompatible with --prefix/--target
|
656 |
+
if prefix_path or target_dir:
|
657 |
+
logger.debug("Non-user install due to --prefix or --target option")
|
658 |
+
return False
|
659 |
+
|
660 |
+
# If user installs are not enabled, choose a non-user install
|
661 |
+
if not site.ENABLE_USER_SITE:
|
662 |
+
logger.debug("Non-user install because user site-packages disabled")
|
663 |
+
return False
|
664 |
+
|
665 |
+
# If we have permission for a non-user install, do that,
|
666 |
+
# otherwise do a user install.
|
667 |
+
if site_packages_writable(root=root_path, isolated=isolated_mode):
|
668 |
+
logger.debug("Non-user install because site-packages writeable")
|
669 |
+
return False
|
670 |
+
|
671 |
+
logger.info(
|
672 |
+
"Defaulting to user installation because normal site-packages "
|
673 |
+
"is not writeable"
|
674 |
+
)
|
675 |
+
return True
|
676 |
+
|
677 |
+
|
678 |
+
def reject_location_related_install_options(
|
679 |
+
requirements: List[InstallRequirement], options: Optional[List[str]]
|
680 |
+
) -> None:
|
681 |
+
"""If any location-changing --install-option arguments were passed for
|
682 |
+
requirements or on the command-line, then show a deprecation warning.
|
683 |
+
"""
|
684 |
+
|
685 |
+
def format_options(option_names: Iterable[str]) -> List[str]:
|
686 |
+
return ["--{}".format(name.replace("_", "-")) for name in option_names]
|
687 |
+
|
688 |
+
offenders = []
|
689 |
+
|
690 |
+
for requirement in requirements:
|
691 |
+
install_options = requirement.install_options
|
692 |
+
location_options = parse_distutils_args(install_options)
|
693 |
+
if location_options:
|
694 |
+
offenders.append(
|
695 |
+
"{!r} from {}".format(
|
696 |
+
format_options(location_options.keys()), requirement
|
697 |
+
)
|
698 |
+
)
|
699 |
+
|
700 |
+
if options:
|
701 |
+
location_options = parse_distutils_args(options)
|
702 |
+
if location_options:
|
703 |
+
offenders.append(
|
704 |
+
"{!r} from command line".format(format_options(location_options.keys()))
|
705 |
+
)
|
706 |
+
|
707 |
+
if not offenders:
|
708 |
+
return
|
709 |
+
|
710 |
+
raise CommandError(
|
711 |
+
"Location-changing options found in --install-option: {}."
|
712 |
+
" This is unsupported, use pip-level options like --user,"
|
713 |
+
" --prefix, --root, and --target instead.".format("; ".join(offenders))
|
714 |
+
)
|
715 |
+
|
716 |
+
|
717 |
+
def create_os_error_message(
|
718 |
+
error: OSError, show_traceback: bool, using_user_site: bool
|
719 |
+
) -> str:
|
720 |
+
"""Format an error message for an OSError
|
721 |
+
|
722 |
+
It may occur anytime during the execution of the install command.
|
723 |
+
"""
|
724 |
+
parts = []
|
725 |
+
|
726 |
+
# Mention the error if we are not going to show a traceback
|
727 |
+
parts.append("Could not install packages due to an OSError")
|
728 |
+
if not show_traceback:
|
729 |
+
parts.append(": ")
|
730 |
+
parts.append(str(error))
|
731 |
+
else:
|
732 |
+
parts.append(".")
|
733 |
+
|
734 |
+
# Spilt the error indication from a helper message (if any)
|
735 |
+
parts[-1] += "\n"
|
736 |
+
|
737 |
+
# Suggest useful actions to the user:
|
738 |
+
# (1) using user site-packages or (2) verifying the permissions
|
739 |
+
if error.errno == errno.EACCES:
|
740 |
+
user_option_part = "Consider using the `--user` option"
|
741 |
+
permissions_part = "Check the permissions"
|
742 |
+
|
743 |
+
if not running_under_virtualenv() and not using_user_site:
|
744 |
+
parts.extend(
|
745 |
+
[
|
746 |
+
user_option_part,
|
747 |
+
" or ",
|
748 |
+
permissions_part.lower(),
|
749 |
+
]
|
750 |
+
)
|
751 |
+
else:
|
752 |
+
parts.append(permissions_part)
|
753 |
+
parts.append(".\n")
|
754 |
+
|
755 |
+
# Suggest the user to enable Long Paths if path length is
|
756 |
+
# more than 260
|
757 |
+
if (
|
758 |
+
WINDOWS
|
759 |
+
and error.errno == errno.ENOENT
|
760 |
+
and error.filename
|
761 |
+
and len(error.filename) > 260
|
762 |
+
):
|
763 |
+
parts.append(
|
764 |
+
"HINT: This error might have occurred since "
|
765 |
+
"this system does not have Windows Long Path "
|
766 |
+
"support enabled. You can find information on "
|
767 |
+
"how to enable this at "
|
768 |
+
"https://pip.pypa.io/warnings/enable-long-paths\n"
|
769 |
+
)
|
770 |
+
|
771 |
+
return "".join(parts).strip() + "\n"
|
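Among the helpers in install.py above, decide_user_install falls back to a user install only when user site-packages are enabled and the normal site-packages directory is not writable. A rough stand-alone approximation of that decision ladder using only the standard library (pip's own check also honours --root, --prefix, --target and isolated mode, and uses its own writability test, all of which are ignored here):

# Illustrative approximation of the decision in decide_user_install: prefer a
# normal install when site-packages is writable, otherwise fall back to a user
# install if user site-packages are enabled. This deliberately ignores --root,
# --prefix, --target and isolated mode, which the real code also considers.
import os
import site
import sysconfig


def should_use_user_site() -> bool:
    if not site.ENABLE_USER_SITE:
        return False  # user site-packages disabled: never a user install
    site_packages = sysconfig.get_paths()["purelib"]
    writable = os.access(site_packages, os.W_OK)
    return not writable  # fall back to a user install only when we cannot write


if __name__ == "__main__":
    print("user install?", should_use_user_site())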
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/list.py
ADDED
@@ -0,0 +1,363 @@
import json
import logging
from optparse import Values
from typing import TYPE_CHECKING, Iterator, List, Optional, Sequence, Tuple, cast

from pip._vendor.packaging.utils import canonicalize_name

from pip._internal.cli import cmdoptions
from pip._internal.cli.req_command import IndexGroupCommand
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import CommandError
from pip._internal.index.collector import LinkCollector
from pip._internal.index.package_finder import PackageFinder
from pip._internal.metadata import BaseDistribution, get_environment
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.network.session import PipSession
from pip._internal.utils.compat import stdlib_pkgs
from pip._internal.utils.misc import tabulate, write_output

if TYPE_CHECKING:
    from pip._internal.metadata.base import DistributionVersion

    class _DistWithLatestInfo(BaseDistribution):
        """Give the distribution object a couple of extra fields.

        These will be populated during ``get_outdated()``. This is dirty but
        makes the rest of the code much cleaner.
        """

        latest_version: DistributionVersion
        latest_filetype: str

    _ProcessedDists = Sequence[_DistWithLatestInfo]


from pip._vendor.packaging.version import parse

logger = logging.getLogger(__name__)


class ListCommand(IndexGroupCommand):
    """
    List installed packages, including editables.

    Packages are listed in a case-insensitive sorted order.
    """

    ignore_require_venv = True
    usage = """
      %prog [options]"""

    def add_options(self) -> None:
        self.cmd_opts.add_option(
            "-o",
            "--outdated",
            action="store_true",
            default=False,
            help="List outdated packages",
        )
        self.cmd_opts.add_option(
            "-u",
            "--uptodate",
            action="store_true",
            default=False,
            help="List uptodate packages",
        )
        self.cmd_opts.add_option(
            "-e",
            "--editable",
            action="store_true",
            default=False,
            help="List editable projects.",
        )
        self.cmd_opts.add_option(
            "-l",
            "--local",
            action="store_true",
            default=False,
            help=(
                "If in a virtualenv that has global access, do not list "
                "globally-installed packages."
            ),
        )
        self.cmd_opts.add_option(
            "--user",
            dest="user",
            action="store_true",
            default=False,
            help="Only output packages installed in user-site.",
        )
        self.cmd_opts.add_option(cmdoptions.list_path())
        self.cmd_opts.add_option(
            "--pre",
            action="store_true",
            default=False,
            help=(
                "Include pre-release and development versions. By default, "
                "pip only finds stable versions."
            ),
        )

        self.cmd_opts.add_option(
            "--format",
            action="store",
            dest="list_format",
            default="columns",
            choices=("columns", "freeze", "json"),
            help="Select the output format among: columns (default), freeze, or json",
        )

        self.cmd_opts.add_option(
            "--not-required",
            action="store_true",
            dest="not_required",
            help="List packages that are not dependencies of installed packages.",
        )

        self.cmd_opts.add_option(
            "--exclude-editable",
            action="store_false",
            dest="include_editable",
            help="Exclude editable package from output.",
        )
        self.cmd_opts.add_option(
            "--include-editable",
            action="store_true",
            dest="include_editable",
            help="Include editable package from output.",
            default=True,
        )
        self.cmd_opts.add_option(cmdoptions.list_exclude())
        index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)

        self.parser.insert_option_group(0, index_opts)
        self.parser.insert_option_group(0, self.cmd_opts)

    def _build_package_finder(
        self, options: Values, session: PipSession
    ) -> PackageFinder:
        """
        Create a package finder appropriate to this list command.
        """
        link_collector = LinkCollector.create(session, options=options)

        # Pass allow_yanked=False to ignore yanked versions.
        selection_prefs = SelectionPreferences(
            allow_yanked=False,
            allow_all_prereleases=options.pre,
        )

        return PackageFinder.create(
            link_collector=link_collector,
            selection_prefs=selection_prefs,
            use_deprecated_html5lib="html5lib" in options.deprecated_features_enabled,
        )

    def run(self, options: Values, args: List[str]) -> int:
        if options.outdated and options.uptodate:
            raise CommandError("Options --outdated and --uptodate cannot be combined.")

        cmdoptions.check_list_path_option(options)

        skip = set(stdlib_pkgs)
        if options.excludes:
            skip.update(canonicalize_name(n) for n in options.excludes)

        packages: "_ProcessedDists" = [
            cast("_DistWithLatestInfo", d)
            for d in get_environment(options.path).iter_installed_distributions(
                local_only=options.local,
                user_only=options.user,
                editables_only=options.editable,
                include_editables=options.include_editable,
                skip=skip,
            )
        ]

        # get_not_required must be called firstly in order to find and
        # filter out all dependencies correctly. Otherwise a package
        # can't be identified as requirement because some parent packages
        # could be filtered out before.
        if options.not_required:
            packages = self.get_not_required(packages, options)

        if options.outdated:
            packages = self.get_outdated(packages, options)
        elif options.uptodate:
            packages = self.get_uptodate(packages, options)

        self.output_package_listing(packages, options)
        return SUCCESS

    def get_outdated(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if parse(str(dist.latest_version)) > parse(str(dist.version))
        ]

    def get_uptodate(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        return [
            dist
            for dist in self.iter_packages_latest_infos(packages, options)
            if parse(str(dist.latest_version)) == parse(str(dist.version))
        ]

    def get_not_required(
        self, packages: "_ProcessedDists", options: Values
    ) -> "_ProcessedDists":
        dep_keys = {
            canonicalize_name(dep.name)
            for dist in packages
            for dep in (dist.iter_dependencies() or ())
        }

        # Create a set to remove duplicate packages, and cast it to a list
        # to keep the return type consistent with get_outdated and
        # get_uptodate
        return list({pkg for pkg in packages if pkg.canonical_name not in dep_keys})

    def iter_packages_latest_infos(
        self, packages: "_ProcessedDists", options: Values
    ) -> Iterator["_DistWithLatestInfo"]:
        with self._build_session(options) as session:
            finder = self._build_package_finder(options, session)

            def latest_info(
                dist: "_DistWithLatestInfo",
            ) -> Optional["_DistWithLatestInfo"]:
                all_candidates = finder.find_all_candidates(dist.canonical_name)
                if not options.pre:
                    # Remove prereleases
                    all_candidates = [
                        candidate
                        for candidate in all_candidates
                        if not candidate.version.is_prerelease
                    ]

                evaluator = finder.make_candidate_evaluator(
                    project_name=dist.canonical_name,
                )
                best_candidate = evaluator.sort_best_candidate(all_candidates)
                if best_candidate is None:
                    return None

                remote_version = best_candidate.version
                if best_candidate.link.is_wheel:
                    typ = "wheel"
                else:
                    typ = "sdist"
                dist.latest_version = remote_version
                dist.latest_filetype = typ
                return dist

            for dist in map(latest_info, packages):
                if dist is not None:
                    yield dist

    def output_package_listing(
        self, packages: "_ProcessedDists", options: Values
    ) -> None:
        packages = sorted(
            packages,
            key=lambda dist: dist.canonical_name,
        )
        if options.list_format == "columns" and packages:
            data, header = format_for_columns(packages, options)
            self.output_package_listing_columns(data, header)
        elif options.list_format == "freeze":
            for dist in packages:
                if options.verbose >= 1:
                    write_output(
                        "%s==%s (%s)", dist.raw_name, dist.version, dist.location
                    )
                else:
                    write_output("%s==%s", dist.raw_name, dist.version)
        elif options.list_format == "json":
            write_output(format_for_json(packages, options))

    def output_package_listing_columns(
        self, data: List[List[str]], header: List[str]
    ) -> None:
        # insert the header first: we need to know the size of column names
        if len(data) > 0:
            data.insert(0, header)

        pkg_strings, sizes = tabulate(data)

        # Create and add a separator.
        if len(data) > 0:
            pkg_strings.insert(1, " ".join(map(lambda x: "-" * x, sizes)))

        for val in pkg_strings:
            write_output(val)


def format_for_columns(
    pkgs: "_ProcessedDists", options: Values
) -> Tuple[List[List[str]], List[str]]:
    """
    Convert the package data into something usable
    by output_package_listing_columns.
    """
    header = ["Package", "Version"]

    running_outdated = options.outdated
    if running_outdated:
        header.extend(["Latest", "Type"])

    has_editables = any(x.editable for x in pkgs)
    if has_editables:
        header.append("Editable project location")

    if options.verbose >= 1:
        header.append("Location")
    if options.verbose >= 1:
        header.append("Installer")

    data = []
    for proj in pkgs:
        # if we're working on the 'outdated' list, separate out the
        # latest_version and type
        row = [proj.raw_name, str(proj.version)]

        if running_outdated:
            row.append(str(proj.latest_version))
            row.append(proj.latest_filetype)

        if has_editables:
            row.append(proj.editable_project_location or "")

        if options.verbose >= 1:
            row.append(proj.location or "")
        if options.verbose >= 1:
            row.append(proj.installer)

        data.append(row)

    return data, header


def format_for_json(packages: "_ProcessedDists", options: Values) -> str:
    data = []
    for dist in packages:
        info = {
            "name": dist.raw_name,
            "version": str(dist.version),
        }
        if options.verbose >= 1:
            info["location"] = dist.location or ""
            info["installer"] = dist.installer
        if options.outdated:
            info["latest_version"] = str(dist.latest_version)
            info["latest_filetype"] = dist.latest_filetype
        editable_project_location = dist.editable_project_location
        if editable_project_location:
            info["editable_project_location"] = editable_project_location
        data.append(info)
    return json.dumps(data)
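format_for_json serialises each distribution as an object with at least "name" and "version", plus "latest_version" and "latest_filetype" when --outdated is given. A hedged sketch of consuming that JSON output from a script, relying only on those keys:

# Illustrative sketch: read `pip list --outdated --format=json` and report the
# upgrade available for each package, using only keys emitted by format_for_json.
import json
import subprocess
import sys

result = subprocess.run(
    [sys.executable, "-m", "pip", "list", "--outdated", "--format=json"],
    check=True,
    capture_output=True,
    text=True,
)
for entry in json.loads(result.stdout):
    print(
        f"{entry['name']}: {entry['version']} -> "
        f"{entry['latest_version']} ({entry['latest_filetype']})"
    )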
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/search.py
ADDED
@@ -0,0 +1,174 @@
1 |
+
import logging
|
2 |
+
import shutil
|
3 |
+
import sys
|
4 |
+
import textwrap
|
5 |
+
import xmlrpc.client
|
6 |
+
from collections import OrderedDict
|
7 |
+
from optparse import Values
|
8 |
+
from typing import TYPE_CHECKING, Dict, List, Optional
|
9 |
+
|
10 |
+
from pip._vendor.packaging.version import parse as parse_version
|
11 |
+
|
12 |
+
from pip._internal.cli.base_command import Command
|
13 |
+
from pip._internal.cli.req_command import SessionCommandMixin
|
14 |
+
from pip._internal.cli.status_codes import NO_MATCHES_FOUND, SUCCESS
|
15 |
+
from pip._internal.exceptions import CommandError
|
16 |
+
from pip._internal.metadata import get_default_environment
|
17 |
+
from pip._internal.models.index import PyPI
|
18 |
+
from pip._internal.network.xmlrpc import PipXmlrpcTransport
|
19 |
+
from pip._internal.utils.logging import indent_log
|
20 |
+
from pip._internal.utils.misc import write_output
|
21 |
+
|
22 |
+
if TYPE_CHECKING:
|
23 |
+
from typing import TypedDict
|
24 |
+
|
25 |
+
class TransformedHit(TypedDict):
|
26 |
+
name: str
|
27 |
+
summary: str
|
28 |
+
versions: List[str]
|
29 |
+
|
30 |
+
|
31 |
+
logger = logging.getLogger(__name__)
|
32 |
+
|
33 |
+
|
34 |
+
class SearchCommand(Command, SessionCommandMixin):
|
35 |
+
"""Search for PyPI packages whose name or summary contains <query>."""
|
36 |
+
|
37 |
+
usage = """
|
38 |
+
%prog [options] <query>"""
|
39 |
+
ignore_require_venv = True
|
40 |
+
|
41 |
+
def add_options(self) -> None:
|
42 |
+
self.cmd_opts.add_option(
|
43 |
+
"-i",
|
44 |
+
"--index",
|
45 |
+
dest="index",
|
46 |
+
metavar="URL",
|
47 |
+
default=PyPI.pypi_url,
|
48 |
+
help="Base URL of Python Package Index (default %default)",
|
49 |
+
)
|
50 |
+
|
51 |
+
self.parser.insert_option_group(0, self.cmd_opts)
|
52 |
+
|
53 |
+
def run(self, options: Values, args: List[str]) -> int:
|
54 |
+
if not args:
|
55 |
+
raise CommandError("Missing required argument (search query).")
|
56 |
+
query = args
|
57 |
+
pypi_hits = self.search(query, options)
|
58 |
+
hits = transform_hits(pypi_hits)
|
59 |
+
|
60 |
+
terminal_width = None
|
61 |
+
if sys.stdout.isatty():
|
62 |
+
terminal_width = shutil.get_terminal_size()[0]
|
63 |
+
|
64 |
+
print_results(hits, terminal_width=terminal_width)
|
65 |
+
if pypi_hits:
|
66 |
+
return SUCCESS
|
67 |
+
return NO_MATCHES_FOUND
|
68 |
+
|
69 |
+
def search(self, query: List[str], options: Values) -> List[Dict[str, str]]:
|
70 |
+
index_url = options.index
|
71 |
+
|
72 |
+
session = self.get_default_session(options)
|
73 |
+
|
74 |
+
transport = PipXmlrpcTransport(index_url, session)
|
75 |
+
pypi = xmlrpc.client.ServerProxy(index_url, transport)
|
76 |
+
try:
|
77 |
+
hits = pypi.search({"name": query, "summary": query}, "or")
|
78 |
+
except xmlrpc.client.Fault as fault:
|
79 |
+
message = "XMLRPC request failed [code: {code}]\n{string}".format(
|
80 |
+
code=fault.faultCode,
|
81 |
+
string=fault.faultString,
|
82 |
+
)
|
83 |
+
raise CommandError(message)
|
84 |
+
assert isinstance(hits, list)
|
85 |
+
return hits
|
86 |
+
|
87 |
+
|
88 |
+
def transform_hits(hits: List[Dict[str, str]]) -> List["TransformedHit"]:
|
89 |
+
"""
|
90 |
+
The list from pypi is really a list of versions. We want a list of
|
91 |
+
packages with the list of versions stored inline. This converts the
|
92 |
+
list from pypi into one we can use.
|
93 |
+
"""
|
94 |
+
packages: Dict[str, "TransformedHit"] = OrderedDict()
|
95 |
+
for hit in hits:
|
96 |
+
name = hit["name"]
|
97 |
+
summary = hit["summary"]
|
98 |
+
version = hit["version"]
|
99 |
+
|
100 |
+
if name not in packages.keys():
|
101 |
+
packages[name] = {
|
102 |
+
"name": name,
|
103 |
+
"summary": summary,
|
104 |
+
"versions": [version],
|
105 |
+
}
|
106 |
+
else:
|
107 |
+
packages[name]["versions"].append(version)
|
108 |
+
|
109 |
+
# if this is the highest version, replace summary and score
|
110 |
+
if version == highest_version(packages[name]["versions"]):
|
111 |
+
packages[name]["summary"] = summary
|
112 |
+
|
113 |
+
return list(packages.values())
|
114 |
+
|
115 |
+
|
116 |
+
def print_dist_installation_info(name: str, latest: str) -> None:
|
117 |
+
env = get_default_environment()
|
118 |
+
dist = env.get_distribution(name)
|
119 |
+
if dist is not None:
|
120 |
+
with indent_log():
|
121 |
+
if dist.version == latest:
|
122 |
+
write_output("INSTALLED: %s (latest)", dist.version)
|
123 |
+
else:
|
124 |
+
write_output("INSTALLED: %s", dist.version)
|
125 |
+
if parse_version(latest).pre:
|
126 |
+
write_output(
|
127 |
+
"LATEST: %s (pre-release; install"
|
128 |
+
" with `pip install --pre`)",
|
129 |
+
latest,
|
130 |
+
)
|
131 |
+
else:
|
132 |
+
write_output("LATEST: %s", latest)
|
133 |
+
|
134 |
+
|
135 |
+
def print_results(
|
136 |
+
hits: List["TransformedHit"],
|
137 |
+
name_column_width: Optional[int] = None,
|
138 |
+
terminal_width: Optional[int] = None,
|
139 |
+
) -> None:
|
140 |
+
if not hits:
|
141 |
+
return
|
142 |
+
if name_column_width is None:
|
143 |
+
name_column_width = (
|
144 |
+
max(
|
145 |
+
[
|
146 |
+
len(hit["name"]) + len(highest_version(hit.get("versions", ["-"])))
|
147 |
+
for hit in hits
|
148 |
+
]
|
149 |
+
)
|
150 |
+
+ 4
|
151 |
+
)
|
152 |
+
|
153 |
+
for hit in hits:
|
154 |
+
name = hit["name"]
|
155 |
+
summary = hit["summary"] or ""
|
156 |
+
latest = highest_version(hit.get("versions", ["-"]))
|
157 |
+
if terminal_width is not None:
|
158 |
+
target_width = terminal_width - name_column_width - 5
|
159 |
+
if target_width > 10:
|
160 |
+
# wrap and indent summary to fit terminal
|
161 |
+
summary_lines = textwrap.wrap(summary, target_width)
|
162 |
+
summary = ("\n" + " " * (name_column_width + 3)).join(summary_lines)
|
163 |
+
|
164 |
+
name_latest = f"{name} ({latest})"
|
165 |
+
line = f"{name_latest:{name_column_width}} - {summary}"
|
166 |
+
try:
|
167 |
+
write_output(line)
|
168 |
+
print_dist_installation_info(name, latest)
|
169 |
+
except UnicodeEncodeError:
|
170 |
+
pass
|
171 |
+
|
172 |
+
|
173 |
+
def highest_version(versions: List[str]) -> str:
|
174 |
+
return max(versions, key=parse_version)
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/show.py
ADDED
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
from optparse import Values
|
3 |
+
from typing import Iterator, List, NamedTuple, Optional
|
4 |
+
|
5 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
6 |
+
|
7 |
+
from pip._internal.cli.base_command import Command
|
8 |
+
from pip._internal.cli.status_codes import ERROR, SUCCESS
|
9 |
+
from pip._internal.metadata import BaseDistribution, get_default_environment
|
10 |
+
from pip._internal.utils.misc import write_output
|
11 |
+
|
12 |
+
logger = logging.getLogger(__name__)
|
13 |
+
|
14 |
+
|
15 |
+
class ShowCommand(Command):
|
16 |
+
"""
|
17 |
+
Show information about one or more installed packages.
|
18 |
+
|
19 |
+
The output is in RFC-compliant mail header format.
|
20 |
+
"""
|
21 |
+
|
22 |
+
usage = """
|
23 |
+
%prog [options] <package> ..."""
|
24 |
+
ignore_require_venv = True
|
25 |
+
|
26 |
+
def add_options(self) -> None:
|
27 |
+
self.cmd_opts.add_option(
|
28 |
+
"-f",
|
29 |
+
"--files",
|
30 |
+
dest="files",
|
31 |
+
action="store_true",
|
32 |
+
default=False,
|
33 |
+
help="Show the full list of installed files for each package.",
|
34 |
+
)
|
35 |
+
|
36 |
+
self.parser.insert_option_group(0, self.cmd_opts)
|
37 |
+
|
38 |
+
def run(self, options: Values, args: List[str]) -> int:
|
39 |
+
if not args:
|
40 |
+
logger.warning("ERROR: Please provide a package name or names.")
|
41 |
+
return ERROR
|
42 |
+
query = args
|
43 |
+
|
44 |
+
results = search_packages_info(query)
|
45 |
+
if not print_results(
|
46 |
+
results, list_files=options.files, verbose=options.verbose
|
47 |
+
):
|
48 |
+
return ERROR
|
49 |
+
return SUCCESS
|
50 |
+
|
51 |
+
|
52 |
+
class _PackageInfo(NamedTuple):
|
53 |
+
name: str
|
54 |
+
version: str
|
55 |
+
location: str
|
56 |
+
requires: List[str]
|
57 |
+
required_by: List[str]
|
58 |
+
installer: str
|
59 |
+
metadata_version: str
|
60 |
+
classifiers: List[str]
|
61 |
+
summary: str
|
62 |
+
homepage: str
|
63 |
+
author: str
|
64 |
+
author_email: str
|
65 |
+
license: str
|
66 |
+
entry_points: List[str]
|
67 |
+
files: Optional[List[str]]
|
68 |
+
|
69 |
+
|
70 |
+
def search_packages_info(query: List[str]) -> Iterator[_PackageInfo]:
|
71 |
+
"""
|
72 |
+
Gather details from installed distributions. Print distribution name,
|
73 |
+
version, location, and installed files. Installed files requires a
|
74 |
+
pip generated 'installed-files.txt' in the distributions '.egg-info'
|
75 |
+
directory.
|
76 |
+
"""
|
77 |
+
env = get_default_environment()
|
78 |
+
|
79 |
+
installed = {dist.canonical_name: dist for dist in env.iter_distributions()}
|
80 |
+
query_names = [canonicalize_name(name) for name in query]
|
81 |
+
missing = sorted(
|
82 |
+
[name for name, pkg in zip(query, query_names) if pkg not in installed]
|
83 |
+
)
|
84 |
+
if missing:
|
85 |
+
logger.warning("Package(s) not found: %s", ", ".join(missing))
|
86 |
+
|
87 |
+
def _get_requiring_packages(current_dist: BaseDistribution) -> Iterator[str]:
|
88 |
+
return (
|
89 |
+
dist.metadata["Name"] or "UNKNOWN"
|
90 |
+
for dist in installed.values()
|
91 |
+
if current_dist.canonical_name
|
92 |
+
in {canonicalize_name(d.name) for d in dist.iter_dependencies()}
|
93 |
+
)
|
94 |
+
|
95 |
+
for query_name in query_names:
|
96 |
+
try:
|
97 |
+
dist = installed[query_name]
|
98 |
+
except KeyError:
|
99 |
+
continue
|
100 |
+
|
101 |
+
requires = sorted((req.name for req in dist.iter_dependencies()), key=str.lower)
|
102 |
+
required_by = sorted(_get_requiring_packages(dist), key=str.lower)
|
103 |
+
|
104 |
+
try:
|
105 |
+
entry_points_text = dist.read_text("entry_points.txt")
|
106 |
+
entry_points = entry_points_text.splitlines(keepends=False)
|
107 |
+
except FileNotFoundError:
|
108 |
+
entry_points = []
|
109 |
+
|
110 |
+
files_iter = dist.iter_declared_entries()
|
111 |
+
if files_iter is None:
|
112 |
+
files: Optional[List[str]] = None
|
113 |
+
else:
|
114 |
+
files = sorted(files_iter)
|
115 |
+
|
116 |
+
metadata = dist.metadata
|
117 |
+
|
118 |
+
yield _PackageInfo(
|
119 |
+
name=dist.raw_name,
|
120 |
+
version=str(dist.version),
|
121 |
+
location=dist.location or "",
|
122 |
+
requires=requires,
|
123 |
+
required_by=required_by,
|
124 |
+
installer=dist.installer,
|
125 |
+
metadata_version=dist.metadata_version or "",
|
126 |
+
classifiers=metadata.get_all("Classifier", []),
|
127 |
+
summary=metadata.get("Summary", ""),
|
128 |
+
homepage=metadata.get("Home-page", ""),
|
129 |
+
author=metadata.get("Author", ""),
|
130 |
+
author_email=metadata.get("Author-email", ""),
|
131 |
+
license=metadata.get("License", ""),
|
132 |
+
entry_points=entry_points,
|
133 |
+
files=files,
|
134 |
+
)
|
135 |
+
|
136 |
+
|
137 |
+
def print_results(
|
138 |
+
distributions: Iterator[_PackageInfo],
|
139 |
+
list_files: bool,
|
140 |
+
verbose: bool,
|
141 |
+
) -> bool:
|
142 |
+
"""
|
143 |
+
Print the information from installed distributions found.
|
144 |
+
"""
|
145 |
+
results_printed = False
|
146 |
+
for i, dist in enumerate(distributions):
|
147 |
+
results_printed = True
|
148 |
+
if i > 0:
|
149 |
+
write_output("---")
|
150 |
+
|
151 |
+
write_output("Name: %s", dist.name)
|
152 |
+
write_output("Version: %s", dist.version)
|
153 |
+
write_output("Summary: %s", dist.summary)
|
154 |
+
write_output("Home-page: %s", dist.homepage)
|
155 |
+
write_output("Author: %s", dist.author)
|
156 |
+
write_output("Author-email: %s", dist.author_email)
|
157 |
+
write_output("License: %s", dist.license)
|
158 |
+
write_output("Location: %s", dist.location)
|
159 |
+
write_output("Requires: %s", ", ".join(dist.requires))
|
160 |
+
write_output("Required-by: %s", ", ".join(dist.required_by))
|
161 |
+
|
162 |
+
if verbose:
|
163 |
+
write_output("Metadata-Version: %s", dist.metadata_version)
|
164 |
+
write_output("Installer: %s", dist.installer)
|
165 |
+
write_output("Classifiers:")
|
166 |
+
for classifier in dist.classifiers:
|
167 |
+
write_output(" %s", classifier)
|
168 |
+
write_output("Entry-points:")
|
169 |
+
for entry in dist.entry_points:
|
170 |
+
write_output(" %s", entry.strip())
|
171 |
+
if list_files:
|
172 |
+
write_output("Files:")
|
173 |
+
if dist.files is None:
|
174 |
+
write_output("Cannot locate RECORD or installed-files.txt")
|
175 |
+
else:
|
176 |
+
for line in dist.files:
|
177 |
+
write_output(" %s", line.strip())
|
178 |
+
return results_printed
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/uninstall.py
ADDED
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
from optparse import Values
|
3 |
+
from typing import List
|
4 |
+
|
5 |
+
from pip._vendor.packaging.utils import canonicalize_name
|
6 |
+
|
7 |
+
from pip._internal.cli.base_command import Command
|
8 |
+
from pip._internal.cli.req_command import SessionCommandMixin, warn_if_run_as_root
|
9 |
+
from pip._internal.cli.status_codes import SUCCESS
|
10 |
+
from pip._internal.exceptions import InstallationError
|
11 |
+
from pip._internal.req import parse_requirements
|
12 |
+
from pip._internal.req.constructors import (
|
13 |
+
install_req_from_line,
|
14 |
+
install_req_from_parsed_requirement,
|
15 |
+
)
|
16 |
+
from pip._internal.utils.misc import protect_pip_from_modification_on_windows
|
17 |
+
|
18 |
+
logger = logging.getLogger(__name__)
|
19 |
+
|
20 |
+
|
21 |
+
class UninstallCommand(Command, SessionCommandMixin):
|
22 |
+
"""
|
23 |
+
Uninstall packages.
|
24 |
+
|
25 |
+
pip is able to uninstall most installed packages. Known exceptions are:
|
26 |
+
|
27 |
+
- Pure distutils packages installed with ``python setup.py install``, which
|
28 |
+
leave behind no metadata to determine what files were installed.
|
29 |
+
- Script wrappers installed by ``python setup.py develop``.
|
30 |
+
"""
|
31 |
+
|
32 |
+
usage = """
|
33 |
+
%prog [options] <package> ...
|
34 |
+
%prog [options] -r <requirements file> ..."""
|
35 |
+
|
36 |
+
def add_options(self) -> None:
|
37 |
+
self.cmd_opts.add_option(
|
38 |
+
"-r",
|
39 |
+
"--requirement",
|
40 |
+
dest="requirements",
|
41 |
+
action="append",
|
42 |
+
default=[],
|
43 |
+
metavar="file",
|
44 |
+
help=(
|
45 |
+
"Uninstall all the packages listed in the given requirements "
|
46 |
+
"file. This option can be used multiple times."
|
47 |
+
),
|
48 |
+
)
|
49 |
+
self.cmd_opts.add_option(
|
50 |
+
"-y",
|
51 |
+
"--yes",
|
52 |
+
dest="yes",
|
53 |
+
action="store_true",
|
54 |
+
help="Don't ask for confirmation of uninstall deletions.",
|
55 |
+
)
|
56 |
+
|
57 |
+
self.parser.insert_option_group(0, self.cmd_opts)
|
58 |
+
|
59 |
+
def run(self, options: Values, args: List[str]) -> int:
|
60 |
+
session = self.get_default_session(options)
|
61 |
+
|
62 |
+
reqs_to_uninstall = {}
|
63 |
+
for name in args:
|
64 |
+
req = install_req_from_line(
|
65 |
+
name,
|
66 |
+
isolated=options.isolated_mode,
|
67 |
+
)
|
68 |
+
if req.name:
|
69 |
+
reqs_to_uninstall[canonicalize_name(req.name)] = req
|
70 |
+
else:
|
71 |
+
logger.warning(
|
72 |
+
"Invalid requirement: %r ignored -"
|
73 |
+
" the uninstall command expects named"
|
74 |
+
" requirements.",
|
75 |
+
name,
|
76 |
+
)
|
77 |
+
for filename in options.requirements:
|
78 |
+
for parsed_req in parse_requirements(
|
79 |
+
filename, options=options, session=session
|
80 |
+
):
|
81 |
+
req = install_req_from_parsed_requirement(
|
82 |
+
parsed_req, isolated=options.isolated_mode
|
83 |
+
)
|
84 |
+
if req.name:
|
85 |
+
reqs_to_uninstall[canonicalize_name(req.name)] = req
|
86 |
+
if not reqs_to_uninstall:
|
87 |
+
raise InstallationError(
|
88 |
+
f"You must give at least one requirement to {self.name} (see "
|
89 |
+
f'"pip help {self.name}")'
|
90 |
+
)
|
91 |
+
|
92 |
+
protect_pip_from_modification_on_windows(
|
93 |
+
modifying_pip="pip" in reqs_to_uninstall
|
94 |
+
)
|
95 |
+
|
96 |
+
for req in reqs_to_uninstall.values():
|
97 |
+
uninstall_pathset = req.uninstall(
|
98 |
+
auto_confirm=options.yes,
|
99 |
+
verbose=self.verbosity > 0,
|
100 |
+
)
|
101 |
+
if uninstall_pathset:
|
102 |
+
uninstall_pathset.commit()
|
103 |
+
|
104 |
+
warn_if_run_as_root()
|
105 |
+
return SUCCESS
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/commands/wheel.py
ADDED
@@ -0,0 +1,178 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import logging
|
2 |
+
import os
|
3 |
+
import shutil
|
4 |
+
from optparse import Values
|
5 |
+
from typing import List
|
6 |
+
|
7 |
+
from pip._internal.cache import WheelCache
|
8 |
+
from pip._internal.cli import cmdoptions
|
9 |
+
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
|
10 |
+
from pip._internal.cli.status_codes import SUCCESS
|
11 |
+
from pip._internal.exceptions import CommandError
|
12 |
+
from pip._internal.req.req_install import InstallRequirement
|
13 |
+
from pip._internal.req.req_tracker import get_requirement_tracker
|
14 |
+
from pip._internal.utils.misc import ensure_dir, normalize_path
|
15 |
+
from pip._internal.utils.temp_dir import TempDirectory
|
16 |
+
from pip._internal.wheel_builder import build, should_build_for_wheel_command
|
17 |
+
|
18 |
+
logger = logging.getLogger(__name__)
|
19 |
+
|
20 |
+
|
21 |
+
class WheelCommand(RequirementCommand):
|
22 |
+
"""
|
23 |
+
Build Wheel archives for your requirements and dependencies.
|
24 |
+
|
25 |
+
Wheel is a built-package format, and offers the advantage of not
|
26 |
+
recompiling your software during every install. For more details, see the
|
27 |
+
wheel docs: https://wheel.readthedocs.io/en/latest/
|
28 |
+
|
29 |
+
Requirements: setuptools>=0.8, and wheel.
|
30 |
+
|
31 |
+
'pip wheel' uses the bdist_wheel setuptools extension from the wheel
|
32 |
+
package to build individual wheels.
|
33 |
+
|
34 |
+
"""
|
35 |
+
|
36 |
+
usage = """
|
37 |
+
%prog [options] <requirement specifier> ...
|
38 |
+
%prog [options] -r <requirements file> ...
|
39 |
+
%prog [options] [-e] <vcs project url> ...
|
40 |
+
%prog [options] [-e] <local project path> ...
|
41 |
+
%prog [options] <archive url/path> ..."""
|
42 |
+
|
43 |
+
def add_options(self) -> None:
|
44 |
+
|
45 |
+
self.cmd_opts.add_option(
|
46 |
+
"-w",
|
47 |
+
"--wheel-dir",
|
48 |
+
dest="wheel_dir",
|
49 |
+
metavar="dir",
|
50 |
+
default=os.curdir,
|
51 |
+
help=(
|
52 |
+
"Build wheels into <dir>, where the default is the "
|
53 |
+
"current working directory."
|
54 |
+
),
|
55 |
+
)
|
56 |
+
self.cmd_opts.add_option(cmdoptions.no_binary())
|
57 |
+
self.cmd_opts.add_option(cmdoptions.only_binary())
|
58 |
+
self.cmd_opts.add_option(cmdoptions.prefer_binary())
|
59 |
+
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
|
60 |
+
self.cmd_opts.add_option(cmdoptions.use_pep517())
|
61 |
+
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
|
62 |
+
self.cmd_opts.add_option(cmdoptions.constraints())
|
63 |
+
self.cmd_opts.add_option(cmdoptions.editable())
|
64 |
+
self.cmd_opts.add_option(cmdoptions.requirements())
|
65 |
+
self.cmd_opts.add_option(cmdoptions.src())
|
66 |
+
self.cmd_opts.add_option(cmdoptions.ignore_requires_python())
|
67 |
+
self.cmd_opts.add_option(cmdoptions.no_deps())
|
68 |
+
self.cmd_opts.add_option(cmdoptions.progress_bar())
|
69 |
+
|
70 |
+
self.cmd_opts.add_option(
|
71 |
+
"--no-verify",
|
72 |
+
dest="no_verify",
|
73 |
+
action="store_true",
|
74 |
+
default=False,
|
75 |
+
help="Don't verify if built wheel is valid.",
|
76 |
+
)
|
77 |
+
|
78 |
+
self.cmd_opts.add_option(cmdoptions.build_options())
|
79 |
+
self.cmd_opts.add_option(cmdoptions.global_options())
|
80 |
+
|
81 |
+
self.cmd_opts.add_option(
|
82 |
+
"--pre",
|
83 |
+
action="store_true",
|
84 |
+
default=False,
|
85 |
+
help=(
|
86 |
+
"Include pre-release and development versions. By default, "
|
87 |
+
"pip only finds stable versions."
|
88 |
+
),
|
89 |
+
)
|
90 |
+
|
91 |
+
self.cmd_opts.add_option(cmdoptions.require_hashes())
|
92 |
+
|
93 |
+
index_opts = cmdoptions.make_option_group(
|
94 |
+
cmdoptions.index_group,
|
95 |
+
self.parser,
|
96 |
+
)
|
97 |
+
|
98 |
+
self.parser.insert_option_group(0, index_opts)
|
99 |
+
self.parser.insert_option_group(0, self.cmd_opts)
|
100 |
+
|
101 |
+
@with_cleanup
|
102 |
+
def run(self, options: Values, args: List[str]) -> int:
|
103 |
+
cmdoptions.check_install_build_global(options)
|
104 |
+
|
105 |
+
session = self.get_default_session(options)
|
106 |
+
|
107 |
+
finder = self._build_package_finder(options, session)
|
108 |
+
wheel_cache = WheelCache(options.cache_dir, options.format_control)
|
109 |
+
|
110 |
+
options.wheel_dir = normalize_path(options.wheel_dir)
|
111 |
+
ensure_dir(options.wheel_dir)
|
112 |
+
|
113 |
+
req_tracker = self.enter_context(get_requirement_tracker())
|
114 |
+
|
115 |
+
directory = TempDirectory(
|
116 |
+
delete=not options.no_clean,
|
117 |
+
kind="wheel",
|
118 |
+
globally_managed=True,
|
119 |
+
)
|
120 |
+
|
121 |
+
reqs = self.get_requirements(args, options, finder, session)
|
122 |
+
|
123 |
+
preparer = self.make_requirement_preparer(
|
124 |
+
temp_build_dir=directory,
|
125 |
+
options=options,
|
126 |
+
req_tracker=req_tracker,
|
127 |
+
session=session,
|
128 |
+
finder=finder,
|
129 |
+
download_dir=options.wheel_dir,
|
130 |
+
use_user_site=False,
|
131 |
+
verbosity=self.verbosity,
|
132 |
+
)
|
133 |
+
|
134 |
+
resolver = self.make_resolver(
|
135 |
+
preparer=preparer,
|
136 |
+
finder=finder,
|
137 |
+
options=options,
|
138 |
+
wheel_cache=wheel_cache,
|
139 |
+
ignore_requires_python=options.ignore_requires_python,
|
140 |
+
use_pep517=options.use_pep517,
|
141 |
+
)
|
142 |
+
|
143 |
+
self.trace_basic_info(finder)
|
144 |
+
|
145 |
+
requirement_set = resolver.resolve(reqs, check_supported_wheels=True)
|
146 |
+
|
147 |
+
reqs_to_build: List[InstallRequirement] = []
|
148 |
+
for req in requirement_set.requirements.values():
|
149 |
+
if req.is_wheel:
|
150 |
+
preparer.save_linked_requirement(req)
|
151 |
+
elif should_build_for_wheel_command(req):
|
152 |
+
reqs_to_build.append(req)
|
153 |
+
|
154 |
+
# build wheels
|
155 |
+
build_successes, build_failures = build(
|
156 |
+
reqs_to_build,
|
157 |
+
wheel_cache=wheel_cache,
|
158 |
+
verify=(not options.no_verify),
|
159 |
+
build_options=options.build_options or [],
|
160 |
+
global_options=options.global_options or [],
|
161 |
+
)
|
162 |
+
for req in build_successes:
|
163 |
+
assert req.link and req.link.is_wheel
|
164 |
+
assert req.local_file_path
|
165 |
+
# copy from cache to target directory
|
166 |
+
try:
|
167 |
+
shutil.copy(req.local_file_path, options.wheel_dir)
|
168 |
+
except OSError as e:
|
169 |
+
logger.warning(
|
170 |
+
"Building wheel for %s failed: %s",
|
171 |
+
req.name,
|
172 |
+
e,
|
173 |
+
)
|
174 |
+
build_failures.append(req)
|
175 |
+
if len(build_failures) != 0:
|
176 |
+
raise CommandError("Failed to build one or more wheels")
|
177 |
+
|
178 |
+
return SUCCESS
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__init__.py
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
"""Index interaction code
|
2 |
+
"""
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (219 Bytes). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/collector.cpython-310.pyc
ADDED
Binary file (19.3 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/package_finder.cpython-310.pyc
ADDED
Binary file (28.1 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/index/__pycache__/sources.cpython-310.pyc
ADDED
Binary file (7.11 kB). View file
|
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/index/collector.py
ADDED
@@ -0,0 +1,648 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""
|
2 |
+
The main purpose of this module is to expose LinkCollector.collect_sources().
|
3 |
+
"""
|
4 |
+
|
5 |
+
import cgi
|
6 |
+
import collections
|
7 |
+
import functools
|
8 |
+
import itertools
|
9 |
+
import logging
|
10 |
+
import os
|
11 |
+
import re
|
12 |
+
import urllib.parse
|
13 |
+
import urllib.request
|
14 |
+
import xml.etree.ElementTree
|
15 |
+
from html.parser import HTMLParser
|
16 |
+
from optparse import Values
|
17 |
+
from typing import (
|
18 |
+
TYPE_CHECKING,
|
19 |
+
Any,
|
20 |
+
Callable,
|
21 |
+
Dict,
|
22 |
+
Iterable,
|
23 |
+
List,
|
24 |
+
MutableMapping,
|
25 |
+
NamedTuple,
|
26 |
+
Optional,
|
27 |
+
Sequence,
|
28 |
+
Tuple,
|
29 |
+
Union,
|
30 |
+
)
|
31 |
+
|
32 |
+
from pip._vendor import html5lib, requests
|
33 |
+
from pip._vendor.requests import Response
|
34 |
+
from pip._vendor.requests.exceptions import RetryError, SSLError
|
35 |
+
|
36 |
+
from pip._internal.exceptions import NetworkConnectionError
|
37 |
+
from pip._internal.models.link import Link
|
38 |
+
from pip._internal.models.search_scope import SearchScope
|
39 |
+
from pip._internal.network.session import PipSession
|
40 |
+
from pip._internal.network.utils import raise_for_status
|
41 |
+
from pip._internal.utils.deprecation import deprecated
|
42 |
+
from pip._internal.utils.filetypes import is_archive_file
|
43 |
+
from pip._internal.utils.misc import pairwise, redact_auth_from_url
|
44 |
+
from pip._internal.vcs import vcs
|
45 |
+
|
46 |
+
from .sources import CandidatesFromPage, LinkSource, build_source
|
47 |
+
|
48 |
+
if TYPE_CHECKING:
|
49 |
+
from typing import Protocol
|
50 |
+
else:
|
51 |
+
Protocol = object
|
52 |
+
|
53 |
+
logger = logging.getLogger(__name__)
|
54 |
+
|
55 |
+
HTMLElement = xml.etree.ElementTree.Element
|
56 |
+
ResponseHeaders = MutableMapping[str, str]
|
57 |
+
|
58 |
+
|
59 |
+
def _match_vcs_scheme(url: str) -> Optional[str]:
|
60 |
+
"""Look for VCS schemes in the URL.
|
61 |
+
|
62 |
+
Returns the matched VCS scheme, or None if there's no match.
|
63 |
+
"""
|
64 |
+
for scheme in vcs.schemes:
|
65 |
+
if url.lower().startswith(scheme) and url[len(scheme)] in "+:":
|
66 |
+
return scheme
|
67 |
+
return None
|
68 |
+
|
69 |
+
|
70 |
+
class _NotHTML(Exception):
|
71 |
+
def __init__(self, content_type: str, request_desc: str) -> None:
|
72 |
+
super().__init__(content_type, request_desc)
|
73 |
+
self.content_type = content_type
|
74 |
+
self.request_desc = request_desc
|
75 |
+
|
76 |
+
|
77 |
+
def _ensure_html_header(response: Response) -> None:
|
78 |
+
"""Check the Content-Type header to ensure the response contains HTML.
|
79 |
+
|
80 |
+
Raises `_NotHTML` if the content type is not text/html.
|
81 |
+
"""
|
82 |
+
content_type = response.headers.get("Content-Type", "")
|
83 |
+
if not content_type.lower().startswith("text/html"):
|
84 |
+
raise _NotHTML(content_type, response.request.method)
|
85 |
+
|
86 |
+
|
87 |
+
class _NotHTTP(Exception):
|
88 |
+
pass
|
89 |
+
|
90 |
+
|
91 |
+
def _ensure_html_response(url: str, session: PipSession) -> None:
|
92 |
+
"""Send a HEAD request to the URL, and ensure the response contains HTML.
|
93 |
+
|
94 |
+
Raises `_NotHTTP` if the URL is not available for a HEAD request, or
|
95 |
+
`_NotHTML` if the content type is not text/html.
|
96 |
+
"""
|
97 |
+
scheme, netloc, path, query, fragment = urllib.parse.urlsplit(url)
|
98 |
+
if scheme not in {"http", "https"}:
|
99 |
+
raise _NotHTTP()
|
100 |
+
|
101 |
+
resp = session.head(url, allow_redirects=True)
|
102 |
+
raise_for_status(resp)
|
103 |
+
|
104 |
+
_ensure_html_header(resp)
|
105 |
+
|
106 |
+
|
107 |
+
def _get_html_response(url: str, session: PipSession) -> Response:
|
108 |
+
"""Access an HTML page with GET, and return the response.
|
109 |
+
|
110 |
+
This consists of three parts:
|
111 |
+
|
112 |
+
1. If the URL looks suspiciously like an archive, send a HEAD first to
|
113 |
+
check the Content-Type is HTML, to avoid downloading a large file.
|
114 |
+
Raise `_NotHTTP` if the content type cannot be determined, or
|
115 |
+
`_NotHTML` if it is not HTML.
|
116 |
+
2. Actually perform the request. Raise HTTP exceptions on network failures.
|
117 |
+
3. Check the Content-Type header to make sure we got HTML, and raise
|
118 |
+
`_NotHTML` otherwise.
|
119 |
+
"""
|
120 |
+
if is_archive_file(Link(url).filename):
|
121 |
+
_ensure_html_response(url, session=session)
|
122 |
+
|
123 |
+
logger.debug("Getting page %s", redact_auth_from_url(url))
|
124 |
+
|
125 |
+
resp = session.get(
|
126 |
+
url,
|
127 |
+
headers={
|
128 |
+
"Accept": "text/html",
|
129 |
+
# We don't want to blindly returned cached data for
|
130 |
+
# /simple/, because authors generally expecting that
|
131 |
+
# twine upload && pip install will function, but if
|
132 |
+
# they've done a pip install in the last ~10 minutes
|
133 |
+
# it won't. Thus by setting this to zero we will not
|
134 |
+
# blindly use any cached data, however the benefit of
|
135 |
+
# using max-age=0 instead of no-cache, is that we will
|
136 |
+
# still support conditional requests, so we will still
|
137 |
+
# minimize traffic sent in cases where the page hasn't
|
138 |
+
# changed at all, we will just always incur the round
|
139 |
+
# trip for the conditional GET now instead of only
|
140 |
+
# once per 10 minutes.
|
141 |
+
# For more information, please see pypa/pip#5670.
|
142 |
+
"Cache-Control": "max-age=0",
|
143 |
+
},
|
144 |
+
)
|
145 |
+
raise_for_status(resp)
|
146 |
+
|
147 |
+
# The check for archives above only works if the url ends with
|
148 |
+
# something that looks like an archive. However that is not a
|
149 |
+
# requirement of an url. Unless we issue a HEAD request on every
|
150 |
+
# url we cannot know ahead of time for sure if something is HTML
|
151 |
+
# or not. However we can check after we've downloaded it.
|
152 |
+
_ensure_html_header(resp)
|
153 |
+
|
154 |
+
return resp
|
155 |
+
|
156 |
+
|
157 |
+
def _get_encoding_from_headers(headers: ResponseHeaders) -> Optional[str]:
|
158 |
+
"""Determine if we have any encoding information in our headers."""
|
159 |
+
if headers and "Content-Type" in headers:
|
160 |
+
content_type, params = cgi.parse_header(headers["Content-Type"])
|
161 |
+
if "charset" in params:
|
162 |
+
return params["charset"]
|
163 |
+
return None
|
164 |
+
|
165 |
+
|
166 |
+
def _determine_base_url(document: HTMLElement, page_url: str) -> str:
|
167 |
+
"""Determine the HTML document's base URL.
|
168 |
+
|
169 |
+
This looks for a ``<base>`` tag in the HTML document. If present, its href
|
170 |
+
attribute denotes the base URL of anchor tags in the document. If there is
|
171 |
+
no such tag (or if it does not have a valid href attribute), the HTML
|
172 |
+
file's URL is used as the base URL.
|
173 |
+
|
174 |
+
:param document: An HTML document representation. The current
|
175 |
+
implementation expects the result of ``html5lib.parse()``.
|
176 |
+
:param page_url: The URL of the HTML document.
|
177 |
+
|
178 |
+
TODO: Remove when `html5lib` is dropped.
|
179 |
+
"""
|
180 |
+
for base in document.findall(".//base"):
|
181 |
+
href = base.get("href")
|
182 |
+
if href is not None:
|
183 |
+
return href
|
184 |
+
return page_url
|
185 |
+
|
186 |
+
|
187 |
+
def _clean_url_path_part(part: str) -> str:
|
188 |
+
"""
|
189 |
+
Clean a "part" of a URL path (i.e. after splitting on "@" characters).
|
190 |
+
"""
|
191 |
+
# We unquote prior to quoting to make sure nothing is double quoted.
|
192 |
+
return urllib.parse.quote(urllib.parse.unquote(part))
|
193 |
+
|
194 |
+
|
195 |
+
def _clean_file_url_path(part: str) -> str:
|
196 |
+
"""
|
197 |
+
Clean the first part of a URL path that corresponds to a local
|
198 |
+
filesystem path (i.e. the first part after splitting on "@" characters).
|
199 |
+
"""
|
200 |
+
# We unquote prior to quoting to make sure nothing is double quoted.
|
201 |
+
# Also, on Windows the path part might contain a drive letter which
|
202 |
+
# should not be quoted. On Linux where drive letters do not
|
203 |
+
# exist, the colon should be quoted. We rely on urllib.request
|
204 |
+
# to do the right thing here.
|
205 |
+
return urllib.request.pathname2url(urllib.request.url2pathname(part))
|
206 |
+
|
207 |
+
|
208 |
+
# percent-encoded: /
|
209 |
+
_reserved_chars_re = re.compile("(@|%2F)", re.IGNORECASE)
|
210 |
+
|
211 |
+
|
212 |
+
def _clean_url_path(path: str, is_local_path: bool) -> str:
|
213 |
+
"""
|
214 |
+
Clean the path portion of a URL.
|
215 |
+
"""
|
216 |
+
if is_local_path:
|
217 |
+
clean_func = _clean_file_url_path
|
218 |
+
else:
|
219 |
+
clean_func = _clean_url_path_part
|
220 |
+
|
221 |
+
# Split on the reserved characters prior to cleaning so that
|
222 |
+
# revision strings in VCS URLs are properly preserved.
|
223 |
+
parts = _reserved_chars_re.split(path)
|
224 |
+
|
225 |
+
cleaned_parts = []
|
226 |
+
for to_clean, reserved in pairwise(itertools.chain(parts, [""])):
|
227 |
+
cleaned_parts.append(clean_func(to_clean))
|
228 |
+
# Normalize %xx escapes (e.g. %2f -> %2F)
|
229 |
+
cleaned_parts.append(reserved.upper())
|
230 |
+
|
231 |
+
return "".join(cleaned_parts)
|
232 |
+
|
233 |
+
|
234 |
+
def _clean_link(url: str) -> str:
|
235 |
+
"""
|
236 |
+
Make sure a link is fully quoted.
|
237 |
+
For example, if ' ' occurs in the URL, it will be replaced with "%20",
|
238 |
+
and without double-quoting other characters.
|
239 |
+
"""
|
240 |
+
# Split the URL into parts according to the general structure
|
241 |
+
# `scheme://netloc/path;parameters?query#fragment`.
|
242 |
+
result = urllib.parse.urlparse(url)
|
243 |
+
# If the netloc is empty, then the URL refers to a local filesystem path.
|
244 |
+
is_local_path = not result.netloc
|
245 |
+
path = _clean_url_path(result.path, is_local_path=is_local_path)
|
246 |
+
return urllib.parse.urlunparse(result._replace(path=path))
|
247 |
+
|
248 |
+
|
249 |
+
def _create_link_from_element(
|
250 |
+
element_attribs: Dict[str, Optional[str]],
|
251 |
+
page_url: str,
|
252 |
+
base_url: str,
|
253 |
+
) -> Optional[Link]:
|
254 |
+
"""
|
255 |
+
Convert an anchor element's attributes in a simple repository page to a Link.
|
256 |
+
"""
|
257 |
+
href = element_attribs.get("href")
|
258 |
+
if not href:
|
259 |
+
return None
|
260 |
+
|
261 |
+
url = _clean_link(urllib.parse.urljoin(base_url, href))
|
262 |
+
pyrequire = element_attribs.get("data-requires-python")
|
263 |
+
yanked_reason = element_attribs.get("data-yanked")
|
264 |
+
|
265 |
+
link = Link(
|
266 |
+
url,
|
267 |
+
comes_from=page_url,
|
268 |
+
requires_python=pyrequire,
|
269 |
+
yanked_reason=yanked_reason,
|
270 |
+
)
|
271 |
+
|
272 |
+
return link
|
273 |
+
|
274 |
+
|
275 |
+
class CacheablePageContent:
|
276 |
+
def __init__(self, page: "HTMLPage") -> None:
|
277 |
+
assert page.cache_link_parsing
|
278 |
+
self.page = page
|
279 |
+
|
280 |
+
def __eq__(self, other: object) -> bool:
|
281 |
+
return isinstance(other, type(self)) and self.page.url == other.page.url
|
282 |
+
|
283 |
+
def __hash__(self) -> int:
|
284 |
+
return hash(self.page.url)
|
285 |
+
|
286 |
+
|
287 |
+
class ParseLinks(Protocol):
|
288 |
+
def __call__(
|
289 |
+
self, page: "HTMLPage", use_deprecated_html5lib: bool
|
290 |
+
) -> Iterable[Link]:
|
291 |
+
...
|
292 |
+
|
293 |
+
|
294 |
+
def with_cached_html_pages(fn: ParseLinks) -> ParseLinks:
|
295 |
+
"""
|
296 |
+
Given a function that parses an Iterable[Link] from an HTMLPage, cache the
|
297 |
+
function's result (keyed by CacheablePageContent), unless the HTMLPage
|
298 |
+
`page` has `page.cache_link_parsing == False`.
|
299 |
+
"""
|
300 |
+
|
301 |
+
@functools.lru_cache(maxsize=None)
|
302 |
+
def wrapper(
|
303 |
+
cacheable_page: CacheablePageContent, use_deprecated_html5lib: bool
|
304 |
+
) -> List[Link]:
|
305 |
+
return list(fn(cacheable_page.page, use_deprecated_html5lib))
|
306 |
+
|
307 |
+
@functools.wraps(fn)
|
308 |
+
def wrapper_wrapper(page: "HTMLPage", use_deprecated_html5lib: bool) -> List[Link]:
|
309 |
+
if page.cache_link_parsing:
|
310 |
+
return wrapper(CacheablePageContent(page), use_deprecated_html5lib)
|
311 |
+
return list(fn(page, use_deprecated_html5lib))
|
312 |
+
|
313 |
+
return wrapper_wrapper
|
314 |
+
|
315 |
+
|
316 |
+
def _parse_links_html5lib(page: "HTMLPage") -> Iterable[Link]:
|
317 |
+
"""
|
318 |
+
Parse an HTML document, and yield its anchor elements as Link objects.
|
319 |
+
|
320 |
+
TODO: Remove when `html5lib` is dropped.
|
321 |
+
"""
|
322 |
+
document = html5lib.parse(
|
323 |
+
page.content,
|
324 |
+
transport_encoding=page.encoding,
|
325 |
+
namespaceHTMLElements=False,
|
326 |
+
)
|
327 |
+
|
328 |
+
url = page.url
|
329 |
+
base_url = _determine_base_url(document, url)
|
330 |
+
for anchor in document.findall(".//a"):
|
331 |
+
link = _create_link_from_element(
|
332 |
+
anchor.attrib,
|
333 |
+
page_url=url,
|
334 |
+
base_url=base_url,
|
335 |
+
)
|
336 |
+
if link is None:
|
337 |
+
continue
|
338 |
+
yield link
|
339 |
+
|
340 |
+
|
341 |
+
@with_cached_html_pages
|
342 |
+
def parse_links(page: "HTMLPage", use_deprecated_html5lib: bool) -> Iterable[Link]:
|
343 |
+
"""
|
344 |
+
Parse an HTML document, and yield its anchor elements as Link objects.
|
345 |
+
"""
|
346 |
+
encoding = page.encoding or "utf-8"
|
347 |
+
|
348 |
+
# Check if the page starts with a valid doctype, to decide whether to use
|
349 |
+
# http.parser or (deprecated) html5lib for parsing -- unless explicitly
|
350 |
+
# requested to use html5lib.
|
351 |
+
if not use_deprecated_html5lib:
|
352 |
+
expected_doctype = "<!doctype html>".encode(encoding)
|
353 |
+
actual_start = page.content[: len(expected_doctype)]
|
354 |
+
if actual_start.decode(encoding).lower() != "<!doctype html>":
|
355 |
+
deprecated(
|
356 |
+
reason=(
|
357 |
+
f"The HTML index page being used ({page.url}) is not a proper "
|
358 |
+
"HTML 5 document. This is in violation of PEP 503 which requires "
|
359 |
+
"these pages to be well-formed HTML 5 documents. Please reach out "
|
360 |
+
"to the owners of this index page, and ask them to update this "
|
361 |
+
"index page to a valid HTML 5 document."
|
362 |
+
),
|
363 |
+
replacement=None,
|
364 |
+
gone_in="22.2",
|
365 |
+
issue=10825,
|
366 |
+
)
|
367 |
+
use_deprecated_html5lib = True
|
368 |
+
|
369 |
+
if use_deprecated_html5lib:
|
370 |
+
yield from _parse_links_html5lib(page)
|
371 |
+
return
|
372 |
+
|
373 |
+
parser = HTMLLinkParser()
|
374 |
+
parser.feed(page.content.decode(encoding))
|
375 |
+
|
376 |
+
url = page.url
|
377 |
+
base_url = parser.base_url or url
|
378 |
+
for anchor in parser.anchors:
|
379 |
+
link = _create_link_from_element(
|
380 |
+
anchor,
|
381 |
+
page_url=url,
|
382 |
+
base_url=base_url,
|
383 |
+
)
|
384 |
+
if link is None:
|
385 |
+
continue
|
386 |
+
yield link
|
387 |
+
|
388 |
+
|
389 |
+
class HTMLPage:
|
390 |
+
"""Represents one page, along with its URL"""
|
391 |
+
|
392 |
+
def __init__(
|
393 |
+
self,
|
394 |
+
content: bytes,
|
395 |
+
encoding: Optional[str],
|
396 |
+
url: str,
|
397 |
+
cache_link_parsing: bool = True,
|
398 |
+
) -> None:
|
399 |
+
"""
|
400 |
+
:param encoding: the encoding to decode the given content.
|
401 |
+
:param url: the URL from which the HTML was downloaded.
|
402 |
+
:param cache_link_parsing: whether links parsed from this page's url
|
403 |
+
should be cached. PyPI index urls should
|
404 |
+
have this set to False, for example.
|
405 |
+
"""
|
406 |
+
self.content = content
|
407 |
+
self.encoding = encoding
|
408 |
+
self.url = url
|
409 |
+
self.cache_link_parsing = cache_link_parsing
|
410 |
+
|
411 |
+
def __str__(self) -> str:
|
412 |
+
return redact_auth_from_url(self.url)
|
413 |
+
|
414 |
+
|
415 |
+
class HTMLLinkParser(HTMLParser):
|
416 |
+
"""
|
417 |
+
HTMLParser that keeps the first base HREF and a list of all anchor
|
418 |
+
elements' attributes.
|
419 |
+
"""
|
420 |
+
|
421 |
+
def __init__(self, *args: Any, **kwargs: Any) -> None:
|
422 |
+
super().__init__(*args, **kwargs)
|
423 |
+
self._seen_decl = False
|
424 |
+
self.base_url: Optional[str] = None
|
425 |
+
self.anchors: List[Dict[str, Optional[str]]] = []
|
426 |
+
|
427 |
+
def handle_decl(self, decl: str) -> None:
|
428 |
+
if decl.lower() != "doctype html":
|
429 |
+
self._raise_error()
|
430 |
+
self._seen_decl = True
|
431 |
+
|
432 |
+
def handle_starttag(self, tag: str, attrs: List[Tuple[str, Optional[str]]]) -> None:
|
433 |
+
if not self._seen_decl:
|
434 |
+
self._raise_error()
|
435 |
+
|
436 |
+
if tag == "base" and self.base_url is None:
|
437 |
+
href = self.get_href(attrs)
|
438 |
+
if href is not None:
|
439 |
+
self.base_url = href
|
440 |
+
elif tag == "a":
|
441 |
+
self.anchors.append(dict(attrs))
|
442 |
+
|
443 |
+
def get_href(self, attrs: List[Tuple[str, Optional[str]]]) -> Optional[str]:
|
444 |
+
for name, value in attrs:
|
445 |
+
if name == "href":
|
446 |
+
return value
|
447 |
+
return None
|
448 |
+
|
449 |
+
def _raise_error(self) -> None:
|
450 |
+
raise ValueError(
|
451 |
+
"HTML doctype missing or incorrect. Expected <!DOCTYPE html>.\n\n"
|
452 |
+
"If you believe this error to be incorrect, try passing the "
|
453 |
+
"command line option --use-deprecated=html5lib and please leave "
|
454 |
+
"a comment on the pip issue at https://github.com/pypa/pip/issues/10825."
|
455 |
+
)
|
456 |
+
|
457 |
+
|
458 |
+
def _handle_get_page_fail(
|
459 |
+
link: Link,
|
460 |
+
reason: Union[str, Exception],
|
461 |
+
meth: Optional[Callable[..., None]] = None,
|
462 |
+
) -> None:
|
463 |
+
if meth is None:
|
464 |
+
meth = logger.debug
|
465 |
+
meth("Could not fetch URL %s: %s - skipping", link, reason)
|
466 |
+
|
467 |
+
|
468 |
+
def _make_html_page(response: Response, cache_link_parsing: bool = True) -> HTMLPage:
|
469 |
+
encoding = _get_encoding_from_headers(response.headers)
|
470 |
+
return HTMLPage(
|
471 |
+
response.content,
|
472 |
+
encoding=encoding,
|
473 |
+
url=response.url,
|
474 |
+
cache_link_parsing=cache_link_parsing,
|
475 |
+
)
|
476 |
+
|
477 |
+
|
478 |
+
def _get_html_page(
|
479 |
+
link: Link, session: Optional[PipSession] = None
|
480 |
+
) -> Optional["HTMLPage"]:
|
481 |
+
if session is None:
|
482 |
+
raise TypeError(
|
483 |
+
"_get_html_page() missing 1 required keyword argument: 'session'"
|
484 |
+
)
|
485 |
+
|
486 |
+
url = link.url.split("#", 1)[0]
|
487 |
+
|
488 |
+
# Check for VCS schemes that do not support lookup as web pages.
|
489 |
+
vcs_scheme = _match_vcs_scheme(url)
|
490 |
+
if vcs_scheme:
|
491 |
+
logger.warning(
|
492 |
+
"Cannot look at %s URL %s because it does not support lookup as web pages.",
|
493 |
+
vcs_scheme,
|
494 |
+
link,
|
495 |
+
)
|
496 |
+
return None
|
497 |
+
|
498 |
+
# Tack index.html onto file:// URLs that point to directories
|
499 |
+
scheme, _, path, _, _, _ = urllib.parse.urlparse(url)
|
500 |
+
if scheme == "file" and os.path.isdir(urllib.request.url2pathname(path)):
|
501 |
+
# add trailing slash if not present so urljoin doesn't trim
|
502 |
+
# final segment
|
503 |
+
if not url.endswith("/"):
|
504 |
+
url += "/"
|
505 |
+
url = urllib.parse.urljoin(url, "index.html")
|
506 |
+
logger.debug(" file: URL is directory, getting %s", url)
|
507 |
+
|
508 |
+
try:
|
509 |
+
resp = _get_html_response(url, session=session)
|
510 |
+
except _NotHTTP:
|
511 |
+
logger.warning(
|
512 |
+
"Skipping page %s because it looks like an archive, and cannot "
|
513 |
+
"be checked by a HTTP HEAD request.",
|
514 |
+
link,
|
515 |
+
)
|
516 |
+
except _NotHTML as exc:
|
517 |
+
logger.warning(
|
518 |
+
"Skipping page %s because the %s request got Content-Type: %s."
|
519 |
+
"The only supported Content-Type is text/html",
|
520 |
+
link,
|
521 |
+
exc.request_desc,
|
522 |
+
exc.content_type,
|
523 |
+
)
|
524 |
+
except NetworkConnectionError as exc:
|
525 |
+
_handle_get_page_fail(link, exc)
|
526 |
+
except RetryError as exc:
|
527 |
+
_handle_get_page_fail(link, exc)
|
528 |
+
except SSLError as exc:
|
529 |
+
reason = "There was a problem confirming the ssl certificate: "
|
530 |
+
reason += str(exc)
|
531 |
+
_handle_get_page_fail(link, reason, meth=logger.info)
|
532 |
+
except requests.ConnectionError as exc:
|
533 |
+
_handle_get_page_fail(link, f"connection error: {exc}")
|
534 |
+
except requests.Timeout:
|
535 |
+
_handle_get_page_fail(link, "timed out")
|
536 |
+
else:
|
537 |
+
return _make_html_page(resp, cache_link_parsing=link.cache_link_parsing)
|
538 |
+
return None
|
539 |
+
|
540 |
+
|
541 |
+
class CollectedSources(NamedTuple):
|
542 |
+
find_links: Sequence[Optional[LinkSource]]
|
543 |
+
index_urls: Sequence[Optional[LinkSource]]
|
544 |
+
|
545 |
+
|
546 |
+
class LinkCollector:
|
547 |
+
|
548 |
+
"""
|
549 |
+
Responsible for collecting Link objects from all configured locations,
|
550 |
+
making network requests as needed.
|
551 |
+
|
552 |
+
The class's main method is its collect_sources() method.
|
553 |
+
"""
|
554 |
+
|
555 |
+
def __init__(
|
556 |
+
self,
|
557 |
+
session: PipSession,
|
558 |
+
search_scope: SearchScope,
|
559 |
+
) -> None:
|
560 |
+
self.search_scope = search_scope
|
561 |
+
self.session = session
|
562 |
+
|
563 |
+
@classmethod
|
564 |
+
def create(
|
565 |
+
cls,
|
566 |
+
session: PipSession,
|
567 |
+
options: Values,
|
568 |
+
suppress_no_index: bool = False,
|
569 |
+
) -> "LinkCollector":
|
570 |
+
"""
|
571 |
+
:param session: The Session to use to make requests.
|
572 |
+
:param suppress_no_index: Whether to ignore the --no-index option
|
573 |
+
when constructing the SearchScope object.
|
574 |
+
"""
|
575 |
+
index_urls = [options.index_url] + options.extra_index_urls
|
576 |
+
if options.no_index and not suppress_no_index:
|
577 |
+
logger.debug(
|
578 |
+
"Ignoring indexes: %s",
|
579 |
+
",".join(redact_auth_from_url(url) for url in index_urls),
|
580 |
+
)
|
581 |
+
index_urls = []
|
582 |
+
|
583 |
+
# Make sure find_links is a list before passing to create().
|
584 |
+
find_links = options.find_links or []
|
585 |
+
|
586 |
+
search_scope = SearchScope.create(
|
587 |
+
find_links=find_links,
|
588 |
+
index_urls=index_urls,
|
589 |
+
)
|
590 |
+
link_collector = LinkCollector(
|
591 |
+
session=session,
|
592 |
+
search_scope=search_scope,
|
593 |
+
)
|
594 |
+
return link_collector
|
595 |
+
|
596 |
+
@property
|
597 |
+
def find_links(self) -> List[str]:
|
598 |
+
return self.search_scope.find_links
|
599 |
+
|
600 |
+
def fetch_page(self, location: Link) -> Optional[HTMLPage]:
|
601 |
+
"""
|
602 |
+
Fetch an HTML page containing package links.
|
603 |
+
"""
|
604 |
+
return _get_html_page(location, session=self.session)
|
605 |
+
|
606 |
+
def collect_sources(
|
607 |
+
self,
|
608 |
+
project_name: str,
|
609 |
+
candidates_from_page: CandidatesFromPage,
|
610 |
+
) -> CollectedSources:
|
611 |
+
# The OrderedDict calls deduplicate sources by URL.
|
612 |
+
index_url_sources = collections.OrderedDict(
|
613 |
+
build_source(
|
614 |
+
loc,
|
615 |
+
candidates_from_page=candidates_from_page,
|
616 |
+
page_validator=self.session.is_secure_origin,
|
617 |
+
expand_dir=False,
|
618 |
+
cache_link_parsing=False,
|
619 |
+
)
|
620 |
+
for loc in self.search_scope.get_index_urls_locations(project_name)
|
621 |
+
).values()
|
622 |
+
find_links_sources = collections.OrderedDict(
|
623 |
+
build_source(
|
624 |
+
loc,
|
625 |
+
candidates_from_page=candidates_from_page,
|
626 |
+
page_validator=self.session.is_secure_origin,
|
627 |
+
expand_dir=True,
|
628 |
+
cache_link_parsing=True,
|
629 |
+
)
|
630 |
+
for loc in self.find_links
|
631 |
+
).values()
|
632 |
+
|
633 |
+
if logger.isEnabledFor(logging.DEBUG):
|
634 |
+
lines = [
|
635 |
+
f"* {s.link}"
|
636 |
+
for s in itertools.chain(find_links_sources, index_url_sources)
|
637 |
+
if s is not None and s.link is not None
|
638 |
+
]
|
639 |
+
lines = [
|
640 |
+
f"{len(lines)} location(s) to search "
|
641 |
+
f"for versions of {project_name}:"
|
642 |
+
] + lines
|
643 |
+
logger.debug("\n".join(lines))
|
644 |
+
|
645 |
+
return CollectedSources(
|
646 |
+
find_links=list(find_links_sources),
|
647 |
+
index_urls=list(index_url_sources),
|
648 |
+
)
|
env-llmeval/lib/python3.10/site-packages/pip/_internal/index/package_finder.py
ADDED
@@ -0,0 +1,1004 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""Routines related to PyPI, indexes"""

# The following comment should be removed at some point in the future.
# mypy: strict-optional=False

import functools
import itertools
import logging
import re
from typing import FrozenSet, Iterable, List, Optional, Set, Tuple, Union

from pip._vendor.packaging import specifiers
from pip._vendor.packaging.tags import Tag
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.packaging.version import _BaseVersion
from pip._vendor.packaging.version import parse as parse_version

from pip._internal.exceptions import (
    BestVersionAlreadyInstalled,
    DistributionNotFound,
    InvalidWheelFilename,
    UnsupportedWheel,
)
from pip._internal.index.collector import LinkCollector, parse_links
from pip._internal.models.candidate import InstallationCandidate
from pip._internal.models.format_control import FormatControl
from pip._internal.models.link import Link
from pip._internal.models.search_scope import SearchScope
from pip._internal.models.selection_prefs import SelectionPreferences
from pip._internal.models.target_python import TargetPython
from pip._internal.models.wheel import Wheel
from pip._internal.req import InstallRequirement
from pip._internal.utils._log import getLogger
from pip._internal.utils.filetypes import WHEEL_EXTENSION
from pip._internal.utils.hashes import Hashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import build_netloc
from pip._internal.utils.packaging import check_requires_python
from pip._internal.utils.unpacking import SUPPORTED_EXTENSIONS

__all__ = ["FormatControl", "BestCandidateResult", "PackageFinder"]


logger = getLogger(__name__)

BuildTag = Union[Tuple[()], Tuple[int, str]]
CandidateSortingKey = Tuple[int, int, int, _BaseVersion, Optional[int], BuildTag]

def _check_link_requires_python(
    link: Link,
    version_info: Tuple[int, int, int],
    ignore_requires_python: bool = False,
) -> bool:
    """
    Return whether the given Python version is compatible with a link's
    "Requires-Python" value.

    :param version_info: A 3-tuple of ints representing the Python
        major-minor-micro version to check.
    :param ignore_requires_python: Whether to ignore the "Requires-Python"
        value if the given Python version isn't compatible.
    """
    try:
        is_compatible = check_requires_python(
            link.requires_python,
            version_info=version_info,
        )
    except specifiers.InvalidSpecifier:
        logger.debug(
            "Ignoring invalid Requires-Python (%r) for link: %s",
            link.requires_python,
            link,
        )
    else:
        if not is_compatible:
            version = ".".join(map(str, version_info))
            if not ignore_requires_python:
                logger.verbose(
                    "Link requires a different Python (%s not in: %r): %s",
                    version,
                    link.requires_python,
                    link,
                )
                return False

            logger.debug(
                "Ignoring failed Requires-Python check (%s not in: %r) for link: %s",
                version,
                link.requires_python,
                link,
            )

    return True

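# Illustrative sketch (not part of pip's source): exercising the helper above
# directly. The URL and the Requires-Python value are made-up examples.
#
#   link = Link(
#       "https://example.com/pkg-1.0.tar.gz",
#       requires_python=">=3.8",
#   )
#   _check_link_requires_python(link, version_info=(3, 6, 0))   # False
#   _check_link_requires_python(link, version_info=(3, 10, 2))  # True
#   _check_link_requires_python(
#       link, version_info=(3, 6, 0), ignore_requires_python=True
#   )  # True, but the failed check is still logged at debug level
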
class LinkEvaluator:

    """
    Responsible for evaluating links for a particular project.
    """

    _py_version_re = re.compile(r"-py([123]\.?[0-9]?)$")

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    def __init__(
        self,
        project_name: str,
        canonical_name: str,
        formats: FrozenSet[str],
        target_python: TargetPython,
        allow_yanked: bool,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        :param project_name: The user supplied package name.
        :param canonical_name: The canonical package name.
        :param formats: The formats allowed for this package. Should be a set
            with 'binary' or 'source' or both in it.
        :param target_python: The target Python interpreter to use when
            evaluating link compatibility. This is used, for example, to
            check wheel compatibility, as well as when checking the Python
            version, e.g. the Python version embedded in a link filename
            (or egg fragment) and against an HTML link's optional PEP 503
            "data-requires-python" attribute.
        :param allow_yanked: Whether files marked as yanked (in the sense
            of PEP 592) are permitted to be candidates for install.
        :param ignore_requires_python: Whether to ignore incompatible
            PEP 503 "data-requires-python" values in HTML links. Defaults
            to False.
        """
        if ignore_requires_python is None:
            ignore_requires_python = False

        self._allow_yanked = allow_yanked
        self._canonical_name = canonical_name
        self._ignore_requires_python = ignore_requires_python
        self._formats = formats
        self._target_python = target_python

        self.project_name = project_name

    def evaluate_link(self, link: Link) -> Tuple[bool, Optional[str]]:
        """
        Determine whether a link is a candidate for installation.

        :return: A tuple (is_candidate, result), where `result` is (1) a
            version string if `is_candidate` is True, and (2) if
            `is_candidate` is False, an optional string to log the reason
            the link fails to qualify.
        """
        version = None
        if link.is_yanked and not self._allow_yanked:
            reason = link.yanked_reason or "<none given>"
            return (False, f"yanked for reason: {reason}")

        if link.egg_fragment:
            egg_info = link.egg_fragment
            ext = link.ext
        else:
            egg_info, ext = link.splitext()
            if not ext:
                return (False, "not a file")
            if ext not in SUPPORTED_EXTENSIONS:
                return (False, f"unsupported archive format: {ext}")
        if "binary" not in self._formats and ext == WHEEL_EXTENSION:
            reason = "No binaries permitted for {}".format(self.project_name)
            return (False, reason)
        if "macosx10" in link.path and ext == ".zip":
            return (False, "macosx10 one")
        if ext == WHEEL_EXTENSION:
            try:
                wheel = Wheel(link.filename)
            except InvalidWheelFilename:
                return (False, "invalid wheel filename")
            if canonicalize_name(wheel.name) != self._canonical_name:
                reason = "wrong project name (not {})".format(self.project_name)
                return (False, reason)

            supported_tags = self._target_python.get_tags()
            if not wheel.supported(supported_tags):
                # Include the wheel's tags in the reason string to
                # simplify troubleshooting compatibility issues.
                file_tags = wheel.get_formatted_file_tags()
                reason = (
                    "none of the wheel's tags ({}) are compatible "
                    "(run pip debug --verbose to show compatible tags)".format(
                        ", ".join(file_tags)
                    )
                )
                return (False, reason)

            version = wheel.version

        # This should be up by the self.ok_binary check, but see issue 2700.
        if "source" not in self._formats and ext != WHEEL_EXTENSION:
            reason = f"No sources permitted for {self.project_name}"
            return (False, reason)

        if not version:
            version = _extract_version_from_fragment(
                egg_info,
                self._canonical_name,
            )
        if not version:
            reason = f"Missing project version for {self.project_name}"
            return (False, reason)

        match = self._py_version_re.search(version)
        if match:
            version = version[: match.start()]
            py_version = match.group(1)
            if py_version != self._target_python.py_version:
                return (False, "Python version is incorrect")

        supports_python = _check_link_requires_python(
            link,
            version_info=self._target_python.py_version_info,
            ignore_requires_python=self._ignore_requires_python,
        )
        if not supports_python:
            # Return None for the reason text to suppress calling
            # _log_skipped_link().
            return (False, None)

        logger.debug("Found link %s, version: %s", link, version)

        return (True, version)

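# Illustrative sketch (not part of pip's source): how a LinkEvaluator can be
# driven on a single link. The project and wheel URL are made-up examples;
# in pip itself, instances come from PackageFinder.make_link_evaluator().
#
#   evaluator = LinkEvaluator(
#       project_name="requests",
#       canonical_name=canonicalize_name("requests"),
#       formats=frozenset({"binary", "source"}),
#       target_python=TargetPython(),
#       allow_yanked=False,
#   )
#   is_candidate, detail = evaluator.evaluate_link(
#       Link("https://example.com/requests-2.27.1-py3-none-any.whl")
#   )
#   # On a compatible interpreter, is_candidate is True and detail is the
#   # version string "2.27.1"; otherwise detail describes why it was skipped.
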
def filter_unallowed_hashes(
    candidates: List[InstallationCandidate],
    hashes: Hashes,
    project_name: str,
) -> List[InstallationCandidate]:
    """
    Filter out candidates whose hashes aren't allowed, and return a new
    list of candidates.

    If at least one candidate has an allowed hash, then all candidates with
    either an allowed hash or no hash specified are returned. Otherwise,
    the given candidates are returned.

    Including the candidates with no hash specified when there is a match
    allows a warning to be logged if there is a more preferred candidate
    with no hash specified. Returning all candidates in the case of no
    matches lets pip report the hash of the candidate that would otherwise
    have been installed (e.g. permitting the user to more easily update
    their requirements file with the desired hash).
    """
    if not hashes:
        logger.debug(
            "Given no hashes to check %s links for project %r: "
            "discarding no candidates",
            len(candidates),
            project_name,
        )
        # Make sure we're not returning back the given value.
        return list(candidates)

    matches_or_no_digest = []
    # Collect the non-matches for logging purposes.
    non_matches = []
    match_count = 0
    for candidate in candidates:
        link = candidate.link
        if not link.has_hash:
            pass
        elif link.is_hash_allowed(hashes=hashes):
            match_count += 1
        else:
            non_matches.append(candidate)
            continue

        matches_or_no_digest.append(candidate)

    if match_count:
        filtered = matches_or_no_digest
    else:
        # Make sure we're not returning back the given value.
        filtered = list(candidates)

    if len(filtered) == len(candidates):
        discard_message = "discarding no candidates"
    else:
        discard_message = "discarding {} non-matches:\n  {}".format(
            len(non_matches),
            "\n  ".join(str(candidate.link) for candidate in non_matches),
        )

    logger.debug(
        "Checked %s links for project %r against %s hashes "
        "(%s matches, %s no digest): %s",
        len(candidates),
        project_name,
        hashes.digest_count,
        match_count,
        len(matches_or_no_digest) - match_count,
        discard_message,
    )

    return filtered

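# Illustrative sketch (not part of pip's source): Hashes maps hash names to
# allowed hex digests. The digest below is a placeholder, and `candidates`
# is assumed to be a list of InstallationCandidate objects built elsewhere.
#
#   allowed = Hashes({"sha256": ["<expected-hex-digest>"]})
#   kept = filter_unallowed_hashes(
#       candidates=candidates,
#       hashes=allowed,
#       project_name="example-project",
#   )
#   # If at least one candidate matches, `kept` drops the mismatching ones;
#   # otherwise the original list is returned unchanged (as a new list).
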
class CandidatePreferences:

    """
    Encapsulates some of the preferences for filtering and sorting
    InstallationCandidate objects.
    """

    def __init__(
        self,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
    ) -> None:
        """
        :param allow_all_prereleases: Whether to allow all pre-releases.
        """
        self.allow_all_prereleases = allow_all_prereleases
        self.prefer_binary = prefer_binary


class BestCandidateResult:
    """A collection of candidates, returned by `PackageFinder.find_best_candidate`.

    This class is only intended to be instantiated by CandidateEvaluator's
    `compute_best_candidate()` method.
    """

    def __init__(
        self,
        candidates: List[InstallationCandidate],
        applicable_candidates: List[InstallationCandidate],
        best_candidate: Optional[InstallationCandidate],
    ) -> None:
        """
        :param candidates: A sequence of all available candidates found.
        :param applicable_candidates: The applicable candidates.
        :param best_candidate: The most preferred candidate found, or None
            if no applicable candidates were found.
        """
        assert set(applicable_candidates) <= set(candidates)

        if best_candidate is None:
            assert not applicable_candidates
        else:
            assert best_candidate in applicable_candidates

        self._applicable_candidates = applicable_candidates
        self._candidates = candidates

        self.best_candidate = best_candidate

    def iter_all(self) -> Iterable[InstallationCandidate]:
        """Iterate through all candidates."""
        return iter(self._candidates)

    def iter_applicable(self) -> Iterable[InstallationCandidate]:
        """Iterate through the applicable candidates."""
        return iter(self._applicable_candidates)

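# Illustrative sketch (not part of pip's source): a BestCandidateResult is
# normally produced by CandidateEvaluator.compute_best_candidate() and then
# inspected, rather than constructed by hand.
#
#   result = evaluator.compute_best_candidate(candidates)
#   if result.best_candidate is None:
#       seen_versions = sorted(str(c.version) for c in result.iter_all())
#       # e.g. report "no applicable version among: ..." to the user
#   else:
#       chosen_link = result.best_candidate.link
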
class CandidateEvaluator:

    """
    Responsible for filtering and sorting candidates for installation based
    on what tags are valid.
    """

    @classmethod
    def create(
        cls,
        project_name: str,
        target_python: Optional[TargetPython] = None,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> "CandidateEvaluator":
        """Create a CandidateEvaluator object.

        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.
        :param hashes: An optional collection of allowed hashes.
        """
        if target_python is None:
            target_python = TargetPython()
        if specifier is None:
            specifier = specifiers.SpecifierSet()

        supported_tags = target_python.get_tags()

        return cls(
            project_name=project_name,
            supported_tags=supported_tags,
            specifier=specifier,
            prefer_binary=prefer_binary,
            allow_all_prereleases=allow_all_prereleases,
            hashes=hashes,
        )

    def __init__(
        self,
        project_name: str,
        supported_tags: List[Tag],
        specifier: specifiers.BaseSpecifier,
        prefer_binary: bool = False,
        allow_all_prereleases: bool = False,
        hashes: Optional[Hashes] = None,
    ) -> None:
        """
        :param supported_tags: The PEP 425 tags supported by the target
            Python in order of preference (most preferred first).
        """
        self._allow_all_prereleases = allow_all_prereleases
        self._hashes = hashes
        self._prefer_binary = prefer_binary
        self._project_name = project_name
        self._specifier = specifier
        self._supported_tags = supported_tags
        # Since the index of the tag in the _supported_tags list is used
        # as a priority, precompute a map from tag to index/priority to be
        # used in wheel.find_most_preferred_tag.
        self._wheel_tag_preferences = {
            tag: idx for idx, tag in enumerate(supported_tags)
        }

    def get_applicable_candidates(
        self,
        candidates: List[InstallationCandidate],
    ) -> List[InstallationCandidate]:
        """
        Return the applicable candidates from a list of candidates.
        """
        # Using None infers from the specifier instead.
        allow_prereleases = self._allow_all_prereleases or None
        specifier = self._specifier
        versions = {
            str(v)
            for v in specifier.filter(
                # We turn the version object into a str here because otherwise
                # when we're debundled but setuptools isn't, Python will see
                # packaging.version.Version and
                # pkg_resources._vendor.packaging.version.Version as different
                # types. This way we'll use a str as a common data interchange
                # format. If we stop using the pkg_resources provided specifier
                # and start using our own, we can drop the cast to str().
                (str(c.version) for c in candidates),
                prereleases=allow_prereleases,
            )
        }

        # Again, converting version to str to deal with debundling.
        applicable_candidates = [c for c in candidates if str(c.version) in versions]

        filtered_applicable_candidates = filter_unallowed_hashes(
            candidates=applicable_candidates,
            hashes=self._hashes,
            project_name=self._project_name,
        )

        return sorted(filtered_applicable_candidates, key=self._sort_key)

    def _sort_key(self, candidate: InstallationCandidate) -> CandidateSortingKey:
        """
        Function to pass as the `key` argument to a call to sorted() to sort
        InstallationCandidates by preference.

        Returns a tuple such that tuples sorting as greater using Python's
        default comparison operator are more preferred.

        The preference is as follows:

        First and foremost, candidates with allowed (matching) hashes are
        always preferred over candidates without matching hashes. This is
        because e.g. if the only candidate with an allowed hash is yanked,
        we still want to use that candidate.

        Second, excepting hash considerations, candidates that have been
        yanked (in the sense of PEP 592) are always less preferred than
        candidates that haven't been yanked. Then:

        If not finding wheels, they are sorted by version only.
        If finding wheels, then the sort order is by version, then:
          1. existing installs
          2. wheels ordered via Wheel.support_index_min(self._supported_tags)
          3. source archives
        If prefer_binary was set, then all wheels are sorted above sources.

        Note: it was considered to embed this logic into the Link
              comparison operators, but then different sdist links
              with the same version, would have to be considered equal
        """
        valid_tags = self._supported_tags
        support_num = len(valid_tags)
        build_tag: BuildTag = ()
        binary_preference = 0
        link = candidate.link
        if link.is_wheel:
            # can raise InvalidWheelFilename
            wheel = Wheel(link.filename)
            try:
                pri = -(
                    wheel.find_most_preferred_tag(
                        valid_tags, self._wheel_tag_preferences
                    )
                )
            except ValueError:
                raise UnsupportedWheel(
                    "{} is not a supported wheel for this platform. It "
                    "can't be sorted.".format(wheel.filename)
                )
            if self._prefer_binary:
                binary_preference = 1
            if wheel.build_tag is not None:
                match = re.match(r"^(\d+)(.*)$", wheel.build_tag)
                build_tag_groups = match.groups()
                build_tag = (int(build_tag_groups[0]), build_tag_groups[1])
        else:  # sdist
            pri = -(support_num)
        has_allowed_hash = int(link.is_hash_allowed(self._hashes))
        yank_value = -1 * int(link.is_yanked)  # -1 for yanked.
        return (
            has_allowed_hash,
            yank_value,
            binary_preference,
            candidate.version,
            pri,
            build_tag,
        )

    def sort_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> Optional[InstallationCandidate]:
        """
        Return the best candidate per the instance's sort order, or None if
        no candidate is acceptable.
        """
        if not candidates:
            return None
        best_candidate = max(candidates, key=self._sort_key)
        return best_candidate

    def compute_best_candidate(
        self,
        candidates: List[InstallationCandidate],
    ) -> BestCandidateResult:
        """
        Compute and return a `BestCandidateResult` instance.
        """
        applicable_candidates = self.get_applicable_candidates(candidates)

        best_candidate = self.sort_best_candidate(applicable_candidates)

        return BestCandidateResult(
            candidates,
            applicable_candidates=applicable_candidates,
            best_candidate=best_candidate,
        )

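# Illustrative sketch (not part of pip's source): filtering and ranking a
# candidate list for one project. The specifier string is a made-up example;
# pip itself builds evaluators via PackageFinder.make_candidate_evaluator().
#
#   evaluator = CandidateEvaluator.create(
#       project_name="requests",
#       specifier=specifiers.SpecifierSet(">=2.0,<3.0"),
#       prefer_binary=True,
#   )
#   applicable = evaluator.get_applicable_candidates(all_candidates)
#   best = evaluator.sort_best_candidate(applicable)
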
class PackageFinder:
    """This finds packages.

    This is meant to match easy_install's technique for looking for
    packages, by reading pages and looking for appropriate links.
    """

    def __init__(
        self,
        link_collector: LinkCollector,
        target_python: TargetPython,
        allow_yanked: bool,
        use_deprecated_html5lib: bool,
        format_control: Optional[FormatControl] = None,
        candidate_prefs: Optional[CandidatePreferences] = None,
        ignore_requires_python: Optional[bool] = None,
    ) -> None:
        """
        This constructor is primarily meant to be used by the create() class
        method and from tests.

        :param format_control: A FormatControl object, used to control
            the selection of source packages / binary packages when consulting
            the index and links.
        :param candidate_prefs: Options to use when creating a
            CandidateEvaluator object.
        """
        if candidate_prefs is None:
            candidate_prefs = CandidatePreferences()

        format_control = format_control or FormatControl(set(), set())

        self._allow_yanked = allow_yanked
        self._candidate_prefs = candidate_prefs
        self._ignore_requires_python = ignore_requires_python
        self._link_collector = link_collector
        self._target_python = target_python
        self._use_deprecated_html5lib = use_deprecated_html5lib

        self.format_control = format_control

        # These are boring links that have already been logged somehow.
        self._logged_links: Set[Link] = set()

    # Don't include an allow_yanked default value to make sure each call
    # site considers whether yanked releases are allowed. This also causes
    # that decision to be made explicit in the calling code, which helps
    # people when reading the code.
    @classmethod
    def create(
        cls,
        link_collector: LinkCollector,
        selection_prefs: SelectionPreferences,
        target_python: Optional[TargetPython] = None,
        *,
        use_deprecated_html5lib: bool,
    ) -> "PackageFinder":
        """Create a PackageFinder.

        :param selection_prefs: The candidate selection preferences, as a
            SelectionPreferences object.
        :param target_python: The target Python interpreter to use when
            checking compatibility. If None (the default), a TargetPython
            object will be constructed from the running Python.
        """
        if target_python is None:
            target_python = TargetPython()

        candidate_prefs = CandidatePreferences(
            prefer_binary=selection_prefs.prefer_binary,
            allow_all_prereleases=selection_prefs.allow_all_prereleases,
        )

        return cls(
            candidate_prefs=candidate_prefs,
            link_collector=link_collector,
            target_python=target_python,
            allow_yanked=selection_prefs.allow_yanked,
            format_control=selection_prefs.format_control,
            ignore_requires_python=selection_prefs.ignore_requires_python,
            use_deprecated_html5lib=use_deprecated_html5lib,
        )

    @property
    def target_python(self) -> TargetPython:
        return self._target_python

    @property
    def search_scope(self) -> SearchScope:
        return self._link_collector.search_scope

    @search_scope.setter
    def search_scope(self, search_scope: SearchScope) -> None:
        self._link_collector.search_scope = search_scope

    @property
    def find_links(self) -> List[str]:
        return self._link_collector.find_links

    @property
    def index_urls(self) -> List[str]:
        return self.search_scope.index_urls

    @property
    def trusted_hosts(self) -> Iterable[str]:
        for host_port in self._link_collector.session.pip_trusted_origins:
            yield build_netloc(*host_port)

    @property
    def allow_all_prereleases(self) -> bool:
        return self._candidate_prefs.allow_all_prereleases

    def set_allow_all_prereleases(self) -> None:
        self._candidate_prefs.allow_all_prereleases = True

    @property
    def prefer_binary(self) -> bool:
        return self._candidate_prefs.prefer_binary

    def set_prefer_binary(self) -> None:
        self._candidate_prefs.prefer_binary = True

    def make_link_evaluator(self, project_name: str) -> LinkEvaluator:
        canonical_name = canonicalize_name(project_name)
        formats = self.format_control.get_allowed_formats(canonical_name)

        return LinkEvaluator(
            project_name=project_name,
            canonical_name=canonical_name,
            formats=formats,
            target_python=self._target_python,
            allow_yanked=self._allow_yanked,
            ignore_requires_python=self._ignore_requires_python,
        )

    def _sort_links(self, links: Iterable[Link]) -> List[Link]:
        """
        Returns elements of links in order, non-egg links first, egg links
        second, while eliminating duplicates
        """
        eggs, no_eggs = [], []
        seen: Set[Link] = set()
        for link in links:
            if link not in seen:
                seen.add(link)
                if link.egg_fragment:
                    eggs.append(link)
                else:
                    no_eggs.append(link)
        return no_eggs + eggs

    def _log_skipped_link(self, link: Link, reason: str) -> None:
        if link not in self._logged_links:
            # Put the link at the end so the reason is more visible and because
            # the link string is usually very long.
            logger.debug("Skipping link: %s: %s", reason, link)
            self._logged_links.add(link)

    def get_install_candidate(
        self, link_evaluator: LinkEvaluator, link: Link
    ) -> Optional[InstallationCandidate]:
        """
        If the link is a candidate for install, convert it to an
        InstallationCandidate and return it. Otherwise, return None.
        """
        is_candidate, result = link_evaluator.evaluate_link(link)
        if not is_candidate:
            if result:
                self._log_skipped_link(link, reason=result)
            return None

        return InstallationCandidate(
            name=link_evaluator.project_name,
            link=link,
            version=result,
        )

    def evaluate_links(
        self, link_evaluator: LinkEvaluator, links: Iterable[Link]
    ) -> List[InstallationCandidate]:
        """
        Convert links that are candidates to InstallationCandidate objects.
        """
        candidates = []
        for link in self._sort_links(links):
            candidate = self.get_install_candidate(link_evaluator, link)
            if candidate is not None:
                candidates.append(candidate)

        return candidates

    def process_project_url(
        self, project_url: Link, link_evaluator: LinkEvaluator
    ) -> List[InstallationCandidate]:
        logger.debug(
            "Fetching project page and analyzing links: %s",
            project_url,
        )
        html_page = self._link_collector.fetch_page(project_url)
        if html_page is None:
            return []

        page_links = list(parse_links(html_page, self._use_deprecated_html5lib))

        with indent_log():
            package_links = self.evaluate_links(
                link_evaluator,
                links=page_links,
            )

        return package_links

    @functools.lru_cache(maxsize=None)
    def find_all_candidates(self, project_name: str) -> List[InstallationCandidate]:
        """Find all available InstallationCandidate for project_name

        This checks index_urls and find_links.
        All versions found are returned as an InstallationCandidate list.

        See LinkEvaluator.evaluate_link() for details on which files
        are accepted.
        """
        link_evaluator = self.make_link_evaluator(project_name)

        collected_sources = self._link_collector.collect_sources(
            project_name=project_name,
            candidates_from_page=functools.partial(
                self.process_project_url,
                link_evaluator=link_evaluator,
            ),
        )

        page_candidates_it = itertools.chain.from_iterable(
            source.page_candidates()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        page_candidates = list(page_candidates_it)

        file_links_it = itertools.chain.from_iterable(
            source.file_links()
            for sources in collected_sources
            for source in sources
            if source is not None
        )
        file_candidates = self.evaluate_links(
            link_evaluator,
            sorted(file_links_it, reverse=True),
        )

        if logger.isEnabledFor(logging.DEBUG) and file_candidates:
            paths = []
            for candidate in file_candidates:
                assert candidate.link.url  # we need to have a URL
                try:
                    paths.append(candidate.link.file_path)
                except Exception:
                    paths.append(candidate.link.url)  # it's not a local file

            logger.debug("Local files found: %s", ", ".join(paths))

        # This is an intentional priority ordering
        return file_candidates + page_candidates

    def make_candidate_evaluator(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> CandidateEvaluator:
        """Create a CandidateEvaluator object to use."""
        candidate_prefs = self._candidate_prefs
        return CandidateEvaluator.create(
            project_name=project_name,
            target_python=self._target_python,
            prefer_binary=candidate_prefs.prefer_binary,
            allow_all_prereleases=candidate_prefs.allow_all_prereleases,
            specifier=specifier,
            hashes=hashes,
        )

    @functools.lru_cache(maxsize=None)
    def find_best_candidate(
        self,
        project_name: str,
        specifier: Optional[specifiers.BaseSpecifier] = None,
        hashes: Optional[Hashes] = None,
    ) -> BestCandidateResult:
        """Find matches for the given project and specifier.

        :param specifier: An optional object implementing `filter`
            (e.g. `packaging.specifiers.SpecifierSet`) to filter applicable
            versions.

        :return: A `BestCandidateResult` instance.
        """
        candidates = self.find_all_candidates(project_name)
        candidate_evaluator = self.make_candidate_evaluator(
            project_name=project_name,
            specifier=specifier,
            hashes=hashes,
        )
        return candidate_evaluator.compute_best_candidate(candidates)

    def find_requirement(
        self, req: InstallRequirement, upgrade: bool
    ) -> Optional[InstallationCandidate]:
        """Try to find a Link matching req

        Expects req, an InstallRequirement and upgrade, a boolean
        Returns a InstallationCandidate if found,
        Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise
        """
        hashes = req.hashes(trust_internet=False)
        best_candidate_result = self.find_best_candidate(
            req.name,
            specifier=req.specifier,
            hashes=hashes,
        )
        best_candidate = best_candidate_result.best_candidate

        installed_version: Optional[_BaseVersion] = None
        if req.satisfied_by is not None:
            installed_version = req.satisfied_by.version

        def _format_versions(cand_iter: Iterable[InstallationCandidate]) -> str:
            # This repeated parse_version and str() conversion is needed to
            # handle different vendoring sources from pip and pkg_resources.
            # If we stop using the pkg_resources provided specifier and start
            # using our own, we can drop the cast to str().
            return (
                ", ".join(
                    sorted(
                        {str(c.version) for c in cand_iter},
                        key=parse_version,
                    )
                )
                or "none"
            )

        if installed_version is None and best_candidate is None:
            logger.critical(
                "Could not find a version that satisfies the requirement %s "
                "(from versions: %s)",
                req,
                _format_versions(best_candidate_result.iter_all()),
            )

            raise DistributionNotFound(
                "No matching distribution found for {}".format(req)
            )

        best_installed = False
        if installed_version and (
            best_candidate is None or best_candidate.version <= installed_version
        ):
            best_installed = True

        if not upgrade and installed_version is not None:
            if best_installed:
                logger.debug(
                    "Existing installed version (%s) is most up-to-date and "
                    "satisfies requirement",
                    installed_version,
                )
            else:
                logger.debug(
                    "Existing installed version (%s) satisfies requirement "
                    "(most up-to-date version is %s)",
                    installed_version,
                    best_candidate.version,
                )
            return None

        if best_installed:
            # We have an existing version, and its the best version
            logger.debug(
                "Installed version (%s) is most up-to-date (past versions: %s)",
                installed_version,
                _format_versions(best_candidate_result.iter_applicable()),
            )
            raise BestVersionAlreadyInstalled

        logger.debug(
            "Using version %s (newest of versions: %s)",
            best_candidate.version,
            _format_versions(best_candidate_result.iter_applicable()),
        )
        return best_candidate

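# Illustrative sketch (not part of pip's public API): wiring a finder up the
# way pip's own commands do. PipSession and LinkCollector.create() are real
# pip internals, but `options` (parsed command-line options) and the exact
# wiring shown here are assumptions for illustration only.
#
#   from pip._internal.network.session import PipSession
#
#   session = PipSession()
#   link_collector = LinkCollector.create(session, options=options)
#   finder = PackageFinder.create(
#       link_collector=link_collector,
#       selection_prefs=SelectionPreferences(allow_yanked=False),
#       use_deprecated_html5lib=False,
#   )
#   result = finder.find_best_candidate("requests", specifier=None)
#   if result.best_candidate is not None:
#       print(result.best_candidate.link.url)
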
def _find_name_version_sep(fragment: str, canonical_name: str) -> int:
    """Find the separator's index based on the package's canonical name.

    :param fragment: A <package>+<version> filename "fragment" (stem) or
        egg fragment.
    :param canonical_name: The package's canonical name.

    This function is needed since the canonicalized name does not necessarily
    have the same length as the egg info's name part. An example::

    >>> fragment = 'foo__bar-1.0'
    >>> canonical_name = 'foo-bar'
    >>> _find_name_version_sep(fragment, canonical_name)
    8
    """
    # Project name and version must be separated by one single dash. Find all
    # occurrences of dashes; if the string in front of it matches the canonical
    # name, this is the one separating the name and version parts.
    for i, c in enumerate(fragment):
        if c != "-":
            continue
        if canonicalize_name(fragment[:i]) == canonical_name:
            return i
    raise ValueError(f"{fragment} does not match {canonical_name}")


def _extract_version_from_fragment(fragment: str, canonical_name: str) -> Optional[str]:
    """Parse the version string from a <package>+<version> filename
    "fragment" (stem) or egg fragment.

    :param fragment: The string to parse. E.g. foo-2.1
    :param canonical_name: The canonicalized name of the package this
        belongs to.
    """
    try:
        version_start = _find_name_version_sep(fragment, canonical_name) + 1
    except ValueError:
        return None
    version = fragment[version_start:]
    if not version:
        return None
    return version
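# Illustrative sketch (not part of pip's source): the two helpers above split
# a "<name>-<version>" stem using the canonicalized project name.
#
#   _find_name_version_sep("foo__bar-1.0", "foo-bar")           # 8
#   _extract_version_from_fragment("foo__bar-1.0", "foo-bar")   # "1.0"
#   _extract_version_from_fragment("unrelated-1.0", "foo-bar")  # None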