diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..8435d628d206a6587e969b4bdf6d5468af82f191 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py @@ -0,0 +1,18 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +"""CacheControl import Interface. + +Make it easy to import from cachecontrol without long namespaces. +""" +__author__ = "Eric Larson" +__email__ = "eric@ionrock.org" +__version__ = "0.12.10" + +from .wrapper import CacheControl +from .adapter import CacheControlAdapter +from .controller import CacheController + +import logging +logging.getLogger(__name__).addHandler(logging.NullHandler()) diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..90b27416ba4817d2fb8004f26fcf54201cad976b Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a65c0a25bd7cbdd41606e660ce2d4aa46b745d8d Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1e6d57ccda25885e67b853ded239cd29179f7f83 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6992bafa15bac6d100ed824f7ce74851c81c747e Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0fe4ddd544c7d8d125e4348d61dff63c5e7e92d7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3ba5c8ff5d13aead462eec45baed19a5ed8fef3f Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..cdb3fbd863cd241b5b40c5a2b6f4fd5fe968294d Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0dc7b370cfdda256dcdd77277b0f220325cd2510 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b4698e5ebebcf88bd474eb0bd2002188cca8ca08 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..fb9408e2ad31ef71073c7c866121dc13e95fa972 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py new file mode 100644 index 0000000000000000000000000000000000000000..4266b5ee92a24b5e0ef65689a1b94a98bb4a9b56 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py @@ -0,0 +1,61 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import logging + +from pip._vendor import requests + +from pip._vendor.cachecontrol.adapter import CacheControlAdapter +from pip._vendor.cachecontrol.cache import DictCache +from pip._vendor.cachecontrol.controller import logger + +from argparse import ArgumentParser + + +def setup_logging(): + logger.setLevel(logging.DEBUG) + handler = logging.StreamHandler() + logger.addHandler(handler) + + +def get_session(): + adapter = CacheControlAdapter( + DictCache(), cache_etags=True, serializer=None, heuristic=None + ) + sess = requests.Session() + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + sess.cache_controller = adapter.controller + return sess + + +def get_args(): + parser = ArgumentParser() + parser.add_argument("url", help="The URL to try and cache") + return parser.parse_args() + + +def main(args=None): + args = get_args() + sess = get_session() + + # Make a request to get a response + resp = sess.get(args.url) + + # Turn on logging + setup_logging() + + # try setting the cache + sess.cache_controller.cache_response(resp.request, resp.raw) + + # Now try to get it + if sess.cache_controller.cached_request(resp.request): + print("Cached!") + else: + print("Not cached :(") + + +if __name__ == "__main__": + main() diff --git 
a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py new file mode 100644 index 0000000000000000000000000000000000000000..94c75e1a05b47922945c5233e90e9f936b108b66 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py @@ -0,0 +1,137 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import types +import functools +import zlib + +from pip._vendor.requests.adapters import HTTPAdapter + +from .controller import CacheController, PERMANENT_REDIRECT_STATUSES +from .cache import DictCache +from .filewrapper import CallbackFileWrapper + + +class CacheControlAdapter(HTTPAdapter): + invalidating_methods = {"PUT", "PATCH", "DELETE"} + + def __init__( + self, + cache=None, + cache_etags=True, + controller_class=None, + serializer=None, + heuristic=None, + cacheable_methods=None, + *args, + **kw + ): + super(CacheControlAdapter, self).__init__(*args, **kw) + self.cache = DictCache() if cache is None else cache + self.heuristic = heuristic + self.cacheable_methods = cacheable_methods or ("GET",) + + controller_factory = controller_class or CacheController + self.controller = controller_factory( + self.cache, cache_etags=cache_etags, serializer=serializer + ) + + def send(self, request, cacheable_methods=None, **kw): + """ + Send a request. Use the request information to see if it + exists in the cache and cache the response if we need to and can. + """ + cacheable = cacheable_methods or self.cacheable_methods + if request.method in cacheable: + try: + cached_response = self.controller.cached_request(request) + except zlib.error: + cached_response = None + if cached_response: + return self.build_response(request, cached_response, from_cache=True) + + # check for etags and add headers if appropriate + request.headers.update(self.controller.conditional_headers(request)) + + resp = super(CacheControlAdapter, self).send(request, **kw) + + return resp + + def build_response( + self, request, response, from_cache=False, cacheable_methods=None + ): + """ + Build a response by making a request or using the cache. + + This will end up calling send and returning a potentially + cached response + """ + cacheable = cacheable_methods or self.cacheable_methods + if not from_cache and request.method in cacheable: + # Check for any heuristics that might update headers + # before trying to cache. + if self.heuristic: + response = self.heuristic.apply(response) + + # apply any expiration heuristics + if response.status == 304: + # We must have sent an ETag request. This could mean + # that we've been expired already or that we simply + # have an etag. In either case, we want to try and + # update the cache if that is the case. + cached_response = self.controller.update_cached_response( + request, response + ) + + if cached_response is not response: + from_cache = True + + # We are done with the server response, read a + # possible response body (compliant servers will + # not return one, but we cannot be 100% sure) and + # release the connection back to the pool. + response.read(decode_content=False) + response.release_conn() + + response = cached_response + + # We always cache the 301 responses + elif int(response.status) in PERMANENT_REDIRECT_STATUSES: + self.controller.cache_response(request, response) + else: + # Wrap the response file with a wrapper that will cache the + # response when the stream has been consumed. 
+ response._fp = CallbackFileWrapper( + response._fp, + functools.partial( + self.controller.cache_response, request, response + ), + ) + if response.chunked: + super_update_chunk_length = response._update_chunk_length + + def _update_chunk_length(self): + super_update_chunk_length() + if self.chunk_left == 0: + self._fp._close() + + response._update_chunk_length = types.MethodType( + _update_chunk_length, response + ) + + resp = super(CacheControlAdapter, self).build_response(request, response) + + # See if we should invalidate the cache. + if request.method in self.invalidating_methods and resp.ok: + cache_url = self.controller.cache_url(request.url) + self.cache.delete(cache_url) + + # Give the request a from_cache attr to let people use it + resp.from_cache = from_cache + + return resp + + def close(self): + self.cache.close() + super(CacheControlAdapter, self).close() diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py new file mode 100644 index 0000000000000000000000000000000000000000..44e4309d20dfe3190988905258a4159411a662b3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py @@ -0,0 +1,43 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +""" +The cache object API for implementing caches. The default is a thread +safe in-memory dictionary. +""" +from threading import Lock + + +class BaseCache(object): + + def get(self, key): + raise NotImplementedError() + + def set(self, key, value, expires=None): + raise NotImplementedError() + + def delete(self, key): + raise NotImplementedError() + + def close(self): + pass + + +class DictCache(BaseCache): + + def __init__(self, init_dict=None): + self.lock = Lock() + self.data = init_dict or {} + + def get(self, key): + return self.data.get(key, None) + + def set(self, key, value, expires=None): + with self.lock: + self.data.update({key: value}) + + def delete(self, key): + with self.lock: + if key in self.data: + self.data.pop(key) diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..44becd68439b37acb37166ff4bcfed11ab7f2b48 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py @@ -0,0 +1,6 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from .file_cache import FileCache # noqa +from .redis_cache import RedisCache # noqa diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0754e9fa5b31b175049fd384383669176c3c543b Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py new file mode 100644 index 0000000000000000000000000000000000000000..6cd1106f880092f55569ff46c6dc8100e7b5ffb9 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py @@ -0,0 +1,150 @@ +# 
SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import hashlib +import os +from textwrap import dedent + +from ..cache import BaseCache +from ..controller import CacheController + +try: + FileNotFoundError +except NameError: + # py2.X + FileNotFoundError = (IOError, OSError) + + +def _secure_open_write(filename, fmode): + # We only want to write to this file, so open it in write only mode + flags = os.O_WRONLY + + # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only + # will open *new* files. + # We specify this because we want to ensure that the mode we pass is the + # mode of the file. + flags |= os.O_CREAT | os.O_EXCL + + # Do not follow symlinks to prevent someone from making a symlink that + # we follow and insecurely open a cache file. + if hasattr(os, "O_NOFOLLOW"): + flags |= os.O_NOFOLLOW + + # On Windows we'll mark this file as binary + if hasattr(os, "O_BINARY"): + flags |= os.O_BINARY + + # Before we open our file, we want to delete any existing file that is + # there + try: + os.remove(filename) + except (IOError, OSError): + # The file must not exist already, so we can just skip ahead to opening + pass + + # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a + # race condition happens between the os.remove and this line, that an + # error will be raised. Because we utilize a lockfile this should only + # happen if someone is attempting to attack us. + fd = os.open(filename, flags, fmode) + try: + return os.fdopen(fd, "wb") + + except: + # An error occurred wrapping our FD in a file object + os.close(fd) + raise + + +class FileCache(BaseCache): + + def __init__( + self, + directory, + forever=False, + filemode=0o0600, + dirmode=0o0700, + use_dir_lock=None, + lock_class=None, + ): + + if use_dir_lock is not None and lock_class is not None: + raise ValueError("Cannot use use_dir_lock and lock_class together") + + try: + from lockfile import LockFile + from lockfile.mkdirlockfile import MkdirLockFile + except ImportError: + notice = dedent( + """ + NOTE: In order to use the FileCache you must have + lockfile installed. You can install it via pip: + pip install lockfile + """ + ) + raise ImportError(notice) + + else: + if use_dir_lock: + lock_class = MkdirLockFile + + elif lock_class is None: + lock_class = LockFile + + self.directory = directory + self.forever = forever + self.filemode = filemode + self.dirmode = dirmode + self.lock_class = lock_class + + @staticmethod + def encode(x): + return hashlib.sha224(x.encode()).hexdigest() + + def _fn(self, name): + # NOTE: This method should not change as some may depend on it. + # See: https://github.com/ionrock/cachecontrol/issues/63 + hashed = self.encode(name) + parts = list(hashed[:5]) + [hashed] + return os.path.join(self.directory, *parts) + + def get(self, key): + name = self._fn(key) + try: + with open(name, "rb") as fh: + return fh.read() + + except FileNotFoundError: + return None + + def set(self, key, value, expires=None): + name = self._fn(key) + + # Make sure the directory exists + try: + os.makedirs(os.path.dirname(name), self.dirmode) + except (IOError, OSError): + pass + + with self.lock_class(name) as lock: + # Write our actual file + with _secure_open_write(lock.path, self.filemode) as fh: + fh.write(value) + + def delete(self, key): + name = self._fn(key) + if not self.forever: + try: + os.remove(name) + except FileNotFoundError: + pass + + +def url_to_file_path(url, filecache): + """Return the file cache path based on the URL. 
+ + This does not ensure the file exists! + """ + key = CacheController.cache_url(url) + return filecache._fn(key) diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py new file mode 100644 index 0000000000000000000000000000000000000000..720b507c523ddb54fef42245f1ace7610f2cf8b5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py @@ -0,0 +1,37 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import division + +from datetime import datetime +from pip._vendor.cachecontrol.cache import BaseCache + + +class RedisCache(BaseCache): + + def __init__(self, conn): + self.conn = conn + + def get(self, key): + return self.conn.get(key) + + def set(self, key, value, expires=None): + if not expires: + self.conn.set(key, value) + else: + expires = expires - datetime.utcnow() + self.conn.setex(key, int(expires.total_seconds()), value) + + def delete(self, key): + self.conn.delete(key) + + def clear(self): + """Helper for clearing all the keys in a database. Use with + caution!""" + for key in self.conn.keys(): + self.conn.delete(key) + + def close(self): + """Redis uses connection pooling, no need to close the connection.""" + pass diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py new file mode 100644 index 0000000000000000000000000000000000000000..ccec9379dba2b03015ce123dd04a042f32431235 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py @@ -0,0 +1,32 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +try: + from urllib.parse import urljoin +except ImportError: + from urlparse import urljoin + + +try: + import cPickle as pickle +except ImportError: + import pickle + +# Handle the case where the requests module has been patched to not have +# urllib3 bundled as part of its source. +try: + from pip._vendor.requests.packages.urllib3.response import HTTPResponse +except ImportError: + from pip._vendor.urllib3.response import HTTPResponse + +try: + from pip._vendor.requests.packages.urllib3.util import is_fp_closed +except ImportError: + from pip._vendor.urllib3.util import is_fp_closed + +# Replicate some six behaviour +try: + text_type = unicode +except NameError: + text_type = str diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py new file mode 100644 index 0000000000000000000000000000000000000000..d7e73802818c336c4316ee1fc596efa54763bb4b --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py @@ -0,0 +1,415 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +""" +The httplib2 algorithms ported for use with requests. +""" +import logging +import re +import calendar +import time +from email.utils import parsedate_tz + +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .cache import DictCache +from .serialize import Serializer + + +logger = logging.getLogger(__name__) + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +PERMANENT_REDIRECT_STATUSES = (301, 308) + + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. 
+
+    (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+
+class CacheController(object):
+    """An interface to see if a request should be cached or not.
+    """
+
+    def __init__(
+        self, cache=None, cache_etags=True, serializer=None, status_codes=None
+    ):
+        self.cache = DictCache() if cache is None else cache
+        self.cache_etags = cache_etags
+        self.serializer = serializer or Serializer()
+        self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
+
+    @classmethod
+    def _urlnorm(cls, uri):
+        """Normalize the URL to create a safe key for the cache"""
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+        if not scheme or not authority:
+            raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
+
+        scheme = scheme.lower()
+        authority = authority.lower()
+
+        if not path:
+            path = "/"
+
+        # Could do syntax based normalization of the URI before
+        # computing the digest. See Section 6.2.2 of Std 66.
+        request_uri = query and "?".join([path, query]) or path
+        defrag_uri = scheme + "://" + authority + request_uri
+
+        return defrag_uri
+
+    @classmethod
+    def cache_url(cls, uri):
+        return cls._urlnorm(uri)
+
+    def parse_cache_control(self, headers):
+        known_directives = {
+            # https://tools.ietf.org/html/rfc7234#section-5.2
+            "max-age": (int, True),
+            "max-stale": (int, False),
+            "min-fresh": (int, True),
+            "no-cache": (None, False),
+            "no-store": (None, False),
+            "no-transform": (None, False),
+            "only-if-cached": (None, False),
+            "must-revalidate": (None, False),
+            "public": (None, False),
+            "private": (None, False),
+            "proxy-revalidate": (None, False),
+            "s-maxage": (int, True),
+        }
+
+        cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
+
+        retval = {}
+
+        for cc_directive in cc_headers.split(","):
+            if not cc_directive.strip():
+                continue
+
+            parts = cc_directive.split("=", 1)
+            directive = parts[0].strip()
+
+            try:
+                typ, required = known_directives[directive]
+            except KeyError:
+                logger.debug("Ignoring unknown cache-control directive: %s", directive)
+                continue
+
+            if not typ or not required:
+                retval[directive] = None
+            if typ:
+                try:
+                    retval[directive] = typ(parts[1].strip())
+                except IndexError:
+                    if required:
+                        logger.debug(
+                            "Missing value for cache-control " "directive: %s",
+                            directive,
+                        )
+                except ValueError:
+                    logger.debug(
+                        "Invalid value for cache-control directive " "%s, must be %s",
+                        directive,
+                        typ.__name__,
+                    )
+
+        return retval
+
+    def cached_request(self, request):
+        """
+        Return a cached response if it exists in the cache, otherwise
+        return False.
+        """
+        cache_url = self.cache_url(request.url)
+        logger.debug('Looking up "%s" in the cache', cache_url)
+        cc = self.parse_cache_control(request.headers)
+
+        # Bail out if the request insists on fresh data
+        if "no-cache" in cc:
+            logger.debug('Request header has "no-cache", cache bypassed')
+            return False
+
+        if "max-age" in cc and cc["max-age"] == 0:
+            logger.debug('Request header has "max-age" as 0, cache bypassed')
+            return False
+
+        # Request allows serving from the cache, let's see if we find something
+        cache_data = self.cache.get(cache_url)
+        if cache_data is None:
+            logger.debug("No cache entry available")
+            return False
+
+        # Check whether it can be deserialized
+        resp = self.serializer.loads(request, cache_data)
+        if not resp:
+            logger.warning("Cache entry deserialization failed, entry ignored")
+            return False
+
+        # If we have a cached permanent redirect, return it immediately. We
+        # don't need to test our response for other headers b/c it is
+        # intrinsically "cacheable" as it is Permanent.
+        #
+        # See:
+        #   https://tools.ietf.org/html/rfc7231#section-6.4.2
+        #
+        # Client can try to refresh the value by repeating the request
+        # with cache busting headers as usual (ie no-cache).
+        if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
+            msg = (
+                "Returning cached permanent redirect response "
+                "(ignoring date and etag information)"
+            )
+            logger.debug(msg)
+            return resp
+
+        headers = CaseInsensitiveDict(resp.headers)
+        if not headers or "date" not in headers:
+            if "etag" not in headers:
+                # Without date or etag, the cached response can never be used
+                # and should be deleted.
+                logger.debug("Purging cached response: no date or etag")
+                self.cache.delete(cache_url)
+            logger.debug("Ignoring cached response: no date")
+            return False
+
+        now = time.time()
+        date = calendar.timegm(parsedate_tz(headers["date"]))
+        current_age = max(0, now - date)
+        logger.debug("Current age based on date: %i", current_age)
+
+        # TODO: There is an assumption that the result will be a
+        #       urllib3 response object. This may not be best since we
+        #       could probably avoid instantiating or constructing the
+        #       response until we know we need it.
+        resp_cc = self.parse_cache_control(headers)
+
+        # determine freshness
+        freshness_lifetime = 0
+
+        # Check the max-age pragma in the cache control header
+        if "max-age" in resp_cc:
+            freshness_lifetime = resp_cc["max-age"]
+            logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
+
+        # If there isn't a max-age, check for an expires header
+        elif "expires" in headers:
+            expires = parsedate_tz(headers["expires"])
+            if expires is not None:
+                expire_time = calendar.timegm(expires) - date
+                freshness_lifetime = max(0, expire_time)
+                logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
+
+        # Determine if we are setting freshness limit in the
+        # request. Note, this overrides what was in the response.
+        if "max-age" in cc:
+            freshness_lifetime = cc["max-age"]
+            logger.debug(
+                "Freshness lifetime from request max-age: %i", freshness_lifetime
+            )
+
+        if "min-fresh" in cc:
+            min_fresh = cc["min-fresh"]
+            # adjust our current age by our min fresh
+            current_age += min_fresh
+            logger.debug("Adjusted current age from min-fresh: %i", current_age)
+
+        # Return entry if it is fresh enough
+        if freshness_lifetime > current_age:
+            logger.debug('The response is "fresh", returning cached response')
+            logger.debug("%i > %i", freshness_lifetime, current_age)
+            return resp
+
+        # we're not fresh. If we don't have an Etag, clear it out
+        if "etag" not in headers:
+            logger.debug('The cached response is "stale" with no etag, purging')
+            self.cache.delete(cache_url)
+
+        # return the original handler
+        return False
+
+    def conditional_headers(self, request):
+        cache_url = self.cache_url(request.url)
+        resp = self.serializer.loads(request, self.cache.get(cache_url))
+        new_headers = {}
+
+        if resp:
+            headers = CaseInsensitiveDict(resp.headers)
+
+            if "etag" in headers:
+                new_headers["If-None-Match"] = headers["ETag"]
+
+            if "last-modified" in headers:
+                new_headers["If-Modified-Since"] = headers["Last-Modified"]
+
+        return new_headers
+
+    def cache_response(self, request, response, body=None, status_codes=None):
+        """
+        Algorithm for caching requests.
+
+        This assumes a requests Response object.
+        """
+        # From httplib2: Don't cache 206's since we aren't going to
+        #                handle byte range requests
+        cacheable_status_codes = status_codes or self.cacheable_status_codes
+        if response.status not in cacheable_status_codes:
+            logger.debug(
+                "Status code %s not in %s", response.status, cacheable_status_codes
+            )
+            return
+
+        response_headers = CaseInsensitiveDict(response.headers)
+
+        if "date" in response_headers:
+            date = calendar.timegm(parsedate_tz(response_headers["date"]))
+        else:
+            date = 0
+
+        # If we've been given a body and the response has a Content-Length
+        # that is valid, then we can check to see if the body we've been
+        # given matches the expected size, and if it doesn't we'll just
+        # skip trying to cache it.
+        if (
+            body is not None
+            and "content-length" in response_headers
+            and response_headers["content-length"].isdigit()
+            and int(response_headers["content-length"]) != len(body)
+        ):
+            return
+
+        cc_req = self.parse_cache_control(request.headers)
+        cc = self.parse_cache_control(response_headers)
+
+        cache_url = self.cache_url(request.url)
+        logger.debug('Updating cache with response from "%s"', cache_url)
+
+        # Delete it from the cache if we happen to have it stored there
+        no_store = False
+        if "no-store" in cc:
+            no_store = True
+            logger.debug('Response header has "no-store"')
+        if "no-store" in cc_req:
+            no_store = True
+            logger.debug('Request header has "no-store"')
+        if no_store and self.cache.get(cache_url):
+            logger.debug('Purging existing cache entry to honor "no-store"')
+            self.cache.delete(cache_url)
+        if no_store:
+            return
+
+        # https://tools.ietf.org/html/rfc7234#section-4.1:
+        # A Vary header field-value of "*" always fails to match.
+        # Storing such a response leads to a deserialization warning
+        # during cache lookup and is not allowed to ever be served,
+        # so storing it can be avoided.
+        if "*" in response_headers.get("vary", ""):
+            logger.debug('Response header has "Vary: *"')
+            return
+
+        # If we've been given an etag, then keep the response
+        if self.cache_etags and "etag" in response_headers:
+            expires_time = 0
+            if response_headers.get("expires"):
+                expires = parsedate_tz(response_headers["expires"])
+                if expires is not None:
+                    expires_time = calendar.timegm(expires) - date
+
+            expires_time = max(expires_time, 14 * 86400)
+
+            logger.debug("etag object cached for {0} seconds".format(expires_time))
+            logger.debug("Caching due to etag")
+            self.cache.set(
+                cache_url,
+                self.serializer.dumps(request, response, body),
+                expires=expires_time,
+            )
+
+        # Add to the cache any permanent redirects. We do this before looking
+        # at the Date headers.
+        elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
+            logger.debug("Caching permanent redirect")
+            self.cache.set(cache_url, self.serializer.dumps(request, response, b""))
+
+        # Add to the cache if the response headers demand it. If there
+        # is no date header then we can't do anything about expiring
+        # the cache.
+        elif "date" in response_headers:
+            date = calendar.timegm(parsedate_tz(response_headers["date"]))
+            # cache when there is a max-age > 0
+            if "max-age" in cc and cc["max-age"] > 0:
+                logger.debug("Caching b/c date exists and max-age > 0")
+                expires_time = cc["max-age"]
+                self.cache.set(
+                    cache_url,
+                    self.serializer.dumps(request, response, body),
+                    expires=expires_time,
+                )
+
+            # If the request can expire, it means we should cache it
+            # in the meantime.
+            elif "expires" in response_headers:
+                if response_headers["expires"]:
+                    expires = parsedate_tz(response_headers["expires"])
+                    if expires is not None:
+                        expires_time = calendar.timegm(expires) - date
+                    else:
+                        expires_time = None
+
+                    logger.debug(
+                        "Caching b/c of expires header. expires in {0} seconds".format(
+                            expires_time
+                        )
+                    )
+                    self.cache.set(
+                        cache_url,
+                        self.serializer.dumps(request, response, body=body),
+                        expires=expires_time,
+                    )
+
+    def update_cached_response(self, request, response):
+        """On a 304 we will get a new set of headers that we want to
+        update our cached value with, assuming we have one.
+
+        This should only ever be called when we've sent an ETag and
+        gotten a 304 as the response.
+        """
+        cache_url = self.cache_url(request.url)
+
+        cached_response = self.serializer.loads(request, self.cache.get(cache_url))
+
+        if not cached_response:
+            # we didn't have a cached response
+            return response
+
+        # Let's update our headers with the headers from the new response:
+        # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
+        #
+        # The server isn't supposed to send headers that would make
+        # the cached body invalid. But... just in case, we'll be sure
+        # to strip out ones we know that might be problematic due to
+        # typical assumptions.
+        excluded_headers = ["content-length"]
+
+        cached_response.headers.update(
+            dict(
+                (k, v)
+                for k, v in response.headers.items()
+                if k.lower() not in excluded_headers
+            )
+        )
+
+        # we want a 200 b/c we have content via the cache
+        cached_response.status = 200
+
+        # update our cache
+        self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
+
+        return cached_response
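
Aside: the freshness check in cached_request() above reduces to a little arithmetic over the Date, max-age, and Expires headers. A minimal standalone sketch of that calculation, using hypothetical header values (not part of the vendored code):

    import calendar
    import time
    from email.utils import parsedate_tz

    # Hypothetical cached-response headers, for illustration only.
    headers = {
        "date": "Tue, 15 Nov 1994 08:12:31 GMT",
        "cache-control": "max-age=3600",
    }

    # current_age: seconds since the origin server generated the response.
    date = calendar.timegm(parsedate_tz(headers["date"]))
    current_age = max(0, time.time() - date)

    # freshness_lifetime: the response's max-age wins over Expires, and a
    # max-age on the *request* would override both (see cached_request).
    freshness_lifetime = int(headers["cache-control"].split("=", 1)[1])

    print("fresh" if freshness_lifetime > current_age else "stale")

With a 1994 Date header this prints "stale", which is exactly the branch where the controller falls back to its ETag handling.
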
diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py
new file mode 100644
index 0000000000000000000000000000000000000000..f5ed5f6f6ec0eae90a9f48753622b2b5ee5d4a4f
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py
@@ -0,0 +1,111 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+from tempfile import NamedTemporaryFile
+import mmap
+
+
+class CallbackFileWrapper(object):
+    """
+    Small wrapper around a fp object which will tee everything read into a
+    buffer, and when that file is closed it will execute a callback with the
+    contents of that buffer.
+
+    All attributes are proxied to the underlying file object.
+
+    This class uses members with a double underscore (__) leading prefix so as
+    not to accidentally shadow an attribute.
+
+    The data is stored in a temporary file until it is all available. As long
+    as the temporary files directory is disk-based (sometimes it's a
+    memory-backed ``tmpfs`` on Linux), data will be unloaded to disk if memory
+    pressure is high. For small files the disk usually won't be used at all,
+    it'll all be in the filesystem memory cache, so there should be no
+    performance impact.
+    """
+
+    def __init__(self, fp, callback):
+        self.__buf = NamedTemporaryFile("rb+", delete=True)
+        self.__fp = fp
+        self.__callback = callback
+
+    def __getattr__(self, name):
+        # The vagaries of garbage collection mean that self.__fp is
+        # not always set. Using __getattribute__ and the private
+        # name[0] allows looking up the attribute value and raising an
+        # AttributeError when it doesn't exist. This stops things from
+        # infinitely recursing calls to getattr in the case where
+        # self.__fp hasn't been set.
+        #
+        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
+        fp = self.__getattribute__("_CallbackFileWrapper__fp")
+        return getattr(fp, name)
+
+    def __is_fp_closed(self):
+        try:
+            return self.__fp.fp is None
+
+        except AttributeError:
+            pass
+
+        try:
+            return self.__fp.closed
+
+        except AttributeError:
+            pass
+
+        # We just don't cache it then.
+        # TODO: Add some logging here...
+        return False
+
+    def _close(self):
+        if self.__callback:
+            if self.__buf.tell() == 0:
+                # Empty file:
+                result = b""
+            else:
+                # Return the data without actually loading it into memory,
+                # relying on Python's buffer API and mmap(). mmap() just gives
+                # a view directly into the filesystem's memory cache, so it
+                # doesn't result in duplicate memory use.
+                self.__buf.seek(0, 0)
+                result = memoryview(
+                    mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
+                )
+            self.__callback(result)
+
+        # We assign this to None here, because otherwise we can get into
+        # really tricky problems where the CPython interpreter deadlocks
+        # because the callback is holding a reference to something which
+        # has a __del__ method. Setting this to None breaks the cycle
+        # and allows the garbage collector to do its thing normally.
+        self.__callback = None
+
+        # Closing the temporary file releases memory and frees disk space.
+        # Important when caching big files.
+        self.__buf.close()
+
+    def read(self, amt=None):
+        data = self.__fp.read(amt)
+        if data:
+            # We may be dealing with b'', a sign that things are over:
+            # it's passed e.g. after we've already closed self.__buf.
+            self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
+
+    def _safe_read(self, amt):
+        data = self.__fp._safe_read(amt)
+        if amt == 2 and data == b"\r\n":
+            # urllib executes this read to toss the CRLF at the end
+            # of the chunk.
+            return data
+
+        self.__buf.write(data)
+        if self.__is_fp_closed():
+            self._close()
+
+        return data
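
Aside: the tee-and-callback behaviour of CallbackFileWrapper can be exercised without any HTTP machinery. A toy sketch, where OneShot is a hypothetical stand-in stream that reports itself closed once drained, which is what trips the wrapper's closed-detection:

    import io

    from pip._vendor.cachecontrol.filewrapper import CallbackFileWrapper

    class OneShot(io.BytesIO):
        # Toy stream that pretends to be closed once fully drained, so
        # the wrapper's __is_fp_closed() check fires (illustration only).
        @property
        def closed(self):
            return self.tell() == len(self.getvalue())

    wrapped = CallbackFileWrapper(
        OneShot(b"hello world"),
        lambda buf: print("callback got %d bytes" % len(buf)),
    )
    while wrapped.read(4):
        pass  # prints "callback got 11 bytes" on the read that drains it

The callback receives a memoryview over an mmap of the temporary buffer, matching the zero-copy note in _close() above.
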
diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py
new file mode 100644
index 0000000000000000000000000000000000000000..ebe4a96f589474f6f441858de2bb961c5e473c6d
--- /dev/null
+++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py
@@ -0,0 +1,139 @@
+# SPDX-FileCopyrightText: 2015 Eric Larson
+#
+# SPDX-License-Identifier: Apache-2.0
+
+import calendar
+import time
+
+from email.utils import formatdate, parsedate, parsedate_tz
+
+from datetime import datetime, timedelta
+
+TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
+
+
+def expire_after(delta, date=None):
+    date = date or datetime.utcnow()
+    return date + delta
+
+
+def datetime_to_header(dt):
+    return formatdate(calendar.timegm(dt.timetuple()))
+
+
+class BaseHeuristic(object):
+
+    def warning(self, response):
+        """
+        Return a valid 1xx warning header value describing the cache
+        adjustments.
+
+        The response is provided to allow warnings like 113
+        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
+        to explicitly say the response is over 24 hours old.
+        """
+        return '110 - "Response is Stale"'
+
+    def update_headers(self, response):
+        """Update the response headers with any new headers.
+
+        NOTE: This SHOULD always include some Warning header to
+              signify that the response was cached by the client, not
+              by way of the provided headers.
+        """
+        return {}
+
+    def apply(self, response):
+        updated_headers = self.update_headers(response)
+
+        if updated_headers:
+            response.headers.update(updated_headers)
+            warning_header_value = self.warning(response)
+            if warning_header_value is not None:
+                response.headers.update({"Warning": warning_header_value})
+
+        return response
+
+
+class OneDayCache(BaseHeuristic):
+    """
+    Cache the response by providing an expires 1 day in the
+    future.
+    """
+
+    def update_headers(self, response):
+        headers = {}
+
+        if "expires" not in response.headers:
+            date = parsedate(response.headers["date"])
+            expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
+            headers["expires"] = datetime_to_header(expires)
+            headers["cache-control"] = "public"
+        return headers
+
+
+class ExpiresAfter(BaseHeuristic):
+    """
+    Cache **all** requests for a defined time period.
+    """
+
+    def __init__(self, **kw):
+        self.delta = timedelta(**kw)
+
+    def update_headers(self, response):
+        expires = expire_after(self.delta)
+        return {"expires": datetime_to_header(expires), "cache-control": "public"}
+
+    def warning(self, response):
+        tmpl = "110 - Automatically cached for %s. Response might be stale"
+        return tmpl % self.delta
+
+
+class LastModified(BaseHeuristic):
+    """
+    If there is no Expires header already, fall back on Last-Modified
+    using the heuristic from
+    http://tools.ietf.org/html/rfc7234#section-4.2.2
+    to calculate a reasonable value.
+
+    Firefox also does something like this per
+    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
+    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
+    Unlike Mozilla, we limit this to 24 hours.
+    """
+    cacheable_by_default_statuses = {
+        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
+    }
+
+    def update_headers(self, resp):
+        headers = resp.headers
+
+        if "expires" in headers:
+            return {}
+
+        if "cache-control" in headers and headers["cache-control"] != "public":
+            return {}
+
+        if resp.status not in self.cacheable_by_default_statuses:
+            return {}
+
+        if "date" not in headers or "last-modified" not in headers:
+            return {}
+
+        date = calendar.timegm(parsedate_tz(headers["date"]))
+        last_modified = parsedate(headers["last-modified"])
+        if date is None or last_modified is None:
+            return {}
+
+        now = time.time()
+        current_age = max(0, now - date)
+        delta = date - calendar.timegm(last_modified)
+        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
+        if freshness_lifetime <= current_age:
+            return {}
+
+        expires = date + freshness_lifetime
+        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
+
+    def warning(self, resp):
+        return None
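
Aside: heuristics like ExpiresAfter above plug into the adapter or the CacheControl wrapper via the heuristic= parameter, and apply() rewrites each cacheable response's headers before it is stored. A minimal sketch using the vendored modules shown in this diff (the URL is a placeholder):

    from pip._vendor import requests
    from pip._vendor.cachecontrol import CacheControl
    from pip._vendor.cachecontrol.heuristics import ExpiresAfter

    # Cache everything for one day, regardless of what the server sends.
    sess = CacheControl(requests.Session(), heuristic=ExpiresAfter(days=1))

    # ExpiresAfter.update_headers() injects Expires/Cache-Control before
    # caching, and its warning() adds a "110 - ..." Warning header.
    resp = sess.get("https://example.com/")  # placeholder URL
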
+ """ + cacheable_by_default_statuses = { + 200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501 + } + + def update_headers(self, resp): + headers = resp.headers + + if "expires" in headers: + return {} + + if "cache-control" in headers and headers["cache-control"] != "public": + return {} + + if resp.status not in self.cacheable_by_default_statuses: + return {} + + if "date" not in headers or "last-modified" not in headers: + return {} + + date = calendar.timegm(parsedate_tz(headers["date"])) + last_modified = parsedate(headers["last-modified"]) + if date is None or last_modified is None: + return {} + + now = time.time() + current_age = max(0, now - date) + delta = date - calendar.timegm(last_modified) + freshness_lifetime = max(0, min(delta / 10, 24 * 3600)) + if freshness_lifetime <= current_age: + return {} + + expires = date + freshness_lifetime + return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))} + + def warning(self, resp): + return None diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py new file mode 100644 index 0000000000000000000000000000000000000000..b075df1868207d2088fab2c60ef85da5508f0a3d --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py @@ -0,0 +1,186 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +import base64 +import io +import json +import zlib + +from pip._vendor import msgpack +from pip._vendor.requests.structures import CaseInsensitiveDict + +from .compat import HTTPResponse, pickle, text_type + + +def _b64_decode_bytes(b): + return base64.b64decode(b.encode("ascii")) + + +def _b64_decode_str(s): + return _b64_decode_bytes(s).decode("utf8") + + +_default_body_read = object() + + +class Serializer(object): + def dumps(self, request, response, body=None): + response_headers = CaseInsensitiveDict(response.headers) + + if body is None: + # When a body isn't passed in, we'll read the response. We + # also update the response with a new file handler to be + # sure it acts as though it was never read. + body = response.read(decode_content=False) + response._fp = io.BytesIO(body) + + # NOTE: This is all a bit weird, but it's really important that on + # Python 2.x these objects are unicode and not str, even when + # they contain only ascii. The problem here is that msgpack + # understands the difference between unicode and bytes and we + # have it set to differentiate between them, however Python 2 + # doesn't know the difference. Forcing these to unicode will be + # enough to have msgpack know the difference. 
+ data = { + u"response": { + u"body": body, + u"headers": dict( + (text_type(k), text_type(v)) for k, v in response.headers.items() + ), + u"status": response.status, + u"version": response.version, + u"reason": text_type(response.reason), + u"strict": response.strict, + u"decode_content": response.decode_content, + } + } + + # Construct our vary headers + data[u"vary"] = {} + if u"vary" in response_headers: + varied_headers = response_headers[u"vary"].split(",") + for header in varied_headers: + header = text_type(header).strip() + header_value = request.headers.get(header, None) + if header_value is not None: + header_value = text_type(header_value) + data[u"vary"][header] = header_value + + return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)]) + + def loads(self, request, data): + # Short circuit if we've been given an empty set of data + if not data: + return + + # Determine what version of the serializer the data was serialized + # with + try: + ver, data = data.split(b",", 1) + except ValueError: + ver = b"cc=0" + + # Make sure that our "ver" is actually a version and isn't a false + # positive from a , being in the data stream. + if ver[:3] != b"cc=": + data = ver + data + ver = b"cc=0" + + # Get the version number out of the cc=N + ver = ver.split(b"=", 1)[-1].decode("ascii") + + # Dispatch to the actual load method for the given version + try: + return getattr(self, "_loads_v{}".format(ver))(request, data) + + except AttributeError: + # This is a version we don't have a loads function for, so we'll + # just treat it as a miss and return None + return + + def prepare_response(self, request, cached): + """Verify our vary headers match and construct a real urllib3 + HTTPResponse object. + """ + # Special case the '*' Vary value as it means we cannot actually + # determine if the cached response is suitable for this request. + # This case is also handled in the controller code when creating + # a cache entry, but is left here for backwards compatibility. + if "*" in cached.get("vary", {}): + return + + # Ensure that the Vary headers for the cached response match our + # request + for header, value in cached.get("vary", {}).items(): + if request.headers.get(header, None) != value: + return + + body_raw = cached["response"].pop("body") + + headers = CaseInsensitiveDict(data=cached["response"]["headers"]) + if headers.get("transfer-encoding", "") == "chunked": + headers.pop("transfer-encoding") + + cached["response"]["headers"] = headers + + try: + body = io.BytesIO(body_raw) + except TypeError: + # This can happen if cachecontrol serialized to v1 format (pickle) + # using Python 2. A Python 2 str(byte string) will be unpickled as + # a Python 3 str (unicode string), which will cause the above to + # fail with: + # + # TypeError: 'str' does not support the buffer interface + body = io.BytesIO(body_raw.encode("utf8")) + + return HTTPResponse(body=body, preload_content=False, **cached["response"]) + + def _loads_v0(self, request, data): + # The original legacy cache data. This doesn't contain enough + # information to construct everything we need, so we'll treat this as + # a miss. 
+ return + + def _loads_v1(self, request, data): + try: + cached = pickle.loads(data) + except ValueError: + return + + return self.prepare_response(request, cached) + + def _loads_v2(self, request, data): + try: + cached = json.loads(zlib.decompress(data).decode("utf8")) + except (ValueError, zlib.error): + return + + # We need to decode the items that we've base64 encoded + cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"]) + cached["response"]["headers"] = dict( + (_b64_decode_str(k), _b64_decode_str(v)) + for k, v in cached["response"]["headers"].items() + ) + cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"]) + cached["vary"] = dict( + (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v) + for k, v in cached["vary"].items() + ) + + return self.prepare_response(request, cached) + + def _loads_v3(self, request, data): + # Due to Python 2 encoding issues, it's impossible to know for sure + # exactly how to load v3 entries, thus we'll treat these as a miss so + # that they get rewritten out as v4 entries. + return + + def _loads_v4(self, request, data): + try: + cached = msgpack.loads(data, raw=False) + except ValueError: + return + + return self.prepare_response(request, cached) diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py new file mode 100644 index 0000000000000000000000000000000000000000..b6ee7f2039801c9792dfe6e473843fb0a4bc4a5b --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py @@ -0,0 +1,33 @@ +# SPDX-FileCopyrightText: 2015 Eric Larson +# +# SPDX-License-Identifier: Apache-2.0 + +from .adapter import CacheControlAdapter +from .cache import DictCache + + +def CacheControl( + sess, + cache=None, + cache_etags=True, + serializer=None, + heuristic=None, + controller_class=None, + adapter_class=None, + cacheable_methods=None, +): + + cache = DictCache() if cache is None else cache + adapter_class = adapter_class or CacheControlAdapter + adapter = adapter_class( + cache, + cache_etags=cache_etags, + serializer=serializer, + heuristic=heuristic, + controller_class=controller_class, + cacheable_methods=cacheable_methods, + ) + sess.mount("http://", adapter) + sess.mount("https://", adapter) + + return sess diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..54daafe3c4e3491fb3304dd821ebd438acbbf5cf Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f2e612ee6e27495dafed8bdd1f0d077158b37937 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0f9fda9aa4471688197ca330a6a6a2cf227ce8c7 Binary files 
/dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b5b9ca0e016c0d210b140414d24731caffebb055 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5aaa8ef0334246bcb40b8b3846612ef35dfb2d17 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f53d3f80b5267ae6e521609d9ee7f8e987b73178 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..10aa612052995a4c0a1539007df1fba5cfda084b Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..39cc8a5169615c218438d8bb7f7a6cec26386717 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ea0066e53a4dabafdba19b4dd4bd394636ed264e Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..17139d88188534485e3b107305399a1ddfaf3735 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..2b7d86541d55b2690156388678b924c41b9f7791 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1e0ff65f57cd42f474a6896112d4d3619e118a96 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c5b24c390c683ab560716015ea8ef88066b65b0c Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..f5674794758b33d71340c001c21523717d323bf2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c391d66e34b4660177042ee419497e862088005e Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..1ec49589de2e0faa044a251ae124a64d886aaf66 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8115bec367ee1bbe3f2150a72cf78c8a52d3f43d Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c9c42db82f1c5e39bb16a219fc6cf074ae000f09 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc new file mode 100644 index 
0000000000000000000000000000000000000000..1146e1cd4a9383afecc57af3927507df2f13f350 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..c867e0f23cb5dff44b657eb41efc725227b03824 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..21eca646158ceef13974a5e71d1fce7c69364bb4 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..3a531ccb3444e562d198f360557b17b6fc758b78 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..714787ef12835be905708ccf723e7fd678f70e3d Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..139528725086bd33ffb8d36d9b9f1d18965550df Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..67acde8dfba0e266df7e5f3054ef466a9654eac7 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..59c1e808a598fb09c900d8e5887b42c40d0e08b0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-310.pyc 
b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..85c26e353e2841b182b97471c90450a58e6e2ae3 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..a2b64cda6009c97d14557510368048197383a881 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/latin1prober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..e9bc6528c231f8ba1a3f9f263c5f167679e3d470 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..b6efc9d3b28f55dd239dd09019f2edaa0a9a461e Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..757ccfea4ad49e6460f038de5d653fbd76b3aae1 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/mbcssm.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..dccbeb9f4edd28e1525824b1ee90385012a8befa Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..5a054ad20567b8db1c7677bd3d36280df42239f2 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..7db38e0fba0a5a541d1bce2187f94a5099b8979b Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/sjisprober.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ee114738b73c34e5799ab3796e6d2288d6f92635 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/universaldetector.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6dccf2177f8997060094ab118ba647b5b287a1a0 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/utf8prober.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..8278c4ddd6257b07b26e3b209521095810eaab9f Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/version.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__init__.py b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..ce05fd3027447fdc64986ecdee950643822e45e6 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__init__.py @@ -0,0 +1,26 @@ +__all__ = [ + "__version__", + "AbstractProvider", + "AbstractResolver", + "BaseReporter", + "InconsistentCandidate", + "Resolver", + "RequirementsConflicted", + "ResolutionError", + "ResolutionImpossible", + "ResolutionTooDeep", +] + +__version__ = "0.8.1" + + +from .providers import AbstractProvider, AbstractResolver +from .reporters import BaseReporter +from .resolvers import ( + InconsistentCandidate, + RequirementsConflicted, + ResolutionError, + ResolutionImpossible, + ResolutionTooDeep, + Resolver, +) diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..6d8d5239dda98fcc3202e5a9f1dd147223c2fb83 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..86e68fb6f50a1facac141ae58b994bdf04aa82bc Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/providers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ed27e32470ebf57c7827437a8509d831707ea54f Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/reporters.cpython-310.pyc differ diff --git 
a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..25e89eefcb07e7a3520ab47ae77ce62896656ffa Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/resolvers.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..0c08b06d08668a492eeabafcbe7ee8604c1ca225 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/__pycache__/structs.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__init__.py b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..d43047da1701029cc072a790115868f2b14779ed Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-310.pyc b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-310.pyc new file mode 100644 index 0000000000000000000000000000000000000000..ec54b5fd6759be771a215b31fbbf4b82c67f2376 Binary files /dev/null and b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-310.pyc differ diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py new file mode 100644 index 0000000000000000000000000000000000000000..1becc5093c5ab8e196bb9fee415e2381e7158fc3 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py @@ -0,0 +1,6 @@ +__all__ = ["Mapping", "Sequence"] + +try: + from collections.abc import Mapping, Sequence +except ImportError: + from collections import Mapping, Sequence diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/providers.py b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/providers.py new file mode 100644 index 0000000000000000000000000000000000000000..7d0a9c22a4656951910a9fbb70af59a0706cadde --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/providers.py @@ -0,0 +1,133 @@ +class AbstractProvider(object): + """Delegate class to provide requirement interface for the resolver.""" + + def identify(self, requirement_or_candidate): + """Given a requirement, return an identifier for it. + + This is used to identify a requirement, e.g. whether two requirements + should have their specifier parts merged. 
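Before providers.py continues below, a note on the two-line `collections_abc` compat shim just added: it is the standard try/except import fallback. A standalone sketch of the same idiom (an assumption-free restatement, not the vendored module itself), with a quick check that the built-in containers register against the ABCs:

```python
# Same fallback idiom as the vendored compat shim: prefer the Python 3
# location of the ABCs, fall back to the legacy one on old interpreters.
try:
    from collections.abc import Mapping, Sequence
except ImportError:  # pragma: no cover (legacy Python only)
    from collections import Mapping, Sequence

# Built-in containers are registered against these ABCs.
print(issubclass(dict, Mapping), issubclass(list, Sequence))  # True True
```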
+ """ + raise NotImplementedError + + def get_preference( + self, + identifier, + resolutions, + candidates, + information, + backtrack_causes, + ): + """Produce a sort key for given requirement based on preference. + + The preference is defined as "I think this requirement should be + resolved first". The lower the return value is, the more preferred + this group of arguments is. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the dependency matches of which should be returned. + :param resolutions: Mapping of candidates currently pinned by the + resolver. Each key is an identifier, and the value a candidate. + The candidate may conflict with requirements from ``information``. + :param candidates: Mapping of each dependency's possible candidates. + Each value is an iterator of candidates. + :param information: Mapping of requirement information of each package. + Each value is an iterator of *requirement information*. + :param backtrack_causes: Sequence of requirement information that were + the requirements that caused the resolver to most recently backtrack. + + A *requirement information* instance is a named tuple with two members: + + * ``requirement`` specifies a requirement contributing to the current + list of candidates. + * ``parent`` specifies the candidate that provides (dependend on) the + requirement, or ``None`` to indicate a root requirement. + + The preference could depend on a various of issues, including (not + necessarily in this order): + + * Is this package pinned in the current resolution result? + * How relaxed is the requirement? Stricter ones should probably be + worked on first? (I don't know, actually.) + * How many possibilities are there to satisfy this requirement? Those + with few left should likely be worked on first, I guess? + * Are there any known conflicts for this requirement? We should + probably work on those with the most known conflicts. + + A sortable value should be returned (this will be used as the ``key`` + parameter of the built-in sorting function). The smaller the value is, + the more preferred this requirement is (i.e. the sorting function + is called with ``reverse=False``). + """ + raise NotImplementedError + + def find_matches(self, identifier, requirements, incompatibilities): + """Find all possible candidates that satisfy given constraints. + + :param identifier: An identifier as returned by ``identify()``. This + identifies the dependency matches of which should be returned. + :param requirements: A mapping of requirements that all returned + candidates must satisfy. Each key is an identifier, and the value + an iterator of requirements for that dependency. + :param incompatibilities: A mapping of known incompatibilities of + each dependency. Each key is an identifier, and the value an + iterator of incompatibilities known to the resolver. All + incompatibilities *must* be excluded from the return value. + + This should try to get candidates based on the requirements' types. + For VCS, local, and archive requirements, the one-and-only match is + returned, and for a "named" requirement, the index(es) should be + consulted to find concrete candidates for this requirement. + + The return value should produce candidates ordered by preference; the + most preferred candidate should come first. The return type may be one + of the following: + + * A callable that returns an iterator that yields candidates. + * An collection of candidates. + * An iterable of candidates. 
This will be consumed immediately into a + list of candidates. + """ + raise NotImplementedError + + def is_satisfied_by(self, requirement, candidate): + """Whether the given requirement can be satisfied by a candidate. + + The candidate is guaranteed to have been generated from the + requirement. + + A boolean should be returned to indicate whether ``candidate`` is a + viable solution to the requirement. + """ + raise NotImplementedError + + def get_dependencies(self, candidate): + """Get dependencies of a candidate. + + This should return a collection of requirements that `candidate` + specifies as its dependencies. + """ + raise NotImplementedError + + +class AbstractResolver(object): + """The thing that performs the actual resolution work.""" + + base_exception = Exception + + def __init__(self, provider, reporter): + self.provider = provider + self.reporter = reporter + + def resolve(self, requirements, **kwargs): + """Take a collection of constraints, spit out the resolution result. + + This returns a representation of the final resolution state, with one + guaranteed attribute ``mapping`` that contains resolved candidates as + values. The keys are their respective identifiers. + + :param requirements: A collection of constraints. + :param kwargs: Additional keyword arguments that subclasses may accept. + + :raises: ``self.base_exception`` or its subclass. + """ + raise NotImplementedError diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/reporters.py b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/reporters.py new file mode 100644 index 0000000000000000000000000000000000000000..6695480fff4c87608ac2002dfb341f90ed1a5ce4 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/reporters.py @@ -0,0 +1,43 @@ +class BaseReporter(object): + """Delegate class to provide progress reporting for the resolver.""" + + def starting(self): + """Called before the resolution actually starts.""" + + def starting_round(self, index): + """Called before each round of resolution starts. + + The index is zero-based. + """ + + def ending_round(self, index, state): + """Called before each round of resolution ends. + + This is NOT called if the resolution ends at this round. Use `ending` + if you want to report finalization. The index is zero-based. + """ + + def ending(self, state): + """Called before the resolution ends successfully.""" + + def adding_requirement(self, requirement, parent): + """Called when adding a new requirement into the resolve criteria. + + :param requirement: The additional requirement to be applied to filter + the available candidates. + :param parent: The candidate that requires ``requirement`` as a + dependency, or None if ``requirement`` is one of the root + requirements passed in from ``Resolver.resolve()``. + """ + + def resolving_conflicts(self, causes): + """Called when starting to attempt requirement conflict resolution. + + :param causes: The information on the collision that caused the backtracking.
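The five hooks of `AbstractProvider` above are the entire contract a provider implements. A minimal toy sketch follows (hypothetical, not the provider pip actually ships): requirements are `(name, allowed_versions)` pairs, candidates are `(name, version)` pairs, and the package "index" is a plain dict.

```python
from pip._vendor.resolvelib.providers import AbstractProvider


class ToyProvider(AbstractProvider):
    """Hypothetical provider over an in-memory index of versions."""

    def __init__(self, index, dependencies):
        self._index = index        # name -> list of available versions
        self._deps = dependencies  # (name, version) -> list of requirements

    def identify(self, requirement_or_candidate):
        # Requirements and candidates are both tuples starting with a name,
        # so they collapse to the same key.
        return requirement_or_candidate[0]

    def get_preference(
        self, identifier, resolutions, candidates, information, backtrack_causes
    ):
        # Work on the dependency with the fewest candidates left first.
        return sum(1 for _ in candidates[identifier])

    def find_matches(self, identifier, requirements, incompatibilities):
        # Intersect every requirement's allowed versions (the resolver always
        # passes at least one requirement), then drop known incompatibilities.
        allowed = set.intersection(
            *(set(versions) for _, versions in requirements[identifier])
        )
        banned = {version for _, version in incompatibilities[identifier]}
        return [
            (identifier, version)
            for version in sorted(self._index.get(identifier, ()), reverse=True)
            if version in allowed and version not in banned
        ]

    def is_satisfied_by(self, requirement, candidate):
        return candidate[1] in requirement[1]

    def get_dependencies(self, candidate):
        return self._deps.get(candidate, [])
```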
+ """ + + def backtracking(self, candidate): + """Called when rejecting a candidate during backtracking.""" + + def pinning(self, candidate): + """Called when adding a candidate to the potential solution.""" diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/resolvers.py b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/resolvers.py new file mode 100644 index 0000000000000000000000000000000000000000..787681b03e9ec2fd4490de10cdc95e58c893c8b5 --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/resolvers.py @@ -0,0 +1,482 @@ +import collections +import operator + +from .providers import AbstractResolver +from .structs import DirectedGraph, IteratorMapping, build_iter_view + +RequirementInformation = collections.namedtuple( + "RequirementInformation", ["requirement", "parent"] +) + + +class ResolverException(Exception): + """A base class for all exceptions raised by this module. + + Exceptions derived by this class should all be handled in this module. Any + bubbling pass the resolver should be treated as a bug. + """ + + +class RequirementsConflicted(ResolverException): + def __init__(self, criterion): + super(RequirementsConflicted, self).__init__(criterion) + self.criterion = criterion + + def __str__(self): + return "Requirements conflict: {}".format( + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class InconsistentCandidate(ResolverException): + def __init__(self, candidate, criterion): + super(InconsistentCandidate, self).__init__(candidate, criterion) + self.candidate = candidate + self.criterion = criterion + + def __str__(self): + return "Provided candidate {!r} does not satisfy {}".format( + self.candidate, + ", ".join(repr(r) for r in self.criterion.iter_requirement()), + ) + + +class Criterion(object): + """Representation of possible resolution results of a package. + + This holds three attributes: + + * `information` is a collection of `RequirementInformation` pairs. + Each pair is a requirement contributing to this criterion, and the + candidate that provides the requirement. + * `incompatibilities` is a collection of all known not-to-work candidates + to exclude from consideration. + * `candidates` is a collection containing all possible candidates deducted + from the union of contributing requirements and known incompatibilities. + It should never be empty, except when the criterion is an attribute of a + raised `RequirementsConflicted` (in which case it is always empty). + + .. note:: + This class is intended to be externally immutable. **Do not** mutate + any of its attribute containers. 
+ """ + + def __init__(self, candidates, information, incompatibilities): + self.candidates = candidates + self.information = information + self.incompatibilities = incompatibilities + + def __repr__(self): + requirements = ", ".join( + "({!r}, via={!r})".format(req, parent) + for req, parent in self.information + ) + return "Criterion({})".format(requirements) + + def iter_requirement(self): + return (i.requirement for i in self.information) + + def iter_parent(self): + return (i.parent for i in self.information) + + +class ResolutionError(ResolverException): + pass + + +class ResolutionImpossible(ResolutionError): + def __init__(self, causes): + super(ResolutionImpossible, self).__init__(causes) + # causes is a list of RequirementInformation objects + self.causes = causes + + +class ResolutionTooDeep(ResolutionError): + def __init__(self, round_count): + super(ResolutionTooDeep, self).__init__(round_count) + self.round_count = round_count + + +# Resolution state in a round. +State = collections.namedtuple("State", "mapping criteria backtrack_causes") + + +class Resolution(object): + """Stateful resolution object. + + This is designed as a one-off object that holds information to kick start + the resolution process, and holds the results afterwards. + """ + + def __init__(self, provider, reporter): + self._p = provider + self._r = reporter + self._states = [] + + @property + def state(self): + try: + return self._states[-1] + except IndexError: + raise AttributeError("state") + + def _push_new_state(self): + """Push a new state into history. + + This new state will be used to hold resolution results of the next + coming round. + """ + base = self._states[-1] + state = State( + mapping=base.mapping.copy(), + criteria=base.criteria.copy(), + backtrack_causes=base.backtrack_causes[:], + ) + self._states.append(state) + + def _add_to_criteria(self, criteria, requirement, parent): + self._r.adding_requirement(requirement=requirement, parent=parent) + + identifier = self._p.identify(requirement_or_candidate=requirement) + criterion = criteria.get(identifier) + if criterion: + incompatibilities = list(criterion.incompatibilities) + else: + incompatibilities = [] + + matches = self._p.find_matches( + identifier=identifier, + requirements=IteratorMapping( + criteria, + operator.methodcaller("iter_requirement"), + {identifier: [requirement]}, + ), + incompatibilities=IteratorMapping( + criteria, + operator.attrgetter("incompatibilities"), + {identifier: incompatibilities}, + ), + ) + + if criterion: + information = list(criterion.information) + information.append(RequirementInformation(requirement, parent)) + else: + information = [RequirementInformation(requirement, parent)] + + criterion = Criterion( + candidates=build_iter_view(matches), + information=information, + incompatibilities=incompatibilities, + ) + if not criterion.candidates: + raise RequirementsConflicted(criterion) + criteria[identifier] = criterion + + def _get_preference(self, name): + return self._p.get_preference( + identifier=name, + resolutions=self.state.mapping, + candidates=IteratorMapping( + self.state.criteria, + operator.attrgetter("candidates"), + ), + information=IteratorMapping( + self.state.criteria, + operator.attrgetter("information"), + ), + backtrack_causes=self.state.backtrack_causes, + ) + + def _is_current_pin_satisfying(self, name, criterion): + try: + current_pin = self.state.mapping[name] + except KeyError: + return False + return all( + self._p.is_satisfied_by(requirement=r, candidate=current_pin) + for r 
in criterion.iter_requirement() + ) + + def _get_updated_criteria(self, candidate): + criteria = self.state.criteria.copy() + for requirement in self._p.get_dependencies(candidate=candidate): + self._add_to_criteria(criteria, requirement, parent=candidate) + return criteria + + def _attempt_to_pin_criterion(self, name): + criterion = self.state.criteria[name] + + causes = [] + for candidate in criterion.candidates: + try: + criteria = self._get_updated_criteria(candidate) + except RequirementsConflicted as e: + causes.append(e.criterion) + continue + + # Check that the newly-pinned candidate actually works. This should + # always pass under normal circumstances, but in the case of a + # faulty provider, we will raise an error to notify the implementer + # to fix find_matches() and/or is_satisfied_by(). + satisfied = all( + self._p.is_satisfied_by(requirement=r, candidate=candidate) + for r in criterion.iter_requirement() + ) + if not satisfied: + raise InconsistentCandidate(candidate, criterion) + + self._r.pinning(candidate=candidate) + self.state.criteria.update(criteria) + + # Put the newly-pinned candidate at the end. This is essential because + # backtracking looks at this mapping to get the last pin. + self.state.mapping.pop(name, None) + self.state.mapping[name] = candidate + + return [] + + # All candidates tried, nothing works. This criterion is a dead + # end; signal for backtracking. + return causes + + def _backtrack(self): + """Perform backtracking. + + When we enter here, the stack is like this:: + + [ state Z ] + [ state Y ] + [ state X ] + .... earlier states are irrelevant. + + 1. No pins worked for Z, so it does not have a pin. + 2. We want to reset state Y to unpinned, and pin another candidate. + 3. State X holds what state Y was before the pin, but does not + have the incompatibility information gathered in state Y. + + Each iteration of the loop will: + + 1. Discard Z. + 2. Discard Y but remember its incompatibility information gathered + previously, and the failure we're dealing with right now. + 3. Push a new state Y' based on X, and apply the incompatibility + information from Y to Y'. + 4a. If this causes Y' to conflict, we need to backtrack again. Make Y' + the new Z and go back to step 2. + 4b. If the incompatibilities apply cleanly, end backtracking. + """ + while len(self._states) >= 3: + # Remove the state that triggered backtracking. + del self._states[-1] + + # Retrieve the last candidate pin and known incompatibilities. + broken_state = self._states.pop() + name, candidate = broken_state.mapping.popitem() + incompatibilities_from_broken = [ + (k, list(v.incompatibilities)) + for k, v in broken_state.criteria.items() + ] + + # Also mark the newly known incompatibility. + incompatibilities_from_broken.append((name, [candidate])) + + self._r.backtracking(candidate=candidate) + + # Create a new state from the last known-to-work one, and apply + # the previously gathered incompatibility information.
+ def _patch_criteria(): + for k, incompatibilities in incompatibilities_from_broken: + if not incompatibilities: + continue + try: + criterion = self.state.criteria[k] + except KeyError: + continue + matches = self._p.find_matches( + identifier=k, + requirements=IteratorMapping( + self.state.criteria, + operator.methodcaller("iter_requirement"), + ), + incompatibilities=IteratorMapping( + self.state.criteria, + operator.attrgetter("incompatibilities"), + {k: incompatibilities}, + ), + ) + candidates = build_iter_view(matches) + if not candidates: + return False + incompatibilities.extend(criterion.incompatibilities) + self.state.criteria[k] = Criterion( + candidates=candidates, + information=list(criterion.information), + incompatibilities=incompatibilities, + ) + return True + + self._push_new_state() + success = _patch_criteria() + + # It works! Let's work on this new state. + if success: + return True + + # State does not work after applying known incompatibilities. + # Try the state before that. + + # No way to backtrack anymore. + return False + + def resolve(self, requirements, max_rounds): + if self._states: + raise RuntimeError("already resolved") + + self._r.starting() + + # Initialize the root state. + self._states = [ + State( + mapping=collections.OrderedDict(), + criteria={}, + backtrack_causes=[], + ) + ] + for r in requirements: + try: + self._add_to_criteria(self.state.criteria, r, parent=None) + except RequirementsConflicted as e: + raise ResolutionImpossible(e.criterion.information) + + # The root state is saved as a sentinel so the first-ever pin can have + # something to backtrack to if it fails. The root state is basically + # pinning the virtual "root" package in the graph. + self._push_new_state() + + for round_index in range(max_rounds): + self._r.starting_round(index=round_index) + + unsatisfied_names = [ + key + for key, criterion in self.state.criteria.items() + if not self._is_current_pin_satisfying(key, criterion) + ] + + # All criteria are accounted for. Nothing more to pin, we are done! + if not unsatisfied_names: + self._r.ending(state=self.state) + return self.state + + # Choose the most preferred unpinned criterion to try. + name = min(unsatisfied_names, key=self._get_preference) + failure_causes = self._attempt_to_pin_criterion(name) + + if failure_causes: + causes = [i for c in failure_causes for i in c.information] + # Backtrack if pinning fails. The backtrack process puts us in + # an unpinned state, so we can work on it in the next round. + self._r.resolving_conflicts(causes=causes) + success = self._backtrack() + self.state.backtrack_causes[:] = causes + + # Dead ends everywhere. Give up. + if not success: + raise ResolutionImpossible(self.state.backtrack_causes) + else: + # Pinning was successful. Push a new state to do another pin.
+ self._push_new_state() + + self._r.ending_round(index=round_index, state=self.state) + + raise ResolutionTooDeep(max_rounds) + + +def _has_route_to_root(criteria, key, all_keys, connected): + if key in connected: + return True + if key not in criteria: + return False + for p in criteria[key].iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey in connected: + connected.add(key) + return True + if _has_route_to_root(criteria, pkey, all_keys, connected): + connected.add(key) + return True + return False + + +Result = collections.namedtuple("Result", "mapping graph criteria") + + +def _build_result(state): + mapping = state.mapping + all_keys = {id(v): k for k, v in mapping.items()} + all_keys[id(None)] = None + + graph = DirectedGraph() + graph.add(None) # Sentinel as root dependencies' parent. + + connected = {None} + for key, criterion in state.criteria.items(): + if not _has_route_to_root(state.criteria, key, all_keys, connected): + continue + if key not in graph: + graph.add(key) + for p in criterion.iter_parent(): + try: + pkey = all_keys[id(p)] + except KeyError: + continue + if pkey not in graph: + graph.add(pkey) + graph.connect(pkey, key) + + return Result( + mapping={k: v for k, v in mapping.items() if k in connected}, + graph=graph, + criteria=state.criteria, + ) + + +class Resolver(AbstractResolver): + """The thing that performs the actual resolution work.""" + + base_exception = ResolverException + + def resolve(self, requirements, max_rounds=100): + """Take a collection of constraints, spit out the resolution result. + + The return value is a representation of the final resolution result. It + is a tuple subclass with three public members: + + * `mapping`: A dict of resolved candidates. Each key is an identifier + of a requirement (as returned by the provider's `identify` method), + and the value is the resolved candidate. + * `graph`: A `DirectedGraph` instance representing the dependency tree. + The vertices are keys of `mapping`, and each edge represents *why* + a particular package is included. A special vertex `None` is + included to represent parents of user-supplied requirements. + * `criteria`: A dict of "criteria" that hold detailed information on + how edges in the graph are derived. Each key is an identifier of a + requirement, and the value is a `Criterion` instance. + + The following exceptions may be raised if a resolution cannot be found: + + * `ResolutionImpossible`: A resolution cannot be found for the given + combination of requirements. The `causes` attribute of the + exception is a list of (requirement, parent), giving the + requirements that could not be satisfied. + * `ResolutionTooDeep`: The dependency tree is too deeply nested and + the resolver gave up. This is usually caused by a circular + dependency, but you can try to resolve this by increasing the + `max_rounds` argument.
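With a provider and a reporter in hand, driving `Resolver.resolve()` takes only a few lines. A usage sketch reusing the hypothetical `ToyProvider` and `PrintingReporter` from the earlier examples:

```python
from pip._vendor.resolvelib import Resolver

# A tiny in-memory "index": a has versions 1 and 2, b only version 1.
index = {"a": [1, 2], "b": [1]}
deps = {
    ("a", 2): [("b", {1})],  # a==2 depends on b==1
    ("a", 1): [],
    ("b", 1): [],
}

resolver = Resolver(ToyProvider(index, deps), PrintingReporter())
result = resolver.resolve([("a", {1, 2})])

print(result.mapping)                   # {'a': ('a', 2), 'b': ('b', 1)}
print(list(result.graph.iter_edges()))  # e.g. [(None, 'a'), ('a', 'b')]
```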
+ """ + resolution = Resolution(self.provider, self.reporter) + state = resolution.resolve(requirements, max_rounds=max_rounds) + return _build_result(state) diff --git a/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/structs.py b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/structs.py new file mode 100644 index 0000000000000000000000000000000000000000..93d1568bd4d5d63281b798b09cedbafdfaa158ee --- /dev/null +++ b/venv/lib/python3.10/site-packages/pip/_vendor/resolvelib/structs.py @@ -0,0 +1,165 @@ +import itertools + +from .compat import collections_abc + + +class DirectedGraph(object): + """A graph structure with directed edges.""" + + def __init__(self): + self._vertices = set() + self._forwards = {} # -> Set[] + self._backwards = {} # -> Set[] + + def __iter__(self): + return iter(self._vertices) + + def __len__(self): + return len(self._vertices) + + def __contains__(self, key): + return key in self._vertices + + def copy(self): + """Return a shallow copy of this graph.""" + other = DirectedGraph() + other._vertices = set(self._vertices) + other._forwards = {k: set(v) for k, v in self._forwards.items()} + other._backwards = {k: set(v) for k, v in self._backwards.items()} + return other + + def add(self, key): + """Add a new vertex to the graph.""" + if key in self._vertices: + raise ValueError("vertex exists") + self._vertices.add(key) + self._forwards[key] = set() + self._backwards[key] = set() + + def remove(self, key): + """Remove a vertex from the graph, disconnecting all edges from/to it.""" + self._vertices.remove(key) + for f in self._forwards.pop(key): + self._backwards[f].remove(key) + for t in self._backwards.pop(key): + self._forwards[t].remove(key) + + def connected(self, f, t): + return f in self._backwards[t] and t in self._forwards[f] + + def connect(self, f, t): + """Connect two existing vertices. + + Nothing happens if the vertices are already connected. + """ + if t not in self._vertices: + raise KeyError(t) + self._forwards[f].add(t) + self._backwards[t].add(f) + + def iter_edges(self): + for f, children in self._forwards.items(): + for t in children: + yield f, t + + def iter_children(self, key): + return iter(self._forwards[key]) + + def iter_parents(self, key): + return iter(self._backwards[key]) + + +class IteratorMapping(collections_abc.Mapping): + def __init__(self, mapping, accessor, appends=None): + self._mapping = mapping + self._accessor = accessor + self._appends = appends or {} + + def __repr__(self): + return "IteratorMapping({!r}, {!r}, {!r})".format( + self._mapping, + self._accessor, + self._appends, + ) + + def __bool__(self): + return bool(self._mapping or self._appends) + + __nonzero__ = __bool__ # XXX: Python 2. + + def __contains__(self, key): + return key in self._mapping or key in self._appends + + def __getitem__(self, k): + try: + v = self._mapping[k] + except KeyError: + return iter(self._appends[k]) + return itertools.chain(self._accessor(v), self._appends.get(k, ())) + + def __iter__(self): + more = (k for k in self._appends if k not in self._mapping) + return itertools.chain(self._mapping, more) + + def __len__(self): + more = sum(1 for k in self._appends if k not in self._mapping) + return len(self._mapping) + more + + +class _FactoryIterableView(object): + """Wrap an iterator factory returned by `find_matches()`. 
+ + Calling `iter()` on this class invokes the underlying iterator + factory, making it a "collection with ordering" that can be iterated + through multiple times, but lacks the random-access methods of + built-in Python sequence types. + """ + + def __init__(self, factory): + self._factory = factory + + def __repr__(self): + return "{}({})".format(type(self).__name__, list(self._factory())) + + def __bool__(self): + try: + next(self._factory()) + except StopIteration: + return False + return True + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return self._factory() + + +class _SequenceIterableView(object): + """Wrap an iterable returned by find_matches(). + + This is essentially just a proxy to the underlying sequence that provides + the same interface as `_FactoryIterableView`. + """ + + def __init__(self, sequence): + self._sequence = sequence + + def __repr__(self): + return "{}({})".format(type(self).__name__, self._sequence) + + def __bool__(self): + return bool(self._sequence) + + __nonzero__ = __bool__ # XXX: Python 2. + + def __iter__(self): + return iter(self._sequence) + + +def build_iter_view(matches): + """Build an iterable view from the value returned by `find_matches()`.""" + if callable(matches): + return _FactoryIterableView(matches) + if not isinstance(matches, collections_abc.Sequence): + matches = list(matches) + return _SequenceIterableView(matches)
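structs.py closes with `build_iter_view()`, which normalizes whatever `find_matches()` returned: a callable stays lazy and re-iterable, a one-shot iterator is materialized into a list, and a sequence is proxied as-is. A quick behavioural sketch:

```python
from pip._vendor.resolvelib.structs import build_iter_view

# A callable factory is wrapped so that every iteration re-invokes it.
view = build_iter_view(lambda: iter([1, 2, 3]))
assert list(view) == [1, 2, 3]
assert list(view) == [1, 2, 3]  # still not exhausted

# A bare iterator would go stale after one pass, so it is listed eagerly
# and wrapped in the sequence view instead.
view = build_iter_view(iter([1, 2]))
assert bool(view)
assert list(view) == [1, 2] and list(view) == [1, 2]
```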