Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py +18 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py +61 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py +137 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py +43 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py +6 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py +150 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py +37 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py +32 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py +415 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py +111 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py +139 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py +186 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py +33 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/codingstatemachine.py +88 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cp949prober.py +49 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escprober.py +101 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/hebrewprober.py +292 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhebrewmodel.py +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langrussianmodel.py +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/py31compat.py +23 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__init__.py +189 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/colors.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/bar.py +93 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/colors.py +79 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/counter.py +47 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/spinner.py +45 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__init__.py +517 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-310.pyc +0 -0
- scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-310.pyc +0 -0
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py
ADDED
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
"""CacheControl import Interface.
|
6 |
+
|
7 |
+
Make it easy to import from cachecontrol without long namespaces.
|
8 |
+
"""
|
9 |
+
__author__ = "Eric Larson"
|
10 |
+
__email__ = "[email protected]"
|
11 |
+
__version__ = "0.12.10"
|
12 |
+
|
13 |
+
from .wrapper import CacheControl
|
14 |
+
from .adapter import CacheControlAdapter
|
15 |
+
from .controller import CacheController
|
16 |
+
|
17 |
+
import logging
|
18 |
+
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (645 Bytes). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc
ADDED
Binary file (1.58 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc
ADDED
Binary file (3.16 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc
ADDED
Binary file (1.85 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (759 Bytes). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc
ADDED
Binary file (8.21 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc
ADDED
Binary file (2.8 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc
ADDED
Binary file (4.72 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc
ADDED
Binary file (4.25 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc
ADDED
Binary file (690 Bytes). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import logging
|
6 |
+
|
7 |
+
from pip._vendor import requests
|
8 |
+
|
9 |
+
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
|
10 |
+
from pip._vendor.cachecontrol.cache import DictCache
|
11 |
+
from pip._vendor.cachecontrol.controller import logger
|
12 |
+
|
13 |
+
from argparse import ArgumentParser
|
14 |
+
|
15 |
+
|
16 |
+
def setup_logging():
|
17 |
+
logger.setLevel(logging.DEBUG)
|
18 |
+
handler = logging.StreamHandler()
|
19 |
+
logger.addHandler(handler)
|
20 |
+
|
21 |
+
|
22 |
+
def get_session():
|
23 |
+
adapter = CacheControlAdapter(
|
24 |
+
DictCache(), cache_etags=True, serializer=None, heuristic=None
|
25 |
+
)
|
26 |
+
sess = requests.Session()
|
27 |
+
sess.mount("http://", adapter)
|
28 |
+
sess.mount("https://", adapter)
|
29 |
+
|
30 |
+
sess.cache_controller = adapter.controller
|
31 |
+
return sess
|
32 |
+
|
33 |
+
|
34 |
+
def get_args():
|
35 |
+
parser = ArgumentParser()
|
36 |
+
parser.add_argument("url", help="The URL to try and cache")
|
37 |
+
return parser.parse_args()
|
38 |
+
|
39 |
+
|
40 |
+
def main(args=None):
|
41 |
+
args = get_args()
|
42 |
+
sess = get_session()
|
43 |
+
|
44 |
+
# Make a request to get a response
|
45 |
+
resp = sess.get(args.url)
|
46 |
+
|
47 |
+
# Turn on logging
|
48 |
+
setup_logging()
|
49 |
+
|
50 |
+
# try setting the cache
|
51 |
+
sess.cache_controller.cache_response(resp.request, resp.raw)
|
52 |
+
|
53 |
+
# Now try to get it
|
54 |
+
if sess.cache_controller.cached_request(resp.request):
|
55 |
+
print("Cached!")
|
56 |
+
else:
|
57 |
+
print("Not cached :(")
|
58 |
+
|
59 |
+
|
60 |
+
if __name__ == "__main__":
|
61 |
+
main()
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py
ADDED
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import types
|
6 |
+
import functools
|
7 |
+
import zlib
|
8 |
+
|
9 |
+
from pip._vendor.requests.adapters import HTTPAdapter
|
10 |
+
|
11 |
+
from .controller import CacheController, PERMANENT_REDIRECT_STATUSES
|
12 |
+
from .cache import DictCache
|
13 |
+
from .filewrapper import CallbackFileWrapper
|
14 |
+
|
15 |
+
|
16 |
+
class CacheControlAdapter(HTTPAdapter):
|
17 |
+
invalidating_methods = {"PUT", "PATCH", "DELETE"}
|
18 |
+
|
19 |
+
def __init__(
|
20 |
+
self,
|
21 |
+
cache=None,
|
22 |
+
cache_etags=True,
|
23 |
+
controller_class=None,
|
24 |
+
serializer=None,
|
25 |
+
heuristic=None,
|
26 |
+
cacheable_methods=None,
|
27 |
+
*args,
|
28 |
+
**kw
|
29 |
+
):
|
30 |
+
super(CacheControlAdapter, self).__init__(*args, **kw)
|
31 |
+
self.cache = DictCache() if cache is None else cache
|
32 |
+
self.heuristic = heuristic
|
33 |
+
self.cacheable_methods = cacheable_methods or ("GET",)
|
34 |
+
|
35 |
+
controller_factory = controller_class or CacheController
|
36 |
+
self.controller = controller_factory(
|
37 |
+
self.cache, cache_etags=cache_etags, serializer=serializer
|
38 |
+
)
|
39 |
+
|
40 |
+
def send(self, request, cacheable_methods=None, **kw):
|
41 |
+
"""
|
42 |
+
Send a request. Use the request information to see if it
|
43 |
+
exists in the cache and cache the response if we need to and can.
|
44 |
+
"""
|
45 |
+
cacheable = cacheable_methods or self.cacheable_methods
|
46 |
+
if request.method in cacheable:
|
47 |
+
try:
|
48 |
+
cached_response = self.controller.cached_request(request)
|
49 |
+
except zlib.error:
|
50 |
+
cached_response = None
|
51 |
+
if cached_response:
|
52 |
+
return self.build_response(request, cached_response, from_cache=True)
|
53 |
+
|
54 |
+
# check for etags and add headers if appropriate
|
55 |
+
request.headers.update(self.controller.conditional_headers(request))
|
56 |
+
|
57 |
+
resp = super(CacheControlAdapter, self).send(request, **kw)
|
58 |
+
|
59 |
+
return resp
|
60 |
+
|
61 |
+
def build_response(
|
62 |
+
self, request, response, from_cache=False, cacheable_methods=None
|
63 |
+
):
|
64 |
+
"""
|
65 |
+
Build a response by making a request or using the cache.
|
66 |
+
|
67 |
+
This will end up calling send and returning a potentially
|
68 |
+
cached response
|
69 |
+
"""
|
70 |
+
cacheable = cacheable_methods or self.cacheable_methods
|
71 |
+
if not from_cache and request.method in cacheable:
|
72 |
+
# Check for any heuristics that might update headers
|
73 |
+
# before trying to cache.
|
74 |
+
if self.heuristic:
|
75 |
+
response = self.heuristic.apply(response)
|
76 |
+
|
77 |
+
# apply any expiration heuristics
|
78 |
+
if response.status == 304:
|
79 |
+
# We must have sent an ETag request. This could mean
|
80 |
+
# that we've been expired already or that we simply
|
81 |
+
# have an etag. In either case, we want to try and
|
82 |
+
# update the cache if that is the case.
|
83 |
+
cached_response = self.controller.update_cached_response(
|
84 |
+
request, response
|
85 |
+
)
|
86 |
+
|
87 |
+
if cached_response is not response:
|
88 |
+
from_cache = True
|
89 |
+
|
90 |
+
# We are done with the server response, read a
|
91 |
+
# possible response body (compliant servers will
|
92 |
+
# not return one, but we cannot be 100% sure) and
|
93 |
+
# release the connection back to the pool.
|
94 |
+
response.read(decode_content=False)
|
95 |
+
response.release_conn()
|
96 |
+
|
97 |
+
response = cached_response
|
98 |
+
|
99 |
+
# We always cache the 301 responses
|
100 |
+
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
|
101 |
+
self.controller.cache_response(request, response)
|
102 |
+
else:
|
103 |
+
# Wrap the response file with a wrapper that will cache the
|
104 |
+
# response when the stream has been consumed.
|
105 |
+
response._fp = CallbackFileWrapper(
|
106 |
+
response._fp,
|
107 |
+
functools.partial(
|
108 |
+
self.controller.cache_response, request, response
|
109 |
+
),
|
110 |
+
)
|
111 |
+
if response.chunked:
|
112 |
+
super_update_chunk_length = response._update_chunk_length
|
113 |
+
|
114 |
+
def _update_chunk_length(self):
|
115 |
+
super_update_chunk_length()
|
116 |
+
if self.chunk_left == 0:
|
117 |
+
self._fp._close()
|
118 |
+
|
119 |
+
response._update_chunk_length = types.MethodType(
|
120 |
+
_update_chunk_length, response
|
121 |
+
)
|
122 |
+
|
123 |
+
resp = super(CacheControlAdapter, self).build_response(request, response)
|
124 |
+
|
125 |
+
# See if we should invalidate the cache.
|
126 |
+
if request.method in self.invalidating_methods and resp.ok:
|
127 |
+
cache_url = self.controller.cache_url(request.url)
|
128 |
+
self.cache.delete(cache_url)
|
129 |
+
|
130 |
+
# Give the request a from_cache attr to let people use it
|
131 |
+
resp.from_cache = from_cache
|
132 |
+
|
133 |
+
return resp
|
134 |
+
|
135 |
+
def close(self):
|
136 |
+
self.cache.close()
|
137 |
+
super(CacheControlAdapter, self).close()
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py
ADDED
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
"""
|
6 |
+
The cache object API for implementing caches. The default is a thread
|
7 |
+
safe in-memory dictionary.
|
8 |
+
"""
|
9 |
+
from threading import Lock
|
10 |
+
|
11 |
+
|
12 |
+
class BaseCache(object):
|
13 |
+
|
14 |
+
def get(self, key):
|
15 |
+
raise NotImplementedError()
|
16 |
+
|
17 |
+
def set(self, key, value, expires=None):
|
18 |
+
raise NotImplementedError()
|
19 |
+
|
20 |
+
def delete(self, key):
|
21 |
+
raise NotImplementedError()
|
22 |
+
|
23 |
+
def close(self):
|
24 |
+
pass
|
25 |
+
|
26 |
+
|
27 |
+
class DictCache(BaseCache):
|
28 |
+
|
29 |
+
def __init__(self, init_dict=None):
|
30 |
+
self.lock = Lock()
|
31 |
+
self.data = init_dict or {}
|
32 |
+
|
33 |
+
def get(self, key):
|
34 |
+
return self.data.get(key, None)
|
35 |
+
|
36 |
+
def set(self, key, value, expires=None):
|
37 |
+
with self.lock:
|
38 |
+
self.data.update({key: value})
|
39 |
+
|
40 |
+
def delete(self, key):
|
41 |
+
with self.lock:
|
42 |
+
if key in self.data:
|
43 |
+
self.data.pop(key)
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from .file_cache import FileCache # noqa
|
6 |
+
from .redis_cache import RedisCache # noqa
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (296 Bytes). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
ADDED
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import hashlib
|
6 |
+
import os
|
7 |
+
from textwrap import dedent
|
8 |
+
|
9 |
+
from ..cache import BaseCache
|
10 |
+
from ..controller import CacheController
|
11 |
+
|
12 |
+
try:
|
13 |
+
FileNotFoundError
|
14 |
+
except NameError:
|
15 |
+
# py2.X
|
16 |
+
FileNotFoundError = (IOError, OSError)
|
17 |
+
|
18 |
+
|
19 |
+
def _secure_open_write(filename, fmode):
|
20 |
+
# We only want to write to this file, so open it in write only mode
|
21 |
+
flags = os.O_WRONLY
|
22 |
+
|
23 |
+
# os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
|
24 |
+
# will open *new* files.
|
25 |
+
# We specify this because we want to ensure that the mode we pass is the
|
26 |
+
# mode of the file.
|
27 |
+
flags |= os.O_CREAT | os.O_EXCL
|
28 |
+
|
29 |
+
# Do not follow symlinks to prevent someone from making a symlink that
|
30 |
+
# we follow and insecurely open a cache file.
|
31 |
+
if hasattr(os, "O_NOFOLLOW"):
|
32 |
+
flags |= os.O_NOFOLLOW
|
33 |
+
|
34 |
+
# On Windows we'll mark this file as binary
|
35 |
+
if hasattr(os, "O_BINARY"):
|
36 |
+
flags |= os.O_BINARY
|
37 |
+
|
38 |
+
# Before we open our file, we want to delete any existing file that is
|
39 |
+
# there
|
40 |
+
try:
|
41 |
+
os.remove(filename)
|
42 |
+
except (IOError, OSError):
|
43 |
+
# The file must not exist already, so we can just skip ahead to opening
|
44 |
+
pass
|
45 |
+
|
46 |
+
# Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
|
47 |
+
# race condition happens between the os.remove and this line, that an
|
48 |
+
# error will be raised. Because we utilize a lockfile this should only
|
49 |
+
# happen if someone is attempting to attack us.
|
50 |
+
fd = os.open(filename, flags, fmode)
|
51 |
+
try:
|
52 |
+
return os.fdopen(fd, "wb")
|
53 |
+
|
54 |
+
except:
|
55 |
+
# An error occurred wrapping our FD in a file object
|
56 |
+
os.close(fd)
|
57 |
+
raise
|
58 |
+
|
59 |
+
|
60 |
+
class FileCache(BaseCache):
|
61 |
+
|
62 |
+
def __init__(
|
63 |
+
self,
|
64 |
+
directory,
|
65 |
+
forever=False,
|
66 |
+
filemode=0o0600,
|
67 |
+
dirmode=0o0700,
|
68 |
+
use_dir_lock=None,
|
69 |
+
lock_class=None,
|
70 |
+
):
|
71 |
+
|
72 |
+
if use_dir_lock is not None and lock_class is not None:
|
73 |
+
raise ValueError("Cannot use use_dir_lock and lock_class together")
|
74 |
+
|
75 |
+
try:
|
76 |
+
from lockfile import LockFile
|
77 |
+
from lockfile.mkdirlockfile import MkdirLockFile
|
78 |
+
except ImportError:
|
79 |
+
notice = dedent(
|
80 |
+
"""
|
81 |
+
NOTE: In order to use the FileCache you must have
|
82 |
+
lockfile installed. You can install it via pip:
|
83 |
+
pip install lockfile
|
84 |
+
"""
|
85 |
+
)
|
86 |
+
raise ImportError(notice)
|
87 |
+
|
88 |
+
else:
|
89 |
+
if use_dir_lock:
|
90 |
+
lock_class = MkdirLockFile
|
91 |
+
|
92 |
+
elif lock_class is None:
|
93 |
+
lock_class = LockFile
|
94 |
+
|
95 |
+
self.directory = directory
|
96 |
+
self.forever = forever
|
97 |
+
self.filemode = filemode
|
98 |
+
self.dirmode = dirmode
|
99 |
+
self.lock_class = lock_class
|
100 |
+
|
101 |
+
@staticmethod
|
102 |
+
def encode(x):
|
103 |
+
return hashlib.sha224(x.encode()).hexdigest()
|
104 |
+
|
105 |
+
def _fn(self, name):
|
106 |
+
# NOTE: This method should not change as some may depend on it.
|
107 |
+
# See: https://github.com/ionrock/cachecontrol/issues/63
|
108 |
+
hashed = self.encode(name)
|
109 |
+
parts = list(hashed[:5]) + [hashed]
|
110 |
+
return os.path.join(self.directory, *parts)
|
111 |
+
|
112 |
+
def get(self, key):
|
113 |
+
name = self._fn(key)
|
114 |
+
try:
|
115 |
+
with open(name, "rb") as fh:
|
116 |
+
return fh.read()
|
117 |
+
|
118 |
+
except FileNotFoundError:
|
119 |
+
return None
|
120 |
+
|
121 |
+
def set(self, key, value, expires=None):
|
122 |
+
name = self._fn(key)
|
123 |
+
|
124 |
+
# Make sure the directory exists
|
125 |
+
try:
|
126 |
+
os.makedirs(os.path.dirname(name), self.dirmode)
|
127 |
+
except (IOError, OSError):
|
128 |
+
pass
|
129 |
+
|
130 |
+
with self.lock_class(name) as lock:
|
131 |
+
# Write our actual file
|
132 |
+
with _secure_open_write(lock.path, self.filemode) as fh:
|
133 |
+
fh.write(value)
|
134 |
+
|
135 |
+
def delete(self, key):
|
136 |
+
name = self._fn(key)
|
137 |
+
if not self.forever:
|
138 |
+
try:
|
139 |
+
os.remove(name)
|
140 |
+
except FileNotFoundError:
|
141 |
+
pass
|
142 |
+
|
143 |
+
|
144 |
+
def url_to_file_path(url, filecache):
|
145 |
+
"""Return the file cache path based on the URL.
|
146 |
+
|
147 |
+
This does not ensure the file exists!
|
148 |
+
"""
|
149 |
+
key = CacheController.cache_url(url)
|
150 |
+
return filecache._fn(key)
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from __future__ import division
|
6 |
+
|
7 |
+
from datetime import datetime
|
8 |
+
from pip._vendor.cachecontrol.cache import BaseCache
|
9 |
+
|
10 |
+
|
11 |
+
class RedisCache(BaseCache):
|
12 |
+
|
13 |
+
def __init__(self, conn):
|
14 |
+
self.conn = conn
|
15 |
+
|
16 |
+
def get(self, key):
|
17 |
+
return self.conn.get(key)
|
18 |
+
|
19 |
+
def set(self, key, value, expires=None):
|
20 |
+
if not expires:
|
21 |
+
self.conn.set(key, value)
|
22 |
+
else:
|
23 |
+
expires = expires - datetime.utcnow()
|
24 |
+
self.conn.setex(key, int(expires.total_seconds()), value)
|
25 |
+
|
26 |
+
def delete(self, key):
|
27 |
+
self.conn.delete(key)
|
28 |
+
|
29 |
+
def clear(self):
|
30 |
+
"""Helper for clearing all the keys in a database. Use with
|
31 |
+
caution!"""
|
32 |
+
for key in self.conn.keys():
|
33 |
+
self.conn.delete(key)
|
34 |
+
|
35 |
+
def close(self):
|
36 |
+
"""Redis uses connection pooling, no need to close the connection."""
|
37 |
+
pass
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
try:
|
6 |
+
from urllib.parse import urljoin
|
7 |
+
except ImportError:
|
8 |
+
from urlparse import urljoin
|
9 |
+
|
10 |
+
|
11 |
+
try:
|
12 |
+
import cPickle as pickle
|
13 |
+
except ImportError:
|
14 |
+
import pickle
|
15 |
+
|
16 |
+
# Handle the case where the requests module has been patched to not have
|
17 |
+
# urllib3 bundled as part of its source.
|
18 |
+
try:
|
19 |
+
from pip._vendor.requests.packages.urllib3.response import HTTPResponse
|
20 |
+
except ImportError:
|
21 |
+
from pip._vendor.urllib3.response import HTTPResponse
|
22 |
+
|
23 |
+
try:
|
24 |
+
from pip._vendor.requests.packages.urllib3.util import is_fp_closed
|
25 |
+
except ImportError:
|
26 |
+
from pip._vendor.urllib3.util import is_fp_closed
|
27 |
+
|
28 |
+
# Replicate some six behaviour
|
29 |
+
try:
|
30 |
+
text_type = unicode
|
31 |
+
except NameError:
|
32 |
+
text_type = str
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py
ADDED
@@ -0,0 +1,415 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
"""
|
6 |
+
The httplib2 algorithms ported for use with requests.
|
7 |
+
"""
|
8 |
+
import logging
|
9 |
+
import re
|
10 |
+
import calendar
|
11 |
+
import time
|
12 |
+
from email.utils import parsedate_tz
|
13 |
+
|
14 |
+
from pip._vendor.requests.structures import CaseInsensitiveDict
|
15 |
+
|
16 |
+
from .cache import DictCache
|
17 |
+
from .serialize import Serializer
|
18 |
+
|
19 |
+
|
20 |
+
logger = logging.getLogger(__name__)
|
21 |
+
|
22 |
+
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
|
23 |
+
|
24 |
+
PERMANENT_REDIRECT_STATUSES = (301, 308)
|
25 |
+
|
26 |
+
|
27 |
+
def parse_uri(uri):
|
28 |
+
"""Parses a URI using the regex given in Appendix B of RFC 3986.
|
29 |
+
|
30 |
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
31 |
+
"""
|
32 |
+
groups = URI.match(uri).groups()
|
33 |
+
return (groups[1], groups[3], groups[4], groups[6], groups[8])
|
34 |
+
|
35 |
+
|
36 |
+
class CacheController(object):
|
37 |
+
"""An interface to see if request should cached or not.
|
38 |
+
"""
|
39 |
+
|
40 |
+
def __init__(
|
41 |
+
self, cache=None, cache_etags=True, serializer=None, status_codes=None
|
42 |
+
):
|
43 |
+
self.cache = DictCache() if cache is None else cache
|
44 |
+
self.cache_etags = cache_etags
|
45 |
+
self.serializer = serializer or Serializer()
|
46 |
+
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
|
47 |
+
|
48 |
+
@classmethod
|
49 |
+
def _urlnorm(cls, uri):
|
50 |
+
"""Normalize the URL to create a safe key for the cache"""
|
51 |
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
52 |
+
if not scheme or not authority:
|
53 |
+
raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
|
54 |
+
|
55 |
+
scheme = scheme.lower()
|
56 |
+
authority = authority.lower()
|
57 |
+
|
58 |
+
if not path:
|
59 |
+
path = "/"
|
60 |
+
|
61 |
+
# Could do syntax based normalization of the URI before
|
62 |
+
# computing the digest. See Section 6.2.2 of Std 66.
|
63 |
+
request_uri = query and "?".join([path, query]) or path
|
64 |
+
defrag_uri = scheme + "://" + authority + request_uri
|
65 |
+
|
66 |
+
return defrag_uri
|
67 |
+
|
68 |
+
@classmethod
|
69 |
+
def cache_url(cls, uri):
|
70 |
+
return cls._urlnorm(uri)
|
71 |
+
|
72 |
+
def parse_cache_control(self, headers):
|
73 |
+
known_directives = {
|
74 |
+
# https://tools.ietf.org/html/rfc7234#section-5.2
|
75 |
+
"max-age": (int, True),
|
76 |
+
"max-stale": (int, False),
|
77 |
+
"min-fresh": (int, True),
|
78 |
+
"no-cache": (None, False),
|
79 |
+
"no-store": (None, False),
|
80 |
+
"no-transform": (None, False),
|
81 |
+
"only-if-cached": (None, False),
|
82 |
+
"must-revalidate": (None, False),
|
83 |
+
"public": (None, False),
|
84 |
+
"private": (None, False),
|
85 |
+
"proxy-revalidate": (None, False),
|
86 |
+
"s-maxage": (int, True),
|
87 |
+
}
|
88 |
+
|
89 |
+
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
|
90 |
+
|
91 |
+
retval = {}
|
92 |
+
|
93 |
+
for cc_directive in cc_headers.split(","):
|
94 |
+
if not cc_directive.strip():
|
95 |
+
continue
|
96 |
+
|
97 |
+
parts = cc_directive.split("=", 1)
|
98 |
+
directive = parts[0].strip()
|
99 |
+
|
100 |
+
try:
|
101 |
+
typ, required = known_directives[directive]
|
102 |
+
except KeyError:
|
103 |
+
logger.debug("Ignoring unknown cache-control directive: %s", directive)
|
104 |
+
continue
|
105 |
+
|
106 |
+
if not typ or not required:
|
107 |
+
retval[directive] = None
|
108 |
+
if typ:
|
109 |
+
try:
|
110 |
+
retval[directive] = typ(parts[1].strip())
|
111 |
+
except IndexError:
|
112 |
+
if required:
|
113 |
+
logger.debug(
|
114 |
+
"Missing value for cache-control " "directive: %s",
|
115 |
+
directive,
|
116 |
+
)
|
117 |
+
except ValueError:
|
118 |
+
logger.debug(
|
119 |
+
"Invalid value for cache-control directive " "%s, must be %s",
|
120 |
+
directive,
|
121 |
+
typ.__name__,
|
122 |
+
)
|
123 |
+
|
124 |
+
return retval
|
125 |
+
|
126 |
+
def cached_request(self, request):
    """
    Return a cached response if it exists in the cache, otherwise
    return False.

    *request* is expected to expose ``.url`` and ``.headers``.  The
    return value is either the deserialized cached response or False
    for any miss/bypass condition.
    """
    cache_url = self.cache_url(request.url)
    logger.debug('Looking up "%s" in the cache', cache_url)
    cc = self.parse_cache_control(request.headers)

    # Bail out if the request insists on fresh data
    if "no-cache" in cc:
        logger.debug('Request header has "no-cache", cache bypassed')
        return False

    # max-age=0 on the request is an explicit demand for revalidation.
    if "max-age" in cc and cc["max-age"] == 0:
        logger.debug('Request header has "max_age" as 0, cache bypassed')
        return False

    # Request allows serving from the cache, let's see if we find something
    cache_data = self.cache.get(cache_url)
    if cache_data is None:
        logger.debug("No cache entry available")
        return False

    # Check whether it can be deserialized
    resp = self.serializer.loads(request, cache_data)
    if not resp:
        logger.warning("Cache entry deserialization failed, entry ignored")
        return False

    # If we have a cached permanent redirect, return it immediately. We
    # don't need to test our response for other headers b/c it is
    # intrinsically "cacheable" as it is Permanent.
    #
    # See:
    #   https://tools.ietf.org/html/rfc7231#section-6.4.2
    #
    # Client can try to refresh the value by repeating the request
    # with cache busting headers as usual (ie no-cache).
    if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
        msg = (
            "Returning cached permanent redirect response "
            "(ignoring date and etag information)"
        )
        logger.debug(msg)
        return resp

    headers = CaseInsensitiveDict(resp.headers)
    if not headers or "date" not in headers:
        if "etag" not in headers:
            # Without date or etag, the cached response can never be used
            # and should be deleted.
            logger.debug("Purging cached response: no date or etag")
            self.cache.delete(cache_url)
        # With an etag it is kept: a conditional request can revalidate it.
        logger.debug("Ignoring cached response: no date")
        return False

    now = time.time()
    date = calendar.timegm(parsedate_tz(headers["date"]))
    current_age = max(0, now - date)
    logger.debug("Current age based on date: %i", current_age)

    # TODO: There is an assumption that the result will be a
    #       urllib3 response object. This may not be best since we
    #       could probably avoid instantiating or constructing the
    #       response until we know we need it.
    resp_cc = self.parse_cache_control(headers)

    # determine freshness
    freshness_lifetime = 0

    # Check the max-age pragma in the cache control header
    if "max-age" in resp_cc:
        freshness_lifetime = resp_cc["max-age"]
        logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)

    # If there isn't a max-age, check for an expires header
    elif "expires" in headers:
        expires = parsedate_tz(headers["expires"])
        if expires is not None:
            expire_time = calendar.timegm(expires) - date
            freshness_lifetime = max(0, expire_time)
            logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)

    # Determine if we are setting freshness limit in the
    # request. Note, this overrides what was in the response.
    if "max-age" in cc:
        freshness_lifetime = cc["max-age"]
        logger.debug(
            "Freshness lifetime from request max-age: %i", freshness_lifetime
        )

    if "min-fresh" in cc:
        min_fresh = cc["min-fresh"]
        # adjust our current age by our min fresh
        current_age += min_fresh
        logger.debug("Adjusted current age from min-fresh: %i", current_age)

    # Return entry if it is fresh enough
    if freshness_lifetime > current_age:
        logger.debug('The response is "fresh", returning cached response')
        logger.debug("%i > %i", freshness_lifetime, current_age)
        return resp

    # we're not fresh. If we don't have an Etag, clear it out
    if "etag" not in headers:
        logger.debug('The cached response is "stale" with no etag, purging')
        self.cache.delete(cache_url)

    # return the original handler
    return False
|
237 |
+
|
238 |
+
def conditional_headers(self, request):
    """
    Build the validator headers (If-None-Match / If-Modified-Since) for
    revalidating the cached entry matching *request*, if one exists.
    """
    key = self.cache_url(request.url)
    cached = self.serializer.loads(request, self.cache.get(key))
    conditional = {}

    if not cached:
        return conditional

    cached_headers = CaseInsensitiveDict(cached.headers)

    if "etag" in cached_headers:
        conditional["If-None-Match"] = cached_headers["ETag"]

    if "last-modified" in cached_headers:
        conditional["If-Modified-Since"] = cached_headers["Last-Modified"]

    return conditional
|
253 |
+
|
254 |
+
def cache_response(self, request, response, body=None, status_codes=None):
    """
    Algorithm for caching requests.

    This assumes a requests Response object.  *body* is the response
    body when already read by the caller; *status_codes* optionally
    overrides the controller's cacheable status set.
    """
    # From httplib2: Don't cache 206's since we aren't going to
    # handle byte range requests
    cacheable_status_codes = status_codes or self.cacheable_status_codes
    if response.status not in cacheable_status_codes:
        logger.debug(
            "Status code %s not in %s", response.status, cacheable_status_codes
        )
        return

    response_headers = CaseInsensitiveDict(response.headers)

    if "date" in response_headers:
        date = calendar.timegm(parsedate_tz(response_headers["date"]))
    else:
        date = 0

    # If we've been given a body, our response has a Content-Length, that
    # Content-Length is valid then we can check to see if the body we've
    # been given matches the expected size, and if it doesn't we'll just
    # skip trying to cache it.
    if (
        body is not None
        and "content-length" in response_headers
        and response_headers["content-length"].isdigit()
        and int(response_headers["content-length"]) != len(body)
    ):
        return

    cc_req = self.parse_cache_control(request.headers)
    cc = self.parse_cache_control(response_headers)

    cache_url = self.cache_url(request.url)
    logger.debug('Updating cache with response from "%s"', cache_url)

    # Delete it from the cache if we happen to have it stored there
    no_store = False
    if "no-store" in cc:
        no_store = True
        logger.debug('Response header has "no-store"')
    if "no-store" in cc_req:
        no_store = True
        logger.debug('Request header has "no-store"')
    if no_store and self.cache.get(cache_url):
        logger.debug('Purging existing cache entry to honor "no-store"')
        self.cache.delete(cache_url)
    if no_store:
        return

    # https://tools.ietf.org/html/rfc7234#section-4.1:
    # A Vary header field-value of "*" always fails to match.
    # Storing such a response leads to a deserialization warning
    # during cache lookup and is not allowed to ever be served,
    # so storing it can be avoided.
    if "*" in response_headers.get("vary", ""):
        logger.debug('Response header has "Vary: *"')
        return

    # If we've been given an etag, then keep the response
    if self.cache_etags and "etag" in response_headers:
        expires_time = 0
        if response_headers.get("expires"):
            expires = parsedate_tz(response_headers["expires"])
            if expires is not None:
                expires_time = calendar.timegm(expires) - date

        # Keep etag'd entries for at least 14 days so they stay
        # revalidatable even after their Expires has passed.
        expires_time = max(expires_time, 14 * 86400)

        logger.debug("etag object cached for {0} seconds".format(expires_time))
        logger.debug("Caching due to etag")
        self.cache.set(
            cache_url,
            self.serializer.dumps(request, response, body),
            expires=expires_time,
        )

    # Add to the cache any permanent redirects. We do this before looking
    # that the Date headers.
    elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
        logger.debug("Caching permanent redirect")
        self.cache.set(cache_url, self.serializer.dumps(request, response, b""))

    # Add to the cache if the response headers demand it. If there
    # is no date header then we can't do anything about expiring
    # the cache.
    elif "date" in response_headers:
        date = calendar.timegm(parsedate_tz(response_headers["date"]))
        # cache when there is a max-age > 0
        if "max-age" in cc and cc["max-age"] > 0:
            logger.debug("Caching b/c date exists and max-age > 0")
            expires_time = cc["max-age"]
            self.cache.set(
                cache_url,
                self.serializer.dumps(request, response, body),
                expires=expires_time,
            )

        # If the request can expire, it means we should cache it
        # in the meantime.
        elif "expires" in response_headers:
            if response_headers["expires"]:
                expires = parsedate_tz(response_headers["expires"])
                if expires is not None:
                    expires_time = calendar.timegm(expires) - date
                else:
                    # Unparseable Expires: store with no expiry time.
                    expires_time = None

                logger.debug(
                    "Caching b/c of expires header. expires in {0} seconds".format(
                        expires_time
                    )
                )
                self.cache.set(
                    cache_url,
                    self.serializer.dumps(request, response, body=body),
                    expires=expires_time,
                )
|
376 |
+
|
377 |
+
def update_cached_response(self, request, response):
    """On a 304 we will get a new set of headers that we want to
    update our cached value with, assuming we have one.

    This should only ever be called when we've sent an ETag and
    gotten a 304 as the response.
    """
    key = self.cache_url(request.url)

    stored = self.serializer.loads(request, self.cache.get(key))

    if not stored:
        # Nothing cached to refresh; hand the 304 back untouched.
        return response

    # Merge the 304's headers over the cached ones, per
    # http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
    #
    # The server isn't supposed to send headers that would make
    # the cached body invalid, but just in case we strip out ones
    # known to be problematic under typical assumptions.
    skip = ["content-length"]

    merged = {
        name: value
        for name, value in response.headers.items()
        if name.lower() not in skip
    }
    stored.headers.update(merged)

    # Present the refreshed entry as a 200, since we serve the body
    # from the cache.
    stored.status = 200

    # Persist the refreshed entry.
    self.cache.set(key, self.serializer.dumps(request, stored))

    return stored
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from tempfile import NamedTemporaryFile
|
6 |
+
import mmap
|
7 |
+
|
8 |
+
|
9 |
+
class CallbackFileWrapper(object):
    """
    Small wrapper around a fp object which will tee everything read into a
    buffer, and when that file is closed it will execute a callback with the
    contents of that buffer.

    All attributes are proxied to the underlying file object.

    This class uses members with a double underscore (__) leading prefix so as
    not to accidentally shadow an attribute.

    The data is stored in a temporary file until it is all available.  As long
    as the temporary files directory is disk-based (sometimes it's a
    memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory
    pressure is high.  For small files the disk usually won't be used at all,
    it'll all be in the filesystem memory cache, so there should be no
    performance impact.
    """

    def __init__(self, fp, callback):
        # __buf tees everything read from fp; its contents are handed
        # to the callback once fp is exhausted.
        self.__buf = NamedTemporaryFile("rb+", delete=True)
        self.__fp = fp
        self.__callback = callback

    def __getattr__(self, name):
        # The vagaries of garbage collection mean that self.__fp is
        # not always set.  Using __getattribute__ with the mangled
        # private name [0] looks up the attribute value and raises an
        # AttributeError when it doesn't exist.  This stops things from
        # infinitely recursing calls to getattr in the case where
        # self.__fp hasn't been set.
        #
        # [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
        fp = self.__getattribute__("_CallbackFileWrapper__fp")
        return getattr(fp, name)

    def __is_fp_closed(self):
        # Best-effort check of whether the wrapped fp has been exhausted;
        # different fp flavors expose the information differently.
        try:
            # e.g. http.client-style responses set .fp to None on close.
            return self.__fp.fp is None

        except AttributeError:
            pass

        try:
            # Plain file-like objects expose .closed.
            return self.__fp.closed

        except AttributeError:
            pass

        # We just don't cache it then.
        # TODO: Add some logging here...
        return False

    def _close(self):
        # Deliver the buffered body to the callback (once) and release
        # the temporary buffer.
        if self.__callback:
            if self.__buf.tell() == 0:
                # Empty file:
                result = b""
            else:
                # Return the data without actually loading it into memory,
                # relying on Python's buffer API and mmap(). mmap() just gives
                # a view directly into the filesystem's memory cache, so it
                # doesn't result in duplicate memory use.
                self.__buf.seek(0, 0)
                result = memoryview(
                    mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
                )
            self.__callback(result)

        # We assign this to None here, because otherwise we can get into
        # really tricky problems where the CPython interpreter dead locks
        # because the callback is holding a reference to something which
        # has a __del__ method. Setting this to None breaks the cycle
        # and allows the garbage collector to do it's thing normally.
        self.__callback = None

        # Closing the temporary file releases memory and frees disk space.
        # Important when caching big files.
        self.__buf.close()

    def read(self, amt=None):
        # Read from the wrapped fp, teeing the data into the buffer,
        # and fire the callback once the fp is exhausted.
        data = self.__fp.read(amt)
        if data:
            # We may be dealing with b'', a sign that things are over:
            # it's passed e.g. after we've already closed self.__buf.
            self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data

    def _safe_read(self, amt):
        # Proxy for http.client's _safe_read, teeing everything except
        # the chunk-terminating CRLF.
        data = self.__fp._safe_read(amt)
        if amt == 2 and data == b"\r\n":
            # urllib executes this read to toss the CRLF at the end
            # of the chunk.
            return data

        self.__buf.write(data)
        if self.__is_fp_closed():
            self._close()

        return data
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py
ADDED
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import calendar
|
6 |
+
import time
|
7 |
+
|
8 |
+
from email.utils import formatdate, parsedate, parsedate_tz
|
9 |
+
|
10 |
+
from datetime import datetime, timedelta
|
11 |
+
|
12 |
+
# IMF-fixdate format (always GMT) used when emitting synthetic Expires headers.
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
13 |
+
|
14 |
+
|
15 |
+
def expire_after(delta, date=None):
    """Return *date* (defaulting to the current UTC time) advanced by *delta*."""
    base = date or datetime.utcnow()
    return base + delta
|
18 |
+
|
19 |
+
|
20 |
+
def datetime_to_header(dt):
    """Render the datetime *dt* (treated as UTC) as an RFC 2822 date string."""
    epoch_seconds = calendar.timegm(dt.timetuple())
    return formatdate(epoch_seconds)
|
22 |
+
|
23 |
+
|
24 |
+
class BaseHeuristic(object):
    """Base class for heuristics that synthesize caching headers."""

    def warning(self, response):
        """
        Return a valid 1xx warning header value describing the cache
        adjustments.

        The response is provided too allow warnings like 113
        http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
        to explicitly say response is over 24 hours old.
        """
        return '110 - "Response is Stale"'

    def update_headers(self, response):
        """Update the response headers with any new headers.

        NOTE: This SHOULD always include some Warning header to
              signify that the response was cached by the client, not
              by way of the provided headers.
        """
        return {}

    def apply(self, response):
        """Merge this heuristic's headers (plus any Warning) into *response*."""
        extra_headers = self.update_headers(response)

        if not extra_headers:
            return response

        response.headers.update(extra_headers)
        warning_value = self.warning(response)
        if warning_value is not None:
            response.headers.update({"Warning": warning_value})

        return response
|
56 |
+
|
57 |
+
|
58 |
+
class OneDayCache(BaseHeuristic):
    """
    Cache the response by providing an expires 1 day in the
    future.
    """

    def update_headers(self, response):
        # Respect an explicit Expires header if the server sent one.
        if "expires" in response.headers:
            return {}

        served_at = parsedate(response.headers["date"])
        one_day_later = expire_after(timedelta(days=1), date=datetime(*served_at[:6]))
        return {
            "expires": datetime_to_header(one_day_later),
            "cache-control": "public",
        }
|
73 |
+
|
74 |
+
|
75 |
+
class ExpiresAfter(BaseHeuristic):
    """
    Cache **all** requests for a defined time period.
    """

    def __init__(self, **kw):
        # Keyword arguments go straight to datetime.timedelta,
        # e.g. ExpiresAfter(days=1, hours=12).
        self.delta = timedelta(**kw)

    def update_headers(self, response):
        return {
            "expires": datetime_to_header(expire_after(self.delta)),
            "cache-control": "public",
        }

    def warning(self, response):
        template = "110 - Automatically cached for %s. Response might be stale"
        return template % self.delta
|
90 |
+
|
91 |
+
|
92 |
+
class LastModified(BaseHeuristic):
    """
    If there is no Expires header already, fall back on Last-Modified
    using the heuristic from
    http://tools.ietf.org/html/rfc7234#section-4.2.2
    to calculate a reasonable value.

    Firefox also does something like this per
    https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
    http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike mozilla we limit this to 24-hr.
    """
    # Status codes this heuristic will synthesize an Expires for.
    cacheable_by_default_statuses = {
        200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
    }

    def update_headers(self, resp):
        # Synthesize an Expires header from Last-Modified when it is
        # safe to do so; return {} to leave the response untouched.
        headers = resp.headers

        # An explicit Expires wins; nothing to synthesize.
        if "expires" in headers:
            return {}

        # Only guess for responses that are plainly public.
        if "cache-control" in headers and headers["cache-control"] != "public":
            return {}

        if resp.status not in self.cacheable_by_default_statuses:
            return {}

        if "date" not in headers or "last-modified" not in headers:
            return {}

        date = calendar.timegm(parsedate_tz(headers["date"]))
        last_modified = parsedate(headers["last-modified"])
        # NOTE(review): calendar.timegm raises if parsedate_tz returned None,
        # so the "date is None" half of this check looks unreachable — confirm
        # the Date header is validated upstream.
        if date is None or last_modified is None:
            return {}

        now = time.time()
        current_age = max(0, now - date)
        # Heuristic freshness: 10% of the Date/Last-Modified gap, capped at 24h.
        delta = date - calendar.timegm(last_modified)
        freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
        if freshness_lifetime <= current_age:
            return {}

        expires = date + freshness_lifetime
        return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}

    def warning(self, resp):
        # Heuristic expiration is deliberately silent: no Warning header.
        return None
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py
ADDED
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import base64
|
6 |
+
import io
|
7 |
+
import json
|
8 |
+
import zlib
|
9 |
+
|
10 |
+
from pip._vendor import msgpack
|
11 |
+
from pip._vendor.requests.structures import CaseInsensitiveDict
|
12 |
+
|
13 |
+
from .compat import HTTPResponse, pickle, text_type
|
14 |
+
|
15 |
+
|
16 |
+
def _b64_decode_bytes(b):
|
17 |
+
return base64.b64decode(b.encode("ascii"))
|
18 |
+
|
19 |
+
|
20 |
+
def _b64_decode_str(s):
|
21 |
+
return _b64_decode_bytes(s).decode("utf8")
|
22 |
+
|
23 |
+
|
24 |
+
# Sentinel distinguishing "body not supplied" from an explicit None/empty body.
_default_body_read = object()
|
25 |
+
|
26 |
+
|
27 |
+
class Serializer(object):
    """
    Serialize and deserialize cached (request, response) pairs.

    Cache entries are a ``b"cc=N,"`` version prefix followed by a
    version-specific payload; this writer emits v4 (msgpack) and can read
    v1 (pickle), v2 (zlib'd json) and v4.  v0 and v3 are treated as misses.
    """

    def dumps(self, request, response, body=None):
        # Serialize *response* (with its *body*, read here if not given)
        # into bytes suitable for storage in the cache.
        response_headers = CaseInsensitiveDict(response.headers)

        if body is None:
            # When a body isn't passed in, we'll read the response. We
            # also update the response with a new file handler to be
            # sure it acts as though it was never read.
            body = response.read(decode_content=False)
            response._fp = io.BytesIO(body)

        # NOTE: This is all a bit weird, but it's really important that on
        #       Python 2.x these objects are unicode and not str, even when
        #       they contain only ascii. The problem here is that msgpack
        #       understands the difference between unicode and bytes and we
        #       have it set to differentiate between them, however Python 2
        #       doesn't know the difference. Forcing these to unicode will be
        #       enough to have msgpack know the difference.
        data = {
            u"response": {
                u"body": body,
                u"headers": dict(
                    (text_type(k), text_type(v)) for k, v in response.headers.items()
                ),
                u"status": response.status,
                u"version": response.version,
                u"reason": text_type(response.reason),
                u"strict": response.strict,
                u"decode_content": response.decode_content,
            }
        }

        # Construct our vary headers
        data[u"vary"] = {}
        if u"vary" in response_headers:
            varied_headers = response_headers[u"vary"].split(",")
            for header in varied_headers:
                header = text_type(header).strip()
                header_value = request.headers.get(header, None)
                if header_value is not None:
                    header_value = text_type(header_value)
                data[u"vary"][header] = header_value

        return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])

    def loads(self, request, data):
        # Deserialize cached *data* for *request*; returns None (a miss)
        # for empty data or an unknown serialization version.

        # Short circuit if we've been given an empty set of data
        if not data:
            return

        # Determine what version of the serializer the data was serialized
        # with
        try:
            ver, data = data.split(b",", 1)
        except ValueError:
            ver = b"cc=0"

        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a , being in the data stream.
        if ver[:3] != b"cc=":
            data = ver + data
            ver = b"cc=0"

        # Get the version number out of the cc=N
        ver = ver.split(b"=", 1)[-1].decode("ascii")

        # Dispatch to the actual load method for the given version
        try:
            return getattr(self, "_loads_v{}".format(ver))(request, data)

        except AttributeError:
            # This is a version we don't have a loads function for, so we'll
            # just treat it as a miss and return None
            return

    def prepare_response(self, request, cached):
        """Verify our vary headers match and construct a real urllib3
        HTTPResponse object.
        """
        # Special case the '*' Vary value as it means we cannot actually
        # determine if the cached response is suitable for this request.
        # This case is also handled in the controller code when creating
        # a cache entry, but is left here for backwards compatibility.
        if "*" in cached.get("vary", {}):
            return

        # Ensure that the Vary headers for the cached response match our
        # request
        for header, value in cached.get("vary", {}).items():
            if request.headers.get(header, None) != value:
                return

        body_raw = cached["response"].pop("body")

        headers = CaseInsensitiveDict(data=cached["response"]["headers"])
        # The stored body is already de-chunked; the header would be a lie.
        if headers.get("transfer-encoding", "") == "chunked":
            headers.pop("transfer-encoding")

        cached["response"]["headers"] = headers

        try:
            body = io.BytesIO(body_raw)
        except TypeError:
            # This can happen if cachecontrol serialized to v1 format (pickle)
            # using Python 2. A Python 2 str(byte string) will be unpickled as
            # a Python 3 str (unicode string), which will cause the above to
            # fail with:
            #
            #     TypeError: 'str' does not support the buffer interface
            body = io.BytesIO(body_raw.encode("utf8"))

        return HTTPResponse(body=body, preload_content=False, **cached["response"])

    def _loads_v0(self, request, data):
        # The original legacy cache data. This doesn't contain enough
        # information to construct everything we need, so we'll treat this as
        # a miss.
        return

    def _loads_v1(self, request, data):
        # NOTE(review): pickle.loads on cache data; entries are assumed to
        # come from a trusted local cache — confirm that assumption holds.
        try:
            cached = pickle.loads(data)
        except ValueError:
            return

        return self.prepare_response(request, cached)

    def _loads_v2(self, request, data):
        # v2 entries are zlib-compressed JSON with base64'd text fields.
        try:
            cached = json.loads(zlib.decompress(data).decode("utf8"))
        except (ValueError, zlib.error):
            return

        # We need to decode the items that we've base64 encoded
        cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
        cached["response"]["headers"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v))
            for k, v in cached["response"]["headers"].items()
        )
        cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
        cached["vary"] = dict(
            (_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
            for k, v in cached["vary"].items()
        )

        return self.prepare_response(request, cached)

    def _loads_v3(self, request, data):
        # Due to Python 2 encoding issues, it's impossible to know for sure
        # exactly how to load v3 entries, thus we'll treat these as a miss so
        # that they get rewritten out as v4 entries.
        return

    def _loads_v4(self, request, data):
        try:
            cached = msgpack.loads(data, raw=False)
        except ValueError:
            return

        return self.prepare_response(request, cached)
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py
ADDED
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from .adapter import CacheControlAdapter
|
6 |
+
from .cache import DictCache
|
7 |
+
|
8 |
+
|
9 |
+
def CacheControl(
    sess,
    cache=None,
    cache_etags=True,
    serializer=None,
    heuristic=None,
    controller_class=None,
    adapter_class=None,
    cacheable_methods=None,
):
    """
    Wrap the requests session *sess* with a caching adapter mounted on
    both http:// and https://, and return the same session.
    """
    if cache is None:
        cache = DictCache()

    chosen_adapter_class = adapter_class or CacheControlAdapter
    caching_adapter = chosen_adapter_class(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
        controller_class=controller_class,
        cacheable_methods=cacheable_methods,
    )

    for scheme in ("http://", "https://"):
        sess.mount(scheme, caching_adapter)

    return sess
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/codingstatemachine.py
ADDED
@@ -0,0 +1,88 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is mozilla.org code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
import logging
|
29 |
+
|
30 |
+
from .enums import MachineState
|
31 |
+
|
32 |
+
|
33 |
+
class CodingStateMachine(object):
    """
    A state machine to verify a byte sequence for a particular encoding. For
    each byte the detector receives, it will feed that byte to every active
    state machine available, one byte at a time. The state machine changes its
    state based on its previous state and the byte it receives. There are 3
    states in a state machine that are of interest to an auto-detector:

    START state: This is the state to start with, or a legal byte sequence
                 (i.e. a valid code point) for character has been identified.

    ME state:  This indicates that the state machine identified a byte sequence
               that is specific to the charset it is designed for and that
               there is no other possible encoding which can contain this byte
               sequence. This will to lead to an immediate positive answer for
               the detector.

    ERROR state: This indicates the state machine identified an illegal byte
                 sequence for that encoding. This will lead to an immediate
                 negative answer for this encoding. Detector will exclude this
                 encoding from consideration from here on.
    """

    def __init__(self, sm):
        # ``sm`` is a dict-like model with 'class_table', 'char_len_table',
        # 'class_factor', 'state_table', 'name' and 'language' entries.
        self._model = sm
        self._curr_byte_pos = 0
        self._curr_char_len = 0
        self._curr_state = None
        self.logger = logging.getLogger(__name__)
        self.reset()

    def reset(self):
        """Return the machine to its START state."""
        self._curr_state = MachineState.START

    def next_state(self, c):
        """Feed one byte ``c`` and return the resulting machine state."""
        # Classify the incoming byte; the class also indexes the length table.
        byte_class = self._model['class_table'][c]
        if self._curr_state == MachineState.START:
            # First byte of a new sequence: remember its expected length.
            self._curr_byte_pos = 0
            self._curr_char_len = self._model['char_len_table'][byte_class]
        # The transition table is flattened: row = state, column = byte class.
        transition_index = (self._curr_state * self._model['class_factor']
                            + byte_class)
        self._curr_state = self._model['state_table'][transition_index]
        self._curr_byte_pos += 1
        return self._curr_state

    def get_current_charlen(self):
        """Byte length of the character currently being decoded."""
        return self._curr_char_len

    def get_coding_state_machine(self):
        """Name of the encoding this machine models."""
        return self._model['name']

    @property
    def language(self):
        return self._model['language']
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/cp949prober.py
ADDED
@@ -0,0 +1,49 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is mozilla.org code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
from .chardistribution import EUCKRDistributionAnalysis
|
29 |
+
from .codingstatemachine import CodingStateMachine
|
30 |
+
from .mbcharsetprober import MultiByteCharSetProber
|
31 |
+
from .mbcssm import CP949_SM_MODEL
|
32 |
+
|
33 |
+
|
34 |
+
class CP949Prober(MultiByteCharSetProber):
    """Prober for the CP949 (Unified Hangul Code) Korean encoding."""

    def __init__(self):
        super(CP949Prober, self).__init__()
        self.coding_sm = CodingStateMachine(CP949_SM_MODEL)
        # CP949 is a superset of EUC-KR, so the EUC-KR character
        # distribution analysis applies to it unchanged.
        self.distribution_analyzer = EUCKRDistributionAnalysis()
        self.reset()

    @property
    def charset_name(self):
        return "CP949"

    @property
    def language(self):
        return "Korean"
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/escprober.py
ADDED
@@ -0,0 +1,101 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is mozilla.org code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Netscape Communications Corporation.
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 1998
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
from .charsetprober import CharSetProber
|
29 |
+
from .codingstatemachine import CodingStateMachine
|
30 |
+
from .enums import LanguageFilter, ProbingState, MachineState
|
31 |
+
from .escsm import (HZ_SM_MODEL, ISO2022CN_SM_MODEL, ISO2022JP_SM_MODEL,
|
32 |
+
ISO2022KR_SM_MODEL)
|
33 |
+
|
34 |
+
|
35 |
+
class EscCharSetProber(CharSetProber):
    """
    This CharSetProber uses a "code scheme" approach for detecting encodings,
    whereby easily recognizable escape or shift sequences are relied on to
    identify these encodings.
    """

    def __init__(self, lang_filter=None):
        super(EscCharSetProber, self).__init__(lang_filter=lang_filter)
        # One state machine per escape-sequence encoding we care about,
        # chosen according to the language filter.
        self.coding_sm = []
        if self.lang_filter & LanguageFilter.CHINESE_SIMPLIFIED:
            self.coding_sm.append(CodingStateMachine(HZ_SM_MODEL))
            self.coding_sm.append(CodingStateMachine(ISO2022CN_SM_MODEL))
        if self.lang_filter & LanguageFilter.JAPANESE:
            self.coding_sm.append(CodingStateMachine(ISO2022JP_SM_MODEL))
        if self.lang_filter & LanguageFilter.KOREAN:
            self.coding_sm.append(CodingStateMachine(ISO2022KR_SM_MODEL))
        self.active_sm_count = None
        self._detected_charset = None
        self._detected_language = None
        self._state = None
        self.reset()

    def reset(self):
        """Reactivate and reset every state machine and clear results."""
        super(EscCharSetProber, self).reset()
        for state_machine in self.coding_sm:
            if state_machine:
                state_machine.active = True
                state_machine.reset()
        self.active_sm_count = len(self.coding_sm)
        self._detected_charset = None
        self._detected_language = None

    @property
    def charset_name(self):
        return self._detected_charset

    @property
    def language(self):
        return self._detected_language

    def get_confidence(self):
        # An escape-sequence match is essentially unambiguous.
        return 0.99 if self._detected_charset else 0.00

    def feed(self, byte_str):
        """Feed bytes to every active machine; stop on a definite answer."""
        for byte in byte_str:
            for state_machine in self.coding_sm:
                if not state_machine or not state_machine.active:
                    continue
                coding_state = state_machine.next_state(byte)
                if coding_state == MachineState.ERROR:
                    # This encoding is ruled out; deactivate its machine.
                    state_machine.active = False
                    self.active_sm_count -= 1
                    if self.active_sm_count <= 0:
                        self._state = ProbingState.NOT_ME
                        return self.state
                elif coding_state == MachineState.ITS_ME:
                    # Definite hit: record charset/language and stop.
                    self._state = ProbingState.FOUND_IT
                    self._detected_charset = \
                        state_machine.get_coding_state_machine()
                    self._detected_language = state_machine.language
                    return self.state

        return self.state
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/hebrewprober.py
ADDED
@@ -0,0 +1,292 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
######################## BEGIN LICENSE BLOCK ########################
|
2 |
+
# The Original Code is Mozilla Universal charset detector code.
|
3 |
+
#
|
4 |
+
# The Initial Developer of the Original Code is
|
5 |
+
# Shy Shalom
|
6 |
+
# Portions created by the Initial Developer are Copyright (C) 2005
|
7 |
+
# the Initial Developer. All Rights Reserved.
|
8 |
+
#
|
9 |
+
# Contributor(s):
|
10 |
+
# Mark Pilgrim - port to Python
|
11 |
+
#
|
12 |
+
# This library is free software; you can redistribute it and/or
|
13 |
+
# modify it under the terms of the GNU Lesser General Public
|
14 |
+
# License as published by the Free Software Foundation; either
|
15 |
+
# version 2.1 of the License, or (at your option) any later version.
|
16 |
+
#
|
17 |
+
# This library is distributed in the hope that it will be useful,
|
18 |
+
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
19 |
+
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
20 |
+
# Lesser General Public License for more details.
|
21 |
+
#
|
22 |
+
# You should have received a copy of the GNU Lesser General Public
|
23 |
+
# License along with this library; if not, write to the Free Software
|
24 |
+
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
25 |
+
# 02110-1301 USA
|
26 |
+
######################### END LICENSE BLOCK #########################
|
27 |
+
|
28 |
+
from .charsetprober import CharSetProber
|
29 |
+
from .enums import ProbingState
|
30 |
+
|
31 |
+
# This prober doesn't actually recognize a language or a charset.
|
32 |
+
# It is a helper prober for the use of the Hebrew model probers
|
33 |
+
|
34 |
+
### General ideas of the Hebrew charset recognition ###
|
35 |
+
#
|
36 |
+
# Four main charsets exist in Hebrew:
|
37 |
+
# "ISO-8859-8" - Visual Hebrew
|
38 |
+
# "windows-1255" - Logical Hebrew
|
39 |
+
# "ISO-8859-8-I" - Logical Hebrew
|
40 |
+
# "x-mac-hebrew" - ?? Logical Hebrew ??
|
41 |
+
#
|
42 |
+
# Both "ISO" charsets use a completely identical set of code points, whereas
|
43 |
+
# "windows-1255" and "x-mac-hebrew" are two different proper supersets of
|
44 |
+
# these code points. windows-1255 defines additional characters in the range
|
45 |
+
# 0x80-0x9F as some misc punctuation marks as well as some Hebrew-specific
|
46 |
+
# diacritics and additional 'Yiddish' ligature letters in the range 0xc0-0xd6.
|
47 |
+
# x-mac-hebrew defines similar additional code points but with a different
|
48 |
+
# mapping.
|
49 |
+
#
|
50 |
+
# As far as an average Hebrew text with no diacritics is concerned, all four
|
51 |
+
# charsets are identical with respect to code points. Meaning that for the
|
52 |
+
# main Hebrew alphabet, all four map the same values to all 27 Hebrew letters
|
53 |
+
# (including final letters).
|
54 |
+
#
|
55 |
+
# The dominant difference between these charsets is their directionality.
|
56 |
+
# "Visual" directionality means that the text is ordered as if the renderer is
|
57 |
+
# not aware of a BIDI rendering algorithm. The renderer sees the text and
|
58 |
+
# draws it from left to right. The text itself when ordered naturally is read
|
59 |
+
# backwards. A buffer of Visual Hebrew generally looks like so:
|
60 |
+
# "[last word of first line spelled backwards] [whole line ordered backwards
|
61 |
+
# and spelled backwards] [first word of first line spelled backwards]
|
62 |
+
# [end of line] [last word of second line] ... etc' "
|
63 |
+
# adding punctuation marks, numbers and English text to visual text is
|
64 |
+
# naturally also "visual" and from left to right.
|
65 |
+
#
|
66 |
+
# "Logical" directionality means the text is ordered "naturally" according to
|
67 |
+
# the order it is read. It is the responsibility of the renderer to display
|
68 |
+
# the text from right to left. A BIDI algorithm is used to place general
|
69 |
+
# punctuation marks, numbers and English text in the text.
|
70 |
+
#
|
71 |
+
# Texts in x-mac-hebrew are almost impossible to find on the Internet. From
|
72 |
+
# what little evidence I could find, it seems that its general directionality
|
73 |
+
# is Logical.
|
74 |
+
#
|
75 |
+
# To sum up all of the above, the Hebrew probing mechanism knows about two
|
76 |
+
# charsets:
|
77 |
+
# Visual Hebrew - "ISO-8859-8" - backwards text - Words and sentences are
|
78 |
+
# backwards while line order is natural. For charset recognition purposes
|
79 |
+
# the line order is unimportant (In fact, for this implementation, even
|
80 |
+
# word order is unimportant).
|
81 |
+
# Logical Hebrew - "windows-1255" - normal, naturally ordered text.
|
82 |
+
#
|
83 |
+
# "ISO-8859-8-I" is a subset of windows-1255 and doesn't need to be
|
84 |
+
# specifically identified.
|
85 |
+
# "x-mac-hebrew" is also identified as windows-1255. A text in x-mac-hebrew
|
86 |
+
# that contain special punctuation marks or diacritics is displayed with
|
87 |
+
# some unconverted characters showing as question marks. This problem might
|
88 |
+
# be corrected using another model prober for x-mac-hebrew. Due to the fact
|
89 |
+
# that x-mac-hebrew texts are so rare, writing another model prober isn't
|
90 |
+
# worth the effort and performance hit.
|
91 |
+
#
|
92 |
+
#### The Prober ####
|
93 |
+
#
|
94 |
+
# The prober is divided between two SBCharSetProbers and a HebrewProber,
|
95 |
+
# all of which are managed, created, fed data, inquired and deleted by the
|
96 |
+
# SBCSGroupProber. The two SBCharSetProbers identify that the text is in
|
97 |
+
# fact some kind of Hebrew, Logical or Visual. The final decision about which
|
98 |
+
# one is it is made by the HebrewProber by combining final-letter scores
|
99 |
+
# with the scores of the two SBCharSetProbers to produce a final answer.
|
100 |
+
#
|
101 |
+
# The SBCSGroupProber is responsible for stripping the original text of HTML
|
102 |
+
# tags, English characters, numbers, low-ASCII punctuation characters, spaces
|
103 |
+
# and new lines. It reduces any sequence of such characters to a single space.
|
104 |
+
# The buffer fed to each prober in the SBCS group prober is pure text in
|
105 |
+
# high-ASCII.
|
106 |
+
# The two SBCharSetProbers (model probers) share the same language model:
|
107 |
+
# Win1255Model.
|
108 |
+
# The first SBCharSetProber uses the model normally as any other
|
109 |
+
# SBCharSetProber does, to recognize windows-1255, upon which this model was
|
110 |
+
# built. The second SBCharSetProber is told to make the pair-of-letter
|
111 |
+
# lookup in the language model backwards. This in practice exactly simulates
|
112 |
+
# a visual Hebrew model using the windows-1255 logical Hebrew model.
|
113 |
+
#
|
114 |
+
# The HebrewProber is not using any language model. All it does is look for
|
115 |
+
# final-letter evidence suggesting the text is either logical Hebrew or visual
|
116 |
+
# Hebrew. Disjointed from the model probers, the results of the HebrewProber
|
117 |
+
# alone are meaningless. HebrewProber always returns 0.00 as confidence
|
118 |
+
# since it never identifies a charset by itself. Instead, the pointer to the
|
119 |
+
# HebrewProber is passed to the model probers as a helper "Name Prober".
|
120 |
+
# When the Group prober receives a positive identification from any prober,
|
121 |
+
# it asks for the name of the charset identified. If the prober queried is a
|
122 |
+
# Hebrew model prober, the model prober forwards the call to the
|
123 |
+
# HebrewProber to make the final decision. In the HebrewProber, the
|
124 |
+
# decision is made according to the final-letters scores maintained and Both
|
125 |
+
# model probers scores. The answer is returned in the form of the name of the
|
126 |
+
# charset identified, either "windows-1255" or "ISO-8859-8".
|
127 |
+
|
128 |
+
class HebrewProber(CharSetProber):
    """Helper prober deciding between Logical (windows-1255) and Visual
    (ISO-8859-8) Hebrew by scoring final-letter positions.

    It never identifies a charset by itself (confidence is always 0.00);
    the two SBCS model probers forward their charset_name query here.
    """

    # windows-1255 / ISO-8859-8 code points of interest
    FINAL_KAF = 0xea
    NORMAL_KAF = 0xeb
    FINAL_MEM = 0xed
    NORMAL_MEM = 0xee
    FINAL_NUN = 0xef
    NORMAL_NUN = 0xf0
    FINAL_PE = 0xf3
    NORMAL_PE = 0xf4
    FINAL_TSADI = 0xf5
    NORMAL_TSADI = 0xf6

    # Minimum Visual vs Logical final letter score difference.
    # If the difference is below this, don't rely solely on the final letter score
    # distance.
    MIN_FINAL_CHAR_DISTANCE = 5

    # Minimum Visual vs Logical model score difference.
    # If the difference is below this, don't rely at all on the model score
    # distance.
    MIN_MODEL_DISTANCE = 0.01

    VISUAL_HEBREW_NAME = "ISO-8859-8"
    LOGICAL_HEBREW_NAME = "windows-1255"

    def __init__(self):
        super(HebrewProber, self).__init__()
        self._final_char_logical_score = None
        self._final_char_visual_score = None
        self._prev = None
        self._before_prev = None
        self._logical_prober = None
        self._visual_prober = None
        self.reset()

    def reset(self):
        """Clear accumulated final-letter scores and the 2-char context."""
        self._final_char_logical_score = 0
        self._final_char_visual_score = 0
        # The two last characters seen in the previous buffer,
        # mPrev and mBeforePrev are initialized to space in order to simulate
        # a word delimiter at the beginning of the data
        self._prev = ' '
        self._before_prev = ' '
        # These probers are owned by the group prober.

    def set_model_probers(self, logicalProber, visualProber):
        """Attach the two SBCS model probers whose scores break ties."""
        self._logical_prober = logicalProber
        self._visual_prober = visualProber

    def is_final(self, c):
        """True if ``c`` is a final-form Hebrew letter."""
        return c in [self.FINAL_KAF, self.FINAL_MEM, self.FINAL_NUN,
                     self.FINAL_PE, self.FINAL_TSADI]

    def is_non_final(self, c):
        """True if ``c`` is a non-final letter that should not end a word."""
        # The normal Tsadi is not a good Non-Final letter due to words like
        # 'lechotet' (to chat) containing an apostrophe after the tsadi. This
        # apostrophe is converted to a space in FilterWithoutEnglishLetters
        # causing the Non-Final tsadi to appear at an end of a word even
        # though this is not the case in the original text.
        # The letters Pe and Kaf rarely display a related behavior of not being
        # a good Non-Final letter. Words like 'Pop', 'Winamp' and 'Mubarak'
        # for example legally end with a Non-Final Pe or Kaf. However, the
        # benefit of these letters as Non-Final letters outweighs the damage
        # since these words are quite rare.
        return c in [self.NORMAL_KAF, self.NORMAL_MEM,
                     self.NORMAL_NUN, self.NORMAL_PE]

    def feed(self, byte_str):
        """Accumulate logical/visual evidence from final-letter positions."""
        # Final letter analysis for logical-visual decision.
        # Look for evidence that the received buffer is either logical Hebrew
        # or visual Hebrew.
        # The following cases are checked:
        # 1) A word longer than 1 letter, ending with a final letter. This is
        #    an indication that the text is laid out "naturally" since the
        #    final letter really appears at the end. +1 for logical score.
        # 2) A word longer than 1 letter, ending with a Non-Final letter. In
        #    normal Hebrew, words ending with Kaf, Mem, Nun, Pe or Tsadi,
        #    should not end with the Non-Final form of that letter. Exceptions
        #    to this rule are mentioned above in isNonFinal(). This is an
        #    indication that the text is laid out backwards. +1 for visual
        #    score
        # 3) A word longer than 1 letter, starting with a final letter. Final
        #    letters should not appear at the beginning of a word. This is an
        #    indication that the text is laid out backwards. +1 for visual
        #    score.
        #
        # The visual score and logical score are accumulated throughout the
        # text and are finally checked against each other in GetCharSetName().
        # No checking for final letters in the middle of words is done since
        # that case is not an indication for either Logical or Visual text.
        #
        # We automatically filter out all 7-bit characters (replace them with
        # spaces) so the word boundary detection works properly. [MAP]

        if self.state == ProbingState.NOT_ME:
            # Both model probers say it's not them. No reason to continue.
            return ProbingState.NOT_ME

        byte_str = self.filter_high_byte_only(byte_str)

        for cur in byte_str:
            if cur == ' ':
                # We stand on a space - a word just ended
                if self._before_prev != ' ':
                    # next-to-last char was not a space so self._prev is not a
                    # 1 letter word
                    if self.is_final(self._prev):
                        # case (1) [-2:not space][-1:final letter][cur:space]
                        self._final_char_logical_score += 1
                    elif self.is_non_final(self._prev):
                        # case (2) [-2:not space][-1:Non-Final letter][
                        #  cur:space]
                        self._final_char_visual_score += 1
            else:
                # Not standing on a space
                if ((self._before_prev == ' ') and
                        (self.is_final(self._prev)) and (cur != ' ')):
                    # case (3) [-2:space][-1:final letter][cur:not space]
                    self._final_char_visual_score += 1
            self._before_prev = self._prev
            self._prev = cur

        # Forever detecting, till the end or until both model probers return
        # ProbingState.NOT_ME (handled above)
        return ProbingState.DETECTING

    @property
    def charset_name(self):
        # Make the decision: is it Logical or Visual?
        # If the final letter score distance is dominant enough, rely on it.
        finalsub = self._final_char_logical_score - self._final_char_visual_score
        if finalsub >= self.MIN_FINAL_CHAR_DISTANCE:
            return self.LOGICAL_HEBREW_NAME
        if finalsub <= -self.MIN_FINAL_CHAR_DISTANCE:
            return self.VISUAL_HEBREW_NAME

        # It's not dominant enough, try to rely on the model scores instead.
        modelsub = (self._logical_prober.get_confidence()
                    - self._visual_prober.get_confidence())
        if modelsub > self.MIN_MODEL_DISTANCE:
            return self.LOGICAL_HEBREW_NAME
        if modelsub < -self.MIN_MODEL_DISTANCE:
            return self.VISUAL_HEBREW_NAME

        # Still no good, back to final letter distance, maybe it'll save the
        # day.
        if finalsub < 0.0:
            return self.VISUAL_HEBREW_NAME

        # (finalsub > 0 - Logical) or (don't know what to do) default to
        # Logical.
        return self.LOGICAL_HEBREW_NAME

    @property
    def language(self):
        return 'Hebrew'

    @property
    def state(self):
        # Remain active as long as any of the model probers are active.
        if (self._logical_prober.state == ProbingState.NOT_ME) and \
           (self._visual_prober.state == ProbingState.NOT_ME):
            return ProbingState.NOT_ME
        return ProbingState.DETECTING
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langhebrewmodel.py
ADDED
The diff for this file is too large to render.
See raw diff
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/chardet/langrussianmodel.py
ADDED
The diff for this file is too large to render.
See raw diff
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__init__.py
ADDED
The diff for this file is too large to render.
See raw diff
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (99.9 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-310.pyc
ADDED
Binary file (663 Bytes). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/pkg_resources/py31compat.py
ADDED
@@ -0,0 +1,23 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
import errno
|
3 |
+
import sys
|
4 |
+
|
5 |
+
from pip._vendor import six
|
6 |
+
|
7 |
+
|
8 |
+
def _makedirs_31(path, exist_ok=False):
    """Create ``path`` recursively, mimicking ``exist_ok`` on old Pythons.

    Re-raises any OSError except EEXIST when ``exist_ok`` is true.
    """
    try:
        os.makedirs(path)
    except OSError as exc:
        # Swallow only "already exists", and only when the caller asked to.
        if exist_ok and exc.errno == errno.EEXIST:
            return
        raise
|
14 |
+
|
15 |
+
|
16 |
+
# rely on compatibility behavior until mode considerations
# and exists_ok considerations are disentangled.
# See https://github.com/pypa/setuptools/pull/1083#issuecomment-315168663
# The shim is needed on Python 2 and on the 3.4.0 release only.
needs_makedirs = six.PY2 or ((3, 4) <= sys.version_info < (3, 4, 1))
makedirs = _makedirs_31 if needs_makedirs else os.makedirs
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__init__.py
ADDED
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright (c) 2012 Georgios Verigakis <[email protected]>
|
2 |
+
#
|
3 |
+
# Permission to use, copy, modify, and distribute this software for any
|
4 |
+
# purpose with or without fee is hereby granted, provided that the above
|
5 |
+
# copyright notice and this permission notice appear in all copies.
|
6 |
+
#
|
7 |
+
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
8 |
+
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
9 |
+
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
10 |
+
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
11 |
+
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
12 |
+
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
13 |
+
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
14 |
+
|
15 |
+
from __future__ import division, print_function
|
16 |
+
|
17 |
+
from collections import deque
|
18 |
+
from datetime import timedelta
|
19 |
+
from math import ceil
|
20 |
+
from sys import stderr
|
21 |
+
try:
|
22 |
+
from time import monotonic
|
23 |
+
except ImportError:
|
24 |
+
from time import time as monotonic
|
25 |
+
|
26 |
+
|
27 |
+
__version__ = '1.6'
|
28 |
+
|
29 |
+
HIDE_CURSOR = '\x1b[?25l'
|
30 |
+
SHOW_CURSOR = '\x1b[?25h'
|
31 |
+
|
32 |
+
|
33 |
+
class Infinite(object):
    """Base progress indicator for tasks whose total size is unknown.

    Tracks an item count, elapsed time, and a simple-moving-average
    throughput; subclasses override ``update()`` to render themselves.
    """

    file = stderr
    sma_window = 10  # Simple Moving Average window
    check_tty = True
    hide_cursor = True

    def __init__(self, message='', **kwargs):
        self.index = 0
        self.start_ts = monotonic()
        self.avg = 0
        self._avg_update_ts = self.start_ts
        self._ts = self.start_ts
        self._xput = deque(maxlen=self.sma_window)
        # Any keyword argument becomes an attribute, overriding class
        # defaults such as ``file`` or ``check_tty``.
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)

        self._max_width = 0
        self._hidden_cursor = False
        self.message = message

        if self.file and self.is_tty():
            if self.hide_cursor:
                print(HIDE_CURSOR, end='', file=self.file)
                self._hidden_cursor = True
            self.writeln('')

    def __del__(self):
        # Best-effort: never leave the terminal cursor hidden.
        if self._hidden_cursor:
            print(SHOW_CURSOR, end='', file=self.file)

    def __getitem__(self, key):
        # Item access exposes public attributes only; unknown or private
        # keys yield None rather than raising.
        return None if key.startswith('_') else getattr(self, key, None)

    @property
    def elapsed(self):
        """Whole seconds since this indicator was created."""
        return int(monotonic() - self.start_ts)

    @property
    def elapsed_td(self):
        return timedelta(seconds=self.elapsed)

    def update_avg(self, n, dt):
        """Fold ``dt`` seconds spent on ``n`` items into the moving average."""
        if n <= 0:
            return
        previous_len = len(self._xput)
        self._xput.append(dt / n)
        now = monotonic()
        # Recompute while the window is still filling, then at most once
        # per second to keep rendering cheap.
        if previous_len < self.sma_window or now - self._avg_update_ts > 1:
            self.avg = sum(self._xput) / len(self._xput)
            self._avg_update_ts = now

    def update(self):
        pass

    def start(self):
        pass

    def writeln(self, line):
        """Redraw the current line in place (tty only)."""
        if not (self.file and self.is_tty()):
            return
        width = len(line)
        if width < self._max_width:
            # Pad with spaces so leftovers of a longer previous line
            # are erased.
            line += ' ' * (self._max_width - width)
        else:
            self._max_width = width
        print('\r' + line, end='', file=self.file)
        self.file.flush()

    def finish(self):
        if self.file and self.is_tty():
            print(file=self.file)
            if self._hidden_cursor:
                print(SHOW_CURSOR, end='', file=self.file)
                self._hidden_cursor = False

    def is_tty(self):
        try:
            return self.file.isatty() if self.check_tty else True
        except AttributeError:
            raise AttributeError(
                "%s has no attribute 'isatty'. Try setting check_tty=False."
                % self)

    def next(self, n=1):
        """Advance the counter by ``n`` items and refresh the display."""
        now = monotonic()
        self.update_avg(n, now - self._ts)
        self._ts = now
        self.index += n
        self.update()

    def iter(self, it):
        """Yield from ``it`` while advancing the indicator per item."""
        self.iter_value = None
        with self:
            for item in it:
                self.iter_value = item
                yield item
                self.next()
        del self.iter_value

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.finish()
|
141 |
+
|
142 |
+
|
143 |
+
class Progress(Infinite):
    """Progress indicator with a known end point (``max`` steps)."""

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # Infinite.__init__ already applied any max= kwarg via setattr; this
        # establishes the documented default of 100 when none was supplied.
        self.max = kwargs.get('max', 100)

    @property
    def eta(self):
        """Estimated whole seconds remaining, from the moving average."""
        return int(ceil(self.avg * self.remaining))

    @property
    def eta_td(self):
        """``eta`` as a datetime.timedelta."""
        return timedelta(seconds=self.eta)

    @property
    def percent(self):
        """Completion as a number in [0, 100]."""
        return self.progress * 100

    @property
    def progress(self):
        """Completion as a number in [0, 1]; 0 when ``max`` is 0."""
        if self.max == 0:
            return 0
        return min(1, self.index / self.max)

    @property
    def remaining(self):
        """Steps still to go; never negative."""
        return max(self.max - self.index, 0)

    def start(self):
        # Draw the initial (empty) state when used as a context manager.
        self.update()

    def goto(self, index):
        """Jump the counter directly to ``index`` and redraw."""
        self.next(index - self.index)

    def iter(self, it):
        """Iterate ``it``, advancing one step per item.

        ``max`` is taken from ``len(it)`` when the iterable is sized.
        """
        try:
            self.max = len(it)
        except TypeError:
            pass  # unsized iterable: keep the current max

        self.iter_value = None
        with self:
            for item in it:
                self.iter_value = item
                yield item
                self.next()
        del self.iter_value
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (5.73 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/bar.cpython-310.pyc
ADDED
Binary file (2.7 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/colors.cpython-310.pyc
ADDED
Binary file (1.49 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/counter.cpython-310.pyc
ADDED
Binary file (1.56 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/__pycache__/spinner.cpython-310.pyc
ADDED
Binary file (1.39 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/bar.py
ADDED
@@ -0,0 +1,93 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
# Copyright (c) 2012 Georgios Verigakis <[email protected]>
|
4 |
+
#
|
5 |
+
# Permission to use, copy, modify, and distribute this software for any
|
6 |
+
# purpose with or without fee is hereby granted, provided that the above
|
7 |
+
# copyright notice and this permission notice appear in all copies.
|
8 |
+
#
|
9 |
+
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
10 |
+
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
11 |
+
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
12 |
+
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
13 |
+
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
14 |
+
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
15 |
+
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
16 |
+
|
17 |
+
from __future__ import unicode_literals
|
18 |
+
|
19 |
+
import sys
|
20 |
+
|
21 |
+
from . import Progress
|
22 |
+
from .colors import color
|
23 |
+
|
24 |
+
|
25 |
+
class Bar(Progress):
    """Classic horizontal progress bar: ``message |####    | suffix``."""

    width = 32
    suffix = '%(index)d/%(max)d'
    bar_prefix = ' |'
    bar_suffix = '| '
    empty_fill = ' '
    fill = '#'
    color = None

    def update(self):
        """Render the bar for the current progress and write it in place."""
        done = int(self.width * self.progress)
        left = self.width - done

        pieces = [
            self.message % self,
            self.bar_prefix,
            color(self.fill * done, fg=self.color),
            self.empty_fill * left,
            self.bar_suffix,
            self.suffix % self,
        ]
        self.writeln(''.join(pieces))
|
45 |
+
|
46 |
+
|
47 |
+
class ChargingBar(Bar):
    # Bar variant styled like a battery charge indicator, reporting percent.
    suffix = '%(percent)d%%'
    bar_prefix = ' '
    bar_suffix = ' '
    empty_fill = '∙'
    fill = '█'
|
53 |
+
|
54 |
+
|
55 |
+
class FillingSquaresBar(ChargingBar):
    # ChargingBar drawn with hollow/filled square glyphs.
    empty_fill = '▢'
    fill = '▣'
|
58 |
+
|
59 |
+
|
60 |
+
class FillingCirclesBar(ChargingBar):
    # ChargingBar drawn with hollow/filled circle glyphs.
    empty_fill = '◯'
    fill = '◉'
|
63 |
+
|
64 |
+
|
65 |
+
class IncrementalBar(Bar):
    """Bar whose last cell fills gradually through sub-character ``phases``.

    The fractional part of the filled width selects an intermediate glyph,
    giving smoother motion than a plain Bar.
    """

    # Windows consoles historically render fewer block glyphs reliably, so a
    # coarser phase set is used there.
    if sys.platform.startswith('win'):
        phases = (u' ', u'▌', u'█')
    else:
        phases = (' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█')

    def update(self):
        nphases = len(self.phases)
        filled_len = self.width * self.progress
        nfull = int(filled_len)  # Number of full chars
        phase = int((filled_len - nfull) * nphases)  # Phase of last char
        nempty = self.width - nfull  # Number of empty chars

        message = self.message % self
        bar = color(self.phases[-1] * nfull, fg=self.color)
        # The partially-filled cell, if any; phase 0 means "fully empty" and
        # is rendered by the empty fill instead.
        current = self.phases[phase] if phase > 0 else ''
        empty = self.empty_fill * max(0, nempty - len(current))
        suffix = self.suffix % self
        line = ''.join([message, self.bar_prefix, bar, current, empty,
                        self.bar_suffix, suffix])
        self.writeln(line)
|
86 |
+
|
87 |
+
|
88 |
+
class PixelBar(IncrementalBar):
    # IncrementalBar using Braille-dot glyphs as its fill phases.
    phases = ('⡀', '⡄', '⡆', '⡇', '⣇', '⣧', '⣷', '⣿')
|
90 |
+
|
91 |
+
|
92 |
+
class ShadyBar(IncrementalBar):
    # IncrementalBar using shading-density glyphs as its fill phases.
    phases = (' ', '░', '▒', '▓', '█')
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/colors.py
ADDED
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
# Copyright (c) 2020 Georgios Verigakis <[email protected]>
|
4 |
+
#
|
5 |
+
# Permission to use, copy, modify, and distribute this software for any
|
6 |
+
# purpose with or without fee is hereby granted, provided that the above
|
7 |
+
# copyright notice and this permission notice appear in all copies.
|
8 |
+
#
|
9 |
+
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
10 |
+
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
11 |
+
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
12 |
+
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
13 |
+
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
14 |
+
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
15 |
+
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
16 |
+
|
17 |
+
from functools import partial
|
18 |
+
|
19 |
+
|
20 |
+
# Named colors map to the classic SGR 30-37 (fg) / 40-47 (bg) codes by index.
COLORS = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan',
          'white')
# Style names map to SGR codes 1-9 by index (bold=1, ..., crossed=9).
STYLES = ('bold', 'faint', 'italic', 'underline', 'blink', 'blink2',
          'negative', 'concealed', 'crossed')


def _color_sgr(value, base):
    """Return the SGR code string for *value*.

    *value* is either a name from COLORS (offset against *base*: 30 for
    foreground, 40 for background) or an int 0-255 selecting from the
    256-color palette via the ``38;5;n`` / ``48;5;n`` form.

    :raises ValueError: if *value* is neither.
    """
    if value in COLORS:
        return str(base + COLORS.index(value))
    if isinstance(value, int) and 0 <= value <= 255:
        return '%d;5;%d' % (base + 8, value)
    raise ValueError('Invalid color "%s"' % value)


def color(s, fg=None, bg=None, style=None):
    """Wrap string *s* in ANSI SGR escape codes.

    :param fg: foreground color -- a name from COLORS or an int 0-255.
    :param bg: background color -- same accepted values as *fg*.
    :param style: a STYLES name, or several joined with ``'+'``.
    :returns: *s* unchanged when no attribute is requested.
    :raises ValueError: on an unknown color or style.
    """
    sgr = []

    # Compare against None (not truthiness) so that 0 -- a valid 256-color
    # palette index (black) -- is not silently ignored.
    if fg is not None:
        sgr.append(_color_sgr(fg, 30))
    if bg is not None:
        sgr.append(_color_sgr(bg, 40))

    if style:
        for st in style.split('+'):
            if st in STYLES:
                sgr.append(str(1 + STYLES.index(st)))
            else:
                raise ValueError('Invalid style "%s"' % st)

    if not sgr:
        return s
    return '\x1b[' + ';'.join(sgr) + 'm' + s + '\x1b[0m'
|
58 |
+
|
59 |
+
|
60 |
+
# Foreground shortcuts
# Partial applications of color() with fg pre-bound, e.g.
# red('text') == color('text', fg='red').
black = partial(color, fg='black')
red = partial(color, fg='red')
green = partial(color, fg='green')
yellow = partial(color, fg='yellow')
blue = partial(color, fg='blue')
magenta = partial(color, fg='magenta')
cyan = partial(color, fg='cyan')
white = partial(color, fg='white')

# Style shortcuts
# Same idea with style pre-bound, e.g. bold('text') == color('text', style='bold').
bold = partial(color, style='bold')
faint = partial(color, style='faint')
italic = partial(color, style='italic')
underline = partial(color, style='underline')
blink = partial(color, style='blink')
blink2 = partial(color, style='blink2')
negative = partial(color, style='negative')
concealed = partial(color, style='concealed')
crossed = partial(color, style='crossed')
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/counter.py
ADDED
@@ -0,0 +1,47 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
# Copyright (c) 2012 Georgios Verigakis <[email protected]>
|
4 |
+
#
|
5 |
+
# Permission to use, copy, modify, and distribute this software for any
|
6 |
+
# purpose with or without fee is hereby granted, provided that the above
|
7 |
+
# copyright notice and this permission notice appear in all copies.
|
8 |
+
#
|
9 |
+
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
10 |
+
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
11 |
+
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
12 |
+
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
13 |
+
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
14 |
+
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
15 |
+
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
16 |
+
|
17 |
+
from __future__ import unicode_literals
|
18 |
+
from . import Infinite, Progress
|
19 |
+
|
20 |
+
|
21 |
+
class Counter(Infinite):
    """Endless indicator that simply prints the current step count."""

    def update(self):
        prefix = self.message % self
        self.writeln(prefix + str(self.index))
|
26 |
+
|
27 |
+
|
28 |
+
class Countdown(Progress):
    """Bounded indicator that counts down from ``max`` to zero."""

    def update(self):
        prefix = self.message % self
        self.writeln(prefix + str(self.remaining))
|
33 |
+
|
34 |
+
|
35 |
+
class Stack(Progress):
    """Single-character bar that fills vertically as progress grows."""

    phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')

    def update(self):
        total = len(self.phases)
        # Clamp so 100% progress still indexes the last phase.
        frame = min(total - 1, int(self.progress * total))
        self.writeln((self.message % self) + self.phases[frame])
|
44 |
+
|
45 |
+
|
46 |
+
class Pie(Stack):
    # Stack variant rendered as a pie chart filling clockwise.
    phases = ('○', '◔', '◑', '◕', '●')
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/progress/spinner.py
ADDED
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# -*- coding: utf-8 -*-
|
2 |
+
|
3 |
+
# Copyright (c) 2012 Georgios Verigakis <[email protected]>
|
4 |
+
#
|
5 |
+
# Permission to use, copy, modify, and distribute this software for any
|
6 |
+
# purpose with or without fee is hereby granted, provided that the above
|
7 |
+
# copyright notice and this permission notice appear in all copies.
|
8 |
+
#
|
9 |
+
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
10 |
+
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
11 |
+
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
12 |
+
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
13 |
+
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
14 |
+
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
15 |
+
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
16 |
+
|
17 |
+
from __future__ import unicode_literals
|
18 |
+
from . import Infinite
|
19 |
+
|
20 |
+
|
21 |
+
class Spinner(Infinite):
    """Endless indicator that cycles through ``phases`` on each step."""

    phases = ('-', '\\', '|', '/')
    hide_cursor = True

    def update(self):
        frame = self.phases[self.index % len(self.phases)]
        self.writeln((self.message % self) + frame)
|
30 |
+
|
31 |
+
|
32 |
+
class PieSpinner(Spinner):
    # Spinner using rotating quarter-pie glyphs.
    phases = ['◷', '◶', '◵', '◴']
|
34 |
+
|
35 |
+
|
36 |
+
class MoonSpinner(Spinner):
    # Spinner using moon-phase glyphs.
    phases = ['◑', '◒', '◐', '◓']
|
38 |
+
|
39 |
+
|
40 |
+
class LineSpinner(Spinner):
    # Spinner using a horizontal line bouncing vertically.
    phases = ['⎺', '⎻', '⎼', '⎽', '⎼', '⎻']
|
42 |
+
|
43 |
+
|
44 |
+
class PixelSpinner(Spinner):
    # Spinner using Braille-dot glyphs rotating around the cell.
    phases = ['⣾', '⣷', '⣯', '⣟', '⡿', '⢿', '⣻', '⣽']
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__init__.py
ADDED
@@ -0,0 +1,517 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Copyright 2016-2018 Julien Danjou
|
2 |
+
# Copyright 2017 Elisey Zanko
|
3 |
+
# Copyright 2016 Étienne Bersac
|
4 |
+
# Copyright 2016 Joshua Harlow
|
5 |
+
# Copyright 2013-2014 Ray Holder
|
6 |
+
#
|
7 |
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
8 |
+
# you may not use this file except in compliance with the License.
|
9 |
+
# You may obtain a copy of the License at
|
10 |
+
#
|
11 |
+
# http://www.apache.org/licenses/LICENSE-2.0
|
12 |
+
#
|
13 |
+
# Unless required by applicable law or agreed to in writing, software
|
14 |
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
15 |
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
16 |
+
# See the License for the specific language governing permissions and
|
17 |
+
# limitations under the License.
|
18 |
+
|
19 |
+
import functools
|
20 |
+
import sys
|
21 |
+
import threading
|
22 |
+
import time
|
23 |
+
import typing as t
|
24 |
+
import warnings
|
25 |
+
from abc import ABC, abstractmethod
|
26 |
+
from concurrent import futures
|
27 |
+
from inspect import iscoroutinefunction
|
28 |
+
|
29 |
+
# Import all built-in retry strategies for easier usage.
|
30 |
+
from .retry import retry_base # noqa
|
31 |
+
from .retry import retry_all # noqa
|
32 |
+
from .retry import retry_always # noqa
|
33 |
+
from .retry import retry_any # noqa
|
34 |
+
from .retry import retry_if_exception # noqa
|
35 |
+
from .retry import retry_if_exception_type # noqa
|
36 |
+
from .retry import retry_if_not_exception_type # noqa
|
37 |
+
from .retry import retry_if_not_result # noqa
|
38 |
+
from .retry import retry_if_result # noqa
|
39 |
+
from .retry import retry_never # noqa
|
40 |
+
from .retry import retry_unless_exception_type # noqa
|
41 |
+
from .retry import retry_if_exception_message # noqa
|
42 |
+
from .retry import retry_if_not_exception_message # noqa
|
43 |
+
|
44 |
+
# Import all nap strategies for easier usage.
|
45 |
+
from .nap import sleep # noqa
|
46 |
+
from .nap import sleep_using_event # noqa
|
47 |
+
|
48 |
+
# Import all built-in stop strategies for easier usage.
|
49 |
+
from .stop import stop_after_attempt # noqa
|
50 |
+
from .stop import stop_after_delay # noqa
|
51 |
+
from .stop import stop_all # noqa
|
52 |
+
from .stop import stop_any # noqa
|
53 |
+
from .stop import stop_never # noqa
|
54 |
+
from .stop import stop_when_event_set # noqa
|
55 |
+
|
56 |
+
# Import all built-in wait strategies for easier usage.
|
57 |
+
from .wait import wait_chain # noqa
|
58 |
+
from .wait import wait_combine # noqa
|
59 |
+
from .wait import wait_exponential # noqa
|
60 |
+
from .wait import wait_fixed # noqa
|
61 |
+
from .wait import wait_incrementing # noqa
|
62 |
+
from .wait import wait_none # noqa
|
63 |
+
from .wait import wait_random # noqa
|
64 |
+
from .wait import wait_random_exponential # noqa
|
65 |
+
from .wait import wait_random_exponential as wait_full_jitter # noqa
|
66 |
+
|
67 |
+
# Import all built-in before strategies for easier usage.
|
68 |
+
from .before import before_log # noqa
|
69 |
+
from .before import before_nothing # noqa
|
70 |
+
|
71 |
+
# Import all built-in after strategies for easier usage.
|
72 |
+
from .after import after_log # noqa
|
73 |
+
from .after import after_nothing # noqa
|
74 |
+
|
75 |
+
# Import all built-in after strategies for easier usage.
|
76 |
+
from .before_sleep import before_sleep_log # noqa
|
77 |
+
from .before_sleep import before_sleep_nothing # noqa
|
78 |
+
|
79 |
+
# Replace a conditional import with a hard-coded None so that pip does
|
80 |
+
# not attempt to use tornado even if it is present in the environment.
|
81 |
+
# If tornado is non-None, tenacity will attempt to execute some code
|
82 |
+
# that is sensitive to the version of tornado, which could break pip
|
83 |
+
# if an old version is found.
|
84 |
+
tornado = None # type: ignore
|
85 |
+
|
86 |
+
if t.TYPE_CHECKING:
|
87 |
+
import types
|
88 |
+
|
89 |
+
from .wait import wait_base
|
90 |
+
from .stop import stop_base
|
91 |
+
|
92 |
+
|
93 |
+
# WrappedFn preserves the decorated callable's type through @retry/wraps();
# _RetValT is the return type of a wrapped call.
WrappedFn = t.TypeVar("WrappedFn", bound=t.Callable)
_RetValT = t.TypeVar("_RetValT")
|
95 |
+
|
96 |
+
|
97 |
+
@t.overload
def retry(fn: WrappedFn) -> WrappedFn:
    pass


@t.overload
def retry(*dargs: t.Any, **dkw: t.Any) -> t.Callable[[WrappedFn], WrappedFn]:  # noqa
    pass


def retry(*dargs: t.Any, **dkw: t.Any) -> t.Union[WrappedFn, t.Callable[[WrappedFn], WrappedFn]]:  # noqa
    """Wrap a function with a new `Retrying` object.

    :param dargs: positional arguments passed to Retrying object
    :param dkw: keyword arguments passed to the Retrying object
    """
    # support both @retry and @retry() as valid syntax
    if len(dargs) == 1 and callable(dargs[0]):
        # Bare-decorator form: re-enter with no configuration and apply the
        # resulting decorator to the function immediately.
        return retry()(dargs[0])
    else:

        def wrap(f: WrappedFn) -> WrappedFn:
            if isinstance(f, retry_base):
                # A retry strategy was passed where the wrapped callable was
                # expected -- almost certainly a missing retry= keyword, e.g.
                # @retry(retry_if_exception_type(...)) instead of
                # @retry(retry=retry_if_exception_type(...)).
                warnings.warn(
                    f"Got retry_base instance ({f.__class__.__name__}) as callable argument, "
                    f"this will probably hang indefinitely (did you mean retry={f.__class__.__name__}(...)?)"
                )
            # Choose the controller flavor matching the wrapped callable.
            if iscoroutinefunction(f):
                r: "BaseRetrying" = AsyncRetrying(*dargs, **dkw)
            elif tornado and hasattr(tornado.gen, "is_coroutine_function") and tornado.gen.is_coroutine_function(f):
                # Unreachable in pip's vendored copy: tornado is hard-coded
                # to None earlier in this module.
                r = TornadoRetrying(*dargs, **dkw)
            else:
                r = Retrying(*dargs, **dkw)

            return r.wraps(f)

        return wrap
|
134 |
+
|
135 |
+
|
136 |
+
class TryAgain(Exception):
    """Always retry the executed function when raised."""
    # Raised from inside the wrapped callable to force another attempt
    # regardless of the configured retry predicate (see BaseRetrying.iter).
|
138 |
+
|
139 |
+
|
140 |
+
# Sentinel distinguishing "no result yet" from a legitimate None result.
NO_RESULT = object()
|
141 |
+
|
142 |
+
|
143 |
+
class DoAttempt:
    # Marker returned by BaseRetrying.iter: the caller should invoke the
    # wrapped function now.
    pass
|
145 |
+
|
146 |
+
|
147 |
+
class DoSleep(float):
    # Marker returned by BaseRetrying.iter: the caller should sleep for this
    # many seconds (the float value) before the next attempt.
    pass
|
149 |
+
|
150 |
+
|
151 |
+
class BaseAction:
    """Base class for representing actions to take by retry object.

    Concrete implementations must define:
    - __init__: to initialize all necessary fields
    - REPR_FIELDS: class variable specifying attributes to include in repr(self)
    - NAME: for identification in retry object methods and callbacks
    """

    REPR_FIELDS: t.Sequence[str] = ()
    NAME: t.Optional[str] = None

    def __repr__(self) -> str:
        # Render each declared field as name=value using its repr.
        rendered = ["{}={!r}".format(name, getattr(self, name)) for name in self.REPR_FIELDS]
        return "{}({})".format(self.__class__.__name__, ", ".join(rendered))

    def __str__(self) -> str:
        return repr(self)
|
169 |
+
|
170 |
+
|
171 |
+
class RetryAction(BaseAction):
    # Action recording how long the controller decided to sleep before the
    # next attempt.
    REPR_FIELDS = ("sleep",)
    NAME = "retry"

    def __init__(self, sleep: t.SupportsFloat) -> None:
        self.sleep = float(sleep)
|
177 |
+
|
178 |
+
|
179 |
+
_unset = object()
|
180 |
+
|
181 |
+
|
182 |
+
def _first_set(first: t.Union[t.Any, object], second: t.Any) -> t.Any:
|
183 |
+
return second if first is _unset else first
|
184 |
+
|
185 |
+
|
186 |
+
class RetryError(Exception):
    """Encapsulates the last attempt instance right before giving up."""

    def __init__(self, last_attempt: "Future") -> None:
        self.last_attempt = last_attempt
        super().__init__(last_attempt)

    def reraise(self) -> "t.NoReturn":
        # When the final attempt raised, surface that original exception
        # (Future.result() itself re-raises the stored exception here);
        # otherwise the attempt returned a value the retry predicate
        # rejected, so raise this RetryError instead.
        if self.last_attempt.failed:
            raise self.last_attempt.result()
        raise self

    def __str__(self) -> str:
        return f"{self.__class__.__name__}[{self.last_attempt}]"
|
200 |
+
|
201 |
+
|
202 |
+
class AttemptManager:
    """Manage attempt context.

    Context manager yielded by ``BaseRetrying.__iter__`` for each attempt;
    records the attempt's outcome (exception or None) on the shared
    RetryCallState so the retry loop can decide what to do next.
    """

    def __init__(self, retry_state: "RetryCallState"):
        self.retry_state = retry_state

    def __enter__(self) -> None:
        pass

    def __exit__(
        self,
        exc_type: t.Optional[t.Type[BaseException]],
        exc_value: t.Optional[BaseException],
        traceback: t.Optional["types.TracebackType"],
    ) -> t.Optional[bool]:
        if isinstance(exc_value, BaseException):
            # Record the failure and swallow it; the retry loop decides
            # whether to re-attempt, sleep, or give up.
            self.retry_state.set_exception((exc_type, exc_value, traceback))
            return True  # Swallow exception.
        else:
            # We don't have the result, actually.
            self.retry_state.set_result(None)
            return None
|
224 |
+
|
225 |
+
|
226 |
+
class BaseRetrying(ABC):
|
227 |
+
def __init__(
|
228 |
+
self,
|
229 |
+
sleep: t.Callable[[t.Union[int, float]], None] = sleep,
|
230 |
+
stop: "stop_base" = stop_never,
|
231 |
+
wait: "wait_base" = wait_none(),
|
232 |
+
retry: retry_base = retry_if_exception_type(),
|
233 |
+
before: t.Callable[["RetryCallState"], None] = before_nothing,
|
234 |
+
after: t.Callable[["RetryCallState"], None] = after_nothing,
|
235 |
+
before_sleep: t.Optional[t.Callable[["RetryCallState"], None]] = None,
|
236 |
+
reraise: bool = False,
|
237 |
+
retry_error_cls: t.Type[RetryError] = RetryError,
|
238 |
+
retry_error_callback: t.Optional[t.Callable[["RetryCallState"], t.Any]] = None,
|
239 |
+
):
|
240 |
+
self.sleep = sleep
|
241 |
+
self.stop = stop
|
242 |
+
self.wait = wait
|
243 |
+
self.retry = retry
|
244 |
+
self.before = before
|
245 |
+
self.after = after
|
246 |
+
self.before_sleep = before_sleep
|
247 |
+
self.reraise = reraise
|
248 |
+
self._local = threading.local()
|
249 |
+
self.retry_error_cls = retry_error_cls
|
250 |
+
self.retry_error_callback = retry_error_callback
|
251 |
+
|
252 |
+
def copy(
|
253 |
+
self,
|
254 |
+
sleep: t.Union[t.Callable[[t.Union[int, float]], None], object] = _unset,
|
255 |
+
stop: t.Union["stop_base", object] = _unset,
|
256 |
+
wait: t.Union["wait_base", object] = _unset,
|
257 |
+
retry: t.Union[retry_base, object] = _unset,
|
258 |
+
before: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
|
259 |
+
after: t.Union[t.Callable[["RetryCallState"], None], object] = _unset,
|
260 |
+
before_sleep: t.Union[t.Optional[t.Callable[["RetryCallState"], None]], object] = _unset,
|
261 |
+
reraise: t.Union[bool, object] = _unset,
|
262 |
+
retry_error_cls: t.Union[t.Type[RetryError], object] = _unset,
|
263 |
+
retry_error_callback: t.Union[t.Optional[t.Callable[["RetryCallState"], t.Any]], object] = _unset,
|
264 |
+
) -> "BaseRetrying":
|
265 |
+
"""Copy this object with some parameters changed if needed."""
|
266 |
+
return self.__class__(
|
267 |
+
sleep=_first_set(sleep, self.sleep),
|
268 |
+
stop=_first_set(stop, self.stop),
|
269 |
+
wait=_first_set(wait, self.wait),
|
270 |
+
retry=_first_set(retry, self.retry),
|
271 |
+
before=_first_set(before, self.before),
|
272 |
+
after=_first_set(after, self.after),
|
273 |
+
before_sleep=_first_set(before_sleep, self.before_sleep),
|
274 |
+
reraise=_first_set(reraise, self.reraise),
|
275 |
+
retry_error_cls=_first_set(retry_error_cls, self.retry_error_cls),
|
276 |
+
retry_error_callback=_first_set(retry_error_callback, self.retry_error_callback),
|
277 |
+
)
|
278 |
+
|
279 |
+
def __repr__(self) -> str:
|
280 |
+
return (
|
281 |
+
f"<{self.__class__.__name__} object at 0x{id(self):x} ("
|
282 |
+
f"stop={self.stop}, "
|
283 |
+
f"wait={self.wait}, "
|
284 |
+
f"sleep={self.sleep}, "
|
285 |
+
f"retry={self.retry}, "
|
286 |
+
f"before={self.before}, "
|
287 |
+
f"after={self.after})>"
|
288 |
+
)
|
289 |
+
|
290 |
+
@property
|
291 |
+
def statistics(self) -> t.Dict[str, t.Any]:
|
292 |
+
"""Return a dictionary of runtime statistics.
|
293 |
+
|
294 |
+
This dictionary will be empty when the controller has never been
|
295 |
+
ran. When it is running or has ran previously it should have (but
|
296 |
+
may not) have useful and/or informational keys and values when
|
297 |
+
running is underway and/or completed.
|
298 |
+
|
299 |
+
.. warning:: The keys in this dictionary **should** be some what
|
300 |
+
stable (not changing), but there existence **may**
|
301 |
+
change between major releases as new statistics are
|
302 |
+
gathered or removed so before accessing keys ensure that
|
303 |
+
they actually exist and handle when they do not.
|
304 |
+
|
305 |
+
.. note:: The values in this dictionary are local to the thread
|
306 |
+
running call (so if multiple threads share the same retrying
|
307 |
+
object - either directly or indirectly) they will each have
|
308 |
+
there own view of statistics they have collected (in the
|
309 |
+
future we may provide a way to aggregate the various
|
310 |
+
statistics from each thread).
|
311 |
+
"""
|
312 |
+
try:
|
313 |
+
return self._local.statistics
|
314 |
+
except AttributeError:
|
315 |
+
self._local.statistics = {}
|
316 |
+
return self._local.statistics
|
317 |
+
|
318 |
+
def wraps(self, f: WrappedFn) -> WrappedFn:
|
319 |
+
"""Wrap a function for retrying.
|
320 |
+
|
321 |
+
:param f: A function to wraps for retrying.
|
322 |
+
"""
|
323 |
+
|
324 |
+
@functools.wraps(f)
|
325 |
+
def wrapped_f(*args: t.Any, **kw: t.Any) -> t.Any:
|
326 |
+
return self(f, *args, **kw)
|
327 |
+
|
328 |
+
def retry_with(*args: t.Any, **kwargs: t.Any) -> WrappedFn:
|
329 |
+
return self.copy(*args, **kwargs).wraps(f)
|
330 |
+
|
331 |
+
wrapped_f.retry = self
|
332 |
+
wrapped_f.retry_with = retry_with
|
333 |
+
|
334 |
+
return wrapped_f
|
335 |
+
|
336 |
+
def begin(self) -> None:
|
337 |
+
self.statistics.clear()
|
338 |
+
self.statistics["start_time"] = time.monotonic()
|
339 |
+
self.statistics["attempt_number"] = 1
|
340 |
+
self.statistics["idle_for"] = 0
|
341 |
+
|
342 |
+
def iter(self, retry_state: "RetryCallState") -> t.Union[DoAttempt, DoSleep, t.Any]: # noqa
|
343 |
+
fut = retry_state.outcome
|
344 |
+
if fut is None:
|
345 |
+
if self.before is not None:
|
346 |
+
self.before(retry_state)
|
347 |
+
return DoAttempt()
|
348 |
+
|
349 |
+
is_explicit_retry = retry_state.outcome.failed and isinstance(retry_state.outcome.exception(), TryAgain)
|
350 |
+
if not (is_explicit_retry or self.retry(retry_state=retry_state)):
|
351 |
+
return fut.result()
|
352 |
+
|
353 |
+
if self.after is not None:
|
354 |
+
self.after(retry_state)
|
355 |
+
|
356 |
+
self.statistics["delay_since_first_attempt"] = retry_state.seconds_since_start
|
357 |
+
if self.stop(retry_state=retry_state):
|
358 |
+
if self.retry_error_callback:
|
359 |
+
return self.retry_error_callback(retry_state)
|
360 |
+
retry_exc = self.retry_error_cls(fut)
|
361 |
+
if self.reraise:
|
362 |
+
raise retry_exc.reraise()
|
363 |
+
raise retry_exc from fut.exception()
|
364 |
+
|
365 |
+
if self.wait:
|
366 |
+
sleep = self.wait(retry_state=retry_state)
|
367 |
+
else:
|
368 |
+
sleep = 0.0
|
369 |
+
retry_state.next_action = RetryAction(sleep)
|
370 |
+
retry_state.idle_for += sleep
|
371 |
+
self.statistics["idle_for"] += sleep
|
372 |
+
self.statistics["attempt_number"] += 1
|
373 |
+
|
374 |
+
if self.before_sleep is not None:
|
375 |
+
self.before_sleep(retry_state)
|
376 |
+
|
377 |
+
return DoSleep(sleep)
|
378 |
+
|
379 |
+
def __iter__(self) -> t.Generator[AttemptManager, None, None]:
    """Drive the retry loop as a generator, yielding one AttemptManager per attempt."""
    self.begin()

    state = RetryCallState(self, fn=None, args=(), kwargs={})
    while True:
        action = self.iter(retry_state=state)
        if isinstance(action, DoAttempt):
            yield AttemptManager(retry_state=state)
        elif isinstance(action, DoSleep):
            state.prepare_for_next_attempt()
            self.sleep(action)
        else:
            return
|
392 |
+
|
393 |
+
@abstractmethod
def __call__(self, fn: t.Callable[..., _RetValT], *args: t.Any, **kwargs: t.Any) -> _RetValT:
    """Execute *fn* under this retry policy; implemented by concrete subclasses."""
    pass
|
396 |
+
|
397 |
+
|
398 |
+
class Retrying(BaseRetrying):
    """Retrying controller."""

    def __call__(self, fn: t.Callable[..., _RetValT], *args: t.Any, **kwargs: t.Any) -> _RetValT:
        """Call *fn*, repeating it per the retry policy until it succeeds or stops."""
        self.begin()

        state = RetryCallState(retry_object=self, fn=fn, args=args, kwargs=kwargs)
        while True:
            action = self.iter(retry_state=state)
            if isinstance(action, DoSleep):
                state.prepare_for_next_attempt()
                self.sleep(action)
            elif isinstance(action, DoAttempt):
                try:
                    outcome = fn(*args, **kwargs)
                except BaseException:  # noqa: B902
                    # Capture the full exc_info so the outcome records the exception.
                    state.set_exception(sys.exc_info())
                else:
                    state.set_result(outcome)
            else:
                # Anything else is the final result decided by iter().
                return action
|
419 |
+
|
420 |
+
|
421 |
+
class Future(futures.Future):
    """Encapsulates a (future or past) attempted call to a target function."""

    def __init__(self, attempt_number: int) -> None:
        super().__init__()
        # Ordinal (1-based) of the attempt this outcome belongs to.
        self.attempt_number = attempt_number

    @property
    def failed(self) -> bool:
        """Return whether an exception is being held in this future."""
        return self.exception() is not None

    @classmethod
    def construct(cls, attempt_number: int, value: t.Any, has_exception: bool) -> "Future":
        """Build an already-resolved Future carrying *value* as result or exception."""
        fut = cls(attempt_number)
        setter = fut.set_exception if has_exception else fut.set_result
        setter(value)
        return fut
|
442 |
+
|
443 |
+
|
444 |
+
class RetryCallState:
    """State related to a single call wrapped with Retrying."""

    def __init__(
        self,
        retry_object: BaseRetrying,
        fn: t.Optional[WrappedFn],
        args: t.Any,
        kwargs: t.Any,
    ) -> None:
        #: Monotonic timestamp taken when the retry-managed call started
        self.start_time = time.monotonic()
        #: The retry controller driving this call
        self.retry_object = retry_object
        #: The wrapped callable (None when used via the iterator protocol)
        self.fn = fn
        #: Positional arguments for the wrapped callable
        self.args = args
        #: Keyword arguments for the wrapped callable
        self.kwargs = kwargs

        #: 1-based counter of the attempt currently in flight
        self.attempt_number: int = 1
        #: Outcome (result or exception) of the most recent attempt
        self.outcome: t.Optional[Future] = None
        #: Monotonic timestamp of the most recent outcome
        self.outcome_timestamp: t.Optional[float] = None
        #: Cumulative time spent sleeping between attempts
        self.idle_for: float = 0.0
        #: The action the retry manager most recently decided on
        self.next_action: t.Optional[RetryAction] = None

    @property
    def seconds_since_start(self) -> t.Optional[float]:
        """Elapsed time from call start to the last outcome, or None if no outcome yet."""
        if self.outcome_timestamp is None:
            return None
        return self.outcome_timestamp - self.start_time

    def prepare_for_next_attempt(self) -> None:
        """Discard the previous outcome and advance the attempt counter."""
        self.attempt_number += 1
        self.outcome = None
        self.outcome_timestamp = None
        self.next_action = None

    def set_result(self, val: t.Any) -> None:
        """Record *val* as the successful outcome of the current attempt."""
        stamp = time.monotonic()
        outcome = Future(self.attempt_number)
        outcome.set_result(val)
        self.outcome = outcome
        self.outcome_timestamp = stamp

    def set_exception(self, exc_info: t.Tuple[t.Type[BaseException], BaseException, "types.TracebackType"]) -> None:
        """Record the exception held in *exc_info* as the current attempt's outcome."""
        stamp = time.monotonic()
        outcome = Future(self.attempt_number)
        outcome.set_exception(exc_info[1])
        self.outcome = outcome
        self.outcome_timestamp = stamp

    def __repr__(self):
        if self.outcome is None:
            outcome_desc = "none yet"
        elif self.outcome.failed:
            exception = self.outcome.exception()
            outcome_desc = f"failed ({exception.__class__.__name__} {exception})"
        else:
            outcome_desc = f"returned {self.outcome.result()}"

        slept = float(round(self.idle_for, 2))
        clsname = self.__class__.__name__
        return f"<{clsname} {id(self)}: attempt #{self.attempt_number}; slept for {slept}; last result: {outcome_desc}>"
|
512 |
+
|
513 |
+
|
514 |
+
from pip._vendor.tenacity._asyncio import AsyncRetrying # noqa:E402,I100
|
515 |
+
|
516 |
+
if tornado:
|
517 |
+
from pip._vendor.tenacity.tornadoweb import TornadoRetrying
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (16.4 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/_asyncio.cpython-310.pyc
ADDED
Binary file (2.62 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/_utils.cpython-310.pyc
ADDED
Binary file (1.23 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/after.cpython-310.pyc
ADDED
Binary file (1.23 kB). View file
|
|
scripts/myenv/lib/python3.10/site-packages/pip/_vendor/tenacity/__pycache__/before.cpython-310.pyc
ADDED
Binary file (1.11 kB). View file
|
|