Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py +18 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py +61 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py +137 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py +43 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py +6 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py +150 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py +37 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py +32 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py +415 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py +111 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py +139 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py +186 -0
- venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py +33 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc +0 -0
- venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc +0 -0
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__init__.py
ADDED
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
"""CacheControl import Interface.
|
6 |
+
|
7 |
+
Make it easy to import from cachecontrol without long namespaces.
|
8 |
+
"""
|
9 |
+
__author__ = "Eric Larson"
|
10 |
+
__email__ = "[email protected]"
|
11 |
+
__version__ = "0.12.10"
|
12 |
+
|
13 |
+
from .wrapper import CacheControl
|
14 |
+
from .adapter import CacheControlAdapter
|
15 |
+
from .controller import CacheController
|
16 |
+
|
17 |
+
import logging
|
18 |
+
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (638 Bytes). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-310.pyc
ADDED
Binary file (1.58 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/adapter.cpython-310.pyc
ADDED
Binary file (3.15 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/cache.cpython-310.pyc
ADDED
Binary file (1.84 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (752 Bytes). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/controller.cpython-310.pyc
ADDED
Binary file (8.21 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-310.pyc
ADDED
Binary file (2.79 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-310.pyc
ADDED
Binary file (4.71 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/serialize.cpython-310.pyc
ADDED
Binary file (4.25 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-310.pyc
ADDED
Binary file (683 Bytes). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/_cmd.py
ADDED
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import logging
|
6 |
+
|
7 |
+
from pip._vendor import requests
|
8 |
+
|
9 |
+
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
|
10 |
+
from pip._vendor.cachecontrol.cache import DictCache
|
11 |
+
from pip._vendor.cachecontrol.controller import logger
|
12 |
+
|
13 |
+
from argparse import ArgumentParser
|
14 |
+
|
15 |
+
|
16 |
+
def setup_logging():
|
17 |
+
logger.setLevel(logging.DEBUG)
|
18 |
+
handler = logging.StreamHandler()
|
19 |
+
logger.addHandler(handler)
|
20 |
+
|
21 |
+
|
22 |
+
def get_session():
|
23 |
+
adapter = CacheControlAdapter(
|
24 |
+
DictCache(), cache_etags=True, serializer=None, heuristic=None
|
25 |
+
)
|
26 |
+
sess = requests.Session()
|
27 |
+
sess.mount("http://", adapter)
|
28 |
+
sess.mount("https://", adapter)
|
29 |
+
|
30 |
+
sess.cache_controller = adapter.controller
|
31 |
+
return sess
|
32 |
+
|
33 |
+
|
34 |
+
def get_args():
|
35 |
+
parser = ArgumentParser()
|
36 |
+
parser.add_argument("url", help="The URL to try and cache")
|
37 |
+
return parser.parse_args()
|
38 |
+
|
39 |
+
|
40 |
+
def main(args=None):
|
41 |
+
args = get_args()
|
42 |
+
sess = get_session()
|
43 |
+
|
44 |
+
# Make a request to get a response
|
45 |
+
resp = sess.get(args.url)
|
46 |
+
|
47 |
+
# Turn on logging
|
48 |
+
setup_logging()
|
49 |
+
|
50 |
+
# try setting the cache
|
51 |
+
sess.cache_controller.cache_response(resp.request, resp.raw)
|
52 |
+
|
53 |
+
# Now try to get it
|
54 |
+
if sess.cache_controller.cached_request(resp.request):
|
55 |
+
print("Cached!")
|
56 |
+
else:
|
57 |
+
print("Not cached :(")
|
58 |
+
|
59 |
+
|
60 |
+
if __name__ == "__main__":
|
61 |
+
main()
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/adapter.py
ADDED
@@ -0,0 +1,137 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import types
|
6 |
+
import functools
|
7 |
+
import zlib
|
8 |
+
|
9 |
+
from pip._vendor.requests.adapters import HTTPAdapter
|
10 |
+
|
11 |
+
from .controller import CacheController, PERMANENT_REDIRECT_STATUSES
|
12 |
+
from .cache import DictCache
|
13 |
+
from .filewrapper import CallbackFileWrapper
|
14 |
+
|
15 |
+
|
16 |
+
class CacheControlAdapter(HTTPAdapter):
|
17 |
+
invalidating_methods = {"PUT", "PATCH", "DELETE"}
|
18 |
+
|
19 |
+
def __init__(
|
20 |
+
self,
|
21 |
+
cache=None,
|
22 |
+
cache_etags=True,
|
23 |
+
controller_class=None,
|
24 |
+
serializer=None,
|
25 |
+
heuristic=None,
|
26 |
+
cacheable_methods=None,
|
27 |
+
*args,
|
28 |
+
**kw
|
29 |
+
):
|
30 |
+
super(CacheControlAdapter, self).__init__(*args, **kw)
|
31 |
+
self.cache = DictCache() if cache is None else cache
|
32 |
+
self.heuristic = heuristic
|
33 |
+
self.cacheable_methods = cacheable_methods or ("GET",)
|
34 |
+
|
35 |
+
controller_factory = controller_class or CacheController
|
36 |
+
self.controller = controller_factory(
|
37 |
+
self.cache, cache_etags=cache_etags, serializer=serializer
|
38 |
+
)
|
39 |
+
|
40 |
+
def send(self, request, cacheable_methods=None, **kw):
|
41 |
+
"""
|
42 |
+
Send a request. Use the request information to see if it
|
43 |
+
exists in the cache and cache the response if we need to and can.
|
44 |
+
"""
|
45 |
+
cacheable = cacheable_methods or self.cacheable_methods
|
46 |
+
if request.method in cacheable:
|
47 |
+
try:
|
48 |
+
cached_response = self.controller.cached_request(request)
|
49 |
+
except zlib.error:
|
50 |
+
cached_response = None
|
51 |
+
if cached_response:
|
52 |
+
return self.build_response(request, cached_response, from_cache=True)
|
53 |
+
|
54 |
+
# check for etags and add headers if appropriate
|
55 |
+
request.headers.update(self.controller.conditional_headers(request))
|
56 |
+
|
57 |
+
resp = super(CacheControlAdapter, self).send(request, **kw)
|
58 |
+
|
59 |
+
return resp
|
60 |
+
|
61 |
+
def build_response(
|
62 |
+
self, request, response, from_cache=False, cacheable_methods=None
|
63 |
+
):
|
64 |
+
"""
|
65 |
+
Build a response by making a request or using the cache.
|
66 |
+
|
67 |
+
This will end up calling send and returning a potentially
|
68 |
+
cached response
|
69 |
+
"""
|
70 |
+
cacheable = cacheable_methods or self.cacheable_methods
|
71 |
+
if not from_cache and request.method in cacheable:
|
72 |
+
# Check for any heuristics that might update headers
|
73 |
+
# before trying to cache.
|
74 |
+
if self.heuristic:
|
75 |
+
response = self.heuristic.apply(response)
|
76 |
+
|
77 |
+
# apply any expiration heuristics
|
78 |
+
if response.status == 304:
|
79 |
+
# We must have sent an ETag request. This could mean
|
80 |
+
# that we've been expired already or that we simply
|
81 |
+
# have an etag. In either case, we want to try and
|
82 |
+
# update the cache if that is the case.
|
83 |
+
cached_response = self.controller.update_cached_response(
|
84 |
+
request, response
|
85 |
+
)
|
86 |
+
|
87 |
+
if cached_response is not response:
|
88 |
+
from_cache = True
|
89 |
+
|
90 |
+
# We are done with the server response, read a
|
91 |
+
# possible response body (compliant servers will
|
92 |
+
# not return one, but we cannot be 100% sure) and
|
93 |
+
# release the connection back to the pool.
|
94 |
+
response.read(decode_content=False)
|
95 |
+
response.release_conn()
|
96 |
+
|
97 |
+
response = cached_response
|
98 |
+
|
99 |
+
# We always cache the 301 responses
|
100 |
+
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
|
101 |
+
self.controller.cache_response(request, response)
|
102 |
+
else:
|
103 |
+
# Wrap the response file with a wrapper that will cache the
|
104 |
+
# response when the stream has been consumed.
|
105 |
+
response._fp = CallbackFileWrapper(
|
106 |
+
response._fp,
|
107 |
+
functools.partial(
|
108 |
+
self.controller.cache_response, request, response
|
109 |
+
),
|
110 |
+
)
|
111 |
+
if response.chunked:
|
112 |
+
super_update_chunk_length = response._update_chunk_length
|
113 |
+
|
114 |
+
def _update_chunk_length(self):
|
115 |
+
super_update_chunk_length()
|
116 |
+
if self.chunk_left == 0:
|
117 |
+
self._fp._close()
|
118 |
+
|
119 |
+
response._update_chunk_length = types.MethodType(
|
120 |
+
_update_chunk_length, response
|
121 |
+
)
|
122 |
+
|
123 |
+
resp = super(CacheControlAdapter, self).build_response(request, response)
|
124 |
+
|
125 |
+
# See if we should invalidate the cache.
|
126 |
+
if request.method in self.invalidating_methods and resp.ok:
|
127 |
+
cache_url = self.controller.cache_url(request.url)
|
128 |
+
self.cache.delete(cache_url)
|
129 |
+
|
130 |
+
# Give the request a from_cache attr to let people use it
|
131 |
+
resp.from_cache = from_cache
|
132 |
+
|
133 |
+
return resp
|
134 |
+
|
135 |
+
def close(self):
|
136 |
+
self.cache.close()
|
137 |
+
super(CacheControlAdapter, self).close()
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/cache.py
ADDED
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
"""
|
6 |
+
The cache object API for implementing caches. The default is a thread
|
7 |
+
safe in-memory dictionary.
|
8 |
+
"""
|
9 |
+
from threading import Lock
|
10 |
+
|
11 |
+
|
12 |
+
class BaseCache(object):
|
13 |
+
|
14 |
+
def get(self, key):
|
15 |
+
raise NotImplementedError()
|
16 |
+
|
17 |
+
def set(self, key, value, expires=None):
|
18 |
+
raise NotImplementedError()
|
19 |
+
|
20 |
+
def delete(self, key):
|
21 |
+
raise NotImplementedError()
|
22 |
+
|
23 |
+
def close(self):
|
24 |
+
pass
|
25 |
+
|
26 |
+
|
27 |
+
class DictCache(BaseCache):
|
28 |
+
|
29 |
+
def __init__(self, init_dict=None):
|
30 |
+
self.lock = Lock()
|
31 |
+
self.data = init_dict or {}
|
32 |
+
|
33 |
+
def get(self, key):
|
34 |
+
return self.data.get(key, None)
|
35 |
+
|
36 |
+
def set(self, key, value, expires=None):
|
37 |
+
with self.lock:
|
38 |
+
self.data.update({key: value})
|
39 |
+
|
40 |
+
def delete(self, key):
|
41 |
+
with self.lock:
|
42 |
+
if key in self.data:
|
43 |
+
self.data.pop(key)
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__init__.py
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from .file_cache import FileCache # noqa
|
6 |
+
from .redis_cache import RedisCache # noqa
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-310.pyc
ADDED
Binary file (3.36 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/file_cache.py
ADDED
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import hashlib
|
6 |
+
import os
|
7 |
+
from textwrap import dedent
|
8 |
+
|
9 |
+
from ..cache import BaseCache
|
10 |
+
from ..controller import CacheController
|
11 |
+
|
12 |
+
try:
|
13 |
+
FileNotFoundError
|
14 |
+
except NameError:
|
15 |
+
# py2.X
|
16 |
+
FileNotFoundError = (IOError, OSError)
|
17 |
+
|
18 |
+
|
19 |
+
def _secure_open_write(filename, fmode):
|
20 |
+
# We only want to write to this file, so open it in write only mode
|
21 |
+
flags = os.O_WRONLY
|
22 |
+
|
23 |
+
# os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only
|
24 |
+
# will open *new* files.
|
25 |
+
# We specify this because we want to ensure that the mode we pass is the
|
26 |
+
# mode of the file.
|
27 |
+
flags |= os.O_CREAT | os.O_EXCL
|
28 |
+
|
29 |
+
# Do not follow symlinks to prevent someone from making a symlink that
|
30 |
+
# we follow and insecurely open a cache file.
|
31 |
+
if hasattr(os, "O_NOFOLLOW"):
|
32 |
+
flags |= os.O_NOFOLLOW
|
33 |
+
|
34 |
+
# On Windows we'll mark this file as binary
|
35 |
+
if hasattr(os, "O_BINARY"):
|
36 |
+
flags |= os.O_BINARY
|
37 |
+
|
38 |
+
# Before we open our file, we want to delete any existing file that is
|
39 |
+
# there
|
40 |
+
try:
|
41 |
+
os.remove(filename)
|
42 |
+
except (IOError, OSError):
|
43 |
+
# The file must not exist already, so we can just skip ahead to opening
|
44 |
+
pass
|
45 |
+
|
46 |
+
# Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a
|
47 |
+
# race condition happens between the os.remove and this line, that an
|
48 |
+
# error will be raised. Because we utilize a lockfile this should only
|
49 |
+
# happen if someone is attempting to attack us.
|
50 |
+
fd = os.open(filename, flags, fmode)
|
51 |
+
try:
|
52 |
+
return os.fdopen(fd, "wb")
|
53 |
+
|
54 |
+
except:
|
55 |
+
# An error occurred wrapping our FD in a file object
|
56 |
+
os.close(fd)
|
57 |
+
raise
|
58 |
+
|
59 |
+
|
60 |
+
class FileCache(BaseCache):
|
61 |
+
|
62 |
+
def __init__(
|
63 |
+
self,
|
64 |
+
directory,
|
65 |
+
forever=False,
|
66 |
+
filemode=0o0600,
|
67 |
+
dirmode=0o0700,
|
68 |
+
use_dir_lock=None,
|
69 |
+
lock_class=None,
|
70 |
+
):
|
71 |
+
|
72 |
+
if use_dir_lock is not None and lock_class is not None:
|
73 |
+
raise ValueError("Cannot use use_dir_lock and lock_class together")
|
74 |
+
|
75 |
+
try:
|
76 |
+
from lockfile import LockFile
|
77 |
+
from lockfile.mkdirlockfile import MkdirLockFile
|
78 |
+
except ImportError:
|
79 |
+
notice = dedent(
|
80 |
+
"""
|
81 |
+
NOTE: In order to use the FileCache you must have
|
82 |
+
lockfile installed. You can install it via pip:
|
83 |
+
pip install lockfile
|
84 |
+
"""
|
85 |
+
)
|
86 |
+
raise ImportError(notice)
|
87 |
+
|
88 |
+
else:
|
89 |
+
if use_dir_lock:
|
90 |
+
lock_class = MkdirLockFile
|
91 |
+
|
92 |
+
elif lock_class is None:
|
93 |
+
lock_class = LockFile
|
94 |
+
|
95 |
+
self.directory = directory
|
96 |
+
self.forever = forever
|
97 |
+
self.filemode = filemode
|
98 |
+
self.dirmode = dirmode
|
99 |
+
self.lock_class = lock_class
|
100 |
+
|
101 |
+
@staticmethod
|
102 |
+
def encode(x):
|
103 |
+
return hashlib.sha224(x.encode()).hexdigest()
|
104 |
+
|
105 |
+
def _fn(self, name):
|
106 |
+
# NOTE: This method should not change as some may depend on it.
|
107 |
+
# See: https://github.com/ionrock/cachecontrol/issues/63
|
108 |
+
hashed = self.encode(name)
|
109 |
+
parts = list(hashed[:5]) + [hashed]
|
110 |
+
return os.path.join(self.directory, *parts)
|
111 |
+
|
112 |
+
def get(self, key):
|
113 |
+
name = self._fn(key)
|
114 |
+
try:
|
115 |
+
with open(name, "rb") as fh:
|
116 |
+
return fh.read()
|
117 |
+
|
118 |
+
except FileNotFoundError:
|
119 |
+
return None
|
120 |
+
|
121 |
+
def set(self, key, value, expires=None):
|
122 |
+
name = self._fn(key)
|
123 |
+
|
124 |
+
# Make sure the directory exists
|
125 |
+
try:
|
126 |
+
os.makedirs(os.path.dirname(name), self.dirmode)
|
127 |
+
except (IOError, OSError):
|
128 |
+
pass
|
129 |
+
|
130 |
+
with self.lock_class(name) as lock:
|
131 |
+
# Write our actual file
|
132 |
+
with _secure_open_write(lock.path, self.filemode) as fh:
|
133 |
+
fh.write(value)
|
134 |
+
|
135 |
+
def delete(self, key):
|
136 |
+
name = self._fn(key)
|
137 |
+
if not self.forever:
|
138 |
+
try:
|
139 |
+
os.remove(name)
|
140 |
+
except FileNotFoundError:
|
141 |
+
pass
|
142 |
+
|
143 |
+
|
144 |
+
def url_to_file_path(url, filecache):
|
145 |
+
"""Return the file cache path based on the URL.
|
146 |
+
|
147 |
+
This does not ensure the file exists!
|
148 |
+
"""
|
149 |
+
key = CacheController.cache_url(url)
|
150 |
+
return filecache._fn(key)
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/caches/redis_cache.py
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from __future__ import division
|
6 |
+
|
7 |
+
from datetime import datetime
|
8 |
+
from pip._vendor.cachecontrol.cache import BaseCache
|
9 |
+
|
10 |
+
|
11 |
+
class RedisCache(BaseCache):
|
12 |
+
|
13 |
+
def __init__(self, conn):
|
14 |
+
self.conn = conn
|
15 |
+
|
16 |
+
def get(self, key):
|
17 |
+
return self.conn.get(key)
|
18 |
+
|
19 |
+
def set(self, key, value, expires=None):
|
20 |
+
if not expires:
|
21 |
+
self.conn.set(key, value)
|
22 |
+
else:
|
23 |
+
expires = expires - datetime.utcnow()
|
24 |
+
self.conn.setex(key, int(expires.total_seconds()), value)
|
25 |
+
|
26 |
+
def delete(self, key):
|
27 |
+
self.conn.delete(key)
|
28 |
+
|
29 |
+
def clear(self):
|
30 |
+
"""Helper for clearing all the keys in a database. Use with
|
31 |
+
caution!"""
|
32 |
+
for key in self.conn.keys():
|
33 |
+
self.conn.delete(key)
|
34 |
+
|
35 |
+
def close(self):
|
36 |
+
"""Redis uses connection pooling, no need to close the connection."""
|
37 |
+
pass
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/compat.py
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
try:
|
6 |
+
from urllib.parse import urljoin
|
7 |
+
except ImportError:
|
8 |
+
from urlparse import urljoin
|
9 |
+
|
10 |
+
|
11 |
+
try:
|
12 |
+
import cPickle as pickle
|
13 |
+
except ImportError:
|
14 |
+
import pickle
|
15 |
+
|
16 |
+
# Handle the case where the requests module has been patched to not have
|
17 |
+
# urllib3 bundled as part of its source.
|
18 |
+
try:
|
19 |
+
from pip._vendor.requests.packages.urllib3.response import HTTPResponse
|
20 |
+
except ImportError:
|
21 |
+
from pip._vendor.urllib3.response import HTTPResponse
|
22 |
+
|
23 |
+
try:
|
24 |
+
from pip._vendor.requests.packages.urllib3.util import is_fp_closed
|
25 |
+
except ImportError:
|
26 |
+
from pip._vendor.urllib3.util import is_fp_closed
|
27 |
+
|
28 |
+
# Replicate some six behaviour
|
29 |
+
try:
|
30 |
+
text_type = unicode
|
31 |
+
except NameError:
|
32 |
+
text_type = str
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/controller.py
ADDED
@@ -0,0 +1,415 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
"""
|
6 |
+
The httplib2 algorithms ported for use with requests.
|
7 |
+
"""
|
8 |
+
import logging
|
9 |
+
import re
|
10 |
+
import calendar
|
11 |
+
import time
|
12 |
+
from email.utils import parsedate_tz
|
13 |
+
|
14 |
+
from pip._vendor.requests.structures import CaseInsensitiveDict
|
15 |
+
|
16 |
+
from .cache import DictCache
|
17 |
+
from .serialize import Serializer
|
18 |
+
|
19 |
+
|
20 |
+
logger = logging.getLogger(__name__)
|
21 |
+
|
22 |
+
URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
|
23 |
+
|
24 |
+
PERMANENT_REDIRECT_STATUSES = (301, 308)
|
25 |
+
|
26 |
+
|
27 |
+
def parse_uri(uri):
|
28 |
+
"""Parses a URI using the regex given in Appendix B of RFC 3986.
|
29 |
+
|
30 |
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
31 |
+
"""
|
32 |
+
groups = URI.match(uri).groups()
|
33 |
+
return (groups[1], groups[3], groups[4], groups[6], groups[8])
|
34 |
+
|
35 |
+
|
36 |
+
class CacheController(object):
|
37 |
+
"""An interface to see if request should cached or not.
|
38 |
+
"""
|
39 |
+
|
40 |
+
def __init__(
|
41 |
+
self, cache=None, cache_etags=True, serializer=None, status_codes=None
|
42 |
+
):
|
43 |
+
self.cache = DictCache() if cache is None else cache
|
44 |
+
self.cache_etags = cache_etags
|
45 |
+
self.serializer = serializer or Serializer()
|
46 |
+
self.cacheable_status_codes = status_codes or (200, 203, 300, 301, 308)
|
47 |
+
|
48 |
+
@classmethod
|
49 |
+
def _urlnorm(cls, uri):
|
50 |
+
"""Normalize the URL to create a safe key for the cache"""
|
51 |
+
(scheme, authority, path, query, fragment) = parse_uri(uri)
|
52 |
+
if not scheme or not authority:
|
53 |
+
raise Exception("Only absolute URIs are allowed. uri = %s" % uri)
|
54 |
+
|
55 |
+
scheme = scheme.lower()
|
56 |
+
authority = authority.lower()
|
57 |
+
|
58 |
+
if not path:
|
59 |
+
path = "/"
|
60 |
+
|
61 |
+
# Could do syntax based normalization of the URI before
|
62 |
+
# computing the digest. See Section 6.2.2 of Std 66.
|
63 |
+
request_uri = query and "?".join([path, query]) or path
|
64 |
+
defrag_uri = scheme + "://" + authority + request_uri
|
65 |
+
|
66 |
+
return defrag_uri
|
67 |
+
|
68 |
+
@classmethod
|
69 |
+
def cache_url(cls, uri):
|
70 |
+
return cls._urlnorm(uri)
|
71 |
+
|
72 |
+
def parse_cache_control(self, headers):
|
73 |
+
known_directives = {
|
74 |
+
# https://tools.ietf.org/html/rfc7234#section-5.2
|
75 |
+
"max-age": (int, True),
|
76 |
+
"max-stale": (int, False),
|
77 |
+
"min-fresh": (int, True),
|
78 |
+
"no-cache": (None, False),
|
79 |
+
"no-store": (None, False),
|
80 |
+
"no-transform": (None, False),
|
81 |
+
"only-if-cached": (None, False),
|
82 |
+
"must-revalidate": (None, False),
|
83 |
+
"public": (None, False),
|
84 |
+
"private": (None, False),
|
85 |
+
"proxy-revalidate": (None, False),
|
86 |
+
"s-maxage": (int, True),
|
87 |
+
}
|
88 |
+
|
89 |
+
cc_headers = headers.get("cache-control", headers.get("Cache-Control", ""))
|
90 |
+
|
91 |
+
retval = {}
|
92 |
+
|
93 |
+
for cc_directive in cc_headers.split(","):
|
94 |
+
if not cc_directive.strip():
|
95 |
+
continue
|
96 |
+
|
97 |
+
parts = cc_directive.split("=", 1)
|
98 |
+
directive = parts[0].strip()
|
99 |
+
|
100 |
+
try:
|
101 |
+
typ, required = known_directives[directive]
|
102 |
+
except KeyError:
|
103 |
+
logger.debug("Ignoring unknown cache-control directive: %s", directive)
|
104 |
+
continue
|
105 |
+
|
106 |
+
if not typ or not required:
|
107 |
+
retval[directive] = None
|
108 |
+
if typ:
|
109 |
+
try:
|
110 |
+
retval[directive] = typ(parts[1].strip())
|
111 |
+
except IndexError:
|
112 |
+
if required:
|
113 |
+
logger.debug(
|
114 |
+
"Missing value for cache-control " "directive: %s",
|
115 |
+
directive,
|
116 |
+
)
|
117 |
+
except ValueError:
|
118 |
+
logger.debug(
|
119 |
+
"Invalid value for cache-control directive " "%s, must be %s",
|
120 |
+
directive,
|
121 |
+
typ.__name__,
|
122 |
+
)
|
123 |
+
|
124 |
+
return retval
|
125 |
+
|
126 |
+
def cached_request(self, request):
|
127 |
+
"""
|
128 |
+
Return a cached response if it exists in the cache, otherwise
|
129 |
+
return False.
|
130 |
+
"""
|
131 |
+
cache_url = self.cache_url(request.url)
|
132 |
+
logger.debug('Looking up "%s" in the cache', cache_url)
|
133 |
+
cc = self.parse_cache_control(request.headers)
|
134 |
+
|
135 |
+
# Bail out if the request insists on fresh data
|
136 |
+
if "no-cache" in cc:
|
137 |
+
logger.debug('Request header has "no-cache", cache bypassed')
|
138 |
+
return False
|
139 |
+
|
140 |
+
if "max-age" in cc and cc["max-age"] == 0:
|
141 |
+
logger.debug('Request header has "max_age" as 0, cache bypassed')
|
142 |
+
return False
|
143 |
+
|
144 |
+
# Request allows serving from the cache, let's see if we find something
|
145 |
+
cache_data = self.cache.get(cache_url)
|
146 |
+
if cache_data is None:
|
147 |
+
logger.debug("No cache entry available")
|
148 |
+
return False
|
149 |
+
|
150 |
+
# Check whether it can be deserialized
|
151 |
+
resp = self.serializer.loads(request, cache_data)
|
152 |
+
if not resp:
|
153 |
+
logger.warning("Cache entry deserialization failed, entry ignored")
|
154 |
+
return False
|
155 |
+
|
156 |
+
# If we have a cached permanent redirect, return it immediately. We
|
157 |
+
# don't need to test our response for other headers b/c it is
|
158 |
+
# intrinsically "cacheable" as it is Permanent.
|
159 |
+
#
|
160 |
+
# See:
|
161 |
+
# https://tools.ietf.org/html/rfc7231#section-6.4.2
|
162 |
+
#
|
163 |
+
# Client can try to refresh the value by repeating the request
|
164 |
+
# with cache busting headers as usual (ie no-cache).
|
165 |
+
if int(resp.status) in PERMANENT_REDIRECT_STATUSES:
|
166 |
+
msg = (
|
167 |
+
"Returning cached permanent redirect response "
|
168 |
+
"(ignoring date and etag information)"
|
169 |
+
)
|
170 |
+
logger.debug(msg)
|
171 |
+
return resp
|
172 |
+
|
173 |
+
headers = CaseInsensitiveDict(resp.headers)
|
174 |
+
if not headers or "date" not in headers:
|
175 |
+
if "etag" not in headers:
|
176 |
+
# Without date or etag, the cached response can never be used
|
177 |
+
# and should be deleted.
|
178 |
+
logger.debug("Purging cached response: no date or etag")
|
179 |
+
self.cache.delete(cache_url)
|
180 |
+
logger.debug("Ignoring cached response: no date")
|
181 |
+
return False
|
182 |
+
|
183 |
+
now = time.time()
|
184 |
+
date = calendar.timegm(parsedate_tz(headers["date"]))
|
185 |
+
current_age = max(0, now - date)
|
186 |
+
logger.debug("Current age based on date: %i", current_age)
|
187 |
+
|
188 |
+
# TODO: There is an assumption that the result will be a
|
189 |
+
# urllib3 response object. This may not be best since we
|
190 |
+
# could probably avoid instantiating or constructing the
|
191 |
+
# response until we know we need it.
|
192 |
+
resp_cc = self.parse_cache_control(headers)
|
193 |
+
|
194 |
+
# determine freshness
|
195 |
+
freshness_lifetime = 0
|
196 |
+
|
197 |
+
# Check the max-age pragma in the cache control header
|
198 |
+
if "max-age" in resp_cc:
|
199 |
+
freshness_lifetime = resp_cc["max-age"]
|
200 |
+
logger.debug("Freshness lifetime from max-age: %i", freshness_lifetime)
|
201 |
+
|
202 |
+
# If there isn't a max-age, check for an expires header
|
203 |
+
elif "expires" in headers:
|
204 |
+
expires = parsedate_tz(headers["expires"])
|
205 |
+
if expires is not None:
|
206 |
+
expire_time = calendar.timegm(expires) - date
|
207 |
+
freshness_lifetime = max(0, expire_time)
|
208 |
+
logger.debug("Freshness lifetime from expires: %i", freshness_lifetime)
|
209 |
+
|
210 |
+
# Determine if we are setting freshness limit in the
|
211 |
+
# request. Note, this overrides what was in the response.
|
212 |
+
if "max-age" in cc:
|
213 |
+
freshness_lifetime = cc["max-age"]
|
214 |
+
logger.debug(
|
215 |
+
"Freshness lifetime from request max-age: %i", freshness_lifetime
|
216 |
+
)
|
217 |
+
|
218 |
+
if "min-fresh" in cc:
|
219 |
+
min_fresh = cc["min-fresh"]
|
220 |
+
# adjust our current age by our min fresh
|
221 |
+
current_age += min_fresh
|
222 |
+
logger.debug("Adjusted current age from min-fresh: %i", current_age)
|
223 |
+
|
224 |
+
# Return entry if it is fresh enough
|
225 |
+
if freshness_lifetime > current_age:
|
226 |
+
logger.debug('The response is "fresh", returning cached response')
|
227 |
+
logger.debug("%i > %i", freshness_lifetime, current_age)
|
228 |
+
return resp
|
229 |
+
|
230 |
+
# we're not fresh. If we don't have an Etag, clear it out
|
231 |
+
if "etag" not in headers:
|
232 |
+
logger.debug('The cached response is "stale" with no etag, purging')
|
233 |
+
self.cache.delete(cache_url)
|
234 |
+
|
235 |
+
# return the original handler
|
236 |
+
return False
|
237 |
+
|
238 |
+
def conditional_headers(self, request):
|
239 |
+
cache_url = self.cache_url(request.url)
|
240 |
+
resp = self.serializer.loads(request, self.cache.get(cache_url))
|
241 |
+
new_headers = {}
|
242 |
+
|
243 |
+
if resp:
|
244 |
+
headers = CaseInsensitiveDict(resp.headers)
|
245 |
+
|
246 |
+
if "etag" in headers:
|
247 |
+
new_headers["If-None-Match"] = headers["ETag"]
|
248 |
+
|
249 |
+
if "last-modified" in headers:
|
250 |
+
new_headers["If-Modified-Since"] = headers["Last-Modified"]
|
251 |
+
|
252 |
+
return new_headers
|
253 |
+
|
254 |
+
def cache_response(self, request, response, body=None, status_codes=None):
|
255 |
+
"""
|
256 |
+
Algorithm for caching requests.
|
257 |
+
|
258 |
+
This assumes a requests Response object.
|
259 |
+
"""
|
260 |
+
# From httplib2: Don't cache 206's since we aren't going to
|
261 |
+
# handle byte range requests
|
262 |
+
cacheable_status_codes = status_codes or self.cacheable_status_codes
|
263 |
+
if response.status not in cacheable_status_codes:
|
264 |
+
logger.debug(
|
265 |
+
"Status code %s not in %s", response.status, cacheable_status_codes
|
266 |
+
)
|
267 |
+
return
|
268 |
+
|
269 |
+
response_headers = CaseInsensitiveDict(response.headers)
|
270 |
+
|
271 |
+
if "date" in response_headers:
|
272 |
+
date = calendar.timegm(parsedate_tz(response_headers["date"]))
|
273 |
+
else:
|
274 |
+
date = 0
|
275 |
+
|
276 |
+
# If we've been given a body, our response has a Content-Length, that
|
277 |
+
# Content-Length is valid then we can check to see if the body we've
|
278 |
+
# been given matches the expected size, and if it doesn't we'll just
|
279 |
+
# skip trying to cache it.
|
280 |
+
if (
|
281 |
+
body is not None
|
282 |
+
and "content-length" in response_headers
|
283 |
+
and response_headers["content-length"].isdigit()
|
284 |
+
and int(response_headers["content-length"]) != len(body)
|
285 |
+
):
|
286 |
+
return
|
287 |
+
|
288 |
+
cc_req = self.parse_cache_control(request.headers)
|
289 |
+
cc = self.parse_cache_control(response_headers)
|
290 |
+
|
291 |
+
cache_url = self.cache_url(request.url)
|
292 |
+
logger.debug('Updating cache with response from "%s"', cache_url)
|
293 |
+
|
294 |
+
# Delete it from the cache if we happen to have it stored there
|
295 |
+
no_store = False
|
296 |
+
if "no-store" in cc:
|
297 |
+
no_store = True
|
298 |
+
logger.debug('Response header has "no-store"')
|
299 |
+
if "no-store" in cc_req:
|
300 |
+
no_store = True
|
301 |
+
logger.debug('Request header has "no-store"')
|
302 |
+
if no_store and self.cache.get(cache_url):
|
303 |
+
logger.debug('Purging existing cache entry to honor "no-store"')
|
304 |
+
self.cache.delete(cache_url)
|
305 |
+
if no_store:
|
306 |
+
return
|
307 |
+
|
308 |
+
# https://tools.ietf.org/html/rfc7234#section-4.1:
|
309 |
+
# A Vary header field-value of "*" always fails to match.
|
310 |
+
# Storing such a response leads to a deserialization warning
|
311 |
+
# during cache lookup and is not allowed to ever be served,
|
312 |
+
# so storing it can be avoided.
|
313 |
+
if "*" in response_headers.get("vary", ""):
|
314 |
+
logger.debug('Response header has "Vary: *"')
|
315 |
+
return
|
316 |
+
|
317 |
+
# If we've been given an etag, then keep the response
|
318 |
+
if self.cache_etags and "etag" in response_headers:
|
319 |
+
expires_time = 0
|
320 |
+
if response_headers.get("expires"):
|
321 |
+
expires = parsedate_tz(response_headers["expires"])
|
322 |
+
if expires is not None:
|
323 |
+
expires_time = calendar.timegm(expires) - date
|
324 |
+
|
325 |
+
expires_time = max(expires_time, 14 * 86400)
|
326 |
+
|
327 |
+
logger.debug("etag object cached for {0} seconds".format(expires_time))
|
328 |
+
logger.debug("Caching due to etag")
|
329 |
+
self.cache.set(
|
330 |
+
cache_url,
|
331 |
+
self.serializer.dumps(request, response, body),
|
332 |
+
expires=expires_time,
|
333 |
+
)
|
334 |
+
|
335 |
+
# Add to the cache any permanent redirects. We do this before looking
|
336 |
+
# that the Date headers.
|
337 |
+
elif int(response.status) in PERMANENT_REDIRECT_STATUSES:
|
338 |
+
logger.debug("Caching permanent redirect")
|
339 |
+
self.cache.set(cache_url, self.serializer.dumps(request, response, b""))
|
340 |
+
|
341 |
+
# Add to the cache if the response headers demand it. If there
|
342 |
+
# is no date header then we can't do anything about expiring
|
343 |
+
# the cache.
|
344 |
+
elif "date" in response_headers:
|
345 |
+
date = calendar.timegm(parsedate_tz(response_headers["date"]))
|
346 |
+
# cache when there is a max-age > 0
|
347 |
+
if "max-age" in cc and cc["max-age"] > 0:
|
348 |
+
logger.debug("Caching b/c date exists and max-age > 0")
|
349 |
+
expires_time = cc["max-age"]
|
350 |
+
self.cache.set(
|
351 |
+
cache_url,
|
352 |
+
self.serializer.dumps(request, response, body),
|
353 |
+
expires=expires_time,
|
354 |
+
)
|
355 |
+
|
356 |
+
# If the request can expire, it means we should cache it
|
357 |
+
# in the meantime.
|
358 |
+
elif "expires" in response_headers:
|
359 |
+
if response_headers["expires"]:
|
360 |
+
expires = parsedate_tz(response_headers["expires"])
|
361 |
+
if expires is not None:
|
362 |
+
expires_time = calendar.timegm(expires) - date
|
363 |
+
else:
|
364 |
+
expires_time = None
|
365 |
+
|
366 |
+
logger.debug(
|
367 |
+
"Caching b/c of expires header. expires in {0} seconds".format(
|
368 |
+
expires_time
|
369 |
+
)
|
370 |
+
)
|
371 |
+
self.cache.set(
|
372 |
+
cache_url,
|
373 |
+
self.serializer.dumps(request, response, body=body),
|
374 |
+
expires=expires_time,
|
375 |
+
)
|
376 |
+
|
377 |
+
def update_cached_response(self, request, response):
|
378 |
+
"""On a 304 we will get a new set of headers that we want to
|
379 |
+
update our cached value with, assuming we have one.
|
380 |
+
|
381 |
+
This should only ever be called when we've sent an ETag and
|
382 |
+
gotten a 304 as the response.
|
383 |
+
"""
|
384 |
+
cache_url = self.cache_url(request.url)
|
385 |
+
|
386 |
+
cached_response = self.serializer.loads(request, self.cache.get(cache_url))
|
387 |
+
|
388 |
+
if not cached_response:
|
389 |
+
# we didn't have a cached response
|
390 |
+
return response
|
391 |
+
|
392 |
+
# Lets update our headers with the headers from the new request:
|
393 |
+
# http://tools.ietf.org/html/draft-ietf-httpbis-p4-conditional-26#section-4.1
|
394 |
+
#
|
395 |
+
# The server isn't supposed to send headers that would make
|
396 |
+
# the cached body invalid. But... just in case, we'll be sure
|
397 |
+
# to strip out ones we know that might be problmatic due to
|
398 |
+
# typical assumptions.
|
399 |
+
excluded_headers = ["content-length"]
|
400 |
+
|
401 |
+
cached_response.headers.update(
|
402 |
+
dict(
|
403 |
+
(k, v)
|
404 |
+
for k, v in response.headers.items()
|
405 |
+
if k.lower() not in excluded_headers
|
406 |
+
)
|
407 |
+
)
|
408 |
+
|
409 |
+
# we want a 200 b/c we have content via the cache
|
410 |
+
cached_response.status = 200
|
411 |
+
|
412 |
+
# update our cache
|
413 |
+
self.cache.set(cache_url, self.serializer.dumps(request, cached_response))
|
414 |
+
|
415 |
+
return cached_response
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/filewrapper.py
ADDED
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from tempfile import NamedTemporaryFile
|
6 |
+
import mmap
|
7 |
+
|
8 |
+
|
9 |
+
class CallbackFileWrapper(object):
|
10 |
+
"""
|
11 |
+
Small wrapper around a fp object which will tee everything read into a
|
12 |
+
buffer, and when that file is closed it will execute a callback with the
|
13 |
+
contents of that buffer.
|
14 |
+
|
15 |
+
All attributes are proxied to the underlying file object.
|
16 |
+
|
17 |
+
This class uses members with a double underscore (__) leading prefix so as
|
18 |
+
not to accidentally shadow an attribute.
|
19 |
+
|
20 |
+
The data is stored in a temporary file until it is all available. As long
|
21 |
+
as the temporary files directory is disk-based (sometimes it's a
|
22 |
+
memory-backed-``tmpfs`` on Linux), data will be unloaded to disk if memory
|
23 |
+
pressure is high. For small files the disk usually won't be used at all,
|
24 |
+
it'll all be in the filesystem memory cache, so there should be no
|
25 |
+
performance impact.
|
26 |
+
"""
|
27 |
+
|
28 |
+
def __init__(self, fp, callback):
|
29 |
+
self.__buf = NamedTemporaryFile("rb+", delete=True)
|
30 |
+
self.__fp = fp
|
31 |
+
self.__callback = callback
|
32 |
+
|
33 |
+
def __getattr__(self, name):
|
34 |
+
# The vaguaries of garbage collection means that self.__fp is
|
35 |
+
# not always set. By using __getattribute__ and the private
|
36 |
+
# name[0] allows looking up the attribute value and raising an
|
37 |
+
# AttributeError when it doesn't exist. This stop thigns from
|
38 |
+
# infinitely recursing calls to getattr in the case where
|
39 |
+
# self.__fp hasn't been set.
|
40 |
+
#
|
41 |
+
# [0] https://docs.python.org/2/reference/expressions.html#atom-identifiers
|
42 |
+
fp = self.__getattribute__("_CallbackFileWrapper__fp")
|
43 |
+
return getattr(fp, name)
|
44 |
+
|
45 |
+
def __is_fp_closed(self):
|
46 |
+
try:
|
47 |
+
return self.__fp.fp is None
|
48 |
+
|
49 |
+
except AttributeError:
|
50 |
+
pass
|
51 |
+
|
52 |
+
try:
|
53 |
+
return self.__fp.closed
|
54 |
+
|
55 |
+
except AttributeError:
|
56 |
+
pass
|
57 |
+
|
58 |
+
# We just don't cache it then.
|
59 |
+
# TODO: Add some logging here...
|
60 |
+
return False
|
61 |
+
|
62 |
+
def _close(self):
|
63 |
+
if self.__callback:
|
64 |
+
if self.__buf.tell() == 0:
|
65 |
+
# Empty file:
|
66 |
+
result = b""
|
67 |
+
else:
|
68 |
+
# Return the data without actually loading it into memory,
|
69 |
+
# relying on Python's buffer API and mmap(). mmap() just gives
|
70 |
+
# a view directly into the filesystem's memory cache, so it
|
71 |
+
# doesn't result in duplicate memory use.
|
72 |
+
self.__buf.seek(0, 0)
|
73 |
+
result = memoryview(
|
74 |
+
mmap.mmap(self.__buf.fileno(), 0, access=mmap.ACCESS_READ)
|
75 |
+
)
|
76 |
+
self.__callback(result)
|
77 |
+
|
78 |
+
# We assign this to None here, because otherwise we can get into
|
79 |
+
# really tricky problems where the CPython interpreter dead locks
|
80 |
+
# because the callback is holding a reference to something which
|
81 |
+
# has a __del__ method. Setting this to None breaks the cycle
|
82 |
+
# and allows the garbage collector to do it's thing normally.
|
83 |
+
self.__callback = None
|
84 |
+
|
85 |
+
# Closing the temporary file releases memory and frees disk space.
|
86 |
+
# Important when caching big files.
|
87 |
+
self.__buf.close()
|
88 |
+
|
89 |
+
def read(self, amt=None):
|
90 |
+
data = self.__fp.read(amt)
|
91 |
+
if data:
|
92 |
+
# We may be dealing with b'', a sign that things are over:
|
93 |
+
# it's passed e.g. after we've already closed self.__buf.
|
94 |
+
self.__buf.write(data)
|
95 |
+
if self.__is_fp_closed():
|
96 |
+
self._close()
|
97 |
+
|
98 |
+
return data
|
99 |
+
|
100 |
+
def _safe_read(self, amt):
|
101 |
+
data = self.__fp._safe_read(amt)
|
102 |
+
if amt == 2 and data == b"\r\n":
|
103 |
+
# urllib executes this read to toss the CRLF at the end
|
104 |
+
# of the chunk.
|
105 |
+
return data
|
106 |
+
|
107 |
+
self.__buf.write(data)
|
108 |
+
if self.__is_fp_closed():
|
109 |
+
self._close()
|
110 |
+
|
111 |
+
return data
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/heuristics.py
ADDED
@@ -0,0 +1,139 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import calendar
|
6 |
+
import time
|
7 |
+
|
8 |
+
from email.utils import formatdate, parsedate, parsedate_tz
|
9 |
+
|
10 |
+
from datetime import datetime, timedelta
|
11 |
+
|
12 |
+
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
|
13 |
+
|
14 |
+
|
15 |
+
def expire_after(delta, date=None):
|
16 |
+
date = date or datetime.utcnow()
|
17 |
+
return date + delta
|
18 |
+
|
19 |
+
|
20 |
+
def datetime_to_header(dt):
|
21 |
+
return formatdate(calendar.timegm(dt.timetuple()))
|
22 |
+
|
23 |
+
|
24 |
+
class BaseHeuristic(object):
|
25 |
+
|
26 |
+
def warning(self, response):
|
27 |
+
"""
|
28 |
+
Return a valid 1xx warning header value describing the cache
|
29 |
+
adjustments.
|
30 |
+
|
31 |
+
The response is provided too allow warnings like 113
|
32 |
+
http://tools.ietf.org/html/rfc7234#section-5.5.4 where we need
|
33 |
+
to explicitly say response is over 24 hours old.
|
34 |
+
"""
|
35 |
+
return '110 - "Response is Stale"'
|
36 |
+
|
37 |
+
def update_headers(self, response):
|
38 |
+
"""Update the response headers with any new headers.
|
39 |
+
|
40 |
+
NOTE: This SHOULD always include some Warning header to
|
41 |
+
signify that the response was cached by the client, not
|
42 |
+
by way of the provided headers.
|
43 |
+
"""
|
44 |
+
return {}
|
45 |
+
|
46 |
+
def apply(self, response):
|
47 |
+
updated_headers = self.update_headers(response)
|
48 |
+
|
49 |
+
if updated_headers:
|
50 |
+
response.headers.update(updated_headers)
|
51 |
+
warning_header_value = self.warning(response)
|
52 |
+
if warning_header_value is not None:
|
53 |
+
response.headers.update({"Warning": warning_header_value})
|
54 |
+
|
55 |
+
return response
|
56 |
+
|
57 |
+
|
58 |
+
class OneDayCache(BaseHeuristic):
|
59 |
+
"""
|
60 |
+
Cache the response by providing an expires 1 day in the
|
61 |
+
future.
|
62 |
+
"""
|
63 |
+
|
64 |
+
def update_headers(self, response):
|
65 |
+
headers = {}
|
66 |
+
|
67 |
+
if "expires" not in response.headers:
|
68 |
+
date = parsedate(response.headers["date"])
|
69 |
+
expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
|
70 |
+
headers["expires"] = datetime_to_header(expires)
|
71 |
+
headers["cache-control"] = "public"
|
72 |
+
return headers
|
73 |
+
|
74 |
+
|
75 |
+
class ExpiresAfter(BaseHeuristic):
|
76 |
+
"""
|
77 |
+
Cache **all** requests for a defined time period.
|
78 |
+
"""
|
79 |
+
|
80 |
+
def __init__(self, **kw):
|
81 |
+
self.delta = timedelta(**kw)
|
82 |
+
|
83 |
+
def update_headers(self, response):
|
84 |
+
expires = expire_after(self.delta)
|
85 |
+
return {"expires": datetime_to_header(expires), "cache-control": "public"}
|
86 |
+
|
87 |
+
def warning(self, response):
|
88 |
+
tmpl = "110 - Automatically cached for %s. Response might be stale"
|
89 |
+
return tmpl % self.delta
|
90 |
+
|
91 |
+
|
92 |
+
class LastModified(BaseHeuristic):
|
93 |
+
"""
|
94 |
+
If there is no Expires header already, fall back on Last-Modified
|
95 |
+
using the heuristic from
|
96 |
+
http://tools.ietf.org/html/rfc7234#section-4.2.2
|
97 |
+
to calculate a reasonable value.
|
98 |
+
|
99 |
+
Firefox also does something like this per
|
100 |
+
https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
|
101 |
+
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
|
102 |
+
Unlike mozilla we limit this to 24-hr.
|
103 |
+
"""
|
104 |
+
cacheable_by_default_statuses = {
|
105 |
+
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
|
106 |
+
}
|
107 |
+
|
108 |
+
def update_headers(self, resp):
|
109 |
+
headers = resp.headers
|
110 |
+
|
111 |
+
if "expires" in headers:
|
112 |
+
return {}
|
113 |
+
|
114 |
+
if "cache-control" in headers and headers["cache-control"] != "public":
|
115 |
+
return {}
|
116 |
+
|
117 |
+
if resp.status not in self.cacheable_by_default_statuses:
|
118 |
+
return {}
|
119 |
+
|
120 |
+
if "date" not in headers or "last-modified" not in headers:
|
121 |
+
return {}
|
122 |
+
|
123 |
+
date = calendar.timegm(parsedate_tz(headers["date"]))
|
124 |
+
last_modified = parsedate(headers["last-modified"])
|
125 |
+
if date is None or last_modified is None:
|
126 |
+
return {}
|
127 |
+
|
128 |
+
now = time.time()
|
129 |
+
current_age = max(0, now - date)
|
130 |
+
delta = date - calendar.timegm(last_modified)
|
131 |
+
freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
|
132 |
+
if freshness_lifetime <= current_age:
|
133 |
+
return {}
|
134 |
+
|
135 |
+
expires = date + freshness_lifetime
|
136 |
+
return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
|
137 |
+
|
138 |
+
def warning(self, resp):
|
139 |
+
return None
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/serialize.py
ADDED
@@ -0,0 +1,186 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
import base64
|
6 |
+
import io
|
7 |
+
import json
|
8 |
+
import zlib
|
9 |
+
|
10 |
+
from pip._vendor import msgpack
|
11 |
+
from pip._vendor.requests.structures import CaseInsensitiveDict
|
12 |
+
|
13 |
+
from .compat import HTTPResponse, pickle, text_type
|
14 |
+
|
15 |
+
|
16 |
+
def _b64_decode_bytes(b):
|
17 |
+
return base64.b64decode(b.encode("ascii"))
|
18 |
+
|
19 |
+
|
20 |
+
def _b64_decode_str(s):
|
21 |
+
return _b64_decode_bytes(s).decode("utf8")
|
22 |
+
|
23 |
+
|
24 |
+
_default_body_read = object()
|
25 |
+
|
26 |
+
|
27 |
+
class Serializer(object):
|
28 |
+
def dumps(self, request, response, body=None):
|
29 |
+
response_headers = CaseInsensitiveDict(response.headers)
|
30 |
+
|
31 |
+
if body is None:
|
32 |
+
# When a body isn't passed in, we'll read the response. We
|
33 |
+
# also update the response with a new file handler to be
|
34 |
+
# sure it acts as though it was never read.
|
35 |
+
body = response.read(decode_content=False)
|
36 |
+
response._fp = io.BytesIO(body)
|
37 |
+
|
38 |
+
# NOTE: This is all a bit weird, but it's really important that on
|
39 |
+
# Python 2.x these objects are unicode and not str, even when
|
40 |
+
# they contain only ascii. The problem here is that msgpack
|
41 |
+
# understands the difference between unicode and bytes and we
|
42 |
+
# have it set to differentiate between them, however Python 2
|
43 |
+
# doesn't know the difference. Forcing these to unicode will be
|
44 |
+
# enough to have msgpack know the difference.
|
45 |
+
data = {
|
46 |
+
u"response": {
|
47 |
+
u"body": body,
|
48 |
+
u"headers": dict(
|
49 |
+
(text_type(k), text_type(v)) for k, v in response.headers.items()
|
50 |
+
),
|
51 |
+
u"status": response.status,
|
52 |
+
u"version": response.version,
|
53 |
+
u"reason": text_type(response.reason),
|
54 |
+
u"strict": response.strict,
|
55 |
+
u"decode_content": response.decode_content,
|
56 |
+
}
|
57 |
+
}
|
58 |
+
|
59 |
+
# Construct our vary headers
|
60 |
+
data[u"vary"] = {}
|
61 |
+
if u"vary" in response_headers:
|
62 |
+
varied_headers = response_headers[u"vary"].split(",")
|
63 |
+
for header in varied_headers:
|
64 |
+
header = text_type(header).strip()
|
65 |
+
header_value = request.headers.get(header, None)
|
66 |
+
if header_value is not None:
|
67 |
+
header_value = text_type(header_value)
|
68 |
+
data[u"vary"][header] = header_value
|
69 |
+
|
70 |
+
return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
|
71 |
+
|
72 |
+
def loads(self, request, data):
|
73 |
+
# Short circuit if we've been given an empty set of data
|
74 |
+
if not data:
|
75 |
+
return
|
76 |
+
|
77 |
+
# Determine what version of the serializer the data was serialized
|
78 |
+
# with
|
79 |
+
try:
|
80 |
+
ver, data = data.split(b",", 1)
|
81 |
+
except ValueError:
|
82 |
+
ver = b"cc=0"
|
83 |
+
|
84 |
+
# Make sure that our "ver" is actually a version and isn't a false
|
85 |
+
# positive from a , being in the data stream.
|
86 |
+
if ver[:3] != b"cc=":
|
87 |
+
data = ver + data
|
88 |
+
ver = b"cc=0"
|
89 |
+
|
90 |
+
# Get the version number out of the cc=N
|
91 |
+
ver = ver.split(b"=", 1)[-1].decode("ascii")
|
92 |
+
|
93 |
+
# Dispatch to the actual load method for the given version
|
94 |
+
try:
|
95 |
+
return getattr(self, "_loads_v{}".format(ver))(request, data)
|
96 |
+
|
97 |
+
except AttributeError:
|
98 |
+
# This is a version we don't have a loads function for, so we'll
|
99 |
+
# just treat it as a miss and return None
|
100 |
+
return
|
101 |
+
|
102 |
+
def prepare_response(self, request, cached):
|
103 |
+
"""Verify our vary headers match and construct a real urllib3
|
104 |
+
HTTPResponse object.
|
105 |
+
"""
|
106 |
+
# Special case the '*' Vary value as it means we cannot actually
|
107 |
+
# determine if the cached response is suitable for this request.
|
108 |
+
# This case is also handled in the controller code when creating
|
109 |
+
# a cache entry, but is left here for backwards compatibility.
|
110 |
+
if "*" in cached.get("vary", {}):
|
111 |
+
return
|
112 |
+
|
113 |
+
# Ensure that the Vary headers for the cached response match our
|
114 |
+
# request
|
115 |
+
for header, value in cached.get("vary", {}).items():
|
116 |
+
if request.headers.get(header, None) != value:
|
117 |
+
return
|
118 |
+
|
119 |
+
body_raw = cached["response"].pop("body")
|
120 |
+
|
121 |
+
headers = CaseInsensitiveDict(data=cached["response"]["headers"])
|
122 |
+
if headers.get("transfer-encoding", "") == "chunked":
|
123 |
+
headers.pop("transfer-encoding")
|
124 |
+
|
125 |
+
cached["response"]["headers"] = headers
|
126 |
+
|
127 |
+
try:
|
128 |
+
body = io.BytesIO(body_raw)
|
129 |
+
except TypeError:
|
130 |
+
# This can happen if cachecontrol serialized to v1 format (pickle)
|
131 |
+
# using Python 2. A Python 2 str(byte string) will be unpickled as
|
132 |
+
# a Python 3 str (unicode string), which will cause the above to
|
133 |
+
# fail with:
|
134 |
+
#
|
135 |
+
# TypeError: 'str' does not support the buffer interface
|
136 |
+
body = io.BytesIO(body_raw.encode("utf8"))
|
137 |
+
|
138 |
+
return HTTPResponse(body=body, preload_content=False, **cached["response"])
|
139 |
+
|
140 |
+
def _loads_v0(self, request, data):
|
141 |
+
# The original legacy cache data. This doesn't contain enough
|
142 |
+
# information to construct everything we need, so we'll treat this as
|
143 |
+
# a miss.
|
144 |
+
return
|
145 |
+
|
146 |
+
def _loads_v1(self, request, data):
|
147 |
+
try:
|
148 |
+
cached = pickle.loads(data)
|
149 |
+
except ValueError:
|
150 |
+
return
|
151 |
+
|
152 |
+
return self.prepare_response(request, cached)
|
153 |
+
|
154 |
+
def _loads_v2(self, request, data):
|
155 |
+
try:
|
156 |
+
cached = json.loads(zlib.decompress(data).decode("utf8"))
|
157 |
+
except (ValueError, zlib.error):
|
158 |
+
return
|
159 |
+
|
160 |
+
# We need to decode the items that we've base64 encoded
|
161 |
+
cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
|
162 |
+
cached["response"]["headers"] = dict(
|
163 |
+
(_b64_decode_str(k), _b64_decode_str(v))
|
164 |
+
for k, v in cached["response"]["headers"].items()
|
165 |
+
)
|
166 |
+
cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
|
167 |
+
cached["vary"] = dict(
|
168 |
+
(_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
|
169 |
+
for k, v in cached["vary"].items()
|
170 |
+
)
|
171 |
+
|
172 |
+
return self.prepare_response(request, cached)
|
173 |
+
|
174 |
+
def _loads_v3(self, request, data):
|
175 |
+
# Due to Python 2 encoding issues, it's impossible to know for sure
|
176 |
+
# exactly how to load v3 entries, thus we'll treat these as a miss so
|
177 |
+
# that they get rewritten out as v4 entries.
|
178 |
+
return
|
179 |
+
|
180 |
+
def _loads_v4(self, request, data):
|
181 |
+
try:
|
182 |
+
cached = msgpack.loads(data, raw=False)
|
183 |
+
except ValueError:
|
184 |
+
return
|
185 |
+
|
186 |
+
return self.prepare_response(request, cached)
|
venv/lib/python3.10/site-packages/pip/_vendor/cachecontrol/wrapper.py
ADDED
@@ -0,0 +1,33 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# SPDX-FileCopyrightText: 2015 Eric Larson
|
2 |
+
#
|
3 |
+
# SPDX-License-Identifier: Apache-2.0
|
4 |
+
|
5 |
+
from .adapter import CacheControlAdapter
|
6 |
+
from .cache import DictCache
|
7 |
+
|
8 |
+
|
9 |
+
def CacheControl(
    sess,
    cache=None,
    cache_etags=True,
    serializer=None,
    heuristic=None,
    controller_class=None,
    adapter_class=None,
    cacheable_methods=None,
):
    """Wrap a requests session so its HTTP(S) traffic is cached.

    Builds a caching adapter (``CacheControlAdapter`` by default,
    backed by a ``DictCache`` unless *cache* is supplied), mounts it
    for both the ``http://`` and ``https://`` prefixes, and returns
    the same session instance.
    """
    if cache is None:
        cache = DictCache()
    if not adapter_class:
        adapter_class = CacheControlAdapter

    caching_adapter = adapter_class(
        cache,
        cache_etags=cache_etags,
        serializer=serializer,
        heuristic=heuristic,
        controller_class=controller_class,
        cacheable_methods=cacheable_methods,
    )

    # One shared adapter instance handles both schemes.
    for prefix in ("http://", "https://"):
        sess.mount(prefix, caching_adapter)

    return sess
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (1.9 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5freq.cpython-310.pyc
ADDED
Binary file (27.2 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/big5prober.cpython-310.pyc
ADDED
Binary file (1.13 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/chardistribution.cpython-310.pyc
ADDED
Binary file (5.74 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-310.pyc
ADDED
Binary file (2.22 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/charsetprober.cpython-310.pyc
ADDED
Binary file (3.48 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-310.pyc
ADDED
Binary file (2.9 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/compat.cpython-310.pyc
ADDED
Binary file (396 Bytes). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/cp949prober.cpython-310.pyc
ADDED
Binary file (1.13 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/enums.cpython-310.pyc
ADDED
Binary file (2.58 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escprober.cpython-310.pyc
ADDED
Binary file (2.63 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/escsm.cpython-310.pyc
ADDED
Binary file (8.37 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/eucjpprober.cpython-310.pyc
ADDED
Binary file (2.43 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrfreq.cpython-310.pyc
ADDED
Binary file (12.1 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euckrprober.cpython-310.pyc
ADDED
Binary file (1.13 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwfreq.cpython-310.pyc
ADDED
Binary file (27.2 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/euctwprober.cpython-310.pyc
ADDED
Binary file (1.13 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312freq.cpython-310.pyc
ADDED
Binary file (19.1 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/gb2312prober.cpython-310.pyc
ADDED
Binary file (1.14 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/hebrewprober.cpython-310.pyc
ADDED
Binary file (3.01 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/jpcntx.cpython-310.pyc
ADDED
Binary file (37.6 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-310.pyc
ADDED
Binary file (47.9 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-310.pyc
ADDED
Binary file (46.1 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-310.pyc
ADDED
Binary file (44.6 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-310.pyc
ADDED
Binary file (47.9 kB). View file
|
|
venv/lib/python3.10/site-packages/pip/_vendor/chardet/__pycache__/langthaimodel.cpython-310.pyc
ADDED
Binary file (44.7 kB). View file
|
|