applied-ai-018 committed
Commit 9f817f0 · verified · 1 parent: 9023bfa

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes. See the raw diff for the complete set of changes.
Files changed (50):
  1. ckpts/universal/global_step80/zero/16.attention.query_key_value.weight/exp_avg_sq.pt +3 -0
  2. ckpts/universal/global_step80/zero/20.post_attention_layernorm.weight/exp_avg_sq.pt +3 -0
  3. ckpts/universal/global_step80/zero/20.post_attention_layernorm.weight/fp32.pt +3 -0
  4. ckpts/universal/global_step80/zero/9.post_attention_layernorm.weight/exp_avg.pt +3 -0
  5. ckpts/universal/global_step80/zero/9.post_attention_layernorm.weight/exp_avg_sq.pt +3 -0
  6. ckpts/universal/global_step80/zero/9.post_attention_layernorm.weight/fp32.pt +3 -0
  7. venv/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc +0 -0
  8. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__init__.py +85 -0
  9. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-310.pyc +0 -0
  10. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-310.pyc +0 -0
  11. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-310.pyc +0 -0
  12. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-310.pyc +0 -0
  13. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-310.pyc +0 -0
  14. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc +0 -0
  15. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-310.pyc +0 -0
  16. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-310.pyc +0 -0
  17. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-310.pyc +0 -0
  18. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-310.pyc +0 -0
  19. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-310.pyc +0 -0
  20. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/_collections.py +355 -0
  21. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/_version.py +2 -0
  22. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/connection.py +569 -0
  23. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/connectionpool.py +1113 -0
  24. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py +0 -0
  25. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc +0 -0
  26. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc +0 -0
  27. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc +0 -0
  28. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc +0 -0
  29. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc +0 -0
  30. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc +0 -0
  31. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc +0 -0
  32. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py +36 -0
  33. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py +0 -0
  34. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc +0 -0
  35. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc +0 -0
  36. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-310.pyc +0 -0
  37. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py +519 -0
  38. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py +397 -0
  39. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/appengine.py +314 -0
  40. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py +130 -0
  41. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py +511 -0
  42. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/securetransport.py +922 -0
  43. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/socks.py +216 -0
  44. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/exceptions.py +323 -0
  45. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/fields.py +274 -0
  46. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/__init__.py +0 -0
  47. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-310.pyc +0 -0
  48. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-310.pyc +0 -0
  49. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py +0 -0
  50. venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-310.pyc +0 -0
ckpts/universal/global_step80/zero/16.attention.query_key_value.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:766db41e5df5912a7868f9511f81f27a7caff605205a90533a5f7c85abfa4bc9
+ size 50332843
ckpts/universal/global_step80/zero/20.post_attention_layernorm.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b690476159839c32cd36b353f8651025f9e8234aa966edbd537ab4519bd55320
+ size 9387
ckpts/universal/global_step80/zero/20.post_attention_layernorm.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8595a7b74716c9f5dafe85f65140a04e6802b0b0c8252dd68d7285b4e5f1184d
+ size 9293
ckpts/universal/global_step80/zero/9.post_attention_layernorm.weight/exp_avg.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:397e8f19ba02f25764a206593dcfa83b7766ed51ca0ed02bc0ff1ea3796e123a
+ size 9372
ckpts/universal/global_step80/zero/9.post_attention_layernorm.weight/exp_avg_sq.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:3f86b7c4976e73eef6f4a4e01aa37226dab4a883f5e7476f2fb9d531d1b57f22
+ size 9387
ckpts/universal/global_step80/zero/9.post_attention_layernorm.weight/fp32.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bf9f40aaa2a541e09a985f4b2c8cc24c779a72d429f889d4aa416bd2386752c2
+ size 9293
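
Note: the six .pt checkpoint entries above are tracked with Git LFS, so the diff records only a three-line pointer (spec version, SHA-256 oid, and size in bytes) rather than the tensor data itself. A minimal sketch of reading such a pointer before the LFS objects have been fetched; the helper name is illustrative and not part of this commit, the path is one of the files added above:

from pathlib import Path

def read_lfs_pointer(path):
    """Parse a Git LFS pointer file into {'version': ..., 'oid': ..., 'size': ...}."""
    fields = {}
    for line in Path(path).read_text().splitlines():
        key, _, value = line.partition(" ")
        fields[key] = value
    fields["size"] = int(fields["size"])  # byte size of the real object stored in LFS
    return fields

ptr = read_lfs_pointer(
    "ckpts/universal/global_step80/zero/9.post_attention_layernorm.weight/fp32.pt"
)
print(ptr["oid"], ptr["size"])  # sha256:... and 9293 for this particular file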
venv/lib/python3.10/site-packages/pip/_vendor/__pycache__/typing_extensions.cpython-310.pyc ADDED
Binary file (66.6 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__init__.py ADDED
@@ -0,0 +1,85 @@
+ """
+ Python HTTP library with thread-safe connection pooling, file post support, user friendly, and more
+ """
+ from __future__ import absolute_import
+
+ # Set default logging handler to avoid "No handler found" warnings.
+ import logging
+ import warnings
+ from logging import NullHandler
+
+ from . import exceptions
+ from ._version import __version__
+ from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool, connection_from_url
+ from .filepost import encode_multipart_formdata
+ from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+ from .response import HTTPResponse
+ from .util.request import make_headers
+ from .util.retry import Retry
+ from .util.timeout import Timeout
+ from .util.url import get_host
+
+ __author__ = "Andrey Petrov ([email protected])"
+ __license__ = "MIT"
+ __version__ = __version__
+
+ __all__ = (
+     "HTTPConnectionPool",
+     "HTTPSConnectionPool",
+     "PoolManager",
+     "ProxyManager",
+     "HTTPResponse",
+     "Retry",
+     "Timeout",
+     "add_stderr_logger",
+     "connection_from_url",
+     "disable_warnings",
+     "encode_multipart_formdata",
+     "get_host",
+     "make_headers",
+     "proxy_from_url",
+ )
+
+ logging.getLogger(__name__).addHandler(NullHandler())
+
+
+ def add_stderr_logger(level=logging.DEBUG):
+     """
+     Helper for quickly adding a StreamHandler to the logger. Useful for
+     debugging.
+
+     Returns the handler after adding it.
+     """
+     # This method needs to be in this __init__.py to get the __name__ correct
+     # even if urllib3 is vendored within another package.
+     logger = logging.getLogger(__name__)
+     handler = logging.StreamHandler()
+     handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
+     logger.addHandler(handler)
+     logger.setLevel(level)
+     logger.debug("Added a stderr logging handler to logger: %s", __name__)
+     return handler
+
+
+ # ... Clean up.
+ del NullHandler
+
+
+ # All warning filters *must* be appended unless you're really certain that they
+ # shouldn't be: otherwise, it's very hard for users to use most Python
+ # mechanisms to silence them.
+ # SecurityWarning's always go off by default.
+ warnings.simplefilter("always", exceptions.SecurityWarning, append=True)
+ # SubjectAltNameWarning's should go off once per host
+ warnings.simplefilter("default", exceptions.SubjectAltNameWarning, append=True)
+ # InsecurePlatformWarning's don't vary between requests, so we keep it default.
+ warnings.simplefilter("default", exceptions.InsecurePlatformWarning, append=True)
+ # SNIMissingWarnings should go off only once.
+ warnings.simplefilter("default", exceptions.SNIMissingWarning, append=True)
+
+
+ def disable_warnings(category=exceptions.HTTPWarning):
+     """
+     Helper for quickly disabling all urllib3 warnings.
+     """
+     warnings.simplefilter("ignore", category)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (2.19 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_collections.cpython-310.pyc ADDED
Binary file (11.4 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/_version.cpython-310.pyc ADDED
Binary file (209 Bytes)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connection.cpython-310.pyc ADDED
Binary file (13.6 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/connectionpool.cpython-310.pyc ADDED
Binary file (25.5 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/exceptions.cpython-310.pyc ADDED
Binary file (11 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/fields.cpython-310.pyc ADDED
Binary file (8.18 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/filepost.cpython-310.pyc ADDED
Binary file (2.75 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/poolmanager.cpython-310.pyc ADDED
Binary file (15.3 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/request.cpython-310.pyc ADDED
Binary file (5.62 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/__pycache__/response.cpython-310.pyc ADDED
Binary file (20.9 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/_collections.py ADDED
@@ -0,0 +1,355 @@
1
+ from __future__ import absolute_import
2
+
3
+ try:
4
+ from collections.abc import Mapping, MutableMapping
5
+ except ImportError:
6
+ from collections import Mapping, MutableMapping
7
+ try:
8
+ from threading import RLock
9
+ except ImportError: # Platform-specific: No threads available
10
+
11
+ class RLock:
12
+ def __enter__(self):
13
+ pass
14
+
15
+ def __exit__(self, exc_type, exc_value, traceback):
16
+ pass
17
+
18
+
19
+ from collections import OrderedDict
20
+
21
+ from .exceptions import InvalidHeader
22
+ from .packages import six
23
+ from .packages.six import iterkeys, itervalues
24
+
25
+ __all__ = ["RecentlyUsedContainer", "HTTPHeaderDict"]
26
+
27
+
28
+ _Null = object()
29
+
30
+
31
+ class RecentlyUsedContainer(MutableMapping):
32
+ """
33
+ Provides a thread-safe dict-like container which maintains up to
34
+ ``maxsize`` keys while throwing away the least-recently-used keys beyond
35
+ ``maxsize``.
36
+
37
+ :param maxsize:
38
+ Maximum number of recent elements to retain.
39
+
40
+ :param dispose_func:
41
+ Every time an item is evicted from the container,
42
+ ``dispose_func(value)`` is called. Callback which will get called
43
+ """
44
+
45
+ ContainerCls = OrderedDict
46
+
47
+ def __init__(self, maxsize=10, dispose_func=None):
48
+ self._maxsize = maxsize
49
+ self.dispose_func = dispose_func
50
+
51
+ self._container = self.ContainerCls()
52
+ self.lock = RLock()
53
+
54
+ def __getitem__(self, key):
55
+ # Re-insert the item, moving it to the end of the eviction line.
56
+ with self.lock:
57
+ item = self._container.pop(key)
58
+ self._container[key] = item
59
+ return item
60
+
61
+ def __setitem__(self, key, value):
62
+ evicted_value = _Null
63
+ with self.lock:
64
+ # Possibly evict the existing value of 'key'
65
+ evicted_value = self._container.get(key, _Null)
66
+ self._container[key] = value
67
+
68
+ # If we didn't evict an existing value, we might have to evict the
69
+ # least recently used item from the beginning of the container.
70
+ if len(self._container) > self._maxsize:
71
+ _key, evicted_value = self._container.popitem(last=False)
72
+
73
+ if self.dispose_func and evicted_value is not _Null:
74
+ self.dispose_func(evicted_value)
75
+
76
+ def __delitem__(self, key):
77
+ with self.lock:
78
+ value = self._container.pop(key)
79
+
80
+ if self.dispose_func:
81
+ self.dispose_func(value)
82
+
83
+ def __len__(self):
84
+ with self.lock:
85
+ return len(self._container)
86
+
87
+ def __iter__(self):
88
+ raise NotImplementedError(
89
+ "Iteration over this class is unlikely to be threadsafe."
90
+ )
91
+
92
+ def clear(self):
93
+ with self.lock:
94
+ # Copy pointers to all values, then wipe the mapping
95
+ values = list(itervalues(self._container))
96
+ self._container.clear()
97
+
98
+ if self.dispose_func:
99
+ for value in values:
100
+ self.dispose_func(value)
101
+
102
+ def keys(self):
103
+ with self.lock:
104
+ return list(iterkeys(self._container))
105
+
106
+
107
+ class HTTPHeaderDict(MutableMapping):
108
+ """
109
+ :param headers:
110
+ An iterable of field-value pairs. Must not contain multiple field names
111
+ when compared case-insensitively.
112
+
113
+ :param kwargs:
114
+ Additional field-value pairs to pass in to ``dict.update``.
115
+
116
+ A ``dict`` like container for storing HTTP Headers.
117
+
118
+ Field names are stored and compared case-insensitively in compliance with
119
+ RFC 7230. Iteration provides the first case-sensitive key seen for each
120
+ case-insensitive pair.
121
+
122
+ Using ``__setitem__`` syntax overwrites fields that compare equal
123
+ case-insensitively in order to maintain ``dict``'s api. For fields that
124
+ compare equal, instead create a new ``HTTPHeaderDict`` and use ``.add``
125
+ in a loop.
126
+
127
+ If multiple fields that are equal case-insensitively are passed to the
128
+ constructor or ``.update``, the behavior is undefined and some will be
129
+ lost.
130
+
131
+ >>> headers = HTTPHeaderDict()
132
+ >>> headers.add('Set-Cookie', 'foo=bar')
133
+ >>> headers.add('set-cookie', 'baz=quxx')
134
+ >>> headers['content-length'] = '7'
135
+ >>> headers['SET-cookie']
136
+ 'foo=bar, baz=quxx'
137
+ >>> headers['Content-Length']
138
+ '7'
139
+ """
140
+
141
+ def __init__(self, headers=None, **kwargs):
142
+ super(HTTPHeaderDict, self).__init__()
143
+ self._container = OrderedDict()
144
+ if headers is not None:
145
+ if isinstance(headers, HTTPHeaderDict):
146
+ self._copy_from(headers)
147
+ else:
148
+ self.extend(headers)
149
+ if kwargs:
150
+ self.extend(kwargs)
151
+
152
+ def __setitem__(self, key, val):
153
+ self._container[key.lower()] = [key, val]
154
+ return self._container[key.lower()]
155
+
156
+ def __getitem__(self, key):
157
+ val = self._container[key.lower()]
158
+ return ", ".join(val[1:])
159
+
160
+ def __delitem__(self, key):
161
+ del self._container[key.lower()]
162
+
163
+ def __contains__(self, key):
164
+ return key.lower() in self._container
165
+
166
+ def __eq__(self, other):
167
+ if not isinstance(other, Mapping) and not hasattr(other, "keys"):
168
+ return False
169
+ if not isinstance(other, type(self)):
170
+ other = type(self)(other)
171
+ return dict((k.lower(), v) for k, v in self.itermerged()) == dict(
172
+ (k.lower(), v) for k, v in other.itermerged()
173
+ )
174
+
175
+ def __ne__(self, other):
176
+ return not self.__eq__(other)
177
+
178
+ if six.PY2: # Python 2
179
+ iterkeys = MutableMapping.iterkeys
180
+ itervalues = MutableMapping.itervalues
181
+
182
+ __marker = object()
183
+
184
+ def __len__(self):
185
+ return len(self._container)
186
+
187
+ def __iter__(self):
188
+ # Only provide the originally cased names
189
+ for vals in self._container.values():
190
+ yield vals[0]
191
+
192
+ def pop(self, key, default=__marker):
193
+ """D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
194
+ If key is not found, d is returned if given, otherwise KeyError is raised.
195
+ """
196
+ # Using the MutableMapping function directly fails due to the private marker.
197
+ # Using ordinary dict.pop would expose the internal structures.
198
+ # So let's reinvent the wheel.
199
+ try:
200
+ value = self[key]
201
+ except KeyError:
202
+ if default is self.__marker:
203
+ raise
204
+ return default
205
+ else:
206
+ del self[key]
207
+ return value
208
+
209
+ def discard(self, key):
210
+ try:
211
+ del self[key]
212
+ except KeyError:
213
+ pass
214
+
215
+ def add(self, key, val):
216
+ """Adds a (name, value) pair, doesn't overwrite the value if it already
217
+ exists.
218
+
219
+ >>> headers = HTTPHeaderDict(foo='bar')
220
+ >>> headers.add('Foo', 'baz')
221
+ >>> headers['foo']
222
+ 'bar, baz'
223
+ """
224
+ key_lower = key.lower()
225
+ new_vals = [key, val]
226
+ # Keep the common case aka no item present as fast as possible
227
+ vals = self._container.setdefault(key_lower, new_vals)
228
+ if new_vals is not vals:
229
+ vals.append(val)
230
+
231
+ def extend(self, *args, **kwargs):
232
+ """Generic import function for any type of header-like object.
233
+ Adapted version of MutableMapping.update in order to insert items
234
+ with self.add instead of self.__setitem__
235
+ """
236
+ if len(args) > 1:
237
+ raise TypeError(
238
+ "extend() takes at most 1 positional "
239
+ "arguments ({0} given)".format(len(args))
240
+ )
241
+ other = args[0] if len(args) >= 1 else ()
242
+
243
+ if isinstance(other, HTTPHeaderDict):
244
+ for key, val in other.iteritems():
245
+ self.add(key, val)
246
+ elif isinstance(other, Mapping):
247
+ for key in other:
248
+ self.add(key, other[key])
249
+ elif hasattr(other, "keys"):
250
+ for key in other.keys():
251
+ self.add(key, other[key])
252
+ else:
253
+ for key, value in other:
254
+ self.add(key, value)
255
+
256
+ for key, value in kwargs.items():
257
+ self.add(key, value)
258
+
259
+ def getlist(self, key, default=__marker):
260
+ """Returns a list of all the values for the named field. Returns an
261
+ empty list if the key doesn't exist."""
262
+ try:
263
+ vals = self._container[key.lower()]
264
+ except KeyError:
265
+ if default is self.__marker:
266
+ return []
267
+ return default
268
+ else:
269
+ return vals[1:]
270
+
271
+ def _prepare_for_method_change(self):
272
+ """
273
+ Remove content-specific header fields before changing the request
274
+ method to GET or HEAD according to RFC 9110, Section 15.4.
275
+ """
276
+ content_specific_headers = [
277
+ "Content-Encoding",
278
+ "Content-Language",
279
+ "Content-Location",
280
+ "Content-Type",
281
+ "Content-Length",
282
+ "Digest",
283
+ "Last-Modified",
284
+ ]
285
+ for header in content_specific_headers:
286
+ self.discard(header)
287
+ return self
288
+
289
+ # Backwards compatibility for httplib
290
+ getheaders = getlist
291
+ getallmatchingheaders = getlist
292
+ iget = getlist
293
+
294
+ # Backwards compatibility for http.cookiejar
295
+ get_all = getlist
296
+
297
+ def __repr__(self):
298
+ return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
299
+
300
+ def _copy_from(self, other):
301
+ for key in other:
302
+ val = other.getlist(key)
303
+ if isinstance(val, list):
304
+ # Don't need to convert tuples
305
+ val = list(val)
306
+ self._container[key.lower()] = [key] + val
307
+
308
+ def copy(self):
309
+ clone = type(self)()
310
+ clone._copy_from(self)
311
+ return clone
312
+
313
+ def iteritems(self):
314
+ """Iterate over all header lines, including duplicate ones."""
315
+ for key in self:
316
+ vals = self._container[key.lower()]
317
+ for val in vals[1:]:
318
+ yield vals[0], val
319
+
320
+ def itermerged(self):
321
+ """Iterate over all headers, merging duplicate ones together."""
322
+ for key in self:
323
+ val = self._container[key.lower()]
324
+ yield val[0], ", ".join(val[1:])
325
+
326
+ def items(self):
327
+ return list(self.iteritems())
328
+
329
+ @classmethod
330
+ def from_httplib(cls, message): # Python 2
331
+ """Read headers from a Python 2 httplib message object."""
332
+ # python2.7 does not expose a proper API for exporting multiheaders
333
+ # efficiently. This function re-reads raw lines from the message
334
+ # object and extracts the multiheaders properly.
335
+ obs_fold_continued_leaders = (" ", "\t")
336
+ headers = []
337
+
338
+ for line in message.headers:
339
+ if line.startswith(obs_fold_continued_leaders):
340
+ if not headers:
341
+ # We received a header line that starts with OWS as described
342
+ # in RFC-7230 S3.2.4. This indicates a multiline header, but
343
+ # there exists no previous header to which we can attach it.
344
+ raise InvalidHeader(
345
+ "Header continuation with no previous header: %s" % line
346
+ )
347
+ else:
348
+ key, value = headers[-1]
349
+ headers[-1] = (key, value + " " + line.strip())
350
+ continue
351
+
352
+ key, value = line.split(":", 1)
353
+ headers.append((key, value.strip()))
354
+
355
+ return cls(headers)
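
The HTTPHeaderDict added above compares field names case-insensitively while add() keeps duplicate header lines, as its docstring describes. A quick illustrative check, again assuming a standalone urllib3 1.26.x install rather than pip's vendored copy (where the import path would be pip._vendor.urllib3._collections):

from urllib3._collections import HTTPHeaderDict

headers = HTTPHeaderDict()
headers.add("Set-Cookie", "foo=bar")
headers.add("set-cookie", "baz=quxx")   # appended under the same case-insensitive key
headers["Content-Length"] = "7"         # __setitem__ overwrites instead of appending

print(headers["SET-cookie"])            # 'foo=bar, baz=quxx'
print(headers.items())                  # duplicate Set-Cookie lines are kept separately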
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/_version.py ADDED
@@ -0,0 +1,2 @@
+ # This file is protected via CODEOWNERS
+ __version__ = "1.26.8"
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/connection.py ADDED
@@ -0,0 +1,569 @@
1
+ from __future__ import absolute_import
2
+
3
+ import datetime
4
+ import logging
5
+ import os
6
+ import re
7
+ import socket
8
+ import warnings
9
+ from socket import error as SocketError
10
+ from socket import timeout as SocketTimeout
11
+
12
+ from .packages import six
13
+ from .packages.six.moves.http_client import HTTPConnection as _HTTPConnection
14
+ from .packages.six.moves.http_client import HTTPException # noqa: F401
15
+ from .util.proxy import create_proxy_ssl_context
16
+
17
+ try: # Compiled with SSL?
18
+ import ssl
19
+
20
+ BaseSSLError = ssl.SSLError
21
+ except (ImportError, AttributeError): # Platform-specific: No SSL.
22
+ ssl = None
23
+
24
+ class BaseSSLError(BaseException):
25
+ pass
26
+
27
+
28
+ try:
29
+ # Python 3: not a no-op, we're adding this to the namespace so it can be imported.
30
+ ConnectionError = ConnectionError
31
+ except NameError:
32
+ # Python 2
33
+ class ConnectionError(Exception):
34
+ pass
35
+
36
+
37
+ try: # Python 3:
38
+ # Not a no-op, we're adding this to the namespace so it can be imported.
39
+ BrokenPipeError = BrokenPipeError
40
+ except NameError: # Python 2:
41
+
42
+ class BrokenPipeError(Exception):
43
+ pass
44
+
45
+
46
+ from ._collections import HTTPHeaderDict # noqa (historical, removed in v2)
47
+ from ._version import __version__
48
+ from .exceptions import (
49
+ ConnectTimeoutError,
50
+ NewConnectionError,
51
+ SubjectAltNameWarning,
52
+ SystemTimeWarning,
53
+ )
54
+ from .util import SKIP_HEADER, SKIPPABLE_HEADERS, connection
55
+ from .util.ssl_ import (
56
+ assert_fingerprint,
57
+ create_urllib3_context,
58
+ is_ipaddress,
59
+ resolve_cert_reqs,
60
+ resolve_ssl_version,
61
+ ssl_wrap_socket,
62
+ )
63
+ from .util.ssl_match_hostname import CertificateError, match_hostname
64
+
65
+ log = logging.getLogger(__name__)
66
+
67
+ port_by_scheme = {"http": 80, "https": 443}
68
+
69
+ # When it comes time to update this value as a part of regular maintenance
70
+ # (ie test_recent_date is failing) update it to ~6 months before the current date.
71
+ RECENT_DATE = datetime.date(2020, 7, 1)
72
+
73
+ _CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
74
+
75
+
76
+ class HTTPConnection(_HTTPConnection, object):
77
+ """
78
+ Based on :class:`http.client.HTTPConnection` but provides an extra constructor
79
+ backwards-compatibility layer between older and newer Pythons.
80
+
81
+ Additional keyword parameters are used to configure attributes of the connection.
82
+ Accepted parameters include:
83
+
84
+ - ``strict``: See the documentation on :class:`urllib3.connectionpool.HTTPConnectionPool`
85
+ - ``source_address``: Set the source address for the current connection.
86
+ - ``socket_options``: Set specific options on the underlying socket. If not specified, then
87
+ defaults are loaded from ``HTTPConnection.default_socket_options`` which includes disabling
88
+ Nagle's algorithm (sets TCP_NODELAY to 1) unless the connection is behind a proxy.
89
+
90
+ For example, if you wish to enable TCP Keep Alive in addition to the defaults,
91
+ you might pass:
92
+
93
+ .. code-block:: python
94
+
95
+ HTTPConnection.default_socket_options + [
96
+ (socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1),
97
+ ]
98
+
99
+ Or you may want to disable the defaults by passing an empty list (e.g., ``[]``).
100
+ """
101
+
102
+ default_port = port_by_scheme["http"]
103
+
104
+ #: Disable Nagle's algorithm by default.
105
+ #: ``[(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]``
106
+ default_socket_options = [(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)]
107
+
108
+ #: Whether this connection verifies the host's certificate.
109
+ is_verified = False
110
+
111
+ #: Whether this proxy connection (if used) verifies the proxy host's
112
+ #: certificate.
113
+ proxy_is_verified = None
114
+
115
+ def __init__(self, *args, **kw):
116
+ if not six.PY2:
117
+ kw.pop("strict", None)
118
+
119
+ # Pre-set source_address.
120
+ self.source_address = kw.get("source_address")
121
+
122
+ #: The socket options provided by the user. If no options are
123
+ #: provided, we use the default options.
124
+ self.socket_options = kw.pop("socket_options", self.default_socket_options)
125
+
126
+ # Proxy options provided by the user.
127
+ self.proxy = kw.pop("proxy", None)
128
+ self.proxy_config = kw.pop("proxy_config", None)
129
+
130
+ _HTTPConnection.__init__(self, *args, **kw)
131
+
132
+ @property
133
+ def host(self):
134
+ """
135
+ Getter method to remove any trailing dots that indicate the hostname is an FQDN.
136
+
137
+ In general, SSL certificates don't include the trailing dot indicating a
138
+ fully-qualified domain name, and thus, they don't validate properly when
139
+ checked against a domain name that includes the dot. In addition, some
140
+ servers may not expect to receive the trailing dot when provided.
141
+
142
+ However, the hostname with trailing dot is critical to DNS resolution; doing a
143
+ lookup with the trailing dot will properly only resolve the appropriate FQDN,
144
+ whereas a lookup without a trailing dot will search the system's search domain
145
+ list. Thus, it's important to keep the original host around for use only in
146
+ those cases where it's appropriate (i.e., when doing DNS lookup to establish the
147
+ actual TCP connection across which we're going to send HTTP requests).
148
+ """
149
+ return self._dns_host.rstrip(".")
150
+
151
+ @host.setter
152
+ def host(self, value):
153
+ """
154
+ Setter for the `host` property.
155
+
156
+ We assume that only urllib3 uses the _dns_host attribute; httplib itself
157
+ only uses `host`, and it seems reasonable that other libraries follow suit.
158
+ """
159
+ self._dns_host = value
160
+
161
+ def _new_conn(self):
162
+ """Establish a socket connection and set nodelay settings on it.
163
+
164
+ :return: New socket connection.
165
+ """
166
+ extra_kw = {}
167
+ if self.source_address:
168
+ extra_kw["source_address"] = self.source_address
169
+
170
+ if self.socket_options:
171
+ extra_kw["socket_options"] = self.socket_options
172
+
173
+ try:
174
+ conn = connection.create_connection(
175
+ (self._dns_host, self.port), self.timeout, **extra_kw
176
+ )
177
+
178
+ except SocketTimeout:
179
+ raise ConnectTimeoutError(
180
+ self,
181
+ "Connection to %s timed out. (connect timeout=%s)"
182
+ % (self.host, self.timeout),
183
+ )
184
+
185
+ except SocketError as e:
186
+ raise NewConnectionError(
187
+ self, "Failed to establish a new connection: %s" % e
188
+ )
189
+
190
+ return conn
191
+
192
+ def _is_using_tunnel(self):
193
+ # Google App Engine's httplib does not define _tunnel_host
194
+ return getattr(self, "_tunnel_host", None)
195
+
196
+ def _prepare_conn(self, conn):
197
+ self.sock = conn
198
+ if self._is_using_tunnel():
199
+ # TODO: Fix tunnel so it doesn't depend on self.sock state.
200
+ self._tunnel()
201
+ # Mark this connection as not reusable
202
+ self.auto_open = 0
203
+
204
+ def connect(self):
205
+ conn = self._new_conn()
206
+ self._prepare_conn(conn)
207
+
208
+ def putrequest(self, method, url, *args, **kwargs):
209
+ """ """
210
+ # Empty docstring because the indentation of CPython's implementation
211
+ # is broken but we don't want this method in our documentation.
212
+ match = _CONTAINS_CONTROL_CHAR_RE.search(method)
213
+ if match:
214
+ raise ValueError(
215
+ "Method cannot contain non-token characters %r (found at least %r)"
216
+ % (method, match.group())
217
+ )
218
+
219
+ return _HTTPConnection.putrequest(self, method, url, *args, **kwargs)
220
+
221
+ def putheader(self, header, *values):
222
+ """ """
223
+ if not any(isinstance(v, str) and v == SKIP_HEADER for v in values):
224
+ _HTTPConnection.putheader(self, header, *values)
225
+ elif six.ensure_str(header.lower()) not in SKIPPABLE_HEADERS:
226
+ raise ValueError(
227
+ "urllib3.util.SKIP_HEADER only supports '%s'"
228
+ % ("', '".join(map(str.title, sorted(SKIPPABLE_HEADERS))),)
229
+ )
230
+
231
+ def request(self, method, url, body=None, headers=None):
232
+ if headers is None:
233
+ headers = {}
234
+ else:
235
+ # Avoid modifying the headers passed into .request()
236
+ headers = headers.copy()
237
+ if "user-agent" not in (six.ensure_str(k.lower()) for k in headers):
238
+ headers["User-Agent"] = _get_default_user_agent()
239
+ super(HTTPConnection, self).request(method, url, body=body, headers=headers)
240
+
241
+ def request_chunked(self, method, url, body=None, headers=None):
242
+ """
243
+ Alternative to the common request method, which sends the
244
+ body with chunked encoding and not as one block
245
+ """
246
+ headers = headers or {}
247
+ header_keys = set([six.ensure_str(k.lower()) for k in headers])
248
+ skip_accept_encoding = "accept-encoding" in header_keys
249
+ skip_host = "host" in header_keys
250
+ self.putrequest(
251
+ method, url, skip_accept_encoding=skip_accept_encoding, skip_host=skip_host
252
+ )
253
+ if "user-agent" not in header_keys:
254
+ self.putheader("User-Agent", _get_default_user_agent())
255
+ for header, value in headers.items():
256
+ self.putheader(header, value)
257
+ if "transfer-encoding" not in header_keys:
258
+ self.putheader("Transfer-Encoding", "chunked")
259
+ self.endheaders()
260
+
261
+ if body is not None:
262
+ stringish_types = six.string_types + (bytes,)
263
+ if isinstance(body, stringish_types):
264
+ body = (body,)
265
+ for chunk in body:
266
+ if not chunk:
267
+ continue
268
+ if not isinstance(chunk, bytes):
269
+ chunk = chunk.encode("utf8")
270
+ len_str = hex(len(chunk))[2:]
271
+ to_send = bytearray(len_str.encode())
272
+ to_send += b"\r\n"
273
+ to_send += chunk
274
+ to_send += b"\r\n"
275
+ self.send(to_send)
276
+
277
+ # After the if clause, to always have a closed body
278
+ self.send(b"0\r\n\r\n")
279
+
280
+
281
+ class HTTPSConnection(HTTPConnection):
282
+ """
283
+ Many of the parameters to this constructor are passed to the underlying SSL
284
+ socket by means of :py:func:`urllib3.util.ssl_wrap_socket`.
285
+ """
286
+
287
+ default_port = port_by_scheme["https"]
288
+
289
+ cert_reqs = None
290
+ ca_certs = None
291
+ ca_cert_dir = None
292
+ ca_cert_data = None
293
+ ssl_version = None
294
+ assert_fingerprint = None
295
+ tls_in_tls_required = False
296
+
297
+ def __init__(
298
+ self,
299
+ host,
300
+ port=None,
301
+ key_file=None,
302
+ cert_file=None,
303
+ key_password=None,
304
+ strict=None,
305
+ timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
306
+ ssl_context=None,
307
+ server_hostname=None,
308
+ **kw
309
+ ):
310
+
311
+ HTTPConnection.__init__(self, host, port, strict=strict, timeout=timeout, **kw)
312
+
313
+ self.key_file = key_file
314
+ self.cert_file = cert_file
315
+ self.key_password = key_password
316
+ self.ssl_context = ssl_context
317
+ self.server_hostname = server_hostname
318
+
319
+ # Required property for Google AppEngine 1.9.0 which otherwise causes
320
+ # HTTPS requests to go out as HTTP. (See Issue #356)
321
+ self._protocol = "https"
322
+
323
+ def set_cert(
324
+ self,
325
+ key_file=None,
326
+ cert_file=None,
327
+ cert_reqs=None,
328
+ key_password=None,
329
+ ca_certs=None,
330
+ assert_hostname=None,
331
+ assert_fingerprint=None,
332
+ ca_cert_dir=None,
333
+ ca_cert_data=None,
334
+ ):
335
+ """
336
+ This method should only be called once, before the connection is used.
337
+ """
338
+ # If cert_reqs is not provided we'll assume CERT_REQUIRED unless we also
339
+ # have an SSLContext object in which case we'll use its verify_mode.
340
+ if cert_reqs is None:
341
+ if self.ssl_context is not None:
342
+ cert_reqs = self.ssl_context.verify_mode
343
+ else:
344
+ cert_reqs = resolve_cert_reqs(None)
345
+
346
+ self.key_file = key_file
347
+ self.cert_file = cert_file
348
+ self.cert_reqs = cert_reqs
349
+ self.key_password = key_password
350
+ self.assert_hostname = assert_hostname
351
+ self.assert_fingerprint = assert_fingerprint
352
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
353
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
354
+ self.ca_cert_data = ca_cert_data
355
+
356
+ def connect(self):
357
+ # Add certificate verification
358
+ conn = self._new_conn()
359
+ hostname = self.host
360
+ tls_in_tls = False
361
+
362
+ if self._is_using_tunnel():
363
+ if self.tls_in_tls_required:
364
+ conn = self._connect_tls_proxy(hostname, conn)
365
+ tls_in_tls = True
366
+
367
+ self.sock = conn
368
+
369
+ # Calls self._set_hostport(), so self.host is
370
+ # self._tunnel_host below.
371
+ self._tunnel()
372
+ # Mark this connection as not reusable
373
+ self.auto_open = 0
374
+
375
+ # Override the host with the one we're requesting data from.
376
+ hostname = self._tunnel_host
377
+
378
+ server_hostname = hostname
379
+ if self.server_hostname is not None:
380
+ server_hostname = self.server_hostname
381
+
382
+ is_time_off = datetime.date.today() < RECENT_DATE
383
+ if is_time_off:
384
+ warnings.warn(
385
+ (
386
+ "System time is way off (before {0}). This will probably "
387
+ "lead to SSL verification errors"
388
+ ).format(RECENT_DATE),
389
+ SystemTimeWarning,
390
+ )
391
+
392
+ # Wrap socket using verification with the root certs in
393
+ # trusted_root_certs
394
+ default_ssl_context = False
395
+ if self.ssl_context is None:
396
+ default_ssl_context = True
397
+ self.ssl_context = create_urllib3_context(
398
+ ssl_version=resolve_ssl_version(self.ssl_version),
399
+ cert_reqs=resolve_cert_reqs(self.cert_reqs),
400
+ )
401
+
402
+ context = self.ssl_context
403
+ context.verify_mode = resolve_cert_reqs(self.cert_reqs)
404
+
405
+ # Try to load OS default certs if none are given.
406
+ # Works well on Windows (requires Python3.4+)
407
+ if (
408
+ not self.ca_certs
409
+ and not self.ca_cert_dir
410
+ and not self.ca_cert_data
411
+ and default_ssl_context
412
+ and hasattr(context, "load_default_certs")
413
+ ):
414
+ context.load_default_certs()
415
+
416
+ self.sock = ssl_wrap_socket(
417
+ sock=conn,
418
+ keyfile=self.key_file,
419
+ certfile=self.cert_file,
420
+ key_password=self.key_password,
421
+ ca_certs=self.ca_certs,
422
+ ca_cert_dir=self.ca_cert_dir,
423
+ ca_cert_data=self.ca_cert_data,
424
+ server_hostname=server_hostname,
425
+ ssl_context=context,
426
+ tls_in_tls=tls_in_tls,
427
+ )
428
+
429
+ # If we're using all defaults and the connection
430
+ # is TLSv1 or TLSv1.1 we throw a DeprecationWarning
431
+ # for the host.
432
+ if (
433
+ default_ssl_context
434
+ and self.ssl_version is None
435
+ and hasattr(self.sock, "version")
436
+ and self.sock.version() in {"TLSv1", "TLSv1.1"}
437
+ ):
438
+ warnings.warn(
439
+ "Negotiating TLSv1/TLSv1.1 by default is deprecated "
440
+ "and will be disabled in urllib3 v2.0.0. Connecting to "
441
+ "'%s' with '%s' can be enabled by explicitly opting-in "
442
+ "with 'ssl_version'" % (self.host, self.sock.version()),
443
+ DeprecationWarning,
444
+ )
445
+
446
+ if self.assert_fingerprint:
447
+ assert_fingerprint(
448
+ self.sock.getpeercert(binary_form=True), self.assert_fingerprint
449
+ )
450
+ elif (
451
+ context.verify_mode != ssl.CERT_NONE
452
+ and not getattr(context, "check_hostname", False)
453
+ and self.assert_hostname is not False
454
+ ):
455
+ # While urllib3 attempts to always turn off hostname matching from
456
+ # the TLS library, this cannot always be done. So we check whether
457
+ # the TLS Library still thinks it's matching hostnames.
458
+ cert = self.sock.getpeercert()
459
+ if not cert.get("subjectAltName", ()):
460
+ warnings.warn(
461
+ (
462
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
463
+ "`commonName` for now. This feature is being removed by major browsers and "
464
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
465
+ "for details.)".format(hostname)
466
+ ),
467
+ SubjectAltNameWarning,
468
+ )
469
+ _match_hostname(cert, self.assert_hostname or server_hostname)
470
+
471
+ self.is_verified = (
472
+ context.verify_mode == ssl.CERT_REQUIRED
473
+ or self.assert_fingerprint is not None
474
+ )
475
+
476
+ def _connect_tls_proxy(self, hostname, conn):
477
+ """
478
+ Establish a TLS connection to the proxy using the provided SSL context.
479
+ """
480
+ proxy_config = self.proxy_config
481
+ ssl_context = proxy_config.ssl_context
482
+ if ssl_context:
483
+ # If the user provided a proxy context, we assume CA and client
484
+ # certificates have already been set
485
+ return ssl_wrap_socket(
486
+ sock=conn,
487
+ server_hostname=hostname,
488
+ ssl_context=ssl_context,
489
+ )
490
+
491
+ ssl_context = create_proxy_ssl_context(
492
+ self.ssl_version,
493
+ self.cert_reqs,
494
+ self.ca_certs,
495
+ self.ca_cert_dir,
496
+ self.ca_cert_data,
497
+ )
498
+
499
+ # If no cert was provided, use only the default options for server
500
+ # certificate validation
501
+ socket = ssl_wrap_socket(
502
+ sock=conn,
503
+ ca_certs=self.ca_certs,
504
+ ca_cert_dir=self.ca_cert_dir,
505
+ ca_cert_data=self.ca_cert_data,
506
+ server_hostname=hostname,
507
+ ssl_context=ssl_context,
508
+ )
509
+
510
+ if ssl_context.verify_mode != ssl.CERT_NONE and not getattr(
511
+ ssl_context, "check_hostname", False
512
+ ):
513
+ # While urllib3 attempts to always turn off hostname matching from
514
+ # the TLS library, this cannot always be done. So we check whether
515
+ # the TLS Library still thinks it's matching hostnames.
516
+ cert = socket.getpeercert()
517
+ if not cert.get("subjectAltName", ()):
518
+ warnings.warn(
519
+ (
520
+ "Certificate for {0} has no `subjectAltName`, falling back to check for a "
521
+ "`commonName` for now. This feature is being removed by major browsers and "
522
+ "deprecated by RFC 2818. (See https://github.com/urllib3/urllib3/issues/497 "
523
+ "for details.)".format(hostname)
524
+ ),
525
+ SubjectAltNameWarning,
526
+ )
527
+ _match_hostname(cert, hostname)
528
+
529
+ self.proxy_is_verified = ssl_context.verify_mode == ssl.CERT_REQUIRED
530
+ return socket
531
+
532
+
533
+ def _match_hostname(cert, asserted_hostname):
534
+ # Our upstream implementation of ssl.match_hostname()
535
+ # only applies this normalization to IP addresses so it doesn't
536
+ # match DNS SANs so we do the same thing!
537
+ stripped_hostname = asserted_hostname.strip("u[]")
538
+ if is_ipaddress(stripped_hostname):
539
+ asserted_hostname = stripped_hostname
540
+
541
+ try:
542
+ match_hostname(cert, asserted_hostname)
543
+ except CertificateError as e:
544
+ log.warning(
545
+ "Certificate did not match expected hostname: %s. Certificate: %s",
546
+ asserted_hostname,
547
+ cert,
548
+ )
549
+ # Add cert to exception and reraise so client code can inspect
550
+ # the cert when catching the exception, if they want to
551
+ e._peer_cert = cert
552
+ raise
553
+
554
+
555
+ def _get_default_user_agent():
556
+ return "python-urllib3/%s" % __version__
557
+
558
+
559
+ class DummyConnection(object):
560
+ """Used to detect a failed ConnectionCls import."""
561
+
562
+ pass
563
+
564
+
565
+ if not ssl:
566
+ HTTPSConnection = DummyConnection # noqa: F811
567
+
568
+
569
+ VerifiedHTTPSConnection = HTTPSConnection
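
As a rough usage sketch for the HTTPSConnection class defined above (most code should go through HTTPSConnectionPool or PoolManager instead), assuming a standalone urllib3 1.26.x install; the host used here is only an example:

from urllib3.connection import HTTPSConnection

conn = HTTPSConnection("example.org", 443, timeout=10)
# connect() (invoked lazily by request()) builds a default SSLContext, loads the
# system CA certificates and verifies the certificate hostname before sending.
conn.request("GET", "/")
resp = conn.getresponse()   # a plain http.client.HTTPResponse
print(resp.status)
conn.close()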
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/connectionpool.py ADDED
@@ -0,0 +1,1113 @@
1
+ from __future__ import absolute_import
2
+
3
+ import errno
4
+ import logging
5
+ import re
6
+ import socket
7
+ import sys
8
+ import warnings
9
+ from socket import error as SocketError
10
+ from socket import timeout as SocketTimeout
11
+
12
+ from ._collections import HTTPHeaderDict
13
+ from .connection import (
14
+ BaseSSLError,
15
+ BrokenPipeError,
16
+ DummyConnection,
17
+ HTTPConnection,
18
+ HTTPException,
19
+ HTTPSConnection,
20
+ VerifiedHTTPSConnection,
21
+ port_by_scheme,
22
+ )
23
+ from .exceptions import (
24
+ ClosedPoolError,
25
+ EmptyPoolError,
26
+ HeaderParsingError,
27
+ HostChangedError,
28
+ InsecureRequestWarning,
29
+ LocationValueError,
30
+ MaxRetryError,
31
+ NewConnectionError,
32
+ ProtocolError,
33
+ ProxyError,
34
+ ReadTimeoutError,
35
+ SSLError,
36
+ TimeoutError,
37
+ )
38
+ from .packages import six
39
+ from .packages.six.moves import queue
40
+ from .request import RequestMethods
41
+ from .response import HTTPResponse
42
+ from .util.connection import is_connection_dropped
43
+ from .util.proxy import connection_requires_http_tunnel
44
+ from .util.queue import LifoQueue
45
+ from .util.request import set_file_position
46
+ from .util.response import assert_header_parsing
47
+ from .util.retry import Retry
48
+ from .util.ssl_match_hostname import CertificateError
49
+ from .util.timeout import Timeout
50
+ from .util.url import Url, _encode_target
51
+ from .util.url import _normalize_host as normalize_host
52
+ from .util.url import get_host, parse_url
53
+
54
+ xrange = six.moves.xrange
55
+
56
+ log = logging.getLogger(__name__)
57
+
58
+ _Default = object()
59
+
60
+
61
+ # Pool objects
62
+ class ConnectionPool(object):
63
+ """
64
+ Base class for all connection pools, such as
65
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
66
+
67
+ .. note::
68
+ ConnectionPool.urlopen() does not normalize or percent-encode target URIs
69
+ which is useful if your target server doesn't support percent-encoded
70
+ target URIs.
71
+ """
72
+
73
+ scheme = None
74
+ QueueCls = LifoQueue
75
+
76
+ def __init__(self, host, port=None):
77
+ if not host:
78
+ raise LocationValueError("No host specified.")
79
+
80
+ self.host = _normalize_host(host, scheme=self.scheme)
81
+ self._proxy_host = host.lower()
82
+ self.port = port
83
+
84
+ def __str__(self):
85
+ return "%s(host=%r, port=%r)" % (type(self).__name__, self.host, self.port)
86
+
87
+ def __enter__(self):
88
+ return self
89
+
90
+ def __exit__(self, exc_type, exc_val, exc_tb):
91
+ self.close()
92
+ # Return False to re-raise any potential exceptions
93
+ return False
94
+
95
+ def close(self):
96
+ """
97
+ Close all pooled connections and disable the pool.
98
+ """
99
+ pass
100
+
101
+
102
+ # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
103
+ _blocking_errnos = {errno.EAGAIN, errno.EWOULDBLOCK}
104
+
105
+
106
+ class HTTPConnectionPool(ConnectionPool, RequestMethods):
107
+ """
108
+ Thread-safe connection pool for one host.
109
+
110
+ :param host:
111
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
112
+ :class:`http.client.HTTPConnection`.
113
+
114
+ :param port:
115
+ Port used for this HTTP Connection (None is equivalent to 80), passed
116
+ into :class:`http.client.HTTPConnection`.
117
+
118
+ :param strict:
119
+ Causes BadStatusLine to be raised if the status line can't be parsed
120
+ as a valid HTTP/1.0 or 1.1 status line, passed into
121
+ :class:`http.client.HTTPConnection`.
122
+
123
+ .. note::
124
+ Only works in Python 2. This parameter is ignored in Python 3.
125
+
126
+ :param timeout:
127
+ Socket timeout in seconds for each individual connection. This can
128
+ be a float or integer, which sets the timeout for the HTTP request,
129
+ or an instance of :class:`urllib3.util.Timeout` which gives you more
130
+ fine-grained control over request timeouts. After the constructor has
131
+ been parsed, this is always a `urllib3.util.Timeout` object.
132
+
133
+ :param maxsize:
134
+ Number of connections to save that can be reused. More than 1 is useful
135
+ in multithreaded situations. If ``block`` is set to False, more
136
+ connections will be created but they will not be saved once they've
137
+ been used.
138
+
139
+ :param block:
140
+ If set to True, no more than ``maxsize`` connections will be used at
141
+ a time. When no free connections are available, the call will block
142
+ until a connection has been released. This is a useful side effect for
143
+ particular multithreaded situations where one does not want to use more
144
+ than maxsize connections per host to prevent flooding.
145
+
146
+ :param headers:
147
+ Headers to include with all requests, unless other headers are given
148
+ explicitly.
149
+
150
+ :param retries:
151
+ Retry configuration to use by default with requests in this pool.
152
+
153
+ :param _proxy:
154
+ Parsed proxy URL, should not be used directly, instead, see
155
+ :class:`urllib3.ProxyManager`
156
+
157
+ :param _proxy_headers:
158
+ A dictionary with proxy headers, should not be used directly,
159
+ instead, see :class:`urllib3.ProxyManager`
160
+
161
+ :param \\**conn_kw:
162
+ Additional parameters are used to create fresh :class:`urllib3.connection.HTTPConnection`,
163
+ :class:`urllib3.connection.HTTPSConnection` instances.
164
+ """
165
+
166
+ scheme = "http"
167
+ ConnectionCls = HTTPConnection
168
+ ResponseCls = HTTPResponse
169
+
170
+ def __init__(
171
+ self,
172
+ host,
173
+ port=None,
174
+ strict=False,
175
+ timeout=Timeout.DEFAULT_TIMEOUT,
176
+ maxsize=1,
177
+ block=False,
178
+ headers=None,
179
+ retries=None,
180
+ _proxy=None,
181
+ _proxy_headers=None,
182
+ _proxy_config=None,
183
+ **conn_kw
184
+ ):
185
+ ConnectionPool.__init__(self, host, port)
186
+ RequestMethods.__init__(self, headers)
187
+
188
+ self.strict = strict
189
+
190
+ if not isinstance(timeout, Timeout):
191
+ timeout = Timeout.from_float(timeout)
192
+
193
+ if retries is None:
194
+ retries = Retry.DEFAULT
195
+
196
+ self.timeout = timeout
197
+ self.retries = retries
198
+
199
+ self.pool = self.QueueCls(maxsize)
200
+ self.block = block
201
+
202
+ self.proxy = _proxy
203
+ self.proxy_headers = _proxy_headers or {}
204
+ self.proxy_config = _proxy_config
205
+
206
+ # Fill the queue up so that doing get() on it will block properly
207
+ for _ in xrange(maxsize):
208
+ self.pool.put(None)
209
+
210
+ # These are mostly for testing and debugging purposes.
211
+ self.num_connections = 0
212
+ self.num_requests = 0
213
+ self.conn_kw = conn_kw
214
+
215
+ if self.proxy:
216
+ # Enable Nagle's algorithm for proxies, to avoid packet fragmentation.
217
+ # We cannot know if the user has added default socket options, so we cannot replace the
218
+ # list.
219
+ self.conn_kw.setdefault("socket_options", [])
220
+
221
+ self.conn_kw["proxy"] = self.proxy
222
+ self.conn_kw["proxy_config"] = self.proxy_config
223
+
224
+ def _new_conn(self):
225
+ """
226
+ Return a fresh :class:`HTTPConnection`.
227
+ """
228
+ self.num_connections += 1
229
+ log.debug(
230
+ "Starting new HTTP connection (%d): %s:%s",
231
+ self.num_connections,
232
+ self.host,
233
+ self.port or "80",
234
+ )
235
+
236
+ conn = self.ConnectionCls(
237
+ host=self.host,
238
+ port=self.port,
239
+ timeout=self.timeout.connect_timeout,
240
+ strict=self.strict,
241
+ **self.conn_kw
242
+ )
243
+ return conn
244
+
245
+ def _get_conn(self, timeout=None):
246
+ """
247
+ Get a connection. Will return a pooled connection if one is available.
248
+
249
+ If no connections are available and :prop:`.block` is ``False``, then a
250
+ fresh connection is returned.
251
+
252
+ :param timeout:
253
+ Seconds to wait before giving up and raising
254
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
255
+ :prop:`.block` is ``True``.
256
+ """
257
+ conn = None
258
+ try:
259
+ conn = self.pool.get(block=self.block, timeout=timeout)
260
+
261
+ except AttributeError: # self.pool is None
262
+ raise ClosedPoolError(self, "Pool is closed.")
263
+
264
+ except queue.Empty:
265
+ if self.block:
266
+ raise EmptyPoolError(
267
+ self,
268
+ "Pool reached maximum size and no more connections are allowed.",
269
+ )
270
+ pass # Oh well, we'll create a new connection then
271
+
272
+ # If this is a persistent connection, check if it got disconnected
273
+ if conn and is_connection_dropped(conn):
274
+ log.debug("Resetting dropped connection: %s", self.host)
275
+ conn.close()
276
+ if getattr(conn, "auto_open", 1) == 0:
277
+ # This is a proxied connection that has been mutated by
278
+ # http.client._tunnel() and cannot be reused (since it would
279
+ # attempt to bypass the proxy)
280
+ conn = None
281
+
282
+ return conn or self._new_conn()
283
+
284
+ def _put_conn(self, conn):
285
+ """
286
+ Put a connection back into the pool.
287
+
288
+ :param conn:
289
+ Connection object for the current host and port as returned by
290
+ :meth:`._new_conn` or :meth:`._get_conn`.
291
+
292
+ If the pool is already full, the connection is closed and discarded
293
+ because we exceeded maxsize. If connections are discarded frequently,
294
+ then maxsize should be increased.
295
+
296
+ If the pool is closed, then the connection will be closed and discarded.
297
+ """
298
+ try:
299
+ self.pool.put(conn, block=False)
300
+ return # Everything is dandy, done.
301
+ except AttributeError:
302
+ # self.pool is None.
303
+ pass
304
+ except queue.Full:
305
+ # This should never happen if self.block == True
306
+ log.warning(
307
+ "Connection pool is full, discarding connection: %s. Connection pool size: %s",
308
+ self.host,
309
+ self.pool.qsize(),
310
+ )
311
+ # Connection never got put back into the pool, close it.
312
+ if conn:
313
+ conn.close()
314
+
315
+ def _validate_conn(self, conn):
316
+ """
317
+ Called right before a request is made, after the socket is created.
318
+ """
319
+ pass
320
+
321
+ def _prepare_proxy(self, conn):
322
+ # Nothing to do for HTTP connections.
323
+ pass
324
+
325
+ def _get_timeout(self, timeout):
326
+ """Helper that always returns a :class:`urllib3.util.Timeout`"""
327
+ if timeout is _Default:
328
+ return self.timeout.clone()
329
+
330
+ if isinstance(timeout, Timeout):
331
+ return timeout.clone()
332
+ else:
333
+ # User passed us an int/float. This is for backwards compatibility,
334
+ # can be removed later
335
+ return Timeout.from_float(timeout)
336
+
337
+ def _raise_timeout(self, err, url, timeout_value):
338
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
339
+
340
+ if isinstance(err, SocketTimeout):
341
+ raise ReadTimeoutError(
342
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
343
+ )
344
+
345
+ # See the above comment about EAGAIN in Python 3. In Python 2 we have
346
+ # to specifically catch it and throw the timeout error
347
+ if hasattr(err, "errno") and err.errno in _blocking_errnos:
348
+ raise ReadTimeoutError(
349
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
350
+ )
351
+
352
+ # Catch possible read timeouts thrown as SSL errors. If not the
353
+ # case, rethrow the original. We need to do this because of:
354
+ # http://bugs.python.org/issue10272
355
+ if "timed out" in str(err) or "did not complete (read)" in str(
356
+ err
357
+ ): # Python < 2.7.4
358
+ raise ReadTimeoutError(
359
+ self, url, "Read timed out. (read timeout=%s)" % timeout_value
360
+ )
361
+
362
+ def _make_request(
363
+ self, conn, method, url, timeout=_Default, chunked=False, **httplib_request_kw
364
+ ):
365
+ """
366
+ Perform a request on a given urllib connection object taken from our
367
+ pool.
368
+
369
+ :param conn:
370
+ a connection from one of our connection pools
371
+
372
+ :param timeout:
373
+ Socket timeout in seconds for the request. This can be a
374
+ float or integer, which will set the same timeout value for
375
+ the socket connect and the socket read, or an instance of
376
+ :class:`urllib3.util.Timeout`, which gives you more fine-grained
377
+ control over your timeouts.
378
+ """
379
+ self.num_requests += 1
380
+
381
+ timeout_obj = self._get_timeout(timeout)
382
+ timeout_obj.start_connect()
383
+ conn.timeout = timeout_obj.connect_timeout
384
+
385
+ # Trigger any extra validation we need to do.
386
+ try:
387
+ self._validate_conn(conn)
388
+ except (SocketTimeout, BaseSSLError) as e:
389
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
390
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
391
+ raise
392
+
393
+ # conn.request() calls http.client.*.request, not the method in
394
+ # urllib3.request. It also calls makefile (recv) on the socket.
395
+ try:
396
+ if chunked:
397
+ conn.request_chunked(method, url, **httplib_request_kw)
398
+ else:
399
+ conn.request(method, url, **httplib_request_kw)
400
+
401
+ # We are swallowing BrokenPipeError (errno.EPIPE) since the server is
402
+ # legitimately able to close the connection after sending a valid response.
403
+ # With this behaviour, the received response is still readable.
404
+ except BrokenPipeError:
405
+ # Python 3
406
+ pass
407
+ except IOError as e:
408
+ # Python 2 and macOS/Linux
409
+ # EPIPE and ESHUTDOWN are BrokenPipeError on Python 2, and EPROTOTYPE is needed on macOS
410
+ # https://erickt.github.io/blog/2014/11/19/adventures-in-debugging-a-potential-osx-kernel-bug/
411
+ if e.errno not in {
412
+ errno.EPIPE,
413
+ errno.ESHUTDOWN,
414
+ errno.EPROTOTYPE,
415
+ }:
416
+ raise
417
+
418
+ # Reset the timeout for the recv() on the socket
419
+ read_timeout = timeout_obj.read_timeout
420
+
421
+ # App Engine doesn't have a sock attr
422
+ if getattr(conn, "sock", None):
423
+ # In Python 3 socket.py will catch EAGAIN and return None when you
424
+ # try and read into the file pointer created by http.client, which
425
+ # instead raises a BadStatusLine exception. Instead of catching
426
+ # the exception and assuming all BadStatusLine exceptions are read
427
+ # timeouts, check for a zero timeout before making the request.
428
+ if read_timeout == 0:
429
+ raise ReadTimeoutError(
430
+ self, url, "Read timed out. (read timeout=%s)" % read_timeout
431
+ )
432
+ if read_timeout is Timeout.DEFAULT_TIMEOUT:
433
+ conn.sock.settimeout(socket.getdefaulttimeout())
434
+ else: # None or a value
435
+ conn.sock.settimeout(read_timeout)
436
+
437
+ # Receive the response from the server
438
+ try:
439
+ try:
440
+ # Python 2.7, use buffering of HTTP responses
441
+ httplib_response = conn.getresponse(buffering=True)
442
+ except TypeError:
443
+ # Python 3
444
+ try:
445
+ httplib_response = conn.getresponse()
446
+ except BaseException as e:
447
+ # Remove the TypeError from the exception chain in
448
+ # Python 3 (including for exceptions like SystemExit).
449
+ # Otherwise it looks like a bug in the code.
450
+ six.raise_from(e, None)
451
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
452
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
453
+ raise
454
+
455
+ # AppEngine doesn't have a version attr.
456
+ http_version = getattr(conn, "_http_vsn_str", "HTTP/?")
457
+ log.debug(
458
+ '%s://%s:%s "%s %s %s" %s %s',
459
+ self.scheme,
460
+ self.host,
461
+ self.port,
462
+ method,
463
+ url,
464
+ http_version,
465
+ httplib_response.status,
466
+ httplib_response.length,
467
+ )
468
+
469
+ try:
470
+ assert_header_parsing(httplib_response.msg)
471
+ except (HeaderParsingError, TypeError) as hpe: # Platform-specific: Python 3
472
+ log.warning(
473
+ "Failed to parse headers (url=%s): %s",
474
+ self._absolute_url(url),
475
+ hpe,
476
+ exc_info=True,
477
+ )
478
+
479
+ return httplib_response
480
+
481
+ def _absolute_url(self, path):
482
+ return Url(scheme=self.scheme, host=self.host, port=self.port, path=path).url
483
+
484
+ def close(self):
485
+ """
486
+ Close all pooled connections and disable the pool.
487
+ """
488
+ if self.pool is None:
489
+ return
490
+ # Disable access to the pool
491
+ old_pool, self.pool = self.pool, None
492
+
493
+ try:
494
+ while True:
495
+ conn = old_pool.get(block=False)
496
+ if conn:
497
+ conn.close()
498
+
499
+ except queue.Empty:
500
+ pass # Done.
501
+
502
+ def is_same_host(self, url):
503
+ """
504
+ Check if the given ``url`` is a member of the same host as this
505
+ connection pool.
506
+ """
507
+ if url.startswith("/"):
508
+ return True
509
+
510
+ # TODO: Add optional support for socket.gethostbyname checking.
511
+ scheme, host, port = get_host(url)
512
+ if host is not None:
513
+ host = _normalize_host(host, scheme=scheme)
514
+
515
+ # Use explicit default port for comparison when none is given
516
+ if self.port and not port:
517
+ port = port_by_scheme.get(scheme)
518
+ elif not self.port and port == port_by_scheme.get(scheme):
519
+ port = None
520
+
521
+ return (scheme, host, port) == (self.scheme, self.host, self.port)
522
+
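Because is_same_host fills in the scheme's default port before comparing, equivalent URLs compare equal; an illustrative sketch (host is a placeholder):

    pool = HTTPConnectionPool("example.com", port=80)
    pool.is_same_host("http://example.com/index.html")  # True  -- port 80 is implied by http
    pool.is_same_host("http://example.com:8080/")       # False -- explicit non-default port
    pool.is_same_host("/relative/path")                 # True  -- relative URLs stay on this pool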
523
+ def urlopen(
524
+ self,
525
+ method,
526
+ url,
527
+ body=None,
528
+ headers=None,
529
+ retries=None,
530
+ redirect=True,
531
+ assert_same_host=True,
532
+ timeout=_Default,
533
+ pool_timeout=None,
534
+ release_conn=None,
535
+ chunked=False,
536
+ body_pos=None,
537
+ **response_kw
538
+ ):
539
+ """
540
+ Get a connection from the pool and perform an HTTP request. This is the
541
+ lowest level call for making a request, so you'll need to specify all
542
+ the raw details.
543
+
544
+ .. note::
545
+
546
+ More commonly, it's appropriate to use a convenience method provided
547
+ by :class:`.RequestMethods`, such as :meth:`request`.
548
+
549
+ .. note::
550
+
551
+ `release_conn` will only behave as expected if
552
+ `preload_content=False` because we want to make
553
+ `preload_content=False` the default behaviour someday soon without
554
+ breaking backwards compatibility.
555
+
556
+ :param method:
557
+ HTTP request method (such as GET, POST, PUT, etc.)
558
+
559
+ :param url:
560
+ The URL to perform the request on.
561
+
562
+ :param body:
563
+ Data to send in the request body, either :class:`str`, :class:`bytes`,
564
+ an iterable of :class:`str`/:class:`bytes`, or a file-like object.
565
+
566
+ :param headers:
567
+ Dictionary of custom headers to send, such as User-Agent,
568
+ If-None-Match, etc. If None, pool headers are used. If provided,
569
+ these headers completely replace any pool-specific headers.
570
+
571
+ :param retries:
572
+ Configure the number of retries to allow before raising a
573
+ :class:`~urllib3.exceptions.MaxRetryError` exception.
574
+
575
+ Pass ``None`` to retry until you receive a response. Pass a
576
+ :class:`~urllib3.util.retry.Retry` object for fine-grained control
577
+ over different types of retries.
578
+ Pass an integer number to retry connection errors that many times,
579
+ but no other types of errors. Pass zero to never retry.
580
+
581
+ If ``False``, then retries are disabled and any exception is raised
582
+ immediately. Also, instead of raising a MaxRetryError on redirects,
583
+ the redirect response will be returned.
584
+
585
+ :type retries: :class:`~urllib3.util.retry.Retry`, False, or an int.
586
+
587
+ :param redirect:
588
+ If True, automatically handle redirects (status codes 301, 302,
589
+ 303, 307, 308). Each redirect counts as a retry. Disabling retries
590
+ will disable redirect, too.
591
+
592
+ :param assert_same_host:
593
+ If ``True``, will make sure that the host of the pool requests is
594
+ consistent else will raise HostChangedError. When ``False``, you can
595
+ use the pool on an HTTP proxy and request foreign hosts.
596
+
597
+ :param timeout:
598
+ If specified, overrides the default timeout for this one
599
+ request. It may be a float (in seconds) or an instance of
600
+ :class:`urllib3.util.Timeout`.
601
+
602
+ :param pool_timeout:
603
+ If set and the pool is set to block=True, then this method will
604
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
605
+ connection is available within the time period.
606
+
607
+ :param release_conn:
608
+ If False, then the urlopen call will not release the connection
609
+ back into the pool once a response is received (but will release if
610
+ you read the entire contents of the response such as when
611
+ `preload_content=True`). This is useful if you're not preloading
612
+ the response's content immediately. You will need to call
613
+ ``r.release_conn()`` on the response ``r`` to return the connection
614
+ back into the pool. If None, it takes the value of
615
+ ``response_kw.get('preload_content', True)``.
616
+
617
+ :param chunked:
618
+ If True, urllib3 will send the body using chunked transfer
619
+ encoding. Otherwise, urllib3 will send the body using the standard
620
+ content-length form. Defaults to False.
621
+
622
+ :param int body_pos:
623
+ Position to seek to in file-like body in the event of a retry or
624
+ redirect. Typically this won't need to be set because urllib3 will
625
+ auto-populate the value when needed.
626
+
627
+ :param \\**response_kw:
628
+ Additional parameters are passed to
629
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
630
+ """
631
+
632
+ parsed_url = parse_url(url)
633
+ destination_scheme = parsed_url.scheme
634
+
635
+ if headers is None:
636
+ headers = self.headers
637
+
638
+ if not isinstance(retries, Retry):
639
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
640
+
641
+ if release_conn is None:
642
+ release_conn = response_kw.get("preload_content", True)
643
+
644
+ # Check host
645
+ if assert_same_host and not self.is_same_host(url):
646
+ raise HostChangedError(self, url, retries)
647
+
648
+ # Ensure that the URL we're connecting to is properly encoded
649
+ if url.startswith("/"):
650
+ url = six.ensure_str(_encode_target(url))
651
+ else:
652
+ url = six.ensure_str(parsed_url.url)
653
+
654
+ conn = None
655
+
656
+ # Track whether `conn` needs to be released before
657
+ # returning/raising/recursing. Update this variable if necessary, and
658
+ # leave `release_conn` constant throughout the function. That way, if
659
+ # the function recurses, the original value of `release_conn` will be
660
+ # passed down into the recursive call, and its value will be respected.
661
+ #
662
+ # See issue #651 [1] for details.
663
+ #
664
+ # [1] <https://github.com/urllib3/urllib3/issues/651>
665
+ release_this_conn = release_conn
666
+
667
+ http_tunnel_required = connection_requires_http_tunnel(
668
+ self.proxy, self.proxy_config, destination_scheme
669
+ )
670
+
671
+ # Merge the proxy headers. Only done when not using HTTP CONNECT. We
672
+ # have to copy the headers dict so we can safely change it without those
673
+ # changes being reflected in anyone else's copy.
674
+ if not http_tunnel_required:
675
+ headers = headers.copy()
676
+ headers.update(self.proxy_headers)
677
+
678
+ # Must keep the exception bound to a separate variable or else Python 3
679
+ # complains about UnboundLocalError.
680
+ err = None
681
+
682
+ # Keep track of whether we cleanly exited the except block. This
683
+ # ensures we do proper cleanup in finally.
684
+ clean_exit = False
685
+
686
+ # Rewind body position, if needed. Record current position
687
+ # for future rewinds in the event of a redirect/retry.
688
+ body_pos = set_file_position(body, body_pos)
689
+
690
+ try:
691
+ # Request a connection from the queue.
692
+ timeout_obj = self._get_timeout(timeout)
693
+ conn = self._get_conn(timeout=pool_timeout)
694
+
695
+ conn.timeout = timeout_obj.connect_timeout
696
+
697
+ is_new_proxy_conn = self.proxy is not None and not getattr(
698
+ conn, "sock", None
699
+ )
700
+ if is_new_proxy_conn and http_tunnel_required:
701
+ self._prepare_proxy(conn)
702
+
703
+ # Make the request on the httplib connection object.
704
+ httplib_response = self._make_request(
705
+ conn,
706
+ method,
707
+ url,
708
+ timeout=timeout_obj,
709
+ body=body,
710
+ headers=headers,
711
+ chunked=chunked,
712
+ )
713
+
714
+ # If we're going to release the connection in ``finally:``, then
715
+ # the response doesn't need to know about the connection. Otherwise
716
+ # it will also try to release it and we'll have a double-release
717
+ # mess.
718
+ response_conn = conn if not release_conn else None
719
+
720
+ # Pass method to Response for length checking
721
+ response_kw["request_method"] = method
722
+
723
+ # Import httplib's response into our own wrapper object
724
+ response = self.ResponseCls.from_httplib(
725
+ httplib_response,
726
+ pool=self,
727
+ connection=response_conn,
728
+ retries=retries,
729
+ **response_kw
730
+ )
731
+
732
+ # Everything went great!
733
+ clean_exit = True
734
+
735
+ except EmptyPoolError:
736
+ # Didn't get a connection from the pool, no need to clean up
737
+ clean_exit = True
738
+ release_this_conn = False
739
+ raise
740
+
741
+ except (
742
+ TimeoutError,
743
+ HTTPException,
744
+ SocketError,
745
+ ProtocolError,
746
+ BaseSSLError,
747
+ SSLError,
748
+ CertificateError,
749
+ ) as e:
750
+ # Discard the connection for these exceptions. It will be
751
+ # replaced during the next _get_conn() call.
752
+ clean_exit = False
753
+
754
+ def _is_ssl_error_message_from_http_proxy(ssl_error):
755
+ # We're trying to detect the message 'WRONG_VERSION_NUMBER' but
756
+ # SSLErrors are kinda all over the place when it comes to the message,
757
+ # so we try to cover our bases here!
758
+ message = " ".join(re.split("[^a-z]", str(ssl_error).lower()))
759
+ return (
760
+ "wrong version number" in message or "unknown protocol" in message
761
+ )
762
+
763
+ # Try to detect a common user error with proxies which is to
764
+ # set an HTTP proxy to be HTTPS when it should be 'http://'
765
+ # (ie {'http': 'http://proxy', 'https': 'https://proxy'})
766
+ # Instead we add a nice error message and point to a URL.
767
+ if (
768
+ isinstance(e, BaseSSLError)
769
+ and self.proxy
770
+ and _is_ssl_error_message_from_http_proxy(e)
771
+ ):
772
+ e = ProxyError(
773
+ "Your proxy appears to only use HTTP and not HTTPS, "
774
+ "try changing your proxy URL to be HTTP. See: "
775
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
776
+ "#https-proxy-error-http-proxy",
777
+ SSLError(e),
778
+ )
779
+ elif isinstance(e, (BaseSSLError, CertificateError)):
780
+ e = SSLError(e)
781
+ elif isinstance(e, (SocketError, NewConnectionError)) and self.proxy:
782
+ e = ProxyError("Cannot connect to proxy.", e)
783
+ elif isinstance(e, (SocketError, HTTPException)):
784
+ e = ProtocolError("Connection aborted.", e)
785
+
786
+ retries = retries.increment(
787
+ method, url, error=e, _pool=self, _stacktrace=sys.exc_info()[2]
788
+ )
789
+ retries.sleep()
790
+
791
+ # Keep track of the error for the retry warning.
792
+ err = e
793
+
794
+ finally:
795
+ if not clean_exit:
796
+ # We hit some kind of exception, handled or otherwise. We need
797
+ # to throw the connection away unless explicitly told not to.
798
+ # Close the connection, set the variable to None, and make sure
799
+ # we put the None back in the pool to avoid leaking it.
800
+ conn = conn and conn.close()
801
+ release_this_conn = True
802
+
803
+ if release_this_conn:
804
+ # Put the connection back to be reused. If the connection is
805
+ # expired then it will be None, which will get replaced with a
806
+ # fresh connection during _get_conn.
807
+ self._put_conn(conn)
808
+
809
+ if not conn:
810
+ # Try again
811
+ log.warning(
812
+ "Retrying (%r) after connection broken by '%r': %s", retries, err, url
813
+ )
814
+ return self.urlopen(
815
+ method,
816
+ url,
817
+ body,
818
+ headers,
819
+ retries,
820
+ redirect,
821
+ assert_same_host,
822
+ timeout=timeout,
823
+ pool_timeout=pool_timeout,
824
+ release_conn=release_conn,
825
+ chunked=chunked,
826
+ body_pos=body_pos,
827
+ **response_kw
828
+ )
829
+
830
+ # Handle redirect?
831
+ redirect_location = redirect and response.get_redirect_location()
832
+ if redirect_location:
833
+ if response.status == 303:
834
+ # Change the method according to RFC 9110, Section 15.4.4.
835
+ method = "GET"
836
+ # And drop the body so nothing sensitive is re-sent.
837
+ body = None
838
+ headers = HTTPHeaderDict(headers)._prepare_for_method_change()
839
+
840
+ try:
841
+ retries = retries.increment(method, url, response=response, _pool=self)
842
+ except MaxRetryError:
843
+ if retries.raise_on_redirect:
844
+ response.drain_conn()
845
+ raise
846
+ return response
847
+
848
+ response.drain_conn()
849
+ retries.sleep_for_retry(response)
850
+ log.debug("Redirecting %s -> %s", url, redirect_location)
851
+ return self.urlopen(
852
+ method,
853
+ redirect_location,
854
+ body,
855
+ headers,
856
+ retries=retries,
857
+ redirect=redirect,
858
+ assert_same_host=assert_same_host,
859
+ timeout=timeout,
860
+ pool_timeout=pool_timeout,
861
+ release_conn=release_conn,
862
+ chunked=chunked,
863
+ body_pos=body_pos,
864
+ **response_kw
865
+ )
866
+
867
+ # Check if we should retry the HTTP response.
868
+ has_retry_after = bool(response.getheader("Retry-After"))
869
+ if retries.is_retry(method, response.status, has_retry_after):
870
+ try:
871
+ retries = retries.increment(method, url, response=response, _pool=self)
872
+ except MaxRetryError:
873
+ if retries.raise_on_status:
874
+ response.drain_conn()
875
+ raise
876
+ return response
877
+
878
+ response.drain_conn()
879
+ retries.sleep(response)
880
+ log.debug("Retry: %s", url)
881
+ return self.urlopen(
882
+ method,
883
+ url,
884
+ body,
885
+ headers,
886
+ retries=retries,
887
+ redirect=redirect,
888
+ assert_same_host=assert_same_host,
889
+ timeout=timeout,
890
+ pool_timeout=pool_timeout,
891
+ release_conn=release_conn,
892
+ chunked=chunked,
893
+ body_pos=body_pos,
894
+ **response_kw
895
+ )
896
+
897
+ return response
898
+
899
+
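Tying the documented parameters together, a hedged usage sketch of the low-level urlopen() call (host, path and header values are placeholders):

    from urllib3 import HTTPConnectionPool
    from urllib3.util.retry import Retry

    pool = HTTPConnectionPool("example.com", maxsize=4, block=True)
    resp = pool.urlopen(
        "GET",
        "/index.html",
        headers={"User-Agent": "example/1.0"},
        retries=Retry(total=3, redirect=2, backoff_factor=0.5),
        preload_content=False,  # stream the body; release_conn then defaults to False
    )
    try:
        data = resp.read()
    finally:
        resp.release_conn()     # return the connection to the pool explicitly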
900
+ class HTTPSConnectionPool(HTTPConnectionPool):
901
+ """
902
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
903
+
904
+ :class:`.HTTPSConnection` uses one of ``assert_fingerprint``,
905
+ ``assert_hostname`` and ``host`` in this order to verify connections.
906
+ If ``assert_hostname`` is False, no verification is done.
907
+
908
+ The ``key_file``, ``cert_file``, ``cert_reqs``, ``ca_certs``,
909
+ ``ca_cert_dir``, ``ssl_version``, ``key_password`` are only used if :mod:`ssl`
910
+ is available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
911
+ the connection socket into an SSL socket.
912
+ """
913
+
914
+ scheme = "https"
915
+ ConnectionCls = HTTPSConnection
916
+
917
+ def __init__(
918
+ self,
919
+ host,
920
+ port=None,
921
+ strict=False,
922
+ timeout=Timeout.DEFAULT_TIMEOUT,
923
+ maxsize=1,
924
+ block=False,
925
+ headers=None,
926
+ retries=None,
927
+ _proxy=None,
928
+ _proxy_headers=None,
929
+ key_file=None,
930
+ cert_file=None,
931
+ cert_reqs=None,
932
+ key_password=None,
933
+ ca_certs=None,
934
+ ssl_version=None,
935
+ assert_hostname=None,
936
+ assert_fingerprint=None,
937
+ ca_cert_dir=None,
938
+ **conn_kw
939
+ ):
940
+
941
+ HTTPConnectionPool.__init__(
942
+ self,
943
+ host,
944
+ port,
945
+ strict,
946
+ timeout,
947
+ maxsize,
948
+ block,
949
+ headers,
950
+ retries,
951
+ _proxy,
952
+ _proxy_headers,
953
+ **conn_kw
954
+ )
955
+
956
+ self.key_file = key_file
957
+ self.cert_file = cert_file
958
+ self.cert_reqs = cert_reqs
959
+ self.key_password = key_password
960
+ self.ca_certs = ca_certs
961
+ self.ca_cert_dir = ca_cert_dir
962
+ self.ssl_version = ssl_version
963
+ self.assert_hostname = assert_hostname
964
+ self.assert_fingerprint = assert_fingerprint
965
+
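The TLS-related arguments accepted by this constructor are typically supplied up front; a minimal sketch (host and CA-bundle path are assumed placeholders):

    from urllib3 import HTTPSConnectionPool

    pool = HTTPSConnectionPool(
        "example.com",
        port=443,
        cert_reqs="CERT_REQUIRED",
        ca_certs="/etc/ssl/certs/ca-certificates.crt",  # assumed CA bundle location
    )
    r = pool.request("GET", "/")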
966
+ def _prepare_conn(self, conn):
967
+ """
968
+ Prepare the ``connection`` for :meth:`urllib3.util.ssl_wrap_socket`
969
+ and establish the tunnel if proxy is used.
970
+ """
971
+
972
+ if isinstance(conn, VerifiedHTTPSConnection):
973
+ conn.set_cert(
974
+ key_file=self.key_file,
975
+ key_password=self.key_password,
976
+ cert_file=self.cert_file,
977
+ cert_reqs=self.cert_reqs,
978
+ ca_certs=self.ca_certs,
979
+ ca_cert_dir=self.ca_cert_dir,
980
+ assert_hostname=self.assert_hostname,
981
+ assert_fingerprint=self.assert_fingerprint,
982
+ )
983
+ conn.ssl_version = self.ssl_version
984
+ return conn
985
+
986
+ def _prepare_proxy(self, conn):
987
+ """
988
+ Establishes a tunnel connection through HTTP CONNECT.
989
+
990
+ Tunnel connection is established early because otherwise httplib would
991
+ improperly set the Host: header to the proxy's IP:port.
992
+ """
993
+
994
+ conn.set_tunnel(self._proxy_host, self.port, self.proxy_headers)
995
+
996
+ if self.proxy.scheme == "https":
997
+ conn.tls_in_tls_required = True
998
+
999
+ conn.connect()
1000
+
1001
+ def _new_conn(self):
1002
+ """
1003
+ Return a fresh :class:`http.client.HTTPSConnection`.
1004
+ """
1005
+ self.num_connections += 1
1006
+ log.debug(
1007
+ "Starting new HTTPS connection (%d): %s:%s",
1008
+ self.num_connections,
1009
+ self.host,
1010
+ self.port or "443",
1011
+ )
1012
+
1013
+ if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
1014
+ raise SSLError(
1015
+ "Can't connect to HTTPS URL because the SSL module is not available."
1016
+ )
1017
+
1018
+ actual_host = self.host
1019
+ actual_port = self.port
1020
+ if self.proxy is not None:
1021
+ actual_host = self.proxy.host
1022
+ actual_port = self.proxy.port
1023
+
1024
+ conn = self.ConnectionCls(
1025
+ host=actual_host,
1026
+ port=actual_port,
1027
+ timeout=self.timeout.connect_timeout,
1028
+ strict=self.strict,
1029
+ cert_file=self.cert_file,
1030
+ key_file=self.key_file,
1031
+ key_password=self.key_password,
1032
+ **self.conn_kw
1033
+ )
1034
+
1035
+ return self._prepare_conn(conn)
1036
+
1037
+ def _validate_conn(self, conn):
1038
+ """
1039
+ Called right before a request is made, after the socket is created.
1040
+ """
1041
+ super(HTTPSConnectionPool, self)._validate_conn(conn)
1042
+
1043
+ # Force connect early to allow us to validate the connection.
1044
+ if not getattr(conn, "sock", None): # AppEngine might not have `.sock`
1045
+ conn.connect()
1046
+
1047
+ if not conn.is_verified:
1048
+ warnings.warn(
1049
+ (
1050
+ "Unverified HTTPS request is being made to host '%s'. "
1051
+ "Adding certificate verification is strongly advised. See: "
1052
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
1053
+ "#ssl-warnings" % conn.host
1054
+ ),
1055
+ InsecureRequestWarning,
1056
+ )
1057
+
1058
+ if getattr(conn, "proxy_is_verified", None) is False:
1059
+ warnings.warn(
1060
+ (
1061
+ "Unverified HTTPS connection done to an HTTPS proxy. "
1062
+ "Adding certificate verification is strongly advised. See: "
1063
+ "https://urllib3.readthedocs.io/en/1.26.x/advanced-usage.html"
1064
+ "#ssl-warnings"
1065
+ ),
1066
+ InsecureRequestWarning,
1067
+ )
1068
+
1069
+
1070
+ def connection_from_url(url, **kw):
1071
+ """
1072
+ Given a url, return an :class:`.ConnectionPool` instance of its host.
1073
+
1074
+ This is a shortcut for not having to parse out the scheme, host, and port
1075
+ of the url before creating an :class:`.ConnectionPool` instance.
1076
+
1077
+ :param url:
1078
+ Absolute URL string that must include the scheme. Port is optional.
1079
+
1080
+ :param \\**kw:
1081
+ Passes additional parameters to the constructor of the appropriate
1082
+ :class:`.ConnectionPool`. Useful for specifying things like
1083
+ timeout, maxsize, headers, etc.
1084
+
1085
+ Example::
1086
+
1087
+ >>> conn = connection_from_url('http://google.com/')
1088
+ >>> r = conn.request('GET', '/')
1089
+ """
1090
+ scheme, host, port = get_host(url)
1091
+ port = port or port_by_scheme.get(scheme, 80)
1092
+ if scheme == "https":
1093
+ return HTTPSConnectionPool(host, port=port, **kw)
1094
+ else:
1095
+ return HTTPConnectionPool(host, port=port, **kw)
1096
+
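A slightly fuller variant of the docstring example, showing the keyword pass-through to the chosen pool class (values are illustrative):

    from urllib3 import connection_from_url

    pool = connection_from_url("https://example.com", maxsize=10, timeout=5.0)
    r = pool.request("GET", "/")
    print(type(pool).__name__)  # HTTPSConnectionPool, because the scheme is https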
1097
+
1098
+ def _normalize_host(host, scheme):
1099
+ """
1100
+ Normalize hosts for comparisons and use with sockets.
1101
+ """
1102
+
1103
+ host = normalize_host(host, scheme)
1104
+
1105
+ # httplib doesn't like it when we include brackets in IPv6 addresses
1106
+ # Specifically, if we include brackets but also pass the port then
1107
+ # httplib crazily doubles up the square brackets on the Host header.
1108
+ # Instead, we need to make sure we never pass ``None`` as the port.
1109
+ # However, for backward compatibility reasons we can't actually
1110
+ # *assert* that. See http://bugs.python.org/issue28539
1111
+ if host.startswith("[") and host.endswith("]"):
1112
+ host = host[1:-1]
1113
+ return host
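For example, the bracket handling above means an IPv6 literal is handed to httplib without its URL brackets (values are illustrative):

    _normalize_host("[2001:db8::1]", scheme="http")  # -> "2001:db8::1"
    _normalize_host("example.com", scheme="http")    # unchanged for a plain lowercase hostname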
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (195 Bytes)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-310.pyc ADDED
Binary file (1.38 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-310.pyc ADDED
Binary file (8.19 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-310.pyc ADDED
Binary file (3.63 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-310.pyc ADDED
Binary file (15.5 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-310.pyc ADDED
Binary file (21.9 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-310.pyc ADDED
Binary file (5.6 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_appengine_environ.py ADDED
@@ -0,0 +1,36 @@
1
+ """
2
+ This module provides means to detect the App Engine environment.
3
+ """
4
+
5
+ import os
6
+
7
+
8
+ def is_appengine():
9
+ return is_local_appengine() or is_prod_appengine()
10
+
11
+
12
+ def is_appengine_sandbox():
13
+ """Reports if the app is running in the first generation sandbox.
14
+
15
+ The second generation runtimes are technically still in a sandbox, but it
16
+ is much less restrictive, so generally you shouldn't need to check for it.
17
+ see https://cloud.google.com/appengine/docs/standard/runtimes
18
+ """
19
+ return is_appengine() and os.environ["APPENGINE_RUNTIME"] == "python27"
20
+
21
+
22
+ def is_local_appengine():
23
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
24
+ "SERVER_SOFTWARE", ""
25
+ ).startswith("Development/")
26
+
27
+
28
+ def is_prod_appengine():
29
+ return "APPENGINE_RUNTIME" in os.environ and os.environ.get(
30
+ "SERVER_SOFTWARE", ""
31
+ ).startswith("Google App Engine/")
32
+
33
+
34
+ def is_prod_appengine_mvms():
35
+ """Deprecated."""
36
+ return False
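These detectors are plain environment-variable checks; the local dev-server case, for instance, can be simulated like this (values are illustrative):

    import os

    os.environ["APPENGINE_RUNTIME"] = "python27"
    os.environ["SERVER_SOFTWARE"] = "Development/2.0"

    from pip._vendor.urllib3.contrib import _appengine_environ
    _appengine_environ.is_local_appengine()    # True
    _appengine_environ.is_appengine_sandbox()  # True -- runtime is python27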
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (212 Bytes)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-310.pyc ADDED
Binary file (10.7 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-310.pyc ADDED
Binary file (9.1 kB)
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/bindings.py ADDED
@@ -0,0 +1,519 @@
1
+ """
2
+ This module uses ctypes to bind a whole bunch of functions and constants from
3
+ SecureTransport. The goal here is to provide the low-level API to
4
+ SecureTransport. These are essentially the C-level functions and constants, and
5
+ they're pretty gross to work with.
6
+
7
+ This code is a bastardised version of the code found in Will Bond's oscrypto
8
+ library. An enormous debt is owed to him for blazing this trail for us. For
9
+ that reason, this code should be considered to be covered both by urllib3's
10
+ license and by oscrypto's:
11
+
12
+ Copyright (c) 2015-2016 Will Bond <[email protected]>
13
+
14
+ Permission is hereby granted, free of charge, to any person obtaining a
15
+ copy of this software and associated documentation files (the "Software"),
16
+ to deal in the Software without restriction, including without limitation
17
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
18
+ and/or sell copies of the Software, and to permit persons to whom the
19
+ Software is furnished to do so, subject to the following conditions:
20
+
21
+ The above copyright notice and this permission notice shall be included in
22
+ all copies or substantial portions of the Software.
23
+
24
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
25
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
26
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
27
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
28
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
29
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
30
+ DEALINGS IN THE SOFTWARE.
31
+ """
32
+ from __future__ import absolute_import
33
+
34
+ import platform
35
+ from ctypes import (
36
+ CDLL,
37
+ CFUNCTYPE,
38
+ POINTER,
39
+ c_bool,
40
+ c_byte,
41
+ c_char_p,
42
+ c_int32,
43
+ c_long,
44
+ c_size_t,
45
+ c_uint32,
46
+ c_ulong,
47
+ c_void_p,
48
+ )
49
+ from ctypes.util import find_library
50
+
51
+ from ...packages.six import raise_from
52
+
53
+ if platform.system() != "Darwin":
54
+ raise ImportError("Only macOS is supported")
55
+
56
+ version = platform.mac_ver()[0]
57
+ version_info = tuple(map(int, version.split(".")))
58
+ if version_info < (10, 8):
59
+ raise OSError(
60
+ "Only OS X 10.8 and newer are supported, not %s.%s"
61
+ % (version_info[0], version_info[1])
62
+ )
63
+
64
+
65
+ def load_cdll(name, macos10_16_path):
66
+ """Loads a CDLL by name, falling back to known path on 10.16+"""
67
+ try:
68
+ # Big Sur is technically 11 but we use 10.16 due to the Big Sur
69
+ # beta being labeled as 10.16.
70
+ if version_info >= (10, 16):
71
+ path = macos10_16_path
72
+ else:
73
+ path = find_library(name)
74
+ if not path:
75
+ raise OSError # Caught and reraised as 'ImportError'
76
+ return CDLL(path, use_errno=True)
77
+ except OSError:
78
+ raise_from(ImportError("The library %s failed to load" % name), None)
79
+
80
+
81
+ Security = load_cdll(
82
+ "Security", "/System/Library/Frameworks/Security.framework/Security"
83
+ )
84
+ CoreFoundation = load_cdll(
85
+ "CoreFoundation",
86
+ "/System/Library/Frameworks/CoreFoundation.framework/CoreFoundation",
87
+ )
88
+
89
+
90
+ Boolean = c_bool
91
+ CFIndex = c_long
92
+ CFStringEncoding = c_uint32
93
+ CFData = c_void_p
94
+ CFString = c_void_p
95
+ CFArray = c_void_p
96
+ CFMutableArray = c_void_p
97
+ CFDictionary = c_void_p
98
+ CFError = c_void_p
99
+ CFType = c_void_p
100
+ CFTypeID = c_ulong
101
+
102
+ CFTypeRef = POINTER(CFType)
103
+ CFAllocatorRef = c_void_p
104
+
105
+ OSStatus = c_int32
106
+
107
+ CFDataRef = POINTER(CFData)
108
+ CFStringRef = POINTER(CFString)
109
+ CFArrayRef = POINTER(CFArray)
110
+ CFMutableArrayRef = POINTER(CFMutableArray)
111
+ CFDictionaryRef = POINTER(CFDictionary)
112
+ CFArrayCallBacks = c_void_p
113
+ CFDictionaryKeyCallBacks = c_void_p
114
+ CFDictionaryValueCallBacks = c_void_p
115
+
116
+ SecCertificateRef = POINTER(c_void_p)
117
+ SecExternalFormat = c_uint32
118
+ SecExternalItemType = c_uint32
119
+ SecIdentityRef = POINTER(c_void_p)
120
+ SecItemImportExportFlags = c_uint32
121
+ SecItemImportExportKeyParameters = c_void_p
122
+ SecKeychainRef = POINTER(c_void_p)
123
+ SSLProtocol = c_uint32
124
+ SSLCipherSuite = c_uint32
125
+ SSLContextRef = POINTER(c_void_p)
126
+ SecTrustRef = POINTER(c_void_p)
127
+ SSLConnectionRef = c_uint32
128
+ SecTrustResultType = c_uint32
129
+ SecTrustOptionFlags = c_uint32
130
+ SSLProtocolSide = c_uint32
131
+ SSLConnectionType = c_uint32
132
+ SSLSessionOption = c_uint32
133
+
134
+
135
+ try:
136
+ Security.SecItemImport.argtypes = [
137
+ CFDataRef,
138
+ CFStringRef,
139
+ POINTER(SecExternalFormat),
140
+ POINTER(SecExternalItemType),
141
+ SecItemImportExportFlags,
142
+ POINTER(SecItemImportExportKeyParameters),
143
+ SecKeychainRef,
144
+ POINTER(CFArrayRef),
145
+ ]
146
+ Security.SecItemImport.restype = OSStatus
147
+
148
+ Security.SecCertificateGetTypeID.argtypes = []
149
+ Security.SecCertificateGetTypeID.restype = CFTypeID
150
+
151
+ Security.SecIdentityGetTypeID.argtypes = []
152
+ Security.SecIdentityGetTypeID.restype = CFTypeID
153
+
154
+ Security.SecKeyGetTypeID.argtypes = []
155
+ Security.SecKeyGetTypeID.restype = CFTypeID
156
+
157
+ Security.SecCertificateCreateWithData.argtypes = [CFAllocatorRef, CFDataRef]
158
+ Security.SecCertificateCreateWithData.restype = SecCertificateRef
159
+
160
+ Security.SecCertificateCopyData.argtypes = [SecCertificateRef]
161
+ Security.SecCertificateCopyData.restype = CFDataRef
162
+
163
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
164
+ Security.SecCopyErrorMessageString.restype = CFStringRef
165
+
166
+ Security.SecIdentityCreateWithCertificate.argtypes = [
167
+ CFTypeRef,
168
+ SecCertificateRef,
169
+ POINTER(SecIdentityRef),
170
+ ]
171
+ Security.SecIdentityCreateWithCertificate.restype = OSStatus
172
+
173
+ Security.SecKeychainCreate.argtypes = [
174
+ c_char_p,
175
+ c_uint32,
176
+ c_void_p,
177
+ Boolean,
178
+ c_void_p,
179
+ POINTER(SecKeychainRef),
180
+ ]
181
+ Security.SecKeychainCreate.restype = OSStatus
182
+
183
+ Security.SecKeychainDelete.argtypes = [SecKeychainRef]
184
+ Security.SecKeychainDelete.restype = OSStatus
185
+
186
+ Security.SecPKCS12Import.argtypes = [
187
+ CFDataRef,
188
+ CFDictionaryRef,
189
+ POINTER(CFArrayRef),
190
+ ]
191
+ Security.SecPKCS12Import.restype = OSStatus
192
+
193
+ SSLReadFunc = CFUNCTYPE(OSStatus, SSLConnectionRef, c_void_p, POINTER(c_size_t))
194
+ SSLWriteFunc = CFUNCTYPE(
195
+ OSStatus, SSLConnectionRef, POINTER(c_byte), POINTER(c_size_t)
196
+ )
197
+
198
+ Security.SSLSetIOFuncs.argtypes = [SSLContextRef, SSLReadFunc, SSLWriteFunc]
199
+ Security.SSLSetIOFuncs.restype = OSStatus
200
+
201
+ Security.SSLSetPeerID.argtypes = [SSLContextRef, c_char_p, c_size_t]
202
+ Security.SSLSetPeerID.restype = OSStatus
203
+
204
+ Security.SSLSetCertificate.argtypes = [SSLContextRef, CFArrayRef]
205
+ Security.SSLSetCertificate.restype = OSStatus
206
+
207
+ Security.SSLSetCertificateAuthorities.argtypes = [SSLContextRef, CFTypeRef, Boolean]
208
+ Security.SSLSetCertificateAuthorities.restype = OSStatus
209
+
210
+ Security.SSLSetConnection.argtypes = [SSLContextRef, SSLConnectionRef]
211
+ Security.SSLSetConnection.restype = OSStatus
212
+
213
+ Security.SSLSetPeerDomainName.argtypes = [SSLContextRef, c_char_p, c_size_t]
214
+ Security.SSLSetPeerDomainName.restype = OSStatus
215
+
216
+ Security.SSLHandshake.argtypes = [SSLContextRef]
217
+ Security.SSLHandshake.restype = OSStatus
218
+
219
+ Security.SSLRead.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
220
+ Security.SSLRead.restype = OSStatus
221
+
222
+ Security.SSLWrite.argtypes = [SSLContextRef, c_char_p, c_size_t, POINTER(c_size_t)]
223
+ Security.SSLWrite.restype = OSStatus
224
+
225
+ Security.SSLClose.argtypes = [SSLContextRef]
226
+ Security.SSLClose.restype = OSStatus
227
+
228
+ Security.SSLGetNumberSupportedCiphers.argtypes = [SSLContextRef, POINTER(c_size_t)]
229
+ Security.SSLGetNumberSupportedCiphers.restype = OSStatus
230
+
231
+ Security.SSLGetSupportedCiphers.argtypes = [
232
+ SSLContextRef,
233
+ POINTER(SSLCipherSuite),
234
+ POINTER(c_size_t),
235
+ ]
236
+ Security.SSLGetSupportedCiphers.restype = OSStatus
237
+
238
+ Security.SSLSetEnabledCiphers.argtypes = [
239
+ SSLContextRef,
240
+ POINTER(SSLCipherSuite),
241
+ c_size_t,
242
+ ]
243
+ Security.SSLSetEnabledCiphers.restype = OSStatus
244
+
245
+ Security.SSLGetNumberEnabledCiphers.argtype = [SSLContextRef, POINTER(c_size_t)]
246
+ Security.SSLGetNumberEnabledCiphers.restype = OSStatus
247
+
248
+ Security.SSLGetEnabledCiphers.argtypes = [
249
+ SSLContextRef,
250
+ POINTER(SSLCipherSuite),
251
+ POINTER(c_size_t),
252
+ ]
253
+ Security.SSLGetEnabledCiphers.restype = OSStatus
254
+
255
+ Security.SSLGetNegotiatedCipher.argtypes = [SSLContextRef, POINTER(SSLCipherSuite)]
256
+ Security.SSLGetNegotiatedCipher.restype = OSStatus
257
+
258
+ Security.SSLGetNegotiatedProtocolVersion.argtypes = [
259
+ SSLContextRef,
260
+ POINTER(SSLProtocol),
261
+ ]
262
+ Security.SSLGetNegotiatedProtocolVersion.restype = OSStatus
263
+
264
+ Security.SSLCopyPeerTrust.argtypes = [SSLContextRef, POINTER(SecTrustRef)]
265
+ Security.SSLCopyPeerTrust.restype = OSStatus
266
+
267
+ Security.SecTrustSetAnchorCertificates.argtypes = [SecTrustRef, CFArrayRef]
268
+ Security.SecTrustSetAnchorCertificates.restype = OSStatus
269
+
270
+ Security.SecTrustSetAnchorCertificatesOnly.argstypes = [SecTrustRef, Boolean]
271
+ Security.SecTrustSetAnchorCertificatesOnly.restype = OSStatus
272
+
273
+ Security.SecTrustEvaluate.argtypes = [SecTrustRef, POINTER(SecTrustResultType)]
274
+ Security.SecTrustEvaluate.restype = OSStatus
275
+
276
+ Security.SecTrustGetCertificateCount.argtypes = [SecTrustRef]
277
+ Security.SecTrustGetCertificateCount.restype = CFIndex
278
+
279
+ Security.SecTrustGetCertificateAtIndex.argtypes = [SecTrustRef, CFIndex]
280
+ Security.SecTrustGetCertificateAtIndex.restype = SecCertificateRef
281
+
282
+ Security.SSLCreateContext.argtypes = [
283
+ CFAllocatorRef,
284
+ SSLProtocolSide,
285
+ SSLConnectionType,
286
+ ]
287
+ Security.SSLCreateContext.restype = SSLContextRef
288
+
289
+ Security.SSLSetSessionOption.argtypes = [SSLContextRef, SSLSessionOption, Boolean]
290
+ Security.SSLSetSessionOption.restype = OSStatus
291
+
292
+ Security.SSLSetProtocolVersionMin.argtypes = [SSLContextRef, SSLProtocol]
293
+ Security.SSLSetProtocolVersionMin.restype = OSStatus
294
+
295
+ Security.SSLSetProtocolVersionMax.argtypes = [SSLContextRef, SSLProtocol]
296
+ Security.SSLSetProtocolVersionMax.restype = OSStatus
297
+
298
+ try:
299
+ Security.SSLSetALPNProtocols.argtypes = [SSLContextRef, CFArrayRef]
300
+ Security.SSLSetALPNProtocols.restype = OSStatus
301
+ except AttributeError:
302
+ # Supported only in 10.12+
303
+ pass
304
+
305
+ Security.SecCopyErrorMessageString.argtypes = [OSStatus, c_void_p]
306
+ Security.SecCopyErrorMessageString.restype = CFStringRef
307
+
308
+ Security.SSLReadFunc = SSLReadFunc
309
+ Security.SSLWriteFunc = SSLWriteFunc
310
+ Security.SSLContextRef = SSLContextRef
311
+ Security.SSLProtocol = SSLProtocol
312
+ Security.SSLCipherSuite = SSLCipherSuite
313
+ Security.SecIdentityRef = SecIdentityRef
314
+ Security.SecKeychainRef = SecKeychainRef
315
+ Security.SecTrustRef = SecTrustRef
316
+ Security.SecTrustResultType = SecTrustResultType
317
+ Security.SecExternalFormat = SecExternalFormat
318
+ Security.OSStatus = OSStatus
319
+
320
+ Security.kSecImportExportPassphrase = CFStringRef.in_dll(
321
+ Security, "kSecImportExportPassphrase"
322
+ )
323
+ Security.kSecImportItemIdentity = CFStringRef.in_dll(
324
+ Security, "kSecImportItemIdentity"
325
+ )
326
+
327
+ # CoreFoundation time!
328
+ CoreFoundation.CFRetain.argtypes = [CFTypeRef]
329
+ CoreFoundation.CFRetain.restype = CFTypeRef
330
+
331
+ CoreFoundation.CFRelease.argtypes = [CFTypeRef]
332
+ CoreFoundation.CFRelease.restype = None
333
+
334
+ CoreFoundation.CFGetTypeID.argtypes = [CFTypeRef]
335
+ CoreFoundation.CFGetTypeID.restype = CFTypeID
336
+
337
+ CoreFoundation.CFStringCreateWithCString.argtypes = [
338
+ CFAllocatorRef,
339
+ c_char_p,
340
+ CFStringEncoding,
341
+ ]
342
+ CoreFoundation.CFStringCreateWithCString.restype = CFStringRef
343
+
344
+ CoreFoundation.CFStringGetCStringPtr.argtypes = [CFStringRef, CFStringEncoding]
345
+ CoreFoundation.CFStringGetCStringPtr.restype = c_char_p
346
+
347
+ CoreFoundation.CFStringGetCString.argtypes = [
348
+ CFStringRef,
349
+ c_char_p,
350
+ CFIndex,
351
+ CFStringEncoding,
352
+ ]
353
+ CoreFoundation.CFStringGetCString.restype = c_bool
354
+
355
+ CoreFoundation.CFDataCreate.argtypes = [CFAllocatorRef, c_char_p, CFIndex]
356
+ CoreFoundation.CFDataCreate.restype = CFDataRef
357
+
358
+ CoreFoundation.CFDataGetLength.argtypes = [CFDataRef]
359
+ CoreFoundation.CFDataGetLength.restype = CFIndex
360
+
361
+ CoreFoundation.CFDataGetBytePtr.argtypes = [CFDataRef]
362
+ CoreFoundation.CFDataGetBytePtr.restype = c_void_p
363
+
364
+ CoreFoundation.CFDictionaryCreate.argtypes = [
365
+ CFAllocatorRef,
366
+ POINTER(CFTypeRef),
367
+ POINTER(CFTypeRef),
368
+ CFIndex,
369
+ CFDictionaryKeyCallBacks,
370
+ CFDictionaryValueCallBacks,
371
+ ]
372
+ CoreFoundation.CFDictionaryCreate.restype = CFDictionaryRef
373
+
374
+ CoreFoundation.CFDictionaryGetValue.argtypes = [CFDictionaryRef, CFTypeRef]
375
+ CoreFoundation.CFDictionaryGetValue.restype = CFTypeRef
376
+
377
+ CoreFoundation.CFArrayCreate.argtypes = [
378
+ CFAllocatorRef,
379
+ POINTER(CFTypeRef),
380
+ CFIndex,
381
+ CFArrayCallBacks,
382
+ ]
383
+ CoreFoundation.CFArrayCreate.restype = CFArrayRef
384
+
385
+ CoreFoundation.CFArrayCreateMutable.argtypes = [
386
+ CFAllocatorRef,
387
+ CFIndex,
388
+ CFArrayCallBacks,
389
+ ]
390
+ CoreFoundation.CFArrayCreateMutable.restype = CFMutableArrayRef
391
+
392
+ CoreFoundation.CFArrayAppendValue.argtypes = [CFMutableArrayRef, c_void_p]
393
+ CoreFoundation.CFArrayAppendValue.restype = None
394
+
395
+ CoreFoundation.CFArrayGetCount.argtypes = [CFArrayRef]
396
+ CoreFoundation.CFArrayGetCount.restype = CFIndex
397
+
398
+ CoreFoundation.CFArrayGetValueAtIndex.argtypes = [CFArrayRef, CFIndex]
399
+ CoreFoundation.CFArrayGetValueAtIndex.restype = c_void_p
400
+
401
+ CoreFoundation.kCFAllocatorDefault = CFAllocatorRef.in_dll(
402
+ CoreFoundation, "kCFAllocatorDefault"
403
+ )
404
+ CoreFoundation.kCFTypeArrayCallBacks = c_void_p.in_dll(
405
+ CoreFoundation, "kCFTypeArrayCallBacks"
406
+ )
407
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks = c_void_p.in_dll(
408
+ CoreFoundation, "kCFTypeDictionaryKeyCallBacks"
409
+ )
410
+ CoreFoundation.kCFTypeDictionaryValueCallBacks = c_void_p.in_dll(
411
+ CoreFoundation, "kCFTypeDictionaryValueCallBacks"
412
+ )
413
+
414
+ CoreFoundation.CFTypeRef = CFTypeRef
415
+ CoreFoundation.CFArrayRef = CFArrayRef
416
+ CoreFoundation.CFStringRef = CFStringRef
417
+ CoreFoundation.CFDictionaryRef = CFDictionaryRef
418
+
419
+ except AttributeError:
420
+ raise ImportError("Error initializing ctypes")
421
+
422
+
423
+ class CFConst(object):
424
+ """
425
+ A class object that acts as essentially a namespace for CoreFoundation
426
+ constants.
427
+ """
428
+
429
+ kCFStringEncodingUTF8 = CFStringEncoding(0x08000100)
430
+
431
+
432
+ class SecurityConst(object):
433
+ """
434
+ A class object that acts as essentially a namespace for Security constants.
435
+ """
436
+
437
+ kSSLSessionOptionBreakOnServerAuth = 0
438
+
439
+ kSSLProtocol2 = 1
440
+ kSSLProtocol3 = 2
441
+ kTLSProtocol1 = 4
442
+ kTLSProtocol11 = 7
443
+ kTLSProtocol12 = 8
444
+ # SecureTransport does not support TLS 1.3 even if there's a constant for it
445
+ kTLSProtocol13 = 10
446
+ kTLSProtocolMaxSupported = 999
447
+
448
+ kSSLClientSide = 1
449
+ kSSLStreamType = 0
450
+
451
+ kSecFormatPEMSequence = 10
452
+
453
+ kSecTrustResultInvalid = 0
454
+ kSecTrustResultProceed = 1
455
+ # This gap is present on purpose: this was kSecTrustResultConfirm, which
456
+ # is deprecated.
457
+ kSecTrustResultDeny = 3
458
+ kSecTrustResultUnspecified = 4
459
+ kSecTrustResultRecoverableTrustFailure = 5
460
+ kSecTrustResultFatalTrustFailure = 6
461
+ kSecTrustResultOtherError = 7
462
+
463
+ errSSLProtocol = -9800
464
+ errSSLWouldBlock = -9803
465
+ errSSLClosedGraceful = -9805
466
+ errSSLClosedNoNotify = -9816
467
+ errSSLClosedAbort = -9806
468
+
469
+ errSSLXCertChainInvalid = -9807
470
+ errSSLCrypto = -9809
471
+ errSSLInternal = -9810
472
+ errSSLCertExpired = -9814
473
+ errSSLCertNotYetValid = -9815
474
+ errSSLUnknownRootCert = -9812
475
+ errSSLNoRootCert = -9813
476
+ errSSLHostNameMismatch = -9843
477
+ errSSLPeerHandshakeFail = -9824
478
+ errSSLPeerUserCancelled = -9839
479
+ errSSLWeakPeerEphemeralDHKey = -9850
480
+ errSSLServerAuthCompleted = -9841
481
+ errSSLRecordOverflow = -9847
482
+
483
+ errSecVerifyFailed = -67808
484
+ errSecNoTrustSettings = -25263
485
+ errSecItemNotFound = -25300
486
+ errSecInvalidTrustSettings = -25262
487
+
488
+ # Cipher suites. We only pick the ones our default cipher string allows.
489
+ # Source: https://developer.apple.com/documentation/security/1550981-ssl_cipher_suite_values
490
+ TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384 = 0xC02C
491
+ TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384 = 0xC030
492
+ TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256 = 0xC02B
493
+ TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256 = 0xC02F
494
+ TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA9
495
+ TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256 = 0xCCA8
496
+ TLS_DHE_RSA_WITH_AES_256_GCM_SHA384 = 0x009F
497
+ TLS_DHE_RSA_WITH_AES_128_GCM_SHA256 = 0x009E
498
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384 = 0xC024
499
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384 = 0xC028
500
+ TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA = 0xC00A
501
+ TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA = 0xC014
502
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA256 = 0x006B
503
+ TLS_DHE_RSA_WITH_AES_256_CBC_SHA = 0x0039
504
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256 = 0xC023
505
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256 = 0xC027
506
+ TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA = 0xC009
507
+ TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA = 0xC013
508
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA256 = 0x0067
509
+ TLS_DHE_RSA_WITH_AES_128_CBC_SHA = 0x0033
510
+ TLS_RSA_WITH_AES_256_GCM_SHA384 = 0x009D
511
+ TLS_RSA_WITH_AES_128_GCM_SHA256 = 0x009C
512
+ TLS_RSA_WITH_AES_256_CBC_SHA256 = 0x003D
513
+ TLS_RSA_WITH_AES_128_CBC_SHA256 = 0x003C
514
+ TLS_RSA_WITH_AES_256_CBC_SHA = 0x0035
515
+ TLS_RSA_WITH_AES_128_CBC_SHA = 0x002F
516
+ TLS_AES_128_GCM_SHA256 = 0x1301
517
+ TLS_AES_256_GCM_SHA384 = 0x1302
518
+ TLS_AES_128_CCM_8_SHA256 = 0x1305
519
+ TLS_AES_128_CCM_SHA256 = 0x1304
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/_securetransport/low_level.py ADDED
@@ -0,0 +1,397 @@
1
+ """
2
+ Low-level helpers for the SecureTransport bindings.
3
+
4
+ These are Python functions that are not directly related to the high-level APIs
5
+ but are necessary to get them to work. They include a whole bunch of low-level
6
+ CoreFoundation messing about and memory management. The concerns in this module
7
+ are almost entirely about trying to avoid memory leaks and providing
8
+ appropriate and useful assistance to the higher-level code.
9
+ """
10
+ import base64
11
+ import ctypes
12
+ import itertools
13
+ import os
14
+ import re
15
+ import ssl
16
+ import struct
17
+ import tempfile
18
+
19
+ from .bindings import CFConst, CoreFoundation, Security
20
+
21
+ # This regular expression is used to grab PEM data out of a PEM bundle.
22
+ _PEM_CERTS_RE = re.compile(
23
+ b"-----BEGIN CERTIFICATE-----\n(.*?)\n-----END CERTIFICATE-----", re.DOTALL
24
+ )
25
+
26
+
27
+ def _cf_data_from_bytes(bytestring):
28
+ """
29
+ Given a bytestring, create a CFData object from it. This CFData object must
30
+ be CFReleased by the caller.
31
+ """
32
+ return CoreFoundation.CFDataCreate(
33
+ CoreFoundation.kCFAllocatorDefault, bytestring, len(bytestring)
34
+ )
35
+
36
+
37
+ def _cf_dictionary_from_tuples(tuples):
38
+ """
39
+ Given a list of Python tuples, create an associated CFDictionary.
40
+ """
41
+ dictionary_size = len(tuples)
42
+
43
+ # We need to get the dictionary keys and values out in the same order.
44
+ keys = (t[0] for t in tuples)
45
+ values = (t[1] for t in tuples)
46
+ cf_keys = (CoreFoundation.CFTypeRef * dictionary_size)(*keys)
47
+ cf_values = (CoreFoundation.CFTypeRef * dictionary_size)(*values)
48
+
49
+ return CoreFoundation.CFDictionaryCreate(
50
+ CoreFoundation.kCFAllocatorDefault,
51
+ cf_keys,
52
+ cf_values,
53
+ dictionary_size,
54
+ CoreFoundation.kCFTypeDictionaryKeyCallBacks,
55
+ CoreFoundation.kCFTypeDictionaryValueCallBacks,
56
+ )
57
+
58
+
59
+ def _cfstr(py_bstr):
60
+ """
61
+ Given a Python binary data, create a CFString.
62
+ The string must be CFReleased by the caller.
63
+ """
64
+ c_str = ctypes.c_char_p(py_bstr)
65
+ cf_str = CoreFoundation.CFStringCreateWithCString(
66
+ CoreFoundation.kCFAllocatorDefault,
67
+ c_str,
68
+ CFConst.kCFStringEncodingUTF8,
69
+ )
70
+ return cf_str
71
+
72
+
73
+ def _create_cfstring_array(lst):
74
+ """
75
+ Given a list of Python binary data, create an associated CFMutableArray.
76
+ The array must be CFReleased by the caller.
77
+
78
+ Raises an ssl.SSLError on failure.
79
+ """
80
+ cf_arr = None
81
+ try:
82
+ cf_arr = CoreFoundation.CFArrayCreateMutable(
83
+ CoreFoundation.kCFAllocatorDefault,
84
+ 0,
85
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
86
+ )
87
+ if not cf_arr:
88
+ raise MemoryError("Unable to allocate memory!")
89
+ for item in lst:
90
+ cf_str = _cfstr(item)
91
+ if not cf_str:
92
+ raise MemoryError("Unable to allocate memory!")
93
+ try:
94
+ CoreFoundation.CFArrayAppendValue(cf_arr, cf_str)
95
+ finally:
96
+ CoreFoundation.CFRelease(cf_str)
97
+ except BaseException as e:
98
+ if cf_arr:
99
+ CoreFoundation.CFRelease(cf_arr)
100
+ raise ssl.SSLError("Unable to allocate array: %s" % (e,))
101
+ return cf_arr
102
+
103
+
104
+ def _cf_string_to_unicode(value):
105
+ """
106
+ Creates a Unicode string from a CFString object. Used entirely for error
107
+ reporting.
108
+
109
+ Yes, it annoys me quite a lot that this function is this complex.
110
+ """
111
+ value_as_void_p = ctypes.cast(value, ctypes.POINTER(ctypes.c_void_p))
112
+
113
+ string = CoreFoundation.CFStringGetCStringPtr(
114
+ value_as_void_p, CFConst.kCFStringEncodingUTF8
115
+ )
116
+ if string is None:
117
+ buffer = ctypes.create_string_buffer(1024)
118
+ result = CoreFoundation.CFStringGetCString(
119
+ value_as_void_p, buffer, 1024, CFConst.kCFStringEncodingUTF8
120
+ )
121
+ if not result:
122
+ raise OSError("Error copying C string from CFStringRef")
123
+ string = buffer.value
124
+ if string is not None:
125
+ string = string.decode("utf-8")
126
+ return string
127
+
128
+
129
+ def _assert_no_error(error, exception_class=None):
130
+ """
131
+ Checks the return code and throws an exception if there is an error to
132
+ report
133
+ """
134
+ if error == 0:
135
+ return
136
+
137
+ cf_error_string = Security.SecCopyErrorMessageString(error, None)
138
+ output = _cf_string_to_unicode(cf_error_string)
139
+ CoreFoundation.CFRelease(cf_error_string)
140
+
141
+ if output is None or output == u"":
142
+ output = u"OSStatus %s" % error
143
+
144
+ if exception_class is None:
145
+ exception_class = ssl.SSLError
146
+
147
+ raise exception_class(output)
148
+
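The convention throughout this module is that every Security.framework call returns an OSStatus, which is funnelled straight through _assert_no_error; a tiny macOS-only sketch (the error code is just an illustrative constant):

    import ssl

    _assert_no_error(0)           # zero means success: returns silently
    try:
        _assert_no_error(-25300)  # errSecItemNotFound, for illustration
    except ssl.SSLError as exc:
        print(exc)                # message copied out of Security.framework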
149
+
150
+ def _cert_array_from_pem(pem_bundle):
151
+ """
152
+ Given a bundle of certs in PEM format, turns them into a CFArray of certs
153
+ that can be used to validate a cert chain.
154
+ """
155
+ # Normalize the PEM bundle's line endings.
156
+ pem_bundle = pem_bundle.replace(b"\r\n", b"\n")
157
+
158
+ der_certs = [
159
+ base64.b64decode(match.group(1)) for match in _PEM_CERTS_RE.finditer(pem_bundle)
160
+ ]
161
+ if not der_certs:
162
+ raise ssl.SSLError("No root certificates specified")
163
+
164
+ cert_array = CoreFoundation.CFArrayCreateMutable(
165
+ CoreFoundation.kCFAllocatorDefault,
166
+ 0,
167
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
168
+ )
169
+ if not cert_array:
170
+ raise ssl.SSLError("Unable to allocate memory!")
171
+
172
+ try:
173
+ for der_bytes in der_certs:
174
+ certdata = _cf_data_from_bytes(der_bytes)
175
+ if not certdata:
176
+ raise ssl.SSLError("Unable to allocate memory!")
177
+ cert = Security.SecCertificateCreateWithData(
178
+ CoreFoundation.kCFAllocatorDefault, certdata
179
+ )
180
+ CoreFoundation.CFRelease(certdata)
181
+ if not cert:
182
+ raise ssl.SSLError("Unable to build cert object!")
183
+
184
+ CoreFoundation.CFArrayAppendValue(cert_array, cert)
185
+ CoreFoundation.CFRelease(cert)
186
+ except Exception:
187
+ # We need to free the array before the exception bubbles further.
188
+ # We only want to do that if an error occurs: otherwise, the caller
189
+ # should free.
190
+ CoreFoundation.CFRelease(cert_array)
191
+ raise
192
+
193
+ return cert_array
194
+
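A hedged sketch of how the returned trust array is typically used and then released (the bundle path is an assumption):

    with open("/etc/ssl/cert.pem", "rb") as f:       # assumed CA-bundle location
        trust_anchors = _cert_array_from_pem(f.read())
    try:
        pass  # e.g. hand the array to Security.SecTrustSetAnchorCertificates
    finally:
        CoreFoundation.CFRelease(trust_anchors)      # on success the caller owns the array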
195
+
196
+ def _is_cert(item):
197
+ """
198
+ Returns True if a given CFTypeRef is a certificate.
199
+ """
200
+ expected = Security.SecCertificateGetTypeID()
201
+ return CoreFoundation.CFGetTypeID(item) == expected
202
+
203
+
204
+ def _is_identity(item):
205
+ """
206
+ Returns True if a given CFTypeRef is an identity.
207
+ """
208
+ expected = Security.SecIdentityGetTypeID()
209
+ return CoreFoundation.CFGetTypeID(item) == expected
210
+
211
+
212
+ def _temporary_keychain():
213
+ """
214
+ This function creates a temporary Mac keychain that we can use to work with
215
+ credentials. This keychain uses a one-time password and a temporary file to
216
+ store the data. We expect to have one keychain per socket. The returned
217
+ SecKeychainRef must be freed by the caller, including calling
218
+ SecKeychainDelete.
219
+
220
+ Returns a tuple of the SecKeychainRef and the path to the temporary
221
+ directory that contains it.
222
+ """
223
+ # Unfortunately, SecKeychainCreate requires a path to a keychain. This
224
+ # means we cannot use mkstemp to use a generic temporary file. Instead,
225
+ # we're going to create a temporary directory and a filename to use there.
226
+ # This filename will be 8 random bytes expanded into hex (base16). We also need
227
+ # some random bytes to password-protect the keychain we're creating, so we
228
+ # ask for 40 random bytes.
229
+ random_bytes = os.urandom(40)
230
+ filename = base64.b16encode(random_bytes[:8]).decode("utf-8")
231
+ password = base64.b16encode(random_bytes[8:]) # Must be valid UTF-8
232
+ tempdirectory = tempfile.mkdtemp()
233
+
234
+ keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
235
+
236
+ # We now want to create the keychain itself.
237
+ keychain = Security.SecKeychainRef()
238
+ status = Security.SecKeychainCreate(
239
+ keychain_path, len(password), password, False, None, ctypes.byref(keychain)
240
+ )
241
+ _assert_no_error(status)
242
+
243
+ # Having created the keychain, we want to pass it off to the caller.
244
+ return keychain, tempdirectory
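
Editor's note: the filename and password generation above is plain standard-library code; only the `SecKeychainCreate` call needs the ctypes bindings. A sketch of just that portion (the function name is illustrative):

```python
import base64
import os
import shutil
import tempfile

def make_keychain_location():
    """Pick the random filename, password, and directory the way the code above does."""
    random_bytes = os.urandom(40)
    filename = base64.b16encode(random_bytes[:8]).decode("utf-8")  # 16 hex chars
    password = base64.b16encode(random_bytes[8:])  # bytes, guaranteed valid UTF-8
    tempdirectory = tempfile.mkdtemp()
    keychain_path = os.path.join(tempdirectory, filename).encode("utf-8")
    return keychain_path, password, tempdirectory

path, password, tmpdir = make_keychain_location()
shutil.rmtree(tmpdir)  # the real caller deletes the keychain and this directory
```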
245
+
246
+
247
+ def _load_items_from_file(keychain, path):
248
+ """
249
+ Given a single file, loads all the trust objects from it into arrays and
250
+ the keychain.
251
+ Returns a tuple of lists: the first list is a list of identities, the
252
+ second a list of certs.
253
+ """
254
+ certificates = []
255
+ identities = []
256
+ result_array = None
257
+
258
+ with open(path, "rb") as f:
259
+ raw_filedata = f.read()
260
+
261
+ try:
262
+ filedata = CoreFoundation.CFDataCreate(
263
+ CoreFoundation.kCFAllocatorDefault, raw_filedata, len(raw_filedata)
264
+ )
265
+ result_array = CoreFoundation.CFArrayRef()
266
+ result = Security.SecItemImport(
267
+ filedata, # cert data
268
+ None, # Filename, leaving it out for now
269
+ None, # What the type of the file is, we don't care
270
+ None, # what's in the file, we don't care
271
+ 0, # import flags
272
+ None, # key params, can include passphrase in the future
273
+ keychain, # The keychain to insert into
274
+ ctypes.byref(result_array), # Results
275
+ )
276
+ _assert_no_error(result)
277
+
278
+ # A CFArray is not very useful to us as an intermediary
279
+ # representation, so we are going to extract the objects we want
280
+ # and then free the array. We don't need to keep hold of keys: the
281
+ # keychain already has them!
282
+ result_count = CoreFoundation.CFArrayGetCount(result_array)
283
+ for index in range(result_count):
284
+ item = CoreFoundation.CFArrayGetValueAtIndex(result_array, index)
285
+ item = ctypes.cast(item, CoreFoundation.CFTypeRef)
286
+
287
+ if _is_cert(item):
288
+ CoreFoundation.CFRetain(item)
289
+ certificates.append(item)
290
+ elif _is_identity(item):
291
+ CoreFoundation.CFRetain(item)
292
+ identities.append(item)
293
+ finally:
294
+ if result_array:
295
+ CoreFoundation.CFRelease(result_array)
296
+
297
+ CoreFoundation.CFRelease(filedata)
298
+
299
+ return (identities, certificates)
300
+
301
+
302
+ def _load_client_cert_chain(keychain, *paths):
303
+ """
304
+ Load certificates and maybe keys from a number of files. Has the end goal
305
+ of returning a CFArray containing one SecIdentityRef, and then zero or more
306
+ SecCertificateRef objects, suitable for use as a client certificate trust
307
+ chain.
308
+ """
309
+ # Ok, the strategy.
310
+ #
311
+ # This relies on knowing that macOS will not give you a SecIdentityRef
312
+ # unless you have imported a key into a keychain. This is a somewhat
313
+ # artificial limitation of macOS (for example, it doesn't necessarily
314
+ # affect iOS), but there is nothing inside Security.framework that lets you
315
+ # get a SecIdentityRef without having a key in a keychain.
316
+ #
317
+ # So the policy here is we take all the files and iterate them in order.
318
+ # Each one will use SecItemImport to have one or more objects loaded from
319
+ # it. We will also point at a keychain that macOS can use to work with the
320
+ # private key.
321
+ #
322
+ # Once we have all the objects, we'll check what we actually have. If we
323
+ # already have a SecIdentityRef in hand, fab: we'll use that. Otherwise,
324
+ # we'll take the first certificate (which we assume to be our leaf) and
325
+ # ask the keychain to give us a SecIdentityRef with that cert's associated
326
+ # key.
327
+ #
328
+ # We'll then return a CFArray containing the trust chain: one
329
+ # SecIdentityRef and then zero-or-more SecCertificateRef objects. The
330
+ # responsibility for freeing this CFArray will be with the caller. This
331
+ # CFArray must remain alive for the entire connection, so in practice it
332
+ # will be stored with a single SSLSocket, along with the reference to the
333
+ # keychain.
334
+ certificates = []
335
+ identities = []
336
+
337
+ # Filter out bad paths.
338
+ paths = (path for path in paths if path)
339
+
340
+ try:
341
+ for file_path in paths:
342
+ new_identities, new_certs = _load_items_from_file(keychain, file_path)
343
+ identities.extend(new_identities)
344
+ certificates.extend(new_certs)
345
+
346
+ # Ok, we have everything. The question is: do we have an identity? If
347
+ # not, we want to grab one from the first cert we have.
348
+ if not identities:
349
+ new_identity = Security.SecIdentityRef()
350
+ status = Security.SecIdentityCreateWithCertificate(
351
+ keychain, certificates[0], ctypes.byref(new_identity)
352
+ )
353
+ _assert_no_error(status)
354
+ identities.append(new_identity)
355
+
356
+ # We now want to release the original certificate, as we no longer
357
+ # need it.
358
+ CoreFoundation.CFRelease(certificates.pop(0))
359
+
360
+ # We now need to build a new CFArray that holds the trust chain.
361
+ trust_chain = CoreFoundation.CFArrayCreateMutable(
362
+ CoreFoundation.kCFAllocatorDefault,
363
+ 0,
364
+ ctypes.byref(CoreFoundation.kCFTypeArrayCallBacks),
365
+ )
366
+ for item in itertools.chain(identities, certificates):
367
+ # ArrayAppendValue does a CFRetain on the item. That's fine,
368
+ # because the finally block will release our other refs to them.
369
+ CoreFoundation.CFArrayAppendValue(trust_chain, item)
370
+
371
+ return trust_chain
372
+ finally:
373
+ for obj in itertools.chain(identities, certificates):
374
+ CoreFoundation.CFRelease(obj)
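
Editor's note: the ordering rule described in the strategy comments (one identity first, then the remaining certificates) can be shown with plain lists; the strings below are placeholders, not real CF objects.

```python
import itertools

certificates = ["leaf-cert", "intermediate-cert"]  # placeholder objects
identities = []                                    # nothing was imported as an identity

# No identity yet: derive one from the leaf and drop the leaf from the cert list,
# matching the SecIdentityCreateWithCertificate branch above.
if not identities:
    identities.append("identity-for-" + certificates.pop(0))

trust_chain = list(itertools.chain(identities, certificates))
print(trust_chain)  # ['identity-for-leaf-cert', 'intermediate-cert']
```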
375
+
376
+
377
+ TLS_PROTOCOL_VERSIONS = {
378
+ "SSLv2": (0, 2),
379
+ "SSLv3": (3, 0),
380
+ "TLSv1": (3, 1),
381
+ "TLSv1.1": (3, 2),
382
+ "TLSv1.2": (3, 3),
383
+ }
384
+
385
+
386
+ def _build_tls_unknown_ca_alert(version):
387
+ """
388
+ Builds a TLS alert record for an unknown CA.
389
+ """
390
+ ver_maj, ver_min = TLS_PROTOCOL_VERSIONS[version]
391
+ severity_fatal = 0x02
392
+ description_unknown_ca = 0x30
393
+ msg = struct.pack(">BB", severity_fatal, description_unknown_ca)
394
+ msg_len = len(msg)
395
+ record_type_alert = 0x15
396
+ record = struct.pack(">BBBH", record_type_alert, ver_maj, ver_min, msg_len) + msg
397
+ return record
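
Editor's note: a small worked example of the record layout, using the TLSv1.2 version bytes from `TLS_PROTOCOL_VERSIONS` and unpacking the result to show the fields.

```python
import struct

ver_maj, ver_min = (3, 3)                       # "TLSv1.2"
msg = struct.pack(">BB", 0x02, 0x30)            # fatal severity, unknown_ca description
record = struct.pack(">BBBH", 0x15, ver_maj, ver_min, len(msg)) + msg

rec_type, maj, minor, length = struct.unpack(">BBBH", record[:5])
print(record.hex())                             # '15030300020230'
assert (rec_type, maj, minor, length) == (0x15, 3, 3, 2)
```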
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/appengine.py ADDED
@@ -0,0 +1,314 @@
1
+ """
2
+ This module provides a pool manager that uses Google App Engine's
3
+ `URLFetch Service <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
4
+
5
+ Example usage::
6
+
7
+ from pip._vendor.urllib3 import PoolManager
8
+ from pip._vendor.urllib3.contrib.appengine import AppEngineManager, is_appengine_sandbox
9
+
10
+ if is_appengine_sandbox():
11
+ # AppEngineManager uses AppEngine's URLFetch API behind the scenes
12
+ http = AppEngineManager()
13
+ else:
14
+ # PoolManager uses a socket-level API behind the scenes
15
+ http = PoolManager()
16
+
17
+ r = http.request('GET', 'https://google.com/')
18
+
19
+ There are `limitations <https://cloud.google.com/appengine/docs/python/\
20
+ urlfetch/#Python_Quotas_and_limits>`_ to the URLFetch service and it may not be
21
+ the best choice for your application. There are three options for using
22
+ urllib3 on Google App Engine:
23
+
24
+ 1. You can use :class:`AppEngineManager` with URLFetch. URLFetch is
25
+ cost-effective in many circumstances as long as your usage is within the
26
+ limitations.
27
+ 2. You can use a normal :class:`~urllib3.PoolManager` by enabling sockets.
28
+ Sockets also have `limitations and restrictions
29
+ <https://cloud.google.com/appengine/docs/python/sockets/\
30
+ #limitations-and-restrictions>`_ and have a lower free quota than URLFetch.
31
+ To use sockets, be sure to specify the following in your ``app.yaml``::
32
+
33
+ env_variables:
34
+ GAE_USE_SOCKETS_HTTPLIB : 'true'
35
+
36
+ 3. If you are using `App Engine Flexible
37
+ <https://cloud.google.com/appengine/docs/flexible/>`_, you can use the standard
38
+ :class:`PoolManager` without any configuration or special environment variables.
39
+ """
40
+
41
+ from __future__ import absolute_import
42
+
43
+ import io
44
+ import logging
45
+ import warnings
46
+
47
+ from ..exceptions import (
48
+ HTTPError,
49
+ HTTPWarning,
50
+ MaxRetryError,
51
+ ProtocolError,
52
+ SSLError,
53
+ TimeoutError,
54
+ )
55
+ from ..packages.six.moves.urllib.parse import urljoin
56
+ from ..request import RequestMethods
57
+ from ..response import HTTPResponse
58
+ from ..util.retry import Retry
59
+ from ..util.timeout import Timeout
60
+ from . import _appengine_environ
61
+
62
+ try:
63
+ from google.appengine.api import urlfetch
64
+ except ImportError:
65
+ urlfetch = None
66
+
67
+
68
+ log = logging.getLogger(__name__)
69
+
70
+
71
+ class AppEnginePlatformWarning(HTTPWarning):
72
+ pass
73
+
74
+
75
+ class AppEnginePlatformError(HTTPError):
76
+ pass
77
+
78
+
79
+ class AppEngineManager(RequestMethods):
80
+ """
81
+ Connection manager for Google App Engine sandbox applications.
82
+
83
+ This manager uses the URLFetch service directly instead of using the
84
+ emulated httplib, and is subject to URLFetch limitations as described in
85
+ the App Engine documentation `here
86
+ <https://cloud.google.com/appengine/docs/python/urlfetch>`_.
87
+
88
+ Notably it will raise an :class:`AppEnginePlatformError` if:
89
+ * URLFetch is not available.
90
+ * If you attempt to use this on App Engine Flexible, as full socket
91
+ support is available.
92
+ * If a request size is more than 10 megabytes.
93
+ * If a response size is more than 32 megabytes.
94
+ * If you use an unsupported request method such as OPTIONS.
95
+
96
+ Beyond those cases, it will raise normal urllib3 errors.
97
+ """
98
+
99
+ def __init__(
100
+ self,
101
+ headers=None,
102
+ retries=None,
103
+ validate_certificate=True,
104
+ urlfetch_retries=True,
105
+ ):
106
+ if not urlfetch:
107
+ raise AppEnginePlatformError(
108
+ "URLFetch is not available in this environment."
109
+ )
110
+
111
+ warnings.warn(
112
+ "urllib3 is using URLFetch on Google App Engine sandbox instead "
113
+ "of sockets. To use sockets directly instead of URLFetch see "
114
+ "https://urllib3.readthedocs.io/en/1.26.x/reference/urllib3.contrib.html.",
115
+ AppEnginePlatformWarning,
116
+ )
117
+
118
+ RequestMethods.__init__(self, headers)
119
+ self.validate_certificate = validate_certificate
120
+ self.urlfetch_retries = urlfetch_retries
121
+
122
+ self.retries = retries or Retry.DEFAULT
123
+
124
+ def __enter__(self):
125
+ return self
126
+
127
+ def __exit__(self, exc_type, exc_val, exc_tb):
128
+ # Return False to re-raise any potential exceptions
129
+ return False
130
+
131
+ def urlopen(
132
+ self,
133
+ method,
134
+ url,
135
+ body=None,
136
+ headers=None,
137
+ retries=None,
138
+ redirect=True,
139
+ timeout=Timeout.DEFAULT_TIMEOUT,
140
+ **response_kw
141
+ ):
142
+
143
+ retries = self._get_retries(retries, redirect)
144
+
145
+ try:
146
+ follow_redirects = redirect and retries.redirect != 0 and retries.total
147
+ response = urlfetch.fetch(
148
+ url,
149
+ payload=body,
150
+ method=method,
151
+ headers=headers or {},
152
+ allow_truncated=False,
153
+ follow_redirects=self.urlfetch_retries and follow_redirects,
154
+ deadline=self._get_absolute_timeout(timeout),
155
+ validate_certificate=self.validate_certificate,
156
+ )
157
+ except urlfetch.DeadlineExceededError as e:
158
+ raise TimeoutError(self, e)
159
+
160
+ except urlfetch.InvalidURLError as e:
161
+ if "too large" in str(e):
162
+ raise AppEnginePlatformError(
163
+ "URLFetch request too large, URLFetch only "
164
+ "supports requests up to 10mb in size.",
165
+ e,
166
+ )
167
+ raise ProtocolError(e)
168
+
169
+ except urlfetch.DownloadError as e:
170
+ if "Too many redirects" in str(e):
171
+ raise MaxRetryError(self, url, reason=e)
172
+ raise ProtocolError(e)
173
+
174
+ except urlfetch.ResponseTooLargeError as e:
175
+ raise AppEnginePlatformError(
176
+ "URLFetch response too large, URLFetch only supports "
177
+ "responses up to 32mb in size.",
178
+ e,
179
+ )
180
+
181
+ except urlfetch.SSLCertificateError as e:
182
+ raise SSLError(e)
183
+
184
+ except urlfetch.InvalidMethodError as e:
185
+ raise AppEnginePlatformError(
186
+ "URLFetch does not support method: %s" % method, e
187
+ )
188
+
189
+ http_response = self._urlfetch_response_to_http_response(
190
+ response, retries=retries, **response_kw
191
+ )
192
+
193
+ # Handle redirect?
194
+ redirect_location = redirect and http_response.get_redirect_location()
195
+ if redirect_location:
196
+ # Check for redirect response
197
+ if self.urlfetch_retries and retries.raise_on_redirect:
198
+ raise MaxRetryError(self, url, "too many redirects")
199
+ else:
200
+ if http_response.status == 303:
201
+ method = "GET"
202
+
203
+ try:
204
+ retries = retries.increment(
205
+ method, url, response=http_response, _pool=self
206
+ )
207
+ except MaxRetryError:
208
+ if retries.raise_on_redirect:
209
+ raise MaxRetryError(self, url, "too many redirects")
210
+ return http_response
211
+
212
+ retries.sleep_for_retry(http_response)
213
+ log.debug("Redirecting %s -> %s", url, redirect_location)
214
+ redirect_url = urljoin(url, redirect_location)
215
+ return self.urlopen(
216
+ method,
217
+ redirect_url,
218
+ body,
219
+ headers,
220
+ retries=retries,
221
+ redirect=redirect,
222
+ timeout=timeout,
223
+ **response_kw
224
+ )
225
+
226
+ # Check if we should retry the HTTP response.
227
+ has_retry_after = bool(http_response.getheader("Retry-After"))
228
+ if retries.is_retry(method, http_response.status, has_retry_after):
229
+ retries = retries.increment(method, url, response=http_response, _pool=self)
230
+ log.debug("Retry: %s", url)
231
+ retries.sleep(http_response)
232
+ return self.urlopen(
233
+ method,
234
+ url,
235
+ body=body,
236
+ headers=headers,
237
+ retries=retries,
238
+ redirect=redirect,
239
+ timeout=timeout,
240
+ **response_kw
241
+ )
242
+
243
+ return http_response
244
+
245
+ def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
246
+
247
+ if is_prod_appengine():
248
+ # Production GAE handles deflate encoding automatically, but does
249
+ # not remove the encoding header.
250
+ content_encoding = urlfetch_resp.headers.get("content-encoding")
251
+
252
+ if content_encoding == "deflate":
253
+ del urlfetch_resp.headers["content-encoding"]
254
+
255
+ transfer_encoding = urlfetch_resp.headers.get("transfer-encoding")
256
+ # We have a full response's content,
257
+ # so let's make sure we don't report ourselves as chunked data.
258
+ if transfer_encoding == "chunked":
259
+ encodings = transfer_encoding.split(",")
260
+ encodings.remove("chunked")
261
+ urlfetch_resp.headers["transfer-encoding"] = ",".join(encodings)
262
+
263
+ original_response = HTTPResponse(
264
+ # In order for decoding to work, we must present the content as
265
+ # a file-like object.
266
+ body=io.BytesIO(urlfetch_resp.content),
267
+ msg=urlfetch_resp.header_msg,
268
+ headers=urlfetch_resp.headers,
269
+ status=urlfetch_resp.status_code,
270
+ **response_kw
271
+ )
272
+
273
+ return HTTPResponse(
274
+ body=io.BytesIO(urlfetch_resp.content),
275
+ headers=urlfetch_resp.headers,
276
+ status=urlfetch_resp.status_code,
277
+ original_response=original_response,
278
+ **response_kw
279
+ )
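
Editor's note: the header cleanup above is easiest to see with a plain dict; this mirrors the deflate and chunked handling line for line (hypothetical header values, no URLFetch involved).

```python
headers = {"content-encoding": "deflate", "transfer-encoding": "chunked"}

# Production GAE already inflated the body, so the encoding header is stale.
if headers.get("content-encoding") == "deflate":
    del headers["content-encoding"]

# The full body is in memory, so advertising chunked transfer would be wrong.
transfer_encoding = headers.get("transfer-encoding")
if transfer_encoding == "chunked":
    encodings = transfer_encoding.split(",")
    encodings.remove("chunked")
    headers["transfer-encoding"] = ",".join(encodings)

print(headers)  # {'transfer-encoding': ''}
```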
280
+
281
+ def _get_absolute_timeout(self, timeout):
282
+ if timeout is Timeout.DEFAULT_TIMEOUT:
283
+ return None # Defer to URLFetch's default.
284
+ if isinstance(timeout, Timeout):
285
+ if timeout._read is not None or timeout._connect is not None:
286
+ warnings.warn(
287
+ "URLFetch does not support granular timeout settings, "
288
+ "reverting to total or default URLFetch timeout.",
289
+ AppEnginePlatformWarning,
290
+ )
291
+ return timeout.total
292
+ return timeout
293
+
294
+ def _get_retries(self, retries, redirect):
295
+ if not isinstance(retries, Retry):
296
+ retries = Retry.from_int(retries, redirect=redirect, default=self.retries)
297
+
298
+ if retries.connect or retries.read or retries.redirect:
299
+ warnings.warn(
300
+ "URLFetch only supports total retries and does not "
301
+ "recognize connect, read, or redirect retry parameters.",
302
+ AppEnginePlatformWarning,
303
+ )
304
+
305
+ return retries
306
+
307
+
308
+ # Alias methods from _appengine_environ to maintain public API interface.
309
+
310
+ is_appengine = _appengine_environ.is_appengine
311
+ is_appengine_sandbox = _appengine_environ.is_appengine_sandbox
312
+ is_local_appengine = _appengine_environ.is_local_appengine
313
+ is_prod_appengine = _appengine_environ.is_prod_appengine
314
+ is_prod_appengine_mvms = _appengine_environ.is_prod_appengine_mvms
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/ntlmpool.py ADDED
@@ -0,0 +1,130 @@
1
+ """
2
+ NTLM authenticating pool, contributed by erikcederstran
3
+
4
+ Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
5
+ """
6
+ from __future__ import absolute_import
7
+
8
+ import warnings
9
+ from logging import getLogger
10
+
11
+ from ntlm import ntlm
12
+
13
+ from .. import HTTPSConnectionPool
14
+ from ..packages.six.moves.http_client import HTTPSConnection
15
+
16
+ warnings.warn(
17
+ "The 'urllib3.contrib.ntlmpool' module is deprecated and will be removed "
18
+ "in urllib3 v2.0 release, urllib3 is not able to support it properly due "
19
+ "to reasons listed in issue: https://github.com/urllib3/urllib3/issues/2282. "
20
+ "If you are a user of this module please comment in the mentioned issue.",
21
+ DeprecationWarning,
22
+ )
23
+
24
+ log = getLogger(__name__)
25
+
26
+
27
+ class NTLMConnectionPool(HTTPSConnectionPool):
28
+ """
29
+ Implements an NTLM authentication version of an urllib3 connection pool
30
+ """
31
+
32
+ scheme = "https"
33
+
34
+ def __init__(self, user, pw, authurl, *args, **kwargs):
35
+ """
36
+ authurl is a random URL on the server that is protected by NTLM.
37
+ user is the Windows user, probably in the DOMAIN\\username format.
38
+ pw is the password for the user.
39
+ """
40
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
41
+ self.authurl = authurl
42
+ self.rawuser = user
43
+ user_parts = user.split("\\", 1)
44
+ self.domain = user_parts[0].upper()
45
+ self.user = user_parts[1]
46
+ self.pw = pw
47
+
48
+ def _new_conn(self):
49
+ # Performs the NTLM handshake that secures the connection. The socket
50
+ # must be kept open while requests are performed.
51
+ self.num_connections += 1
52
+ log.debug(
53
+ "Starting NTLM HTTPS connection no. %d: https://%s%s",
54
+ self.num_connections,
55
+ self.host,
56
+ self.authurl,
57
+ )
58
+
59
+ headers = {"Connection": "Keep-Alive"}
60
+ req_header = "Authorization"
61
+ resp_header = "www-authenticate"
62
+
63
+ conn = HTTPSConnection(host=self.host, port=self.port)
64
+
65
+ # Send negotiation message
66
+ headers[req_header] = "NTLM %s" % ntlm.create_NTLM_NEGOTIATE_MESSAGE(
67
+ self.rawuser
68
+ )
69
+ log.debug("Request headers: %s", headers)
70
+ conn.request("GET", self.authurl, None, headers)
71
+ res = conn.getresponse()
72
+ reshdr = dict(res.getheaders())
73
+ log.debug("Response status: %s %s", res.status, res.reason)
74
+ log.debug("Response headers: %s", reshdr)
75
+ log.debug("Response data: %s [...]", res.read(100))
76
+
77
+ # Remove the reference to the socket, so that it can not be closed by
78
+ # the response object (we want to keep the socket open)
79
+ res.fp = None
80
+
81
+ # Server should respond with a challenge message
82
+ auth_header_values = reshdr[resp_header].split(", ")
83
+ auth_header_value = None
84
+ for s in auth_header_values:
85
+ if s[:5] == "NTLM ":
86
+ auth_header_value = s[5:]
87
+ if auth_header_value is None:
88
+ raise Exception(
89
+ "Unexpected %s response header: %s" % (resp_header, reshdr[resp_header])
90
+ )
91
+
92
+ # Send authentication message
93
+ ServerChallenge, NegotiateFlags = ntlm.parse_NTLM_CHALLENGE_MESSAGE(
94
+ auth_header_value
95
+ )
96
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(
97
+ ServerChallenge, self.user, self.domain, self.pw, NegotiateFlags
98
+ )
99
+ headers[req_header] = "NTLM %s" % auth_msg
100
+ log.debug("Request headers: %s", headers)
101
+ conn.request("GET", self.authurl, None, headers)
102
+ res = conn.getresponse()
103
+ log.debug("Response status: %s %s", res.status, res.reason)
104
+ log.debug("Response headers: %s", dict(res.getheaders()))
105
+ log.debug("Response data: %s [...]", res.read()[:100])
106
+ if res.status != 200:
107
+ if res.status == 401:
108
+ raise Exception("Server rejected request: wrong username or password")
109
+ raise Exception("Wrong server response: %s %s" % (res.status, res.reason))
110
+
111
+ res.fp = None
112
+ log.debug("Connection established")
113
+ return conn
114
+
115
+ def urlopen(
116
+ self,
117
+ method,
118
+ url,
119
+ body=None,
120
+ headers=None,
121
+ retries=3,
122
+ redirect=True,
123
+ assert_same_host=True,
124
+ ):
125
+ if headers is None:
126
+ headers = {}
127
+ headers["Connection"] = "Keep-Alive"
128
+ return super(NTLMConnectionPool, self).urlopen(
129
+ method, url, body, headers, retries, redirect, assert_same_host
130
+ )
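
Editor's note: a hypothetical usage sketch of the class above. The host, path, and credentials are placeholders, and the module only imports if the third-party `ntlm` package is installed (it is deprecated, as the warning above says).

```python
from pip._vendor.urllib3.contrib.ntlmpool import NTLMConnectionPool

pool = NTLMConnectionPool(
    "EXAMPLE\\alice",        # user, in DOMAIN\username form
    "s3cret",                # password
    "/protected/",           # authurl: any NTLM-protected URL on the server
    "intranet.example.com",  # remaining args go to HTTPSConnectionPool (host, ...)
    port=443,
)
response = pool.urlopen("GET", "/protected/report")
print(response.status)
```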
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/pyopenssl.py ADDED
@@ -0,0 +1,511 @@
1
+ """
2
+ TLS with SNI_-support for Python 2. Follow these instructions if you would
3
+ like to verify TLS certificates in Python 2. Note, the default libraries do
4
+ *not* do certificate checking; you need to do additional work to validate
5
+ certificates yourself.
6
+
7
+ This needs the following packages installed:
8
+
9
+ * `pyOpenSSL`_ (tested with 16.0.0)
10
+ * `cryptography`_ (minimum 1.3.4, from pyopenssl)
11
+ * `idna`_ (minimum 2.0, from cryptography)
12
+
13
+ However, pyopenssl depends on cryptography, which depends on idna, so while we
14
+ use all three directly here we end up having relatively few packages required.
15
+
16
+ You can install them with the following command:
17
+
18
+ .. code-block:: bash
19
+
20
+ $ python -m pip install pyopenssl cryptography idna
21
+
22
+ To activate certificate checking, call
23
+ :func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
24
+ before you begin making HTTP requests. This can be done in a ``sitecustomize``
25
+ module, or at any other time before your application begins using ``urllib3``,
26
+ like this:
27
+
28
+ .. code-block:: python
29
+
30
+ try:
31
+ import pip._vendor.urllib3.contrib.pyopenssl as pyopenssl
32
+ pyopenssl.inject_into_urllib3()
33
+ except ImportError:
34
+ pass
35
+
36
+ Now you can use :mod:`urllib3` as you normally would, and it will support SNI
37
+ when the required modules are installed.
38
+
39
+ Activating this module also has the positive side effect of disabling SSL/TLS
40
+ compression in Python 2 (see `CRIME attack`_).
41
+
42
+ .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
43
+ .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
44
+ .. _pyopenssl: https://www.pyopenssl.org
45
+ .. _cryptography: https://cryptography.io
46
+ .. _idna: https://github.com/kjd/idna
47
+ """
48
+ from __future__ import absolute_import
49
+
50
+ import OpenSSL.SSL
51
+ from cryptography import x509
52
+ from cryptography.hazmat.backends.openssl import backend as openssl_backend
53
+ from cryptography.hazmat.backends.openssl.x509 import _Certificate
54
+
55
+ try:
56
+ from cryptography.x509 import UnsupportedExtension
57
+ except ImportError:
58
+ # UnsupportedExtension is gone in cryptography >= 2.1.0
59
+ class UnsupportedExtension(Exception):
60
+ pass
61
+
62
+
63
+ from io import BytesIO
64
+ from socket import error as SocketError
65
+ from socket import timeout
66
+
67
+ try: # Platform-specific: Python 2
68
+ from socket import _fileobject
69
+ except ImportError: # Platform-specific: Python 3
70
+ _fileobject = None
71
+ from ..packages.backports.makefile import backport_makefile
72
+
73
+ import logging
74
+ import ssl
75
+ import sys
76
+
77
+ from .. import util
78
+ from ..packages import six
79
+ from ..util.ssl_ import PROTOCOL_TLS_CLIENT
80
+
81
+ __all__ = ["inject_into_urllib3", "extract_from_urllib3"]
82
+
83
+ # SNI always works.
84
+ HAS_SNI = True
85
+
86
+ # Map from urllib3 to PyOpenSSL compatible parameter-values.
87
+ _openssl_versions = {
88
+ util.PROTOCOL_TLS: OpenSSL.SSL.SSLv23_METHOD,
89
+ PROTOCOL_TLS_CLIENT: OpenSSL.SSL.SSLv23_METHOD,
90
+ ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
91
+ }
92
+
93
+ if hasattr(ssl, "PROTOCOL_SSLv3") and hasattr(OpenSSL.SSL, "SSLv3_METHOD"):
94
+ _openssl_versions[ssl.PROTOCOL_SSLv3] = OpenSSL.SSL.SSLv3_METHOD
95
+
96
+ if hasattr(ssl, "PROTOCOL_TLSv1_1") and hasattr(OpenSSL.SSL, "TLSv1_1_METHOD"):
97
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
98
+
99
+ if hasattr(ssl, "PROTOCOL_TLSv1_2") and hasattr(OpenSSL.SSL, "TLSv1_2_METHOD"):
100
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
101
+
102
+
103
+ _stdlib_to_openssl_verify = {
104
+ ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
105
+ ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
106
+ ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
107
+ + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
108
+ }
109
+ _openssl_to_stdlib_verify = dict((v, k) for k, v in _stdlib_to_openssl_verify.items())
110
+
111
+ # OpenSSL will only write 16K at a time
112
+ SSL_WRITE_BLOCKSIZE = 16384
113
+
114
+ orig_util_HAS_SNI = util.HAS_SNI
115
+ orig_util_SSLContext = util.ssl_.SSLContext
116
+
117
+
118
+ log = logging.getLogger(__name__)
119
+
120
+
121
+ def inject_into_urllib3():
122
+ "Monkey-patch urllib3 with PyOpenSSL-backed SSL-support."
123
+
124
+ _validate_dependencies_met()
125
+
126
+ util.SSLContext = PyOpenSSLContext
127
+ util.ssl_.SSLContext = PyOpenSSLContext
128
+ util.HAS_SNI = HAS_SNI
129
+ util.ssl_.HAS_SNI = HAS_SNI
130
+ util.IS_PYOPENSSL = True
131
+ util.ssl_.IS_PYOPENSSL = True
132
+
133
+
134
+ def extract_from_urllib3():
135
+ "Undo monkey-patching by :func:`inject_into_urllib3`."
136
+
137
+ util.SSLContext = orig_util_SSLContext
138
+ util.ssl_.SSLContext = orig_util_SSLContext
139
+ util.HAS_SNI = orig_util_HAS_SNI
140
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
141
+ util.IS_PYOPENSSL = False
142
+ util.ssl_.IS_PYOPENSSL = False
143
+
144
+
145
+ def _validate_dependencies_met():
146
+ """
147
+ Verifies that PyOpenSSL's package-level dependencies have been met.
148
+ Throws `ImportError` if they are not met.
149
+ """
150
+ # Method added in `cryptography==1.1`; not available in older versions
151
+ from cryptography.x509.extensions import Extensions
152
+
153
+ if getattr(Extensions, "get_extension_for_class", None) is None:
154
+ raise ImportError(
155
+ "'cryptography' module missing required functionality. "
156
+ "Try upgrading to v1.3.4 or newer."
157
+ )
158
+
159
+ # pyOpenSSL 0.14 and above use cryptography for OpenSSL bindings. The _x509
160
+ # attribute is only present on those versions.
161
+ from OpenSSL.crypto import X509
162
+
163
+ x509 = X509()
164
+ if getattr(x509, "_x509", None) is None:
165
+ raise ImportError(
166
+ "'pyOpenSSL' module missing required functionality. "
167
+ "Try upgrading to v0.14 or newer."
168
+ )
169
+
170
+
171
+ def _dnsname_to_stdlib(name):
172
+ """
173
+ Converts a dNSName SubjectAlternativeName field to the form used by the
174
+ standard library on the given Python version.
175
+
176
+ Cryptography produces a dNSName as a unicode string that was idna-decoded
177
+ from ASCII bytes. We need to idna-encode that string to get it back, and
178
+ then on Python 3 we also need to convert to unicode via UTF-8 (the stdlib
179
+ uses PyUnicode_FromStringAndSize on it, which decodes via UTF-8).
180
+
181
+ If the name cannot be idna-encoded then we return None signalling that
182
+ the name given should be skipped.
183
+ """
184
+
185
+ def idna_encode(name):
186
+ """
187
+ Borrowed wholesale from the Python Cryptography Project. It turns out
188
+ that we can't just safely call `idna.encode`: it can explode for
189
+ wildcard names. This avoids that problem.
190
+ """
191
+ from pip._vendor import idna
192
+
193
+ try:
194
+ for prefix in [u"*.", u"."]:
195
+ if name.startswith(prefix):
196
+ name = name[len(prefix) :]
197
+ return prefix.encode("ascii") + idna.encode(name)
198
+ return idna.encode(name)
199
+ except idna.core.IDNAError:
200
+ return None
201
+
202
+ # Don't send IPv6 addresses through the IDNA encoder.
203
+ if ":" in name:
204
+ return name
205
+
206
+ name = idna_encode(name)
207
+ if name is None:
208
+ return None
209
+ elif sys.version_info >= (3, 0):
210
+ name = name.decode("utf-8")
211
+ return name
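
Editor's note: the wildcard handling in `idna_encode` can be exercised on its own with the `idna` package (the same library the vendored import points at); a sketch:

```python
import idna

def wildcard_safe_idna_encode(name):
    """Strip a '*.' or '.' prefix before IDNA-encoding, as idna_encode above does."""
    try:
        for prefix in (u"*.", u"."):
            if name.startswith(prefix):
                return prefix.encode("ascii") + idna.encode(name[len(prefix):])
        return idna.encode(name)
    except idna.core.IDNAError:
        return None  # signal "skip this name", matching the original behaviour

print(wildcard_safe_idna_encode(u"*.bücher.example"))  # b'*.xn--bcher-kva.example'
```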
212
+
213
+
214
+ def get_subj_alt_name(peer_cert):
215
+ """
216
+ Given a PyOpenSSL certificate, provides all the subject alternative names.
217
+ """
218
+ # Pass the cert to cryptography, which has much better APIs for this.
219
+ if hasattr(peer_cert, "to_cryptography"):
220
+ cert = peer_cert.to_cryptography()
221
+ else:
222
+ # This is technically using private APIs, but should work across all
223
+ # relevant versions before PyOpenSSL got a proper API for this.
224
+ cert = _Certificate(openssl_backend, peer_cert._x509)
225
+
226
+ # We want to find the SAN extension. Ask Cryptography to locate it (it's
227
+ # faster than looping in Python)
228
+ try:
229
+ ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
230
+ except x509.ExtensionNotFound:
231
+ # No such extension, return the empty list.
232
+ return []
233
+ except (
234
+ x509.DuplicateExtension,
235
+ UnsupportedExtension,
236
+ x509.UnsupportedGeneralNameType,
237
+ UnicodeError,
238
+ ) as e:
239
+ # A problem has been found with the quality of the certificate. Assume
240
+ # no SAN field is present.
241
+ log.warning(
242
+ "A problem was encountered with the certificate that prevented "
243
+ "urllib3 from finding the SubjectAlternativeName field. This can "
244
+ "affect certificate validation. The error was %s",
245
+ e,
246
+ )
247
+ return []
248
+
249
+ # We want to return dNSName and iPAddress fields. We need to cast the IPs
250
+ # back to strings because the match_hostname function wants them as
251
+ # strings.
252
+ # Sadly the DNS names need to be idna encoded and then, on Python 3, UTF-8
253
+ # decoded. This is pretty frustrating, but that's what the standard library
254
+ # does with certificates, and so we need to attempt to do the same.
255
+ # We also want to skip over names which cannot be idna encoded.
256
+ names = [
257
+ ("DNS", name)
258
+ for name in map(_dnsname_to_stdlib, ext.get_values_for_type(x509.DNSName))
259
+ if name is not None
260
+ ]
261
+ names.extend(
262
+ ("IP Address", str(name)) for name in ext.get_values_for_type(x509.IPAddress)
263
+ )
264
+
265
+ return names
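
Editor's note: the same extraction can be done directly with `cryptography` when you have a PEM file on disk rather than a live PyOpenSSL handle; a sketch (the path argument is whatever certificate you want to inspect):

```python
from cryptography import x509
from cryptography.hazmat.backends import default_backend

def sans_from_pem_file(path):
    """Return ('DNS', name) / ('IP Address', ip) pairs for a PEM certificate."""
    with open(path, "rb") as f:
        cert = x509.load_pem_x509_certificate(f.read(), default_backend())
    try:
        ext = cert.extensions.get_extension_for_class(x509.SubjectAlternativeName).value
    except x509.ExtensionNotFound:
        return []
    names = [("DNS", name) for name in ext.get_values_for_type(x509.DNSName)]
    names.extend(
        ("IP Address", str(ip)) for ip in ext.get_values_for_type(x509.IPAddress)
    )
    return names
```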
266
+
267
+
268
+ class WrappedSocket(object):
269
+ """API-compatibility wrapper for Python OpenSSL's Connection-class.
270
+
271
+ Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
272
+ collector of pypy.
273
+ """
274
+
275
+ def __init__(self, connection, socket, suppress_ragged_eofs=True):
276
+ self.connection = connection
277
+ self.socket = socket
278
+ self.suppress_ragged_eofs = suppress_ragged_eofs
279
+ self._makefile_refs = 0
280
+ self._closed = False
281
+
282
+ def fileno(self):
283
+ return self.socket.fileno()
284
+
285
+ # Copy-pasted from Python 3.5 source code
286
+ def _decref_socketios(self):
287
+ if self._makefile_refs > 0:
288
+ self._makefile_refs -= 1
289
+ if self._closed:
290
+ self.close()
291
+
292
+ def recv(self, *args, **kwargs):
293
+ try:
294
+ data = self.connection.recv(*args, **kwargs)
295
+ except OpenSSL.SSL.SysCallError as e:
296
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
297
+ return b""
298
+ else:
299
+ raise SocketError(str(e))
300
+ except OpenSSL.SSL.ZeroReturnError:
301
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
302
+ return b""
303
+ else:
304
+ raise
305
+ except OpenSSL.SSL.WantReadError:
306
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
307
+ raise timeout("The read operation timed out")
308
+ else:
309
+ return self.recv(*args, **kwargs)
310
+
311
+ # TLS 1.3 post-handshake authentication
312
+ except OpenSSL.SSL.Error as e:
313
+ raise ssl.SSLError("read error: %r" % e)
314
+ else:
315
+ return data
316
+
317
+ def recv_into(self, *args, **kwargs):
318
+ try:
319
+ return self.connection.recv_into(*args, **kwargs)
320
+ except OpenSSL.SSL.SysCallError as e:
321
+ if self.suppress_ragged_eofs and e.args == (-1, "Unexpected EOF"):
322
+ return 0
323
+ else:
324
+ raise SocketError(str(e))
325
+ except OpenSSL.SSL.ZeroReturnError:
326
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
327
+ return 0
328
+ else:
329
+ raise
330
+ except OpenSSL.SSL.WantReadError:
331
+ if not util.wait_for_read(self.socket, self.socket.gettimeout()):
332
+ raise timeout("The read operation timed out")
333
+ else:
334
+ return self.recv_into(*args, **kwargs)
335
+
336
+ # TLS 1.3 post-handshake authentication
337
+ except OpenSSL.SSL.Error as e:
338
+ raise ssl.SSLError("read error: %r" % e)
339
+
340
+ def settimeout(self, timeout):
341
+ return self.socket.settimeout(timeout)
342
+
343
+ def _send_until_done(self, data):
344
+ while True:
345
+ try:
346
+ return self.connection.send(data)
347
+ except OpenSSL.SSL.WantWriteError:
348
+ if not util.wait_for_write(self.socket, self.socket.gettimeout()):
349
+ raise timeout()
350
+ continue
351
+ except OpenSSL.SSL.SysCallError as e:
352
+ raise SocketError(str(e))
353
+
354
+ def sendall(self, data):
355
+ total_sent = 0
356
+ while total_sent < len(data):
357
+ sent = self._send_until_done(
358
+ data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE]
359
+ )
360
+ total_sent += sent
361
+
362
+ def shutdown(self):
363
+ # FIXME rethrow compatible exceptions should we ever use this
364
+ self.connection.shutdown()
365
+
366
+ def close(self):
367
+ if self._makefile_refs < 1:
368
+ try:
369
+ self._closed = True
370
+ return self.connection.close()
371
+ except OpenSSL.SSL.Error:
372
+ return
373
+ else:
374
+ self._makefile_refs -= 1
375
+
376
+ def getpeercert(self, binary_form=False):
377
+ x509 = self.connection.get_peer_certificate()
378
+
379
+ if not x509:
380
+ return x509
381
+
382
+ if binary_form:
383
+ return OpenSSL.crypto.dump_certificate(OpenSSL.crypto.FILETYPE_ASN1, x509)
384
+
385
+ return {
386
+ "subject": ((("commonName", x509.get_subject().CN),),),
387
+ "subjectAltName": get_subj_alt_name(x509),
388
+ }
389
+
390
+ def version(self):
391
+ return self.connection.get_protocol_version_name()
392
+
393
+ def _reuse(self):
394
+ self._makefile_refs += 1
395
+
396
+ def _drop(self):
397
+ if self._makefile_refs < 1:
398
+ self.close()
399
+ else:
400
+ self._makefile_refs -= 1
401
+
402
+
403
+ if _fileobject: # Platform-specific: Python 2
404
+
405
+ def makefile(self, mode, bufsize=-1):
406
+ self._makefile_refs += 1
407
+ return _fileobject(self, mode, bufsize, close=True)
408
+
409
+
410
+ else: # Platform-specific: Python 3
411
+ makefile = backport_makefile
412
+
413
+ WrappedSocket.makefile = makefile
414
+
415
+
416
+ class PyOpenSSLContext(object):
417
+ """
418
+ I am a wrapper class for the PyOpenSSL ``Context`` object. I am responsible
419
+ for translating the interface of the standard library ``SSLContext`` object
420
+ to calls into PyOpenSSL.
421
+ """
422
+
423
+ def __init__(self, protocol):
424
+ self.protocol = _openssl_versions[protocol]
425
+ self._ctx = OpenSSL.SSL.Context(self.protocol)
426
+ self._options = 0
427
+ self.check_hostname = False
428
+
429
+ @property
430
+ def options(self):
431
+ return self._options
432
+
433
+ @options.setter
434
+ def options(self, value):
435
+ self._options = value
436
+ self._ctx.set_options(value)
437
+
438
+ @property
439
+ def verify_mode(self):
440
+ return _openssl_to_stdlib_verify[self._ctx.get_verify_mode()]
441
+
442
+ @verify_mode.setter
443
+ def verify_mode(self, value):
444
+ self._ctx.set_verify(_stdlib_to_openssl_verify[value], _verify_callback)
445
+
446
+ def set_default_verify_paths(self):
447
+ self._ctx.set_default_verify_paths()
448
+
449
+ def set_ciphers(self, ciphers):
450
+ if isinstance(ciphers, six.text_type):
451
+ ciphers = ciphers.encode("utf-8")
452
+ self._ctx.set_cipher_list(ciphers)
453
+
454
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
455
+ if cafile is not None:
456
+ cafile = cafile.encode("utf-8")
457
+ if capath is not None:
458
+ capath = capath.encode("utf-8")
459
+ try:
460
+ self._ctx.load_verify_locations(cafile, capath)
461
+ if cadata is not None:
462
+ self._ctx.load_verify_locations(BytesIO(cadata))
463
+ except OpenSSL.SSL.Error as e:
464
+ raise ssl.SSLError("unable to load trusted certificates: %r" % e)
465
+
466
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
467
+ self._ctx.use_certificate_chain_file(certfile)
468
+ if password is not None:
469
+ if not isinstance(password, six.binary_type):
470
+ password = password.encode("utf-8")
471
+ self._ctx.set_passwd_cb(lambda *_: password)
472
+ self._ctx.use_privatekey_file(keyfile or certfile)
473
+
474
+ def set_alpn_protocols(self, protocols):
475
+ protocols = [six.ensure_binary(p) for p in protocols]
476
+ return self._ctx.set_alpn_protos(protocols)
477
+
478
+ def wrap_socket(
479
+ self,
480
+ sock,
481
+ server_side=False,
482
+ do_handshake_on_connect=True,
483
+ suppress_ragged_eofs=True,
484
+ server_hostname=None,
485
+ ):
486
+ cnx = OpenSSL.SSL.Connection(self._ctx, sock)
487
+
488
+ if isinstance(server_hostname, six.text_type): # Platform-specific: Python 3
489
+ server_hostname = server_hostname.encode("utf-8")
490
+
491
+ if server_hostname is not None:
492
+ cnx.set_tlsext_host_name(server_hostname)
493
+
494
+ cnx.set_connect_state()
495
+
496
+ while True:
497
+ try:
498
+ cnx.do_handshake()
499
+ except OpenSSL.SSL.WantReadError:
500
+ if not util.wait_for_read(sock, sock.gettimeout()):
501
+ raise timeout("select timed out")
502
+ continue
503
+ except OpenSSL.SSL.Error as e:
504
+ raise ssl.SSLError("bad handshake: %r" % e)
505
+ break
506
+
507
+ return WrappedSocket(cnx, sock)
508
+
509
+
510
+ def _verify_callback(cnx, x509, err_no, err_depth, return_code):
511
+ return err_no == 0
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/securetransport.py ADDED
@@ -0,0 +1,922 @@
1
+ """
2
+ SecureTransport support for urllib3 via ctypes.
3
+
4
+ This makes platform-native TLS available to urllib3 users on macOS without the
5
+ use of a compiler. This is an important feature because the Python Package
6
+ Index is moving to become a TLSv1.2-or-higher server, and the default OpenSSL
7
+ that ships with macOS is not capable of doing TLSv1.2. The only way to resolve
8
+ this is to give macOS users an alternative solution to the problem, and that
9
+ solution is to use SecureTransport.
10
+
11
+ We use ctypes here because this solution must not require a compiler. That's
12
+ because pip is not allowed to require a compiler either.
13
+
14
+ This is not intended to be a seriously long-term solution to this problem.
15
+ The hope is that PEP 543 will eventually solve this issue for us, at which
16
+ point we can retire this contrib module. But in the short term, we need to
17
+ solve the impending tire fire that is Python on Mac without this kind of
18
+ contrib module. So...here we are.
19
+
20
+ To use this module, simply import and inject it::
21
+
22
+ import pip._vendor.urllib3.contrib.securetransport as securetransport
23
+ securetransport.inject_into_urllib3()
24
+
25
+ Happy TLSing!
26
+
27
+ This code is a bastardised version of the code found in Will Bond's oscrypto
28
+ library. An enormous debt is owed to him for blazing this trail for us. For
29
+ that reason, this code should be considered to be covered both by urllib3's
30
+ license and by oscrypto's:
31
+
32
+ .. code-block::
33
+
34
+ Copyright (c) 2015-2016 Will Bond <[email protected]>
35
+
36
+ Permission is hereby granted, free of charge, to any person obtaining a
37
+ copy of this software and associated documentation files (the "Software"),
38
+ to deal in the Software without restriction, including without limitation
39
+ the rights to use, copy, modify, merge, publish, distribute, sublicense,
40
+ and/or sell copies of the Software, and to permit persons to whom the
41
+ Software is furnished to do so, subject to the following conditions:
42
+
43
+ The above copyright notice and this permission notice shall be included in
44
+ all copies or substantial portions of the Software.
45
+
46
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
47
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
48
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
49
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
50
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
51
+ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
52
+ DEALINGS IN THE SOFTWARE.
53
+ """
54
+ from __future__ import absolute_import
55
+
56
+ import contextlib
57
+ import ctypes
58
+ import errno
59
+ import os.path
60
+ import shutil
61
+ import socket
62
+ import ssl
63
+ import struct
64
+ import threading
65
+ import weakref
66
+
67
+ from pip._vendor import six
68
+
69
+ from .. import util
70
+ from ..util.ssl_ import PROTOCOL_TLS_CLIENT
71
+ from ._securetransport.bindings import CoreFoundation, Security, SecurityConst
72
+ from ._securetransport.low_level import (
73
+ _assert_no_error,
74
+ _build_tls_unknown_ca_alert,
75
+ _cert_array_from_pem,
76
+ _create_cfstring_array,
77
+ _load_client_cert_chain,
78
+ _temporary_keychain,
79
+ )
80
+
81
+ try: # Platform-specific: Python 2
82
+ from socket import _fileobject
83
+ except ImportError: # Platform-specific: Python 3
84
+ _fileobject = None
85
+ from ..packages.backports.makefile import backport_makefile
86
+
87
+ __all__ = ["inject_into_urllib3", "extract_from_urllib3"]
88
+
89
+ # SNI always works
90
+ HAS_SNI = True
91
+
92
+ orig_util_HAS_SNI = util.HAS_SNI
93
+ orig_util_SSLContext = util.ssl_.SSLContext
94
+
95
+ # This dictionary is used by the read callback to obtain a handle to the
96
+ # calling wrapped socket. This is a pretty silly approach, but for now it'll
97
+ # do. I feel like I should be able to smuggle a handle to the wrapped socket
98
+ # directly in the SSLConnectionRef, but for now this approach will work I
99
+ # guess.
100
+ #
101
+ # We need to lock around this structure for inserts, but we don't do it for
102
+ # reads/writes in the callbacks. The reasoning here goes as follows:
103
+ #
104
+ # 1. It is not possible to call into the callbacks before the dictionary is
105
+ # populated, so once in the callback the id must be in the dictionary.
106
+ # 2. The callbacks don't mutate the dictionary, they only read from it, and
107
+ # so cannot conflict with any of the insertions.
108
+ #
109
+ # This is good: if we had to lock in the callbacks we'd drastically slow down
110
+ # the performance of this code.
111
+ _connection_refs = weakref.WeakValueDictionary()
112
+ _connection_ref_lock = threading.Lock()
113
+
114
+ # Limit writes to 16kB. This is OpenSSL's limit, but we'll cargo-cult it over
115
+ # for no better reason than we need *a* limit, and this one is right there.
116
+ SSL_WRITE_BLOCKSIZE = 16384
117
+
118
+ # This is our equivalent of util.ssl_.DEFAULT_CIPHERS, but expanded out to
119
+ # individual cipher suites. We need to do this because this is how
120
+ # SecureTransport wants them.
121
+ CIPHER_SUITES = [
122
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,
123
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,
124
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,
125
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256,
126
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_CHACHA20_POLY1305_SHA256,
127
+ SecurityConst.TLS_ECDHE_RSA_WITH_CHACHA20_POLY1305_SHA256,
128
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_GCM_SHA384,
129
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_GCM_SHA256,
130
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA384,
131
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_256_CBC_SHA,
132
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA256,
133
+ SecurityConst.TLS_ECDHE_ECDSA_WITH_AES_128_CBC_SHA,
134
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA384,
135
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_256_CBC_SHA,
136
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA256,
137
+ SecurityConst.TLS_ECDHE_RSA_WITH_AES_128_CBC_SHA,
138
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA256,
139
+ SecurityConst.TLS_DHE_RSA_WITH_AES_256_CBC_SHA,
140
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA256,
141
+ SecurityConst.TLS_DHE_RSA_WITH_AES_128_CBC_SHA,
142
+ SecurityConst.TLS_AES_256_GCM_SHA384,
143
+ SecurityConst.TLS_AES_128_GCM_SHA256,
144
+ SecurityConst.TLS_RSA_WITH_AES_256_GCM_SHA384,
145
+ SecurityConst.TLS_RSA_WITH_AES_128_GCM_SHA256,
146
+ SecurityConst.TLS_AES_128_CCM_8_SHA256,
147
+ SecurityConst.TLS_AES_128_CCM_SHA256,
148
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA256,
149
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA256,
150
+ SecurityConst.TLS_RSA_WITH_AES_256_CBC_SHA,
151
+ SecurityConst.TLS_RSA_WITH_AES_128_CBC_SHA,
152
+ ]
153
+
154
+ # Basically this is simple: for PROTOCOL_SSLv23 we turn it into a low of
155
+ # TLSv1 and a high of TLSv1.2. For everything else, we pin to that version.
156
+ # TLSv1 to 1.2 are supported on macOS 10.8+
157
+ _protocol_to_min_max = {
158
+ util.PROTOCOL_TLS: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
159
+ PROTOCOL_TLS_CLIENT: (SecurityConst.kTLSProtocol1, SecurityConst.kTLSProtocol12),
160
+ }
161
+
162
+ if hasattr(ssl, "PROTOCOL_SSLv2"):
163
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv2] = (
164
+ SecurityConst.kSSLProtocol2,
165
+ SecurityConst.kSSLProtocol2,
166
+ )
167
+ if hasattr(ssl, "PROTOCOL_SSLv3"):
168
+ _protocol_to_min_max[ssl.PROTOCOL_SSLv3] = (
169
+ SecurityConst.kSSLProtocol3,
170
+ SecurityConst.kSSLProtocol3,
171
+ )
172
+ if hasattr(ssl, "PROTOCOL_TLSv1"):
173
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1] = (
174
+ SecurityConst.kTLSProtocol1,
175
+ SecurityConst.kTLSProtocol1,
176
+ )
177
+ if hasattr(ssl, "PROTOCOL_TLSv1_1"):
178
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_1] = (
179
+ SecurityConst.kTLSProtocol11,
180
+ SecurityConst.kTLSProtocol11,
181
+ )
182
+ if hasattr(ssl, "PROTOCOL_TLSv1_2"):
183
+ _protocol_to_min_max[ssl.PROTOCOL_TLSv1_2] = (
184
+ SecurityConst.kTLSProtocol12,
185
+ SecurityConst.kTLSProtocol12,
186
+ )
187
+
188
+
189
+ def inject_into_urllib3():
190
+ """
191
+ Monkey-patch urllib3 with SecureTransport-backed SSL-support.
192
+ """
193
+ util.SSLContext = SecureTransportContext
194
+ util.ssl_.SSLContext = SecureTransportContext
195
+ util.HAS_SNI = HAS_SNI
196
+ util.ssl_.HAS_SNI = HAS_SNI
197
+ util.IS_SECURETRANSPORT = True
198
+ util.ssl_.IS_SECURETRANSPORT = True
199
+
200
+
201
+ def extract_from_urllib3():
202
+ """
203
+ Undo monkey-patching by :func:`inject_into_urllib3`.
204
+ """
205
+ util.SSLContext = orig_util_SSLContext
206
+ util.ssl_.SSLContext = orig_util_SSLContext
207
+ util.HAS_SNI = orig_util_HAS_SNI
208
+ util.ssl_.HAS_SNI = orig_util_HAS_SNI
209
+ util.IS_SECURETRANSPORT = False
210
+ util.ssl_.IS_SECURETRANSPORT = False
211
+
212
+
213
+ def _read_callback(connection_id, data_buffer, data_length_pointer):
214
+ """
215
+ SecureTransport read callback. This is called by ST to request that data
216
+ be returned from the socket.
217
+ """
218
+ wrapped_socket = None
219
+ try:
220
+ wrapped_socket = _connection_refs.get(connection_id)
221
+ if wrapped_socket is None:
222
+ return SecurityConst.errSSLInternal
223
+ base_socket = wrapped_socket.socket
224
+
225
+ requested_length = data_length_pointer[0]
226
+
227
+ timeout = wrapped_socket.gettimeout()
228
+ error = None
229
+ read_count = 0
230
+
231
+ try:
232
+ while read_count < requested_length:
233
+ if timeout is None or timeout >= 0:
234
+ if not util.wait_for_read(base_socket, timeout):
235
+ raise socket.error(errno.EAGAIN, "timed out")
236
+
237
+ remaining = requested_length - read_count
238
+ buffer = (ctypes.c_char * remaining).from_address(
239
+ data_buffer + read_count
240
+ )
241
+ chunk_size = base_socket.recv_into(buffer, remaining)
242
+ read_count += chunk_size
243
+ if not chunk_size:
244
+ if not read_count:
245
+ return SecurityConst.errSSLClosedGraceful
246
+ break
247
+ except (socket.error) as e:
248
+ error = e.errno
249
+
250
+ if error is not None and error != errno.EAGAIN:
251
+ data_length_pointer[0] = read_count
252
+ if error == errno.ECONNRESET or error == errno.EPIPE:
253
+ return SecurityConst.errSSLClosedAbort
254
+ raise
255
+
256
+ data_length_pointer[0] = read_count
257
+
258
+ if read_count != requested_length:
259
+ return SecurityConst.errSSLWouldBlock
260
+
261
+ return 0
262
+ except Exception as e:
263
+ if wrapped_socket is not None:
264
+ wrapped_socket._exception = e
265
+ return SecurityConst.errSSLInternal
266
+
267
+
268
+ def _write_callback(connection_id, data_buffer, data_length_pointer):
269
+ """
270
+ SecureTransport write callback. This is called by ST to request that data
271
+ actually be sent on the network.
272
+ """
273
+ wrapped_socket = None
274
+ try:
275
+ wrapped_socket = _connection_refs.get(connection_id)
276
+ if wrapped_socket is None:
277
+ return SecurityConst.errSSLInternal
278
+ base_socket = wrapped_socket.socket
279
+
280
+ bytes_to_write = data_length_pointer[0]
281
+ data = ctypes.string_at(data_buffer, bytes_to_write)
282
+
283
+ timeout = wrapped_socket.gettimeout()
284
+ error = None
285
+ sent = 0
286
+
287
+ try:
288
+ while sent < bytes_to_write:
289
+ if timeout is None or timeout >= 0:
290
+ if not util.wait_for_write(base_socket, timeout):
291
+ raise socket.error(errno.EAGAIN, "timed out")
292
+ chunk_sent = base_socket.send(data)
293
+ sent += chunk_sent
294
+
295
+ # This has some needless copying here, but I'm not sure there's
296
+ # much value in optimising this data path.
297
+ data = data[chunk_sent:]
298
+ except (socket.error) as e:
299
+ error = e.errno
300
+
301
+ if error is not None and error != errno.EAGAIN:
302
+ data_length_pointer[0] = sent
303
+ if error == errno.ECONNRESET or error == errno.EPIPE:
304
+ return SecurityConst.errSSLClosedAbort
305
+ raise
306
+
307
+ data_length_pointer[0] = sent
308
+
309
+ if sent != bytes_to_write:
310
+ return SecurityConst.errSSLWouldBlock
311
+
312
+ return 0
313
+ except Exception as e:
314
+ if wrapped_socket is not None:
315
+ wrapped_socket._exception = e
316
+ return SecurityConst.errSSLInternal
317
+
318
+
319
+ # We need to keep these two objects references alive: if they get GC'd while
320
+ # in use then SecureTransport could attempt to call a function that is in freed
321
+ # memory. That would be...uh...bad. Yeah, that's the word. Bad.
322
+ _read_callback_pointer = Security.SSLReadFunc(_read_callback)
323
+ _write_callback_pointer = Security.SSLWriteFunc(_write_callback)
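
Editor's note: the id-registry pattern described in the comments above, reduced to plain Python (the class and function names here are invented for the sketch):

```python
import threading
import weakref

_refs = weakref.WeakValueDictionary()
_refs_lock = threading.Lock()

class FakeWrappedSocket(object):
    """Stand-in for WrappedSocket; it only needs to be weak-referenceable."""

def register(obj):
    """Insert under the lock and hand back the integer the callbacks receive."""
    with _refs_lock:
        handle = id(obj)
        _refs[handle] = obj
    return handle

sock = FakeWrappedSocket()
handle = register(sock)
print(_refs.get(handle) is sock)   # True: this is the lookup a callback performs
del sock                           # drop the only strong reference...
print(_refs.get(handle))           # ...and the weak entry disappears (None on CPython)
```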
324
+
325
+
326
+ class WrappedSocket(object):
327
+ """
328
+ API-compatibility wrapper for Python's OpenSSL wrapped socket object.
329
+
330
+ Note: _makefile_refs, _drop(), and _reuse() are needed for the garbage
331
+ collector of PyPy.
332
+ """
333
+
334
+ def __init__(self, socket):
335
+ self.socket = socket
336
+ self.context = None
337
+ self._makefile_refs = 0
338
+ self._closed = False
339
+ self._exception = None
340
+ self._keychain = None
341
+ self._keychain_dir = None
342
+ self._client_cert_chain = None
343
+
344
+ # We save off the previously-configured timeout and then set it to
345
+ # zero. This is done because we use select and friends to handle the
346
+ # timeouts, but if we leave the timeout set on the lower socket then
347
+ # Python will "kindly" call select on that socket again for us. Avoid
348
+ # that by forcing the timeout to zero.
349
+ self._timeout = self.socket.gettimeout()
350
+ self.socket.settimeout(0)
351
+
352
+ @contextlib.contextmanager
353
+ def _raise_on_error(self):
354
+ """
355
+ A context manager that can be used to wrap calls that do I/O from
356
+ SecureTransport. If any of the I/O callbacks hit an exception, this
357
+ context manager will correctly propagate the exception after the fact.
358
+ This avoids silently swallowing those exceptions.
359
+
360
+ It also correctly forces the socket closed.
361
+ """
362
+ self._exception = None
363
+
364
+ # We explicitly don't catch around this yield because in the unlikely
365
+ # event that an exception was hit in the block we don't want to swallow
366
+ # it.
367
+ yield
368
+ if self._exception is not None:
369
+ exception, self._exception = self._exception, None
370
+ self.close()
371
+ raise exception
372
+
373
+ def _set_ciphers(self):
374
+ """
375
+ Sets up the allowed ciphers. By default this matches the set in
376
+ util.ssl_.DEFAULT_CIPHERS, at least as supported by macOS. This is done
377
+ with a custom list and doesn't allow changing at this time, mostly because parsing
378
+ OpenSSL cipher strings is going to be a freaking nightmare.
379
+ """
380
+ ciphers = (Security.SSLCipherSuite * len(CIPHER_SUITES))(*CIPHER_SUITES)
381
+ result = Security.SSLSetEnabledCiphers(
382
+ self.context, ciphers, len(CIPHER_SUITES)
383
+ )
384
+ _assert_no_error(result)
385
+
386
+ def _set_alpn_protocols(self, protocols):
387
+ """
388
+ Sets up the ALPN protocols on the context.
389
+ """
390
+ if not protocols:
391
+ return
392
+ protocols_arr = _create_cfstring_array(protocols)
393
+ try:
394
+ result = Security.SSLSetALPNProtocols(self.context, protocols_arr)
395
+ _assert_no_error(result)
396
+ finally:
397
+ CoreFoundation.CFRelease(protocols_arr)
398
+
399
+ def _custom_validate(self, verify, trust_bundle):
400
+ """
401
+ Called when we have set custom validation. We do this in two cases:
402
+ first, when cert validation is entirely disabled; and second, when
403
+ using a custom trust DB.
404
+ Raises an SSLError if the connection is not trusted.
405
+ """
406
+ # If we disabled cert validation, just say: cool.
407
+ if not verify:
408
+ return
409
+
410
+ successes = (
411
+ SecurityConst.kSecTrustResultUnspecified,
412
+ SecurityConst.kSecTrustResultProceed,
413
+ )
414
+ try:
415
+ trust_result = self._evaluate_trust(trust_bundle)
416
+ if trust_result in successes:
417
+ return
418
+ reason = "error code: %d" % (trust_result,)
419
+ except Exception as e:
420
+ # Do not trust on error
421
+ reason = "exception: %r" % (e,)
422
+
423
+ # SecureTransport does not send an alert, nor does it shut down the connection.
424
+ rec = _build_tls_unknown_ca_alert(self.version())
425
+ self.socket.sendall(rec)
426
+ # close the connection immediately
427
+ # l_onoff = 1, activate linger
428
+ # l_linger = 0, linger for 0 seconds
429
+ opts = struct.pack("ii", 1, 0)
430
+ self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER, opts)
431
+ self.close()
432
+ raise ssl.SSLError("certificate verify failed, %s" % reason)
433
+
434
+ def _evaluate_trust(self, trust_bundle):
435
+ # We want data in memory, so load it up.
436
+ if os.path.isfile(trust_bundle):
437
+ with open(trust_bundle, "rb") as f:
438
+ trust_bundle = f.read()
439
+
440
+ cert_array = None
441
+ trust = Security.SecTrustRef()
442
+
443
+ try:
444
+ # Get a CFArray that contains the certs we want.
445
+ cert_array = _cert_array_from_pem(trust_bundle)
446
+
447
+ # Ok, now the hard part. We want to get the SecTrustRef that ST has
448
+ # created for this connection, shove our CAs into it, tell ST to
449
+ # ignore everything else it knows, and then ask if it can build a
450
+ # chain. This is a buuuunch of code.
451
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
452
+ _assert_no_error(result)
453
+ if not trust:
454
+ raise ssl.SSLError("Failed to copy trust reference")
455
+
456
+ result = Security.SecTrustSetAnchorCertificates(trust, cert_array)
457
+ _assert_no_error(result)
458
+
459
+ result = Security.SecTrustSetAnchorCertificatesOnly(trust, True)
460
+ _assert_no_error(result)
461
+
462
+ trust_result = Security.SecTrustResultType()
463
+ result = Security.SecTrustEvaluate(trust, ctypes.byref(trust_result))
464
+ _assert_no_error(result)
465
+ finally:
466
+ if trust:
467
+ CoreFoundation.CFRelease(trust)
468
+
469
+ if cert_array is not None:
470
+ CoreFoundation.CFRelease(cert_array)
471
+
472
+ return trust_result.value
473
+
474
+ def handshake(
475
+ self,
476
+ server_hostname,
477
+ verify,
478
+ trust_bundle,
479
+ min_version,
480
+ max_version,
481
+ client_cert,
482
+ client_key,
483
+ client_key_passphrase,
484
+ alpn_protocols,
485
+ ):
486
+ """
487
+ Actually performs the TLS handshake. This is run automatically by
488
+ the wrapped socket, and shouldn't be needed in user code.
489
+ """
490
+ # First, we do the initial bits of connection setup. We need to create
491
+ # a context, set its I/O funcs, and set the connection reference.
492
+ self.context = Security.SSLCreateContext(
493
+ None, SecurityConst.kSSLClientSide, SecurityConst.kSSLStreamType
494
+ )
495
+ result = Security.SSLSetIOFuncs(
496
+ self.context, _read_callback_pointer, _write_callback_pointer
497
+ )
498
+ _assert_no_error(result)
499
+
500
+ # Here we need to compute the handle to use. We do this by taking the
501
+ # id of self modulo 2**31 - 1. If this is already in the dictionary, we
502
+ # just keep incrementing by one until we find a free space.
503
+ with _connection_ref_lock:
504
+ handle = id(self) % 2147483647
505
+ while handle in _connection_refs:
506
+ handle = (handle + 1) % 2147483647
507
+ _connection_refs[handle] = self
508
+
509
+ result = Security.SSLSetConnection(self.context, handle)
510
+ _assert_no_error(result)
511
+
512
+ # If we have a server hostname, we should set that too.
513
+ if server_hostname:
514
+ if not isinstance(server_hostname, bytes):
515
+ server_hostname = server_hostname.encode("utf-8")
516
+
517
+ result = Security.SSLSetPeerDomainName(
518
+ self.context, server_hostname, len(server_hostname)
519
+ )
520
+ _assert_no_error(result)
521
+
522
+ # Setup the ciphers.
523
+ self._set_ciphers()
524
+
525
+ # Setup the ALPN protocols.
526
+ self._set_alpn_protocols(alpn_protocols)
527
+
528
+ # Set the minimum and maximum TLS versions.
529
+ result = Security.SSLSetProtocolVersionMin(self.context, min_version)
530
+ _assert_no_error(result)
531
+
532
+ result = Security.SSLSetProtocolVersionMax(self.context, max_version)
533
+ _assert_no_error(result)
534
+
535
+ # If there's a trust DB, we need to use it. We do that by telling
536
+ # SecureTransport to break on server auth. We also do that if we don't
537
+ # want to validate the certs at all: we just won't actually do any
538
+ # authing in that case.
539
+ if not verify or trust_bundle is not None:
540
+ result = Security.SSLSetSessionOption(
541
+ self.context, SecurityConst.kSSLSessionOptionBreakOnServerAuth, True
542
+ )
543
+ _assert_no_error(result)
544
+
545
+ # If there's a client cert, we need to use it.
546
+ if client_cert:
547
+ self._keychain, self._keychain_dir = _temporary_keychain()
548
+ self._client_cert_chain = _load_client_cert_chain(
549
+ self._keychain, client_cert, client_key
550
+ )
551
+ result = Security.SSLSetCertificate(self.context, self._client_cert_chain)
552
+ _assert_no_error(result)
553
+
554
+ while True:
555
+ with self._raise_on_error():
556
+ result = Security.SSLHandshake(self.context)
557
+
558
+ if result == SecurityConst.errSSLWouldBlock:
559
+ raise socket.timeout("handshake timed out")
560
+ elif result == SecurityConst.errSSLServerAuthCompleted:
561
+ self._custom_validate(verify, trust_bundle)
562
+ continue
563
+ else:
564
+ _assert_no_error(result)
565
+ break
566
+
567
+ def fileno(self):
568
+ return self.socket.fileno()
569
+
570
+ # Copy-pasted from Python 3.5 source code
571
+ def _decref_socketios(self):
572
+ if self._makefile_refs > 0:
573
+ self._makefile_refs -= 1
574
+ if self._closed:
575
+ self.close()
576
+
577
+ def recv(self, bufsiz):
578
+ buffer = ctypes.create_string_buffer(bufsiz)
579
+ bytes_read = self.recv_into(buffer, bufsiz)
580
+ data = buffer[:bytes_read]
581
+ return data
582
+
583
+ def recv_into(self, buffer, nbytes=None):
584
+ # Read short on EOF.
585
+ if self._closed:
586
+ return 0
587
+
588
+ if nbytes is None:
589
+ nbytes = len(buffer)
590
+
591
+ buffer = (ctypes.c_char * nbytes).from_buffer(buffer)
592
+ processed_bytes = ctypes.c_size_t(0)
593
+
594
+ with self._raise_on_error():
595
+ result = Security.SSLRead(
596
+ self.context, buffer, nbytes, ctypes.byref(processed_bytes)
597
+ )
598
+
599
+ # There are some result codes that we want to treat as "not always
600
+ # errors". Specifically, those are errSSLWouldBlock,
601
+ # errSSLClosedGraceful, and errSSLClosedNoNotify.
602
+ if result == SecurityConst.errSSLWouldBlock:
603
+ # If we didn't process any bytes, then this was just a time out.
604
+ # However, we can get errSSLWouldBlock in situations when we *did*
605
+ # read some data, and in those cases we should just read "short"
606
+ # and return.
607
+ if processed_bytes.value == 0:
608
+ # Timed out, no data read.
609
+ raise socket.timeout("recv timed out")
610
+ elif result in (
611
+ SecurityConst.errSSLClosedGraceful,
612
+ SecurityConst.errSSLClosedNoNotify,
613
+ ):
614
+ # The remote peer has closed this connection. We should do so as
615
+ # well. Note that we don't actually return here because in
616
+ # principle this could actually be fired along with return data.
617
+ # It's unlikely though.
618
+ self.close()
619
+ else:
620
+ _assert_no_error(result)
621
+
622
+ # Ok, we read and probably succeeded. We should return whatever data
623
+ # was actually read.
624
+ return processed_bytes.value
625
+
626
+ def settimeout(self, timeout):
627
+ self._timeout = timeout
628
+
629
+ def gettimeout(self):
630
+ return self._timeout
631
+
632
+ def send(self, data):
633
+ processed_bytes = ctypes.c_size_t(0)
634
+
635
+ with self._raise_on_error():
636
+ result = Security.SSLWrite(
637
+ self.context, data, len(data), ctypes.byref(processed_bytes)
638
+ )
639
+
640
+ if result == SecurityConst.errSSLWouldBlock and processed_bytes.value == 0:
641
+ # Timed out
642
+ raise socket.timeout("send timed out")
643
+ else:
644
+ _assert_no_error(result)
645
+
646
+ # We sent, and probably succeeded. Tell them how much we sent.
647
+ return processed_bytes.value
648
+
649
+ def sendall(self, data):
650
+ total_sent = 0
651
+ while total_sent < len(data):
652
+ sent = self.send(data[total_sent : total_sent + SSL_WRITE_BLOCKSIZE])
653
+ total_sent += sent
654
+
655
+ def shutdown(self):
656
+ with self._raise_on_error():
657
+ Security.SSLClose(self.context)
658
+
659
+ def close(self):
660
+ # TODO: should I do clean shutdown here? Do I have to?
661
+ if self._makefile_refs < 1:
662
+ self._closed = True
663
+ if self.context:
664
+ CoreFoundation.CFRelease(self.context)
665
+ self.context = None
666
+ if self._client_cert_chain:
667
+ CoreFoundation.CFRelease(self._client_cert_chain)
668
+ self._client_cert_chain = None
669
+ if self._keychain:
670
+ Security.SecKeychainDelete(self._keychain)
671
+ CoreFoundation.CFRelease(self._keychain)
672
+ shutil.rmtree(self._keychain_dir)
673
+ self._keychain = self._keychain_dir = None
674
+ return self.socket.close()
675
+ else:
676
+ self._makefile_refs -= 1
677
+
678
+ def getpeercert(self, binary_form=False):
679
+ # Urgh, annoying.
680
+ #
681
+ # Here's how we do this:
682
+ #
683
+ # 1. Call SSLCopyPeerTrust to get hold of the trust object for this
684
+ # connection.
685
+ # 2. Call SecTrustGetCertificateAtIndex for index 0 to get the leaf.
686
+ # 3. To get the CN, call SecCertificateCopyCommonName and process that
687
+ # string so that it's of the appropriate type.
688
+ # 4. To get the SAN, we need to do something a bit more complex:
689
+ # a. Call SecCertificateCopyValues to get the data, requesting
690
+ # kSecOIDSubjectAltName.
691
+ # b. Mess about with this dictionary to try to get the SANs out.
692
+ #
693
+ # This is gross. Really gross. It's going to be a few hundred LoC extra
694
+ # just to repeat something that SecureTransport can *already do*. So my
695
+ # operating assumption at this time is that what we want to do is
696
+ # instead to just flag to urllib3 that it shouldn't do its own hostname
697
+ # validation when using SecureTransport.
698
+ if not binary_form:
699
+ raise ValueError("SecureTransport only supports dumping binary certs")
700
+ trust = Security.SecTrustRef()
701
+ certdata = None
702
+ der_bytes = None
703
+
704
+ try:
705
+ # Grab the trust store.
706
+ result = Security.SSLCopyPeerTrust(self.context, ctypes.byref(trust))
707
+ _assert_no_error(result)
708
+ if not trust:
709
+ # Probably we haven't done the handshake yet. No biggie.
710
+ return None
711
+
712
+ cert_count = Security.SecTrustGetCertificateCount(trust)
713
+ if not cert_count:
714
+ # Also a case that might happen if we haven't handshaked.
715
+ # Handshook? Handshaken?
716
+ return None
717
+
718
+ leaf = Security.SecTrustGetCertificateAtIndex(trust, 0)
719
+ assert leaf
720
+
721
+ # Ok, now we want the DER bytes.
722
+ certdata = Security.SecCertificateCopyData(leaf)
723
+ assert certdata
724
+
725
+ data_length = CoreFoundation.CFDataGetLength(certdata)
726
+ data_buffer = CoreFoundation.CFDataGetBytePtr(certdata)
727
+ der_bytes = ctypes.string_at(data_buffer, data_length)
728
+ finally:
729
+ if certdata:
730
+ CoreFoundation.CFRelease(certdata)
731
+ if trust:
732
+ CoreFoundation.CFRelease(trust)
733
+
734
+ return der_bytes
735
+
736
+ def version(self):
737
+ protocol = Security.SSLProtocol()
738
+ result = Security.SSLGetNegotiatedProtocolVersion(
739
+ self.context, ctypes.byref(protocol)
740
+ )
741
+ _assert_no_error(result)
742
+ if protocol.value == SecurityConst.kTLSProtocol13:
743
+ raise ssl.SSLError("SecureTransport does not support TLS 1.3")
744
+ elif protocol.value == SecurityConst.kTLSProtocol12:
745
+ return "TLSv1.2"
746
+ elif protocol.value == SecurityConst.kTLSProtocol11:
747
+ return "TLSv1.1"
748
+ elif protocol.value == SecurityConst.kTLSProtocol1:
749
+ return "TLSv1"
750
+ elif protocol.value == SecurityConst.kSSLProtocol3:
751
+ return "SSLv3"
752
+ elif protocol.value == SecurityConst.kSSLProtocol2:
753
+ return "SSLv2"
754
+ else:
755
+ raise ssl.SSLError("Unknown TLS version: %r" % protocol)
756
+
757
+ def _reuse(self):
758
+ self._makefile_refs += 1
759
+
760
+ def _drop(self):
761
+ if self._makefile_refs < 1:
762
+ self.close()
763
+ else:
764
+ self._makefile_refs -= 1
765
+
766
+
767
+ if _fileobject: # Platform-specific: Python 2
768
+
769
+ def makefile(self, mode, bufsize=-1):
770
+ self._makefile_refs += 1
771
+ return _fileobject(self, mode, bufsize, close=True)
772
+
773
+
774
+ else: # Platform-specific: Python 3
775
+
776
+ def makefile(self, mode="r", buffering=None, *args, **kwargs):
777
+ # We disable buffering with SecureTransport because it conflicts with
778
+ # the buffering that ST does internally (see issue #1153 for more).
779
+ buffering = 0
780
+ return backport_makefile(self, mode, buffering, *args, **kwargs)
781
+
782
+
783
+ WrappedSocket.makefile = makefile
784
+
785
+
786
+ class SecureTransportContext(object):
787
+ """
788
+ I am a wrapper class for the SecureTransport library, to translate the
789
+ interface of the standard library ``SSLContext`` object to calls into
790
+ SecureTransport.
791
+ """
792
+
793
+ def __init__(self, protocol):
794
+ self._min_version, self._max_version = _protocol_to_min_max[protocol]
795
+ self._options = 0
796
+ self._verify = False
797
+ self._trust_bundle = None
798
+ self._client_cert = None
799
+ self._client_key = None
800
+ self._client_key_passphrase = None
801
+ self._alpn_protocols = None
802
+
803
+ @property
804
+ def check_hostname(self):
805
+ """
806
+ SecureTransport cannot have its hostname checking disabled. For more,
807
+ see the comment on getpeercert() in this file.
808
+ """
809
+ return True
810
+
811
+ @check_hostname.setter
812
+ def check_hostname(self, value):
813
+ """
814
+ SecureTransport cannot have its hostname checking disabled. For more,
815
+ see the comment on getpeercert() in this file.
816
+ """
817
+ pass
818
+
819
+ @property
820
+ def options(self):
821
+ # TODO: Well, crap.
822
+ #
823
+ # So this is the bit of the code that is the most likely to cause us
824
+ # trouble. Essentially we need to enumerate all of the SSL options that
825
+ # users might want to use and try to see if we can sensibly translate
826
+ # them, or whether we should just ignore them.
827
+ return self._options
828
+
829
+ @options.setter
830
+ def options(self, value):
831
+ # TODO: Update in line with above.
832
+ self._options = value
833
+
834
+ @property
835
+ def verify_mode(self):
836
+ return ssl.CERT_REQUIRED if self._verify else ssl.CERT_NONE
837
+
838
+ @verify_mode.setter
839
+ def verify_mode(self, value):
840
+ self._verify = True if value == ssl.CERT_REQUIRED else False
841
+
842
+ def set_default_verify_paths(self):
843
+ # So, this has to do something a bit weird. Specifically, what it does
844
+ # is nothing.
845
+ #
846
+ # This means that, if we had previously had load_verify_locations
847
+ # called, this does not undo that. We need to do that because it turns
848
+ # out that the rest of the urllib3 code will attempt to load the
849
+ # default verify paths if it hasn't been told about any paths, even if
850
+ # the context itself was configured sometime earlier. We resolve that by just
851
+ # ignoring it.
852
+ pass
853
+
854
+ def load_default_certs(self):
855
+ return self.set_default_verify_paths()
856
+
857
+ def set_ciphers(self, ciphers):
858
+ # For now, we just require the default cipher string.
859
+ if ciphers != util.ssl_.DEFAULT_CIPHERS:
860
+ raise ValueError("SecureTransport doesn't support custom cipher strings")
861
+
862
+ def load_verify_locations(self, cafile=None, capath=None, cadata=None):
863
+ # OK, we only really support cadata and cafile.
864
+ if capath is not None:
865
+ raise ValueError("SecureTransport does not support cert directories")
866
+
867
+ # Raise if cafile does not exist.
868
+ if cafile is not None:
869
+ with open(cafile):
870
+ pass
871
+
872
+ self._trust_bundle = cafile or cadata
873
+
874
+ def load_cert_chain(self, certfile, keyfile=None, password=None):
875
+ self._client_cert = certfile
876
+ self._client_key = keyfile
877
+ self._client_key_passphrase = password  # matches the attribute set in __init__ and read in wrap_socket()
878
+
879
+ def set_alpn_protocols(self, protocols):
880
+ """
881
+ Sets the ALPN protocols that will later be set on the context.
882
+
883
+ Raises a NotImplementedError if ALPN is not supported.
884
+ """
885
+ if not hasattr(Security, "SSLSetALPNProtocols"):
886
+ raise NotImplementedError(
887
+ "SecureTransport supports ALPN only in macOS 10.12+"
888
+ )
889
+ self._alpn_protocols = [six.ensure_binary(p) for p in protocols]
890
+
891
+ def wrap_socket(
892
+ self,
893
+ sock,
894
+ server_side=False,
895
+ do_handshake_on_connect=True,
896
+ suppress_ragged_eofs=True,
897
+ server_hostname=None,
898
+ ):
899
+ # So, what do we do here? Firstly, we assert some properties. This is a
900
+ # stripped down shim, so there is some functionality we don't support.
901
+ # See PEP 543 for the real deal.
902
+ assert not server_side
903
+ assert do_handshake_on_connect
904
+ assert suppress_ragged_eofs
905
+
906
+ # Ok, we're good to go. Now we want to create the wrapped socket object
907
+ # and store it in the appropriate place.
908
+ wrapped_socket = WrappedSocket(sock)
909
+
910
+ # Now we can handshake
911
+ wrapped_socket.handshake(
912
+ server_hostname,
913
+ self._verify,
914
+ self._trust_bundle,
915
+ self._min_version,
916
+ self._max_version,
917
+ self._client_cert,
918
+ self._client_key,
919
+ self._client_key_passphrase,
920
+ self._alpn_protocols,
921
+ )
922
+ return wrapped_socket
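
A minimal usage sketch for the backend above, assuming the standalone urllib3 distribution on macOS (the copy vendored under pip._vendor is not meant to be imported directly); inject_into_urllib3() swaps in the SecureTransportContext defined in this module:

    import urllib3
    from urllib3.contrib import securetransport

    # Monkey-patch urllib3 to use SecureTransportContext instead of ssl.SSLContext.
    securetransport.inject_into_urllib3()
    try:
        http = urllib3.PoolManager()
        resp = http.request("GET", "https://example.com/")
        print(resp.status)
    finally:
        # Restore the default ssl-module-backed implementation.
        securetransport.extract_from_urllib3()
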
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/contrib/socks.py ADDED
@@ -0,0 +1,216 @@
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ This module contains provisional support for SOCKS proxies from within
4
+ urllib3. This module supports SOCKS4, SOCKS4A (an extension of SOCKS4), and
5
+ SOCKS5. To enable its functionality, either install PySocks or install this
6
+ module with the ``socks`` extra.
7
+
8
+ The SOCKS implementation supports the full range of urllib3 features. It also
9
+ supports the following SOCKS features:
10
+
11
+ - SOCKS4A (``proxy_url='socks4a://...'``)
12
+ - SOCKS4 (``proxy_url='socks4://...'``)
13
+ - SOCKS5 with remote DNS (``proxy_url='socks5h://...'``)
14
+ - SOCKS5 with local DNS (``proxy_url='socks5://...'``)
15
+ - Usernames and passwords for the SOCKS proxy
16
+
17
+ .. note::
18
+ It is recommended to use ``socks5h://`` or ``socks4a://`` schemes in
19
+ your ``proxy_url`` to ensure that DNS resolution is done from the remote
20
+ server instead of client-side when connecting to a domain name.
21
+
22
+ SOCKS4 supports IPv4 and domain names with the SOCKS4A extension. SOCKS5
23
+ supports IPv4, IPv6, and domain names.
24
+
25
+ When connecting to a SOCKS4 proxy the ``username`` portion of the ``proxy_url``
26
+ will be sent as the ``userid`` section of the SOCKS request:
27
+
28
+ .. code-block:: python
29
+
30
+ proxy_url="socks4a://<userid>@proxy-host"
31
+
32
+ When connecting to a SOCKS5 proxy the ``username`` and ``password`` portion
33
+ of the ``proxy_url`` will be sent as the username/password to authenticate
34
+ with the proxy:
35
+
36
+ .. code-block:: python
37
+
38
+ proxy_url="socks5h://<username>:<password>@proxy-host"
39
+
40
+ """
41
+ from __future__ import absolute_import
42
+
43
+ try:
44
+ import socks
45
+ except ImportError:
46
+ import warnings
47
+
48
+ from ..exceptions import DependencyWarning
49
+
50
+ warnings.warn(
51
+ (
52
+ "SOCKS support in urllib3 requires the installation of optional "
53
+ "dependencies: specifically, PySocks. For more information, see "
54
+ "https://urllib3.readthedocs.io/en/1.26.x/contrib.html#socks-proxies"
55
+ ),
56
+ DependencyWarning,
57
+ )
58
+ raise
59
+
60
+ from socket import error as SocketError
61
+ from socket import timeout as SocketTimeout
62
+
63
+ from ..connection import HTTPConnection, HTTPSConnection
64
+ from ..connectionpool import HTTPConnectionPool, HTTPSConnectionPool
65
+ from ..exceptions import ConnectTimeoutError, NewConnectionError
66
+ from ..poolmanager import PoolManager
67
+ from ..util.url import parse_url
68
+
69
+ try:
70
+ import ssl
71
+ except ImportError:
72
+ ssl = None
73
+
74
+
75
+ class SOCKSConnection(HTTPConnection):
76
+ """
77
+ A plain-text HTTP connection that connects via a SOCKS proxy.
78
+ """
79
+
80
+ def __init__(self, *args, **kwargs):
81
+ self._socks_options = kwargs.pop("_socks_options")
82
+ super(SOCKSConnection, self).__init__(*args, **kwargs)
83
+
84
+ def _new_conn(self):
85
+ """
86
+ Establish a new connection via the SOCKS proxy.
87
+ """
88
+ extra_kw = {}
89
+ if self.source_address:
90
+ extra_kw["source_address"] = self.source_address
91
+
92
+ if self.socket_options:
93
+ extra_kw["socket_options"] = self.socket_options
94
+
95
+ try:
96
+ conn = socks.create_connection(
97
+ (self.host, self.port),
98
+ proxy_type=self._socks_options["socks_version"],
99
+ proxy_addr=self._socks_options["proxy_host"],
100
+ proxy_port=self._socks_options["proxy_port"],
101
+ proxy_username=self._socks_options["username"],
102
+ proxy_password=self._socks_options["password"],
103
+ proxy_rdns=self._socks_options["rdns"],
104
+ timeout=self.timeout,
105
+ **extra_kw
106
+ )
107
+
108
+ except SocketTimeout:
109
+ raise ConnectTimeoutError(
110
+ self,
111
+ "Connection to %s timed out. (connect timeout=%s)"
112
+ % (self.host, self.timeout),
113
+ )
114
+
115
+ except socks.ProxyError as e:
116
+ # This is fragile as hell, but it seems to be the only way to raise
117
+ # useful errors here.
118
+ if e.socket_err:
119
+ error = e.socket_err
120
+ if isinstance(error, SocketTimeout):
121
+ raise ConnectTimeoutError(
122
+ self,
123
+ "Connection to %s timed out. (connect timeout=%s)"
124
+ % (self.host, self.timeout),
125
+ )
126
+ else:
127
+ raise NewConnectionError(
128
+ self, "Failed to establish a new connection: %s" % error
129
+ )
130
+ else:
131
+ raise NewConnectionError(
132
+ self, "Failed to establish a new connection: %s" % e
133
+ )
134
+
135
+ except SocketError as e: # Defensive: PySocks should catch all these.
136
+ raise NewConnectionError(
137
+ self, "Failed to establish a new connection: %s" % e
138
+ )
139
+
140
+ return conn
141
+
142
+
143
+ # We don't need to duplicate the Verified/Unverified distinction from
144
+ # urllib3/connection.py here because the HTTPSConnection will already have been
145
+ # correctly set to either the Verified or Unverified form by that module. This
146
+ # means the SOCKSHTTPSConnection will automatically be the correct type.
147
+ class SOCKSHTTPSConnection(SOCKSConnection, HTTPSConnection):
148
+ pass
149
+
150
+
151
+ class SOCKSHTTPConnectionPool(HTTPConnectionPool):
152
+ ConnectionCls = SOCKSConnection
153
+
154
+
155
+ class SOCKSHTTPSConnectionPool(HTTPSConnectionPool):
156
+ ConnectionCls = SOCKSHTTPSConnection
157
+
158
+
159
+ class SOCKSProxyManager(PoolManager):
160
+ """
161
+ A version of the urllib3 ProxyManager that routes connections via the
162
+ defined SOCKS proxy.
163
+ """
164
+
165
+ pool_classes_by_scheme = {
166
+ "http": SOCKSHTTPConnectionPool,
167
+ "https": SOCKSHTTPSConnectionPool,
168
+ }
169
+
170
+ def __init__(
171
+ self,
172
+ proxy_url,
173
+ username=None,
174
+ password=None,
175
+ num_pools=10,
176
+ headers=None,
177
+ **connection_pool_kw
178
+ ):
179
+ parsed = parse_url(proxy_url)
180
+
181
+ if username is None and password is None and parsed.auth is not None:
182
+ split = parsed.auth.split(":")
183
+ if len(split) == 2:
184
+ username, password = split
185
+ if parsed.scheme == "socks5":
186
+ socks_version = socks.PROXY_TYPE_SOCKS5
187
+ rdns = False
188
+ elif parsed.scheme == "socks5h":
189
+ socks_version = socks.PROXY_TYPE_SOCKS5
190
+ rdns = True
191
+ elif parsed.scheme == "socks4":
192
+ socks_version = socks.PROXY_TYPE_SOCKS4
193
+ rdns = False
194
+ elif parsed.scheme == "socks4a":
195
+ socks_version = socks.PROXY_TYPE_SOCKS4
196
+ rdns = True
197
+ else:
198
+ raise ValueError("Unable to determine SOCKS version from %s" % proxy_url)
199
+
200
+ self.proxy_url = proxy_url
201
+
202
+ socks_options = {
203
+ "socks_version": socks_version,
204
+ "proxy_host": parsed.host,
205
+ "proxy_port": parsed.port,
206
+ "username": username,
207
+ "password": password,
208
+ "rdns": rdns,
209
+ }
210
+ connection_pool_kw["_socks_options"] = socks_options
211
+
212
+ super(SOCKSProxyManager, self).__init__(
213
+ num_pools, headers, **connection_pool_kw
214
+ )
215
+
216
+ self.pool_classes_by_scheme = SOCKSProxyManager.pool_classes_by_scheme
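
A short usage sketch for SOCKSProxyManager, assuming PySocks is installed and a SOCKS5 proxy is reachable at localhost:1080 (address and port are placeholders):

    from urllib3.contrib.socks import SOCKSProxyManager

    # socks5h:// asks the proxy to resolve the target hostname (rdns=True above).
    proxy = SOCKSProxyManager("socks5h://localhost:1080/")
    resp = proxy.request("GET", "https://example.com/")
    print(resp.status, len(resp.data))
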
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/exceptions.py ADDED
@@ -0,0 +1,323 @@
1
+ from __future__ import absolute_import
2
+
3
+ from .packages.six.moves.http_client import IncompleteRead as httplib_IncompleteRead
4
+
5
+ # Base Exceptions
6
+
7
+
8
+ class HTTPError(Exception):
9
+ """Base exception used by this module."""
10
+
11
+ pass
12
+
13
+
14
+ class HTTPWarning(Warning):
15
+ """Base warning used by this module."""
16
+
17
+ pass
18
+
19
+
20
+ class PoolError(HTTPError):
21
+ """Base exception for errors caused within a pool."""
22
+
23
+ def __init__(self, pool, message):
24
+ self.pool = pool
25
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
26
+
27
+ def __reduce__(self):
28
+ # For pickling purposes.
29
+ return self.__class__, (None, None)
30
+
31
+
32
+ class RequestError(PoolError):
33
+ """Base exception for PoolErrors that have associated URLs."""
34
+
35
+ def __init__(self, pool, url, message):
36
+ self.url = url
37
+ PoolError.__init__(self, pool, message)
38
+
39
+ def __reduce__(self):
40
+ # For pickling purposes.
41
+ return self.__class__, (None, self.url, None)
42
+
43
+
44
+ class SSLError(HTTPError):
45
+ """Raised when SSL certificate fails in an HTTPS connection."""
46
+
47
+ pass
48
+
49
+
50
+ class ProxyError(HTTPError):
51
+ """Raised when the connection to a proxy fails."""
52
+
53
+ def __init__(self, message, error, *args):
54
+ super(ProxyError, self).__init__(message, error, *args)
55
+ self.original_error = error
56
+
57
+
58
+ class DecodeError(HTTPError):
59
+ """Raised when automatic decoding based on Content-Type fails."""
60
+
61
+ pass
62
+
63
+
64
+ class ProtocolError(HTTPError):
65
+ """Raised when something unexpected happens mid-request/response."""
66
+
67
+ pass
68
+
69
+
70
+ #: Renamed to ProtocolError but aliased for backwards compatibility.
71
+ ConnectionError = ProtocolError
72
+
73
+
74
+ # Leaf Exceptions
75
+
76
+
77
+ class MaxRetryError(RequestError):
78
+ """Raised when the maximum number of retries is exceeded.
79
+
80
+ :param pool: The connection pool
81
+ :type pool: :class:`~urllib3.connectionpool.HTTPConnectionPool`
82
+ :param string url: The requested Url
83
+ :param exceptions.Exception reason: The underlying error
84
+
85
+ """
86
+
87
+ def __init__(self, pool, url, reason=None):
88
+ self.reason = reason
89
+
90
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (url, reason)
91
+
92
+ RequestError.__init__(self, pool, url, message)
93
+
94
+
95
+ class HostChangedError(RequestError):
96
+ """Raised when an existing pool gets a request for a foreign host."""
97
+
98
+ def __init__(self, pool, url, retries=3):
99
+ message = "Tried to open a foreign host with url: %s" % url
100
+ RequestError.__init__(self, pool, url, message)
101
+ self.retries = retries
102
+
103
+
104
+ class TimeoutStateError(HTTPError):
105
+ """Raised when passing an invalid state to a timeout"""
106
+
107
+ pass
108
+
109
+
110
+ class TimeoutError(HTTPError):
111
+ """Raised when a socket timeout error occurs.
112
+
113
+ Catching this error will catch both :exc:`ReadTimeoutErrors
114
+ <ReadTimeoutError>` and :exc:`ConnectTimeoutErrors <ConnectTimeoutError>`.
115
+ """
116
+
117
+ pass
118
+
119
+
120
+ class ReadTimeoutError(TimeoutError, RequestError):
121
+ """Raised when a socket timeout occurs while receiving data from a server"""
122
+
123
+ pass
124
+
125
+
126
+ # This timeout error does not have a URL attached and needs to inherit from the
127
+ # base HTTPError
128
+ class ConnectTimeoutError(TimeoutError):
129
+ """Raised when a socket timeout occurs while connecting to a server"""
130
+
131
+ pass
132
+
133
+
134
+ class NewConnectionError(ConnectTimeoutError, PoolError):
135
+ """Raised when we fail to establish a new connection. Usually ECONNREFUSED."""
136
+
137
+ pass
138
+
139
+
140
+ class EmptyPoolError(PoolError):
141
+ """Raised when a pool runs out of connections and no more are allowed."""
142
+
143
+ pass
144
+
145
+
146
+ class ClosedPoolError(PoolError):
147
+ """Raised when a request enters a pool after the pool has been closed."""
148
+
149
+ pass
150
+
151
+
152
+ class LocationValueError(ValueError, HTTPError):
153
+ """Raised when there is something wrong with a given URL input."""
154
+
155
+ pass
156
+
157
+
158
+ class LocationParseError(LocationValueError):
159
+ """Raised when get_host or similar fails to parse the URL input."""
160
+
161
+ def __init__(self, location):
162
+ message = "Failed to parse: %s" % location
163
+ HTTPError.__init__(self, message)
164
+
165
+ self.location = location
166
+
167
+
168
+ class URLSchemeUnknown(LocationValueError):
169
+ """Raised when a URL input has an unsupported scheme."""
170
+
171
+ def __init__(self, scheme):
172
+ message = "Not supported URL scheme %s" % scheme
173
+ super(URLSchemeUnknown, self).__init__(message)
174
+
175
+ self.scheme = scheme
176
+
177
+
178
+ class ResponseError(HTTPError):
179
+ """Used as a container for an error reason supplied in a MaxRetryError."""
180
+
181
+ GENERIC_ERROR = "too many error responses"
182
+ SPECIFIC_ERROR = "too many {status_code} error responses"
183
+
184
+
185
+ class SecurityWarning(HTTPWarning):
186
+ """Warned when performing security reducing actions"""
187
+
188
+ pass
189
+
190
+
191
+ class SubjectAltNameWarning(SecurityWarning):
192
+ """Warned when connecting to a host with a certificate missing a SAN."""
193
+
194
+ pass
195
+
196
+
197
+ class InsecureRequestWarning(SecurityWarning):
198
+ """Warned when making an unverified HTTPS request."""
199
+
200
+ pass
201
+
202
+
203
+ class SystemTimeWarning(SecurityWarning):
204
+ """Warned when system time is suspected to be wrong"""
205
+
206
+ pass
207
+
208
+
209
+ class InsecurePlatformWarning(SecurityWarning):
210
+ """Warned when certain TLS/SSL configuration is not available on a platform."""
211
+
212
+ pass
213
+
214
+
215
+ class SNIMissingWarning(HTTPWarning):
216
+ """Warned when making a HTTPS request without SNI available."""
217
+
218
+ pass
219
+
220
+
221
+ class DependencyWarning(HTTPWarning):
222
+ """
223
+ Warned when an attempt is made to import a module with missing optional
224
+ dependencies.
225
+ """
226
+
227
+ pass
228
+
229
+
230
+ class ResponseNotChunked(ProtocolError, ValueError):
231
+ """Response needs to be chunked in order to read it as chunks."""
232
+
233
+ pass
234
+
235
+
236
+ class BodyNotHttplibCompatible(HTTPError):
237
+ """
238
+ Body should be :class:`http.client.HTTPResponse` like
239
+ (have an fp attribute which returns raw chunks) for read_chunked().
240
+ """
241
+
242
+ pass
243
+
244
+
245
+ class IncompleteRead(HTTPError, httplib_IncompleteRead):
246
+ """
247
+ Response length doesn't match expected Content-Length
248
+
249
+ Subclass of :class:`http.client.IncompleteRead` to allow int value
250
+ for ``partial`` to avoid creating large objects on streamed reads.
251
+ """
252
+
253
+ def __init__(self, partial, expected):
254
+ super(IncompleteRead, self).__init__(partial, expected)
255
+
256
+ def __repr__(self):
257
+ return "IncompleteRead(%i bytes read, %i more expected)" % (
258
+ self.partial,
259
+ self.expected,
260
+ )
261
+
262
+
263
+ class InvalidChunkLength(HTTPError, httplib_IncompleteRead):
264
+ """Invalid chunk length in a chunked response."""
265
+
266
+ def __init__(self, response, length):
267
+ super(InvalidChunkLength, self).__init__(
268
+ response.tell(), response.length_remaining
269
+ )
270
+ self.response = response
271
+ self.length = length
272
+
273
+ def __repr__(self):
274
+ return "InvalidChunkLength(got length %r, %i bytes read)" % (
275
+ self.length,
276
+ self.partial,
277
+ )
278
+
279
+
280
+ class InvalidHeader(HTTPError):
281
+ """The header provided was somehow invalid."""
282
+
283
+ pass
284
+
285
+
286
+ class ProxySchemeUnknown(AssertionError, URLSchemeUnknown):
287
+ """ProxyManager does not support the supplied scheme"""
288
+
289
+ # TODO(t-8ch): Stop inheriting from AssertionError in v2.0.
290
+
291
+ def __init__(self, scheme):
292
+ # 'localhost' is here because our URL parser parses
293
+ # localhost:8080 -> scheme=localhost, remove if we fix this.
294
+ if scheme == "localhost":
295
+ scheme = None
296
+ if scheme is None:
297
+ message = "Proxy URL had no scheme, should start with http:// or https://"
298
+ else:
299
+ message = (
300
+ "Proxy URL had unsupported scheme %s, should use http:// or https://"
301
+ % scheme
302
+ )
303
+ super(ProxySchemeUnknown, self).__init__(message)
304
+
305
+
306
+ class ProxySchemeUnsupported(ValueError):
307
+ """Fetching HTTPS resources through HTTPS proxies is unsupported"""
308
+
309
+ pass
310
+
311
+
312
+ class HeaderParsingError(HTTPError):
313
+ """Raised by assert_header_parsing, but we convert it to a log.warning statement."""
314
+
315
+ def __init__(self, defects, unparsed_data):
316
+ message = "%s, unparsed data: %r" % (defects or "Unknown", unparsed_data)
317
+ super(HeaderParsingError, self).__init__(message)
318
+
319
+
320
+ class UnrewindableBodyError(HTTPError):
321
+ """urllib3 encountered an error when trying to rewind a body"""
322
+
323
+ pass
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/fields.py ADDED
@@ -0,0 +1,274 @@
1
+ from __future__ import absolute_import
2
+
3
+ import email.utils
4
+ import mimetypes
5
+ import re
6
+
7
+ from .packages import six
8
+
9
+
10
+ def guess_content_type(filename, default="application/octet-stream"):
11
+ """
12
+ Guess the "Content-Type" of a file.
13
+
14
+ :param filename:
15
+ The filename to guess the "Content-Type" of using :mod:`mimetypes`.
16
+ :param default:
17
+ If no "Content-Type" can be guessed, default to `default`.
18
+ """
19
+ if filename:
20
+ return mimetypes.guess_type(filename)[0] or default
21
+ return default
22
+
23
+
24
+ def format_header_param_rfc2231(name, value):
25
+ """
26
+ Helper function to format and quote a single header parameter using the
27
+ strategy defined in RFC 2231.
28
+
29
+ Particularly useful for header parameters which might contain
30
+ non-ASCII values, like file names. This follows
31
+ `RFC 2388 Section 4.4 <https://tools.ietf.org/html/rfc2388#section-4.4>`_.
32
+
33
+ :param name:
34
+ The name of the parameter, a string expected to be ASCII only.
35
+ :param value:
36
+ The value of the parameter, provided as ``bytes`` or ``str``.
37
+ :ret:
38
+ An RFC-2231-formatted unicode string.
39
+ """
40
+ if isinstance(value, six.binary_type):
41
+ value = value.decode("utf-8")
42
+
43
+ if not any(ch in value for ch in '"\\\r\n'):
44
+ result = u'%s="%s"' % (name, value)
45
+ try:
46
+ result.encode("ascii")
47
+ except (UnicodeEncodeError, UnicodeDecodeError):
48
+ pass
49
+ else:
50
+ return result
51
+
52
+ if six.PY2: # Python 2:
53
+ value = value.encode("utf-8")
54
+
55
+ # encode_rfc2231 accepts an encoded string and returns an ascii-encoded
56
+ # string in Python 2 but accepts and returns unicode strings in Python 3
57
+ value = email.utils.encode_rfc2231(value, "utf-8")
58
+ value = "%s*=%s" % (name, value)
59
+
60
+ if six.PY2: # Python 2:
61
+ value = value.decode("utf-8")
62
+
63
+ return value
64
+
65
+
66
+ _HTML5_REPLACEMENTS = {
67
+ u"\u0022": u"%22",
68
+ # Replace "\" with "\\".
69
+ u"\u005C": u"\u005C\u005C",
70
+ }
71
+
72
+ # All control characters from 0x00 to 0x1F *except* 0x1B.
73
+ _HTML5_REPLACEMENTS.update(
74
+ {
75
+ six.unichr(cc): u"%{:02X}".format(cc)
76
+ for cc in range(0x00, 0x1F + 1)
77
+ if cc not in (0x1B,)
78
+ }
79
+ )
80
+
81
+
82
+ def _replace_multiple(value, needles_and_replacements):
83
+ def replacer(match):
84
+ return needles_and_replacements[match.group(0)]
85
+
86
+ pattern = re.compile(
87
+ r"|".join([re.escape(needle) for needle in needles_and_replacements.keys()])
88
+ )
89
+
90
+ result = pattern.sub(replacer, value)
91
+
92
+ return result
93
+
94
+
95
+ def format_header_param_html5(name, value):
96
+ """
97
+ Helper function to format and quote a single header parameter using the
98
+ HTML5 strategy.
99
+
100
+ Particularly useful for header parameters which might contain
101
+ non-ASCII values, like file names. This follows the `HTML5 Working Draft
102
+ Section 4.10.22.7`_ and matches the behavior of curl and modern browsers.
103
+
104
+ .. _HTML5 Working Draft Section 4.10.22.7:
105
+ https://w3c.github.io/html/sec-forms.html#multipart-form-data
106
+
107
+ :param name:
108
+ The name of the parameter, a string expected to be ASCII only.
109
+ :param value:
110
+ The value of the parameter, provided as ``bytes`` or ``str``.
111
+ :ret:
112
+ A unicode string, stripped of troublesome characters.
113
+ """
114
+ if isinstance(value, six.binary_type):
115
+ value = value.decode("utf-8")
116
+
117
+ value = _replace_multiple(value, _HTML5_REPLACEMENTS)
118
+
119
+ return u'%s="%s"' % (name, value)
120
+
121
+
122
+ # For backwards-compatibility.
123
+ format_header_param = format_header_param_html5
124
+
125
+
126
+ class RequestField(object):
127
+ """
128
+ A data container for request body parameters.
129
+
130
+ :param name:
131
+ The name of this request field. Must be unicode.
132
+ :param data:
133
+ The data/value body.
134
+ :param filename:
135
+ An optional filename of the request field. Must be unicode.
136
+ :param headers:
137
+ An optional dict-like object of headers to initially use for the field.
138
+ :param header_formatter:
139
+ An optional callable that is used to encode and format the headers. By
140
+ default, this is :func:`format_header_param_html5`.
141
+ """
142
+
143
+ def __init__(
144
+ self,
145
+ name,
146
+ data,
147
+ filename=None,
148
+ headers=None,
149
+ header_formatter=format_header_param_html5,
150
+ ):
151
+ self._name = name
152
+ self._filename = filename
153
+ self.data = data
154
+ self.headers = {}
155
+ if headers:
156
+ self.headers = dict(headers)
157
+ self.header_formatter = header_formatter
158
+
159
+ @classmethod
160
+ def from_tuples(cls, fieldname, value, header_formatter=format_header_param_html5):
161
+ """
162
+ A :class:`~urllib3.fields.RequestField` factory from old-style tuple parameters.
163
+
164
+ Supports constructing :class:`~urllib3.fields.RequestField` from
165
+ parameter of key/value strings AND key/filetuple. A filetuple is a
166
+ (filename, data, MIME type) tuple where the MIME type is optional.
167
+ For example::
168
+
169
+ 'foo': 'bar',
170
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
171
+ 'realfile': ('barfile.txt', open('realfile').read()),
172
+ 'typedfile': ('bazfile.bin', open('bazfile').read(), 'image/jpeg'),
173
+ 'nonamefile': 'contents of nonamefile field',
174
+
175
+ Field names and filenames must be unicode.
176
+ """
177
+ if isinstance(value, tuple):
178
+ if len(value) == 3:
179
+ filename, data, content_type = value
180
+ else:
181
+ filename, data = value
182
+ content_type = guess_content_type(filename)
183
+ else:
184
+ filename = None
185
+ content_type = None
186
+ data = value
187
+
188
+ request_param = cls(
189
+ fieldname, data, filename=filename, header_formatter=header_formatter
190
+ )
191
+ request_param.make_multipart(content_type=content_type)
192
+
193
+ return request_param
194
+
195
+ def _render_part(self, name, value):
196
+ """
197
+ Overridable helper function to format a single header parameter. By
198
+ default, this calls ``self.header_formatter``.
199
+
200
+ :param name:
201
+ The name of the parameter, a string expected to be ASCII only.
202
+ :param value:
203
+ The value of the parameter, provided as a unicode string.
204
+ """
205
+
206
+ return self.header_formatter(name, value)
207
+
208
+ def _render_parts(self, header_parts):
209
+ """
210
+ Helper function to format and quote a single header.
211
+
212
+ Useful for single headers that are composed of multiple items. E.g.,
213
+ 'Content-Disposition' fields.
214
+
215
+ :param header_parts:
216
+ A sequence of (k, v) tuples or a :class:`dict` of (k, v) to format
217
+ as `k1="v1"; k2="v2"; ...`.
218
+ """
219
+ parts = []
220
+ iterable = header_parts
221
+ if isinstance(header_parts, dict):
222
+ iterable = header_parts.items()
223
+
224
+ for name, value in iterable:
225
+ if value is not None:
226
+ parts.append(self._render_part(name, value))
227
+
228
+ return u"; ".join(parts)
229
+
230
+ def render_headers(self):
231
+ """
232
+ Renders the headers for this request field.
233
+ """
234
+ lines = []
235
+
236
+ sort_keys = ["Content-Disposition", "Content-Type", "Content-Location"]
237
+ for sort_key in sort_keys:
238
+ if self.headers.get(sort_key, False):
239
+ lines.append(u"%s: %s" % (sort_key, self.headers[sort_key]))
240
+
241
+ for header_name, header_value in self.headers.items():
242
+ if header_name not in sort_keys:
243
+ if header_value:
244
+ lines.append(u"%s: %s" % (header_name, header_value))
245
+
246
+ lines.append(u"\r\n")
247
+ return u"\r\n".join(lines)
248
+
249
+ def make_multipart(
250
+ self, content_disposition=None, content_type=None, content_location=None
251
+ ):
252
+ """
253
+ Makes this request field into a multipart request field.
254
+
255
+ This method sets the "Content-Disposition", "Content-Type" and
256
+ "Content-Location" headers on this request field.
257
+
258
+ :param content_type:
259
+ The 'Content-Type' of the request body.
260
+ :param content_location:
261
+ The 'Content-Location' of the request body.
262
+
263
+ """
264
+ self.headers["Content-Disposition"] = content_disposition or u"form-data"
265
+ self.headers["Content-Disposition"] += u"; ".join(
266
+ [
267
+ u"",
268
+ self._render_parts(
269
+ ((u"name", self._name), (u"filename", self._filename))
270
+ ),
271
+ ]
272
+ )
273
+ self.headers["Content-Type"] = content_type
274
+ self.headers["Content-Location"] = content_location
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (196 Bytes).
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/__pycache__/six.cpython-310.pyc ADDED
Binary file (27.7 kB).
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/backports/__init__.py ADDED
File without changes
venv/lib/python3.10/site-packages/pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-310.pyc ADDED
Binary file (206 Bytes). View file