Add files using upload-large-folder tool
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- llmeval-env/lib/python3.10/site-packages/aiohttp/__init__.py +240 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/client_proto.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/log.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/tracing.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_protocol.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_response.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_cparser.pxd +158 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_find_header.pxd +2 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_headers.pxi +83 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_helpers.pyi +6 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_helpers.pyx +35 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_http_parser.pyx +838 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_http_writer.pyx +163 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so +0 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/_websocket.pyx +56 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/base_protocol.py +95 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/client.py +1366 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/client_exceptions.py +346 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/client_proto.py +296 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/client_reqrep.py +1207 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/client_ws.py +315 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/compression_utils.py +157 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/connector.py +1511 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/cookiejar.py +419 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/hdrs.py +108 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/helpers.py +1029 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/http.py +72 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/http_exceptions.py +106 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/http_parser.py +1041 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/http_websocket.py +740 -0
- llmeval-env/lib/python3.10/site-packages/aiohttp/http_writer.py +198 -0
llmeval-env/lib/python3.10/site-packages/aiohttp/__init__.py
ADDED
@@ -0,0 +1,240 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
__version__ = "3.9.5"
|
2 |
+
|
3 |
+
from typing import TYPE_CHECKING, Tuple
|
4 |
+
|
5 |
+
from . import hdrs as hdrs
|
6 |
+
from .client import (
|
7 |
+
BaseConnector as BaseConnector,
|
8 |
+
ClientConnectionError as ClientConnectionError,
|
9 |
+
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
10 |
+
ClientConnectorError as ClientConnectorError,
|
11 |
+
ClientConnectorSSLError as ClientConnectorSSLError,
|
12 |
+
ClientError as ClientError,
|
13 |
+
ClientHttpProxyError as ClientHttpProxyError,
|
14 |
+
ClientOSError as ClientOSError,
|
15 |
+
ClientPayloadError as ClientPayloadError,
|
16 |
+
ClientProxyConnectionError as ClientProxyConnectionError,
|
17 |
+
ClientRequest as ClientRequest,
|
18 |
+
ClientResponse as ClientResponse,
|
19 |
+
ClientResponseError as ClientResponseError,
|
20 |
+
ClientSession as ClientSession,
|
21 |
+
ClientSSLError as ClientSSLError,
|
22 |
+
ClientTimeout as ClientTimeout,
|
23 |
+
ClientWebSocketResponse as ClientWebSocketResponse,
|
24 |
+
ContentTypeError as ContentTypeError,
|
25 |
+
Fingerprint as Fingerprint,
|
26 |
+
InvalidURL as InvalidURL,
|
27 |
+
NamedPipeConnector as NamedPipeConnector,
|
28 |
+
RequestInfo as RequestInfo,
|
29 |
+
ServerConnectionError as ServerConnectionError,
|
30 |
+
ServerDisconnectedError as ServerDisconnectedError,
|
31 |
+
ServerFingerprintMismatch as ServerFingerprintMismatch,
|
32 |
+
ServerTimeoutError as ServerTimeoutError,
|
33 |
+
TCPConnector as TCPConnector,
|
34 |
+
TooManyRedirects as TooManyRedirects,
|
35 |
+
UnixConnector as UnixConnector,
|
36 |
+
WSServerHandshakeError as WSServerHandshakeError,
|
37 |
+
request as request,
|
38 |
+
)
|
39 |
+
from .cookiejar import CookieJar as CookieJar, DummyCookieJar as DummyCookieJar
|
40 |
+
from .formdata import FormData as FormData
|
41 |
+
from .helpers import BasicAuth, ChainMapProxy, ETag
|
42 |
+
from .http import (
|
43 |
+
HttpVersion as HttpVersion,
|
44 |
+
HttpVersion10 as HttpVersion10,
|
45 |
+
HttpVersion11 as HttpVersion11,
|
46 |
+
WebSocketError as WebSocketError,
|
47 |
+
WSCloseCode as WSCloseCode,
|
48 |
+
WSMessage as WSMessage,
|
49 |
+
WSMsgType as WSMsgType,
|
50 |
+
)
|
51 |
+
from .multipart import (
|
52 |
+
BadContentDispositionHeader as BadContentDispositionHeader,
|
53 |
+
BadContentDispositionParam as BadContentDispositionParam,
|
54 |
+
BodyPartReader as BodyPartReader,
|
55 |
+
MultipartReader as MultipartReader,
|
56 |
+
MultipartWriter as MultipartWriter,
|
57 |
+
content_disposition_filename as content_disposition_filename,
|
58 |
+
parse_content_disposition as parse_content_disposition,
|
59 |
+
)
|
60 |
+
from .payload import (
|
61 |
+
PAYLOAD_REGISTRY as PAYLOAD_REGISTRY,
|
62 |
+
AsyncIterablePayload as AsyncIterablePayload,
|
63 |
+
BufferedReaderPayload as BufferedReaderPayload,
|
64 |
+
BytesIOPayload as BytesIOPayload,
|
65 |
+
BytesPayload as BytesPayload,
|
66 |
+
IOBasePayload as IOBasePayload,
|
67 |
+
JsonPayload as JsonPayload,
|
68 |
+
Payload as Payload,
|
69 |
+
StringIOPayload as StringIOPayload,
|
70 |
+
StringPayload as StringPayload,
|
71 |
+
TextIOPayload as TextIOPayload,
|
72 |
+
get_payload as get_payload,
|
73 |
+
payload_type as payload_type,
|
74 |
+
)
|
75 |
+
from .payload_streamer import streamer as streamer
|
76 |
+
from .resolver import (
|
77 |
+
AsyncResolver as AsyncResolver,
|
78 |
+
DefaultResolver as DefaultResolver,
|
79 |
+
ThreadedResolver as ThreadedResolver,
|
80 |
+
)
|
81 |
+
from .streams import (
|
82 |
+
EMPTY_PAYLOAD as EMPTY_PAYLOAD,
|
83 |
+
DataQueue as DataQueue,
|
84 |
+
EofStream as EofStream,
|
85 |
+
FlowControlDataQueue as FlowControlDataQueue,
|
86 |
+
StreamReader as StreamReader,
|
87 |
+
)
|
88 |
+
from .tracing import (
|
89 |
+
TraceConfig as TraceConfig,
|
90 |
+
TraceConnectionCreateEndParams as TraceConnectionCreateEndParams,
|
91 |
+
TraceConnectionCreateStartParams as TraceConnectionCreateStartParams,
|
92 |
+
TraceConnectionQueuedEndParams as TraceConnectionQueuedEndParams,
|
93 |
+
TraceConnectionQueuedStartParams as TraceConnectionQueuedStartParams,
|
94 |
+
TraceConnectionReuseconnParams as TraceConnectionReuseconnParams,
|
95 |
+
TraceDnsCacheHitParams as TraceDnsCacheHitParams,
|
96 |
+
TraceDnsCacheMissParams as TraceDnsCacheMissParams,
|
97 |
+
TraceDnsResolveHostEndParams as TraceDnsResolveHostEndParams,
|
98 |
+
TraceDnsResolveHostStartParams as TraceDnsResolveHostStartParams,
|
99 |
+
TraceRequestChunkSentParams as TraceRequestChunkSentParams,
|
100 |
+
TraceRequestEndParams as TraceRequestEndParams,
|
101 |
+
TraceRequestExceptionParams as TraceRequestExceptionParams,
|
102 |
+
TraceRequestRedirectParams as TraceRequestRedirectParams,
|
103 |
+
TraceRequestStartParams as TraceRequestStartParams,
|
104 |
+
TraceResponseChunkReceivedParams as TraceResponseChunkReceivedParams,
|
105 |
+
)
|
106 |
+
|
107 |
+
if TYPE_CHECKING:
|
108 |
+
# At runtime these are lazy-loaded at the bottom of the file.
|
109 |
+
from .worker import (
|
110 |
+
GunicornUVLoopWebWorker as GunicornUVLoopWebWorker,
|
111 |
+
GunicornWebWorker as GunicornWebWorker,
|
112 |
+
)
|
113 |
+
|
114 |
+
__all__: Tuple[str, ...] = (
|
115 |
+
"hdrs",
|
116 |
+
# client
|
117 |
+
"BaseConnector",
|
118 |
+
"ClientConnectionError",
|
119 |
+
"ClientConnectorCertificateError",
|
120 |
+
"ClientConnectorError",
|
121 |
+
"ClientConnectorSSLError",
|
122 |
+
"ClientError",
|
123 |
+
"ClientHttpProxyError",
|
124 |
+
"ClientOSError",
|
125 |
+
"ClientPayloadError",
|
126 |
+
"ClientProxyConnectionError",
|
127 |
+
"ClientResponse",
|
128 |
+
"ClientRequest",
|
129 |
+
"ClientResponseError",
|
130 |
+
"ClientSSLError",
|
131 |
+
"ClientSession",
|
132 |
+
"ClientTimeout",
|
133 |
+
"ClientWebSocketResponse",
|
134 |
+
"ContentTypeError",
|
135 |
+
"Fingerprint",
|
136 |
+
"InvalidURL",
|
137 |
+
"RequestInfo",
|
138 |
+
"ServerConnectionError",
|
139 |
+
"ServerDisconnectedError",
|
140 |
+
"ServerFingerprintMismatch",
|
141 |
+
"ServerTimeoutError",
|
142 |
+
"TCPConnector",
|
143 |
+
"TooManyRedirects",
|
144 |
+
"UnixConnector",
|
145 |
+
"NamedPipeConnector",
|
146 |
+
"WSServerHandshakeError",
|
147 |
+
"request",
|
148 |
+
# cookiejar
|
149 |
+
"CookieJar",
|
150 |
+
"DummyCookieJar",
|
151 |
+
# formdata
|
152 |
+
"FormData",
|
153 |
+
# helpers
|
154 |
+
"BasicAuth",
|
155 |
+
"ChainMapProxy",
|
156 |
+
"ETag",
|
157 |
+
# http
|
158 |
+
"HttpVersion",
|
159 |
+
"HttpVersion10",
|
160 |
+
"HttpVersion11",
|
161 |
+
"WSMsgType",
|
162 |
+
"WSCloseCode",
|
163 |
+
"WSMessage",
|
164 |
+
"WebSocketError",
|
165 |
+
# multipart
|
166 |
+
"BadContentDispositionHeader",
|
167 |
+
"BadContentDispositionParam",
|
168 |
+
"BodyPartReader",
|
169 |
+
"MultipartReader",
|
170 |
+
"MultipartWriter",
|
171 |
+
"content_disposition_filename",
|
172 |
+
"parse_content_disposition",
|
173 |
+
# payload
|
174 |
+
"AsyncIterablePayload",
|
175 |
+
"BufferedReaderPayload",
|
176 |
+
"BytesIOPayload",
|
177 |
+
"BytesPayload",
|
178 |
+
"IOBasePayload",
|
179 |
+
"JsonPayload",
|
180 |
+
"PAYLOAD_REGISTRY",
|
181 |
+
"Payload",
|
182 |
+
"StringIOPayload",
|
183 |
+
"StringPayload",
|
184 |
+
"TextIOPayload",
|
185 |
+
"get_payload",
|
186 |
+
"payload_type",
|
187 |
+
# payload_streamer
|
188 |
+
"streamer",
|
189 |
+
# resolver
|
190 |
+
"AsyncResolver",
|
191 |
+
"DefaultResolver",
|
192 |
+
"ThreadedResolver",
|
193 |
+
# streams
|
194 |
+
"DataQueue",
|
195 |
+
"EMPTY_PAYLOAD",
|
196 |
+
"EofStream",
|
197 |
+
"FlowControlDataQueue",
|
198 |
+
"StreamReader",
|
199 |
+
# tracing
|
200 |
+
"TraceConfig",
|
201 |
+
"TraceConnectionCreateEndParams",
|
202 |
+
"TraceConnectionCreateStartParams",
|
203 |
+
"TraceConnectionQueuedEndParams",
|
204 |
+
"TraceConnectionQueuedStartParams",
|
205 |
+
"TraceConnectionReuseconnParams",
|
206 |
+
"TraceDnsCacheHitParams",
|
207 |
+
"TraceDnsCacheMissParams",
|
208 |
+
"TraceDnsResolveHostEndParams",
|
209 |
+
"TraceDnsResolveHostStartParams",
|
210 |
+
"TraceRequestChunkSentParams",
|
211 |
+
"TraceRequestEndParams",
|
212 |
+
"TraceRequestExceptionParams",
|
213 |
+
"TraceRequestRedirectParams",
|
214 |
+
"TraceRequestStartParams",
|
215 |
+
"TraceResponseChunkReceivedParams",
|
216 |
+
# workers (imported lazily with __getattr__)
|
217 |
+
"GunicornUVLoopWebWorker",
|
218 |
+
"GunicornWebWorker",
|
219 |
+
)
|
220 |
+
|
221 |
+
|
222 |
+
def __dir__() -> Tuple[str, ...]:
|
223 |
+
return __all__ + ("__author__", "__doc__")
|
224 |
+
|
225 |
+
|
226 |
+
def __getattr__(name: str) -> object:
|
227 |
+
global GunicornUVLoopWebWorker, GunicornWebWorker
|
228 |
+
|
229 |
+
# Importing gunicorn takes a long time (>100ms), so only import if actually needed.
|
230 |
+
if name in ("GunicornUVLoopWebWorker", "GunicornWebWorker"):
|
231 |
+
try:
|
232 |
+
from .worker import GunicornUVLoopWebWorker as guv, GunicornWebWorker as gw
|
233 |
+
except ImportError:
|
234 |
+
return None
|
235 |
+
|
236 |
+
GunicornUVLoopWebWorker = guv # type: ignore[misc]
|
237 |
+
GunicornWebWorker = gw # type: ignore[misc]
|
238 |
+
return guv if name == "GunicornUVLoopWebWorker" else gw
|
239 |
+
|
240 |
+
raise AttributeError(f"module {__name__} has no attribute {name}")
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/__init__.cpython-310.pyc
ADDED
Binary file (4.16 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/client.cpython-310.pyc
ADDED
Binary file (31.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/client_proto.cpython-310.pyc
ADDED
Binary file (6.95 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/compression_utils.cpython-310.pyc
ADDED
Binary file (5.23 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/cookiejar.cpython-310.pyc
ADDED
Binary file (10.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/helpers.cpython-310.pyc
ADDED
Binary file (30.4 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/locks.cpython-310.pyc
ADDED
Binary file (1.65 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/log.cpython-310.pyc
ADDED
Binary file (460 Bytes). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/payload.cpython-310.pyc
ADDED
Binary file (13.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/pytest_plugin.cpython-310.pyc
ADDED
Binary file (9.72 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/streams.cpython-310.pyc
ADDED
Binary file (18.9 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/test_utils.cpython-310.pyc
ADDED
Binary file (21 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/tracing.cpython-310.pyc
ADDED
Binary file (14.3 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_log.cpython-310.pyc
ADDED
Binary file (7.38 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_middlewares.cpython-310.pyc
ADDED
Binary file (3.86 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_protocol.cpython-310.pyc
ADDED
Binary file (17.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_request.cpython-310.pyc
ADDED
Binary file (24.4 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_response.cpython-310.pyc
ADDED
Binary file (21.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_routedef.cpython-310.pyc
ADDED
Binary file (7.66 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_runner.cpython-310.pyc
ADDED
Binary file (12.1 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_server.cpython-310.pyc
ADDED
Binary file (3.34 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/__pycache__/web_ws.cpython-310.pyc
ADDED
Binary file (14.8 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_cparser.pxd
ADDED
@@ -0,0 +1,158 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from libc.stdint cimport int32_t, uint8_t, uint16_t, uint64_t
|
2 |
+
|
3 |
+
|
4 |
+
cdef extern from "../vendor/llhttp/build/llhttp.h":
|
5 |
+
|
6 |
+
struct llhttp__internal_s:
|
7 |
+
int32_t _index
|
8 |
+
void* _span_pos0
|
9 |
+
void* _span_cb0
|
10 |
+
int32_t error
|
11 |
+
const char* reason
|
12 |
+
const char* error_pos
|
13 |
+
void* data
|
14 |
+
void* _current
|
15 |
+
uint64_t content_length
|
16 |
+
uint8_t type
|
17 |
+
uint8_t method
|
18 |
+
uint8_t http_major
|
19 |
+
uint8_t http_minor
|
20 |
+
uint8_t header_state
|
21 |
+
uint8_t lenient_flags
|
22 |
+
uint8_t upgrade
|
23 |
+
uint8_t finish
|
24 |
+
uint16_t flags
|
25 |
+
uint16_t status_code
|
26 |
+
void* settings
|
27 |
+
|
28 |
+
ctypedef llhttp__internal_s llhttp__internal_t
|
29 |
+
ctypedef llhttp__internal_t llhttp_t
|
30 |
+
|
31 |
+
ctypedef int (*llhttp_data_cb)(llhttp_t*, const char *at, size_t length) except -1
|
32 |
+
ctypedef int (*llhttp_cb)(llhttp_t*) except -1
|
33 |
+
|
34 |
+
struct llhttp_settings_s:
|
35 |
+
llhttp_cb on_message_begin
|
36 |
+
llhttp_data_cb on_url
|
37 |
+
llhttp_data_cb on_status
|
38 |
+
llhttp_data_cb on_header_field
|
39 |
+
llhttp_data_cb on_header_value
|
40 |
+
llhttp_cb on_headers_complete
|
41 |
+
llhttp_data_cb on_body
|
42 |
+
llhttp_cb on_message_complete
|
43 |
+
llhttp_cb on_chunk_header
|
44 |
+
llhttp_cb on_chunk_complete
|
45 |
+
|
46 |
+
llhttp_cb on_url_complete
|
47 |
+
llhttp_cb on_status_complete
|
48 |
+
llhttp_cb on_header_field_complete
|
49 |
+
llhttp_cb on_header_value_complete
|
50 |
+
|
51 |
+
ctypedef llhttp_settings_s llhttp_settings_t
|
52 |
+
|
53 |
+
enum llhttp_errno:
|
54 |
+
HPE_OK,
|
55 |
+
HPE_INTERNAL,
|
56 |
+
HPE_STRICT,
|
57 |
+
HPE_LF_EXPECTED,
|
58 |
+
HPE_UNEXPECTED_CONTENT_LENGTH,
|
59 |
+
HPE_CLOSED_CONNECTION,
|
60 |
+
HPE_INVALID_METHOD,
|
61 |
+
HPE_INVALID_URL,
|
62 |
+
HPE_INVALID_CONSTANT,
|
63 |
+
HPE_INVALID_VERSION,
|
64 |
+
HPE_INVALID_HEADER_TOKEN,
|
65 |
+
HPE_INVALID_CONTENT_LENGTH,
|
66 |
+
HPE_INVALID_CHUNK_SIZE,
|
67 |
+
HPE_INVALID_STATUS,
|
68 |
+
HPE_INVALID_EOF_STATE,
|
69 |
+
HPE_INVALID_TRANSFER_ENCODING,
|
70 |
+
HPE_CB_MESSAGE_BEGIN,
|
71 |
+
HPE_CB_HEADERS_COMPLETE,
|
72 |
+
HPE_CB_MESSAGE_COMPLETE,
|
73 |
+
HPE_CB_CHUNK_HEADER,
|
74 |
+
HPE_CB_CHUNK_COMPLETE,
|
75 |
+
HPE_PAUSED,
|
76 |
+
HPE_PAUSED_UPGRADE,
|
77 |
+
HPE_USER
|
78 |
+
|
79 |
+
ctypedef llhttp_errno llhttp_errno_t
|
80 |
+
|
81 |
+
enum llhttp_flags:
|
82 |
+
F_CHUNKED,
|
83 |
+
F_CONTENT_LENGTH
|
84 |
+
|
85 |
+
enum llhttp_type:
|
86 |
+
HTTP_REQUEST,
|
87 |
+
HTTP_RESPONSE,
|
88 |
+
HTTP_BOTH
|
89 |
+
|
90 |
+
enum llhttp_method:
|
91 |
+
HTTP_DELETE,
|
92 |
+
HTTP_GET,
|
93 |
+
HTTP_HEAD,
|
94 |
+
HTTP_POST,
|
95 |
+
HTTP_PUT,
|
96 |
+
HTTP_CONNECT,
|
97 |
+
HTTP_OPTIONS,
|
98 |
+
HTTP_TRACE,
|
99 |
+
HTTP_COPY,
|
100 |
+
HTTP_LOCK,
|
101 |
+
HTTP_MKCOL,
|
102 |
+
HTTP_MOVE,
|
103 |
+
HTTP_PROPFIND,
|
104 |
+
HTTP_PROPPATCH,
|
105 |
+
HTTP_SEARCH,
|
106 |
+
HTTP_UNLOCK,
|
107 |
+
HTTP_BIND,
|
108 |
+
HTTP_REBIND,
|
109 |
+
HTTP_UNBIND,
|
110 |
+
HTTP_ACL,
|
111 |
+
HTTP_REPORT,
|
112 |
+
HTTP_MKACTIVITY,
|
113 |
+
HTTP_CHECKOUT,
|
114 |
+
HTTP_MERGE,
|
115 |
+
HTTP_MSEARCH,
|
116 |
+
HTTP_NOTIFY,
|
117 |
+
HTTP_SUBSCRIBE,
|
118 |
+
HTTP_UNSUBSCRIBE,
|
119 |
+
HTTP_PATCH,
|
120 |
+
HTTP_PURGE,
|
121 |
+
HTTP_MKCALENDAR,
|
122 |
+
HTTP_LINK,
|
123 |
+
HTTP_UNLINK,
|
124 |
+
HTTP_SOURCE,
|
125 |
+
HTTP_PRI,
|
126 |
+
HTTP_DESCRIBE,
|
127 |
+
HTTP_ANNOUNCE,
|
128 |
+
HTTP_SETUP,
|
129 |
+
HTTP_PLAY,
|
130 |
+
HTTP_PAUSE,
|
131 |
+
HTTP_TEARDOWN,
|
132 |
+
HTTP_GET_PARAMETER,
|
133 |
+
HTTP_SET_PARAMETER,
|
134 |
+
HTTP_REDIRECT,
|
135 |
+
HTTP_RECORD,
|
136 |
+
HTTP_FLUSH
|
137 |
+
|
138 |
+
ctypedef llhttp_method llhttp_method_t;
|
139 |
+
|
140 |
+
void llhttp_settings_init(llhttp_settings_t* settings)
|
141 |
+
void llhttp_init(llhttp_t* parser, llhttp_type type,
|
142 |
+
const llhttp_settings_t* settings)
|
143 |
+
|
144 |
+
llhttp_errno_t llhttp_execute(llhttp_t* parser, const char* data, size_t len)
|
145 |
+
|
146 |
+
int llhttp_should_keep_alive(const llhttp_t* parser)
|
147 |
+
|
148 |
+
void llhttp_resume_after_upgrade(llhttp_t* parser)
|
149 |
+
|
150 |
+
llhttp_errno_t llhttp_get_errno(const llhttp_t* parser)
|
151 |
+
const char* llhttp_get_error_reason(const llhttp_t* parser)
|
152 |
+
const char* llhttp_get_error_pos(const llhttp_t* parser)
|
153 |
+
|
154 |
+
const char* llhttp_method_name(llhttp_method_t method)
|
155 |
+
|
156 |
+
void llhttp_set_lenient_headers(llhttp_t* parser, int enabled)
|
157 |
+
void llhttp_set_lenient_optional_cr_before_lf(llhttp_t* parser, int enabled)
|
158 |
+
void llhttp_set_lenient_spaces_after_chunk_size(llhttp_t* parser, int enabled)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_find_header.pxd
ADDED
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
1 |
+
cdef extern from "_find_header.h":
|
2 |
+
int find_header(char *, int)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_headers.pxi
ADDED
@@ -0,0 +1,83 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# The file is autogenerated from aiohttp/hdrs.py
|
2 |
+
# Run ./tools/gen.py to update it after the origin changing.
|
3 |
+
|
4 |
+
from . import hdrs
|
5 |
+
cdef tuple headers = (
|
6 |
+
hdrs.ACCEPT,
|
7 |
+
hdrs.ACCEPT_CHARSET,
|
8 |
+
hdrs.ACCEPT_ENCODING,
|
9 |
+
hdrs.ACCEPT_LANGUAGE,
|
10 |
+
hdrs.ACCEPT_RANGES,
|
11 |
+
hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS,
|
12 |
+
hdrs.ACCESS_CONTROL_ALLOW_HEADERS,
|
13 |
+
hdrs.ACCESS_CONTROL_ALLOW_METHODS,
|
14 |
+
hdrs.ACCESS_CONTROL_ALLOW_ORIGIN,
|
15 |
+
hdrs.ACCESS_CONTROL_EXPOSE_HEADERS,
|
16 |
+
hdrs.ACCESS_CONTROL_MAX_AGE,
|
17 |
+
hdrs.ACCESS_CONTROL_REQUEST_HEADERS,
|
18 |
+
hdrs.ACCESS_CONTROL_REQUEST_METHOD,
|
19 |
+
hdrs.AGE,
|
20 |
+
hdrs.ALLOW,
|
21 |
+
hdrs.AUTHORIZATION,
|
22 |
+
hdrs.CACHE_CONTROL,
|
23 |
+
hdrs.CONNECTION,
|
24 |
+
hdrs.CONTENT_DISPOSITION,
|
25 |
+
hdrs.CONTENT_ENCODING,
|
26 |
+
hdrs.CONTENT_LANGUAGE,
|
27 |
+
hdrs.CONTENT_LENGTH,
|
28 |
+
hdrs.CONTENT_LOCATION,
|
29 |
+
hdrs.CONTENT_MD5,
|
30 |
+
hdrs.CONTENT_RANGE,
|
31 |
+
hdrs.CONTENT_TRANSFER_ENCODING,
|
32 |
+
hdrs.CONTENT_TYPE,
|
33 |
+
hdrs.COOKIE,
|
34 |
+
hdrs.DATE,
|
35 |
+
hdrs.DESTINATION,
|
36 |
+
hdrs.DIGEST,
|
37 |
+
hdrs.ETAG,
|
38 |
+
hdrs.EXPECT,
|
39 |
+
hdrs.EXPIRES,
|
40 |
+
hdrs.FORWARDED,
|
41 |
+
hdrs.FROM,
|
42 |
+
hdrs.HOST,
|
43 |
+
hdrs.IF_MATCH,
|
44 |
+
hdrs.IF_MODIFIED_SINCE,
|
45 |
+
hdrs.IF_NONE_MATCH,
|
46 |
+
hdrs.IF_RANGE,
|
47 |
+
hdrs.IF_UNMODIFIED_SINCE,
|
48 |
+
hdrs.KEEP_ALIVE,
|
49 |
+
hdrs.LAST_EVENT_ID,
|
50 |
+
hdrs.LAST_MODIFIED,
|
51 |
+
hdrs.LINK,
|
52 |
+
hdrs.LOCATION,
|
53 |
+
hdrs.MAX_FORWARDS,
|
54 |
+
hdrs.ORIGIN,
|
55 |
+
hdrs.PRAGMA,
|
56 |
+
hdrs.PROXY_AUTHENTICATE,
|
57 |
+
hdrs.PROXY_AUTHORIZATION,
|
58 |
+
hdrs.RANGE,
|
59 |
+
hdrs.REFERER,
|
60 |
+
hdrs.RETRY_AFTER,
|
61 |
+
hdrs.SEC_WEBSOCKET_ACCEPT,
|
62 |
+
hdrs.SEC_WEBSOCKET_EXTENSIONS,
|
63 |
+
hdrs.SEC_WEBSOCKET_KEY,
|
64 |
+
hdrs.SEC_WEBSOCKET_KEY1,
|
65 |
+
hdrs.SEC_WEBSOCKET_PROTOCOL,
|
66 |
+
hdrs.SEC_WEBSOCKET_VERSION,
|
67 |
+
hdrs.SERVER,
|
68 |
+
hdrs.SET_COOKIE,
|
69 |
+
hdrs.TE,
|
70 |
+
hdrs.TRAILER,
|
71 |
+
hdrs.TRANSFER_ENCODING,
|
72 |
+
hdrs.URI,
|
73 |
+
hdrs.UPGRADE,
|
74 |
+
hdrs.USER_AGENT,
|
75 |
+
hdrs.VARY,
|
76 |
+
hdrs.VIA,
|
77 |
+
hdrs.WWW_AUTHENTICATE,
|
78 |
+
hdrs.WANT_DIGEST,
|
79 |
+
hdrs.WARNING,
|
80 |
+
hdrs.X_FORWARDED_FOR,
|
81 |
+
hdrs.X_FORWARDED_HOST,
|
82 |
+
hdrs.X_FORWARDED_PROTO,
|
83 |
+
)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_helpers.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (509 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_helpers.pyi
ADDED
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from typing import Any
|
2 |
+
|
3 |
+
class reify:
|
4 |
+
def __init__(self, wrapped: Any) -> None: ...
|
5 |
+
def __get__(self, inst: Any, owner: Any) -> Any: ...
|
6 |
+
def __set__(self, inst: Any, value: Any) -> None: ...
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_helpers.pyx
ADDED
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
cdef class reify:
|
2 |
+
"""Use as a class method decorator. It operates almost exactly like
|
3 |
+
the Python `@property` decorator, but it puts the result of the
|
4 |
+
method it decorates into the instance dict after the first call,
|
5 |
+
effectively replacing the function it decorates with an instance
|
6 |
+
variable. It is, in Python parlance, a data descriptor.
|
7 |
+
|
8 |
+
"""
|
9 |
+
|
10 |
+
cdef object wrapped
|
11 |
+
cdef object name
|
12 |
+
|
13 |
+
def __init__(self, wrapped):
|
14 |
+
self.wrapped = wrapped
|
15 |
+
self.name = wrapped.__name__
|
16 |
+
|
17 |
+
@property
|
18 |
+
def __doc__(self):
|
19 |
+
return self.wrapped.__doc__
|
20 |
+
|
21 |
+
def __get__(self, inst, owner):
|
22 |
+
try:
|
23 |
+
try:
|
24 |
+
return inst._cache[self.name]
|
25 |
+
except KeyError:
|
26 |
+
val = self.wrapped(inst)
|
27 |
+
inst._cache[self.name] = val
|
28 |
+
return val
|
29 |
+
except AttributeError:
|
30 |
+
if inst is None:
|
31 |
+
return self
|
32 |
+
raise
|
33 |
+
|
34 |
+
def __set__(self, inst, value):
|
35 |
+
raise AttributeError("reified property is read-only")
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_http_parser.pyx
ADDED
@@ -0,0 +1,838 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#cython: language_level=3
|
2 |
+
#
|
3 |
+
# Based on https://github.com/MagicStack/httptools
|
4 |
+
#
|
5 |
+
|
6 |
+
from cpython cimport (
|
7 |
+
Py_buffer,
|
8 |
+
PyBUF_SIMPLE,
|
9 |
+
PyBuffer_Release,
|
10 |
+
PyBytes_AsString,
|
11 |
+
PyBytes_AsStringAndSize,
|
12 |
+
PyObject_GetBuffer,
|
13 |
+
)
|
14 |
+
from cpython.mem cimport PyMem_Free, PyMem_Malloc
|
15 |
+
from libc.limits cimport ULLONG_MAX
|
16 |
+
from libc.string cimport memcpy
|
17 |
+
|
18 |
+
from multidict import CIMultiDict as _CIMultiDict, CIMultiDictProxy as _CIMultiDictProxy
|
19 |
+
from yarl import URL as _URL
|
20 |
+
|
21 |
+
from aiohttp import hdrs
|
22 |
+
from aiohttp.helpers import DEBUG, set_exception
|
23 |
+
|
24 |
+
from .http_exceptions import (
|
25 |
+
BadHttpMessage,
|
26 |
+
BadStatusLine,
|
27 |
+
ContentLengthError,
|
28 |
+
InvalidHeader,
|
29 |
+
InvalidURLError,
|
30 |
+
LineTooLong,
|
31 |
+
PayloadEncodingError,
|
32 |
+
TransferEncodingError,
|
33 |
+
)
|
34 |
+
from .http_parser import DeflateBuffer as _DeflateBuffer
|
35 |
+
from .http_writer import (
|
36 |
+
HttpVersion as _HttpVersion,
|
37 |
+
HttpVersion10 as _HttpVersion10,
|
38 |
+
HttpVersion11 as _HttpVersion11,
|
39 |
+
)
|
40 |
+
from .streams import EMPTY_PAYLOAD as _EMPTY_PAYLOAD, StreamReader as _StreamReader
|
41 |
+
|
42 |
+
cimport cython
|
43 |
+
|
44 |
+
from aiohttp cimport _cparser as cparser
|
45 |
+
|
46 |
+
include "_headers.pxi"
|
47 |
+
|
48 |
+
from aiohttp cimport _find_header
|
49 |
+
|
50 |
+
DEF DEFAULT_FREELIST_SIZE = 250  # freelist slots for the message classes below

# C-level bytearray helpers (faster than going through the Python API).
cdef extern from "Python.h":
    int PyByteArray_Resize(object, Py_ssize_t) except -1
    Py_ssize_t PyByteArray_Size(object) except -1
    char* PyByteArray_AsString(object)

__all__ = ('HttpRequestParser', 'HttpResponseParser',
           'RawRequestMessage', 'RawResponseMessage')

# Cache frequently-used Python objects as C globals so the hot parsing
# path skips module-dict lookups.
cdef object URL = _URL
cdef object URL_build = URL.build
cdef object CIMultiDict = _CIMultiDict
cdef object CIMultiDictProxy = _CIMultiDictProxy
cdef object HttpVersion = _HttpVersion
cdef object HttpVersion10 = _HttpVersion10
cdef object HttpVersion11 = _HttpVersion11
cdef object SEC_WEBSOCKET_KEY1 = hdrs.SEC_WEBSOCKET_KEY1
cdef object CONTENT_ENCODING = hdrs.CONTENT_ENCODING
cdef object EMPTY_PAYLOAD = _EMPTY_PAYLOAD
cdef object StreamReader = _StreamReader
cdef object DeflateBuffer = _DeflateBuffer
|
72 |
+
|
73 |
+
|
74 |
+
cdef inline object extend(object buf, const char* at, size_t length):
    """Append *length* bytes starting at *at* to the bytearray *buf*.

    Resizes the bytearray in place and memcpy()s the new tail, avoiding
    an intermediate bytes object.
    """
    cdef Py_ssize_t s
    cdef char* ptr
    s = PyByteArray_Size(buf)
    PyByteArray_Resize(buf, s + length)
    ptr = PyByteArray_AsString(buf)
    memcpy(ptr + s, at, length)
|
81 |
+
|
82 |
+
|
83 |
+
DEF METHODS_COUNT = 46

# Pre-decode every llhttp method name once at module import time.
cdef list _http_method = []

for i in range(METHODS_COUNT):
    _http_method.append(
        cparser.llhttp_method_name(<cparser.llhttp_method_t> i).decode('ascii'))


cdef inline str http_method_str(int i):
    """Return the HTTP method name for llhttp method id *i*.

    Returns "<unknown>" for any id outside the table.  The lower bound is
    checked too: a negative *i* would otherwise silently index from the
    end of the list instead of reporting an unknown method.
    """
    if 0 <= i < METHODS_COUNT:
        return <str>_http_method[i]
    else:
        return "<unknown>"
|
97 |
+
|
98 |
+
cdef inline object find_header(bytes raw_header):
    """Map a raw header name to its canonical interned form.

    Well-known names resolve to the shared ``headers`` table from
    _headers.pxi; anything else is decoded from the raw bytes.
    """
    cdef char *cbuf
    cdef Py_ssize_t buf_len
    cdef int pos
    PyBytes_AsStringAndSize(raw_header, &cbuf, &buf_len)
    pos = _find_header.find_header(cbuf, buf_len)
    if pos != -1:
        return headers[pos]
    return raw_header.decode('utf-8', 'surrogateescape')
|
107 |
+
|
108 |
+
|
109 |
+
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawRequestMessage:
    """Record describing a parsed HTTP request head (start line + headers)."""
    cdef readonly str method
    cdef readonly str path
    cdef readonly object version  # HttpVersion
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked
    cdef readonly object url  # yarl.URL

    def __init__(self, method, path, version, headers, raw_headers,
                 should_close, compression, upgrade, chunked, url):
        self.method = method
        self.path = path
        self.version = version
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked
        self.url = url

    def __repr__(self):
        fields = (
            ("method", self.method),
            ("path", self.path),
            ("version", self.version),
            ("headers", self.headers),
            ("raw_headers", self.raw_headers),
            ("should_close", self.should_close),
            ("compression", self.compression),
            ("upgrade", self.upgrade),
            ("chunked", self.chunked),
            ("url", self.url),
        )
        body = ', '.join(f"{key}={value!r}" for key, value in fields)
        return '<RawRequestMessage(' + body + ')>'

    def _replace(self, **dct):
        """Return a copy of this message with the given attributes replaced."""
        cdef RawRequestMessage ret
        ret = _new_request_message(self.method,
                                   self.path,
                                   self.version,
                                   self.headers,
                                   self.raw_headers,
                                   self.should_close,
                                   self.compression,
                                   self.upgrade,
                                   self.chunked,
                                   self.url)
        # Attributes are readonly from Python but writable at the C level.
        ret.method = dct.get("method", ret.method)
        ret.path = dct.get("path", ret.path)
        ret.version = dct.get("version", ret.version)
        ret.headers = dct.get("headers", ret.headers)
        ret.raw_headers = dct.get("raw_headers", ret.raw_headers)
        ret.should_close = dct.get("should_close", ret.should_close)
        ret.compression = dct.get("compression", ret.compression)
        ret.upgrade = dct.get("upgrade", ret.upgrade)
        ret.chunked = dct.get("chunked", ret.chunked)
        ret.url = dct.get("url", ret.url)
        return ret
|
183 |
+
|
184 |
+
cdef _new_request_message(str method,
                          str path,
                          object version,
                          object headers,
                          object raw_headers,
                          bint should_close,
                          object compression,
                          bint upgrade,
                          bint chunked,
                          object url):
    """Fast constructor for RawRequestMessage.

    Bypasses __init__ via __new__ so the hot parsing path avoids Python
    argument packing/unpacking.
    """
    cdef RawRequestMessage ret
    ret = RawRequestMessage.__new__(RawRequestMessage)
    ret.method = method
    ret.path = path
    ret.version = version
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    ret.url = url
    return ret
|
207 |
+
|
208 |
+
|
209 |
+
@cython.freelist(DEFAULT_FREELIST_SIZE)
cdef class RawResponseMessage:
    """Record describing a parsed HTTP response head (status line + headers)."""
    cdef readonly object version  # HttpVersion
    cdef readonly int code
    cdef readonly str reason
    cdef readonly object headers  # CIMultiDict
    cdef readonly object raw_headers  # tuple
    cdef readonly object should_close
    cdef readonly object compression
    cdef readonly object upgrade
    cdef readonly object chunked

    def __init__(self, version, code, reason, headers, raw_headers,
                 should_close, compression, upgrade, chunked):
        self.version = version
        self.code = code
        self.reason = reason
        self.headers = headers
        self.raw_headers = raw_headers
        self.should_close = should_close
        self.compression = compression
        self.upgrade = upgrade
        self.chunked = chunked

    def __repr__(self):
        fields = (
            ("version", self.version),
            ("code", self.code),
            ("reason", self.reason),
            ("headers", self.headers),
            ("raw_headers", self.raw_headers),
            ("should_close", self.should_close),
            ("compression", self.compression),
            ("upgrade", self.upgrade),
            ("chunked", self.chunked),
        )
        body = ', '.join(f"{key}={value!r}" for key, value in fields)
        return '<RawResponseMessage(' + body + ')>'
|
246 |
+
|
247 |
+
|
248 |
+
cdef _new_response_message(object version,
                           int code,
                           str reason,
                           object headers,
                           object raw_headers,
                           bint should_close,
                           object compression,
                           bint upgrade,
                           bint chunked):
    """Fast constructor for RawResponseMessage.

    Bypasses __init__ via __new__ so the hot parsing path avoids Python
    argument packing/unpacking.
    """
    cdef RawResponseMessage ret
    ret = RawResponseMessage.__new__(RawResponseMessage)
    ret.version = version
    ret.code = code
    ret.reason = reason
    ret.headers = headers
    ret.raw_headers = raw_headers
    ret.should_close = should_close
    ret.compression = compression
    ret.upgrade = upgrade
    ret.chunked = chunked
    return ret
|
269 |
+
|
270 |
+
|
271 |
+
@cython.internal
cdef class HttpParser:
    """Base class for the C-accelerated HTTP request/response parsers.

    Owns a heap-allocated llhttp parser + settings pair and turns llhttp's
    C callbacks into a list of ``(message, payload)`` tuples consumed by
    the caller of :meth:`feed_data`.
    """

    cdef:
        cparser.llhttp_t* _cparser              # llhttp parser state (owned)
        cparser.llhttp_settings_t* _csettings   # llhttp callback table (owned)

        bytearray _raw_name      # header name accumulated across callbacks
        bytearray _raw_value     # header value accumulated across callbacks
        bint _has_value          # True once at least one value chunk arrived

        object _protocol
        object _loop
        object _timer

        size_t _max_line_size
        size_t _max_field_size
        size_t _max_headers
        bint _response_with_body
        bint _read_until_eof

        bint _started            # True while a message is being parsed
        object _url
        bytearray _buf           # URL / reason-phrase accumulator
        str _path
        str _reason
        object _headers          # CIMultiDict under construction
        list _raw_headers        # [(bytes name, bytes value), ...]
        bint _upgraded
        list _messages           # completed (msg, payload) pairs
        object _payload
        bint _payload_error
        object _payload_exception
        object _last_error       # exception captured inside a C callback
        bint _auto_decompress
        int _limit

        str _content_encoding    # last seen Content-Encoding header value

        Py_buffer py_buf         # buffer view of the chunk being fed

    def __cinit__(self):
        # Allocate the llhttp structs on the C heap; freed in __dealloc__.
        # If the second allocation fails, the first is still released by
        # __dealloc__ (uninitialized pointer fields start out NULL, and
        # PyMem_Free(NULL) is a no-op).
        self._cparser = <cparser.llhttp_t*> \
                        PyMem_Malloc(sizeof(cparser.llhttp_t))
        if self._cparser is NULL:
            raise MemoryError()

        self._csettings = <cparser.llhttp_settings_t*> \
                          PyMem_Malloc(sizeof(cparser.llhttp_settings_t))
        if self._csettings is NULL:
            raise MemoryError()

    def __dealloc__(self):
        PyMem_Free(self._cparser)
        PyMem_Free(self._csettings)

    cdef _init(
        self, cparser.llhttp_type mode,
        object protocol, object loop, int limit,
        object timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        """Initialize parser state and register the llhttp callbacks."""
        cparser.llhttp_settings_init(self._csettings)
        cparser.llhttp_init(self._cparser, mode, self._csettings)
        # Stash a back-pointer so the C callbacks can reach this object.
        self._cparser.data = <void*>self
        self._cparser.content_length = 0

        self._protocol = protocol
        self._loop = loop
        self._timer = timer

        self._buf = bytearray()
        self._payload = None
        self._payload_error = 0
        self._payload_exception = payload_exception
        self._messages = []

        self._raw_name = bytearray()
        self._raw_value = bytearray()
        self._has_value = False

        self._max_line_size = max_line_size
        self._max_headers = max_headers
        self._max_field_size = max_field_size
        self._response_with_body = response_with_body
        self._read_until_eof = read_until_eof
        self._upgraded = False
        self._auto_decompress = auto_decompress
        self._content_encoding = None

        self._csettings.on_url = cb_on_url
        self._csettings.on_status = cb_on_status
        self._csettings.on_header_field = cb_on_header_field
        self._csettings.on_header_value = cb_on_header_value
        self._csettings.on_headers_complete = cb_on_headers_complete
        self._csettings.on_body = cb_on_body
        self._csettings.on_message_begin = cb_on_message_begin
        self._csettings.on_message_complete = cb_on_message_complete
        self._csettings.on_chunk_header = cb_on_chunk_header
        self._csettings.on_chunk_complete = cb_on_chunk_complete

        self._last_error = None
        self._limit = limit

    cdef _process_header(self):
        """Flush the accumulated raw name/value pair into the header dicts."""
        if self._raw_name:
            raw_name = bytes(self._raw_name)
            raw_value = bytes(self._raw_value)

            name = find_header(raw_name)
            value = raw_value.decode('utf-8', 'surrogateescape')

            self._headers.add(name, value)

            # find_header returns the interned constant, so identity
            # comparison against CONTENT_ENCODING is valid here.
            if name is CONTENT_ENCODING:
                self._content_encoding = value

            PyByteArray_Resize(self._raw_name, 0)
            PyByteArray_Resize(self._raw_value, 0)
            self._has_value = False
            self._raw_headers.append((raw_name, raw_value))

    cdef _on_header_field(self, char* at, size_t length):
        """Append a header-name fragment; flush the previous pair first."""
        cdef Py_ssize_t size
        cdef char *buf
        if self._has_value:
            # A new field starting after a value means the previous
            # name/value pair is complete.
            self._process_header()

        size = PyByteArray_Size(self._raw_name)
        PyByteArray_Resize(self._raw_name, size + length)
        buf = PyByteArray_AsString(self._raw_name)
        memcpy(buf + size, at, length)

    cdef _on_header_value(self, char* at, size_t length):
        """Append a header-value fragment (values may span callbacks)."""
        cdef Py_ssize_t size
        cdef char *buf

        size = PyByteArray_Size(self._raw_value)
        PyByteArray_Resize(self._raw_value, size + length)
        buf = PyByteArray_AsString(self._raw_value)
        memcpy(buf + size, at, length)
        self._has_value = True

    cdef _on_headers_complete(self):
        """Build the message object and decide how the body is delivered."""
        self._process_header()

        method = http_method_str(self._cparser.method)
        should_close = not cparser.llhttp_should_keep_alive(self._cparser)
        upgrade = self._cparser.upgrade
        chunked = self._cparser.flags & cparser.F_CHUNKED

        raw_headers = tuple(self._raw_headers)
        headers = CIMultiDictProxy(self._headers)

        if upgrade or self._cparser.method == cparser.HTTP_CONNECT:
            self._upgraded = True

        # do not support old websocket spec
        if SEC_WEBSOCKET_KEY1 in headers:
            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

        # Only pass through encodings DeflateBuffer knows how to decode.
        encoding = None
        enc = self._content_encoding
        if enc is not None:
            self._content_encoding = None
            enc = enc.lower()
            if enc in ('gzip', 'deflate', 'br'):
                encoding = enc

        if self._cparser.type == cparser.HTTP_REQUEST:
            msg = _new_request_message(
                method, self._path,
                self.http_version(), headers, raw_headers,
                should_close, encoding, upgrade, chunked, self._url)
        else:
            msg = _new_response_message(
                self.http_version(), self._cparser.status_code, self._reason,
                headers, raw_headers, should_close, encoding,
                upgrade, chunked)

        # A real body is expected when a content length is declared, the
        # transfer is chunked, this is a CONNECT tunnel, or we were asked
        # to read until EOF for a response without a declared length.
        if (
            ULLONG_MAX > self._cparser.content_length > 0 or chunked or
            self._cparser.method == cparser.HTTP_CONNECT or
            (self._cparser.status_code >= 199 and
             self._cparser.content_length == 0 and
             self._read_until_eof)
        ):
            payload = StreamReader(
                self._protocol, timer=self._timer, loop=self._loop,
                limit=self._limit)
        else:
            payload = EMPTY_PAYLOAD

        self._payload = payload
        if encoding is not None and self._auto_decompress:
            self._payload = DeflateBuffer(payload, encoding)

        # Callers that skip the body still get an empty payload object,
        # while the parser keeps feeding the real one internally.
        if not self._response_with_body:
            payload = EMPTY_PAYLOAD

        self._messages.append((msg, payload))

    cdef _on_message_complete(self):
        self._payload.feed_eof()
        self._payload = None

    cdef _on_chunk_header(self):
        self._payload.begin_http_chunk_receiving()

    cdef _on_chunk_complete(self):
        self._payload.end_http_chunk_receiving()

    cdef object _on_status_complete(self):
        # Overridden by the request/response subclasses.
        pass

    cdef inline http_version(self):
        """Return the HttpVersion, reusing the shared 1.0/1.1 singletons."""
        cdef cparser.llhttp_t* parser = self._cparser

        if parser.http_major == 1:
            if parser.http_minor == 0:
                return HttpVersion10
            elif parser.http_minor == 1:
                return HttpVersion11

        return HttpVersion(parser.http_major, parser.http_minor)

    ### Public API ###

    def feed_eof(self):
        """Signal end of input; raise if a message body is incomplete."""
        cdef bytes desc

        if self._payload is not None:
            if self._cparser.flags & cparser.F_CHUNKED:
                raise TransferEncodingError(
                    "Not enough data for satisfy transfer length header.")
            elif self._cparser.flags & cparser.F_CONTENT_LENGTH:
                raise ContentLengthError(
                    "Not enough data for satisfy content length header.")
            elif cparser.llhttp_get_errno(self._cparser) != cparser.HPE_OK:
                desc = cparser.llhttp_get_error_reason(self._cparser)
                raise PayloadEncodingError(desc.decode('latin-1'))
            else:
                self._payload.feed_eof()
        elif self._started:
            # EOF in the middle of the head: finalize what we have so the
            # last (partial) message is still observable by the caller.
            self._on_headers_complete()
            if self._messages:
                return self._messages[-1][0]

    def feed_data(self, data):
        """Run llhttp over *data*; return (messages, upgraded, tail).

        *tail* is the unparsed remainder after an upgrade/CONNECT switch,
        otherwise b''.
        """
        cdef:
            size_t data_len
            size_t nb
            cdef cparser.llhttp_errno_t errno

        PyObject_GetBuffer(data, &self.py_buf, PyBUF_SIMPLE)
        data_len = <size_t>self.py_buf.len

        errno = cparser.llhttp_execute(
            self._cparser,
            <char*>self.py_buf.buf,
            data_len)

        if errno is cparser.HPE_PAUSED_UPGRADE:
            cparser.llhttp_resume_after_upgrade(self._cparser)

            # Bytes consumed up to the pause point; the rest is the tail
            # handed back to the caller below.
            nb = cparser.llhttp_get_error_pos(self._cparser) - <char*>self.py_buf.buf

        PyBuffer_Release(&self.py_buf)

        if errno not in (cparser.HPE_OK, cparser.HPE_PAUSED_UPGRADE):
            if self._payload_error == 0:
                if self._last_error is not None:
                    # An exception raised inside a C callback takes
                    # precedence over llhttp's own error reporting.
                    ex = self._last_error
                    self._last_error = None
                else:
                    # Build a one-line excerpt around the offending byte
                    # with a caret pointing at it.
                    after = cparser.llhttp_get_error_pos(self._cparser)
                    # NOTE(review): self.py_buf.buf is read here after
                    # PyBuffer_Release above; the struct field still holds
                    # the old pointer value and *data* is still alive, but
                    # confirm this ordering is intentional.
                    before = data[:after - <char*>self.py_buf.buf]
                    after_b = after.split(b"\r\n", 1)[0]
                    before = before.rsplit(b"\r\n", 1)[-1]
                    data = before + after_b
                    pointer = " " * (len(repr(before))-1) + "^"
                    ex = parser_error_from_errno(self._cparser, data, pointer)
                self._payload = None
                raise ex

        if self._messages:
            messages = self._messages
            self._messages = []
        else:
            messages = ()

        if self._upgraded:
            # nb was set in the HPE_PAUSED_UPGRADE branch above, which is
            # how llhttp reports a successful upgrade pause.
            return messages, True, data[nb:]
        else:
            return messages, False, b''

    def set_upgraded(self, val):
        self._upgraded = val
|
572 |
+
|
573 |
+
|
574 |
+
cdef class HttpRequestParser(HttpParser):
    """Parser for incoming HTTP requests (llhttp HTTP_REQUEST mode)."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True,
    ):
        self._init(cparser.HTTP_REQUEST, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)

    cdef object _on_status_complete(self):
        """Parse the accumulated request target into a yarl.URL.

        Handles the three RFC 7230 request-target forms; the query and
        fragment are split manually to avoid re-encoding an already
        encoded target.
        """
        cdef int idx1, idx2
        if not self._buf:
            return
        self._path = self._buf.decode('utf-8', 'surrogateescape')
        try:
            idx3 = len(self._path)
            if self._cparser.method == cparser.HTTP_CONNECT:
                # authority-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
                self._url = URL.build(authority=self._path, encoded=True)
            elif idx3 > 1 and self._path[0] == '/':
                # origin-form,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
                idx1 = self._path.find("?")
                if idx1 == -1:
                    query = ""
                    idx2 = self._path.find("#")
                    if idx2 == -1:
                        path = self._path
                        fragment = ""
                    else:
                        path = self._path[0: idx2]
                        fragment = self._path[idx2+1:]

                else:
                    path = self._path[0:idx1]
                    idx1 += 1
                    idx2 = self._path.find("#", idx1+1)
                    if idx2 == -1:
                        query = self._path[idx1:]
                        fragment = ""
                    else:
                        query = self._path[idx1: idx2]
                        fragment = self._path[idx2+1:]

                self._url = URL.build(
                    path=path,
                    query_string=query,
                    fragment=fragment,
                    encoded=True,
                )
            else:
                # absolute-form for proxy maybe,
                # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
                self._url = URL(self._path, encoded=True)
        finally:
            # Always reset the accumulator, even if URL parsing failed.
            PyByteArray_Resize(self._buf, 0)
|
636 |
+
|
637 |
+
|
638 |
+
cdef class HttpResponseParser(HttpParser):
    """Parser for incoming HTTP responses (llhttp HTTP_RESPONSE mode)."""

    def __init__(
        self, protocol, loop, int limit, timer=None,
        size_t max_line_size=8190, size_t max_headers=32768,
        size_t max_field_size=8190, payload_exception=None,
        bint response_with_body=True, bint read_until_eof=False,
        bint auto_decompress=True
    ):
        self._init(cparser.HTTP_RESPONSE, protocol, loop, limit, timer,
                   max_line_size, max_headers, max_field_size,
                   payload_exception, response_with_body, read_until_eof,
                   auto_decompress)
        # Use strict parsing on dev mode, so users are warned about broken servers.
        if not DEBUG:
            cparser.llhttp_set_lenient_headers(self._cparser, 1)
            cparser.llhttp_set_lenient_optional_cr_before_lf(self._cparser, 1)
            cparser.llhttp_set_lenient_spaces_after_chunk_size(self._cparser, 1)

    cdef object _on_status_complete(self):
        """Capture the accumulated reason phrase, defaulting to ''."""
        if self._buf:
            self._reason = self._buf.decode('utf-8', 'surrogateescape')
            PyByteArray_Resize(self._buf, 0)
        else:
            self._reason = self._reason or ''
|
663 |
+
|
664 |
+
cdef int cb_on_message_begin(cparser.llhttp_t* parser) except -1:
    """llhttp callback: reset per-message state for the next message."""
    cdef HttpParser pyparser = <HttpParser>parser.data

    pyparser._started = True
    pyparser._headers = CIMultiDict()
    pyparser._raw_headers = []
    PyByteArray_Resize(pyparser._buf, 0)
    pyparser._path = None
    pyparser._reason = None
    return 0
|
674 |
+
|
675 |
+
|
676 |
+
cdef int cb_on_url(cparser.llhttp_t* parser,
                   const char *at, size_t length) except -1:
    """llhttp callback: accumulate a fragment of the request target.

    Python exceptions cannot propagate through llhttp's C frames, so
    they are stashed in ``_last_error`` and re-raised from feed_data().
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
689 |
+
|
690 |
+
|
691 |
+
cdef int cb_on_status(cparser.llhttp_t* parser,
                      const char *at, size_t length) except -1:
    """llhttp callback: accumulate a fragment of the reason phrase.

    Mirrors cb_on_url: exceptions are stashed in ``_last_error`` and
    re-raised from feed_data().  (Removed the unused local
    ``cdef str reason`` — it was never assigned or read.)
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        if length > pyparser._max_line_size:
            raise LineTooLong(
                'Status line is too long', pyparser._max_line_size, length)
        extend(pyparser._buf, at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
705 |
+
|
706 |
+
|
707 |
+
cdef int cb_on_header_field(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp callback: accumulate a header-name fragment.

    The first header field also finalizes the status/request line via
    _on_status_complete().
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        pyparser._on_status_complete()
        # Enforce the limit on the total accumulated name length.
        size = len(pyparser._raw_name) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header name is too long', pyparser._max_field_size, size)
        pyparser._on_header_field(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
723 |
+
|
724 |
+
|
725 |
+
cdef int cb_on_header_value(cparser.llhttp_t* parser,
                            const char *at, size_t length) except -1:
    """llhttp callback: accumulate a header-value fragment."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef Py_ssize_t size
    try:
        # Enforce the limit on the total accumulated value length.
        size = len(pyparser._raw_value) + length
        if size > pyparser._max_field_size:
            raise LineTooLong(
                'Header value is too long', pyparser._max_field_size, size)
        pyparser._on_header_value(at, length)
    except BaseException as ex:
        pyparser._last_error = ex
        return -1
    else:
        return 0
|
740 |
+
|
741 |
+
|
742 |
+
cdef int cb_on_headers_complete(cparser.llhttp_t* parser) except -1:
    """llhttp callback: finalize the message head.

    Returning 2 tells llhttp to skip body parsing and pause (used for
    upgrades and CONNECT tunnels).
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_status_complete()
        pyparser._on_headers_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        if (
            pyparser._cparser.upgrade or
            pyparser._cparser.method == cparser.HTTP_CONNECT
        ):
            return 2
        else:
            return 0
|
758 |
+
|
759 |
+
|
760 |
+
cdef int cb_on_body(cparser.llhttp_t* parser,
                    const char *at, size_t length) except -1:
    """llhttp callback: feed a body chunk into the current payload.

    On failure the (possibly wrapped) exception is attached to the
    payload and the parser is marked as having a payload error.
    """
    cdef HttpParser pyparser = <HttpParser>parser.data
    cdef bytes body = at[:length]
    try:
        pyparser._payload.feed_data(body, length)
    except BaseException as underlying_exc:
        reraised_exc = underlying_exc
        # Optionally wrap the error in the caller-provided exception type.
        if pyparser._payload_exception is not None:
            reraised_exc = pyparser._payload_exception(str(underlying_exc))

        set_exception(pyparser._payload, reraised_exc, underlying_exc)

        pyparser._payload_error = 1
        return -1
    else:
        return 0
|
777 |
+
|
778 |
+
|
779 |
+
cdef int cb_on_message_complete(cparser.llhttp_t* parser) except -1:
    """llhttp callback: close out the current message and its payload."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._started = False
        pyparser._on_message_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0
|
789 |
+
|
790 |
+
|
791 |
+
cdef int cb_on_chunk_header(cparser.llhttp_t* parser) except -1:
    """llhttp callback: a new chunked-transfer chunk is starting."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_header()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0
|
800 |
+
|
801 |
+
|
802 |
+
cdef int cb_on_chunk_complete(cparser.llhttp_t* parser) except -1:
    """llhttp callback: the current chunked-transfer chunk finished."""
    cdef HttpParser pyparser = <HttpParser>parser.data
    try:
        pyparser._on_chunk_complete()
    except BaseException as exc:
        pyparser._last_error = exc
        return -1
    else:
        return 0
|
811 |
+
|
812 |
+
|
813 |
+
cdef parser_error_from_errno(cparser.llhttp_t* parser, data, pointer):
    """Translate the parser's llhttp errno into an aiohttp exception.

    *data* and *pointer* form a two-line caret diagram pointing at the
    offending byte in the input excerpt built by feed_data().
    """
    cdef cparser.llhttp_errno_t errno = cparser.llhttp_get_errno(parser)
    cdef bytes desc = cparser.llhttp_get_error_reason(parser)

    err_msg = "{}:\n\n  {!r}\n  {}".format(desc.decode("latin-1"), data, pointer)

    # Structural errors in the message framing or header syntax.
    if errno in {cparser.HPE_CB_MESSAGE_BEGIN,
                 cparser.HPE_CB_HEADERS_COMPLETE,
                 cparser.HPE_CB_MESSAGE_COMPLETE,
                 cparser.HPE_CB_CHUNK_HEADER,
                 cparser.HPE_CB_CHUNK_COMPLETE,
                 cparser.HPE_INVALID_CONSTANT,
                 cparser.HPE_INVALID_HEADER_TOKEN,
                 cparser.HPE_INVALID_CONTENT_LENGTH,
                 cparser.HPE_INVALID_CHUNK_SIZE,
                 cparser.HPE_INVALID_EOF_STATE,
                 cparser.HPE_INVALID_TRANSFER_ENCODING}:
        return BadHttpMessage(err_msg)
    # Errors in the request/status line itself.
    elif errno in {cparser.HPE_INVALID_STATUS,
                   cparser.HPE_INVALID_METHOD,
                   cparser.HPE_INVALID_VERSION}:
        return BadStatusLine(error=err_msg)
    elif errno == cparser.HPE_INVALID_URL:
        return InvalidURLError(err_msg)

    # Fallback for any other llhttp error code.
    return BadHttpMessage(err_msg)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_http_writer.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (459 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_http_writer.pyx
ADDED
@@ -0,0 +1,163 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from cpython.bytes cimport PyBytes_FromStringAndSize
|
2 |
+
from cpython.exc cimport PyErr_NoMemory
|
3 |
+
from cpython.mem cimport PyMem_Free, PyMem_Malloc, PyMem_Realloc
|
4 |
+
from cpython.object cimport PyObject_Str
|
5 |
+
from libc.stdint cimport uint8_t, uint64_t
|
6 |
+
from libc.string cimport memcpy
|
7 |
+
|
8 |
+
from multidict import istr
|
9 |
+
|
10 |
+
DEF BUF_SIZE = 16 * 1024  # 16KiB
# Shared static scratch buffer; each writer starts here and moves to the
# heap only when it overflows (see _write_byte below).
cdef char BUFFER[BUF_SIZE]

cdef object _istr = istr  # cache multidict.istr as a C global
|
14 |
+
|
15 |
+
|
16 |
+
# ----------------- writer ---------------------------
|
17 |
+
|
18 |
+
# Growable byte buffer used by the header serializer.
cdef struct Writer:
    char *buf          # current storage: the static BUFFER or a heap block
    Py_ssize_t size    # capacity in bytes
    Py_ssize_t pos     # number of bytes written so far
|
22 |
+
|
23 |
+
|
24 |
+
cdef inline void _init_writer(Writer* writer):
    # Start on the shared static buffer; _write_byte() migrates to the
    # heap only if the output outgrows BUF_SIZE.
    writer.buf = &BUFFER[0]
    writer.size = BUF_SIZE
    writer.pos = 0
|
28 |
+
|
29 |
+
|
30 |
+
cdef inline void _release_writer(Writer* writer):
    # Free only heap memory; the static BUFFER must never be handed to
    # PyMem_Free().
    if writer.buf != BUFFER:
        PyMem_Free(writer.buf)
|
33 |
+
|
34 |
+
|
35 |
+
cdef inline int _write_byte(Writer* writer, uint8_t ch):
    # Append one byte, growing the buffer when full.
    # Returns 0 on success, -1 with the MemoryError indicator set on OOM.
    cdef char * buf
    cdef Py_ssize_t size

    if writer.pos == writer.size:
        # reallocate: grow by one BUF_SIZE increment
        size = writer.size + BUF_SIZE
        if writer.buf == BUFFER:
            # First growth: the static buffer cannot be realloc'ed, so
            # allocate heap memory and copy the bytes written so far.
            buf = <char*>PyMem_Malloc(size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
            memcpy(buf, writer.buf, writer.size)
        else:
            buf = <char*>PyMem_Realloc(writer.buf, size)
            if buf == NULL:
                PyErr_NoMemory()
                return -1
        writer.buf = buf
        writer.size = size
    writer.buf[writer.pos] = <char>ch
    writer.pos += 1
    return 0
|
58 |
+
|
59 |
+
|
60 |
+
cdef inline int _write_utf8(Writer* writer, Py_UCS4 symbol):
    # Append *symbol* as UTF-8. Returns 0 on success, -1 on OOM
    # (propagated from _write_byte).
    cdef uint64_t utf = <uint64_t> symbol

    if utf < 0x80:
        # 1-byte sequence (ASCII)
        return _write_byte(writer, <uint8_t>utf)
    elif utf < 0x800:
        # 2-byte sequence
        if _write_byte(writer, <uint8_t>(0xc0 | (utf >> 6))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif 0xD800 <= utf <= 0xDFFF:
        # surrogate code point: not encodable as UTF-8, silently skipped
        return 0
    elif utf < 0x10000:
        # 3-byte sequence
        if _write_byte(writer, <uint8_t>(0xe0 | (utf >> 12))) < 0:
            return -1
        if _write_byte(writer, <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
    elif utf > 0x10FFFF:
        # beyond the Unicode range: silently skipped
        return 0
    else:
        # 4-byte sequence
        if _write_byte(writer, <uint8_t>(0xf0 | (utf >> 18))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 12) & 0x3f))) < 0:
            return -1
        if _write_byte(writer,
                       <uint8_t>(0x80 | ((utf >> 6) & 0x3f))) < 0:
            return -1
        return _write_byte(writer, <uint8_t>(0x80 | (utf & 0x3f)))
|
91 |
+
|
92 |
+
|
93 |
+
cdef inline int _write_str(Writer* writer, str s):
    # UTF-8 encode *s* into the writer; -1 on OOM.
    # Falling off the end yields Cython's implicit 0 return for C int.
    cdef Py_UCS4 ch
    for ch in s:
        if _write_utf8(writer, ch) < 0:
            return -1
|
98 |
+
|
99 |
+
|
100 |
+
# --------------- _serialize_headers ----------------------
|
101 |
+
|
102 |
+
cdef str to_str(object s):
    # Coerce a header key/value to ``str``; only str and subclasses of
    # str (including multidict.istr) are accepted.
    typ = type(s)
    if typ is str:
        return <str>s
    if typ is _istr:
        return PyObject_Str(s)
    if not isinstance(s, str):
        raise TypeError("Cannot serialize non-str key {!r}".format(s))
    return str(s)
|
112 |
+
|
113 |
+
|
114 |
+
cdef void _safe_header(str string) except *:
    # Reject CR/LF to block HTTP response-splitting / header injection.
    cdef str sep
    for sep in ("\r", "\n"):
        if sep in string:
            raise ValueError(
                "Newline or carriage return character detected in HTTP status message or "
                "header. This is a potential security issue."
            )
|
120 |
+
|
121 |
+
|
122 |
+
def _serialize_headers(str status_line, headers):
    """Serialize *status_line* and *headers* into one ``bytes`` blob.

    Layout: the status line, CRLF, one ``key: value`` line per header
    (each CRLF-terminated), and a final blank CRLF line.
    """
    cdef Writer writer
    cdef object key
    cdef object val
    cdef bytes ret

    _init_writer(&writer)

    # Validate all keys/values up front so a bad header cannot leave a
    # partially-written buffer behind.
    for key, val in headers.items():
        _safe_header(to_str(key))
        _safe_header(to_str(val))

    try:
        # A negative return means PyErr_NoMemory() already set the error
        # indicator; the bare `raise` re-raises that pending MemoryError.
        if _write_str(&writer, status_line) < 0:
            raise
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        for key, val in headers.items():
            if _write_str(&writer, to_str(key)) < 0:
                raise
            if _write_byte(&writer, b':') < 0:
                raise
            if _write_byte(&writer, b' ') < 0:
                raise
            if _write_str(&writer, to_str(val)) < 0:
                raise
            if _write_byte(&writer, b'\r') < 0:
                raise
            if _write_byte(&writer, b'\n') < 0:
                raise

        # Blank line terminating the header block.
        if _write_byte(&writer, b'\r') < 0:
            raise
        if _write_byte(&writer, b'\n') < 0:
            raise

        return PyBytes_FromStringAndSize(writer.buf, writer.pos)
    finally:
        _release_writer(&writer)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_websocket.cpython-310-x86_64-linux-gnu.so
ADDED
Binary file (234 kB). View file
|
|
llmeval-env/lib/python3.10/site-packages/aiohttp/_websocket.pyx
ADDED
@@ -0,0 +1,56 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from cpython cimport PyBytes_AsString
|
2 |
+
|
3 |
+
|
4 |
+
#from cpython cimport PyByteArray_AsString # cython still not exports that
|
5 |
+
cdef extern from "Python.h":
|
6 |
+
char* PyByteArray_AsString(bytearray ba) except NULL
|
7 |
+
|
8 |
+
from libc.stdint cimport uint32_t, uint64_t, uintmax_t
|
9 |
+
|
10 |
+
|
11 |
+
def _websocket_mask_cython(object mask, object data):
    """XOR *data* in place with the 4-byte *mask* (websocket frame masking).

    Note, this function mutates its `data` argument when it is a
    ``bytearray``; other input types are first copied into a bytearray,
    so the caller's object is left untouched in that case.
    """
    cdef:
        Py_ssize_t data_len, i
        # bit operations on signed integers are implementation-specific
        unsigned char * in_buf
        const unsigned char * mask_buf
        uint32_t uint32_msk
        uint64_t uint64_msk

    assert len(mask) == 4

    if not isinstance(mask, bytes):
        mask = bytes(mask)

    if isinstance(data, bytearray):
        data = <bytearray>data
    else:
        data = bytearray(data)

    data_len = len(data)
    in_buf = <unsigned char*>PyByteArray_AsString(data)
    mask_buf = <const unsigned char*>PyBytes_AsString(mask)
    # Load the 4 mask bytes as one 32-bit word for bulk XOR-ing.
    uint32_msk = (<uint32_t*>mask_buf)[0]

    # TODO: align in_data ptr to achieve even faster speeds
    # does it need in python ?! malloc() always aligns to sizeof(long) bytes

    if sizeof(size_t) >= 8:
        # 64-bit platforms: duplicate the mask word and XOR 8 bytes at a time.
        uint64_msk = uint32_msk
        uint64_msk = (uint64_msk << 32) | uint32_msk

        while data_len >= 8:
            (<uint64_t*>in_buf)[0] ^= uint64_msk
            in_buf += 8
            data_len -= 8

    # Then 4 bytes at a time ...
    while data_len >= 4:
        (<uint32_t*>in_buf)[0] ^= uint32_msk
        in_buf += 4
        data_len -= 4

    # ... and byte-by-byte for the remaining tail (< 4 bytes).
    for i in range(0, data_len):
        in_buf[i] ^= mask_buf[i]
|
llmeval-env/lib/python3.10/site-packages/aiohttp/base_protocol.py
ADDED
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
from typing import Optional, cast
|
3 |
+
|
4 |
+
from .helpers import set_exception
|
5 |
+
from .tcp_helpers import tcp_nodelay
|
6 |
+
|
7 |
+
|
8 |
+
class BaseProtocol(asyncio.Protocol):
    """Common base for aiohttp's asyncio protocols.

    Tracks the transport and write/read flow-control state, and provides
    the drain helper writers use to respect transport back-pressure.
    """

    __slots__ = (
        "_loop",
        "_paused",
        "_drain_waiter",
        "_connection_lost",
        "_reading_paused",
        "transport",
    )

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop: asyncio.AbstractEventLoop = loop
        # True while the transport's write buffer is over the high-water mark.
        self._paused = False
        # Future awaited in _drain_helper() until writing resumes.
        self._drain_waiter: Optional[asyncio.Future[None]] = None
        self._reading_paused = False

        self.transport: Optional[asyncio.Transport] = None

    @property
    def connected(self) -> bool:
        """Return True if the connection is open."""
        return self.transport is not None

    def pause_writing(self) -> None:
        # Transport callback: write buffer exceeded the high-water mark.
        assert not self._paused
        self._paused = True

    def resume_writing(self) -> None:
        # Transport callback: write buffer drained; wake any coroutine
        # blocked in _drain_helper().
        assert self._paused
        self._paused = False

        waiter = self._drain_waiter
        if waiter is not None:
            self._drain_waiter = None
            if not waiter.done():
                waiter.set_result(None)

    def pause_reading(self) -> None:
        # Best effort: some transports cannot pause, so errors are ignored
        # but the flag is still flipped to keep pause/resume balanced.
        if not self._reading_paused and self.transport is not None:
            try:
                self.transport.pause_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = True

    def resume_reading(self) -> None:
        # Mirror of pause_reading(); errors from the transport are ignored.
        if self._reading_paused and self.transport is not None:
            try:
                self.transport.resume_reading()
            except (AttributeError, NotImplementedError, RuntimeError):
                pass
            self._reading_paused = False

    def connection_made(self, transport: asyncio.BaseTransport) -> None:
        tr = cast(asyncio.Transport, transport)
        # Enable TCP_NODELAY on the underlying socket (see .tcp_helpers).
        tcp_nodelay(tr, True)
        self.transport = tr

    def connection_lost(self, exc: Optional[BaseException]) -> None:
        # Wake up the writer if currently paused.
        self.transport = None
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            return
        self._drain_waiter = None
        if waiter.done():
            return
        if exc is None:
            waiter.set_result(None)
        else:
            set_exception(
                waiter,
                ConnectionError("Connection lost"),
                exc,
            )

    async def _drain_helper(self) -> None:
        # Wait until the transport's write buffer drains (or the
        # connection is lost).  The waiter is shielded so cancelling one
        # caller does not cancel the waiter shared with others.
        if not self.connected:
            raise ConnectionResetError("Connection lost")
        if not self._paused:
            return
        waiter = self._drain_waiter
        if waiter is None:
            waiter = self._loop.create_future()
            self._drain_waiter = waiter
        await asyncio.shield(waiter)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/client.py
ADDED
@@ -0,0 +1,1366 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""HTTP Client for asyncio."""
|
2 |
+
|
3 |
+
import asyncio
|
4 |
+
import base64
|
5 |
+
import hashlib
|
6 |
+
import json
|
7 |
+
import os
|
8 |
+
import sys
|
9 |
+
import traceback
|
10 |
+
import warnings
|
11 |
+
from contextlib import suppress
|
12 |
+
from types import SimpleNamespace, TracebackType
|
13 |
+
from typing import (
|
14 |
+
TYPE_CHECKING,
|
15 |
+
Any,
|
16 |
+
Awaitable,
|
17 |
+
Callable,
|
18 |
+
Coroutine,
|
19 |
+
Final,
|
20 |
+
FrozenSet,
|
21 |
+
Generator,
|
22 |
+
Generic,
|
23 |
+
Iterable,
|
24 |
+
List,
|
25 |
+
Mapping,
|
26 |
+
Optional,
|
27 |
+
Set,
|
28 |
+
Tuple,
|
29 |
+
Type,
|
30 |
+
TypeVar,
|
31 |
+
Union,
|
32 |
+
)
|
33 |
+
|
34 |
+
import attr
|
35 |
+
from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr
|
36 |
+
from yarl import URL
|
37 |
+
|
38 |
+
from . import hdrs, http, payload
|
39 |
+
from .abc import AbstractCookieJar
|
40 |
+
from .client_exceptions import (
|
41 |
+
ClientConnectionError as ClientConnectionError,
|
42 |
+
ClientConnectorCertificateError as ClientConnectorCertificateError,
|
43 |
+
ClientConnectorError as ClientConnectorError,
|
44 |
+
ClientConnectorSSLError as ClientConnectorSSLError,
|
45 |
+
ClientError as ClientError,
|
46 |
+
ClientHttpProxyError as ClientHttpProxyError,
|
47 |
+
ClientOSError as ClientOSError,
|
48 |
+
ClientPayloadError as ClientPayloadError,
|
49 |
+
ClientProxyConnectionError as ClientProxyConnectionError,
|
50 |
+
ClientResponseError as ClientResponseError,
|
51 |
+
ClientSSLError as ClientSSLError,
|
52 |
+
ContentTypeError as ContentTypeError,
|
53 |
+
InvalidURL as InvalidURL,
|
54 |
+
ServerConnectionError as ServerConnectionError,
|
55 |
+
ServerDisconnectedError as ServerDisconnectedError,
|
56 |
+
ServerFingerprintMismatch as ServerFingerprintMismatch,
|
57 |
+
ServerTimeoutError as ServerTimeoutError,
|
58 |
+
TooManyRedirects as TooManyRedirects,
|
59 |
+
WSServerHandshakeError as WSServerHandshakeError,
|
60 |
+
)
|
61 |
+
from .client_reqrep import (
|
62 |
+
ClientRequest as ClientRequest,
|
63 |
+
ClientResponse as ClientResponse,
|
64 |
+
Fingerprint as Fingerprint,
|
65 |
+
RequestInfo as RequestInfo,
|
66 |
+
_merge_ssl_params,
|
67 |
+
)
|
68 |
+
from .client_ws import ClientWebSocketResponse as ClientWebSocketResponse
|
69 |
+
from .connector import (
|
70 |
+
BaseConnector as BaseConnector,
|
71 |
+
NamedPipeConnector as NamedPipeConnector,
|
72 |
+
TCPConnector as TCPConnector,
|
73 |
+
UnixConnector as UnixConnector,
|
74 |
+
)
|
75 |
+
from .cookiejar import CookieJar
|
76 |
+
from .helpers import (
|
77 |
+
_SENTINEL,
|
78 |
+
DEBUG,
|
79 |
+
BasicAuth,
|
80 |
+
TimeoutHandle,
|
81 |
+
ceil_timeout,
|
82 |
+
get_env_proxy_for_url,
|
83 |
+
get_running_loop,
|
84 |
+
method_must_be_empty_body,
|
85 |
+
sentinel,
|
86 |
+
strip_auth_from_url,
|
87 |
+
)
|
88 |
+
from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter
|
89 |
+
from .http_websocket import WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse
|
90 |
+
from .streams import FlowControlDataQueue
|
91 |
+
from .tracing import Trace, TraceConfig
|
92 |
+
from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL
|
93 |
+
|
94 |
+
# Public API re-exported from this module (grouped by source module).
__all__ = (
    # client_exceptions
    "ClientConnectionError",
    "ClientConnectorCertificateError",
    "ClientConnectorError",
    "ClientConnectorSSLError",
    "ClientError",
    "ClientHttpProxyError",
    "ClientOSError",
    "ClientPayloadError",
    "ClientProxyConnectionError",
    "ClientResponseError",
    "ClientSSLError",
    "ContentTypeError",
    "InvalidURL",
    "ServerConnectionError",
    "ServerDisconnectedError",
    "ServerFingerprintMismatch",
    "ServerTimeoutError",
    "TooManyRedirects",
    "WSServerHandshakeError",
    # client_reqrep
    "ClientRequest",
    "ClientResponse",
    "Fingerprint",
    "RequestInfo",
    # connector
    "BaseConnector",
    "TCPConnector",
    "UnixConnector",
    "NamedPipeConnector",
    # client_ws
    "ClientWebSocketResponse",
    # client
    "ClientSession",
    "ClientTimeout",
    "request",
)
|
132 |
+
|
133 |
+
|
134 |
+
if TYPE_CHECKING:
    from ssl import SSLContext
else:
    # Runtime placeholder so annotations mentioning SSLContext still
    # evaluate without importing the ssl module.
    SSLContext = None
|
138 |
+
|
139 |
+
|
140 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ClientTimeout:
    """Immutable collection of client timeout settings, in seconds.

    ``None`` disables the corresponding timeout.
    """

    total: Optional[float] = None
    connect: Optional[float] = None
    sock_read: Optional[float] = None
    sock_connect: Optional[float] = None
    # Timeouts at or below this threshold are not rounded up (see
    # helpers.ceil_timeout usage elsewhere in the package).
    ceil_threshold: float = 5

    # pool_queue_timeout: Optional[float] = None
    # dns_resolution_timeout: Optional[float] = None
    # socket_connect_timeout: Optional[float] = None
    # connection_acquiring_timeout: Optional[float] = None
    # new_connection_timeout: Optional[float] = None
    # http_header_timeout: Optional[float] = None
    # response_body_timeout: Optional[float] = None

    # to create a timeout specific for a single request, either
    # - create a completely new one to overwrite the default
    # - or use http://www.attrs.org/en/stable/api.html#attr.evolve
    # to overwrite the defaults
|
160 |
+
|
161 |
+
|
162 |
+
# 5 Minute default read timeout
DEFAULT_TIMEOUT: Final[ClientTimeout] = ClientTimeout(total=5 * 60)

_RetType = TypeVar("_RetType")
# Callback used to pick a response charset when the headers don't declare one.
_CharsetResolver = Callable[[ClientResponse, bytes], str]
|
167 |
+
|
168 |
+
|
169 |
+
class ClientSession:
|
170 |
+
"""First-class interface for making HTTP requests."""
|
171 |
+
|
172 |
+
ATTRS = frozenset(
|
173 |
+
[
|
174 |
+
"_base_url",
|
175 |
+
"_source_traceback",
|
176 |
+
"_connector",
|
177 |
+
"requote_redirect_url",
|
178 |
+
"_loop",
|
179 |
+
"_cookie_jar",
|
180 |
+
"_connector_owner",
|
181 |
+
"_default_auth",
|
182 |
+
"_version",
|
183 |
+
"_json_serialize",
|
184 |
+
"_requote_redirect_url",
|
185 |
+
"_timeout",
|
186 |
+
"_raise_for_status",
|
187 |
+
"_auto_decompress",
|
188 |
+
"_trust_env",
|
189 |
+
"_default_headers",
|
190 |
+
"_skip_auto_headers",
|
191 |
+
"_request_class",
|
192 |
+
"_response_class",
|
193 |
+
"_ws_response_class",
|
194 |
+
"_trace_configs",
|
195 |
+
"_read_bufsize",
|
196 |
+
"_max_line_size",
|
197 |
+
"_max_field_size",
|
198 |
+
"_resolve_charset",
|
199 |
+
]
|
200 |
+
)
|
201 |
+
|
202 |
+
_source_traceback: Optional[traceback.StackSummary] = None
|
203 |
+
_connector: Optional[BaseConnector] = None
|
204 |
+
|
205 |
+
    def __init__(
        self,
        base_url: Optional[StrOrURL] = None,
        *,
        connector: Optional[BaseConnector] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        cookies: Optional[LooseCookies] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Optional[Iterable[str]] = None,
        auth: Optional[BasicAuth] = None,
        json_serialize: JSONEncoder = json.dumps,
        request_class: Type[ClientRequest] = ClientRequest,
        response_class: Type[ClientResponse] = ClientResponse,
        ws_response_class: Type[ClientWebSocketResponse] = ClientWebSocketResponse,
        version: HttpVersion = http.HttpVersion11,
        cookie_jar: Optional[AbstractCookieJar] = None,
        connector_owner: bool = True,
        raise_for_status: Union[
            bool, Callable[[ClientResponse], Awaitable[None]]
        ] = False,
        read_timeout: Union[float, _SENTINEL] = sentinel,
        conn_timeout: Optional[float] = None,
        timeout: Union[object, ClientTimeout] = sentinel,
        auto_decompress: bool = True,
        trust_env: bool = False,
        requote_redirect_url: bool = True,
        trace_configs: Optional[List[TraceConfig]] = None,
        read_bufsize: int = 2**16,
        max_line_size: int = 8190,
        max_field_size: int = 8190,
        fallback_charset_resolver: _CharsetResolver = lambda r, b: "utf-8",
    ) -> None:
        """Initialize the session; see the class docstring for usage.

        ``read_timeout`` and ``conn_timeout`` are deprecated in favour of
        ``timeout=ClientTimeout(...)`` and may not be combined with it.
        """
        # We initialise _connector to None immediately, as it's referenced in __del__()
        # and could cause issues if an exception occurs during initialisation.
        self._connector: Optional[BaseConnector] = None
        if timeout is sentinel or timeout is None:
            self._timeout = DEFAULT_TIMEOUT
            # Fold the deprecated flat arguments into the ClientTimeout.
            if read_timeout is not sentinel:
                warnings.warn(
                    "read_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
                self._timeout = attr.evolve(self._timeout, total=read_timeout)
            if conn_timeout is not None:
                self._timeout = attr.evolve(self._timeout, connect=conn_timeout)
                warnings.warn(
                    "conn_timeout is deprecated, " "use timeout argument instead",
                    DeprecationWarning,
                    stacklevel=2,
                )
        else:
            if not isinstance(timeout, ClientTimeout):
                raise ValueError(
                    f"timeout parameter cannot be of {type(timeout)} type, "
                    "please use 'timeout=ClientTimeout(...)'",
                )
            self._timeout = timeout
            # An explicit timeout conflicts with the deprecated arguments.
            if read_timeout is not sentinel:
                raise ValueError(
                    "read_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.read"
                )
            if conn_timeout is not None:
                raise ValueError(
                    "conn_timeout and timeout parameters "
                    "conflict, please setup "
                    "timeout.connect"
                )
        if loop is None:
            if connector is not None:
                loop = connector._loop

        loop = get_running_loop(loop)

        if base_url is None or isinstance(base_url, URL):
            self._base_url: Optional[URL] = base_url
        else:
            self._base_url = URL(base_url)
            assert (
                self._base_url.origin() == self._base_url
            ), "Only absolute URLs without path part are supported"

        if connector is None:
            connector = TCPConnector(loop=loop)

        # Session and connector must share an event loop.
        if connector._loop is not loop:
            raise RuntimeError("Session and connector has to use same event loop")

        self._loop = loop

        # Remember where the session was created for __del__ diagnostics.
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        if cookie_jar is None:
            cookie_jar = CookieJar(loop=loop)
        self._cookie_jar = cookie_jar

        if cookies is not None:
            self._cookie_jar.update_cookies(cookies)

        self._connector = connector
        self._connector_owner = connector_owner
        self._default_auth = auth
        self._version = version
        self._json_serialize = json_serialize
        self._raise_for_status = raise_for_status
        self._auto_decompress = auto_decompress
        self._trust_env = trust_env
        self._requote_redirect_url = requote_redirect_url
        self._read_bufsize = read_bufsize
        self._max_line_size = max_line_size
        self._max_field_size = max_field_size

        # Convert to list of tuples
        if headers:
            real_headers: CIMultiDict[str] = CIMultiDict(headers)
        else:
            real_headers = CIMultiDict()
        self._default_headers: CIMultiDict[str] = real_headers
        if skip_auto_headers is not None:
            self._skip_auto_headers = frozenset(istr(i) for i in skip_auto_headers)
        else:
            self._skip_auto_headers = frozenset()

        self._request_class = request_class
        self._response_class = response_class
        self._ws_response_class = ws_response_class

        self._trace_configs = trace_configs or []
        for trace_config in self._trace_configs:
            trace_config.freeze()

        self._resolve_charset = fallback_charset_resolver
|
340 |
+
|
341 |
+
    def __init_subclass__(cls: Type["ClientSession"]) -> None:
        # Subclassing ClientSession is deprecated; warn at class-creation time.
        warnings.warn(
            "Inheritance class {} from ClientSession "
            "is discouraged".format(cls.__name__),
            DeprecationWarning,
            stacklevel=2,
        )
|
348 |
+
|
349 |
+
    if DEBUG:

        def __setattr__(self, name: str, val: Any) -> None:
            # In debug mode, warn on attributes outside ATTRS — setting an
            # unknown attribute is usually a typo, not an extension point.
            if name not in self.ATTRS:
                warnings.warn(
                    "Setting custom ClientSession.{} attribute "
                    "is discouraged".format(name),
                    DeprecationWarning,
                    stacklevel=2,
                )
            super().__setattr__(name, val)
|
360 |
+
|
361 |
+
    def __del__(self, _warnings: Any = warnings) -> None:
        # Report a session that was garbage-collected without close():
        # a ResourceWarning plus a loop exception-handler notification.
        # (_warnings default keeps the module reachable during shutdown.)
        if not self.closed:
            kwargs = {"source": self}
            _warnings.warn(
                f"Unclosed client session {self!r}", ResourceWarning, **kwargs
            )
            context = {"client_session": self, "message": "Unclosed client session"}
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)
|
371 |
+
|
372 |
+
def request(
|
373 |
+
self, method: str, url: StrOrURL, **kwargs: Any
|
374 |
+
) -> "_RequestContextManager":
|
375 |
+
"""Perform HTTP request."""
|
376 |
+
return _RequestContextManager(self._request(method, url, **kwargs))
|
377 |
+
|
378 |
+
def _build_url(self, str_or_url: StrOrURL) -> URL:
|
379 |
+
url = URL(str_or_url)
|
380 |
+
if self._base_url is None:
|
381 |
+
return url
|
382 |
+
else:
|
383 |
+
assert not url.is_absolute() and url.path.startswith("/")
|
384 |
+
return self._base_url.join(url)
|
385 |
+
|
386 |
+
async def _request(
|
387 |
+
self,
|
388 |
+
method: str,
|
389 |
+
str_or_url: StrOrURL,
|
390 |
+
*,
|
391 |
+
params: Optional[Mapping[str, str]] = None,
|
392 |
+
data: Any = None,
|
393 |
+
json: Any = None,
|
394 |
+
cookies: Optional[LooseCookies] = None,
|
395 |
+
headers: Optional[LooseHeaders] = None,
|
396 |
+
skip_auto_headers: Optional[Iterable[str]] = None,
|
397 |
+
auth: Optional[BasicAuth] = None,
|
398 |
+
allow_redirects: bool = True,
|
399 |
+
max_redirects: int = 10,
|
400 |
+
compress: Optional[str] = None,
|
401 |
+
chunked: Optional[bool] = None,
|
402 |
+
expect100: bool = False,
|
403 |
+
raise_for_status: Union[
|
404 |
+
None, bool, Callable[[ClientResponse], Awaitable[None]]
|
405 |
+
] = None,
|
406 |
+
read_until_eof: bool = True,
|
407 |
+
proxy: Optional[StrOrURL] = None,
|
408 |
+
proxy_auth: Optional[BasicAuth] = None,
|
409 |
+
timeout: Union[ClientTimeout, _SENTINEL] = sentinel,
|
410 |
+
verify_ssl: Optional[bool] = None,
|
411 |
+
fingerprint: Optional[bytes] = None,
|
412 |
+
ssl_context: Optional[SSLContext] = None,
|
413 |
+
ssl: Union[SSLContext, bool, Fingerprint] = True,
|
414 |
+
server_hostname: Optional[str] = None,
|
415 |
+
proxy_headers: Optional[LooseHeaders] = None,
|
416 |
+
trace_request_ctx: Optional[SimpleNamespace] = None,
|
417 |
+
read_bufsize: Optional[int] = None,
|
418 |
+
auto_decompress: Optional[bool] = None,
|
419 |
+
max_line_size: Optional[int] = None,
|
420 |
+
max_field_size: Optional[int] = None,
|
421 |
+
) -> ClientResponse:
|
422 |
+
|
423 |
+
# NOTE: timeout clamps existing connect and read timeouts. We cannot
|
424 |
+
# set the default to None because we need to detect if the user wants
|
425 |
+
# to use the existing timeouts by setting timeout to None.
|
426 |
+
|
427 |
+
if self.closed:
|
428 |
+
raise RuntimeError("Session is closed")
|
429 |
+
|
430 |
+
ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
|
431 |
+
|
432 |
+
if data is not None and json is not None:
|
433 |
+
raise ValueError(
|
434 |
+
"data and json parameters can not be used at the same time"
|
435 |
+
)
|
436 |
+
elif json is not None:
|
437 |
+
data = payload.JsonPayload(json, dumps=self._json_serialize)
|
438 |
+
|
439 |
+
if not isinstance(chunked, bool) and chunked is not None:
|
440 |
+
warnings.warn("Chunk size is deprecated #1615", DeprecationWarning)
|
441 |
+
|
442 |
+
redirects = 0
|
443 |
+
history = []
|
444 |
+
version = self._version
|
445 |
+
params = params or {}
|
446 |
+
|
447 |
+
# Merge with default headers and transform to CIMultiDict
|
448 |
+
headers = self._prepare_headers(headers)
|
449 |
+
proxy_headers = self._prepare_headers(proxy_headers)
|
450 |
+
|
451 |
+
try:
|
452 |
+
url = self._build_url(str_or_url)
|
453 |
+
except ValueError as e:
|
454 |
+
raise InvalidURL(str_or_url) from e
|
455 |
+
|
456 |
+
skip_headers = set(self._skip_auto_headers)
|
457 |
+
if skip_auto_headers is not None:
|
458 |
+
for i in skip_auto_headers:
|
459 |
+
skip_headers.add(istr(i))
|
460 |
+
|
461 |
+
if proxy is not None:
|
462 |
+
try:
|
463 |
+
proxy = URL(proxy)
|
464 |
+
except ValueError as e:
|
465 |
+
raise InvalidURL(proxy) from e
|
466 |
+
|
467 |
+
if timeout is sentinel:
|
468 |
+
real_timeout: ClientTimeout = self._timeout
|
469 |
+
else:
|
470 |
+
if not isinstance(timeout, ClientTimeout):
|
471 |
+
real_timeout = ClientTimeout(total=timeout)
|
472 |
+
else:
|
473 |
+
real_timeout = timeout
|
474 |
+
# timeout is cumulative for all request operations
|
475 |
+
# (request, redirects, responses, data consuming)
|
476 |
+
tm = TimeoutHandle(
|
477 |
+
self._loop, real_timeout.total, ceil_threshold=real_timeout.ceil_threshold
|
478 |
+
)
|
479 |
+
handle = tm.start()
|
480 |
+
|
481 |
+
if read_bufsize is None:
|
482 |
+
read_bufsize = self._read_bufsize
|
483 |
+
|
484 |
+
if auto_decompress is None:
|
485 |
+
auto_decompress = self._auto_decompress
|
486 |
+
|
487 |
+
if max_line_size is None:
|
488 |
+
max_line_size = self._max_line_size
|
489 |
+
|
490 |
+
if max_field_size is None:
|
491 |
+
max_field_size = self._max_field_size
|
492 |
+
|
493 |
+
traces = [
|
494 |
+
Trace(
|
495 |
+
self,
|
496 |
+
trace_config,
|
497 |
+
trace_config.trace_config_ctx(trace_request_ctx=trace_request_ctx),
|
498 |
+
)
|
499 |
+
for trace_config in self._trace_configs
|
500 |
+
]
|
501 |
+
|
502 |
+
for trace in traces:
|
503 |
+
await trace.send_request_start(method, url.update_query(params), headers)
|
504 |
+
|
505 |
+
timer = tm.timer()
|
506 |
+
try:
|
507 |
+
with timer:
|
508 |
+
while True:
|
509 |
+
url, auth_from_url = strip_auth_from_url(url)
|
510 |
+
if auth and auth_from_url:
|
511 |
+
raise ValueError(
|
512 |
+
"Cannot combine AUTH argument with "
|
513 |
+
"credentials encoded in URL"
|
514 |
+
)
|
515 |
+
|
516 |
+
if auth is None:
|
517 |
+
auth = auth_from_url
|
518 |
+
if auth is None:
|
519 |
+
auth = self._default_auth
|
520 |
+
# It would be confusing if we support explicit
|
521 |
+
# Authorization header with auth argument
|
522 |
+
if (
|
523 |
+
headers is not None
|
524 |
+
and auth is not None
|
525 |
+
and hdrs.AUTHORIZATION in headers
|
526 |
+
):
|
527 |
+
raise ValueError(
|
528 |
+
"Cannot combine AUTHORIZATION header "
|
529 |
+
"with AUTH argument or credentials "
|
530 |
+
"encoded in URL"
|
531 |
+
)
|
532 |
+
|
533 |
+
all_cookies = self._cookie_jar.filter_cookies(url)
|
534 |
+
|
535 |
+
if cookies is not None:
|
536 |
+
tmp_cookie_jar = CookieJar()
|
537 |
+
tmp_cookie_jar.update_cookies(cookies)
|
538 |
+
req_cookies = tmp_cookie_jar.filter_cookies(url)
|
539 |
+
if req_cookies:
|
540 |
+
all_cookies.load(req_cookies)
|
541 |
+
|
542 |
+
if proxy is not None:
|
543 |
+
proxy = URL(proxy)
|
544 |
+
elif self._trust_env:
|
545 |
+
with suppress(LookupError):
|
546 |
+
proxy, proxy_auth = get_env_proxy_for_url(url)
|
547 |
+
|
548 |
+
req = self._request_class(
|
549 |
+
method,
|
550 |
+
url,
|
551 |
+
params=params,
|
552 |
+
headers=headers,
|
553 |
+
skip_auto_headers=skip_headers,
|
554 |
+
data=data,
|
555 |
+
cookies=all_cookies,
|
556 |
+
auth=auth,
|
557 |
+
version=version,
|
558 |
+
compress=compress,
|
559 |
+
chunked=chunked,
|
560 |
+
expect100=expect100,
|
561 |
+
loop=self._loop,
|
562 |
+
response_class=self._response_class,
|
563 |
+
proxy=proxy,
|
564 |
+
proxy_auth=proxy_auth,
|
565 |
+
timer=timer,
|
566 |
+
session=self,
|
567 |
+
ssl=ssl if ssl is not None else True,
|
568 |
+
server_hostname=server_hostname,
|
569 |
+
proxy_headers=proxy_headers,
|
570 |
+
traces=traces,
|
571 |
+
trust_env=self.trust_env,
|
572 |
+
)
|
573 |
+
|
574 |
+
# connection timeout
|
575 |
+
try:
|
576 |
+
async with ceil_timeout(
|
577 |
+
real_timeout.connect,
|
578 |
+
ceil_threshold=real_timeout.ceil_threshold,
|
579 |
+
):
|
580 |
+
assert self._connector is not None
|
581 |
+
conn = await self._connector.connect(
|
582 |
+
req, traces=traces, timeout=real_timeout
|
583 |
+
)
|
584 |
+
except asyncio.TimeoutError as exc:
|
585 |
+
raise ServerTimeoutError(
|
586 |
+
"Connection timeout " "to host {}".format(url)
|
587 |
+
) from exc
|
588 |
+
|
589 |
+
assert conn.transport is not None
|
590 |
+
|
591 |
+
assert conn.protocol is not None
|
592 |
+
conn.protocol.set_response_params(
|
593 |
+
timer=timer,
|
594 |
+
skip_payload=method_must_be_empty_body(method),
|
595 |
+
read_until_eof=read_until_eof,
|
596 |
+
auto_decompress=auto_decompress,
|
597 |
+
read_timeout=real_timeout.sock_read,
|
598 |
+
read_bufsize=read_bufsize,
|
599 |
+
timeout_ceil_threshold=self._connector._timeout_ceil_threshold,
|
600 |
+
max_line_size=max_line_size,
|
601 |
+
max_field_size=max_field_size,
|
602 |
+
)
|
603 |
+
|
604 |
+
try:
|
605 |
+
try:
|
606 |
+
resp = await req.send(conn)
|
607 |
+
try:
|
608 |
+
await resp.start(conn)
|
609 |
+
except BaseException:
|
610 |
+
resp.close()
|
611 |
+
raise
|
612 |
+
except BaseException:
|
613 |
+
conn.close()
|
614 |
+
raise
|
615 |
+
except ClientError:
|
616 |
+
raise
|
617 |
+
except OSError as exc:
|
618 |
+
if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
|
619 |
+
raise
|
620 |
+
raise ClientOSError(*exc.args) from exc
|
621 |
+
|
622 |
+
self._cookie_jar.update_cookies(resp.cookies, resp.url)
|
623 |
+
|
624 |
+
# redirects
|
625 |
+
if resp.status in (301, 302, 303, 307, 308) and allow_redirects:
|
626 |
+
|
627 |
+
for trace in traces:
|
628 |
+
await trace.send_request_redirect(
|
629 |
+
method, url.update_query(params), headers, resp
|
630 |
+
)
|
631 |
+
|
632 |
+
redirects += 1
|
633 |
+
history.append(resp)
|
634 |
+
if max_redirects and redirects >= max_redirects:
|
635 |
+
resp.close()
|
636 |
+
raise TooManyRedirects(
|
637 |
+
history[0].request_info, tuple(history)
|
638 |
+
)
|
639 |
+
|
640 |
+
# For 301 and 302, mimic IE, now changed in RFC
|
641 |
+
# https://github.com/kennethreitz/requests/pull/269
|
642 |
+
if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (
|
643 |
+
resp.status in (301, 302) and resp.method == hdrs.METH_POST
|
644 |
+
):
|
645 |
+
method = hdrs.METH_GET
|
646 |
+
data = None
|
647 |
+
if headers.get(hdrs.CONTENT_LENGTH):
|
648 |
+
headers.pop(hdrs.CONTENT_LENGTH)
|
649 |
+
|
650 |
+
r_url = resp.headers.get(hdrs.LOCATION) or resp.headers.get(
|
651 |
+
hdrs.URI
|
652 |
+
)
|
653 |
+
if r_url is None:
|
654 |
+
# see github.com/aio-libs/aiohttp/issues/2022
|
655 |
+
break
|
656 |
+
else:
|
657 |
+
# reading from correct redirection
|
658 |
+
# response is forbidden
|
659 |
+
resp.release()
|
660 |
+
|
661 |
+
try:
|
662 |
+
parsed_url = URL(
|
663 |
+
r_url, encoded=not self._requote_redirect_url
|
664 |
+
)
|
665 |
+
|
666 |
+
except ValueError as e:
|
667 |
+
raise InvalidURL(r_url) from e
|
668 |
+
|
669 |
+
scheme = parsed_url.scheme
|
670 |
+
if scheme not in ("http", "https", ""):
|
671 |
+
resp.close()
|
672 |
+
raise ValueError("Can redirect only to http or https")
|
673 |
+
elif not scheme:
|
674 |
+
parsed_url = url.join(parsed_url)
|
675 |
+
|
676 |
+
if url.origin() != parsed_url.origin():
|
677 |
+
auth = None
|
678 |
+
headers.pop(hdrs.AUTHORIZATION, None)
|
679 |
+
|
680 |
+
url = parsed_url
|
681 |
+
params = {}
|
682 |
+
resp.release()
|
683 |
+
continue
|
684 |
+
|
685 |
+
break
|
686 |
+
|
687 |
+
# check response status
|
688 |
+
if raise_for_status is None:
|
689 |
+
raise_for_status = self._raise_for_status
|
690 |
+
|
691 |
+
if raise_for_status is None:
|
692 |
+
pass
|
693 |
+
elif callable(raise_for_status):
|
694 |
+
await raise_for_status(resp)
|
695 |
+
elif raise_for_status:
|
696 |
+
resp.raise_for_status()
|
697 |
+
|
698 |
+
# register connection
|
699 |
+
if handle is not None:
|
700 |
+
if resp.connection is not None:
|
701 |
+
resp.connection.add_callback(handle.cancel)
|
702 |
+
else:
|
703 |
+
handle.cancel()
|
704 |
+
|
705 |
+
resp._history = tuple(history)
|
706 |
+
|
707 |
+
for trace in traces:
|
708 |
+
await trace.send_request_end(
|
709 |
+
method, url.update_query(params), headers, resp
|
710 |
+
)
|
711 |
+
return resp
|
712 |
+
|
713 |
+
except BaseException as e:
|
714 |
+
# cleanup timer
|
715 |
+
tm.close()
|
716 |
+
if handle:
|
717 |
+
handle.cancel()
|
718 |
+
handle = None
|
719 |
+
|
720 |
+
for trace in traces:
|
721 |
+
await trace.send_request_exception(
|
722 |
+
method, url.update_query(params), headers, e
|
723 |
+
)
|
724 |
+
raise
|
725 |
+
|
726 |
+
def ws_connect(
|
727 |
+
self,
|
728 |
+
url: StrOrURL,
|
729 |
+
*,
|
730 |
+
method: str = hdrs.METH_GET,
|
731 |
+
protocols: Iterable[str] = (),
|
732 |
+
timeout: float = 10.0,
|
733 |
+
receive_timeout: Optional[float] = None,
|
734 |
+
autoclose: bool = True,
|
735 |
+
autoping: bool = True,
|
736 |
+
heartbeat: Optional[float] = None,
|
737 |
+
auth: Optional[BasicAuth] = None,
|
738 |
+
origin: Optional[str] = None,
|
739 |
+
params: Optional[Mapping[str, str]] = None,
|
740 |
+
headers: Optional[LooseHeaders] = None,
|
741 |
+
proxy: Optional[StrOrURL] = None,
|
742 |
+
proxy_auth: Optional[BasicAuth] = None,
|
743 |
+
ssl: Union[SSLContext, bool, None, Fingerprint] = True,
|
744 |
+
verify_ssl: Optional[bool] = None,
|
745 |
+
fingerprint: Optional[bytes] = None,
|
746 |
+
ssl_context: Optional[SSLContext] = None,
|
747 |
+
proxy_headers: Optional[LooseHeaders] = None,
|
748 |
+
compress: int = 0,
|
749 |
+
max_msg_size: int = 4 * 1024 * 1024,
|
750 |
+
) -> "_WSRequestContextManager":
|
751 |
+
"""Initiate websocket connection."""
|
752 |
+
return _WSRequestContextManager(
|
753 |
+
self._ws_connect(
|
754 |
+
url,
|
755 |
+
method=method,
|
756 |
+
protocols=protocols,
|
757 |
+
timeout=timeout,
|
758 |
+
receive_timeout=receive_timeout,
|
759 |
+
autoclose=autoclose,
|
760 |
+
autoping=autoping,
|
761 |
+
heartbeat=heartbeat,
|
762 |
+
auth=auth,
|
763 |
+
origin=origin,
|
764 |
+
params=params,
|
765 |
+
headers=headers,
|
766 |
+
proxy=proxy,
|
767 |
+
proxy_auth=proxy_auth,
|
768 |
+
ssl=ssl,
|
769 |
+
verify_ssl=verify_ssl,
|
770 |
+
fingerprint=fingerprint,
|
771 |
+
ssl_context=ssl_context,
|
772 |
+
proxy_headers=proxy_headers,
|
773 |
+
compress=compress,
|
774 |
+
max_msg_size=max_msg_size,
|
775 |
+
)
|
776 |
+
)
|
777 |
+
|
778 |
+
async def _ws_connect(
|
779 |
+
self,
|
780 |
+
url: StrOrURL,
|
781 |
+
*,
|
782 |
+
method: str = hdrs.METH_GET,
|
783 |
+
protocols: Iterable[str] = (),
|
784 |
+
timeout: float = 10.0,
|
785 |
+
receive_timeout: Optional[float] = None,
|
786 |
+
autoclose: bool = True,
|
787 |
+
autoping: bool = True,
|
788 |
+
heartbeat: Optional[float] = None,
|
789 |
+
auth: Optional[BasicAuth] = None,
|
790 |
+
origin: Optional[str] = None,
|
791 |
+
params: Optional[Mapping[str, str]] = None,
|
792 |
+
headers: Optional[LooseHeaders] = None,
|
793 |
+
proxy: Optional[StrOrURL] = None,
|
794 |
+
proxy_auth: Optional[BasicAuth] = None,
|
795 |
+
ssl: Optional[Union[SSLContext, bool, Fingerprint]] = True,
|
796 |
+
verify_ssl: Optional[bool] = None,
|
797 |
+
fingerprint: Optional[bytes] = None,
|
798 |
+
ssl_context: Optional[SSLContext] = None,
|
799 |
+
proxy_headers: Optional[LooseHeaders] = None,
|
800 |
+
compress: int = 0,
|
801 |
+
max_msg_size: int = 4 * 1024 * 1024,
|
802 |
+
) -> ClientWebSocketResponse:
|
803 |
+
|
804 |
+
if headers is None:
|
805 |
+
real_headers: CIMultiDict[str] = CIMultiDict()
|
806 |
+
else:
|
807 |
+
real_headers = CIMultiDict(headers)
|
808 |
+
|
809 |
+
default_headers = {
|
810 |
+
hdrs.UPGRADE: "websocket",
|
811 |
+
hdrs.CONNECTION: "Upgrade",
|
812 |
+
hdrs.SEC_WEBSOCKET_VERSION: "13",
|
813 |
+
}
|
814 |
+
|
815 |
+
for key, value in default_headers.items():
|
816 |
+
real_headers.setdefault(key, value)
|
817 |
+
|
818 |
+
sec_key = base64.b64encode(os.urandom(16))
|
819 |
+
real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode()
|
820 |
+
|
821 |
+
if protocols:
|
822 |
+
real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ",".join(protocols)
|
823 |
+
if origin is not None:
|
824 |
+
real_headers[hdrs.ORIGIN] = origin
|
825 |
+
if compress:
|
826 |
+
extstr = ws_ext_gen(compress=compress)
|
827 |
+
real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr
|
828 |
+
|
829 |
+
# For the sake of backward compatibility, if user passes in None, convert it to True
|
830 |
+
if ssl is None:
|
831 |
+
ssl = True
|
832 |
+
ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
|
833 |
+
|
834 |
+
# send request
|
835 |
+
resp = await self.request(
|
836 |
+
method,
|
837 |
+
url,
|
838 |
+
params=params,
|
839 |
+
headers=real_headers,
|
840 |
+
read_until_eof=False,
|
841 |
+
auth=auth,
|
842 |
+
proxy=proxy,
|
843 |
+
proxy_auth=proxy_auth,
|
844 |
+
ssl=ssl,
|
845 |
+
proxy_headers=proxy_headers,
|
846 |
+
)
|
847 |
+
|
848 |
+
try:
|
849 |
+
# check handshake
|
850 |
+
if resp.status != 101:
|
851 |
+
raise WSServerHandshakeError(
|
852 |
+
resp.request_info,
|
853 |
+
resp.history,
|
854 |
+
message="Invalid response status",
|
855 |
+
status=resp.status,
|
856 |
+
headers=resp.headers,
|
857 |
+
)
|
858 |
+
|
859 |
+
if resp.headers.get(hdrs.UPGRADE, "").lower() != "websocket":
|
860 |
+
raise WSServerHandshakeError(
|
861 |
+
resp.request_info,
|
862 |
+
resp.history,
|
863 |
+
message="Invalid upgrade header",
|
864 |
+
status=resp.status,
|
865 |
+
headers=resp.headers,
|
866 |
+
)
|
867 |
+
|
868 |
+
if resp.headers.get(hdrs.CONNECTION, "").lower() != "upgrade":
|
869 |
+
raise WSServerHandshakeError(
|
870 |
+
resp.request_info,
|
871 |
+
resp.history,
|
872 |
+
message="Invalid connection header",
|
873 |
+
status=resp.status,
|
874 |
+
headers=resp.headers,
|
875 |
+
)
|
876 |
+
|
877 |
+
# key calculation
|
878 |
+
r_key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, "")
|
879 |
+
match = base64.b64encode(hashlib.sha1(sec_key + WS_KEY).digest()).decode()
|
880 |
+
if r_key != match:
|
881 |
+
raise WSServerHandshakeError(
|
882 |
+
resp.request_info,
|
883 |
+
resp.history,
|
884 |
+
message="Invalid challenge response",
|
885 |
+
status=resp.status,
|
886 |
+
headers=resp.headers,
|
887 |
+
)
|
888 |
+
|
889 |
+
# websocket protocol
|
890 |
+
protocol = None
|
891 |
+
if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers:
|
892 |
+
resp_protocols = [
|
893 |
+
proto.strip()
|
894 |
+
for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(",")
|
895 |
+
]
|
896 |
+
|
897 |
+
for proto in resp_protocols:
|
898 |
+
if proto in protocols:
|
899 |
+
protocol = proto
|
900 |
+
break
|
901 |
+
|
902 |
+
# websocket compress
|
903 |
+
notakeover = False
|
904 |
+
if compress:
|
905 |
+
compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS)
|
906 |
+
if compress_hdrs:
|
907 |
+
try:
|
908 |
+
compress, notakeover = ws_ext_parse(compress_hdrs)
|
909 |
+
except WSHandshakeError as exc:
|
910 |
+
raise WSServerHandshakeError(
|
911 |
+
resp.request_info,
|
912 |
+
resp.history,
|
913 |
+
message=exc.args[0],
|
914 |
+
status=resp.status,
|
915 |
+
headers=resp.headers,
|
916 |
+
) from exc
|
917 |
+
else:
|
918 |
+
compress = 0
|
919 |
+
notakeover = False
|
920 |
+
|
921 |
+
conn = resp.connection
|
922 |
+
assert conn is not None
|
923 |
+
conn_proto = conn.protocol
|
924 |
+
assert conn_proto is not None
|
925 |
+
transport = conn.transport
|
926 |
+
assert transport is not None
|
927 |
+
reader: FlowControlDataQueue[WSMessage] = FlowControlDataQueue(
|
928 |
+
conn_proto, 2**16, loop=self._loop
|
929 |
+
)
|
930 |
+
conn_proto.set_parser(WebSocketReader(reader, max_msg_size), reader)
|
931 |
+
writer = WebSocketWriter(
|
932 |
+
conn_proto,
|
933 |
+
transport,
|
934 |
+
use_mask=True,
|
935 |
+
compress=compress,
|
936 |
+
notakeover=notakeover,
|
937 |
+
)
|
938 |
+
except BaseException:
|
939 |
+
resp.close()
|
940 |
+
raise
|
941 |
+
else:
|
942 |
+
return self._ws_response_class(
|
943 |
+
reader,
|
944 |
+
writer,
|
945 |
+
protocol,
|
946 |
+
resp,
|
947 |
+
timeout,
|
948 |
+
autoclose,
|
949 |
+
autoping,
|
950 |
+
self._loop,
|
951 |
+
receive_timeout=receive_timeout,
|
952 |
+
heartbeat=heartbeat,
|
953 |
+
compress=compress,
|
954 |
+
client_notakeover=notakeover,
|
955 |
+
)
|
956 |
+
|
957 |
+
def _prepare_headers(self, headers: Optional[LooseHeaders]) -> "CIMultiDict[str]":
|
958 |
+
"""Add default headers and transform it to CIMultiDict"""
|
959 |
+
# Convert headers to MultiDict
|
960 |
+
result = CIMultiDict(self._default_headers)
|
961 |
+
if headers:
|
962 |
+
if not isinstance(headers, (MultiDictProxy, MultiDict)):
|
963 |
+
headers = CIMultiDict(headers)
|
964 |
+
added_names: Set[str] = set()
|
965 |
+
for key, value in headers.items():
|
966 |
+
if key in added_names:
|
967 |
+
result.add(key, value)
|
968 |
+
else:
|
969 |
+
result[key] = value
|
970 |
+
added_names.add(key)
|
971 |
+
return result
|
972 |
+
|
973 |
+
def get(
|
974 |
+
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
|
975 |
+
) -> "_RequestContextManager":
|
976 |
+
"""Perform HTTP GET request."""
|
977 |
+
return _RequestContextManager(
|
978 |
+
self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)
|
979 |
+
)
|
980 |
+
|
981 |
+
def options(
|
982 |
+
self, url: StrOrURL, *, allow_redirects: bool = True, **kwargs: Any
|
983 |
+
) -> "_RequestContextManager":
|
984 |
+
"""Perform HTTP OPTIONS request."""
|
985 |
+
return _RequestContextManager(
|
986 |
+
self._request(
|
987 |
+
hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs
|
988 |
+
)
|
989 |
+
)
|
990 |
+
|
991 |
+
def head(
|
992 |
+
self, url: StrOrURL, *, allow_redirects: bool = False, **kwargs: Any
|
993 |
+
) -> "_RequestContextManager":
|
994 |
+
"""Perform HTTP HEAD request."""
|
995 |
+
return _RequestContextManager(
|
996 |
+
self._request(
|
997 |
+
hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs
|
998 |
+
)
|
999 |
+
)
|
1000 |
+
|
1001 |
+
def post(
|
1002 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
1003 |
+
) -> "_RequestContextManager":
|
1004 |
+
"""Perform HTTP POST request."""
|
1005 |
+
return _RequestContextManager(
|
1006 |
+
self._request(hdrs.METH_POST, url, data=data, **kwargs)
|
1007 |
+
)
|
1008 |
+
|
1009 |
+
def put(
|
1010 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
1011 |
+
) -> "_RequestContextManager":
|
1012 |
+
"""Perform HTTP PUT request."""
|
1013 |
+
return _RequestContextManager(
|
1014 |
+
self._request(hdrs.METH_PUT, url, data=data, **kwargs)
|
1015 |
+
)
|
1016 |
+
|
1017 |
+
def patch(
|
1018 |
+
self, url: StrOrURL, *, data: Any = None, **kwargs: Any
|
1019 |
+
) -> "_RequestContextManager":
|
1020 |
+
"""Perform HTTP PATCH request."""
|
1021 |
+
return _RequestContextManager(
|
1022 |
+
self._request(hdrs.METH_PATCH, url, data=data, **kwargs)
|
1023 |
+
)
|
1024 |
+
|
1025 |
+
def delete(self, url: StrOrURL, **kwargs: Any) -> "_RequestContextManager":
|
1026 |
+
"""Perform HTTP DELETE request."""
|
1027 |
+
return _RequestContextManager(self._request(hdrs.METH_DELETE, url, **kwargs))
|
1028 |
+
|
1029 |
+
async def close(self) -> None:
|
1030 |
+
"""Close underlying connector.
|
1031 |
+
|
1032 |
+
Release all acquired resources.
|
1033 |
+
"""
|
1034 |
+
if not self.closed:
|
1035 |
+
if self._connector is not None and self._connector_owner:
|
1036 |
+
await self._connector.close()
|
1037 |
+
self._connector = None
|
1038 |
+
|
1039 |
+
@property
|
1040 |
+
def closed(self) -> bool:
|
1041 |
+
"""Is client session closed.
|
1042 |
+
|
1043 |
+
A readonly property.
|
1044 |
+
"""
|
1045 |
+
return self._connector is None or self._connector.closed
|
1046 |
+
|
1047 |
+
@property
|
1048 |
+
def connector(self) -> Optional[BaseConnector]:
|
1049 |
+
"""Connector instance used for the session."""
|
1050 |
+
return self._connector
|
1051 |
+
|
1052 |
+
@property
|
1053 |
+
def cookie_jar(self) -> AbstractCookieJar:
|
1054 |
+
"""The session cookies."""
|
1055 |
+
return self._cookie_jar
|
1056 |
+
|
1057 |
+
@property
|
1058 |
+
def version(self) -> Tuple[int, int]:
|
1059 |
+
"""The session HTTP protocol version."""
|
1060 |
+
return self._version
|
1061 |
+
|
1062 |
+
@property
|
1063 |
+
def requote_redirect_url(self) -> bool:
|
1064 |
+
"""Do URL requoting on redirection handling."""
|
1065 |
+
return self._requote_redirect_url
|
1066 |
+
|
1067 |
+
@requote_redirect_url.setter
|
1068 |
+
def requote_redirect_url(self, val: bool) -> None:
|
1069 |
+
"""Do URL requoting on redirection handling."""
|
1070 |
+
warnings.warn(
|
1071 |
+
"session.requote_redirect_url modification " "is deprecated #2778",
|
1072 |
+
DeprecationWarning,
|
1073 |
+
stacklevel=2,
|
1074 |
+
)
|
1075 |
+
self._requote_redirect_url = val
|
1076 |
+
|
1077 |
+
@property
|
1078 |
+
def loop(self) -> asyncio.AbstractEventLoop:
|
1079 |
+
"""Session's loop."""
|
1080 |
+
warnings.warn(
|
1081 |
+
"client.loop property is deprecated", DeprecationWarning, stacklevel=2
|
1082 |
+
)
|
1083 |
+
return self._loop
|
1084 |
+
|
1085 |
+
@property
|
1086 |
+
def timeout(self) -> ClientTimeout:
|
1087 |
+
"""Timeout for the session."""
|
1088 |
+
return self._timeout
|
1089 |
+
|
1090 |
+
@property
|
1091 |
+
def headers(self) -> "CIMultiDict[str]":
|
1092 |
+
"""The default headers of the client session."""
|
1093 |
+
return self._default_headers
|
1094 |
+
|
1095 |
+
@property
|
1096 |
+
def skip_auto_headers(self) -> FrozenSet[istr]:
|
1097 |
+
"""Headers for which autogeneration should be skipped"""
|
1098 |
+
return self._skip_auto_headers
|
1099 |
+
|
1100 |
+
@property
|
1101 |
+
def auth(self) -> Optional[BasicAuth]:
|
1102 |
+
"""An object that represents HTTP Basic Authorization"""
|
1103 |
+
return self._default_auth
|
1104 |
+
|
1105 |
+
@property
|
1106 |
+
def json_serialize(self) -> JSONEncoder:
|
1107 |
+
"""Json serializer callable"""
|
1108 |
+
return self._json_serialize
|
1109 |
+
|
1110 |
+
@property
|
1111 |
+
def connector_owner(self) -> bool:
|
1112 |
+
"""Should connector be closed on session closing"""
|
1113 |
+
return self._connector_owner
|
1114 |
+
|
1115 |
+
@property
|
1116 |
+
def raise_for_status(
|
1117 |
+
self,
|
1118 |
+
) -> Union[bool, Callable[[ClientResponse], Awaitable[None]]]:
|
1119 |
+
"""Should `ClientResponse.raise_for_status()` be called for each response."""
|
1120 |
+
return self._raise_for_status
|
1121 |
+
|
1122 |
+
@property
|
1123 |
+
def auto_decompress(self) -> bool:
|
1124 |
+
"""Should the body response be automatically decompressed."""
|
1125 |
+
return self._auto_decompress
|
1126 |
+
|
1127 |
+
@property
|
1128 |
+
def trust_env(self) -> bool:
|
1129 |
+
"""
|
1130 |
+
Should proxies information from environment or netrc be trusted.
|
1131 |
+
|
1132 |
+
Information is from HTTP_PROXY / HTTPS_PROXY environment variables
|
1133 |
+
or ~/.netrc file if present.
|
1134 |
+
"""
|
1135 |
+
return self._trust_env
|
1136 |
+
|
1137 |
+
@property
|
1138 |
+
def trace_configs(self) -> List[TraceConfig]:
|
1139 |
+
"""A list of TraceConfig instances used for client tracing"""
|
1140 |
+
return self._trace_configs
|
1141 |
+
|
1142 |
+
def detach(self) -> None:
|
1143 |
+
"""Detach connector from session without closing the former.
|
1144 |
+
|
1145 |
+
Session is switched to closed state anyway.
|
1146 |
+
"""
|
1147 |
+
self._connector = None
|
1148 |
+
|
1149 |
+
def __enter__(self) -> None:
|
1150 |
+
raise TypeError("Use async with instead")
|
1151 |
+
|
1152 |
+
def __exit__(
|
1153 |
+
self,
|
1154 |
+
exc_type: Optional[Type[BaseException]],
|
1155 |
+
exc_val: Optional[BaseException],
|
1156 |
+
exc_tb: Optional[TracebackType],
|
1157 |
+
) -> None:
|
1158 |
+
# __exit__ should exist in pair with __enter__ but never executed
|
1159 |
+
pass # pragma: no cover
|
1160 |
+
|
1161 |
+
async def __aenter__(self) -> "ClientSession":
|
1162 |
+
return self
|
1163 |
+
|
1164 |
+
async def __aexit__(
|
1165 |
+
self,
|
1166 |
+
exc_type: Optional[Type[BaseException]],
|
1167 |
+
exc_val: Optional[BaseException],
|
1168 |
+
exc_tb: Optional[TracebackType],
|
1169 |
+
) -> None:
|
1170 |
+
await self.close()
|
1171 |
+
|
1172 |
+
|
1173 |
+
class _BaseRequestContextManager(Coroutine[Any, Any, _RetType], Generic[_RetType]):
|
1174 |
+
|
1175 |
+
__slots__ = ("_coro", "_resp")
|
1176 |
+
|
1177 |
+
def __init__(self, coro: Coroutine["asyncio.Future[Any]", None, _RetType]) -> None:
|
1178 |
+
self._coro = coro
|
1179 |
+
|
1180 |
+
def send(self, arg: None) -> "asyncio.Future[Any]":
|
1181 |
+
return self._coro.send(arg)
|
1182 |
+
|
1183 |
+
def throw(self, *args: Any, **kwargs: Any) -> "asyncio.Future[Any]":
|
1184 |
+
return self._coro.throw(*args, **kwargs)
|
1185 |
+
|
1186 |
+
def close(self) -> None:
|
1187 |
+
return self._coro.close()
|
1188 |
+
|
1189 |
+
def __await__(self) -> Generator[Any, None, _RetType]:
|
1190 |
+
ret = self._coro.__await__()
|
1191 |
+
return ret
|
1192 |
+
|
1193 |
+
def __iter__(self) -> Generator[Any, None, _RetType]:
|
1194 |
+
return self.__await__()
|
1195 |
+
|
1196 |
+
async def __aenter__(self) -> _RetType:
|
1197 |
+
self._resp = await self._coro
|
1198 |
+
return self._resp
|
1199 |
+
|
1200 |
+
|
1201 |
+
class _RequestContextManager(_BaseRequestContextManager[ClientResponse]):
|
1202 |
+
__slots__ = ()
|
1203 |
+
|
1204 |
+
async def __aexit__(
|
1205 |
+
self,
|
1206 |
+
exc_type: Optional[Type[BaseException]],
|
1207 |
+
exc: Optional[BaseException],
|
1208 |
+
tb: Optional[TracebackType],
|
1209 |
+
) -> None:
|
1210 |
+
# We're basing behavior on the exception as it can be caused by
|
1211 |
+
# user code unrelated to the status of the connection. If you
|
1212 |
+
# would like to close a connection you must do that
|
1213 |
+
# explicitly. Otherwise connection error handling should kick in
|
1214 |
+
# and close/recycle the connection as required.
|
1215 |
+
self._resp.release()
|
1216 |
+
await self._resp.wait_for_close()
|
1217 |
+
|
1218 |
+
|
1219 |
+
class _WSRequestContextManager(_BaseRequestContextManager[ClientWebSocketResponse]):
|
1220 |
+
__slots__ = ()
|
1221 |
+
|
1222 |
+
async def __aexit__(
|
1223 |
+
self,
|
1224 |
+
exc_type: Optional[Type[BaseException]],
|
1225 |
+
exc: Optional[BaseException],
|
1226 |
+
tb: Optional[TracebackType],
|
1227 |
+
) -> None:
|
1228 |
+
await self._resp.close()
|
1229 |
+
|
1230 |
+
|
1231 |
+
class _SessionRequestContextManager:
|
1232 |
+
|
1233 |
+
__slots__ = ("_coro", "_resp", "_session")
|
1234 |
+
|
1235 |
+
def __init__(
|
1236 |
+
self,
|
1237 |
+
coro: Coroutine["asyncio.Future[Any]", None, ClientResponse],
|
1238 |
+
session: ClientSession,
|
1239 |
+
) -> None:
|
1240 |
+
self._coro = coro
|
1241 |
+
self._resp: Optional[ClientResponse] = None
|
1242 |
+
self._session = session
|
1243 |
+
|
1244 |
+
async def __aenter__(self) -> ClientResponse:
|
1245 |
+
try:
|
1246 |
+
self._resp = await self._coro
|
1247 |
+
except BaseException:
|
1248 |
+
await self._session.close()
|
1249 |
+
raise
|
1250 |
+
else:
|
1251 |
+
return self._resp
|
1252 |
+
|
1253 |
+
async def __aexit__(
|
1254 |
+
self,
|
1255 |
+
exc_type: Optional[Type[BaseException]],
|
1256 |
+
exc: Optional[BaseException],
|
1257 |
+
tb: Optional[TracebackType],
|
1258 |
+
) -> None:
|
1259 |
+
assert self._resp is not None
|
1260 |
+
self._resp.close()
|
1261 |
+
await self._session.close()
|
1262 |
+
|
1263 |
+
|
1264 |
+
def request(
|
1265 |
+
method: str,
|
1266 |
+
url: StrOrURL,
|
1267 |
+
*,
|
1268 |
+
params: Optional[Mapping[str, str]] = None,
|
1269 |
+
data: Any = None,
|
1270 |
+
json: Any = None,
|
1271 |
+
headers: Optional[LooseHeaders] = None,
|
1272 |
+
skip_auto_headers: Optional[Iterable[str]] = None,
|
1273 |
+
auth: Optional[BasicAuth] = None,
|
1274 |
+
allow_redirects: bool = True,
|
1275 |
+
max_redirects: int = 10,
|
1276 |
+
compress: Optional[str] = None,
|
1277 |
+
chunked: Optional[bool] = None,
|
1278 |
+
expect100: bool = False,
|
1279 |
+
raise_for_status: Optional[bool] = None,
|
1280 |
+
read_until_eof: bool = True,
|
1281 |
+
proxy: Optional[StrOrURL] = None,
|
1282 |
+
proxy_auth: Optional[BasicAuth] = None,
|
1283 |
+
timeout: Union[ClientTimeout, object] = sentinel,
|
1284 |
+
cookies: Optional[LooseCookies] = None,
|
1285 |
+
version: HttpVersion = http.HttpVersion11,
|
1286 |
+
connector: Optional[BaseConnector] = None,
|
1287 |
+
read_bufsize: Optional[int] = None,
|
1288 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
1289 |
+
max_line_size: int = 8190,
|
1290 |
+
max_field_size: int = 8190,
|
1291 |
+
) -> _SessionRequestContextManager:
|
1292 |
+
"""Constructs and sends a request.
|
1293 |
+
|
1294 |
+
Returns response object.
|
1295 |
+
method - HTTP method
|
1296 |
+
url - request url
|
1297 |
+
params - (optional) Dictionary or bytes to be sent in the query
|
1298 |
+
string of the new request
|
1299 |
+
data - (optional) Dictionary, bytes, or file-like object to
|
1300 |
+
send in the body of the request
|
1301 |
+
json - (optional) Any json compatible python object
|
1302 |
+
headers - (optional) Dictionary of HTTP Headers to send with
|
1303 |
+
the request
|
1304 |
+
cookies - (optional) Dict object to send with the request
|
1305 |
+
auth - (optional) BasicAuth named tuple represent HTTP Basic Auth
|
1306 |
+
auth - aiohttp.helpers.BasicAuth
|
1307 |
+
allow_redirects - (optional) If set to False, do not follow
|
1308 |
+
redirects
|
1309 |
+
version - Request HTTP version.
|
1310 |
+
compress - Set to True if request has to be compressed
|
1311 |
+
with deflate encoding.
|
1312 |
+
chunked - Set to chunk size for chunked transfer encoding.
|
1313 |
+
expect100 - Expect 100-continue response from server.
|
1314 |
+
connector - BaseConnector sub-class instance to support
|
1315 |
+
connection pooling.
|
1316 |
+
read_until_eof - Read response until eof if response
|
1317 |
+
does not have Content-Length header.
|
1318 |
+
loop - Optional event loop.
|
1319 |
+
timeout - Optional ClientTimeout settings structure, 5min
|
1320 |
+
total timeout by default.
|
1321 |
+
Usage::
|
1322 |
+
>>> import aiohttp
|
1323 |
+
>>> resp = await aiohttp.request('GET', 'http://python.org/')
|
1324 |
+
>>> resp
|
1325 |
+
<ClientResponse(python.org/) [200]>
|
1326 |
+
>>> data = await resp.read()
|
1327 |
+
"""
|
1328 |
+
connector_owner = False
|
1329 |
+
if connector is None:
|
1330 |
+
connector_owner = True
|
1331 |
+
connector = TCPConnector(loop=loop, force_close=True)
|
1332 |
+
|
1333 |
+
session = ClientSession(
|
1334 |
+
loop=loop,
|
1335 |
+
cookies=cookies,
|
1336 |
+
version=version,
|
1337 |
+
timeout=timeout,
|
1338 |
+
connector=connector,
|
1339 |
+
connector_owner=connector_owner,
|
1340 |
+
)
|
1341 |
+
|
1342 |
+
return _SessionRequestContextManager(
|
1343 |
+
session._request(
|
1344 |
+
method,
|
1345 |
+
url,
|
1346 |
+
params=params,
|
1347 |
+
data=data,
|
1348 |
+
json=json,
|
1349 |
+
headers=headers,
|
1350 |
+
skip_auto_headers=skip_auto_headers,
|
1351 |
+
auth=auth,
|
1352 |
+
allow_redirects=allow_redirects,
|
1353 |
+
max_redirects=max_redirects,
|
1354 |
+
compress=compress,
|
1355 |
+
chunked=chunked,
|
1356 |
+
expect100=expect100,
|
1357 |
+
raise_for_status=raise_for_status,
|
1358 |
+
read_until_eof=read_until_eof,
|
1359 |
+
proxy=proxy,
|
1360 |
+
proxy_auth=proxy_auth,
|
1361 |
+
read_bufsize=read_bufsize,
|
1362 |
+
max_line_size=max_line_size,
|
1363 |
+
max_field_size=max_field_size,
|
1364 |
+
),
|
1365 |
+
session,
|
1366 |
+
)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/client_exceptions.py
ADDED
@@ -0,0 +1,346 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""HTTP related errors."""
|
2 |
+
|
3 |
+
import asyncio
|
4 |
+
import warnings
|
5 |
+
from typing import TYPE_CHECKING, Any, Optional, Tuple, Union
|
6 |
+
|
7 |
+
from .http_parser import RawResponseMessage
|
8 |
+
from .typedefs import LooseHeaders
|
9 |
+
|
10 |
+
try:
|
11 |
+
import ssl
|
12 |
+
|
13 |
+
SSLContext = ssl.SSLContext
|
14 |
+
except ImportError: # pragma: no cover
|
15 |
+
ssl = SSLContext = None # type: ignore[assignment]
|
16 |
+
|
17 |
+
|
18 |
+
if TYPE_CHECKING:
|
19 |
+
from .client_reqrep import ClientResponse, ConnectionKey, Fingerprint, RequestInfo
|
20 |
+
else:
|
21 |
+
RequestInfo = ClientResponse = ConnectionKey = None
|
22 |
+
|
23 |
+
__all__ = (
|
24 |
+
"ClientError",
|
25 |
+
"ClientConnectionError",
|
26 |
+
"ClientOSError",
|
27 |
+
"ClientConnectorError",
|
28 |
+
"ClientProxyConnectionError",
|
29 |
+
"ClientSSLError",
|
30 |
+
"ClientConnectorSSLError",
|
31 |
+
"ClientConnectorCertificateError",
|
32 |
+
"ServerConnectionError",
|
33 |
+
"ServerTimeoutError",
|
34 |
+
"ServerDisconnectedError",
|
35 |
+
"ServerFingerprintMismatch",
|
36 |
+
"ClientResponseError",
|
37 |
+
"ClientHttpProxyError",
|
38 |
+
"WSServerHandshakeError",
|
39 |
+
"ContentTypeError",
|
40 |
+
"ClientPayloadError",
|
41 |
+
"InvalidURL",
|
42 |
+
)
|
43 |
+
|
44 |
+
|
45 |
+
class ClientError(Exception):
|
46 |
+
"""Base class for client connection errors."""
|
47 |
+
|
48 |
+
|
49 |
+
class ClientResponseError(ClientError):
|
50 |
+
"""Base class for exceptions that occur after getting a response.
|
51 |
+
|
52 |
+
request_info: An instance of RequestInfo.
|
53 |
+
history: A sequence of responses, if redirects occurred.
|
54 |
+
status: HTTP status code.
|
55 |
+
message: Error message.
|
56 |
+
headers: Response headers.
|
57 |
+
"""
|
58 |
+
|
59 |
+
def __init__(
|
60 |
+
self,
|
61 |
+
request_info: RequestInfo,
|
62 |
+
history: Tuple[ClientResponse, ...],
|
63 |
+
*,
|
64 |
+
code: Optional[int] = None,
|
65 |
+
status: Optional[int] = None,
|
66 |
+
message: str = "",
|
67 |
+
headers: Optional[LooseHeaders] = None,
|
68 |
+
) -> None:
|
69 |
+
self.request_info = request_info
|
70 |
+
if code is not None:
|
71 |
+
if status is not None:
|
72 |
+
raise ValueError(
|
73 |
+
"Both code and status arguments are provided; "
|
74 |
+
"code is deprecated, use status instead"
|
75 |
+
)
|
76 |
+
warnings.warn(
|
77 |
+
"code argument is deprecated, use status instead",
|
78 |
+
DeprecationWarning,
|
79 |
+
stacklevel=2,
|
80 |
+
)
|
81 |
+
if status is not None:
|
82 |
+
self.status = status
|
83 |
+
elif code is not None:
|
84 |
+
self.status = code
|
85 |
+
else:
|
86 |
+
self.status = 0
|
87 |
+
self.message = message
|
88 |
+
self.headers = headers
|
89 |
+
self.history = history
|
90 |
+
self.args = (request_info, history)
|
91 |
+
|
92 |
+
def __str__(self) -> str:
|
93 |
+
return "{}, message={!r}, url={!r}".format(
|
94 |
+
self.status,
|
95 |
+
self.message,
|
96 |
+
self.request_info.real_url,
|
97 |
+
)
|
98 |
+
|
99 |
+
def __repr__(self) -> str:
|
100 |
+
args = f"{self.request_info!r}, {self.history!r}"
|
101 |
+
if self.status != 0:
|
102 |
+
args += f", status={self.status!r}"
|
103 |
+
if self.message != "":
|
104 |
+
args += f", message={self.message!r}"
|
105 |
+
if self.headers is not None:
|
106 |
+
args += f", headers={self.headers!r}"
|
107 |
+
return f"{type(self).__name__}({args})"
|
108 |
+
|
109 |
+
@property
|
110 |
+
def code(self) -> int:
|
111 |
+
warnings.warn(
|
112 |
+
"code property is deprecated, use status instead",
|
113 |
+
DeprecationWarning,
|
114 |
+
stacklevel=2,
|
115 |
+
)
|
116 |
+
return self.status
|
117 |
+
|
118 |
+
@code.setter
|
119 |
+
def code(self, value: int) -> None:
|
120 |
+
warnings.warn(
|
121 |
+
"code property is deprecated, use status instead",
|
122 |
+
DeprecationWarning,
|
123 |
+
stacklevel=2,
|
124 |
+
)
|
125 |
+
self.status = value
|
126 |
+
|
127 |
+
|
128 |
+
class ContentTypeError(ClientResponseError):
|
129 |
+
"""ContentType found is not valid."""
|
130 |
+
|
131 |
+
|
132 |
+
class WSServerHandshakeError(ClientResponseError):
|
133 |
+
"""websocket server handshake error."""
|
134 |
+
|
135 |
+
|
136 |
+
class ClientHttpProxyError(ClientResponseError):
|
137 |
+
"""HTTP proxy error.
|
138 |
+
|
139 |
+
Raised in :class:`aiohttp.connector.TCPConnector` if
|
140 |
+
proxy responds with status other than ``200 OK``
|
141 |
+
on ``CONNECT`` request.
|
142 |
+
"""
|
143 |
+
|
144 |
+
|
145 |
+
class TooManyRedirects(ClientResponseError):
|
146 |
+
"""Client was redirected too many times."""
|
147 |
+
|
148 |
+
|
149 |
+
class ClientConnectionError(ClientError):
|
150 |
+
"""Base class for client socket errors."""
|
151 |
+
|
152 |
+
|
153 |
+
class ClientOSError(ClientConnectionError, OSError):
|
154 |
+
"""OSError error."""
|
155 |
+
|
156 |
+
|
157 |
+
class ClientConnectorError(ClientOSError):
|
158 |
+
"""Client connector error.
|
159 |
+
|
160 |
+
Raised in :class:`aiohttp.connector.TCPConnector` if
|
161 |
+
a connection can not be established.
|
162 |
+
"""
|
163 |
+
|
164 |
+
def __init__(self, connection_key: ConnectionKey, os_error: OSError) -> None:
|
165 |
+
self._conn_key = connection_key
|
166 |
+
self._os_error = os_error
|
167 |
+
super().__init__(os_error.errno, os_error.strerror)
|
168 |
+
self.args = (connection_key, os_error)
|
169 |
+
|
170 |
+
@property
|
171 |
+
def os_error(self) -> OSError:
|
172 |
+
return self._os_error
|
173 |
+
|
174 |
+
@property
|
175 |
+
def host(self) -> str:
|
176 |
+
return self._conn_key.host
|
177 |
+
|
178 |
+
@property
|
179 |
+
def port(self) -> Optional[int]:
|
180 |
+
return self._conn_key.port
|
181 |
+
|
182 |
+
@property
|
183 |
+
def ssl(self) -> Union[SSLContext, bool, "Fingerprint"]:
|
184 |
+
return self._conn_key.ssl
|
185 |
+
|
186 |
+
def __str__(self) -> str:
|
187 |
+
return "Cannot connect to host {0.host}:{0.port} ssl:{1} [{2}]".format(
|
188 |
+
self, "default" if self.ssl is True else self.ssl, self.strerror
|
189 |
+
)
|
190 |
+
|
191 |
+
# OSError.__reduce__ does too much black magick
|
192 |
+
__reduce__ = BaseException.__reduce__
|
193 |
+
|
194 |
+
|
195 |
+
class ClientProxyConnectionError(ClientConnectorError):
|
196 |
+
"""Proxy connection error.
|
197 |
+
|
198 |
+
Raised in :class:`aiohttp.connector.TCPConnector` if
|
199 |
+
connection to proxy can not be established.
|
200 |
+
"""
|
201 |
+
|
202 |
+
|
203 |
+
class UnixClientConnectorError(ClientConnectorError):
|
204 |
+
"""Unix connector error.
|
205 |
+
|
206 |
+
Raised in :py:class:`aiohttp.connector.UnixConnector`
|
207 |
+
if connection to unix socket can not be established.
|
208 |
+
"""
|
209 |
+
|
210 |
+
def __init__(
|
211 |
+
self, path: str, connection_key: ConnectionKey, os_error: OSError
|
212 |
+
) -> None:
|
213 |
+
self._path = path
|
214 |
+
super().__init__(connection_key, os_error)
|
215 |
+
|
216 |
+
@property
|
217 |
+
def path(self) -> str:
|
218 |
+
return self._path
|
219 |
+
|
220 |
+
def __str__(self) -> str:
|
221 |
+
return "Cannot connect to unix socket {0.path} ssl:{1} [{2}]".format(
|
222 |
+
self, "default" if self.ssl is True else self.ssl, self.strerror
|
223 |
+
)
|
224 |
+
|
225 |
+
|
226 |
+
class ServerConnectionError(ClientConnectionError):
|
227 |
+
"""Server connection errors."""
|
228 |
+
|
229 |
+
|
230 |
+
class ServerDisconnectedError(ServerConnectionError):
|
231 |
+
"""Server disconnected."""
|
232 |
+
|
233 |
+
def __init__(self, message: Union[RawResponseMessage, str, None] = None) -> None:
|
234 |
+
if message is None:
|
235 |
+
message = "Server disconnected"
|
236 |
+
|
237 |
+
self.args = (message,)
|
238 |
+
self.message = message
|
239 |
+
|
240 |
+
|
241 |
+
class ServerTimeoutError(ServerConnectionError, asyncio.TimeoutError):
|
242 |
+
"""Server timeout error."""
|
243 |
+
|
244 |
+
|
245 |
+
class ServerFingerprintMismatch(ServerConnectionError):
|
246 |
+
"""SSL certificate does not match expected fingerprint."""
|
247 |
+
|
248 |
+
def __init__(self, expected: bytes, got: bytes, host: str, port: int) -> None:
|
249 |
+
self.expected = expected
|
250 |
+
self.got = got
|
251 |
+
self.host = host
|
252 |
+
self.port = port
|
253 |
+
self.args = (expected, got, host, port)
|
254 |
+
|
255 |
+
def __repr__(self) -> str:
|
256 |
+
return "<{} expected={!r} got={!r} host={!r} port={!r}>".format(
|
257 |
+
self.__class__.__name__, self.expected, self.got, self.host, self.port
|
258 |
+
)
|
259 |
+
|
260 |
+
|
261 |
+
class ClientPayloadError(ClientError):
|
262 |
+
"""Response payload error."""
|
263 |
+
|
264 |
+
|
265 |
+
class InvalidURL(ClientError, ValueError):
|
266 |
+
"""Invalid URL.
|
267 |
+
|
268 |
+
URL used for fetching is malformed, e.g. it doesn't contains host
|
269 |
+
part.
|
270 |
+
"""
|
271 |
+
|
272 |
+
# Derive from ValueError for backward compatibility
|
273 |
+
|
274 |
+
def __init__(self, url: Any) -> None:
|
275 |
+
# The type of url is not yarl.URL because the exception can be raised
|
276 |
+
# on URL(url) call
|
277 |
+
super().__init__(url)
|
278 |
+
|
279 |
+
@property
|
280 |
+
def url(self) -> Any:
|
281 |
+
return self.args[0]
|
282 |
+
|
283 |
+
def __repr__(self) -> str:
|
284 |
+
return f"<{self.__class__.__name__} {self.url}>"
|
285 |
+
|
286 |
+
|
287 |
+
class ClientSSLError(ClientConnectorError):
|
288 |
+
"""Base error for ssl.*Errors."""
|
289 |
+
|
290 |
+
|
291 |
+
if ssl is not None:
|
292 |
+
cert_errors = (ssl.CertificateError,)
|
293 |
+
cert_errors_bases = (
|
294 |
+
ClientSSLError,
|
295 |
+
ssl.CertificateError,
|
296 |
+
)
|
297 |
+
|
298 |
+
ssl_errors = (ssl.SSLError,)
|
299 |
+
ssl_error_bases = (ClientSSLError, ssl.SSLError)
|
300 |
+
else: # pragma: no cover
|
301 |
+
cert_errors = tuple()
|
302 |
+
cert_errors_bases = (
|
303 |
+
ClientSSLError,
|
304 |
+
ValueError,
|
305 |
+
)
|
306 |
+
|
307 |
+
ssl_errors = tuple()
|
308 |
+
ssl_error_bases = (ClientSSLError,)
|
309 |
+
|
310 |
+
|
311 |
+
class ClientConnectorSSLError(*ssl_error_bases): # type: ignore[misc]
|
312 |
+
"""Response ssl error."""
|
313 |
+
|
314 |
+
|
315 |
+
class ClientConnectorCertificateError(*cert_errors_bases): # type: ignore[misc]
|
316 |
+
"""Response certificate error."""
|
317 |
+
|
318 |
+
def __init__(
|
319 |
+
self, connection_key: ConnectionKey, certificate_error: Exception
|
320 |
+
) -> None:
|
321 |
+
self._conn_key = connection_key
|
322 |
+
self._certificate_error = certificate_error
|
323 |
+
self.args = (connection_key, certificate_error)
|
324 |
+
|
325 |
+
@property
|
326 |
+
def certificate_error(self) -> Exception:
|
327 |
+
return self._certificate_error
|
328 |
+
|
329 |
+
@property
|
330 |
+
def host(self) -> str:
|
331 |
+
return self._conn_key.host
|
332 |
+
|
333 |
+
@property
|
334 |
+
def port(self) -> Optional[int]:
|
335 |
+
return self._conn_key.port
|
336 |
+
|
337 |
+
@property
|
338 |
+
def ssl(self) -> bool:
|
339 |
+
return self._conn_key.is_ssl
|
340 |
+
|
341 |
+
def __str__(self) -> str:
|
342 |
+
return (
|
343 |
+
"Cannot connect to host {0.host}:{0.port} ssl:{0.ssl} "
|
344 |
+
"[{0.certificate_error.__class__.__name__}: "
|
345 |
+
"{0.certificate_error.args}]".format(self)
|
346 |
+
)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/client_proto.py
ADDED
@@ -0,0 +1,296 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
from contextlib import suppress
|
3 |
+
from typing import Any, Optional, Tuple
|
4 |
+
|
5 |
+
from .base_protocol import BaseProtocol
|
6 |
+
from .client_exceptions import (
|
7 |
+
ClientOSError,
|
8 |
+
ClientPayloadError,
|
9 |
+
ServerDisconnectedError,
|
10 |
+
ServerTimeoutError,
|
11 |
+
)
|
12 |
+
from .helpers import (
|
13 |
+
_EXC_SENTINEL,
|
14 |
+
BaseTimerContext,
|
15 |
+
set_exception,
|
16 |
+
status_code_must_be_empty_body,
|
17 |
+
)
|
18 |
+
from .http import HttpResponseParser, RawResponseMessage
|
19 |
+
from .http_exceptions import HttpProcessingError
|
20 |
+
from .streams import EMPTY_PAYLOAD, DataQueue, StreamReader
|
21 |
+
|
22 |
+
|
23 |
+
class ResponseHandler(BaseProtocol, DataQueue[Tuple[RawResponseMessage, StreamReader]]):
|
24 |
+
"""Helper class to adapt between Protocol and StreamReader."""
|
25 |
+
|
26 |
+
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
|
27 |
+
BaseProtocol.__init__(self, loop=loop)
|
28 |
+
DataQueue.__init__(self, loop)
|
29 |
+
|
30 |
+
self._should_close = False
|
31 |
+
|
32 |
+
self._payload: Optional[StreamReader] = None
|
33 |
+
self._skip_payload = False
|
34 |
+
self._payload_parser = None
|
35 |
+
|
36 |
+
self._timer = None
|
37 |
+
|
38 |
+
self._tail = b""
|
39 |
+
self._upgraded = False
|
40 |
+
self._parser: Optional[HttpResponseParser] = None
|
41 |
+
|
42 |
+
self._read_timeout: Optional[float] = None
|
43 |
+
self._read_timeout_handle: Optional[asyncio.TimerHandle] = None
|
44 |
+
|
45 |
+
self._timeout_ceil_threshold: Optional[float] = 5
|
46 |
+
|
47 |
+
@property
|
48 |
+
def upgraded(self) -> bool:
|
49 |
+
return self._upgraded
|
50 |
+
|
51 |
+
@property
|
52 |
+
def should_close(self) -> bool:
|
53 |
+
if self._payload is not None and not self._payload.is_eof() or self._upgraded:
|
54 |
+
return True
|
55 |
+
|
56 |
+
return (
|
57 |
+
self._should_close
|
58 |
+
or self._upgraded
|
59 |
+
or self.exception() is not None
|
60 |
+
or self._payload_parser is not None
|
61 |
+
or len(self) > 0
|
62 |
+
or bool(self._tail)
|
63 |
+
)
|
64 |
+
|
65 |
+
def force_close(self) -> None:
|
66 |
+
self._should_close = True
|
67 |
+
|
68 |
+
def close(self) -> None:
|
69 |
+
transport = self.transport
|
70 |
+
if transport is not None:
|
71 |
+
transport.close()
|
72 |
+
self.transport = None
|
73 |
+
self._payload = None
|
74 |
+
self._drop_timeout()
|
75 |
+
|
76 |
+
def is_connected(self) -> bool:
|
77 |
+
return self.transport is not None and not self.transport.is_closing()
|
78 |
+
|
79 |
+
def connection_lost(self, exc: Optional[BaseException]) -> None:
|
80 |
+
self._drop_timeout()
|
81 |
+
|
82 |
+
original_connection_error = exc
|
83 |
+
reraised_exc = original_connection_error
|
84 |
+
|
85 |
+
connection_closed_cleanly = original_connection_error is None
|
86 |
+
|
87 |
+
if self._payload_parser is not None:
|
88 |
+
with suppress(Exception): # FIXME: log this somehow?
|
89 |
+
self._payload_parser.feed_eof()
|
90 |
+
|
91 |
+
uncompleted = None
|
92 |
+
if self._parser is not None:
|
93 |
+
try:
|
94 |
+
uncompleted = self._parser.feed_eof()
|
95 |
+
except Exception as underlying_exc:
|
96 |
+
if self._payload is not None:
|
97 |
+
client_payload_exc_msg = (
|
98 |
+
f"Response payload is not completed: {underlying_exc !r}"
|
99 |
+
)
|
100 |
+
if not connection_closed_cleanly:
|
101 |
+
client_payload_exc_msg = (
|
102 |
+
f"{client_payload_exc_msg !s}. "
|
103 |
+
f"{original_connection_error !r}"
|
104 |
+
)
|
105 |
+
set_exception(
|
106 |
+
self._payload,
|
107 |
+
ClientPayloadError(client_payload_exc_msg),
|
108 |
+
underlying_exc,
|
109 |
+
)
|
110 |
+
|
111 |
+
if not self.is_eof():
|
112 |
+
if isinstance(original_connection_error, OSError):
|
113 |
+
reraised_exc = ClientOSError(*original_connection_error.args)
|
114 |
+
if connection_closed_cleanly:
|
115 |
+
reraised_exc = ServerDisconnectedError(uncompleted)
|
116 |
+
# assigns self._should_close to True as side effect,
|
117 |
+
# we do it anyway below
|
118 |
+
underlying_non_eof_exc = (
|
119 |
+
_EXC_SENTINEL
|
120 |
+
if connection_closed_cleanly
|
121 |
+
else original_connection_error
|
122 |
+
)
|
123 |
+
assert underlying_non_eof_exc is not None
|
124 |
+
assert reraised_exc is not None
|
125 |
+
self.set_exception(reraised_exc, underlying_non_eof_exc)
|
126 |
+
|
127 |
+
self._should_close = True
|
128 |
+
self._parser = None
|
129 |
+
self._payload = None
|
130 |
+
self._payload_parser = None
|
131 |
+
self._reading_paused = False
|
132 |
+
|
133 |
+
super().connection_lost(reraised_exc)
|
134 |
+
|
135 |
+
def eof_received(self) -> None:
|
136 |
+
# should call parser.feed_eof() most likely
|
137 |
+
self._drop_timeout()
|
138 |
+
|
139 |
+
def pause_reading(self) -> None:
|
140 |
+
super().pause_reading()
|
141 |
+
self._drop_timeout()
|
142 |
+
|
143 |
+
def resume_reading(self) -> None:
|
144 |
+
super().resume_reading()
|
145 |
+
self._reschedule_timeout()
|
146 |
+
|
147 |
+
def set_exception(
|
148 |
+
self,
|
149 |
+
exc: BaseException,
|
150 |
+
exc_cause: BaseException = _EXC_SENTINEL,
|
151 |
+
) -> None:
|
152 |
+
self._should_close = True
|
153 |
+
self._drop_timeout()
|
154 |
+
super().set_exception(exc, exc_cause)
|
155 |
+
|
156 |
+
def set_parser(self, parser: Any, payload: Any) -> None:
|
157 |
+
# TODO: actual types are:
|
158 |
+
# parser: WebSocketReader
|
159 |
+
# payload: FlowControlDataQueue
|
160 |
+
# but they are not generi enough
|
161 |
+
# Need an ABC for both types
|
162 |
+
self._payload = payload
|
163 |
+
self._payload_parser = parser
|
164 |
+
|
165 |
+
self._drop_timeout()
|
166 |
+
|
167 |
+
if self._tail:
|
168 |
+
data, self._tail = self._tail, b""
|
169 |
+
self.data_received(data)
|
170 |
+
|
171 |
+
def set_response_params(
|
172 |
+
self,
|
173 |
+
*,
|
174 |
+
timer: Optional[BaseTimerContext] = None,
|
175 |
+
skip_payload: bool = False,
|
176 |
+
read_until_eof: bool = False,
|
177 |
+
auto_decompress: bool = True,
|
178 |
+
read_timeout: Optional[float] = None,
|
179 |
+
read_bufsize: int = 2**16,
|
180 |
+
timeout_ceil_threshold: float = 5,
|
181 |
+
max_line_size: int = 8190,
|
182 |
+
max_field_size: int = 8190,
|
183 |
+
) -> None:
|
184 |
+
self._skip_payload = skip_payload
|
185 |
+
|
186 |
+
self._read_timeout = read_timeout
|
187 |
+
|
188 |
+
self._timeout_ceil_threshold = timeout_ceil_threshold
|
189 |
+
|
190 |
+
self._parser = HttpResponseParser(
|
191 |
+
self,
|
192 |
+
self._loop,
|
193 |
+
read_bufsize,
|
194 |
+
timer=timer,
|
195 |
+
payload_exception=ClientPayloadError,
|
196 |
+
response_with_body=not skip_payload,
|
197 |
+
read_until_eof=read_until_eof,
|
198 |
+
auto_decompress=auto_decompress,
|
199 |
+
max_line_size=max_line_size,
|
200 |
+
max_field_size=max_field_size,
|
201 |
+
)
|
202 |
+
|
203 |
+
if self._tail:
|
204 |
+
data, self._tail = self._tail, b""
|
205 |
+
self.data_received(data)
|
206 |
+
|
207 |
+
def _drop_timeout(self) -> None:
|
208 |
+
if self._read_timeout_handle is not None:
|
209 |
+
self._read_timeout_handle.cancel()
|
210 |
+
self._read_timeout_handle = None
|
211 |
+
|
212 |
+
def _reschedule_timeout(self) -> None:
|
213 |
+
timeout = self._read_timeout
|
214 |
+
if self._read_timeout_handle is not None:
|
215 |
+
self._read_timeout_handle.cancel()
|
216 |
+
|
217 |
+
if timeout:
|
218 |
+
self._read_timeout_handle = self._loop.call_later(
|
219 |
+
timeout, self._on_read_timeout
|
220 |
+
)
|
221 |
+
else:
|
222 |
+
self._read_timeout_handle = None
|
223 |
+
|
224 |
+
def start_timeout(self) -> None:
|
225 |
+
self._reschedule_timeout()
|
226 |
+
|
227 |
+
def _on_read_timeout(self) -> None:
|
228 |
+
exc = ServerTimeoutError("Timeout on reading data from socket")
|
229 |
+
self.set_exception(exc)
|
230 |
+
if self._payload is not None:
|
231 |
+
set_exception(self._payload, exc)
|
232 |
+
|
233 |
+
def data_received(self, data: bytes) -> None:
|
234 |
+
self._reschedule_timeout()
|
235 |
+
|
236 |
+
if not data:
|
237 |
+
return
|
238 |
+
|
239 |
+
# custom payload parser
|
240 |
+
if self._payload_parser is not None:
|
241 |
+
eof, tail = self._payload_parser.feed_data(data)
|
242 |
+
if eof:
|
243 |
+
self._payload = None
|
244 |
+
self._payload_parser = None
|
245 |
+
|
246 |
+
if tail:
|
247 |
+
self.data_received(tail)
|
248 |
+
return
|
249 |
+
else:
|
250 |
+
if self._upgraded or self._parser is None:
|
251 |
+
# i.e. websocket connection, websocket parser is not set yet
|
252 |
+
self._tail += data
|
253 |
+
else:
|
254 |
+
# parse http messages
|
255 |
+
try:
|
256 |
+
messages, upgraded, tail = self._parser.feed_data(data)
|
257 |
+
except BaseException as underlying_exc:
|
258 |
+
if self.transport is not None:
|
259 |
+
# connection.release() could be called BEFORE
|
260 |
+
# data_received(), the transport is already
|
261 |
+
# closed in this case
|
262 |
+
self.transport.close()
|
263 |
+
# should_close is True after the call
|
264 |
+
self.set_exception(HttpProcessingError(), underlying_exc)
|
265 |
+
return
|
266 |
+
|
267 |
+
self._upgraded = upgraded
|
268 |
+
|
269 |
+
payload: Optional[StreamReader] = None
|
270 |
+
for message, payload in messages:
|
271 |
+
if message.should_close:
|
272 |
+
self._should_close = True
|
273 |
+
|
274 |
+
self._payload = payload
|
275 |
+
|
276 |
+
if self._skip_payload or status_code_must_be_empty_body(
|
277 |
+
message.code
|
278 |
+
):
|
279 |
+
self.feed_data((message, EMPTY_PAYLOAD), 0)
|
280 |
+
else:
|
281 |
+
self.feed_data((message, payload), 0)
|
282 |
+
if payload is not None:
|
283 |
+
# new message(s) was processed
|
284 |
+
# register timeout handler unsubscribing
|
285 |
+
# either on end-of-stream or immediately for
|
286 |
+
# EMPTY_PAYLOAD
|
287 |
+
if payload is not EMPTY_PAYLOAD:
|
288 |
+
payload.on_eof(self._drop_timeout)
|
289 |
+
else:
|
290 |
+
self._drop_timeout()
|
291 |
+
|
292 |
+
if tail:
|
293 |
+
if upgraded:
|
294 |
+
self.data_received(tail)
|
295 |
+
else:
|
296 |
+
self._tail = tail
|
llmeval-env/lib/python3.10/site-packages/aiohttp/client_reqrep.py
ADDED
@@ -0,0 +1,1207 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
import codecs
|
3 |
+
import contextlib
|
4 |
+
import functools
|
5 |
+
import io
|
6 |
+
import re
|
7 |
+
import sys
|
8 |
+
import traceback
|
9 |
+
import warnings
|
10 |
+
from hashlib import md5, sha1, sha256
|
11 |
+
from http.cookies import CookieError, Morsel, SimpleCookie
|
12 |
+
from types import MappingProxyType, TracebackType
|
13 |
+
from typing import (
|
14 |
+
TYPE_CHECKING,
|
15 |
+
Any,
|
16 |
+
Callable,
|
17 |
+
Dict,
|
18 |
+
Iterable,
|
19 |
+
List,
|
20 |
+
Mapping,
|
21 |
+
Optional,
|
22 |
+
Tuple,
|
23 |
+
Type,
|
24 |
+
Union,
|
25 |
+
cast,
|
26 |
+
)
|
27 |
+
|
28 |
+
import attr
|
29 |
+
from multidict import CIMultiDict, CIMultiDictProxy, MultiDict, MultiDictProxy
|
30 |
+
from yarl import URL
|
31 |
+
|
32 |
+
from . import hdrs, helpers, http, multipart, payload
|
33 |
+
from .abc import AbstractStreamWriter
|
34 |
+
from .client_exceptions import (
|
35 |
+
ClientConnectionError,
|
36 |
+
ClientOSError,
|
37 |
+
ClientResponseError,
|
38 |
+
ContentTypeError,
|
39 |
+
InvalidURL,
|
40 |
+
ServerFingerprintMismatch,
|
41 |
+
)
|
42 |
+
from .compression_utils import HAS_BROTLI
|
43 |
+
from .formdata import FormData
|
44 |
+
from .helpers import (
|
45 |
+
BaseTimerContext,
|
46 |
+
BasicAuth,
|
47 |
+
HeadersMixin,
|
48 |
+
TimerNoop,
|
49 |
+
basicauth_from_netrc,
|
50 |
+
netrc_from_env,
|
51 |
+
noop,
|
52 |
+
reify,
|
53 |
+
set_exception,
|
54 |
+
set_result,
|
55 |
+
)
|
56 |
+
from .http import (
|
57 |
+
SERVER_SOFTWARE,
|
58 |
+
HttpVersion,
|
59 |
+
HttpVersion10,
|
60 |
+
HttpVersion11,
|
61 |
+
StreamWriter,
|
62 |
+
)
|
63 |
+
from .log import client_logger
|
64 |
+
from .streams import StreamReader
|
65 |
+
from .typedefs import (
|
66 |
+
DEFAULT_JSON_DECODER,
|
67 |
+
JSONDecoder,
|
68 |
+
LooseCookies,
|
69 |
+
LooseHeaders,
|
70 |
+
RawHeaders,
|
71 |
+
)
|
72 |
+
|
73 |
+
try:
|
74 |
+
import ssl
|
75 |
+
from ssl import SSLContext
|
76 |
+
except ImportError: # pragma: no cover
|
77 |
+
ssl = None # type: ignore[assignment]
|
78 |
+
SSLContext = object # type: ignore[misc,assignment]
|
79 |
+
|
80 |
+
|
81 |
+
__all__ = ("ClientRequest", "ClientResponse", "RequestInfo", "Fingerprint")
|
82 |
+
|
83 |
+
|
84 |
+
if TYPE_CHECKING:
|
85 |
+
from .client import ClientSession
|
86 |
+
from .connector import Connection
|
87 |
+
from .tracing import Trace
|
88 |
+
|
89 |
+
|
90 |
+
# Matches any character that is NOT a valid HTTP token character
# (RFC 9110 "tchar"); used to reject malformed request method strings.
_CONTAINS_CONTROL_CHAR_RE = re.compile(r"[^-!#$%&'*+.^_`|~0-9a-zA-Z]")
# Matches JSON media types, including structured-syntax suffixes such as
# "application/ld+json" (RFC 6839).
json_re = re.compile(r"^application/(?:[\w.+-]+?\+)?json")
|
92 |
+
|
93 |
+
|
94 |
+
def _gen_default_accept_encoding() -> str:
    """Return the default ``Accept-Encoding`` header value.

    Advertises brotli only when the optional brotli codec is installed.
    """
    if HAS_BROTLI:
        return "gzip, deflate, br"
    return "gzip, deflate"
|
96 |
+
|
97 |
+
|
98 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ContentDisposition:
    """Parsed ``Content-Disposition`` header as an immutable value object."""

    type: Optional[str]  # disposition type, e.g. "attachment"; None if absent
    parameters: "MappingProxyType[str, str]"  # read-only header parameters
    filename: Optional[str]  # filename extracted from the parameters, if any
|
103 |
+
|
104 |
+
|
105 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class RequestInfo:
    """Immutable description of an outgoing request, exposed on responses."""

    url: URL  # effective request URL (fragment stripped by ClientRequest)
    method: str  # HTTP method, upper-cased
    headers: "CIMultiDictProxy[str]"  # read-only snapshot of request headers
    real_url: URL = attr.ib()  # URL as originally supplied by the caller

    @real_url.default
    def real_url_default(self) -> URL:
        # When no distinct original URL is provided, mirror the effective one.
        return self.url
|
115 |
+
|
116 |
+
|
117 |
+
class Fingerprint:
    """Pin a TLS server certificate by its digest.

    Only SHA-256 (32-byte) digests are accepted; MD5 and SHA-1 digest
    lengths are recognized but rejected as insecure.
    """

    HASHFUNC_BY_DIGESTLEN = {
        16: md5,
        20: sha1,
        32: sha256,
    }

    def __init__(self, fingerprint: bytes) -> None:
        hashfunc = self.HASHFUNC_BY_DIGESTLEN.get(len(fingerprint))
        if not hashfunc:
            raise ValueError("fingerprint has invalid length")
        if hashfunc is md5 or hashfunc is sha1:
            raise ValueError(
                "md5 and sha1 are insecure and not supported. Use sha256."
            )
        self._hashfunc = hashfunc
        self._fingerprint = fingerprint

    @property
    def fingerprint(self) -> bytes:
        """The pinned digest bytes."""
        return self._fingerprint

    def check(self, transport: asyncio.Transport) -> None:
        """Raise ServerFingerprintMismatch if the peer cert digest differs."""
        if not transport.get_extra_info("sslcontext"):
            # Plain (non-TLS) transport: nothing to verify.
            return
        ssl_object = transport.get_extra_info("ssl_object")
        peer_cert = ssl_object.getpeercert(binary_form=True)
        actual = self._hashfunc(peer_cert).digest()
        if actual != self._fingerprint:
            host, port, *_ = transport.get_extra_info("peername")
            raise ServerFingerprintMismatch(self._fingerprint, actual, host, port)
|
149 |
+
|
150 |
+
|
151 |
+
# Types accepted for the ``ssl`` request parameter; SSLContext is only
# available when the interpreter was built with ssl support.
if ssl is not None:
    SSL_ALLOWED_TYPES = (ssl.SSLContext, bool, Fingerprint, type(None))
else:  # pragma: no cover
    SSL_ALLOWED_TYPES = (bool, type(None))
|
155 |
+
|
156 |
+
|
157 |
+
def _merge_ssl_params(
    ssl: Union["SSLContext", bool, Fingerprint],
    verify_ssl: Optional[bool],
    ssl_context: Optional["SSLContext"],
    fingerprint: Optional[bytes],
) -> Union["SSLContext", bool, Fingerprint]:
    """Merge the deprecated ``verify_ssl``, ``ssl_context`` and
    ``fingerprint`` arguments into the single ``ssl`` value.

    Emits a DeprecationWarning for each legacy argument that is used and
    raises ValueError when a legacy argument conflicts with an explicitly
    provided ``ssl``.  Raises TypeError when the merged value has a type
    outside SSL_ALLOWED_TYPES.
    """
    # Single copy of the mutual-exclusion error text; previously this
    # literal was duplicated verbatim in three places (message unchanged).
    mutually_exclusive_msg = (
        "verify_ssl, ssl_context, fingerprint and ssl "
        "parameters are mutually exclusive"
    )
    if ssl is None:
        ssl = True  # Double check for backwards compatibility
    if verify_ssl is not None and not verify_ssl:
        warnings.warn(
            "verify_ssl is deprecated, use ssl=False instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive_msg)
        else:
            ssl = False
    if ssl_context is not None:
        warnings.warn(
            "ssl_context is deprecated, use ssl=context instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive_msg)
        else:
            ssl = ssl_context
    if fingerprint is not None:
        warnings.warn(
            "fingerprint is deprecated, " "use ssl=Fingerprint(fingerprint) instead",
            DeprecationWarning,
            stacklevel=3,
        )
        if ssl is not True:
            raise ValueError(mutually_exclusive_msg)
        else:
            ssl = Fingerprint(fingerprint)
    if not isinstance(ssl, SSL_ALLOWED_TYPES):
        raise TypeError(
            "ssl should be SSLContext, bool, Fingerprint or None, "
            "got {!r} instead.".format(ssl)
        )
    return ssl
|
210 |
+
|
211 |
+
|
212 |
+
@attr.s(auto_attribs=True, slots=True, frozen=True)
class ConnectionKey:
    """Hashable key used to look up pooled connections in the connector."""

    # the key should contain an information about used proxy / TLS
    # to prevent reusing wrong connections from a pool
    host: str
    port: Optional[int]
    is_ssl: bool
    ssl: Union[SSLContext, bool, Fingerprint]
    proxy: Optional[URL]
    proxy_auth: Optional[BasicAuth]
    proxy_headers_hash: Optional[int]  # hash(CIMultiDict)
|
223 |
+
|
224 |
+
|
225 |
+
def _is_expected_content_type(
|
226 |
+
response_content_type: str, expected_content_type: str
|
227 |
+
) -> bool:
|
228 |
+
if expected_content_type == "application/json":
|
229 |
+
return json_re.match(response_content_type) is not None
|
230 |
+
return expected_content_type in response_content_type
|
231 |
+
|
232 |
+
|
233 |
+
class ClientRequest:
    """Build and send a single outgoing HTTP request.

    Construction normalizes the URL/method and populates headers via the
    ``update_*`` methods (order matters); :meth:`send` writes the request
    over an acquired connection and returns a ``ClientResponse``.
    """

    GET_METHODS = {
        hdrs.METH_GET,
        hdrs.METH_HEAD,
        hdrs.METH_OPTIONS,
        hdrs.METH_TRACE,
    }
    POST_METHODS = {hdrs.METH_PATCH, hdrs.METH_POST, hdrs.METH_PUT}
    ALL_METHODS = GET_METHODS.union(POST_METHODS).union({hdrs.METH_DELETE})

    DEFAULT_HEADERS = {
        hdrs.ACCEPT: "*/*",
        hdrs.ACCEPT_ENCODING: _gen_default_accept_encoding(),
    }

    body = b""
    auth = None
    response = None

    __writer = None  # async task for streaming data
    _continue = None  # waiter future for '100 Continue' response

    # N.B.
    # Adding __del__ method with self._writer closing doesn't make sense
    # because _writer is instance method, thus it keeps a reference to self.
    # Until writer has finished finalizer will not be called.

    def __init__(
        self,
        method: str,
        url: URL,
        *,
        params: Optional[Mapping[str, str]] = None,
        headers: Optional[LooseHeaders] = None,
        skip_auto_headers: Iterable[str] = frozenset(),
        data: Any = None,
        cookies: Optional[LooseCookies] = None,
        auth: Optional[BasicAuth] = None,
        version: http.HttpVersion = http.HttpVersion11,
        compress: Optional[str] = None,
        chunked: Optional[bool] = None,
        expect100: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        response_class: Optional[Type["ClientResponse"]] = None,
        proxy: Optional[URL] = None,
        proxy_auth: Optional[BasicAuth] = None,
        timer: Optional[BaseTimerContext] = None,
        session: Optional["ClientSession"] = None,
        ssl: Union[SSLContext, bool, Fingerprint] = True,
        proxy_headers: Optional[LooseHeaders] = None,
        traces: Optional[List["Trace"]] = None,
        trust_env: bool = False,
        server_hostname: Optional[str] = None,
    ):
        if loop is None:
            loop = asyncio.get_event_loop()

        match = _CONTAINS_CONTROL_CHAR_RE.search(method)
        if match:
            # BUGFIX: the second string previously lacked the f-prefix, so the
            # literal text "{match.group()!r}" was emitted instead of the
            # actual offending character.
            raise ValueError(
                f"Method cannot contain non-token characters {method!r} "
                f"(found at least {match.group()!r})"
            )

        assert isinstance(url, URL), url
        assert isinstance(proxy, (URL, type(None))), proxy
        # FIXME: session is None in tests only, need to fix tests
        # assert session is not None
        self._session = cast("ClientSession", session)
        if params:
            # Merge explicit params into any query string already on the URL.
            q = MultiDict(url.query)
            url2 = url.with_query(params)
            q.extend(url2.query)
            url = url.with_query(q)
        self.original_url = url
        self.url = url.with_fragment(None)
        self.method = method.upper()
        self.chunked = chunked
        self.compress = compress
        self.loop = loop
        self.length = None
        if response_class is None:
            real_response_class = ClientResponse
        else:
            real_response_class = response_class
        self.response_class: Type[ClientResponse] = real_response_class
        self._timer = timer if timer is not None else TimerNoop()
        self._ssl = ssl if ssl is not None else True
        self.server_hostname = server_hostname

        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # The update_* call order matters: headers must exist before
        # auto-headers/cookies/auth are merged into them, and the body must
        # be set before transfer encoding is decided.
        self.update_version(version)
        self.update_host(url)
        self.update_headers(headers)
        self.update_auto_headers(skip_auto_headers)
        self.update_cookies(cookies)
        self.update_content_encoding(data)
        self.update_auth(auth, trust_env)
        self.update_proxy(proxy, proxy_auth, proxy_headers)

        self.update_body_from_data(data)
        if data is not None or self.method not in self.GET_METHODS:
            self.update_transfer_encoding()
        self.update_expect_continue(expect100)
        if traces is None:
            traces = []
        self._traces = traces

    def __reset_writer(self, _: object = None) -> None:
        # Done-callback: drop the task reference once the writer finishes.
        self.__writer = None

    @property
    def _writer(self) -> Optional["asyncio.Task[None]"]:
        """The background task streaming the request body, if any."""
        return self.__writer

    @_writer.setter
    def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
        # Keep exactly one done-callback registered on the current task.
        if self.__writer is not None:
            self.__writer.remove_done_callback(self.__reset_writer)
        self.__writer = writer
        if writer is not None:
            writer.add_done_callback(self.__reset_writer)

    def is_ssl(self) -> bool:
        """True when the request URL uses a TLS scheme (https/wss)."""
        return self.url.scheme in ("https", "wss")

    @property
    def ssl(self) -> Union["SSLContext", bool, Fingerprint]:
        return self._ssl

    @property
    def connection_key(self) -> ConnectionKey:
        """Key identifying a reusable pooled connection for this request."""
        proxy_headers = self.proxy_headers
        if proxy_headers:
            h: Optional[int] = hash(tuple((k, v) for k, v in proxy_headers.items()))
        else:
            h = None
        return ConnectionKey(
            self.host,
            self.port,
            self.is_ssl(),
            self.ssl,
            self.proxy,
            self.proxy_auth,
            h,
        )

    @property
    def host(self) -> str:
        ret = self.url.raw_host
        assert ret is not None
        return ret

    @property
    def port(self) -> Optional[int]:
        return self.url.port

    @property
    def request_info(self) -> RequestInfo:
        """Immutable snapshot of this request (method, URL, headers)."""
        headers: CIMultiDictProxy[str] = CIMultiDictProxy(self.headers)
        return RequestInfo(self.url, self.method, headers, self.original_url)

    def update_host(self, url: URL) -> None:
        """Update destination host, port and connection type (ssl)."""
        # get host/port
        if not url.raw_host:
            raise InvalidURL(url)

        # basic auth info
        username, password = url.user, url.password
        if username:
            self.auth = helpers.BasicAuth(username, password or "")

    def update_version(self, version: Union[http.HttpVersion, str]) -> None:
        """Convert request version to two elements tuple.

        parser HTTP version '1.1' => (1, 1)
        """
        if isinstance(version, str):
            v = [part.strip() for part in version.split(".", 1)]
            try:
                version = http.HttpVersion(int(v[0]), int(v[1]))
            except ValueError:
                raise ValueError(
                    f"Can not parse http version number: {version}"
                ) from None
        self.version = version

    def update_headers(self, headers: Optional[LooseHeaders]) -> None:
        """Update request headers."""
        self.headers: CIMultiDict[str] = CIMultiDict()

        # add host
        netloc = cast(str, self.url.raw_host)
        if helpers.is_ipv6_address(netloc):
            netloc = f"[{netloc}]"
        # See https://github.com/aio-libs/aiohttp/issues/3636.
        netloc = netloc.rstrip(".")
        if self.url.port is not None and not self.url.is_default_port():
            netloc += ":" + str(self.url.port)
        self.headers[hdrs.HOST] = netloc

        if headers:
            if isinstance(headers, (dict, MultiDictProxy, MultiDict)):
                headers = headers.items()  # type: ignore[assignment]

            for key, value in headers:  # type: ignore[misc]
                # A special case for Host header
                if key.lower() == "host":
                    self.headers[key] = value
                else:
                    self.headers.add(key, value)

    def update_auto_headers(self, skip_auto_headers: Iterable[str]) -> None:
        """Add default headers (Accept, Accept-Encoding, User-Agent) unless
        already present or explicitly skipped by the caller."""
        self.skip_auto_headers = CIMultiDict(
            (hdr, None) for hdr in sorted(skip_auto_headers)
        )
        used_headers = self.headers.copy()
        used_headers.extend(self.skip_auto_headers)  # type: ignore[arg-type]

        for hdr, val in self.DEFAULT_HEADERS.items():
            if hdr not in used_headers:
                self.headers.add(hdr, val)

        if hdrs.USER_AGENT not in used_headers:
            self.headers[hdrs.USER_AGENT] = SERVER_SOFTWARE

    def update_cookies(self, cookies: Optional[LooseCookies]) -> None:
        """Update request cookies header."""
        if not cookies:
            return

        c = SimpleCookie()
        if hdrs.COOKIE in self.headers:
            # Merge with cookies already present on the request.
            c.load(self.headers.get(hdrs.COOKIE, ""))
            del self.headers[hdrs.COOKIE]

        if isinstance(cookies, Mapping):
            iter_cookies = cookies.items()
        else:
            iter_cookies = cookies  # type: ignore[assignment]
        for name, value in iter_cookies:
            if isinstance(value, Morsel):
                # Preserve coded_value
                mrsl_val = value.get(value.key, Morsel())
                mrsl_val.set(value.key, value.value, value.coded_value)
                c[name] = mrsl_val
            else:
                c[name] = value  # type: ignore[assignment]

        self.headers[hdrs.COOKIE] = c.output(header="", sep=";").strip()

    def update_content_encoding(self, data: Any) -> None:
        """Set request content encoding."""
        if data is None:
            return

        enc = self.headers.get(hdrs.CONTENT_ENCODING, "").lower()
        if enc:
            if self.compress:
                raise ValueError(
                    "compress can not be set " "if Content-Encoding header is set"
                )
        elif self.compress:
            if not isinstance(self.compress, str):
                self.compress = "deflate"
            self.headers[hdrs.CONTENT_ENCODING] = self.compress
            self.chunked = True  # enable chunked, no need to deal with length

    def update_transfer_encoding(self) -> None:
        """Analyze transfer-encoding header."""
        te = self.headers.get(hdrs.TRANSFER_ENCODING, "").lower()

        if "chunked" in te:
            if self.chunked:
                raise ValueError(
                    "chunked can not be set "
                    'if "Transfer-Encoding: chunked" header is set'
                )

        elif self.chunked:
            if hdrs.CONTENT_LENGTH in self.headers:
                raise ValueError(
                    "chunked can not be set " "if Content-Length header is set"
                )

            self.headers[hdrs.TRANSFER_ENCODING] = "chunked"
        else:
            if hdrs.CONTENT_LENGTH not in self.headers:
                self.headers[hdrs.CONTENT_LENGTH] = str(len(self.body))

    def update_auth(self, auth: Optional[BasicAuth], trust_env: bool = False) -> None:
        """Set basic auth."""
        if auth is None:
            auth = self.auth
        if auth is None and trust_env and self.url.host is not None:
            # Best-effort netrc lookup; missing entries are not an error.
            netrc_obj = netrc_from_env()
            with contextlib.suppress(LookupError):
                auth = basicauth_from_netrc(netrc_obj, self.url.host)
        if auth is None:
            return

        if not isinstance(auth, helpers.BasicAuth):
            raise TypeError("BasicAuth() tuple is required instead")

        self.headers[hdrs.AUTHORIZATION] = auth.encode()

    def update_body_from_data(self, body: Any) -> None:
        """Wrap *body* into a Payload and derive length/chunking headers."""
        if body is None:
            return

        # FormData
        if isinstance(body, FormData):
            body = body()

        try:
            body = payload.PAYLOAD_REGISTRY.get(body, disposition=None)
        except payload.LookupError:
            body = FormData(body)()

        self.body = body

        # enable chunked encoding if needed
        if not self.chunked:
            if hdrs.CONTENT_LENGTH not in self.headers:
                size = body.size
                if size is None:
                    self.chunked = True
                else:
                    # NOTE(review): this inner check is redundant (the outer
                    # branch already established the header is absent); kept
                    # for byte-level parity with upstream.
                    if hdrs.CONTENT_LENGTH not in self.headers:
                        self.headers[hdrs.CONTENT_LENGTH] = str(size)

        # copy payload headers
        assert body.headers
        for key, value in body.headers.items():
            if key in self.headers:
                continue
            if key in self.skip_auto_headers:
                continue
            self.headers[key] = value

    def update_expect_continue(self, expect: bool = False) -> None:
        """Set up the '100 Continue' waiter when requested or header-implied."""
        if expect:
            self.headers[hdrs.EXPECT] = "100-continue"
        elif self.headers.get(hdrs.EXPECT, "").lower() == "100-continue":
            expect = True

        if expect:
            self._continue = self.loop.create_future()

    def update_proxy(
        self,
        proxy: Optional[URL],
        proxy_auth: Optional[BasicAuth],
        proxy_headers: Optional[LooseHeaders],
    ) -> None:
        """Store proxy settings after validating the auth type."""
        if proxy_auth and not isinstance(proxy_auth, helpers.BasicAuth):
            raise ValueError("proxy_auth must be None or BasicAuth() tuple")
        self.proxy = proxy
        self.proxy_auth = proxy_auth
        self.proxy_headers = proxy_headers

    def keep_alive(self) -> bool:
        """Decide whether the connection may be reused after this request."""
        if self.version < HttpVersion10:
            # keep alive not supported at all
            return False
        if self.version == HttpVersion10:
            if self.headers.get(hdrs.CONNECTION) == "keep-alive":
                return True
            else:  # no headers means we close for Http 1.0
                return False
        elif self.headers.get(hdrs.CONNECTION) == "close":
            return False

        return True

    async def write_bytes(
        self, writer: AbstractStreamWriter, conn: "Connection"
    ) -> None:
        """Support coroutines that yields bytes objects."""
        # 100 response
        if self._continue is not None:
            try:
                await writer.drain()
                await self._continue
            except asyncio.CancelledError:
                return

        protocol = conn.protocol
        assert protocol is not None
        try:
            if isinstance(self.body, payload.Payload):
                await self.body.write(writer)
            else:
                if isinstance(self.body, (bytes, bytearray)):
                    self.body = (self.body,)  # type: ignore[assignment]

                for chunk in self.body:
                    await writer.write(chunk)  # type: ignore[arg-type]
        except OSError as underlying_exc:
            reraised_exc = underlying_exc

            # asyncio.TimeoutError subclasses OSError on some versions; only
            # wrap genuine OS-level failures in ClientOSError.
            exc_is_not_timeout = underlying_exc.errno is not None or not isinstance(
                underlying_exc, asyncio.TimeoutError
            )
            if exc_is_not_timeout:
                reraised_exc = ClientOSError(
                    underlying_exc.errno,
                    f"Can not write request body for {self.url !s}",
                )

            set_exception(protocol, reraised_exc, underlying_exc)
        except asyncio.CancelledError:
            await writer.write_eof()
        except Exception as underlying_exc:
            set_exception(
                protocol,
                ClientConnectionError(
                    f"Failed to send bytes into the underlying connection {conn !s}",
                ),
                underlying_exc,
            )
        else:
            await writer.write_eof()
            protocol.start_timeout()

    async def send(self, conn: "Connection") -> "ClientResponse":
        """Write the request line, headers and (as a task) the body over
        *conn*, returning the not-yet-started ClientResponse."""
        # Specify request target:
        # - CONNECT request must send authority form URI
        # - not CONNECT proxy must send absolute form URI
        # - most common is origin form URI
        if self.method == hdrs.METH_CONNECT:
            connect_host = self.url.raw_host
            assert connect_host is not None
            if helpers.is_ipv6_address(connect_host):
                connect_host = f"[{connect_host}]"
            path = f"{connect_host}:{self.url.port}"
        elif self.proxy and not self.is_ssl():
            path = str(self.url)
        else:
            path = self.url.raw_path
            if self.url.raw_query_string:
                path += "?" + self.url.raw_query_string

        protocol = conn.protocol
        assert protocol is not None
        writer = StreamWriter(
            protocol,
            self.loop,
            on_chunk_sent=functools.partial(
                self._on_chunk_request_sent, self.method, self.url
            ),
            on_headers_sent=functools.partial(
                self._on_headers_request_sent, self.method, self.url
            ),
        )

        if self.compress:
            writer.enable_compression(self.compress)

        if self.chunked is not None:
            writer.enable_chunking()

        # set default content-type
        if (
            self.method in self.POST_METHODS
            and hdrs.CONTENT_TYPE not in self.skip_auto_headers
            and hdrs.CONTENT_TYPE not in self.headers
        ):
            self.headers[hdrs.CONTENT_TYPE] = "application/octet-stream"

        # set the connection header
        connection = self.headers.get(hdrs.CONNECTION)
        if not connection:
            if self.keep_alive():
                if self.version == HttpVersion10:
                    connection = "keep-alive"
            else:
                if self.version == HttpVersion11:
                    connection = "close"

        if connection is not None:
            self.headers[hdrs.CONNECTION] = connection

        # status + headers
        status_line = "{0} {1} HTTP/{v.major}.{v.minor}".format(
            self.method, path, v=self.version
        )
        await writer.write_headers(status_line, self.headers)

        self._writer = self.loop.create_task(self.write_bytes(writer, conn))

        response_class = self.response_class
        assert response_class is not None
        self.response = response_class(
            self.method,
            self.original_url,
            writer=self._writer,
            continue100=self._continue,
            timer=self._timer,
            request_info=self.request_info,
            traces=self._traces,
            loop=self.loop,
            session=self._session,
        )
        return self.response

    async def close(self) -> None:
        """Wait for the body-writer task to finish, ignoring cancellation."""
        if self._writer is not None:
            with contextlib.suppress(asyncio.CancelledError):
                await self._writer

    def terminate(self) -> None:
        """Cancel the body-writer task immediately (no awaiting)."""
        if self._writer is not None:
            if not self.loop.is_closed():
                self._writer.cancel()
            self._writer.remove_done_callback(self.__reset_writer)
            self._writer = None

    async def _on_chunk_request_sent(self, method: str, url: URL, chunk: bytes) -> None:
        # Fan a sent body chunk out to every registered trace.
        for trace in self._traces:
            await trace.send_request_chunk_sent(method, url, chunk)

    async def _on_headers_request_sent(
        self, method: str, url: URL, headers: "CIMultiDict[str]"
    ) -> None:
        # Fan the sent headers out to every registered trace.
        for trace in self._traces:
            await trace.send_request_headers(method, url, headers)
|
763 |
+
|
764 |
+
|
765 |
+
class ClientResponse(HeadersMixin):
|
766 |
+
|
767 |
+
# Some of these attributes are None when created,
|
768 |
+
# but will be set by the start() method.
|
769 |
+
# As the end user will likely never see the None values, we cheat the types below.
|
770 |
+
# from the Status-Line of the response
|
771 |
+
version: Optional[HttpVersion] = None # HTTP-Version
|
772 |
+
status: int = None # type: ignore[assignment] # Status-Code
|
773 |
+
reason: Optional[str] = None # Reason-Phrase
|
774 |
+
|
775 |
+
content: StreamReader = None # type: ignore[assignment] # Payload stream
|
776 |
+
_headers: CIMultiDictProxy[str] = None # type: ignore[assignment]
|
777 |
+
_raw_headers: RawHeaders = None # type: ignore[assignment]
|
778 |
+
|
779 |
+
_connection = None # current connection
|
780 |
+
_source_traceback: Optional[traceback.StackSummary] = None
|
781 |
+
# set up by ClientRequest after ClientResponse object creation
|
782 |
+
# post-init stage allows to not change ctor signature
|
783 |
+
_closed = True # to allow __del__ for non-initialized properly response
|
784 |
+
_released = False
|
785 |
+
__writer = None
|
786 |
+
|
787 |
+
def __init__(
|
788 |
+
self,
|
789 |
+
method: str,
|
790 |
+
url: URL,
|
791 |
+
*,
|
792 |
+
writer: "asyncio.Task[None]",
|
793 |
+
continue100: Optional["asyncio.Future[bool]"],
|
794 |
+
timer: BaseTimerContext,
|
795 |
+
request_info: RequestInfo,
|
796 |
+
traces: List["Trace"],
|
797 |
+
loop: asyncio.AbstractEventLoop,
|
798 |
+
session: "ClientSession",
|
799 |
+
) -> None:
|
800 |
+
assert isinstance(url, URL)
|
801 |
+
|
802 |
+
self.method = method
|
803 |
+
self.cookies = SimpleCookie()
|
804 |
+
|
805 |
+
self._real_url = url
|
806 |
+
self._url = url.with_fragment(None)
|
807 |
+
self._body: Any = None
|
808 |
+
self._writer: Optional[asyncio.Task[None]] = writer
|
809 |
+
self._continue = continue100 # None by default
|
810 |
+
self._closed = True
|
811 |
+
self._history: Tuple[ClientResponse, ...] = ()
|
812 |
+
self._request_info = request_info
|
813 |
+
self._timer = timer if timer is not None else TimerNoop()
|
814 |
+
self._cache: Dict[str, Any] = {}
|
815 |
+
self._traces = traces
|
816 |
+
self._loop = loop
|
817 |
+
# store a reference to session #1985
|
818 |
+
self._session: Optional[ClientSession] = session
|
819 |
+
# Save reference to _resolve_charset, so that get_encoding() will still
|
820 |
+
# work after the response has finished reading the body.
|
821 |
+
if session is None:
|
822 |
+
# TODO: Fix session=None in tests (see ClientRequest.__init__).
|
823 |
+
self._resolve_charset: Callable[
|
824 |
+
["ClientResponse", bytes], str
|
825 |
+
] = lambda *_: "utf-8"
|
826 |
+
else:
|
827 |
+
self._resolve_charset = session._resolve_charset
|
828 |
+
if loop.get_debug():
|
829 |
+
self._source_traceback = traceback.extract_stack(sys._getframe(1))
|
830 |
+
|
831 |
+
def __reset_writer(self, _: object = None) -> None:
|
832 |
+
self.__writer = None
|
833 |
+
|
834 |
+
@property
|
835 |
+
def _writer(self) -> Optional["asyncio.Task[None]"]:
|
836 |
+
return self.__writer
|
837 |
+
|
838 |
+
@_writer.setter
|
839 |
+
def _writer(self, writer: Optional["asyncio.Task[None]"]) -> None:
|
840 |
+
if self.__writer is not None:
|
841 |
+
self.__writer.remove_done_callback(self.__reset_writer)
|
842 |
+
self.__writer = writer
|
843 |
+
if writer is not None:
|
844 |
+
writer.add_done_callback(self.__reset_writer)
|
845 |
+
|
846 |
+
@reify
|
847 |
+
def url(self) -> URL:
|
848 |
+
return self._url
|
849 |
+
|
850 |
+
@reify
|
851 |
+
def url_obj(self) -> URL:
|
852 |
+
warnings.warn("Deprecated, use .url #1654", DeprecationWarning, stacklevel=2)
|
853 |
+
return self._url
|
854 |
+
|
855 |
+
@reify
|
856 |
+
def real_url(self) -> URL:
|
857 |
+
return self._real_url
|
858 |
+
|
859 |
+
@reify
|
860 |
+
def host(self) -> str:
|
861 |
+
assert self._url.host is not None
|
862 |
+
return self._url.host
|
863 |
+
|
864 |
+
@reify
|
865 |
+
def headers(self) -> "CIMultiDictProxy[str]":
|
866 |
+
return self._headers
|
867 |
+
|
868 |
+
@reify
|
869 |
+
def raw_headers(self) -> RawHeaders:
|
870 |
+
return self._raw_headers
|
871 |
+
|
872 |
+
@reify
|
873 |
+
def request_info(self) -> RequestInfo:
|
874 |
+
return self._request_info
|
875 |
+
|
876 |
+
@reify
|
877 |
+
def content_disposition(self) -> Optional[ContentDisposition]:
|
878 |
+
raw = self._headers.get(hdrs.CONTENT_DISPOSITION)
|
879 |
+
if raw is None:
|
880 |
+
return None
|
881 |
+
disposition_type, params_dct = multipart.parse_content_disposition(raw)
|
882 |
+
params = MappingProxyType(params_dct)
|
883 |
+
filename = multipart.content_disposition_filename(params)
|
884 |
+
return ContentDisposition(disposition_type, params, filename)
|
885 |
+
|
886 |
+
def __del__(self, _warnings: Any = warnings) -> None:
|
887 |
+
if self._closed:
|
888 |
+
return
|
889 |
+
|
890 |
+
if self._connection is not None:
|
891 |
+
self._connection.release()
|
892 |
+
self._cleanup_writer()
|
893 |
+
|
894 |
+
if self._loop.get_debug():
|
895 |
+
kwargs = {"source": self}
|
896 |
+
_warnings.warn(f"Unclosed response {self!r}", ResourceWarning, **kwargs)
|
897 |
+
context = {"client_response": self, "message": "Unclosed response"}
|
898 |
+
if self._source_traceback:
|
899 |
+
context["source_traceback"] = self._source_traceback
|
900 |
+
self._loop.call_exception_handler(context)
|
901 |
+
|
902 |
+
def __repr__(self) -> str:
    """Two-line debug representation: status line, then headers."""
    url_text = str(self.url)
    reason = self.reason
    if reason:
        # Keep the repr ASCII-safe even for non-ASCII reason phrases.
        reason_text = reason.encode("ascii", "backslashreplace").decode("ascii")
    else:
        reason_text = "None"
    status_line = f"<ClientResponse({url_text}) [{self.status} {reason_text}]>"
    # The original used print()-to-StringIO; each print appended "\n",
    # so reproduce exactly one trailing newline per line.
    return f"{status_line}\n{self.headers}\n"
|
919 |
+
|
920 |
+
@property
def connection(self) -> Optional["Connection"]:
    """Underlying connection, or ``None`` once released/detached."""
    return self._connection
|
923 |
+
|
924 |
+
@reify
def history(self) -> Tuple["ClientResponse", ...]:
    """A sequence of responses, if redirects occurred."""
    return self._history
|
928 |
+
|
929 |
+
@reify
def links(self) -> "MultiDictProxy[MultiDictProxy[Union[str, URL]]]":
    """Parsed ``Link`` headers keyed by ``rel`` (or by URL if no rel).

    Each entry carries the link parameters plus a ``url`` key holding the
    target resolved against the response URL. Returns an empty proxy when
    no ``Link`` header is present.
    """
    # Multiple Link headers are equivalent to one comma-joined header.
    links_str = ", ".join(self.headers.getall("link", []))

    if not links_str:
        return MultiDictProxy(MultiDict())

    links: MultiDict[MultiDictProxy[Union[str, URL]]] = MultiDict()

    # Split on commas that start a new "<url>" entry only, so commas
    # inside parameter values are left alone.
    for val in re.split(r",(?=\s*<)", links_str):
        match = re.match(r"\s*<(.*)>(.*)", val)
        if match is None:  # pragma: no cover
            # the check exists to suppress mypy error
            continue
        url, params_str = match.groups()
        params = params_str.split(";")[1:]

        link: MultiDict[Union[str, URL]] = MultiDict()

        for param in params:
            # key=value with optional single/double quoting around value.
            match = re.match(r"^\s*(\S*)\s*=\s*(['\"]?)(.*?)(\2)\s*$", param, re.M)
            if match is None:  # pragma: no cover
                # the check exists to suppress mypy error
                continue
            key, _, value, _ = match.groups()

            link.add(key, value)

        # Fall back to the raw URL as the outer key when rel is missing.
        key = link.get("rel", url)

        link.add("url", self.url.join(URL(url)))

        links.add(str(key), MultiDictProxy(link))

    return MultiDictProxy(links)
|
964 |
+
|
965 |
+
async def start(self, connection: "Connection") -> "ClientResponse":
    """Start response processing.

    Reads status line, headers and payload metadata from *connection*,
    skipping informational 1xx interim responses (except 101, which
    signals a protocol switch). Raises ClientResponseError on a malformed
    response. Returns ``self`` for chaining.
    """
    self._closed = False
    self._protocol = connection.protocol
    self._connection = connection

    with self._timer:
        while True:
            # read response
            try:
                protocol = self._protocol
                message, payload = await protocol.read()  # type: ignore[union-attr]
            except http.HttpProcessingError as exc:
                raise ClientResponseError(
                    self.request_info,
                    self.history,
                    status=exc.code,
                    message=exc.message,
                    headers=exc.headers,
                ) from exc

            # Loop only for 1xx interim responses; 101 (upgrade) is final.
            if message.code < 100 or message.code > 199 or message.code == 101:
                break

            # A 100 Continue unblocks the request body writer.
            if self._continue is not None:
                set_result(self._continue, True)
                self._continue = None

    # payload eof handler
    payload.on_eof(self._response_eof)

    # response status
    self.version = message.version
    self.status = message.code
    self.reason = message.reason

    # headers
    self._headers = message.headers  # type is CIMultiDictProxy
    self._raw_headers = message.raw_headers  # type is Tuple[bytes, bytes]

    # payload
    self.content = payload

    # cookies
    for hdr in self.headers.getall(hdrs.SET_COOKIE, ()):
        try:
            self.cookies.load(hdr)
        except CookieError as exc:
            # A bad cookie must not fail the whole response.
            client_logger.warning("Can not load response cookies: %s", exc)
    return self
|
1015 |
+
|
1016 |
+
def _response_eof(self) -> None:
    """Payload-EOF callback: release resources once the body is consumed."""
    if self._closed:
        return

    # protocol could be None because connection could be detached
    protocol = self._connection and self._connection.protocol
    if protocol is not None and protocol.upgraded:
        # Upgraded (e.g. websocket) connections outlive the HTTP response.
        return

    self._closed = True
    self._cleanup_writer()
    self._release_connection()
|
1028 |
+
|
1029 |
+
@property
def closed(self) -> bool:
    """``True`` once the response has been closed or fully released."""
    return self._closed
|
1032 |
+
|
1033 |
+
def close(self) -> None:
    """Hard-close the response: abort the payload and close the connection."""
    if not self._released:
        # Mark unread content as broken so pending readers fail fast.
        self._notify_content()

    self._closed = True
    # Nothing more to do if the loop is already gone (interpreter teardown).
    if self._loop is None or self._loop.is_closed():
        return

    self._cleanup_writer()
    if self._connection is not None:
        # close() (not release()) discards the connection instead of
        # returning it to the pool.
        self._connection.close()
        self._connection = None
|
1045 |
+
|
1046 |
+
def release(self) -> Any:
    """Release the connection back to the pool (soft close).

    Returns a ``noop()`` awaitable for backward compatibility with code
    that does ``await resp.release()``.
    """
    if not self._released:
        self._notify_content()

    self._closed = True

    self._cleanup_writer()
    self._release_connection()
    return noop()
|
1055 |
+
|
1056 |
+
@property
def ok(self) -> bool:
    """Returns ``True`` if ``status`` is less than ``400``, ``False`` if not.

    This is **not** a check for ``200 OK`` but a check that the response
    status is under 400.
    """
    return self.status < 400
|
1064 |
+
|
1065 |
+
def raise_for_status(self) -> None:
    """Raise :class:`ClientResponseError` if the status is 400 or above.

    The connection is released first so the error path does not leak it.
    """
    if not self.ok:
        # reason should always be not None for a started response
        assert self.reason is not None
        self.release()
        raise ClientResponseError(
            self.request_info,
            self.history,
            status=self.status,
            message=self.reason,
            headers=self.headers,
        )
|
1077 |
+
|
1078 |
+
def _release_connection(self) -> None:
    """Return the connection to the pool once the request writer is done."""
    if self._connection is not None:
        if self._writer is None:
            self._connection.release()
            self._connection = None
        else:
            # Writer still running: defer the release until it finishes.
            self._writer.add_done_callback(lambda f: self._release_connection())
|
1085 |
+
|
1086 |
+
async def _wait_released(self) -> None:
    """Await the request writer, then release the connection."""
    if self._writer is not None:
        await self._writer
    self._release_connection()
|
1090 |
+
|
1091 |
+
def _cleanup_writer(self) -> None:
    """Cancel the in-flight request writer and drop the session reference."""
    if self._writer is not None:
        self._writer.cancel()
    # Break the reference cycle to the session regardless of writer state.
    self._session = None
|
1095 |
+
|
1096 |
+
def _notify_content(self) -> None:
    """Mark unread payload as failed so pending readers get an error."""
    content = self.content
    if content and content.exception() is None:
        set_exception(content, ClientConnectionError("Connection closed"))
    self._released = True
|
1101 |
+
|
1102 |
+
async def wait_for_close(self) -> None:
    """Wait until the request writer finishes, then release the response."""
    if self._writer is not None:
        await self._writer
    self.release()
|
1106 |
+
|
1107 |
+
async def read(self) -> bytes:
    """Read response payload.

    The body is read once and cached in ``self._body``; subsequent calls
    return the cached bytes. Raises ClientConnectionError if the response
    was explicitly released before the body was read.
    """
    if self._body is None:
        try:
            self._body = await self.content.read()
            for trace in self._traces:
                await trace.send_response_chunk_received(
                    self.method, self.url, self._body
                )
        except BaseException:
            # Any failure (incl. cancellation) leaves the stream unusable;
            # hard-close rather than returning a poisoned connection.
            self.close()
            raise
    elif self._released:  # Response explicitly released
        raise ClientConnectionError("Connection closed")

    protocol = self._connection and self._connection.protocol
    if protocol is None or not protocol.upgraded:
        await self._wait_released()  # Underlying connection released
    return self._body  # type: ignore[no-any-return]
|
1126 |
+
|
1127 |
+
def get_encoding(self) -> str:
    """Return the payload text encoding.

    Resolution order: ``charset`` from Content-Type (if known to codecs),
    then UTF-8 for JSON-family media types, then a charset detector run
    over the (already read) body. Raises RuntimeError when detection is
    needed but the body has not been read yet.
    """
    ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
    mimetype = helpers.parse_mimetype(ctype)

    encoding = mimetype.parameters.get("charset")
    if encoding:
        # Ignore charsets Python does not know; fall through to defaults.
        with contextlib.suppress(LookupError):
            return codecs.lookup(encoding).name

    if mimetype.type == "application" and (
        mimetype.subtype == "json" or mimetype.subtype == "rdap"
    ):
        # RFC 7159 states that the default encoding is UTF-8.
        # RFC 7483 defines application/rdap+json
        return "utf-8"

    if self._body is None:
        raise RuntimeError(
            "Cannot compute fallback encoding of a not yet read body"
        )

    return self._resolve_charset(self, self._body)
|
1149 |
+
|
1150 |
+
async def text(self, encoding: Optional[str] = None, errors: str = "strict") -> str:
    """Read response payload and decode.

    ``encoding`` overrides the detected encoding; ``errors`` is passed to
    ``bytes.decode``.
    """
    if self._body is None:
        await self.read()

    if encoding is None:
        encoding = self.get_encoding()

    return self._body.decode(  # type: ignore[no-any-return,union-attr]
        encoding, errors=errors
    )
|
1161 |
+
|
1162 |
+
async def json(
    self,
    *,
    encoding: Optional[str] = None,
    loads: JSONDecoder = DEFAULT_JSON_DECODER,
    content_type: Optional[str] = "application/json",
) -> Any:
    """Read and decode JSON response.

    Verifies the Content-Type against *content_type* unless it is falsy,
    returns ``None`` for an empty/whitespace-only body, and decodes with
    *loads* (default JSON decoder) using the detected or given encoding.
    """
    if self._body is None:
        await self.read()

    if content_type:
        ctype = self.headers.get(hdrs.CONTENT_TYPE, "").lower()
        if not _is_expected_content_type(ctype, content_type):
            raise ContentTypeError(
                self.request_info,
                self.history,
                message=(
                    "Attempt to decode JSON with " "unexpected mimetype: %s" % ctype
                ),
                headers=self.headers,
            )

    stripped = self._body.strip()  # type: ignore[union-attr]
    if not stripped:
        return None

    if encoding is None:
        encoding = self.get_encoding()

    return loads(stripped.decode(encoding))
|
1193 |
+
|
1194 |
+
async def __aenter__(self) -> "ClientResponse":
    """Enter ``async with``; the response itself is the context value."""
    return self
|
1196 |
+
|
1197 |
+
async def __aexit__(
    self,
    exc_type: Optional[Type[BaseException]],
    exc_val: Optional[BaseException],
    exc_tb: Optional[TracebackType],
) -> None:
    """Release the response on context exit, even on error."""
    # similar to _RequestContextManager, we do not need to check
    # for exceptions, response object can close connection
    # if state is broken
    self.release()
    await self.wait_for_close()
|
llmeval-env/lib/python3.10/site-packages/aiohttp/client_ws.py
ADDED
@@ -0,0 +1,315 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""WebSocket client for asyncio."""
|
2 |
+
|
3 |
+
import asyncio
|
4 |
+
import sys
|
5 |
+
from typing import Any, Optional, cast
|
6 |
+
|
7 |
+
from .client_exceptions import ClientError
|
8 |
+
from .client_reqrep import ClientResponse
|
9 |
+
from .helpers import call_later, set_result
|
10 |
+
from .http import (
|
11 |
+
WS_CLOSED_MESSAGE,
|
12 |
+
WS_CLOSING_MESSAGE,
|
13 |
+
WebSocketError,
|
14 |
+
WSCloseCode,
|
15 |
+
WSMessage,
|
16 |
+
WSMsgType,
|
17 |
+
)
|
18 |
+
from .http_websocket import WebSocketWriter # WSMessage
|
19 |
+
from .streams import EofStream, FlowControlDataQueue
|
20 |
+
from .typedefs import (
|
21 |
+
DEFAULT_JSON_DECODER,
|
22 |
+
DEFAULT_JSON_ENCODER,
|
23 |
+
JSONDecoder,
|
24 |
+
JSONEncoder,
|
25 |
+
)
|
26 |
+
|
27 |
+
if sys.version_info >= (3, 11):
|
28 |
+
import asyncio as async_timeout
|
29 |
+
else:
|
30 |
+
import async_timeout
|
31 |
+
|
32 |
+
|
33 |
+
class ClientWebSocketResponse:
    """Client-side websocket connection built on an upgraded ClientResponse.

    Wraps a frame reader/writer pair, optionally maintaining a ping/pong
    heartbeat, and exposes send/receive helpers plus async iteration over
    incoming messages.
    """

    def __init__(
        self,
        reader: "FlowControlDataQueue[WSMessage]",
        writer: WebSocketWriter,
        protocol: Optional[str],
        response: ClientResponse,
        timeout: float,
        autoclose: bool,
        autoping: bool,
        loop: asyncio.AbstractEventLoop,
        *,
        receive_timeout: Optional[float] = None,
        heartbeat: Optional[float] = None,
        compress: int = 0,
        client_notakeover: bool = False,
    ) -> None:
        self._response = response
        self._conn = response.connection

        self._writer = writer
        self._reader = reader
        self._protocol = protocol
        self._closed = False
        self._closing = False
        self._close_code: Optional[int] = None
        self._timeout = timeout
        self._receive_timeout = receive_timeout
        self._autoclose = autoclose
        self._autoping = autoping
        self._heartbeat = heartbeat
        self._heartbeat_cb: Optional[asyncio.TimerHandle] = None
        if heartbeat is not None:
            # A pong must arrive within half the heartbeat interval.
            self._pong_heartbeat = heartbeat / 2.0
        self._pong_response_cb: Optional[asyncio.TimerHandle] = None
        self._loop = loop
        self._waiting: Optional[asyncio.Future[bool]] = None
        self._exception: Optional[BaseException] = None
        self._compress = compress
        self._client_notakeover = client_notakeover

        self._reset_heartbeat()

    def _cancel_heartbeat(self) -> None:
        """Cancel both the pending ping timer and the pong deadline."""
        if self._pong_response_cb is not None:
            self._pong_response_cb.cancel()
            self._pong_response_cb = None

        if self._heartbeat_cb is not None:
            self._heartbeat_cb.cancel()
            self._heartbeat_cb = None

    def _reset_heartbeat(self) -> None:
        """(Re)arm the heartbeat timer; called after each received frame."""
        self._cancel_heartbeat()

        if self._heartbeat is not None:
            self._heartbeat_cb = call_later(
                self._send_heartbeat,
                self._heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _send_heartbeat(self) -> None:
        """Heartbeat timer fired: send a ping and arm the pong deadline."""
        if self._heartbeat is not None and not self._closed:
            # fire-and-forget a task is not perfect but maybe ok for
            # sending ping. Otherwise we need a long-living heartbeat
            # task in the class.
            self._loop.create_task(self._writer.ping())

            if self._pong_response_cb is not None:
                self._pong_response_cb.cancel()
            self._pong_response_cb = call_later(
                self._pong_not_received,
                self._pong_heartbeat,
                self._loop,
                timeout_ceil_threshold=self._conn._connector._timeout_ceil_threshold
                if self._conn is not None
                else 5,
            )

    def _pong_not_received(self) -> None:
        """Pong deadline expired: treat the connection as dead."""
        if not self._closed:
            self._closed = True
            self._close_code = WSCloseCode.ABNORMAL_CLOSURE
            self._exception = asyncio.TimeoutError()
            self._response.close()

    @property
    def closed(self) -> bool:
        # True once the websocket has been closed (locally or by error).
        return self._closed

    @property
    def close_code(self) -> Optional[int]:
        # Close code observed/assigned at shutdown, None while open.
        return self._close_code

    @property
    def protocol(self) -> Optional[str]:
        # Negotiated websocket subprotocol, if any.
        return self._protocol

    @property
    def compress(self) -> int:
        # Negotiated per-message-deflate window size (0 = no compression).
        return self._compress

    @property
    def client_notakeover(self) -> bool:
        # Whether client_no_context_takeover was negotiated.
        return self._client_notakeover

    def get_extra_info(self, name: str, default: Any = None) -> Any:
        """extra info from connection transport"""
        conn = self._response.connection
        if conn is None:
            return default
        transport = conn.transport
        if transport is None:
            return default
        return transport.get_extra_info(name, default)

    def exception(self) -> Optional[BaseException]:
        """Last internal error, or ``None``."""
        return self._exception

    async def ping(self, message: bytes = b"") -> None:
        """Send a PING frame with an optional payload."""
        await self._writer.ping(message)

    async def pong(self, message: bytes = b"") -> None:
        """Send a PONG frame with an optional payload."""
        await self._writer.pong(message)

    async def send_str(self, data: str, compress: Optional[int] = None) -> None:
        """Send a TEXT frame; *data* must be ``str``."""
        if not isinstance(data, str):
            raise TypeError("data argument must be str (%r)" % type(data))
        await self._writer.send(data, binary=False, compress=compress)

    async def send_bytes(self, data: bytes, compress: Optional[int] = None) -> None:
        """Send a BINARY frame; *data* must be bytes-like."""
        if not isinstance(data, (bytes, bytearray, memoryview)):
            raise TypeError("data argument must be byte-ish (%r)" % type(data))
        await self._writer.send(data, binary=True, compress=compress)

    async def send_json(
        self,
        data: Any,
        compress: Optional[int] = None,
        *,
        dumps: JSONEncoder = DEFAULT_JSON_ENCODER,
    ) -> None:
        """Serialize *data* with *dumps* and send it as a TEXT frame."""
        await self.send_str(dumps(data), compress=compress)

    async def close(self, *, code: int = WSCloseCode.OK, message: bytes = b"") -> bool:
        """Close the websocket handshake-style.

        Sends a CLOSE frame, then drains incoming frames (bounded by
        ``self._timeout``) until the peer's CLOSE arrives. Returns True if
        this call performed the close, False if already closed.
        """
        # we need to break `receive()` cycle first,
        # `close()` may be called from different task
        if self._waiting is not None and not self._closing:
            self._closing = True
            self._reader.feed_data(WS_CLOSING_MESSAGE, 0)
            await self._waiting

        if not self._closed:
            self._cancel_heartbeat()
            self._closed = True
            try:
                await self._writer.close(code, message)
            except asyncio.CancelledError:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._response.close()
                raise
            except Exception as exc:
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                self._exception = exc
                self._response.close()
                return True

            if self._close_code:
                # Peer's CLOSE was already seen; nothing left to wait for.
                self._response.close()
                return True

            while True:
                try:
                    async with async_timeout.timeout(self._timeout):
                        msg = await self._reader.read()
                except asyncio.CancelledError:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._response.close()
                    raise
                except Exception as exc:
                    self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                    self._exception = exc
                    self._response.close()
                    return True

                # Drop non-CLOSE frames received during the closing drain.
                if msg.type == WSMsgType.CLOSE:
                    self._close_code = msg.data
                    self._response.close()
                    return True
        else:
            return False

    async def receive(self, timeout: Optional[float] = None) -> WSMessage:
        """Receive the next message, transparently handling control frames.

        PING/PONG are auto-answered/skipped when autoping is on; CLOSE
        triggers autoclose when enabled. Errors are converted into
        ERROR/CLOSED messages rather than raised (except cancellation and
        timeouts, which propagate).
        """
        while True:
            if self._waiting is not None:
                raise RuntimeError("Concurrent call to receive() is not allowed")

            if self._closed:
                return WS_CLOSED_MESSAGE
            elif self._closing:
                await self.close()
                return WS_CLOSED_MESSAGE

            try:
                # _waiting lets close() from another task interrupt us.
                self._waiting = self._loop.create_future()
                try:
                    async with async_timeout.timeout(timeout or self._receive_timeout):
                        msg = await self._reader.read()
                    self._reset_heartbeat()
                finally:
                    waiter = self._waiting
                    self._waiting = None
                    set_result(waiter, True)
            except (asyncio.CancelledError, asyncio.TimeoutError):
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                raise
            except EofStream:
                self._close_code = WSCloseCode.OK
                await self.close()
                return WSMessage(WSMsgType.CLOSED, None, None)
            except ClientError:
                self._closed = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                return WS_CLOSED_MESSAGE
            except WebSocketError as exc:
                self._close_code = exc.code
                await self.close(code=exc.code)
                return WSMessage(WSMsgType.ERROR, exc, None)
            except Exception as exc:
                self._exception = exc
                self._closing = True
                self._close_code = WSCloseCode.ABNORMAL_CLOSURE
                await self.close()
                return WSMessage(WSMsgType.ERROR, exc, None)

            if msg.type == WSMsgType.CLOSE:
                self._closing = True
                self._close_code = msg.data
                if not self._closed and self._autoclose:
                    await self.close()
            elif msg.type == WSMsgType.CLOSING:
                self._closing = True
            elif msg.type == WSMsgType.PING and self._autoping:
                await self.pong(msg.data)
                continue
            elif msg.type == WSMsgType.PONG and self._autoping:
                continue

            return msg

    async def receive_str(self, *, timeout: Optional[float] = None) -> str:
        """Receive a message and require it to be TEXT; return its data."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.TEXT:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not str")
        return cast(str, msg.data)

    async def receive_bytes(self, *, timeout: Optional[float] = None) -> bytes:
        """Receive a message and require it to be BINARY; return its data."""
        msg = await self.receive(timeout)
        if msg.type != WSMsgType.BINARY:
            raise TypeError(f"Received message {msg.type}:{msg.data!r} is not bytes")
        return cast(bytes, msg.data)

    async def receive_json(
        self,
        *,
        loads: JSONDecoder = DEFAULT_JSON_DECODER,
        timeout: Optional[float] = None,
    ) -> Any:
        """Receive a TEXT message and decode it with *loads*."""
        data = await self.receive_str(timeout=timeout)
        return loads(data)

    def __aiter__(self) -> "ClientWebSocketResponse":
        return self

    async def __anext__(self) -> WSMessage:
        # Iteration stops at any close-family message.
        msg = await self.receive()
        if msg.type in (WSMsgType.CLOSE, WSMsgType.CLOSING, WSMsgType.CLOSED):
            raise StopAsyncIteration
        return msg
|
llmeval-env/lib/python3.10/site-packages/aiohttp/compression_utils.py
ADDED
@@ -0,0 +1,157 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
import zlib
|
3 |
+
from concurrent.futures import Executor
|
4 |
+
from typing import Optional, cast
|
5 |
+
|
6 |
+
try:
|
7 |
+
try:
|
8 |
+
import brotlicffi as brotli
|
9 |
+
except ImportError:
|
10 |
+
import brotli
|
11 |
+
|
12 |
+
HAS_BROTLI = True
|
13 |
+
except ImportError: # pragma: no cover
|
14 |
+
HAS_BROTLI = False
|
15 |
+
|
16 |
+
MAX_SYNC_CHUNK_SIZE = 1024
|
17 |
+
|
18 |
+
|
19 |
+
def encoding_to_mode(
    encoding: Optional[str] = None,
    suppress_deflate_header: bool = False,
) -> int:
    """Map an HTTP content-encoding name to a zlib ``wbits`` mode.

    ``"gzip"`` selects gzip framing (``16 + MAX_WBITS``); anything else is
    treated as deflate, with a negative window size (raw stream, no zlib
    header) when *suppress_deflate_header* is true.
    """
    if encoding == "gzip":
        return 16 + zlib.MAX_WBITS
    if suppress_deflate_header:
        return -zlib.MAX_WBITS
    return zlib.MAX_WBITS
|
27 |
+
|
28 |
+
|
29 |
+
class ZlibBaseHandler:
    """Shared state for zlib (de)compression wrappers.

    Holds the zlib window mode, an optional executor for offloading large
    chunks, and the size threshold above which work goes to the executor.
    """

    def __init__(
        self,
        mode: int,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # zlib wbits value (see encoding_to_mode).
        self._mode = mode
        # Executor for off-loop work; None means the loop's default.
        self._executor = executor
        # Chunks larger than this run in the executor; None disables that.
        self._max_sync_chunk_size = max_sync_chunk_size
|
39 |
+
|
40 |
+
|
41 |
+
class ZLibCompressor(ZlibBaseHandler):
    """Streaming zlib/gzip compressor with an async large-chunk path."""

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        level: Optional[int] = None,
        wbits: Optional[int] = None,
        strategy: int = zlib.Z_DEFAULT_STRATEGY,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        # Explicit wbits wins over the encoding-derived mode.
        super().__init__(
            mode=encoding_to_mode(encoding, suppress_deflate_header)
            if wbits is None
            else wbits,
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        if level is None:
            self._compressor = zlib.compressobj(wbits=self._mode, strategy=strategy)
        else:
            self._compressor = zlib.compressobj(
                wbits=self._mode, strategy=strategy, level=level
            )
        self._compress_lock = asyncio.Lock()

    def compress_sync(self, data: bytes) -> bytes:
        """Compress *data* synchronously on the calling thread."""
        return self._compressor.compress(data)

    async def compress(self, data: bytes) -> bytes:
        """Compress *data*, off-loading large chunks to the executor."""
        async with self._compress_lock:
            # To ensure the stream is consistent in the event
            # there are multiple writers, we need to lock
            # the compressor so that only one writer can
            # compress at a time.
            if (
                self._max_sync_chunk_size is not None
                and len(data) > self._max_sync_chunk_size
            ):
                return await asyncio.get_event_loop().run_in_executor(
                    self._executor, self.compress_sync, data
                )
            return self.compress_sync(data)

    def flush(self, mode: int = zlib.Z_FINISH) -> bytes:
        """Flush buffered output; Z_FINISH terminates the stream."""
        return self._compressor.flush(mode)
|
87 |
+
|
88 |
+
|
89 |
+
class ZLibDecompressor(ZlibBaseHandler):
    """Streaming zlib/gzip decompressor with an async large-chunk path."""

    def __init__(
        self,
        encoding: Optional[str] = None,
        suppress_deflate_header: bool = False,
        executor: Optional[Executor] = None,
        max_sync_chunk_size: Optional[int] = MAX_SYNC_CHUNK_SIZE,
    ):
        super().__init__(
            mode=encoding_to_mode(encoding, suppress_deflate_header),
            executor=executor,
            max_sync_chunk_size=max_sync_chunk_size,
        )
        self._decompressor = zlib.decompressobj(wbits=self._mode)

    def decompress_sync(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress synchronously; max_length=0 means unlimited."""
        return self._decompressor.decompress(data, max_length)

    async def decompress(self, data: bytes, max_length: int = 0) -> bytes:
        """Decompress *data*, off-loading large chunks to the executor."""
        if (
            self._max_sync_chunk_size is not None
            and len(data) > self._max_sync_chunk_size
        ):
            return await asyncio.get_event_loop().run_in_executor(
                self._executor, self.decompress_sync, data, max_length
            )
        return self.decompress_sync(data, max_length)

    def flush(self, length: int = 0) -> bytes:
        """Return any remaining buffered output."""
        return (
            self._decompressor.flush(length)
            if length > 0
            else self._decompressor.flush()
        )

    @property
    def eof(self) -> bool:
        # True once the end of the compressed stream was reached.
        return self._decompressor.eof

    @property
    def unconsumed_tail(self) -> bytes:
        # Input withheld by a max_length-limited decompress call.
        return self._decompressor.unconsumed_tail

    @property
    def unused_data(self) -> bytes:
        # Bytes that followed the end of the compressed stream.
        return self._decompressor.unused_data
|
135 |
+
|
136 |
+
|
137 |
+
class BrotliDecompressor:
    # Supports both 'brotlipy' and 'Brotli' packages
    # since they share an import name. The top branches
    # are for 'brotlipy' and bottom branches for 'Brotli'
    def __init__(self) -> None:
        # Fail at construction, not at first chunk, if brotli is missing.
        if not HAS_BROTLI:
            raise RuntimeError(
                "The brotli decompression is not available. "
                "Please install `Brotli` module"
            )
        self._obj = brotli.Decompressor()

    def decompress_sync(self, data: bytes) -> bytes:
        """Decompress one chunk, dispatching on the installed API flavor."""
        if hasattr(self._obj, "decompress"):
            return cast(bytes, self._obj.decompress(data))
        return cast(bytes, self._obj.process(data))

    def flush(self) -> bytes:
        """Flush if the backend supports it; 'Brotli' needs no flush."""
        if hasattr(self._obj, "flush"):
            return cast(bytes, self._obj.flush())
        return b""
|
llmeval-env/lib/python3.10/site-packages/aiohttp/connector.py
ADDED
@@ -0,0 +1,1511 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
import functools
|
3 |
+
import random
|
4 |
+
import sys
|
5 |
+
import traceback
|
6 |
+
import warnings
|
7 |
+
from collections import defaultdict, deque
|
8 |
+
from contextlib import suppress
|
9 |
+
from http import HTTPStatus
|
10 |
+
from http.cookies import SimpleCookie
|
11 |
+
from itertools import cycle, islice
|
12 |
+
from time import monotonic
|
13 |
+
from types import TracebackType
|
14 |
+
from typing import (
|
15 |
+
TYPE_CHECKING,
|
16 |
+
Any,
|
17 |
+
Awaitable,
|
18 |
+
Callable,
|
19 |
+
DefaultDict,
|
20 |
+
Dict,
|
21 |
+
Iterator,
|
22 |
+
List,
|
23 |
+
Literal,
|
24 |
+
Optional,
|
25 |
+
Set,
|
26 |
+
Tuple,
|
27 |
+
Type,
|
28 |
+
Union,
|
29 |
+
cast,
|
30 |
+
)
|
31 |
+
|
32 |
+
import attr
|
33 |
+
|
34 |
+
from . import hdrs, helpers
|
35 |
+
from .abc import AbstractResolver
|
36 |
+
from .client_exceptions import (
|
37 |
+
ClientConnectionError,
|
38 |
+
ClientConnectorCertificateError,
|
39 |
+
ClientConnectorError,
|
40 |
+
ClientConnectorSSLError,
|
41 |
+
ClientHttpProxyError,
|
42 |
+
ClientProxyConnectionError,
|
43 |
+
ServerFingerprintMismatch,
|
44 |
+
UnixClientConnectorError,
|
45 |
+
cert_errors,
|
46 |
+
ssl_errors,
|
47 |
+
)
|
48 |
+
from .client_proto import ResponseHandler
|
49 |
+
from .client_reqrep import ClientRequest, Fingerprint, _merge_ssl_params
|
50 |
+
from .helpers import ceil_timeout, get_running_loop, is_ip_address, noop, sentinel
|
51 |
+
from .locks import EventResultOrError
|
52 |
+
from .resolver import DefaultResolver
|
53 |
+
|
54 |
+
try:
|
55 |
+
import ssl
|
56 |
+
|
57 |
+
SSLContext = ssl.SSLContext
|
58 |
+
except ImportError: # pragma: no cover
|
59 |
+
ssl = None # type: ignore[assignment]
|
60 |
+
SSLContext = object # type: ignore[misc,assignment]
|
61 |
+
|
62 |
+
|
63 |
+
__all__ = ("BaseConnector", "TCPConnector", "UnixConnector", "NamedPipeConnector")
|
64 |
+
|
65 |
+
|
66 |
+
if TYPE_CHECKING:
|
67 |
+
from .client import ClientTimeout
|
68 |
+
from .client_reqrep import ConnectionKey
|
69 |
+
from .tracing import Trace
|
70 |
+
|
71 |
+
|
72 |
+
class _DeprecationWaiter:
|
73 |
+
__slots__ = ("_awaitable", "_awaited")
|
74 |
+
|
75 |
+
def __init__(self, awaitable: Awaitable[Any]) -> None:
|
76 |
+
self._awaitable = awaitable
|
77 |
+
self._awaited = False
|
78 |
+
|
79 |
+
def __await__(self) -> Any:
|
80 |
+
self._awaited = True
|
81 |
+
return self._awaitable.__await__()
|
82 |
+
|
83 |
+
def __del__(self) -> None:
|
84 |
+
if not self._awaited:
|
85 |
+
warnings.warn(
|
86 |
+
"Connector.close() is a coroutine, "
|
87 |
+
"please use await connector.close()",
|
88 |
+
DeprecationWarning,
|
89 |
+
)
|
90 |
+
|
91 |
+
|
92 |
+
class Connection:
    """A single pooled client connection handed out by a :class:`BaseConnector`.

    Wraps a ``ResponseHandler`` protocol together with the pool key it was
    acquired under.  ``release()`` returns the protocol to the owning
    connector for reuse; ``close()`` returns it with a forced close.  The
    finalizer reports connections that were never returned.
    """

    # Captured in __init__ only when the event loop runs in debug mode.
    _source_traceback = None
    _transport = None

    def __init__(
        self,
        connector: "BaseConnector",
        key: "ConnectionKey",
        protocol: ResponseHandler,
        loop: asyncio.AbstractEventLoop,
    ) -> None:
        self._key = key
        self._connector = connector
        self._loop = loop
        # Becomes None once the connection has been released or closed.
        self._protocol: Optional[ResponseHandler] = protocol
        self._callbacks: List[Callable[[], None]] = []

        if loop.get_debug():
            # Remember where the connection was created to aid leak reports.
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

    def __repr__(self) -> str:
        return f"Connection<{self._key}>"

    def __del__(self, _warnings: Any = warnings) -> None:
        # Finalizer: a still-set protocol means the caller never released
        # this connection.  Warn, force-close it through the connector, and
        # report via the loop's exception handler.
        if self._protocol is not None:
            kwargs = {"source": self}
            _warnings.warn(f"Unclosed connection {self!r}", ResourceWarning, **kwargs)
            if self._loop.is_closed():
                # Loop already gone; nothing more can be cleaned up safely.
                return

            self._connector._release(self._key, self._protocol, should_close=True)

            context = {"client_connection": self, "message": "Unclosed connection"}
            if self._source_traceback is not None:
                context["source_traceback"] = self._source_traceback
            self._loop.call_exception_handler(context)

    def __bool__(self) -> Literal[True]:
        """Force subclasses to not be falsy, to make checks simpler."""
        return True

    @property
    def loop(self) -> asyncio.AbstractEventLoop:
        """Deprecated accessor for the event loop this connection uses."""
        warnings.warn(
            "connector.loop property is deprecated", DeprecationWarning, stacklevel=2
        )
        return self._loop

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        """Underlying transport, or None once released/closed."""
        if self._protocol is None:
            return None
        return self._protocol.transport

    @property
    def protocol(self) -> Optional[ResponseHandler]:
        """The wrapped ResponseHandler, or None once released/closed."""
        return self._protocol

    def add_callback(self, callback: Callable[[], None]) -> None:
        """Register a callback fired once, on release/close of the connection."""
        if callback is not None:
            self._callbacks.append(callback)

    def _notify_release(self) -> None:
        # Swap the list out first so callbacks registered during notification
        # are not invoked in this round; exceptions from callbacks are ignored.
        callbacks, self._callbacks = self._callbacks[:], []

        for cb in callbacks:
            with suppress(Exception):
                cb()

    def close(self) -> None:
        """Return the protocol to the connector, forcing the socket closed."""
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(self._key, self._protocol, should_close=True)
            self._protocol = None

    def release(self) -> None:
        """Return the protocol to the connector for possible keep-alive reuse.

        The protocol itself decides (via ``should_close``) whether the
        underlying socket may be reused.
        """
        self._notify_release()

        if self._protocol is not None:
            self._connector._release(
                self._key, self._protocol, should_close=self._protocol.should_close
            )
            self._protocol = None

    @property
    def closed(self) -> bool:
        """True if the connection was released or its transport dropped."""
        return self._protocol is None or not self._protocol.is_connected()
|
181 |
+
|
182 |
+
|
183 |
+
class _TransportPlaceholder:
|
184 |
+
"""placeholder for BaseConnector.connect function"""
|
185 |
+
|
186 |
+
def close(self) -> None:
|
187 |
+
pass
|
188 |
+
|
189 |
+
|
190 |
+
class BaseConnector:
    """Base connector class.

    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    enable_cleanup_closed - Enables clean-up closed ssl transports.
                            Disabled by default.
    timeout_ceil_threshold - Trigger ceiling of timeout values when
                             it's above timeout_ceil_threshold.
    loop - Optional event loop.
    """

    _closed = True  # prevent AttributeError in __del__ if ctor was failed
    _source_traceback = None

    # abort transport after 2 seconds (cleanup broken connections)
    _cleanup_closed_period = 2.0

    def __init__(
        self,
        *,
        keepalive_timeout: Union[object, None, float] = sentinel,
        force_close: bool = False,
        limit: int = 100,
        limit_per_host: int = 0,
        enable_cleanup_closed: bool = False,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        timeout_ceil_threshold: float = 5,
    ) -> None:

        # keepalive and force_close are mutually exclusive; the sentinel
        # distinguishes "not passed" from an explicit None.
        if force_close:
            if keepalive_timeout is not None and keepalive_timeout is not sentinel:
                raise ValueError(
                    "keepalive_timeout cannot " "be set if force_close is True"
                )
        else:
            if keepalive_timeout is sentinel:
                keepalive_timeout = 15.0

        loop = get_running_loop(loop)
        self._timeout_ceil_threshold = timeout_ceil_threshold

        self._closed = False
        if loop.get_debug():
            self._source_traceback = traceback.extract_stack(sys._getframe(1))

        # Idle pooled connections: key -> [(protocol, release-time), ...]
        self._conns: Dict[ConnectionKey, List[Tuple[ResponseHandler, float]]] = {}
        self._limit = limit
        self._limit_per_host = limit_per_host
        # Protocols currently checked out (includes _TransportPlaceholder
        # entries while a connection is being established).
        self._acquired: Set[ResponseHandler] = set()
        self._acquired_per_host: DefaultDict[
            ConnectionKey, Set[ResponseHandler]
        ] = defaultdict(set)
        self._keepalive_timeout = cast(float, keepalive_timeout)
        self._force_close = force_close

        # {host_key: FIFO list of waiters}
        self._waiters = defaultdict(deque)  # type: ignore[var-annotated]

        self._loop = loop
        self._factory = functools.partial(ResponseHandler, loop=loop)

        self.cookies = SimpleCookie()

        # start keep-alive connection cleanup task
        self._cleanup_handle: Optional[asyncio.TimerHandle] = None

        # start cleanup closed transports task
        self._cleanup_closed_handle: Optional[asyncio.TimerHandle] = None
        self._cleanup_closed_disabled = not enable_cleanup_closed
        self._cleanup_closed_transports: List[Optional[asyncio.Transport]] = []
        self._cleanup_closed()

    def __del__(self, _warnings: Any = warnings) -> None:
        # Finalizer: warn and clean up if the connector still holds pooled
        # connections when it is garbage-collected.
        if self._closed:
            return
        if not self._conns:
            return

        # Snapshot the reprs before _close() clears the pool.
        conns = [repr(c) for c in self._conns.values()]

        self._close()

        kwargs = {"source": self}
        _warnings.warn(f"Unclosed connector {self!r}", ResourceWarning, **kwargs)
        context = {
            "connector": self,
            "connections": conns,
            "message": "Unclosed connector",
        }
        if self._source_traceback is not None:
            context["source_traceback"] = self._source_traceback
        self._loop.call_exception_handler(context)

    def __enter__(self) -> "BaseConnector":
        warnings.warn(
            '"with Connector():" is deprecated, '
            'use "async with Connector():" instead',
            DeprecationWarning,
        )
        return self

    def __exit__(self, *exc: Any) -> None:
        self._close()

    async def __aenter__(self) -> "BaseConnector":
        return self

    async def __aexit__(
        self,
        exc_type: Optional[Type[BaseException]] = None,
        exc_value: Optional[BaseException] = None,
        exc_traceback: Optional[TracebackType] = None,
    ) -> None:
        await self.close()

    @property
    def force_close(self) -> bool:
        """Ultimately close connection on releasing if True."""
        return self._force_close

    @property
    def limit(self) -> int:
        """The total number of simultaneous connections.

        If limit is 0 the connector has no limit.
        The default limit size is 100.
        """
        return self._limit

    @property
    def limit_per_host(self) -> int:
        """The limit for simultaneous connections to the same endpoint.

        Endpoints are the same if they have an equal
        (host, port, is_ssl) triple.
        """
        return self._limit_per_host

    def _cleanup(self) -> None:
        """Cleanup unused transports."""
        if self._cleanup_handle:
            self._cleanup_handle.cancel()
            # _cleanup_handle should be unset, otherwise _release() will not
            # recreate it ever!
            self._cleanup_handle = None

        now = self._loop.time()
        timeout = self._keepalive_timeout

        if self._conns:
            # Rebuild the pool keeping only connected protocols whose idle
            # time is still within the keep-alive window.
            connections = {}
            deadline = now - timeout
            for key, conns in self._conns.items():
                alive = []
                for proto, use_time in conns:
                    if proto.is_connected():
                        if use_time - deadline < 0:
                            # Idle longer than keepalive_timeout: drop it.
                            transport = proto.transport
                            proto.close()
                            if key.is_ssl and not self._cleanup_closed_disabled:
                                self._cleanup_closed_transports.append(transport)
                        else:
                            alive.append((proto, use_time))
                    else:
                        # Peer already went away; discard the dead protocol.
                        transport = proto.transport
                        proto.close()
                        if key.is_ssl and not self._cleanup_closed_disabled:
                            self._cleanup_closed_transports.append(transport)

                if alive:
                    connections[key] = alive

            self._conns = connections

        if self._conns:
            # Re-arm the periodic cleanup while idle connections remain.
            self._cleanup_handle = helpers.weakref_handle(
                self,
                "_cleanup",
                timeout,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )

    def _drop_acquired_per_host(
        self, key: "ConnectionKey", val: ResponseHandler
    ) -> None:
        # Remove one acquired protocol from the per-host set, dropping the
        # key entirely once the set is empty.
        acquired_per_host = self._acquired_per_host
        if key not in acquired_per_host:
            return
        conns = acquired_per_host[key]
        conns.remove(val)
        if not conns:
            del self._acquired_per_host[key]

    def _cleanup_closed(self) -> None:
        """Double confirmation for transport close.

        Some broken ssl servers may leave socket open without proper close.
        """
        if self._cleanup_closed_handle:
            self._cleanup_closed_handle.cancel()

        for transport in self._cleanup_closed_transports:
            if transport is not None:
                transport.abort()

        self._cleanup_closed_transports = []

        if not self._cleanup_closed_disabled:
            # Re-arm: abort any newly queued transports every
            # _cleanup_closed_period seconds.
            self._cleanup_closed_handle = helpers.weakref_handle(
                self,
                "_cleanup_closed",
                self._cleanup_closed_period,
                self._loop,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )

    def close(self) -> Awaitable[None]:
        """Close all opened transports."""
        self._close()
        # The returned waiter warns if callers forget to await it.
        return _DeprecationWaiter(noop())

    def _close(self) -> None:
        # Synchronous teardown of every pooled and acquired protocol.
        if self._closed:
            return

        self._closed = True

        try:
            if self._loop.is_closed():
                return

            # cancel cleanup task
            if self._cleanup_handle:
                self._cleanup_handle.cancel()

            # cancel cleanup close task
            if self._cleanup_closed_handle:
                self._cleanup_closed_handle.cancel()

            for data in self._conns.values():
                for proto, t0 in data:
                    proto.close()

            for proto in self._acquired:
                proto.close()

            for transport in self._cleanup_closed_transports:
                if transport is not None:
                    transport.abort()

        finally:
            # Always leave the bookkeeping empty, even if a close() raised.
            self._conns.clear()
            self._acquired.clear()
            self._waiters.clear()
            self._cleanup_handle = None
            self._cleanup_closed_transports.clear()
            self._cleanup_closed_handle = None

    @property
    def closed(self) -> bool:
        """Is connector closed.

        A readonly property.
        """
        return self._closed

    def _available_connections(self, key: "ConnectionKey") -> int:
        """
        Return number of available connections.

        The limit, limit_per_host and the connection key are taken into account.

        If it returns less than 1 means that there are no connections
        available.
        """
        if self._limit:
            # total calc available connections
            available = self._limit - len(self._acquired)

            # check limit per host
            if (
                self._limit_per_host
                and available > 0
                and key in self._acquired_per_host
            ):
                acquired = self._acquired_per_host.get(key)
                assert acquired is not None
                available = self._limit_per_host - len(acquired)

        elif self._limit_per_host and key in self._acquired_per_host:
            # check limit per host
            acquired = self._acquired_per_host.get(key)
            assert acquired is not None
            available = self._limit_per_host - len(acquired)
        else:
            available = 1

        return available

    async def connect(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> Connection:
        """Get from pool or create new connection."""
        key = req.connection_key
        available = self._available_connections(key)

        # Wait if there are no available connections or if there are/were
        # waiters (i.e. don't steal connection from a waiter about to wake up)
        if available <= 0 or key in self._waiters:
            fut = self._loop.create_future()

            # This connection will now count towards the limit.
            self._waiters[key].append(fut)

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_start()

            try:
                await fut
            except BaseException as e:
                if key in self._waiters:
                    # remove a waiter even if it was cancelled, normally it's
                    # removed when it's notified
                    try:
                        self._waiters[key].remove(fut)
                    except ValueError:  # fut may no longer be in list
                        pass

                raise e
            finally:
                # Drop the key once its waiter queue is empty so the
                # "key in self._waiters" fast-path above stays accurate.
                if key in self._waiters and not self._waiters[key]:
                    del self._waiters[key]

            if traces:
                for trace in traces:
                    await trace.send_connection_queued_end()

        proto = self._get(key)
        if proto is None:
            # No reusable connection: reserve a placeholder slot in the
            # acquired sets while the real connection is established, so
            # limits account for the in-flight attempt.
            placeholder = cast(ResponseHandler, _TransportPlaceholder())
            self._acquired.add(placeholder)
            self._acquired_per_host[key].add(placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_start()

            try:
                proto = await self._create_connection(req, traces, timeout)
                if self._closed:
                    proto.close()
                    raise ClientConnectionError("Connector is closed.")
            except BaseException:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)
                    # The failed slot frees capacity; wake one waiter.
                    self._release_waiter()
                raise
            else:
                if not self._closed:
                    self._acquired.remove(placeholder)
                    self._drop_acquired_per_host(key, placeholder)

            if traces:
                for trace in traces:
                    await trace.send_connection_create_end()
        else:
            if traces:
                # Acquire the connection to prevent race conditions with limits
                placeholder = cast(ResponseHandler, _TransportPlaceholder())
                self._acquired.add(placeholder)
                self._acquired_per_host[key].add(placeholder)
                for trace in traces:
                    await trace.send_connection_reuseconn()
                self._acquired.remove(placeholder)
                self._drop_acquired_per_host(key, placeholder)

        self._acquired.add(proto)
        self._acquired_per_host[key].add(proto)
        return Connection(self, key, proto, self._loop)

    def _get(self, key: "ConnectionKey") -> Optional[ResponseHandler]:
        """Pop a reusable pooled protocol for *key*, or None.

        Dead and keep-alive-expired protocols encountered on the way are
        closed and discarded.
        """
        try:
            conns = self._conns[key]
        except KeyError:
            return None

        t1 = self._loop.time()
        while conns:
            proto, t0 = conns.pop()
            if proto.is_connected():
                if t1 - t0 > self._keepalive_timeout:
                    transport = proto.transport
                    proto.close()
                    # only for SSL transports
                    if key.is_ssl and not self._cleanup_closed_disabled:
                        self._cleanup_closed_transports.append(transport)
                else:
                    if not conns:
                        # The very last connection was reclaimed: drop the key
                        del self._conns[key]
                    return proto
            else:
                transport = proto.transport
                proto.close()
                if key.is_ssl and not self._cleanup_closed_disabled:
                    self._cleanup_closed_transports.append(transport)

        # No more connections: drop the key
        del self._conns[key]
        return None

    def _release_waiter(self) -> None:
        """
        Iterates over all waiters until one to be released is found.

        The one to be released is not finished and
        belongs to a host that has available connections.
        """
        if not self._waiters:
            return

        # Shuffle the keys so repeated calls do not always poll hosts
        # in the same order.
        queues = list(self._waiters.keys())
        random.shuffle(queues)

        for key in queues:
            if self._available_connections(key) < 1:
                continue

            waiters = self._waiters[key]
            while waiters:
                waiter = waiters.popleft()
                if not waiter.done():
                    waiter.set_result(None)
                    return

    def _release_acquired(self, key: "ConnectionKey", proto: ResponseHandler) -> None:
        """Remove *proto* from the acquired bookkeeping and wake one waiter."""
        if self._closed:
            # acquired connection is already released on connector closing
            return

        try:
            self._acquired.remove(proto)
            self._drop_acquired_per_host(key, proto)
        except KeyError:  # pragma: no cover
            # this may be the result of a nondeterministic order of object
            # finalization due to garbage collection.
            pass
        else:
            self._release_waiter()

    def _release(
        self,
        key: "ConnectionKey",
        protocol: ResponseHandler,
        *,
        should_close: bool = False,
    ) -> None:
        """Return *protocol* to the pool, or close it.

        Called by :class:`Connection` on release/close.  The protocol is
        pooled for reuse unless force_close is set, *should_close* is
        requested, or the protocol itself demands closing.
        """
        if self._closed:
            # acquired connection is already released on connector closing
            return

        self._release_acquired(key, protocol)

        if self._force_close:
            should_close = True

        if should_close or protocol.should_close:
            transport = protocol.transport
            protocol.close()

            if key.is_ssl and not self._cleanup_closed_disabled:
                self._cleanup_closed_transports.append(transport)
        else:
            conns = self._conns.get(key)
            if conns is None:
                conns = self._conns[key] = []
            conns.append((protocol, self._loop.time()))

            # Arm the keep-alive reaper if it is not already scheduled.
            if self._cleanup_handle is None:
                self._cleanup_handle = helpers.weakref_handle(
                    self,
                    "_cleanup",
                    self._keepalive_timeout,
                    self._loop,
                    timeout_ceil_threshold=self._timeout_ceil_threshold,
                )

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Establish a new connection; implemented by subclasses."""
        raise NotImplementedError()
|
690 |
+
|
691 |
+
|
692 |
+
class _DNSCacheTable:
|
693 |
+
def __init__(self, ttl: Optional[float] = None) -> None:
|
694 |
+
self._addrs_rr: Dict[Tuple[str, int], Tuple[Iterator[Dict[str, Any]], int]] = {}
|
695 |
+
self._timestamps: Dict[Tuple[str, int], float] = {}
|
696 |
+
self._ttl = ttl
|
697 |
+
|
698 |
+
def __contains__(self, host: object) -> bool:
|
699 |
+
return host in self._addrs_rr
|
700 |
+
|
701 |
+
def add(self, key: Tuple[str, int], addrs: List[Dict[str, Any]]) -> None:
|
702 |
+
self._addrs_rr[key] = (cycle(addrs), len(addrs))
|
703 |
+
|
704 |
+
if self._ttl is not None:
|
705 |
+
self._timestamps[key] = monotonic()
|
706 |
+
|
707 |
+
def remove(self, key: Tuple[str, int]) -> None:
|
708 |
+
self._addrs_rr.pop(key, None)
|
709 |
+
|
710 |
+
if self._ttl is not None:
|
711 |
+
self._timestamps.pop(key, None)
|
712 |
+
|
713 |
+
def clear(self) -> None:
|
714 |
+
self._addrs_rr.clear()
|
715 |
+
self._timestamps.clear()
|
716 |
+
|
717 |
+
def next_addrs(self, key: Tuple[str, int]) -> List[Dict[str, Any]]:
|
718 |
+
loop, length = self._addrs_rr[key]
|
719 |
+
addrs = list(islice(loop, length))
|
720 |
+
# Consume one more element to shift internal state of `cycle`
|
721 |
+
next(loop)
|
722 |
+
return addrs
|
723 |
+
|
724 |
+
def expired(self, key: Tuple[str, int]) -> bool:
|
725 |
+
if self._ttl is None:
|
726 |
+
return False
|
727 |
+
|
728 |
+
return self._timestamps[key] + self._ttl < monotonic()
|
729 |
+
|
730 |
+
|
731 |
+
class TCPConnector(BaseConnector):
|
732 |
+
"""TCP connector.
|
733 |
+
|
734 |
+
verify_ssl - Set to True to check ssl certifications.
|
735 |
+
fingerprint - Pass the binary sha256
|
736 |
+
digest of the expected certificate in DER format to verify
|
737 |
+
that the certificate the server presents matches. See also
|
738 |
+
https://en.wikipedia.org/wiki/Transport_Layer_Security#Certificate_pinning
|
739 |
+
resolver - Enable DNS lookups and use this
|
740 |
+
resolver
|
741 |
+
use_dns_cache - Use memory cache for DNS lookups.
|
742 |
+
ttl_dns_cache - Max seconds having cached a DNS entry, None forever.
|
743 |
+
family - socket address family
|
744 |
+
local_addr - local tuple of (host, port) to bind socket to
|
745 |
+
|
746 |
+
keepalive_timeout - (optional) Keep-alive timeout.
|
747 |
+
force_close - Set to True to force close and do reconnect
|
748 |
+
after each request (and between redirects).
|
749 |
+
limit - The total number of simultaneous connections.
|
750 |
+
limit_per_host - Number of simultaneous connections to one host.
|
751 |
+
enable_cleanup_closed - Enables clean-up closed ssl transports.
|
752 |
+
Disabled by default.
|
753 |
+
loop - Optional event loop.
|
754 |
+
"""
|
755 |
+
|
756 |
+
def __init__(
|
757 |
+
self,
|
758 |
+
*,
|
759 |
+
verify_ssl: bool = True,
|
760 |
+
fingerprint: Optional[bytes] = None,
|
761 |
+
use_dns_cache: bool = True,
|
762 |
+
ttl_dns_cache: Optional[int] = 10,
|
763 |
+
family: int = 0,
|
764 |
+
ssl_context: Optional[SSLContext] = None,
|
765 |
+
ssl: Union[bool, Fingerprint, SSLContext] = True,
|
766 |
+
local_addr: Optional[Tuple[str, int]] = None,
|
767 |
+
resolver: Optional[AbstractResolver] = None,
|
768 |
+
keepalive_timeout: Union[None, float, object] = sentinel,
|
769 |
+
force_close: bool = False,
|
770 |
+
limit: int = 100,
|
771 |
+
limit_per_host: int = 0,
|
772 |
+
enable_cleanup_closed: bool = False,
|
773 |
+
loop: Optional[asyncio.AbstractEventLoop] = None,
|
774 |
+
timeout_ceil_threshold: float = 5,
|
775 |
+
):
|
776 |
+
super().__init__(
|
777 |
+
keepalive_timeout=keepalive_timeout,
|
778 |
+
force_close=force_close,
|
779 |
+
limit=limit,
|
780 |
+
limit_per_host=limit_per_host,
|
781 |
+
enable_cleanup_closed=enable_cleanup_closed,
|
782 |
+
loop=loop,
|
783 |
+
timeout_ceil_threshold=timeout_ceil_threshold,
|
784 |
+
)
|
785 |
+
|
786 |
+
self._ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint)
|
787 |
+
if resolver is None:
|
788 |
+
resolver = DefaultResolver(loop=self._loop)
|
789 |
+
self._resolver = resolver
|
790 |
+
|
791 |
+
self._use_dns_cache = use_dns_cache
|
792 |
+
self._cached_hosts = _DNSCacheTable(ttl=ttl_dns_cache)
|
793 |
+
self._throttle_dns_events: Dict[Tuple[str, int], EventResultOrError] = {}
|
794 |
+
self._family = family
|
795 |
+
self._local_addr = local_addr
|
796 |
+
|
797 |
+
def close(self) -> Awaitable[None]:
|
798 |
+
"""Close all ongoing DNS calls."""
|
799 |
+
for ev in self._throttle_dns_events.values():
|
800 |
+
ev.cancel()
|
801 |
+
|
802 |
+
return super().close()
|
803 |
+
|
804 |
+
@property
|
805 |
+
def family(self) -> int:
|
806 |
+
"""Socket family like AF_INET."""
|
807 |
+
return self._family
|
808 |
+
|
809 |
+
@property
|
810 |
+
def use_dns_cache(self) -> bool:
|
811 |
+
"""True if local DNS caching is enabled."""
|
812 |
+
return self._use_dns_cache
|
813 |
+
|
814 |
+
def clear_dns_cache(
|
815 |
+
self, host: Optional[str] = None, port: Optional[int] = None
|
816 |
+
) -> None:
|
817 |
+
"""Remove specified host/port or clear all dns local cache."""
|
818 |
+
if host is not None and port is not None:
|
819 |
+
self._cached_hosts.remove((host, port))
|
820 |
+
elif host is not None or port is not None:
|
821 |
+
raise ValueError("either both host and port " "or none of them are allowed")
|
822 |
+
else:
|
823 |
+
self._cached_hosts.clear()
|
824 |
+
|
825 |
+
async def _resolve_host(
    self, host: str, port: int, traces: Optional[List["Trace"]] = None
) -> List[Dict[str, Any]]:
    """Resolve host and return list of addresses.

    Fast paths, in order: literal IP addresses (no lookup), cache
    disabled (direct resolve), fresh cache entry (no await before the
    cache read, see #4014). Otherwise the lookup is delegated to
    ``_resolve_host_with_throttle`` inside a shielded task so that
    cancelling one waiter does not cancel the shared lookup.
    """
    if is_ip_address(host):
        # Literal IPs need no DNS round-trip; synthesize a single
        # getaddrinfo-style record.
        return [
            {
                "hostname": host,
                "host": host,
                "port": port,
                "family": self._family,
                "proto": 0,
                "flags": 0,
            }
        ]

    if not self._use_dns_cache:

        if traces:
            for trace in traces:
                await trace.send_dns_resolvehost_start(host)

        res = await self._resolver.resolve(host, port, family=self._family)

        if traces:
            for trace in traces:
                await trace.send_dns_resolvehost_end(host)

        return res

    key = (host, port)
    if key in self._cached_hosts and not self._cached_hosts.expired(key):
        # get result early, before any await (#4014)
        result = self._cached_hosts.next_addrs(key)

        if traces:
            for trace in traces:
                await trace.send_dns_cache_hit(host)
        return result

    #
    # If multiple connectors are resolving the same host, we wait
    # for the first one to resolve and then use the result for all of them.
    # We use a throttle event to ensure that we only resolve the host once
    # and then use the result for all the waiters.
    #
    # In this case we need to create a task to ensure that we can shield
    # the task from cancellation as cancelling this lookup should not cancel
    # the underlying lookup or else the cancel event will get broadcast to
    # all the waiters across all connections.
    #
    resolved_host_task = asyncio.create_task(
        self._resolve_host_with_throttle(key, host, port, traces)
    )
    try:
        return await asyncio.shield(resolved_host_task)
    except asyncio.CancelledError:

        def drop_exception(fut: "asyncio.Future[List[Dict[str, Any]]]") -> None:
            # Consume the task's eventual result/exception so the loop
            # does not log a "Task exception was never retrieved" warning.
            with suppress(Exception, asyncio.CancelledError):
                fut.result()

        resolved_host_task.add_done_callback(drop_exception)
        raise
|
889 |
+
|
890 |
+
async def _resolve_host_with_throttle(
    self,
    key: Tuple[str, int],
    host: str,
    port: int,
    traces: Optional[List["Trace"]],
) -> List[Dict[str, Any]]:
    """Resolve host with a dns events throttle.

    The first caller for a given ``key`` performs the actual lookup and
    caches the result; concurrent callers block on a shared
    ``EventResultOrError`` and reuse the cached addresses once it fires.
    Dict reads/writes happen before any await to stay race-free (#4014).
    """
    if key in self._throttle_dns_events:
        # get event early, before any await (#4014)
        event = self._throttle_dns_events[key]
        if traces:
            for trace in traces:
                await trace.send_dns_cache_hit(host)
        # Wait for the in-flight lookup; re-raises the resolver's
        # exception if the leader failed.
        await event.wait()
    else:
        # update dict early, before any await (#4014)
        self._throttle_dns_events[key] = EventResultOrError(self._loop)
        if traces:
            for trace in traces:
                await trace.send_dns_cache_miss(host)
        try:

            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_start(host)

            addrs = await self._resolver.resolve(host, port, family=self._family)
            if traces:
                for trace in traces:
                    await trace.send_dns_resolvehost_end(host)

            # Publish the result before waking the waiters.
            self._cached_hosts.add(key, addrs)
            self._throttle_dns_events[key].set()
        except BaseException as e:
            # any DNS exception, independently of the implementation
            # is set for the waiters to raise the same exception.
            self._throttle_dns_events[key].set(exc=e)
            raise
        finally:
            # Drop the throttle entry so the next resolution for this
            # key starts a fresh lookup.
            self._throttle_dns_events.pop(key)

    return self._cached_hosts.next_addrs(key)
|
933 |
+
|
934 |
+
async def _create_connection(
    self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> ResponseHandler:
    """Create connection.

    Has same keyword arguments as BaseEventLoop.create_connection.
    """
    # Pick the proxied or direct path based on the request, then
    # discard the transport — only the protocol is returned.
    connect = (
        self._create_proxy_connection
        if req.proxy
        else self._create_direct_connection
    )
    _, proto = await connect(req, traces, timeout)
    return proto
|
947 |
+
|
948 |
+
@staticmethod
|
949 |
+
@functools.lru_cache(None)
|
950 |
+
def _make_ssl_context(verified: bool) -> SSLContext:
|
951 |
+
if verified:
|
952 |
+
return ssl.create_default_context()
|
953 |
+
else:
|
954 |
+
sslcontext = ssl.SSLContext(ssl.PROTOCOL_TLS_CLIENT)
|
955 |
+
sslcontext.options |= ssl.OP_NO_SSLv2
|
956 |
+
sslcontext.options |= ssl.OP_NO_SSLv3
|
957 |
+
sslcontext.check_hostname = False
|
958 |
+
sslcontext.verify_mode = ssl.CERT_NONE
|
959 |
+
try:
|
960 |
+
sslcontext.options |= ssl.OP_NO_COMPRESSION
|
961 |
+
except AttributeError as attr_err:
|
962 |
+
warnings.warn(
|
963 |
+
"{!s}: The Python interpreter is compiled "
|
964 |
+
"against OpenSSL < 1.0.0. Ref: "
|
965 |
+
"https://docs.python.org/3/library/ssl.html"
|
966 |
+
"#ssl.OP_NO_COMPRESSION".format(attr_err),
|
967 |
+
)
|
968 |
+
sslcontext.set_default_verify_paths()
|
969 |
+
return sslcontext
|
970 |
+
|
971 |
+
def _get_ssl_context(self, req: ClientRequest) -> Optional[SSLContext]:
    """Logic to get the correct SSL context

    0. if req.ssl is false, return None

    1. if ssl_context is specified in req, use it
    2. if _ssl_context is specified in self, use it
    3. otherwise:
        1. if verify_ssl is not specified in req, use self.ssl_context
           (will generate a default context according to self.verify_ssl)
        2. if verify_ssl is True in req, generate a default SSL context
        3. if verify_ssl is False in req, generate a SSL context that
           won't verify
    """
    if req.is_ssl():
        if ssl is None:  # pragma: no cover
            raise RuntimeError("SSL is not supported.")
        # Request-level setting wins over the connector-level one.
        sslcontext = req.ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return self._make_ssl_context(False)
        # Fall back to the connector-wide setting.
        sslcontext = self._ssl
        if isinstance(sslcontext, ssl.SSLContext):
            return sslcontext
        if sslcontext is not True:
            # not verified or fingerprinted
            return self._make_ssl_context(False)
        # Both request and connector ask for default verification.
        return self._make_ssl_context(True)
    else:
        # Plain (non-TLS) request: no context needed.
        return None
|
1003 |
+
|
1004 |
+
def _get_fingerprint(self, req: ClientRequest) -> Optional["Fingerprint"]:
    """Return the request's fingerprint if pinned, else the connector's,
    else ``None``.
    """
    # Request-level setting takes precedence over the connector-wide one.
    for candidate in (req.ssl, self._ssl):
        if isinstance(candidate, Fingerprint):
            return candidate
    return None
|
1012 |
+
|
1013 |
+
async def _wrap_create_connection(
    self,
    *args: Any,
    req: ClientRequest,
    timeout: "ClientTimeout",
    client_error: Type[Exception] = ClientConnectorError,
    **kwargs: Any,
) -> Tuple[asyncio.Transport, ResponseHandler]:
    """Call ``loop.create_connection()`` under a ceiled timeout and map
    low-level failures onto aiohttp client exceptions keyed by ``req``'s
    connection key. ``client_error`` lets proxy code substitute
    ``ClientProxyConnectionError`` for plain OS errors.
    """
    try:
        async with ceil_timeout(
            timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
        ):
            return await self._loop.create_connection(*args, **kwargs)
    except cert_errors as exc:
        raise ClientConnectorCertificateError(req.connection_key, exc) from exc
    except ssl_errors as exc:
        raise ClientConnectorSSLError(req.connection_key, exc) from exc
    except OSError as exc:
        # On runtimes where asyncio.TimeoutError is an OSError subclass,
        # let the timeout propagate instead of masking it as a
        # connector error (errno is None distinguishes it).
        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
            raise
        raise client_error(req.connection_key, exc) from exc
|
1034 |
+
|
1035 |
+
def _fail_on_no_start_tls(self, req: "ClientRequest") -> None:
|
1036 |
+
"""Raise a :py:exc:`RuntimeError` on missing ``start_tls()``.
|
1037 |
+
|
1038 |
+
It is necessary for TLS-in-TLS so that it is possible to
|
1039 |
+
send HTTPS queries through HTTPS proxies.
|
1040 |
+
|
1041 |
+
This doesn't affect regular HTTP requests, though.
|
1042 |
+
"""
|
1043 |
+
if not req.is_ssl():
|
1044 |
+
return
|
1045 |
+
|
1046 |
+
proxy_url = req.proxy
|
1047 |
+
assert proxy_url is not None
|
1048 |
+
if proxy_url.scheme != "https":
|
1049 |
+
return
|
1050 |
+
|
1051 |
+
self._check_loop_for_start_tls()
|
1052 |
+
|
1053 |
+
def _check_loop_for_start_tls(self) -> None:
    """Raise RuntimeError if the running loop lacks ``start_tls()``.

    ``start_tls()`` is required for TLS-in-TLS (HTTPS request through
    an HTTPS proxy); older stdlib event loops do not implement it.
    """
    try:
        # Attribute probe only — we never call it here.
        self._loop.start_tls
    except AttributeError as attr_exc:
        raise RuntimeError(
            "An HTTPS request is being sent through an HTTPS proxy. "
            "This needs support for TLS in TLS but it is not implemented "
            "in your runtime for the stdlib asyncio.\n\n"
            "Please upgrade to Python 3.11 or higher. For more details, "
            "please see:\n"
            "* https://bugs.python.org/issue37179\n"
            "* https://github.com/python/cpython/pull/28073\n"
            "* https://docs.aiohttp.org/en/stable/"
            "client_advanced.html#proxy-support\n"
            "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
        ) from attr_exc
|
1069 |
+
|
1070 |
+
def _loop_supports_start_tls(self) -> bool:
|
1071 |
+
try:
|
1072 |
+
self._check_loop_for_start_tls()
|
1073 |
+
except RuntimeError:
|
1074 |
+
return False
|
1075 |
+
else:
|
1076 |
+
return True
|
1077 |
+
|
1078 |
+
def _warn_about_tls_in_tls(
    self,
    underlying_transport: asyncio.Transport,
    req: ClientRequest,
) -> None:
    """Issue a warning if the requested URL has HTTPS scheme."""
    if req.request_info.url.scheme != "https":
        return

    # Transports patched/capable of nested TLS advertise this flag.
    asyncio_supports_tls_in_tls = getattr(
        underlying_transport,
        "_start_tls_compatible",
        False,
    )

    if asyncio_supports_tls_in_tls:
        return

    warnings.warn(
        "An HTTPS request is being sent through an HTTPS proxy. "
        "This support for TLS in TLS is known to be disabled "
        "in the stdlib asyncio (Python <3.11). This is why you'll probably see "
        "an error in the log below.\n\n"
        "It is possible to enable it via monkeypatching. "
        "For more details, see:\n"
        "* https://bugs.python.org/issue37179\n"
        "* https://github.com/python/cpython/pull/28073\n\n"
        "You can temporarily patch this as follows:\n"
        "* https://docs.aiohttp.org/en/stable/client_advanced.html#proxy-support\n"
        "* https://github.com/aio-libs/aiohttp/discussions/6044\n",
        RuntimeWarning,
        source=self,
        # stacklevel=3 attributes the warning to the caller above this
        # class's internal call chain rather than to this helper itself.
        stacklevel=3,
    )
|
1114 |
+
|
1115 |
+
async def _start_tls_connection(
    self,
    underlying_transport: asyncio.Transport,
    req: ClientRequest,
    timeout: "ClientTimeout",
    client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
    """Wrap the raw TCP transport with TLS.

    Used for TLS-in-TLS: upgrades the already-established proxy
    transport via ``loop.start_tls()`` and returns the new TLS
    transport together with a fresh protocol bound to it.
    """
    tls_proto = self._factory()  # Create a brand new proto for TLS

    # Safety of the `cast()` call here is based on the fact that
    # internally `_get_ssl_context()` only returns `None` when
    # `req.is_ssl()` evaluates to `False` which is never gonna happen
    # in this code path. Of course, it's rather fragile
    # maintainability-wise but this is to be solved separately.
    sslcontext = cast(ssl.SSLContext, self._get_ssl_context(req))

    try:
        async with ceil_timeout(
            timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
        ):
            try:
                tls_transport = await self._loop.start_tls(
                    underlying_transport,
                    tls_proto,
                    sslcontext,
                    server_hostname=req.server_hostname or req.host,
                    ssl_handshake_timeout=timeout.total,
                )
            except BaseException:
                # We need to close the underlying transport since
                # `start_tls()` probably failed before it had a
                # chance to do this:
                underlying_transport.close()
                raise
    except cert_errors as exc:
        raise ClientConnectorCertificateError(req.connection_key, exc) from exc
    except ssl_errors as exc:
        raise ClientConnectorSSLError(req.connection_key, exc) from exc
    except OSError as exc:
        # Let a timeout masquerading as OSError propagate unchanged.
        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
            raise
        raise client_error(req.connection_key, exc) from exc
    except TypeError as type_err:
        # Example cause looks like this:
        # TypeError: transport <asyncio.sslproto._SSLProtocolTransport
        # object at 0x7f760615e460> is not supported by start_tls()

        raise ClientConnectionError(
            "Cannot initialize a TLS-in-TLS connection to host "
            f"{req.host!s}:{req.port:d} through an underlying connection "
            f"to an HTTPS proxy {req.proxy!s} ssl:{req.ssl or 'default'} "
            f"[{type_err!s}]"
        ) from type_err
    else:
        if tls_transport is None:
            msg = "Failed to start TLS (possibly caused by closing transport)"
            raise client_error(req.connection_key, OSError(msg))
        tls_proto.connection_made(
            tls_transport
        )  # Kick the state machine of the new TLS protocol

    return tls_transport, tls_proto
|
1178 |
+
|
1179 |
+
async def _create_direct_connection(
    self,
    req: ClientRequest,
    traces: List["Trace"],
    timeout: "ClientTimeout",
    *,
    client_error: Type[Exception] = ClientConnectorError,
) -> Tuple[asyncio.Transport, ResponseHandler]:
    """Resolve the target host and connect, trying each resolved address
    in order until one succeeds; re-raise the last failure otherwise.
    """
    sslcontext = self._get_ssl_context(req)
    fingerprint = self._get_fingerprint(req)

    host = req.url.raw_host
    assert host is not None
    # Replace multiple trailing dots with a single one.
    # A trailing dot is only present for fully-qualified domain names.
    # See https://github.com/aio-libs/aiohttp/pull/7364.
    if host.endswith(".."):
        host = host.rstrip(".") + "."
    port = req.port
    assert port is not None
    try:
        # Cancelling this lookup should not cancel the underlying lookup
        # or else the cancel event will get broadcast to all the waiters
        # across all connections.
        hosts = await self._resolve_host(host, port, traces=traces)
    except OSError as exc:
        if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
            raise
        # in case of proxy it is not ClientProxyConnectionError
        # it is problem of resolving proxy ip itself
        raise ClientConnectorError(req.connection_key, exc) from exc

    last_exc: Optional[Exception] = None

    # Happy-eyeballs-lite: try each resolved address until one connects
    # and (when TLS) passes the fingerprint check.
    for hinfo in hosts:
        host = hinfo["host"]
        port = hinfo["port"]

        # Strip trailing dots, certificates contain FQDN without dots.
        # See https://github.com/aio-libs/aiohttp/issues/3636
        server_hostname = (
            (req.server_hostname or hinfo["hostname"]).rstrip(".")
            if sslcontext
            else None
        )

        try:
            transp, proto = await self._wrap_create_connection(
                self._factory,
                host,
                port,
                timeout=timeout,
                ssl=sslcontext,
                family=hinfo["family"],
                proto=hinfo["proto"],
                flags=hinfo["flags"],
                server_hostname=server_hostname,
                local_addr=self._local_addr,
                req=req,
                client_error=client_error,
            )
        except ClientConnectorError as exc:
            last_exc = exc
            continue

        if req.is_ssl() and fingerprint:
            try:
                fingerprint.check(transp)
            except ServerFingerprintMismatch as exc:
                transp.close()
                if not self._cleanup_closed_disabled:
                    self._cleanup_closed_transports.append(transp)
                last_exc = exc
                continue

        return transp, proto
    else:
        # Every address failed; surface the most recent error.
        assert last_exc is not None
        raise last_exc
|
1258 |
+
|
1259 |
+
async def _create_proxy_connection(
    self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
) -> Tuple[asyncio.BaseTransport, ResponseHandler]:
    """Connect through an HTTP(S) proxy.

    Plain HTTP requests are forwarded over the proxy connection as-is
    (with Proxy-Authorization when given). HTTPS requests issue a
    CONNECT tunnel first and then upgrade the tunnel to TLS — via
    ``loop.start_tls()`` when the runtime supports it, otherwise by
    duplicating the raw socket and reconnecting over it.
    """
    self._fail_on_no_start_tls(req)
    runtime_has_start_tls = self._loop_supports_start_tls()

    headers: Dict[str, str] = {}
    if req.proxy_headers is not None:
        headers = req.proxy_headers  # type: ignore[assignment]
    headers[hdrs.HOST] = req.headers[hdrs.HOST]

    url = req.proxy
    assert url is not None
    proxy_req = ClientRequest(
        hdrs.METH_GET,
        url,
        headers=headers,
        auth=req.proxy_auth,
        loop=self._loop,
        ssl=req.ssl,
    )

    # create connection to proxy server
    transport, proto = await self._create_direct_connection(
        proxy_req, [], timeout, client_error=ClientProxyConnectionError
    )

    # Many HTTP proxies has buggy keepalive support. Let's not
    # reuse connection but close it after processing every
    # response.
    proto.force_close()

    auth = proxy_req.headers.pop(hdrs.AUTHORIZATION, None)
    if auth is not None:
        if not req.is_ssl():
            # Plain HTTP: the proxy reads the request directly.
            req.headers[hdrs.PROXY_AUTHORIZATION] = auth
        else:
            # HTTPS: authentication goes on the CONNECT request.
            proxy_req.headers[hdrs.PROXY_AUTHORIZATION] = auth

    if req.is_ssl():
        if runtime_has_start_tls:
            self._warn_about_tls_in_tls(transport, req)

        # For HTTPS requests over HTTP proxy
        # we must notify proxy to tunnel connection
        # so we send CONNECT command:
        # CONNECT www.python.org:443 HTTP/1.1
        # Host: www.python.org
        #
        # next we must do TLS handshake and so on
        # to do this we must wrap raw socket into secure one
        # asyncio handles this perfectly
        proxy_req.method = hdrs.METH_CONNECT
        proxy_req.url = req.url
        key = attr.evolve(
            req.connection_key, proxy=None, proxy_auth=None, proxy_headers_hash=None
        )
        conn = Connection(self, key, proto, self._loop)
        proxy_resp = await proxy_req.send(conn)
        try:
            protocol = conn._protocol
            assert protocol is not None

            # read_until_eof=True will ensure the connection isn't closed
            # once the response is received and processed allowing
            # START_TLS to work on the connection below.
            protocol.set_response_params(
                read_until_eof=runtime_has_start_tls,
                timeout_ceil_threshold=self._timeout_ceil_threshold,
            )
            resp = await proxy_resp.start(conn)
        except BaseException:
            proxy_resp.close()
            conn.close()
            raise
        else:
            # Detach protocol/transport from the helper Connection so
            # closing it later won't tear down the tunnel.
            conn._protocol = None
            conn._transport = None
            try:
                if resp.status != 200:
                    message = resp.reason
                    if message is None:
                        message = HTTPStatus(resp.status).phrase
                    raise ClientHttpProxyError(
                        proxy_resp.request_info,
                        resp.history,
                        status=resp.status,
                        message=message,
                        headers=resp.headers,
                    )
                if not runtime_has_start_tls:
                    rawsock = transport.get_extra_info("socket", default=None)
                    if rawsock is None:
                        raise RuntimeError(
                            "Transport does not expose socket instance"
                        )
                    # Duplicate the socket, so now we can close proxy transport
                    rawsock = rawsock.dup()
            except BaseException:
                # It shouldn't be closed in `finally` because it's fed to
                # `loop.start_tls()` and the docs say not to touch it after
                # passing there.
                transport.close()
                raise
            finally:
                if not runtime_has_start_tls:
                    transport.close()

            if not runtime_has_start_tls:
                # HTTP proxy with support for upgrade to HTTPS
                sslcontext = self._get_ssl_context(req)
                return await self._wrap_create_connection(
                    self._factory,
                    timeout=timeout,
                    ssl=sslcontext,
                    sock=rawsock,
                    server_hostname=req.host,
                    req=req,
                )

            return await self._start_tls_connection(
                # Access the old transport for the last time before it's
                # closed and forgotten forever:
                transport,
                req=req,
                timeout=timeout,
            )
        finally:
            proxy_resp.close()

    return transport, proto
|
1390 |
+
|
1391 |
+
|
1392 |
+
class UnixConnector(BaseConnector):
    """Unix socket connector.

    path - Unix socket path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # Filesystem path of the Unix domain socket to connect to.
        self._path = path

    @property
    def path(self) -> str:
        """Path to unix socket."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Open a Unix-socket connection under the sock_connect timeout.

        OS-level failures are wrapped in UnixClientConnectorError;
        timeouts propagate unchanged.
        """
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_unix_connection(
                    self._factory, self._path
                )
        except OSError as exc:
            # Let a timeout disguised as OSError propagate unchanged.
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise UnixClientConnectorError(self.path, req.connection_key, exc) from exc

        return proto
|
1443 |
+
|
1444 |
+
|
1445 |
+
class NamedPipeConnector(BaseConnector):
    """Named pipe connector.

    Only supported by the proactor event loop.
    See also: https://docs.python.org/3/library/asyncio-eventloop.html

    path - Windows named pipe path.
    keepalive_timeout - (optional) Keep-alive timeout.
    force_close - Set to True to force close and do reconnect
        after each request (and between redirects).
    limit - The total number of simultaneous connections.
    limit_per_host - Number of simultaneous connections to one host.
    loop - Optional event loop.
    """

    def __init__(
        self,
        path: str,
        force_close: bool = False,
        keepalive_timeout: Union[object, float, None] = sentinel,
        limit: int = 100,
        limit_per_host: int = 0,
        loop: Optional[asyncio.AbstractEventLoop] = None,
    ) -> None:
        super().__init__(
            force_close=force_close,
            keepalive_timeout=keepalive_timeout,
            limit=limit,
            limit_per_host=limit_per_host,
            loop=loop,
        )
        # create_pipe_connection() exists only on the Windows proactor
        # loop; fail fast on any other loop implementation.
        if not isinstance(
            self._loop, asyncio.ProactorEventLoop  # type: ignore[attr-defined]
        ):
            raise RuntimeError(
                "Named Pipes only available in proactor " "loop under windows"
            )
        self._path = path

    @property
    def path(self) -> str:
        """Path to the named pipe."""
        return self._path

    async def _create_connection(
        self, req: ClientRequest, traces: List["Trace"], timeout: "ClientTimeout"
    ) -> ResponseHandler:
        """Open a named-pipe connection under the sock_connect timeout."""
        try:
            async with ceil_timeout(
                timeout.sock_connect, ceil_threshold=timeout.ceil_threshold
            ):
                _, proto = await self._loop.create_pipe_connection(  # type: ignore[attr-defined]
                    self._factory, self._path
                )
                # the drain is required so that the connection_made is called
                # and transport is set otherwise it is not set before the
                # `assert conn.transport is not None`
                # in client.py's _request method
                await asyncio.sleep(0)
                # other option is to manually set transport like
                # `proto.transport = trans`
        except OSError as exc:
            if exc.errno is None and isinstance(exc, asyncio.TimeoutError):
                raise
            raise ClientConnectorError(req.connection_key, exc) from exc

        return cast(ResponseHandler, proto)
|
llmeval-env/lib/python3.10/site-packages/aiohttp/cookiejar.py
ADDED
@@ -0,0 +1,419 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import asyncio
|
2 |
+
import calendar
|
3 |
+
import contextlib
|
4 |
+
import datetime
|
5 |
+
import os # noqa
|
6 |
+
import pathlib
|
7 |
+
import pickle
|
8 |
+
import re
|
9 |
+
import time
|
10 |
+
from collections import defaultdict
|
11 |
+
from http.cookies import BaseCookie, Morsel, SimpleCookie
|
12 |
+
from math import ceil
|
13 |
+
from typing import ( # noqa
|
14 |
+
DefaultDict,
|
15 |
+
Dict,
|
16 |
+
Iterable,
|
17 |
+
Iterator,
|
18 |
+
List,
|
19 |
+
Mapping,
|
20 |
+
Optional,
|
21 |
+
Set,
|
22 |
+
Tuple,
|
23 |
+
Union,
|
24 |
+
cast,
|
25 |
+
)
|
26 |
+
|
27 |
+
from yarl import URL
|
28 |
+
|
29 |
+
from .abc import AbstractCookieJar, ClearCookiePredicate
|
30 |
+
from .helpers import is_ip_address
|
31 |
+
from .typedefs import LooseCookies, PathLike, StrOrURL
|
32 |
+
|
33 |
+
__all__ = ("CookieJar", "DummyCookieJar")
|
34 |
+
|
35 |
+
|
36 |
+
CookieItem = Union[str, "Morsel[str]"]
|
37 |
+
|
38 |
+
|
39 |
+
class CookieJar(AbstractCookieJar):
|
40 |
+
"""Implements cookie storage adhering to RFC 6265."""
|
41 |
+
|
42 |
+
DATE_TOKENS_RE = re.compile(
|
43 |
+
r"[\x09\x20-\x2F\x3B-\x40\x5B-\x60\x7B-\x7E]*"
|
44 |
+
r"(?P<token>[\x00-\x08\x0A-\x1F\d:a-zA-Z\x7F-\xFF]+)"
|
45 |
+
)
|
46 |
+
|
47 |
+
DATE_HMS_TIME_RE = re.compile(r"(\d{1,2}):(\d{1,2}):(\d{1,2})")
|
48 |
+
|
49 |
+
DATE_DAY_OF_MONTH_RE = re.compile(r"(\d{1,2})")
|
50 |
+
|
51 |
+
DATE_MONTH_RE = re.compile(
|
52 |
+
"(jan)|(feb)|(mar)|(apr)|(may)|(jun)|(jul)|" "(aug)|(sep)|(oct)|(nov)|(dec)",
|
53 |
+
re.I,
|
54 |
+
)
|
55 |
+
|
56 |
+
DATE_YEAR_RE = re.compile(r"(\d{2,4})")
|
57 |
+
|
58 |
+
# calendar.timegm() fails for timestamps after datetime.datetime.max
|
59 |
+
# Minus one as a loss of precision occurs when timestamp() is called.
|
60 |
+
MAX_TIME = (
|
61 |
+
int(datetime.datetime.max.replace(tzinfo=datetime.timezone.utc).timestamp()) - 1
|
62 |
+
)
|
63 |
+
try:
|
64 |
+
calendar.timegm(time.gmtime(MAX_TIME))
|
65 |
+
except (OSError, ValueError):
|
66 |
+
# Hit the maximum representable time on Windows
|
67 |
+
# https://learn.microsoft.com/en-us/cpp/c-runtime-library/reference/localtime-localtime32-localtime64
|
68 |
+
# Throws ValueError on PyPy 3.8 and 3.9, OSError elsewhere
|
69 |
+
MAX_TIME = calendar.timegm((3000, 12, 31, 23, 59, 59, -1, -1, -1))
|
70 |
+
except OverflowError:
|
71 |
+
# #4515: datetime.max may not be representable on 32-bit platforms
|
72 |
+
MAX_TIME = 2**31 - 1
|
73 |
+
# Avoid minuses in the future, 3x faster
|
74 |
+
SUB_MAX_TIME = MAX_TIME - 1
|
75 |
+
|
76 |
+
def __init__(
    self,
    *,
    unsafe: bool = False,
    quote_cookie: bool = True,
    treat_as_secure_origin: Union[StrOrURL, List[StrOrURL], None] = None,
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> None:
    """Initialize the jar.

    unsafe - accept cookies from IP-address hosts as well.
    quote_cookie - quote cookie values when serializing.
    treat_as_secure_origin - origin(s) for which "Secure" cookies are
        accepted even over plain HTTP (str, URL or a list of either).
    """
    super().__init__(loop=loop)
    # Cookies bucketed by (domain, path); SimpleCookie per bucket.
    self._cookies: DefaultDict[Tuple[str, str], SimpleCookie] = defaultdict(
        SimpleCookie
    )
    # (domain, name) pairs that must match the host exactly.
    self._host_only_cookies: Set[Tuple[str, str]] = set()
    self._unsafe = unsafe
    self._quote_cookie = quote_cookie
    # Normalize treat_as_secure_origin to a list of origin URLs.
    if treat_as_secure_origin is None:
        treat_as_secure_origin = []
    elif isinstance(treat_as_secure_origin, URL):
        treat_as_secure_origin = [treat_as_secure_origin.origin()]
    elif isinstance(treat_as_secure_origin, str):
        treat_as_secure_origin = [URL(treat_as_secure_origin).origin()]
    else:
        treat_as_secure_origin = [
            URL(url).origin() if isinstance(url, str) else url.origin()
            for url in treat_as_secure_origin
        ]
    self._treat_as_secure_origin = treat_as_secure_origin
    # Earliest pending expiration time (ceil'd to whole seconds), plus
    # a per-(domain, path, name) map of exact expiration timestamps.
    self._next_expiration: float = ceil(time.time())
    self._expirations: Dict[Tuple[str, str, str], float] = {}
|
105 |
+
|
106 |
+
def save(self, file_path: PathLike) -> None:
    """Pickle the jar's cookie store to *file_path* (binary, highest protocol)."""
    target = pathlib.Path(file_path)
    with target.open(mode="wb") as fp:
        pickle.dump(self._cookies, fp, pickle.HIGHEST_PROTOCOL)
|
110 |
+
|
111 |
+
def load(self, file_path: PathLike) -> None:
    """Replace the jar's cookie store with the pickle stored at *file_path*."""
    source = pathlib.Path(file_path)
    with source.open(mode="rb") as fp:
        self._cookies = pickle.load(fp)
|
115 |
+
|
116 |
+
def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
    """Remove cookies matching *predicate*; remove everything when it is None.

    Expired cookies are always removed as a side effect.
    """
    if predicate is None:
        # Fast path: wipe every cookie and reset expiration bookkeeping.
        self._next_expiration = ceil(time.time())
        self._cookies.clear()
        self._host_only_cookies.clear()
        self._expirations.clear()
        return

    now = time.time()
    # Collect keys first; mutation happens after iteration completes.
    doomed = [
        (domain, path, name)
        for (domain, path), cookie in self._cookies.items()
        for name, morsel in cookie.items()
        if (
            (domain, path, name) in self._expirations
            and self._expirations[(domain, path, name)] <= now
        )
        or predicate(morsel)
    ]

    for domain, path, name in doomed:
        self._host_only_cookies.discard((domain, name))
        self._expirations.pop((domain, path, name), None)
        self._cookies[(domain, path)].pop(name, None)

    # Recompute the next wake-up time; SUB_MAX_TIME avoids overflowing
    # MAX_TIME after the +1 adjustment.
    if self._expirations:
        self._next_expiration = (
            min(*self._expirations.values(), self.SUB_MAX_TIME) + 1
        )
    else:
        self._next_expiration = self.MAX_TIME
|
146 |
+
|
147 |
+
def clear_domain(self, domain: str) -> None:
    """Drop every cookie whose domain attribute matches *domain*."""
    self.clear(lambda morsel: self._is_domain_match(domain, morsel["domain"]))
|
149 |
+
|
150 |
+
def __iter__(self) -> "Iterator[Morsel[str]]":
|
151 |
+
self._do_expiration()
|
152 |
+
for val in self._cookies.values():
|
153 |
+
yield from val.values()
|
154 |
+
|
155 |
+
def __len__(self) -> int:
|
156 |
+
return sum(1 for i in self)
|
157 |
+
|
158 |
+
def _do_expiration(self) -> None:
|
159 |
+
self.clear(lambda x: False)
|
160 |
+
|
161 |
+
def _expire_cookie(self, when: float, domain: str, path: str, name: str) -> None:
|
162 |
+
self._next_expiration = min(self._next_expiration, when)
|
163 |
+
self._expirations[(domain, path, name)] = when
|
164 |
+
|
165 |
+
def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
    """Update cookies.

    Stores *cookies* received for *response_url*, filling in the default
    domain/path per RFC 6265 and recording Max-Age/Expires expiration.
    Cookies from IP-address hosts are rejected unless the jar was created
    with ``unsafe=True``.
    """
    hostname = response_url.raw_host

    if not self._unsafe and is_ip_address(hostname):
        # Don't accept cookies from IPs
        return

    # Accept either a mapping or an iterable of (name, value) pairs.
    if isinstance(cookies, Mapping):
        cookies = cookies.items()

    for name, cookie in cookies:
        if not isinstance(cookie, Morsel):
            # Wrap a bare value in a SimpleCookie so attributes get parsed.
            tmp = SimpleCookie()
            tmp[name] = cookie  # type: ignore[assignment]
            cookie = tmp[name]

        domain = cookie["domain"]

        # ignore domains with trailing dots
        if domain.endswith("."):
            domain = ""
            del cookie["domain"]

        if not domain and hostname is not None:
            # Set the cookie's domain to the response hostname
            # and set its host-only-flag
            self._host_only_cookies.add((hostname, name))
            domain = cookie["domain"] = hostname

        if domain.startswith("."):
            # Remove leading dot
            domain = domain[1:]
            cookie["domain"] = domain

        if hostname and not self._is_domain_match(domain, hostname):
            # Setting cookies for different domains is not allowed
            continue

        path = cookie["path"]
        if not path or not path.startswith("/"):
            # Set the cookie's path to the response path
            path = response_url.path
            if not path.startswith("/"):
                path = "/"
            else:
                # Cut everything from the last slash to the end
                path = "/" + path[1 : path.rfind("/")]
            cookie["path"] = path

        # Max-Age takes precedence over Expires (RFC 6265 ordering).
        max_age = cookie["max-age"]
        if max_age:
            try:
                delta_seconds = int(max_age)
                # Clamp to MAX_TIME so the timestamp stays representable.
                max_age_expiration = min(time.time() + delta_seconds, self.MAX_TIME)
                self._expire_cookie(max_age_expiration, domain, path, name)
            except ValueError:
                # Non-numeric Max-Age is discarded.
                cookie["max-age"] = ""

        else:
            expires = cookie["expires"]
            if expires:
                expire_time = self._parse_date(expires)
                if expire_time:
                    self._expire_cookie(expire_time, domain, path, name)
                else:
                    # Unparseable Expires is discarded.
                    cookie["expires"] = ""

        self._cookies[(domain, path)][name] = cookie

    self._do_expiration()
|
236 |
+
|
237 |
+
def filter_cookies(self, request_url: URL = URL()) -> "BaseCookie[str]":
    """Returns this jar's cookies filtered by their attributes.

    Applies RFC 6265 domain, path, and Secure matching against
    *request_url*; origins listed in ``treat_as_secure_origin`` are
    treated as secure even over http/ws.
    """
    # quote_cookie controls whether values come back quoted (SimpleCookie)
    # or raw (BaseCookie).
    filtered: Union[SimpleCookie, "BaseCookie[str]"] = (
        SimpleCookie() if self._quote_cookie else BaseCookie()
    )
    if not self._cookies:
        # Skip do_expiration() if there are no cookies.
        return filtered
    self._do_expiration()
    if not self._cookies:
        # Skip rest of function if no non-expired cookies.
        return filtered
    request_url = URL(request_url)
    hostname = request_url.raw_host or ""

    is_not_secure = request_url.scheme not in ("https", "wss")
    if is_not_secure and self._treat_as_secure_origin:
        request_origin = URL()
        with contextlib.suppress(ValueError):
            request_origin = request_url.origin()
        is_not_secure = request_origin not in self._treat_as_secure_origin

    # Point 2: https://www.rfc-editor.org/rfc/rfc6265.html#section-5.4
    # Longer paths sort later, so they overwrite shorter-path entries.
    for cookie in sorted(self, key=lambda c: len(c["path"])):
        name = cookie.key
        domain = cookie["domain"]

        # Send shared cookies
        if not domain:
            filtered[name] = cookie.value
            continue

        # Cookies are never sent to bare IP hosts unless unsafe=True.
        if not self._unsafe and is_ip_address(hostname):
            continue

        if (domain, name) in self._host_only_cookies:
            # Host-only cookies require an exact host match.
            if domain != hostname:
                continue
        elif not self._is_domain_match(domain, hostname):
            continue

        if not self._is_path_match(request_url.path, cookie["path"]):
            continue

        if is_not_secure and cookie["secure"]:
            continue

        # It's critical we use the Morsel so the coded_value
        # (based on cookie version) is preserved
        mrsl_val = cast("Morsel[str]", cookie.get(cookie.key, Morsel()))
        mrsl_val.set(cookie.key, cookie.value, cookie.coded_value)
        filtered[name] = mrsl_val

    return filtered
|
291 |
+
|
292 |
+
@staticmethod
|
293 |
+
def _is_domain_match(domain: str, hostname: str) -> bool:
|
294 |
+
"""Implements domain matching adhering to RFC 6265."""
|
295 |
+
if hostname == domain:
|
296 |
+
return True
|
297 |
+
|
298 |
+
if not hostname.endswith(domain):
|
299 |
+
return False
|
300 |
+
|
301 |
+
non_matching = hostname[: -len(domain)]
|
302 |
+
|
303 |
+
if not non_matching.endswith("."):
|
304 |
+
return False
|
305 |
+
|
306 |
+
return not is_ip_address(hostname)
|
307 |
+
|
308 |
+
@staticmethod
|
309 |
+
def _is_path_match(req_path: str, cookie_path: str) -> bool:
|
310 |
+
"""Implements path matching adhering to RFC 6265."""
|
311 |
+
if not req_path.startswith("/"):
|
312 |
+
req_path = "/"
|
313 |
+
|
314 |
+
if req_path == cookie_path:
|
315 |
+
return True
|
316 |
+
|
317 |
+
if not req_path.startswith(cookie_path):
|
318 |
+
return False
|
319 |
+
|
320 |
+
if cookie_path.endswith("/"):
|
321 |
+
return True
|
322 |
+
|
323 |
+
non_matching = req_path[len(cookie_path) :]
|
324 |
+
|
325 |
+
return non_matching.startswith("/")
|
326 |
+
|
327 |
+
@classmethod
def _parse_date(cls, date_str: str) -> Optional[int]:
    """Implements date string parsing adhering to RFC 6265.

    Scans *date_str* token by token, picking up (in any order) a
    HH:MM:SS time, a day of month, a month name, and a year, then
    validates the ranges.  Returns a POSIX timestamp, or None when the
    string is empty, incomplete, or out of range.
    """
    if not date_str:
        return None

    # Track which date components have been found so far.
    found_time = False
    found_day = False
    found_month = False
    found_year = False

    hour = minute = second = 0
    day = 0
    month = 0
    year = 0

    for token_match in cls.DATE_TOKENS_RE.finditer(date_str):

        token = token_match.group("token")

        # Each component is taken from the FIRST token that matches it.
        if not found_time:
            time_match = cls.DATE_HMS_TIME_RE.match(token)
            if time_match:
                found_time = True
                hour, minute, second = (int(s) for s in time_match.groups())
                continue

        if not found_day:
            day_match = cls.DATE_DAY_OF_MONTH_RE.match(token)
            if day_match:
                found_day = True
                day = int(day_match.group())
                continue

        if not found_month:
            month_match = cls.DATE_MONTH_RE.match(token)
            if month_match:
                found_month = True
                assert month_match.lastindex is not None
                # The index of the matched alternative gives the month number.
                month = month_match.lastindex
                continue

        if not found_year:
            year_match = cls.DATE_YEAR_RE.match(token)
            if year_match:
                found_year = True
                year = int(year_match.group())

    # Two-digit years are windowed per RFC 6265: 70-99 -> 19xx, 0-69 -> 20xx.
    if 70 <= year <= 99:
        year += 1900
    elif 0 <= year <= 69:
        year += 2000

    # All four components must be present.
    if False in (found_day, found_month, found_year, found_time):
        return None

    if not 1 <= day <= 31:
        return None

    # RFC 6265 rejects years before 1601 and out-of-range time fields.
    if year < 1601 or hour > 23 or minute > 59 or second > 59:
        return None

    return calendar.timegm((year, month, day, hour, minute, second, -1, -1, -1))
|
390 |
+
|
391 |
+
|
392 |
+
class DummyCookieJar(AbstractCookieJar):
    """Implements a dummy cookie storage.

    It can be used with the ClientSession when no cookie processing is needed.

    """

    def __init__(self, *, loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
        super().__init__(loop=loop)

    def __iter__(self) -> "Iterator[Morsel[str]]":
        # Empty generator: the jar never holds anything.
        return
        yield  # pragma: no cover

    def __len__(self) -> int:
        return 0

    def clear(self, predicate: Optional[ClearCookiePredicate] = None) -> None:
        """Intentionally a no-op."""

    def clear_domain(self, domain: str) -> None:
        """Intentionally a no-op."""

    def update_cookies(self, cookies: LooseCookies, response_url: URL = URL()) -> None:
        """Intentionally a no-op: incoming cookies are discarded."""

    def filter_cookies(self, request_url: URL) -> "BaseCookie[str]":
        # Always report an empty cookie set.
        return SimpleCookie()
|
llmeval-env/lib/python3.10/site-packages/aiohttp/hdrs.py
ADDED
@@ -0,0 +1,108 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""HTTP Headers constants."""
|
2 |
+
|
3 |
+
# After changing the file content call ./tools/gen.py
|
4 |
+
# to regenerate the headers parser
|
5 |
+
from typing import Final, Set
|
6 |
+
|
7 |
+
from multidict import istr
|
8 |
+
|
9 |
+
METH_ANY: Final[str] = "*"
|
10 |
+
METH_CONNECT: Final[str] = "CONNECT"
|
11 |
+
METH_HEAD: Final[str] = "HEAD"
|
12 |
+
METH_GET: Final[str] = "GET"
|
13 |
+
METH_DELETE: Final[str] = "DELETE"
|
14 |
+
METH_OPTIONS: Final[str] = "OPTIONS"
|
15 |
+
METH_PATCH: Final[str] = "PATCH"
|
16 |
+
METH_POST: Final[str] = "POST"
|
17 |
+
METH_PUT: Final[str] = "PUT"
|
18 |
+
METH_TRACE: Final[str] = "TRACE"
|
19 |
+
|
20 |
+
METH_ALL: Final[Set[str]] = {
|
21 |
+
METH_CONNECT,
|
22 |
+
METH_HEAD,
|
23 |
+
METH_GET,
|
24 |
+
METH_DELETE,
|
25 |
+
METH_OPTIONS,
|
26 |
+
METH_PATCH,
|
27 |
+
METH_POST,
|
28 |
+
METH_PUT,
|
29 |
+
METH_TRACE,
|
30 |
+
}
|
31 |
+
|
32 |
+
ACCEPT: Final[istr] = istr("Accept")
|
33 |
+
ACCEPT_CHARSET: Final[istr] = istr("Accept-Charset")
|
34 |
+
ACCEPT_ENCODING: Final[istr] = istr("Accept-Encoding")
|
35 |
+
ACCEPT_LANGUAGE: Final[istr] = istr("Accept-Language")
|
36 |
+
ACCEPT_RANGES: Final[istr] = istr("Accept-Ranges")
|
37 |
+
ACCESS_CONTROL_MAX_AGE: Final[istr] = istr("Access-Control-Max-Age")
|
38 |
+
ACCESS_CONTROL_ALLOW_CREDENTIALS: Final[istr] = istr("Access-Control-Allow-Credentials")
|
39 |
+
ACCESS_CONTROL_ALLOW_HEADERS: Final[istr] = istr("Access-Control-Allow-Headers")
|
40 |
+
ACCESS_CONTROL_ALLOW_METHODS: Final[istr] = istr("Access-Control-Allow-Methods")
|
41 |
+
ACCESS_CONTROL_ALLOW_ORIGIN: Final[istr] = istr("Access-Control-Allow-Origin")
|
42 |
+
ACCESS_CONTROL_EXPOSE_HEADERS: Final[istr] = istr("Access-Control-Expose-Headers")
|
43 |
+
ACCESS_CONTROL_REQUEST_HEADERS: Final[istr] = istr("Access-Control-Request-Headers")
|
44 |
+
ACCESS_CONTROL_REQUEST_METHOD: Final[istr] = istr("Access-Control-Request-Method")
|
45 |
+
AGE: Final[istr] = istr("Age")
|
46 |
+
ALLOW: Final[istr] = istr("Allow")
|
47 |
+
AUTHORIZATION: Final[istr] = istr("Authorization")
|
48 |
+
CACHE_CONTROL: Final[istr] = istr("Cache-Control")
|
49 |
+
CONNECTION: Final[istr] = istr("Connection")
|
50 |
+
CONTENT_DISPOSITION: Final[istr] = istr("Content-Disposition")
|
51 |
+
CONTENT_ENCODING: Final[istr] = istr("Content-Encoding")
|
52 |
+
CONTENT_LANGUAGE: Final[istr] = istr("Content-Language")
|
53 |
+
CONTENT_LENGTH: Final[istr] = istr("Content-Length")
|
54 |
+
CONTENT_LOCATION: Final[istr] = istr("Content-Location")
|
55 |
+
CONTENT_MD5: Final[istr] = istr("Content-MD5")
|
56 |
+
CONTENT_RANGE: Final[istr] = istr("Content-Range")
|
57 |
+
CONTENT_TRANSFER_ENCODING: Final[istr] = istr("Content-Transfer-Encoding")
|
58 |
+
CONTENT_TYPE: Final[istr] = istr("Content-Type")
|
59 |
+
COOKIE: Final[istr] = istr("Cookie")
|
60 |
+
DATE: Final[istr] = istr("Date")
|
61 |
+
DESTINATION: Final[istr] = istr("Destination")
|
62 |
+
DIGEST: Final[istr] = istr("Digest")
|
63 |
+
ETAG: Final[istr] = istr("Etag")
|
64 |
+
EXPECT: Final[istr] = istr("Expect")
|
65 |
+
EXPIRES: Final[istr] = istr("Expires")
|
66 |
+
FORWARDED: Final[istr] = istr("Forwarded")
|
67 |
+
FROM: Final[istr] = istr("From")
|
68 |
+
HOST: Final[istr] = istr("Host")
|
69 |
+
IF_MATCH: Final[istr] = istr("If-Match")
|
70 |
+
IF_MODIFIED_SINCE: Final[istr] = istr("If-Modified-Since")
|
71 |
+
IF_NONE_MATCH: Final[istr] = istr("If-None-Match")
|
72 |
+
IF_RANGE: Final[istr] = istr("If-Range")
|
73 |
+
IF_UNMODIFIED_SINCE: Final[istr] = istr("If-Unmodified-Since")
|
74 |
+
KEEP_ALIVE: Final[istr] = istr("Keep-Alive")
|
75 |
+
LAST_EVENT_ID: Final[istr] = istr("Last-Event-ID")
|
76 |
+
LAST_MODIFIED: Final[istr] = istr("Last-Modified")
|
77 |
+
LINK: Final[istr] = istr("Link")
|
78 |
+
LOCATION: Final[istr] = istr("Location")
|
79 |
+
MAX_FORWARDS: Final[istr] = istr("Max-Forwards")
|
80 |
+
ORIGIN: Final[istr] = istr("Origin")
|
81 |
+
PRAGMA: Final[istr] = istr("Pragma")
|
82 |
+
PROXY_AUTHENTICATE: Final[istr] = istr("Proxy-Authenticate")
|
83 |
+
PROXY_AUTHORIZATION: Final[istr] = istr("Proxy-Authorization")
|
84 |
+
RANGE: Final[istr] = istr("Range")
|
85 |
+
REFERER: Final[istr] = istr("Referer")
|
86 |
+
RETRY_AFTER: Final[istr] = istr("Retry-After")
|
87 |
+
SEC_WEBSOCKET_ACCEPT: Final[istr] = istr("Sec-WebSocket-Accept")
|
88 |
+
SEC_WEBSOCKET_VERSION: Final[istr] = istr("Sec-WebSocket-Version")
|
89 |
+
SEC_WEBSOCKET_PROTOCOL: Final[istr] = istr("Sec-WebSocket-Protocol")
|
90 |
+
SEC_WEBSOCKET_EXTENSIONS: Final[istr] = istr("Sec-WebSocket-Extensions")
|
91 |
+
SEC_WEBSOCKET_KEY: Final[istr] = istr("Sec-WebSocket-Key")
|
92 |
+
SEC_WEBSOCKET_KEY1: Final[istr] = istr("Sec-WebSocket-Key1")
|
93 |
+
SERVER: Final[istr] = istr("Server")
|
94 |
+
SET_COOKIE: Final[istr] = istr("Set-Cookie")
|
95 |
+
TE: Final[istr] = istr("TE")
|
96 |
+
TRAILER: Final[istr] = istr("Trailer")
|
97 |
+
TRANSFER_ENCODING: Final[istr] = istr("Transfer-Encoding")
|
98 |
+
UPGRADE: Final[istr] = istr("Upgrade")
|
99 |
+
URI: Final[istr] = istr("URI")
|
100 |
+
USER_AGENT: Final[istr] = istr("User-Agent")
|
101 |
+
VARY: Final[istr] = istr("Vary")
|
102 |
+
VIA: Final[istr] = istr("Via")
|
103 |
+
WANT_DIGEST: Final[istr] = istr("Want-Digest")
|
104 |
+
WARNING: Final[istr] = istr("Warning")
|
105 |
+
WWW_AUTHENTICATE: Final[istr] = istr("WWW-Authenticate")
|
106 |
+
X_FORWARDED_FOR: Final[istr] = istr("X-Forwarded-For")
|
107 |
+
X_FORWARDED_HOST: Final[istr] = istr("X-Forwarded-Host")
|
108 |
+
X_FORWARDED_PROTO: Final[istr] = istr("X-Forwarded-Proto")
|
llmeval-env/lib/python3.10/site-packages/aiohttp/helpers.py
ADDED
@@ -0,0 +1,1029 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Various helper functions"""
|
2 |
+
|
3 |
+
import asyncio
|
4 |
+
import base64
|
5 |
+
import binascii
|
6 |
+
import contextlib
|
7 |
+
import datetime
|
8 |
+
import enum
|
9 |
+
import functools
|
10 |
+
import inspect
|
11 |
+
import netrc
|
12 |
+
import os
|
13 |
+
import platform
|
14 |
+
import re
|
15 |
+
import sys
|
16 |
+
import time
|
17 |
+
import warnings
|
18 |
+
import weakref
|
19 |
+
from collections import namedtuple
|
20 |
+
from contextlib import suppress
|
21 |
+
from email.parser import HeaderParser
|
22 |
+
from email.utils import parsedate
|
23 |
+
from math import ceil
|
24 |
+
from pathlib import Path
|
25 |
+
from types import TracebackType
|
26 |
+
from typing import (
|
27 |
+
Any,
|
28 |
+
Callable,
|
29 |
+
ContextManager,
|
30 |
+
Dict,
|
31 |
+
Generator,
|
32 |
+
Generic,
|
33 |
+
Iterable,
|
34 |
+
Iterator,
|
35 |
+
List,
|
36 |
+
Mapping,
|
37 |
+
Optional,
|
38 |
+
Pattern,
|
39 |
+
Protocol,
|
40 |
+
Tuple,
|
41 |
+
Type,
|
42 |
+
TypeVar,
|
43 |
+
Union,
|
44 |
+
get_args,
|
45 |
+
overload,
|
46 |
+
)
|
47 |
+
from urllib.parse import quote
|
48 |
+
from urllib.request import getproxies, proxy_bypass
|
49 |
+
|
50 |
+
import attr
|
51 |
+
from multidict import MultiDict, MultiDictProxy, MultiMapping
|
52 |
+
from yarl import URL
|
53 |
+
|
54 |
+
from . import hdrs
|
55 |
+
from .log import client_logger, internal_logger
|
56 |
+
|
57 |
+
if sys.version_info >= (3, 11):
|
58 |
+
import asyncio as async_timeout
|
59 |
+
else:
|
60 |
+
import async_timeout
|
61 |
+
|
62 |
+
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
|
63 |
+
|
64 |
+
IS_MACOS = platform.system() == "Darwin"
|
65 |
+
IS_WINDOWS = platform.system() == "Windows"
|
66 |
+
|
67 |
+
PY_310 = sys.version_info >= (3, 10)
|
68 |
+
PY_311 = sys.version_info >= (3, 11)
|
69 |
+
|
70 |
+
|
71 |
+
_T = TypeVar("_T")
|
72 |
+
_S = TypeVar("_S")
|
73 |
+
|
74 |
+
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
|
75 |
+
sentinel = _SENTINEL.sentinel
|
76 |
+
|
77 |
+
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS"))
|
78 |
+
|
79 |
+
DEBUG = sys.flags.dev_mode or (
|
80 |
+
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
|
81 |
+
)
|
82 |
+
|
83 |
+
|
84 |
+
# Character classes used for HTTP token validation (RFC 2616 grammar).
CHAR = {chr(i) for i in range(0, 128)}  # all 7-bit ASCII characters
# Control characters 0x00-0x1F plus DEL (0x7F).
CTL = {chr(i) for i in range(0, 32)} | {
    chr(127),
}
# Characters that delimit tokens in HTTP header values.
SEPARATORS = {
    "(",
    ")",
    "<",
    ">",
    "@",
    ",",
    ";",
    ":",
    "\\",
    '"',
    "/",
    "[",
    "]",
    "?",
    "=",
    "{",
    "}",
    " ",
    chr(9),
}
# NOTE(review): chr(9) (TAB) is in both CTL and SEPARATORS, so the double
# symmetric difference re-adds TAB to TOKEN — presumably unintentional,
# but preserved as-is.
TOKEN = CHAR ^ CTL ^ SEPARATORS
|
110 |
+
|
111 |
+
|
112 |
+
class noop:
    """Awaitable that suspends once via a bare yield and completes with None."""

    def __await__(self) -> Generator[None, None, None]:
        # Single bare yield: relinquishes control once before finishing.
        yield
|
115 |
+
|
116 |
+
|
117 |
+
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
|
118 |
+
"""Http basic authentication helper."""
|
119 |
+
|
120 |
+
def __new__(
|
121 |
+
cls, login: str, password: str = "", encoding: str = "latin1"
|
122 |
+
) -> "BasicAuth":
|
123 |
+
if login is None:
|
124 |
+
raise ValueError("None is not allowed as login value")
|
125 |
+
|
126 |
+
if password is None:
|
127 |
+
raise ValueError("None is not allowed as password value")
|
128 |
+
|
129 |
+
if ":" in login:
|
130 |
+
raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
|
131 |
+
|
132 |
+
return super().__new__(cls, login, password, encoding)
|
133 |
+
|
134 |
+
@classmethod
|
135 |
+
def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
|
136 |
+
"""Create a BasicAuth object from an Authorization HTTP header."""
|
137 |
+
try:
|
138 |
+
auth_type, encoded_credentials = auth_header.split(" ", 1)
|
139 |
+
except ValueError:
|
140 |
+
raise ValueError("Could not parse authorization header.")
|
141 |
+
|
142 |
+
if auth_type.lower() != "basic":
|
143 |
+
raise ValueError("Unknown authorization method %s" % auth_type)
|
144 |
+
|
145 |
+
try:
|
146 |
+
decoded = base64.b64decode(
|
147 |
+
encoded_credentials.encode("ascii"), validate=True
|
148 |
+
).decode(encoding)
|
149 |
+
except binascii.Error:
|
150 |
+
raise ValueError("Invalid base64 encoding.")
|
151 |
+
|
152 |
+
try:
|
153 |
+
# RFC 2617 HTTP Authentication
|
154 |
+
# https://www.ietf.org/rfc/rfc2617.txt
|
155 |
+
# the colon must be present, but the username and password may be
|
156 |
+
# otherwise blank.
|
157 |
+
username, password = decoded.split(":", 1)
|
158 |
+
except ValueError:
|
159 |
+
raise ValueError("Invalid credentials.")
|
160 |
+
|
161 |
+
return cls(username, password, encoding=encoding)
|
162 |
+
|
163 |
+
@classmethod
|
164 |
+
def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
|
165 |
+
"""Create BasicAuth from url."""
|
166 |
+
if not isinstance(url, URL):
|
167 |
+
raise TypeError("url should be yarl.URL instance")
|
168 |
+
if url.user is None:
|
169 |
+
return None
|
170 |
+
return cls(url.user, url.password or "", encoding=encoding)
|
171 |
+
|
172 |
+
def encode(self) -> str:
|
173 |
+
"""Encode credentials."""
|
174 |
+
creds = (f"{self.login}:{self.password}").encode(self.encoding)
|
175 |
+
return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
|
176 |
+
|
177 |
+
|
178 |
+
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Split *url* into a credential-free URL and its BasicAuth, if present."""
    auth = BasicAuth.from_url(url)
    return (url, None) if auth is None else (url.with_user(None), auth)
|
184 |
+
|
185 |
+
|
186 |
+
def netrc_from_env() -> Optional[netrc.netrc]:
    """Load netrc from file.

    The path comes from the NETRC environment variable when set; otherwise
    the platform default inside the user's home directory is used.

    Returns None if it couldn't be found or fails to parse.
    """
    netrc_env = os.environ.get("NETRC")

    if netrc_env is not None:
        candidate = Path(netrc_env)
    else:
        try:
            home_dir = Path.home()
        except RuntimeError as exc:  # pragma: no cover
            # if pathlib can't resolve home, it may raise a RuntimeError
            client_logger.debug(
                "Could not resolve home directory when "
                "trying to look for .netrc file: %s",
                exc,
            )
            return None

        candidate = home_dir / ("_netrc" if IS_WINDOWS else ".netrc")

    try:
        return netrc.netrc(str(candidate))
    except netrc.NetrcParseError as exc:
        client_logger.warning("Could not parse .netrc file: %s", exc)
    except OSError as exc:
        file_exists = False
        with contextlib.suppress(OSError):
            file_exists = candidate.is_file()
        # we couldn't read the file (doesn't exist, permissions, etc.)
        # Only warn when the environment explicitly asked for the file, or
        # the default file appears to exist but could not be read.
        if netrc_env or file_exists:
            client_logger.warning("Could not read .netrc file: %s", exc)

    return None
|
227 |
+
|
228 |
+
|
229 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ProxyInfo:
    """Proxy endpoint plus its optional basic-auth credentials."""

    proxy: URL
    proxy_auth: Optional[BasicAuth]
|
233 |
+
|
234 |
+
|
235 |
+
def basicauth_from_netrc(netrc_obj: Optional[netrc.netrc], host: str) -> BasicAuth:
    """
    Return :py:class:`~aiohttp.BasicAuth` credentials for ``host`` from ``netrc_obj``.

    :raises LookupError: if ``netrc_obj`` is :py:data:`None` or if no
                         entry is found for the ``host``.
    """
    if netrc_obj is None:
        raise LookupError("No .netrc file found")

    entry = netrc_obj.authenticators(host)
    if entry is None:
        raise LookupError(f"No entry for {host!s} found in the `.netrc` file.")
    login, account, password = entry

    # TODO(PY311): username = login or account
    # Up to python 3.10, account could be None if not specified,
    # and login will be empty string if not specified. From 3.11,
    # login and account will be empty string if not specified.
    username = login if (login or account is None) else account

    # TODO(PY311): Remove this, as password will be empty string
    # if not specified
    return BasicAuth(username, password if password is not None else "")
|
262 |
+
|
263 |
+
|
264 |
+
def proxies_from_env() -> Dict[str, ProxyInfo]:
    """Build a scheme -> ProxyInfo map from the process environment.

    HTTPS/WSS proxies are not supported and are skipped with a warning;
    missing credentials are filled from .netrc when available.
    """
    candidate_urls = {
        scheme: URL(raw)
        for scheme, raw in getproxies().items()
        if scheme in ("http", "https", "ws", "wss")
    }
    netrc_obj = netrc_from_env()
    result: Dict[str, ProxyInfo] = {}
    for scheme, raw_url in candidate_urls.items():
        proxy, auth = strip_auth_from_url(raw_url)
        if proxy.scheme in ("https", "wss"):
            client_logger.warning(
                "%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
            )
            continue
        if netrc_obj and auth is None and proxy.host is not None:
            try:
                auth = basicauth_from_netrc(netrc_obj, proxy.host)
            except LookupError:
                auth = None
        result[scheme] = ProxyInfo(proxy, auth)
    return result
|
288 |
+
|
289 |
+
|
290 |
+
def current_task(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> "Optional[asyncio.Task[Any]]":
    """Return the currently running task.

    Thin wrapper around :func:`asyncio.current_task`; the *loop* argument
    is forwarded for backwards compatibility.
    """
    return asyncio.current_task(loop=loop)
|
294 |
+
|
295 |
+
|
296 |
+
def get_running_loop(
    loop: Optional[asyncio.AbstractEventLoop] = None,
) -> asyncio.AbstractEventLoop:
    """Return *loop*, or the current event loop when *loop* is None.

    Emits a DeprecationWarning (and a debug-mode log message) when the
    resolved loop is not running, i.e. the caller is outside an async
    function.
    """
    if loop is None:
        loop = asyncio.get_event_loop()
    if loop.is_running():
        return loop
    warnings.warn(
        "The object should be created within an async function",
        DeprecationWarning,
        stacklevel=3,
    )
    if loop.get_debug():
        internal_logger.warning(
            "The object should be created within an async function", stack_info=True
        )
    return loop
|
312 |
+
|
313 |
+
|
314 |
+
def isasyncgenfunction(obj: Any) -> bool:
    """Return True if *obj* is an async generator function.

    Falls back to False when ``inspect.isasyncgenfunction`` is unavailable.
    """
    checker = getattr(inspect, "isasyncgenfunction", None)
    if checker is None:
        return False
    return checker(obj)  # type: ignore[no-any-return]
|
320 |
+
|
321 |
+
|
322 |
+
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
    """Get a permitted proxy for the given URL from the env."""
    host = url.host
    # Respect no_proxy / platform bypass rules before consulting proxies.
    if host is not None and proxy_bypass(host):
        raise LookupError(f"Proxying is disallowed for `{host!r}`")

    env_proxies = proxies_from_env()
    try:
        proxy_info = env_proxies[url.scheme]
    except KeyError:
        raise LookupError(f"No proxies found for `{url!s}` in the env")
    return proxy_info.proxy, proxy_info.proxy_auth
|
334 |
+
|
335 |
+
|
336 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class MimeType:
    """Parsed, immutable representation of a MIME type string.

    Produced by :func:`parse_mimetype`.
    """

    # Top-level type, e.g. "text" in "text/html".
    type: str
    # Subtype, e.g. "html" in "text/html".
    subtype: str
    # Structured-syntax suffix, e.g. "json" in "application/hal+json".
    suffix: str
    # Read-only parameter mapping, e.g. {"charset": "utf-8"}.
    parameters: "MultiDictProxy[str]"
|
342 |
+
|
343 |
+
|
344 |
+
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
    """Parses a MIME type into its components.

    mimetype is a MIME type string.

    Returns a MimeType object.

    Example:

    >>> parse_mimetype('text/html; charset=utf-8')
    MimeType(type='text', subtype='html', suffix='',
             parameters={'charset': 'utf-8'})

    """
    params: MultiDict[str] = MultiDict()
    if not mimetype:
        return MimeType(
            type="", subtype="", suffix="", parameters=MultiDictProxy(params)
        )

    head, *param_parts = mimetype.split(";")
    for part in param_parts:
        if not part:
            continue
        name, _, value = part.partition("=")
        params.add(name.lower().strip(), value.strip(' "'))

    fulltype = head.strip().lower()
    if fulltype == "*":
        # A bare wildcard means "any type, any subtype".
        fulltype = "*/*"

    mtype, _, stype = fulltype.partition("/")
    stype, _, suffix = stype.partition("+")

    return MimeType(
        type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
    )
|
382 |
+
|
383 |
+
|
384 |
+
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
    """Best-effort file name for *obj* (e.g. a file object), else *default*.

    Pseudo-names such as ``<stdin>`` are rejected.
    """
    name = getattr(obj, "name", None)
    if not name or not isinstance(name, str):
        return default
    if name.startswith("<") or name.endswith(">"):
        return default
    return Path(name).name
|
389 |
+
|
390 |
+
|
391 |
+
# Characters that are NOT RFC 5322 "qtext" and therefore must be
# backslash-escaped inside a quoted-string.
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
# Printable US-ASCII plus TAB: the set of characters permitted inside a
# quoted-string at all.
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}


def quoted_string(content: str) -> str:
    """Return 7-bit content as quoted-string.

    Format content into a quoted-string as defined in RFC5322 for
    Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in usascii or
    a ValueError is raised.
    """
    # Fix: the previous proper-superset test (QCONTENT > set(content))
    # wrongly rejected a string that happened to contain *every* allowed
    # character; subset-or-equal is the intended check.
    if not set(content) <= QCONTENT:
        raise ValueError(f"bad content for quoted-string {content!r}")
    return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
|
406 |
+
|
407 |
+
|
408 |
+
def content_disposition_header(
    disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
    """Sets ``Content-Disposition`` header for MIME.

    This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7579 section 4.2, not the HTTP Content-Disposition from
    RFC 6266.

    disptype is a disposition type: inline, attachment, form-data.
    Should be valid extension token (see RFC 2183)

    quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set to quote_fields to False if recipient
    can take 8-bit file names and field values.

    _charset specifies the charset to use when quote_fields is True.

    params is a dict with disposition params.
    """
    # The disposition type itself must be a valid token (RFC 2183).
    if not disptype or not (TOKEN > set(disptype)):
        raise ValueError("bad content disposition type {!r}" "".format(disptype))

    value = disptype
    if params:
        lparams = []
        for key, val in params.items():
            # Parameter names must also be tokens.
            if not key or not (TOKEN > set(key)):
                raise ValueError(
                    "bad content disposition parameter" " {!r}={!r}".format(key, val)
                )
            if quote_fields:
                if key.lower() == "filename":
                    # filename is always percent-encoded in _charset.
                    qval = quote(val, "", encoding=_charset)
                    lparams.append((key, '"%s"' % qval))
                else:
                    try:
                        # Prefer a plain RFC 5322 quoted-string when the
                        # value is 7-bit clean ...
                        qval = quoted_string(val)
                    except ValueError:
                        # ... otherwise fall back to RFC 2231/5987
                        # extended notation: key*=charset''percent-encoded
                        qval = "".join(
                            (_charset, "''", quote(val, "", encoding=_charset))
                        )
                        lparams.append((key + "*", qval))
                    else:
                        lparams.append((key, '"%s"' % qval))
            else:
                # 8-bit mode: only escape backslash and double quote.
                qval = val.replace("\\", "\\\\").replace('"', '\\"')
                lparams.append((key, '"%s"' % qval))
        sparams = "; ".join("=".join(pair) for pair in lparams)
        value = "; ".join((value, sparams))
    return value
|
459 |
+
|
460 |
+
|
461 |
+
class _TSelf(Protocol, Generic[_T]):
    # Structural type for instances usable with ``reify``: anything that
    # carries a ``_cache`` dict keyed by property name.
    _cache: Dict[str, _T]
|
463 |
+
|
464 |
+
|
465 |
+
class reify(Generic[_T]):
    """Use as a class method decorator.

    It operates almost exactly like
    the Python `@property` decorator, but it puts the result of the
    method it decorates into the instance dict after the first call,
    effectively replacing the function it decorates with an instance
    variable. It is, in Python parlance, a data descriptor.
    """

    def __init__(self, wrapped: Callable[..., _T]) -> None:
        self.wrapped = wrapped
        self.__doc__ = wrapped.__doc__
        self.name = wrapped.__name__

    def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
        try:
            try:
                # Fast path: value already computed and stored in the
                # instance's ``_cache``.
                return inst._cache[self.name]
            except KeyError:
                val = self.wrapped(inst)
                inst._cache[self.name] = val
                return val
        except AttributeError:
            # Accessed on the class itself (inst is None, so ``inst._cache``
            # raised): return the descriptor for introspection.
            if inst is None:
                return self
            raise

    def __set__(self, inst: _TSelf[_T], value: _T) -> None:
        # Defining __set__ makes this a *data* descriptor, so it takes
        # precedence over the instance dict; assignments are rejected.
        raise AttributeError("reified property is read-only")
|
495 |
+
|
496 |
+
|
497 |
+
# Keep the pure-Python implementation importable under a stable name even
# when the C accelerator below replaces ``reify``.
reify_py = reify

try:
    from ._helpers import reify as reify_c

    # Swap in the C implementation unless extensions are disabled.
    if not NO_EXTENSIONS:
        reify = reify_c  # type: ignore[misc,assignment]
except ImportError:
    # Extension module not built/available; the Python version stands.
    pass
|
506 |
+
|
507 |
+
# Dotted-quad IPv4 literal, each octet 0-255.
_ipv4_pattern = (
    r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
# Full IPv6 grammar including ``::`` compression and embedded IPv4 tails.
_ipv6_pattern = (
    r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
    r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
    r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
    r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
    r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
    r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
    r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
    r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
# bytes variants for hosts arriving as raw buffers.
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)


def _is_ip_address(
    regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
    """Match *host* against the str or bytes pattern, chosen by its type."""
    if host is None:
        return False
    if isinstance(host, str):
        return regex.match(host) is not None
    if isinstance(host, (bytes, bytearray, memoryview)):
        return regexb.match(host) is not None
    raise TypeError(f"{host} [{type(host)}] is not a str or bytes")


is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)


def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
    """Return True if *host* is a literal IPv4 or IPv6 address."""
    return is_ipv4_address(host) or is_ipv6_address(host)
|
546 |
+
|
547 |
+
|
548 |
+
# One-second-granularity cache: many responses are generated within the
# same wall-clock second, so the formatted string is reused.
_cached_current_datetime: Optional[int] = None
_cached_formatted_datetime = ""


def rfc822_formatted_time() -> str:
    """Return the current UTC time formatted as an RFC 822 / HTTP date."""
    global _cached_current_datetime
    global _cached_formatted_datetime

    now = int(time.time())
    if now == _cached_current_datetime:
        return _cached_formatted_datetime

    # Weekday and month names for HTTP date/time formatting;
    # always English!  (Tuples are constants stored in the code object.)
    _weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    _monthname = (
        "",  # Dummy so we can use 1-based month numbers
        "Jan", "Feb", "Mar", "Apr", "May", "Jun",
        "Jul", "Aug", "Sep", "Oct", "Nov", "Dec",
    )

    year, month, day, hh, mm, ss, wd, *_ = time.gmtime(now)
    _cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
        _weekdayname[wd],
        day,
        _monthname[month],
        year,
        hh,
        mm,
        ss,
    )
    _cached_current_datetime = now
    return _cached_formatted_datetime
|
590 |
+
|
591 |
+
|
592 |
+
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
|
593 |
+
ref, name = info
|
594 |
+
ob = ref()
|
595 |
+
if ob is not None:
|
596 |
+
with suppress(Exception):
|
597 |
+
getattr(ob, name)()
|
598 |
+
|
599 |
+
|
600 |
+
def weakref_handle(
    ob: object,
    name: str,
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule ``getattr(ob, name)()`` after *timeout* without keeping *ob* alive.

    Returns the timer handle, or None when *timeout* is unset or
    non-positive.
    """
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    if timeout >= timeout_ceil_threshold:
        # Round long deadlines up to whole seconds so timers coalesce.
        when = ceil(when)
    return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
|
614 |
+
|
615 |
+
|
616 |
+
def call_later(
    cb: Callable[[], Any],
    timeout: float,
    loop: asyncio.AbstractEventLoop,
    timeout_ceil_threshold: float = 5,
) -> Optional[asyncio.TimerHandle]:
    """Schedule *cb* after *timeout* seconds on *loop*.

    Returns None for an unset or non-positive timeout.  Timeouts above
    *timeout_ceil_threshold* are rounded up to whole-second deadlines.
    """
    if timeout is None or timeout <= 0:
        return None
    when = loop.time() + timeout
    if timeout > timeout_ceil_threshold:
        when = ceil(when)
    return loop.call_at(when, cb)
|
628 |
+
|
629 |
+
|
630 |
+
class TimeoutHandle:
    """Timeout handle"""

    def __init__(
        self,
        loop: asyncio.AbstractEventLoop,
        timeout: Optional[float],
        ceil_threshold: float = 5,
    ) -> None:
        self._timeout = timeout
        self._loop = loop
        self._ceil_threshold = ceil_threshold
        # Callbacks (fn, args, kwargs) invoked when the timeout fires.
        self._callbacks: List[
            Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]
        ] = []

    def register(
        self, callback: Callable[..., None], *args: Any, **kwargs: Any
    ) -> None:
        # Queue *callback* for invocation when the timeout elapses.
        self._callbacks.append((callback, args, kwargs))

    def close(self) -> None:
        # Disarm: dropping the callbacks makes a later fire a no-op.
        self._callbacks.clear()

    def start(self) -> Optional[asyncio.Handle]:
        # Schedule this handle on the loop; None when no timeout is set.
        timeout = self._timeout
        if timeout is not None and timeout > 0:
            when = self._loop.time() + timeout
            if timeout >= self._ceil_threshold:
                # Round long deadlines up to whole seconds so many timers
                # coalesce onto the same loop wakeup.
                when = ceil(when)
            return self._loop.call_at(when, self.__call__)
        else:
            return None

    def timer(self) -> "BaseTimerContext":
        # Return a timer context bound to this handle; a no-op context
        # when no (positive) timeout is configured.
        if self._timeout is not None and self._timeout > 0:
            timer = TimerContext(self._loop)
            self.register(timer.timeout)
            return timer
        else:
            return TimerNoop()

    def __call__(self) -> None:
        # Fire every registered callback, ignoring individual failures,
        # then disarm so a second fire does nothing.
        for cb, args, kwargs in self._callbacks:
            with suppress(Exception):
                cb(*args, **kwargs)

        self._callbacks.clear()
|
678 |
+
|
679 |
+
|
680 |
+
class BaseTimerContext(ContextManager["BaseTimerContext"]):
    """Base interface for timeout timer contexts."""

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timeout has been exceeded."""
|
683 |
+
|
684 |
+
|
685 |
+
class TimerNoop(BaseTimerContext):
    """Timer context that never times out; used when no timeout is set."""

    def __enter__(self) -> BaseTimerContext:
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> None:
        # Returning None (falsy) lets any in-flight exception propagate.
        return
|
696 |
+
|
697 |
+
|
698 |
+
class TimerContext(BaseTimerContext):
    """Low resolution timeout context manager"""

    def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
        self._loop = loop
        # Tasks currently inside this context; may be entered re-entrantly.
        self._tasks: List[asyncio.Task[Any]] = []
        self._cancelled = False

    def assert_timeout(self) -> None:
        """Raise TimeoutError if timer has already been cancelled."""
        if self._cancelled:
            raise asyncio.TimeoutError from None

    def __enter__(self) -> BaseTimerContext:
        task = current_task(loop=self._loop)

        if task is None:
            raise RuntimeError(
                "Timeout context manager should be used " "inside a task"
            )

        if self._cancelled:
            # The timeout already fired before we entered: fail fast.
            raise asyncio.TimeoutError from None

        self._tasks.append(task)
        return self

    def __exit__(
        self,
        exc_type: Optional[Type[BaseException]],
        exc_val: Optional[BaseException],
        exc_tb: Optional[TracebackType],
    ) -> Optional[bool]:
        if self._tasks:
            self._tasks.pop()

        if exc_type is asyncio.CancelledError and self._cancelled:
            # The cancellation originated from our own timeout(): translate
            # it into TimeoutError for the caller.
            raise asyncio.TimeoutError from None
        return None

    def timeout(self) -> None:
        # Fire the timeout: cancel every task inside the context, once.
        if not self._cancelled:
            for task in set(self._tasks):
                task.cancel()

            self._cancelled = True
|
744 |
+
|
745 |
+
|
746 |
+
def ceil_timeout(
    delay: Optional[float], ceil_threshold: float = 5
) -> async_timeout.Timeout:
    """Return an ``async_timeout`` context expiring after *delay* seconds.

    A None or non-positive delay yields a no-op (infinite) timeout.  Delays
    above *ceil_threshold* get a whole-second deadline so that many
    timeouts coalesce on the same loop wakeup.
    """
    if delay is None or delay <= 0:
        return async_timeout.timeout(None)

    loop = get_running_loop()
    deadline = loop.time() + delay
    if delay > ceil_threshold:
        deadline = ceil(deadline)
    return async_timeout.timeout_at(deadline)
|
758 |
+
|
759 |
+
|
760 |
+
class HeadersMixin:
    """Mixin adding lazily parsed Content-Type/Length accessors over ``_headers``."""

    ATTRS = frozenset(["_content_type", "_content_dict", "_stored_content_type"])

    # Provided by the concrete class mixing this in.
    _headers: MultiMapping[str]

    # Parse cache; revalidated whenever the raw header value changes.
    _content_type: Optional[str] = None
    _content_dict: Optional[Dict[str, str]] = None
    _stored_content_type: Union[str, None, _SENTINEL] = sentinel

    def _parse_content_type(self, raw: Optional[str]) -> None:
        # Parse *raw* and remember it so repeated property access is cheap.
        self._stored_content_type = raw
        if raw is None:
            # default value according to RFC 2616
            self._content_type = "application/octet-stream"
            self._content_dict = {}
        else:
            # Reuse the stdlib email header parser for the media type
            # and its parameters.
            msg = HeaderParser().parsestr("Content-Type: " + raw)
            self._content_type = msg.get_content_type()
            params = msg.get_params(())
            self._content_dict = dict(params[1:])  # First element is content type again

    @property
    def content_type(self) -> str:
        """The value of content part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_type  # type: ignore[return-value]

    @property
    def charset(self) -> Optional[str]:
        """The value of charset part for Content-Type HTTP header."""
        raw = self._headers.get(hdrs.CONTENT_TYPE)
        if self._stored_content_type != raw:
            self._parse_content_type(raw)
        return self._content_dict.get("charset")  # type: ignore[union-attr]

    @property
    def content_length(self) -> Optional[int]:
        """The value of Content-Length HTTP header."""
        content_length = self._headers.get(hdrs.CONTENT_LENGTH)

        if content_length is not None:
            return int(content_length)
        else:
            return None
|
806 |
+
|
807 |
+
|
808 |
+
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
    """Set *result* on *fut*; a no-op when the future is already done."""
    if fut.done():
        return
    fut.set_result(result)
|
811 |
+
|
812 |
+
|
813 |
+
# Sentinel meaning "caller did not supply a cause".
_EXC_SENTINEL = BaseException()


class ErrorableProtocol(Protocol):
    """Structural type: anything exposing a ``set_exception`` method."""

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = ...,
    ) -> None:
        ...  # pragma: no cover


def set_exception(
    fut: "asyncio.Future[_T] | ErrorableProtocol",
    exc: BaseException,
    exc_cause: BaseException = _EXC_SENTINEL,
) -> None:
    """Set future exception.

    If the future is marked as complete, this function is a no-op.

    :param exc_cause: An exception that is a direct cause of ``exc``.
                      Only set if provided.
    """
    if asyncio.isfuture(fut) and fut.done():
        return

    # Attach the cause unless the caller omitted it (sentinel) or the
    # exception would become its own cause.
    if exc_cause is not _EXC_SENTINEL and exc is not exc_cause:
        exc.__cause__ = exc_cause

    fut.set_exception(exc)
|
846 |
+
|
847 |
+
|
848 |
+
@functools.total_ordering
class AppKey(Generic[_T]):
    """Keys for static typing support in Application."""

    __slots__ = ("_name", "_t", "__orig_class__")

    # This may be set by Python when instantiating with a generic type. We need to
    # support this, in order to support types that are not concrete classes,
    # like Iterable, which can't be passed as the second parameter to __init__.
    __orig_class__: Type[object]

    def __init__(self, name: str, t: Optional[Type[_T]] = None):
        # Prefix with module name to help deduplicate key names.
        frame = inspect.currentframe()
        while frame:
            if frame.f_code.co_name == "<module>":
                module: str = frame.f_globals["__name__"]
                break
            frame = frame.f_back
        else:
            # Fix: without this branch, exhausting the stack (or a missing
            # frame when introspection is unavailable) fell through and
            # raised an opaque NameError on ``module`` below.
            raise RuntimeError("Failed to get module name.")

        self._name = module + "." + name
        self._t = t

    def __lt__(self, other: object) -> bool:
        if isinstance(other, AppKey):
            return self._name < other._name
        return True  # Order AppKey above other types.

    def __repr__(self) -> str:
        t = self._t
        if t is None:
            with suppress(AttributeError):
                # Set to type arg.
                t = get_args(self.__orig_class__)[0]

        if t is None:
            t_repr = "<<Unknown>>"
        elif isinstance(t, type):
            if t.__module__ == "builtins":
                t_repr = t.__qualname__
            else:
                t_repr = f"{t.__module__}.{t.__qualname__}"
        else:
            t_repr = repr(t)
        return f"<AppKey({self._name}, type={t_repr})>"
|
893 |
+
|
894 |
+
|
895 |
+
class ChainMapProxy(Mapping[Union[str, AppKey[Any]], Any]):
    """Read-only view over an ordered chain of mappings; first match wins."""

    __slots__ = ("_maps",)

    def __init__(self, maps: Iterable[Mapping[Union[str, AppKey[Any]], Any]]) -> None:
        self._maps = tuple(maps)

    def __init_subclass__(cls) -> None:
        # Deliberately final: subclassing is rejected at class-creation time.
        raise TypeError(
            "Inheritance class {} from ChainMapProxy "
            "is forbidden".format(cls.__name__)
        )

    @overload  # type: ignore[override]
    def __getitem__(self, key: AppKey[_T]) -> _T:
        ...

    @overload
    def __getitem__(self, key: str) -> Any:
        ...

    def __getitem__(self, key: Union[str, AppKey[_T]]) -> Any:
        # Search maps in order; the first one holding the key wins.
        for mapping in self._maps:
            try:
                return mapping[key]
            except KeyError:
                pass
        raise KeyError(key)

    @overload  # type: ignore[override]
    def get(self, key: AppKey[_T], default: _S) -> Union[_T, _S]:
        ...

    @overload
    def get(self, key: AppKey[_T], default: None = ...) -> Optional[_T]:
        ...

    @overload
    def get(self, key: str, default: Any = ...) -> Any:
        ...

    def get(self, key: Union[str, AppKey[_T]], default: Any = None) -> Any:
        try:
            return self[key]
        except KeyError:
            return default

    def __len__(self) -> int:
        # reuses stored hash values if possible
        return len(set().union(*self._maps))

    def __iter__(self) -> Iterator[Union[str, AppKey[Any]]]:
        d: Dict[Union[str, AppKey[Any]], Any] = {}
        for mapping in reversed(self._maps):
            # reuses stored hash values if possible
            d.update(mapping)
        return iter(d)

    def __contains__(self, key: object) -> bool:
        return any(key in m for m in self._maps)

    def __bool__(self) -> bool:
        # Truthy when at least one underlying map is non-empty.
        return any(self._maps)

    def __repr__(self) -> str:
        content = ", ".join(map(repr, self._maps))
        return f"ChainMapProxy({content})"
|
961 |
+
|
962 |
+
|
963 |
+
# https://tools.ietf.org/html/rfc7232#section-2.3
# etagc: printable ASCII except '"', plus obs-text (0x80-0xff).
_ETAGC = r"[!\x23-\x7E\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
# Optional weakness prefix (W/) followed by the quoted opaque tag.
_QUOTED_ETAG = rf'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
# Comma-separated list of quoted etags; the trailing (.) alternative
# captures any character that breaks the list grammar.
LIST_QUOTED_ETAG_RE = re.compile(rf"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")

# Wildcard etag matching any representation (If-Match: *).
ETAG_ANY = "*"
|
971 |
+
|
972 |
+
|
973 |
+
@attr.s(auto_attribs=True, frozen=True, slots=True)
class ETag:
    """A parsed HTTP entity tag (RFC 7232)."""

    # Opaque tag value without the surrounding quotes.
    value: str
    # True for weak validators ('W/"..."').
    is_weak: bool = False
|
977 |
+
|
978 |
+
|
979 |
+
def validate_etag_value(value: str) -> None:
    """Raise ValueError unless *value* is ``*`` or a bare (unquoted) etag."""
    if value == ETAG_ANY:
        return
    if not _ETAGC_RE.fullmatch(value):
        raise ValueError(
            f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
        )
|
984 |
+
|
985 |
+
|
986 |
+
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
    """Process a date string, return a datetime object"""
    if date_str is None:
        return None
    timetuple = parsedate(date_str)
    if timetuple is None:
        return None
    with suppress(ValueError):
        # Out-of-range components (e.g. year 0) count as unparseable.
        return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
    return None
|
994 |
+
|
995 |
+
|
996 |
+
def must_be_empty_body(method: str, code: int) -> bool:
    """Check if a request must return an empty body."""
    if status_code_must_be_empty_body(code):
        return True
    if method_must_be_empty_body(method):
        return True
    # A successful CONNECT switches to tunnelling: no message body follows.
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT
|
1003 |
+
|
1004 |
+
|
1005 |
+
def method_must_be_empty_body(method: str) -> bool:
    """Check if a method must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.2
    # Only HEAD responses are body-less purely because of the method.
    return method.upper() == hdrs.METH_HEAD
|
1010 |
+
|
1011 |
+
|
1012 |
+
def status_code_must_be_empty_body(code: int) -> bool:
    """Check if a status code must return an empty body."""
    # https://datatracker.ietf.org/doc/html/rfc9112#section-6.3-2.1
    if code in {204, 304}:
        return True
    return 100 <= code < 200
|
1016 |
+
|
1017 |
+
|
1018 |
+
def should_remove_content_length(method: str, code: int) -> bool:
    """Check if a Content-Length header should be removed.

    This should always be a subset of must_be_empty_body
    """
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-8.6-8
    # https://www.rfc-editor.org/rfc/rfc9110.html#section-15.4.5-4
    if code in {204, 304} or 100 <= code < 200:
        return True
    return 200 <= code < 300 and method.upper() == hdrs.METH_CONNECT
|
llmeval-env/lib/python3.10/site-packages/aiohttp/http.py
ADDED
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import sys
|
2 |
+
from http import HTTPStatus
|
3 |
+
from typing import Mapping, Tuple
|
4 |
+
|
5 |
+
from . import __version__
|
6 |
+
from .http_exceptions import HttpProcessingError as HttpProcessingError
|
7 |
+
from .http_parser import (
|
8 |
+
HeadersParser as HeadersParser,
|
9 |
+
HttpParser as HttpParser,
|
10 |
+
HttpRequestParser as HttpRequestParser,
|
11 |
+
HttpResponseParser as HttpResponseParser,
|
12 |
+
RawRequestMessage as RawRequestMessage,
|
13 |
+
RawResponseMessage as RawResponseMessage,
|
14 |
+
)
|
15 |
+
from .http_websocket import (
|
16 |
+
WS_CLOSED_MESSAGE as WS_CLOSED_MESSAGE,
|
17 |
+
WS_CLOSING_MESSAGE as WS_CLOSING_MESSAGE,
|
18 |
+
WS_KEY as WS_KEY,
|
19 |
+
WebSocketError as WebSocketError,
|
20 |
+
WebSocketReader as WebSocketReader,
|
21 |
+
WebSocketWriter as WebSocketWriter,
|
22 |
+
WSCloseCode as WSCloseCode,
|
23 |
+
WSMessage as WSMessage,
|
24 |
+
WSMsgType as WSMsgType,
|
25 |
+
ws_ext_gen as ws_ext_gen,
|
26 |
+
ws_ext_parse as ws_ext_parse,
|
27 |
+
)
|
28 |
+
from .http_writer import (
|
29 |
+
HttpVersion as HttpVersion,
|
30 |
+
HttpVersion10 as HttpVersion10,
|
31 |
+
HttpVersion11 as HttpVersion11,
|
32 |
+
StreamWriter as StreamWriter,
|
33 |
+
)
|
34 |
+
|
35 |
+
__all__ = (
|
36 |
+
"HttpProcessingError",
|
37 |
+
"RESPONSES",
|
38 |
+
"SERVER_SOFTWARE",
|
39 |
+
# .http_writer
|
40 |
+
"StreamWriter",
|
41 |
+
"HttpVersion",
|
42 |
+
"HttpVersion10",
|
43 |
+
"HttpVersion11",
|
44 |
+
# .http_parser
|
45 |
+
"HeadersParser",
|
46 |
+
"HttpParser",
|
47 |
+
"HttpRequestParser",
|
48 |
+
"HttpResponseParser",
|
49 |
+
"RawRequestMessage",
|
50 |
+
"RawResponseMessage",
|
51 |
+
# .http_websocket
|
52 |
+
"WS_CLOSED_MESSAGE",
|
53 |
+
"WS_CLOSING_MESSAGE",
|
54 |
+
"WS_KEY",
|
55 |
+
"WebSocketReader",
|
56 |
+
"WebSocketWriter",
|
57 |
+
"ws_ext_gen",
|
58 |
+
"ws_ext_parse",
|
59 |
+
"WSMessage",
|
60 |
+
"WebSocketError",
|
61 |
+
"WSMsgType",
|
62 |
+
"WSCloseCode",
|
63 |
+
)
|
64 |
+
|
65 |
+
|
66 |
+
# e.g. "Python/3.10 aiohttp/<version>" — built from the interpreter version
# and the package version.
SERVER_SOFTWARE: str = "Python/{0[0]}.{0[1]} aiohttp/{1}".format(
    sys.version_info, __version__
)

# Status code -> (reason phrase, description), built from http.HTTPStatus.
RESPONSES: Mapping[int, Tuple[str, str]] = {
    v: (v.phrase, v.description) for v in HTTPStatus.__members__.values()
}
|
llmeval-env/lib/python3.10/site-packages/aiohttp/http_exceptions.py
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Low-level http related exceptions."""
|
2 |
+
|
3 |
+
|
4 |
+
from textwrap import indent
|
5 |
+
from typing import Optional, Union
|
6 |
+
|
7 |
+
from .typedefs import _CIMultiDict
|
8 |
+
|
9 |
+
__all__ = ("HttpProcessingError",)
|
10 |
+
|
11 |
+
|
12 |
+
class HttpProcessingError(Exception):
    """HTTP error.

    Shortcut for raising HTTP errors with custom code, message and headers.

    code: HTTP Error code.
    message: (optional) Error message.
    headers: (optional) Headers to be sent in response, a list of pairs
    """

    # Class-level defaults; subclasses override code/message.
    code = 0
    message = ""
    headers = None

    def __init__(
        self,
        *,
        code: Optional[int] = None,
        message: str = "",
        headers: Optional[_CIMultiDict] = None,
    ) -> None:
        if code is not None:
            self.code = code
        self.headers = headers
        self.message = message

    def __str__(self) -> str:
        # Indent the (possibly multi-line) message under the status code.
        msg = indent(self.message, "  ")
        return f"{self.code}, message:\n{msg}"

    def __repr__(self) -> str:
        return f"<{self.__class__.__name__}: {self.code}, message={self.message!r}>"
|
44 |
+
|
45 |
+
|
46 |
+
class BadHttpMessage(HttpProcessingError):
    """Base for 400-level message parsing errors; takes a positional message."""

    code = 400
    message = "Bad Request"

    def __init__(self, message: str, *, headers: Optional[_CIMultiDict] = None) -> None:
        super().__init__(message=message, headers=headers)
        # Keep args populated so pickling/repr of the exception behaves.
        self.args = (message,)
|
54 |
+
|
55 |
+
|
56 |
+
class HttpBadRequest(BadHttpMessage):
    """400 Bad Request processing error."""

    code = 400
    message = "Bad Request"
|
60 |
+
|
61 |
+
|
62 |
+
class PayloadEncodingError(BadHttpMessage):
    """Base class for payload errors"""


class ContentEncodingError(PayloadEncodingError):
    """Content encoding error."""


class TransferEncodingError(PayloadEncodingError):
    """Transfer encoding error."""


class ContentLengthError(PayloadEncodingError):
    """Not enough data to satisfy the Content-Length header."""
|
76 |
+
|
77 |
+
|
78 |
+
class LineTooLong(BadHttpMessage):
    """A line (header, status, chunk size, ...) exceeded its size limit."""

    def __init__(
        self, line: str, limit: str = "Unknown", actual_size: str = "Unknown"
    ) -> None:
        super().__init__(
            f"Got more than {limit} bytes ({actual_size}) when reading {line}."
        )
        # Preserve the structured fields for repr/pickling.
        self.args = (line, limit, actual_size)
|
86 |
+
|
87 |
+
|
88 |
+
class InvalidHeader(BadHttpMessage):
    """A malformed HTTP header was encountered."""

    def __init__(self, hdr: Union[bytes, str]) -> None:
        # Store a decoded copy on .hdr; the original value stays in args.
        hdr_s = hdr.decode(errors="backslashreplace") if isinstance(hdr, bytes) else hdr
        super().__init__(f"Invalid HTTP header: {hdr!r}")
        self.hdr = hdr_s
        self.args = (hdr,)
|
94 |
+
|
95 |
+
|
96 |
+
class BadStatusLine(BadHttpMessage):
    """The HTTP status/request line could not be parsed."""

    def __init__(self, line: str = "", error: Optional[str] = None) -> None:
        if not isinstance(line, str):
            # NOTE(review): guards against non-str input — presumably bytes
            # from the wire; confirm against callers.
            line = repr(line)
        super().__init__(error or f"Bad status line {line!r}")
        self.args = (line,)
        self.line = line
|
103 |
+
|
104 |
+
|
105 |
+
class InvalidURLError(BadHttpMessage):
    """The request target/URL failed validation."""

    pass
|
llmeval-env/lib/python3.10/site-packages/aiohttp/http_parser.py
ADDED
@@ -0,0 +1,1041 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import abc
|
2 |
+
import asyncio
|
3 |
+
import re
|
4 |
+
import string
|
5 |
+
from contextlib import suppress
|
6 |
+
from enum import IntEnum
|
7 |
+
from typing import (
|
8 |
+
Any,
|
9 |
+
ClassVar,
|
10 |
+
Final,
|
11 |
+
Generic,
|
12 |
+
List,
|
13 |
+
Literal,
|
14 |
+
NamedTuple,
|
15 |
+
Optional,
|
16 |
+
Pattern,
|
17 |
+
Set,
|
18 |
+
Tuple,
|
19 |
+
Type,
|
20 |
+
TypeVar,
|
21 |
+
Union,
|
22 |
+
)
|
23 |
+
|
24 |
+
from multidict import CIMultiDict, CIMultiDictProxy, istr
|
25 |
+
from yarl import URL
|
26 |
+
|
27 |
+
from . import hdrs
|
28 |
+
from .base_protocol import BaseProtocol
|
29 |
+
from .compression_utils import HAS_BROTLI, BrotliDecompressor, ZLibDecompressor
|
30 |
+
from .helpers import (
|
31 |
+
_EXC_SENTINEL,
|
32 |
+
DEBUG,
|
33 |
+
NO_EXTENSIONS,
|
34 |
+
BaseTimerContext,
|
35 |
+
method_must_be_empty_body,
|
36 |
+
set_exception,
|
37 |
+
status_code_must_be_empty_body,
|
38 |
+
)
|
39 |
+
from .http_exceptions import (
|
40 |
+
BadHttpMessage,
|
41 |
+
BadStatusLine,
|
42 |
+
ContentEncodingError,
|
43 |
+
ContentLengthError,
|
44 |
+
InvalidHeader,
|
45 |
+
InvalidURLError,
|
46 |
+
LineTooLong,
|
47 |
+
TransferEncodingError,
|
48 |
+
)
|
49 |
+
from .http_writer import HttpVersion, HttpVersion10
|
50 |
+
from .log import internal_logger
|
51 |
+
from .streams import EMPTY_PAYLOAD, StreamReader
|
52 |
+
from .typedefs import RawHeaders
|
53 |
+
|
54 |
+
# Public API of this module.
__all__ = (
    "HeadersParser",
    "HttpParser",
    "HttpRequestParser",
    "HttpResponseParser",
    "RawRequestMessage",
    "RawResponseMessage",
)

# Line separator accepted by feed_data(): strict CRLF or lax bare LF.
_SEP = Literal[b"\r\n", b"\n"]

ASCIISET: Final[Set[str]] = set(string.printable)

# See https://www.rfc-editor.org/rfc/rfc9110.html#name-overview
# and https://www.rfc-editor.org/rfc/rfc9110.html#name-tokens
#
# method = token
# tchar = "!" / "#" / "$" / "%" / "&" / "'" / "*" / "+" / "-" / "." /
#         "^" / "_" / "`" / "|" / "~" / DIGIT / ALPHA
# token = 1*tchar
_TCHAR_SPECIALS: Final[str] = re.escape("!#$%&'*+-.^_`|~")
# Matches an RFC 9110 token (method names, header field names).
TOKENRE: Final[Pattern[str]] = re.compile(f"[0-9A-Za-z{_TCHAR_SPECIALS}]+")
# Matches an HTTP-version string, capturing major and minor digits.
VERSRE: Final[Pattern[str]] = re.compile(r"HTTP/(\d)\.(\d)", re.ASCII)
# ASCII-only digit run (Content-Length, status codes — no sign, no padding).
DIGITS: Final[Pattern[str]] = re.compile(r"\d+", re.ASCII)
# Hex digits for chunked transfer-encoding chunk sizes.
HEXDIGITS: Final[Pattern[bytes]] = re.compile(rb"[0-9a-fA-F]+")
|
79 |
+
|
80 |
+
|
81 |
+
class RawRequestMessage(NamedTuple):
    """Parsed request line and headers produced by HttpRequestParser."""

    method: str
    path: str  # request target exactly as received
    version: HttpVersion
    headers: "CIMultiDictProxy[str]"
    raw_headers: RawHeaders  # (bname, bvalue) byte pairs, order preserved
    should_close: bool
    compression: Optional[str]  # "gzip" / "deflate" / "br" or None
    upgrade: bool
    chunked: bool
    url: URL  # parsed form of `path`
|
92 |
+
|
93 |
+
|
94 |
+
class RawResponseMessage(NamedTuple):
    """Parsed status line and headers produced by HttpResponseParser."""

    version: HttpVersion
    code: int  # three-digit status code
    reason: str  # reason phrase, may be empty
    headers: CIMultiDictProxy[str]
    raw_headers: RawHeaders  # (bname, bvalue) byte pairs, order preserved
    should_close: bool
    compression: Optional[str]  # "gzip" / "deflate" / "br" or None
    upgrade: bool
    chunked: bool
|
104 |
+
|
105 |
+
|
106 |
+
# Message type produced by a concrete parser (request or response).
_MsgT = TypeVar("_MsgT", RawRequestMessage, RawResponseMessage)
|
107 |
+
|
108 |
+
|
109 |
+
class ParseState(IntEnum):
    """How the payload body of the current message is delimited."""

    PARSE_NONE = 0
    PARSE_LENGTH = 1
    PARSE_CHUNKED = 2
    PARSE_UNTIL_EOF = 3
|
115 |
+
|
116 |
+
|
117 |
+
class ChunkState(IntEnum):
    """Sub-states of the chunked transfer-encoding parser."""

    PARSE_CHUNKED_SIZE = 0
    PARSE_CHUNKED_CHUNK = 1
    PARSE_CHUNKED_CHUNK_EOF = 2
    PARSE_MAYBE_TRAILERS = 3
    PARSE_TRAILERS = 4
|
123 |
+
|
124 |
+
|
125 |
+
class HeadersParser:
    """Parse a list of raw header lines into a CIMultiDict.

    Expects ``lines[0]`` to be the request/status line (it is skipped) and
    the remaining entries to be header lines terminated by an empty line.
    """

    def __init__(
        self,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        lax: bool = False,
    ) -> None:
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        # Lax mode relaxes some checks (e.g. allows obs-fold continuations).
        self._lax = lax

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple["CIMultiDictProxy[str]", RawHeaders]:
        """Return (headers, raw_headers) parsed from *lines*.

        Raises InvalidHeader on malformed names/values and LineTooLong when
        a name or (folded) value exceeds ``max_field_size``.
        """
        headers: CIMultiDict[str] = CIMultiDict()
        # note: "raw" does not mean inclusion of OWS before/after the field value
        raw_headers = []

        # lines[0] is the start line; header lines begin at index 1.
        lines_idx = 1
        line = lines[1]
        line_count = len(lines)

        while line:
            # Parse initial header name : value pair.
            try:
                bname, bvalue = line.split(b":", 1)
            except ValueError:
                raise InvalidHeader(line) from None

            if len(bname) == 0:
                raise InvalidHeader(bname)

            # https://www.rfc-editor.org/rfc/rfc9112.html#section-5.1-2
            if {bname[0], bname[-1]} & {32, 9}:  # {" ", "\t"}
                raise InvalidHeader(line)

            bvalue = bvalue.lstrip(b" \t")
            if len(bname) > self.max_field_size:
                raise LineTooLong(
                    "request header name {}".format(
                        bname.decode("utf8", "backslashreplace")
                    ),
                    str(self.max_field_size),
                    str(len(bname)),
                )
            name = bname.decode("utf-8", "surrogateescape")
            # Header names must be RFC 9110 tokens.
            if not TOKENRE.fullmatch(name):
                raise InvalidHeader(bname)

            header_length = len(bvalue)

            # next line
            lines_idx += 1
            line = lines[lines_idx]

            # consume continuation lines (only honored in lax mode)
            continuation = self._lax and line and line[0] in (32, 9)  # (' ', '\t')

            # Deprecated: https://www.rfc-editor.org/rfc/rfc9112.html#name-obsolete-line-folding
            if continuation:
                bvalue_lst = [bvalue]
                while continuation:
                    header_length += len(line)
                    if header_length > self.max_field_size:
                        raise LineTooLong(
                            "request header field {}".format(
                                bname.decode("utf8", "backslashreplace")
                            ),
                            str(self.max_field_size),
                            str(header_length),
                        )
                    bvalue_lst.append(line)

                    # next line
                    lines_idx += 1
                    if lines_idx < line_count:
                        line = lines[lines_idx]
                        if line:
                            continuation = line[0] in (32, 9)  # (' ', '\t')
                    else:
                        line = b""
                        break
                bvalue = b"".join(bvalue_lst)
            else:
                if header_length > self.max_field_size:
                    raise LineTooLong(
                        "request header field {}".format(
                            bname.decode("utf8", "backslashreplace")
                        ),
                        str(self.max_field_size),
                        str(header_length),
                    )

            bvalue = bvalue.strip(b" \t")
            value = bvalue.decode("utf-8", "surrogateescape")

            # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-5
            # Reject CR/LF/NUL in the decoded value (response-splitting guard).
            if "\n" in value or "\r" in value or "\x00" in value:
                raise InvalidHeader(bvalue)

            headers.add(name, value)
            raw_headers.append((bname, bvalue))

        return (CIMultiDictProxy(headers), tuple(raw_headers))
|
231 |
+
|
232 |
+
|
233 |
+
def _is_supported_upgrade(headers: CIMultiDictProxy[str]) -> bool:
    """Return True when the Upgrade header names a protocol we can handle."""
    upgrade_value = headers.get(hdrs.UPGRADE, "")
    return upgrade_value.lower() in ("tcp", "websocket")
|
236 |
+
|
237 |
+
|
238 |
+
class HttpParser(abc.ABC, Generic[_MsgT]):
    """Incremental HTTP/1.x message parser (base for request/response).

    feed_data() accumulates bytes, splits off complete start-line+headers
    sections, delegates header parsing to HeadersParser and body parsing to
    HttpPayloadParser, and returns completed (message, payload) pairs.
    """

    # Lax parsing is only enabled by the response subclass.
    lax: ClassVar[bool] = False

    def __init__(
        self,
        protocol: Optional[BaseProtocol] = None,
        loop: Optional[asyncio.AbstractEventLoop] = None,
        limit: int = 2**16,
        max_line_size: int = 8190,
        max_headers: int = 32768,
        max_field_size: int = 8190,
        timer: Optional[BaseTimerContext] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        payload_exception: Optional[Type[BaseException]] = None,
        response_with_body: bool = True,
        read_until_eof: bool = False,
        auto_decompress: bool = True,
    ) -> None:
        self.protocol = protocol
        self.loop = loop
        self.max_line_size = max_line_size
        self.max_headers = max_headers
        self.max_field_size = max_field_size
        self.timer = timer
        self.code = code
        self.method = method
        self.readall = readall
        self.payload_exception = payload_exception
        self.response_with_body = response_with_body
        self.read_until_eof = read_until_eof

        # Header lines of the message currently being assembled.
        self._lines: List[bytes] = []
        # Bytes received after the last complete separator, kept for next feed.
        self._tail = b""
        self._upgraded = False
        self._payload = None
        # Active body parser, or None while reading start line / headers.
        self._payload_parser: Optional[HttpPayloadParser] = None
        self._auto_decompress = auto_decompress
        self._limit = limit
        self._headers_parser = HeadersParser(
            max_line_size, max_headers, max_field_size, self.lax
        )

    @abc.abstractmethod
    def parse_message(self, lines: List[bytes]) -> _MsgT:
        """Parse start line + header lines into a message tuple."""
        pass

    def feed_eof(self) -> Optional[_MsgT]:
        """Signal end of stream; may return a partially-delimited message."""
        if self._payload_parser is not None:
            self._payload_parser.feed_eof()
            self._payload_parser = None
        else:
            # try to extract partial message
            if self._tail:
                self._lines.append(self._tail)

            if self._lines:
                # NOTE(review): bytes-vs-str comparison — this condition is
                # always True, so a terminating b"" is always appended.
                # Presumably intended as b"\r\n"; verify before changing.
                if self._lines[-1] != "\r\n":
                    self._lines.append(b"")
                with suppress(Exception):
                    return self.parse_message(self._lines)
        return None

    def feed_data(
        self,
        data: bytes,
        SEP: _SEP = b"\r\n",
        EMPTY: bytes = b"",
        CONTENT_LENGTH: istr = hdrs.CONTENT_LENGTH,
        METH_CONNECT: str = hdrs.METH_CONNECT,
        SEC_WEBSOCKET_KEY1: istr = hdrs.SEC_WEBSOCKET_KEY1,
    ) -> Tuple[List[Tuple[_MsgT, StreamReader]], bool, bytes]:
        """Feed bytes in; return (messages, upgraded, unprocessed_tail)."""

        messages = []

        # Prepend bytes left over from the previous call.
        if self._tail:
            data, self._tail = self._tail + data, b""

        data_len = len(data)
        start_pos = 0
        loop = self.loop

        while start_pos < data_len:

            # read HTTP message (request/response line + headers), \r\n\r\n
            # and split by lines
            if self._payload_parser is None and not self._upgraded:
                pos = data.find(SEP, start_pos)
                # consume \r\n
                if pos == start_pos and not self._lines:
                    start_pos = pos + len(SEP)
                    continue

                if pos >= start_pos:
                    # line found
                    line = data[start_pos:pos]
                    if SEP == b"\n":  # For lax response parsing
                        line = line.rstrip(b"\r")
                    self._lines.append(line)
                    start_pos = pos + len(SEP)

                    # \r\n\r\n found — a full header section is buffered
                    if self._lines[-1] == EMPTY:
                        try:
                            msg: _MsgT = self.parse_message(self._lines)
                        finally:
                            # Always reset, even when parse_message raises.
                            self._lines.clear()

                        def get_content_length() -> Optional[int]:
                            # payload length
                            length_hdr = msg.headers.get(CONTENT_LENGTH)
                            if length_hdr is None:
                                return None

                            # Shouldn't allow +/- or other number formats.
                            # https://www.rfc-editor.org/rfc/rfc9110#section-8.6-2
                            # msg.headers is already stripped of leading/trailing wsp
                            if not DIGITS.fullmatch(length_hdr):
                                raise InvalidHeader(CONTENT_LENGTH)

                            return int(length_hdr)

                        length = get_content_length()
                        # do not support old websocket spec
                        if SEC_WEBSOCKET_KEY1 in msg.headers:
                            raise InvalidHeader(SEC_WEBSOCKET_KEY1)

                        self._upgraded = msg.upgrade and _is_supported_upgrade(
                            msg.headers
                        )

                        method = getattr(msg, "method", self.method)
                        # code is only present on responses
                        code = getattr(msg, "code", 0)

                        assert self.protocol is not None
                        # calculate payload
                        empty_body = status_code_must_be_empty_body(code) or bool(
                            method and method_must_be_empty_body(method)
                        )
                        if not empty_body and (
                            ((length is not None and length > 0) or msg.chunked)
                            and not self._upgraded
                        ):
                            # Normal body: delimited by Content-Length or chunked.
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                readall=self.readall,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        elif method == METH_CONNECT:
                            # CONNECT tunnels: treat the rest as an upgrade.
                            assert isinstance(msg, RawRequestMessage)
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            self._upgraded = True
                            self._payload_parser = HttpPayloadParser(
                                payload,
                                method=msg.method,
                                compression=msg.compression,
                                readall=True,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                        elif not empty_body and length is None and self.read_until_eof:
                            # No framing info: body runs until connection close.
                            payload = StreamReader(
                                self.protocol,
                                timer=self.timer,
                                loop=loop,
                                limit=self._limit,
                            )
                            payload_parser = HttpPayloadParser(
                                payload,
                                length=length,
                                chunked=msg.chunked,
                                method=method,
                                compression=msg.compression,
                                code=self.code,
                                readall=True,
                                response_with_body=self.response_with_body,
                                auto_decompress=self._auto_decompress,
                                lax=self.lax,
                            )
                            if not payload_parser.done:
                                self._payload_parser = payload_parser
                        else:
                            payload = EMPTY_PAYLOAD

                        messages.append((msg, payload))
                else:
                    # Incomplete line: stash the remainder for the next feed.
                    self._tail = data[start_pos:]
                    data = EMPTY
                    break

            # no parser, just store
            elif self._payload_parser is None and self._upgraded:
                assert not self._lines
                break

            # feed payload
            elif data and start_pos < data_len:
                assert not self._lines
                assert self._payload_parser is not None
                try:
                    eof, data = self._payload_parser.feed_data(data[start_pos:], SEP)
                except BaseException as underlying_exc:
                    reraised_exc = underlying_exc
                    if self.payload_exception is not None:
                        reraised_exc = self.payload_exception(str(underlying_exc))

                    set_exception(
                        self._payload_parser.payload,
                        reraised_exc,
                        underlying_exc,
                    )

                    # Treat the payload as finished; discard remaining bytes.
                    eof = True
                    data = b""

                if eof:
                    start_pos = 0
                    data_len = len(data)
                    self._payload_parser = None
                    continue
                else:
                    break

        if data and start_pos < data_len:
            data = data[start_pos:]
        else:
            data = EMPTY

        return messages, self._upgraded, data

    def parse_headers(
        self, lines: List[bytes]
    ) -> Tuple[
        "CIMultiDictProxy[str]", RawHeaders, Optional[bool], Optional[str], bool, bool
    ]:
        """Parses RFC 5322 headers from a stream.

        Line continuations are supported. Returns list of header name
        and value pairs. Header name is in upper case.
        """
        headers, raw_headers = self._headers_parser.parse_headers(lines)
        close_conn = None
        encoding = None
        upgrade = False
        chunked = False

        # https://www.rfc-editor.org/rfc/rfc9110.html#section-5.5-6
        # https://www.rfc-editor.org/rfc/rfc9110.html#name-collected-abnf
        # Headers that must not appear more than once.
        singletons = (
            hdrs.CONTENT_LENGTH,
            hdrs.CONTENT_LOCATION,
            hdrs.CONTENT_RANGE,
            hdrs.CONTENT_TYPE,
            hdrs.ETAG,
            hdrs.HOST,
            hdrs.MAX_FORWARDS,
            hdrs.SERVER,
            hdrs.TRANSFER_ENCODING,
            hdrs.USER_AGENT,
        )
        bad_hdr = next((h for h in singletons if len(headers.getall(h, ())) > 1), None)
        if bad_hdr is not None:
            raise BadHttpMessage(f"Duplicate '{bad_hdr}' header found.")

        # keep-alive
        conn = headers.get(hdrs.CONNECTION)
        if conn:
            v = conn.lower()
            if v == "close":
                close_conn = True
            elif v == "keep-alive":
                close_conn = False
            # https://www.rfc-editor.org/rfc/rfc9110.html#name-101-switching-protocols
            elif v == "upgrade" and headers.get(hdrs.UPGRADE):
                upgrade = True

        # encoding
        enc = headers.get(hdrs.CONTENT_ENCODING)
        if enc:
            enc = enc.lower()
            if enc in ("gzip", "deflate", "br"):
                encoding = enc

        # chunking
        te = headers.get(hdrs.TRANSFER_ENCODING)
        if te is not None:
            if "chunked" == te.lower():
                chunked = True
            else:
                raise BadHttpMessage("Request has invalid `Transfer-Encoding`")

            # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.2 —
            # conflicting framing is a smuggling vector; reject outright.
            if hdrs.CONTENT_LENGTH in headers:
                raise BadHttpMessage(
                    "Transfer-Encoding can't be present with Content-Length",
                )

        return (headers, raw_headers, close_conn, encoding, upgrade, chunked)

    def set_upgraded(self, val: bool) -> None:
        """Set connection upgraded (to websocket) mode.

        :param bool val: new state.
        """
        self._upgraded = val
|
564 |
+
|
565 |
+
|
566 |
+
class HttpRequestParser(HttpParser[RawRequestMessage]):
    """Read request status line.

    Exception .http_exceptions.BadStatusLine
    could be raised in case of any errors in status line.
    Returns RawRequestMessage.
    """

    def parse_message(self, lines: List[bytes]) -> RawRequestMessage:
        # request line: "METHOD target HTTP/x.y"
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            method, path, version = line.split(" ", maxsplit=2)
        except ValueError:
            raise BadStatusLine(line) from None

        if len(path) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(path))
            )

        # method must be an RFC 9110 token
        if not TOKENRE.fullmatch(method):
            raise BadStatusLine(method)

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # Dispatch on the four request-target forms of RFC 7230 §5.3.
        if method == "CONNECT":
            # authority-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.3
            url = URL.build(authority=path, encoded=True)
        elif path.startswith("/"):
            # origin-form,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.1
            path_part, _hash_separator, url_fragment = path.partition("#")
            path_part, _question_mark_separator, qs_part = path_part.partition("?")

            # NOTE: `yarl.URL.build()` is used to mimic what the Cython-based
            # NOTE: parser does, otherwise it results into the same
            # NOTE: HTTP Request-Line input producing different
            # NOTE: `yarl.URL()` objects
            url = URL.build(
                path=path_part,
                query_string=qs_part,
                fragment=url_fragment,
                encoded=True,
            )
        elif path == "*" and method == "OPTIONS":
            # asterisk-form,
            url = URL(path, encoded=True)
        else:
            # absolute-form for proxy maybe,
            # https://datatracker.ietf.org/doc/html/rfc7230#section-5.3.2
            url = URL(path, encoded=True)
            if url.scheme == "":
                # not absolute-form
                raise InvalidURLError(
                    path.encode(errors="surrogateescape").decode("latin1")
                )

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:  # then the headers weren't set in the request
            if version_o <= HttpVersion10:  # HTTP 1.0 defaults to close
                close = True
            else:  # HTTP 1.1 defaults to keep-alive
                close = False

        return RawRequestMessage(
            method,
            path,
            version_o,
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
            url,
        )
|
658 |
+
|
659 |
+
|
660 |
+
class HttpResponseParser(HttpParser[RawResponseMessage]):
    """Read response status line and headers.

    BadStatusLine could be raised in case of any errors in status line.
    Returns RawResponseMessage.
    """

    # Lax mode should only be enabled on response parser.
    lax = not DEBUG

    def feed_data(
        self,
        data: bytes,
        SEP: Optional[_SEP] = None,
        *args: Any,
        **kwargs: Any,
    ) -> Tuple[List[Tuple[RawResponseMessage, StreamReader]], bool, bytes]:
        # Default the separator from the lax/debug setting when not given.
        if SEP is None:
            SEP = b"\r\n" if DEBUG else b"\n"
        return super().feed_data(data, SEP, *args, **kwargs)

    def parse_message(self, lines: List[bytes]) -> RawResponseMessage:
        # status line: "HTTP/x.y CODE [reason]"
        line = lines[0].decode("utf-8", "surrogateescape")
        try:
            version, status = line.split(maxsplit=1)
        except ValueError:
            raise BadStatusLine(line) from None

        try:
            status, reason = status.split(maxsplit=1)
        except ValueError:
            # Reason phrase is optional.
            status = status.strip()
            reason = ""

        if len(reason) > self.max_line_size:
            raise LineTooLong(
                "Status line is too long", str(self.max_line_size), str(len(reason))
            )

        # version
        match = VERSRE.fullmatch(version)
        if match is None:
            raise BadStatusLine(line)
        version_o = HttpVersion(int(match.group(1)), int(match.group(2)))

        # The status code is a three-digit ASCII number, no padding
        if len(status) != 3 or not DIGITS.fullmatch(status):
            raise BadStatusLine(line)
        status_i = int(status)

        # read headers
        (
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        ) = self.parse_headers(lines)

        if close is None:
            if version_o <= HttpVersion10:
                close = True
            # https://www.rfc-editor.org/rfc/rfc9112.html#name-message-body-length
            elif 100 <= status_i < 200 or status_i in {204, 304}:
                close = False
            elif hdrs.CONTENT_LENGTH in headers or hdrs.TRANSFER_ENCODING in headers:
                close = False
            else:
                # https://www.rfc-editor.org/rfc/rfc9112.html#section-6.3-2.8
                close = True

        return RawResponseMessage(
            version_o,
            status_i,
            reason.strip(),
            headers,
            raw_headers,
            close,
            compression,
            upgrade,
            chunked,
        )
|
743 |
+
|
744 |
+
|
745 |
+
class HttpPayloadParser:
|
746 |
+
    def __init__(
        self,
        payload: StreamReader,
        length: Optional[int] = None,
        chunked: bool = False,
        compression: Optional[str] = None,
        code: Optional[int] = None,
        method: Optional[str] = None,
        readall: bool = False,
        response_with_body: bool = True,
        auto_decompress: bool = True,
        lax: bool = False,
    ) -> None:
        """Select the body-framing strategy and set up the output stream.

        Exactly one ParseState is chosen from the framing inputs
        (chunked / length / readall); ``done`` is True when no body
        bytes are expected at all.
        """
        self._length = 0
        self._type = ParseState.PARSE_NONE
        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
        self._chunk_size = 0
        self._chunk_tail = b""
        self._auto_decompress = auto_decompress
        self._lax = lax
        self.done = False

        # payload decompression wrapper
        if response_with_body and compression and self._auto_decompress:
            real_payload: Union[StreamReader, DeflateBuffer] = DeflateBuffer(
                payload, compression
            )
        else:
            real_payload = payload

        # payload parser
        if not response_with_body:
            # don't parse payload if it's not expected to be received
            self._type = ParseState.PARSE_NONE
            real_payload.feed_eof()
            self.done = True

        elif chunked:
            self._type = ParseState.PARSE_CHUNKED
        elif length is not None:
            self._type = ParseState.PARSE_LENGTH
            self._length = length
            if self._length == 0:
                # Content-Length: 0 — body is complete immediately.
                real_payload.feed_eof()
                self.done = True
        else:
            if readall and code != 204:
                self._type = ParseState.PARSE_UNTIL_EOF
            elif method in ("PUT", "POST"):
                internal_logger.warning(  # pragma: no cover
                    "Content-Length or Transfer-Encoding header is required"
                )
                self._type = ParseState.PARSE_NONE
                real_payload.feed_eof()
                self.done = True

        self.payload = real_payload
|
803 |
+
|
804 |
+
    def feed_eof(self) -> None:
        """Handle end-of-stream.

        Until-EOF bodies complete normally; length- or chunk-delimited
        bodies that are still pending raise, since the peer closed early.
        """
        if self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_eof()
        elif self._type == ParseState.PARSE_LENGTH:
            raise ContentLengthError(
                "Not enough data for satisfy content length header."
            )
        elif self._type == ParseState.PARSE_CHUNKED:
            raise TransferEncodingError(
                "Not enough data for satisfy transfer length header."
            )
|
815 |
+
|
816 |
+
    def feed_data(
        self, chunk: bytes, SEP: _SEP = b"\r\n", CHUNK_EXT: bytes = b";"
    ) -> Tuple[bool, bytes]:
        """Feed body bytes into the wrapped payload stream.

        Returns ``(done, tail)``: *done* is True once the entire payload has
        been consumed, and *tail* holds any leftover bytes that belong to the
        next message pipelined on the same connection.

        ``SEP`` is the line separator (``b"\\r\\n"``, or ``b"\\n"`` in lax
        mode) and ``CHUNK_EXT`` marks the start of chunk extensions.
        """
        # Read specified amount of bytes
        if self._type == ParseState.PARSE_LENGTH:
            required = self._length
            chunk_len = len(chunk)

            if required >= chunk_len:
                # Whole chunk belongs to this payload; count it off the total.
                self._length = required - chunk_len
                self.payload.feed_data(chunk, chunk_len)
                if self._length == 0:
                    self.payload.feed_eof()
                    return True, b""
            else:
                # Chunk spills past the payload; the excess is the next message.
                self._length = 0
                self.payload.feed_data(chunk[:required], required)
                self.payload.feed_eof()
                return True, chunk[required:]

        # Chunked transfer encoding parser
        elif self._type == ParseState.PARSE_CHUNKED:
            if self._chunk_tail:
                # Prepend bytes left incomplete by the previous feed_data call.
                chunk = self._chunk_tail + chunk
                self._chunk_tail = b""

            while chunk:

                # read next chunk size
                if self._chunk == ChunkState.PARSE_CHUNKED_SIZE:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        i = chunk.find(CHUNK_EXT, 0, pos)
                        if i >= 0:
                            size_b = chunk[:i]  # strip chunk-extensions
                        else:
                            size_b = chunk[:pos]

                        if self._lax:  # Allow whitespace in lax mode.
                            size_b = size_b.strip()

                        if not re.fullmatch(HEXDIGITS, size_b):
                            # Reject non-hex size lines (request smuggling defense).
                            exc = TransferEncodingError(
                                chunk[:pos].decode("ascii", "surrogateescape")
                            )
                            set_exception(self.payload, exc)
                            raise exc
                        size = int(bytes(size_b), 16)

                        chunk = chunk[pos + len(SEP) :]
                        if size == 0:  # eof marker
                            self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                            if self._lax and chunk.startswith(b"\r"):
                                chunk = chunk[1:]
                        else:
                            self._chunk = ChunkState.PARSE_CHUNKED_CHUNK
                            self._chunk_size = size
                            self.payload.begin_http_chunk_receiving()
                    else:
                        # Size line not complete yet; stash and wait for more data.
                        self._chunk_tail = chunk
                        return False, b""

                # read chunk and feed buffer
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK:
                    required = self._chunk_size
                    chunk_len = len(chunk)

                    if required > chunk_len:
                        self._chunk_size = required - chunk_len
                        self.payload.feed_data(chunk, chunk_len)
                        return False, b""
                    else:
                        self._chunk_size = 0
                        self.payload.feed_data(chunk[:required], required)
                        chunk = chunk[required:]
                        if self._lax and chunk.startswith(b"\r"):
                            chunk = chunk[1:]
                        self._chunk = ChunkState.PARSE_CHUNKED_CHUNK_EOF
                        self.payload.end_http_chunk_receiving()

                # toss the CRLF at the end of the chunk
                if self._chunk == ChunkState.PARSE_CHUNKED_CHUNK_EOF:
                    if chunk[: len(SEP)] == SEP:
                        chunk = chunk[len(SEP) :]
                        self._chunk = ChunkState.PARSE_CHUNKED_SIZE
                    else:
                        self._chunk_tail = chunk
                        return False, b""

                # if stream does not contain trailer, after 0\r\n
                # we should get another \r\n otherwise
                # trailers needs to be skipped until \r\n\r\n
                if self._chunk == ChunkState.PARSE_MAYBE_TRAILERS:
                    head = chunk[: len(SEP)]
                    if head == SEP:
                        # end of stream
                        self.payload.feed_eof()
                        return True, chunk[len(SEP) :]
                    # Both CR and LF, or only LF may not be received yet. It is
                    # expected that CRLF or LF will be shown at the very first
                    # byte next time, otherwise trailers should come. The last
                    # CRLF which marks the end of response might not be
                    # contained in the same TCP segment which delivered the
                    # size indicator.
                    if not head:
                        return False, b""
                    if head == SEP[:1]:
                        self._chunk_tail = head
                        return False, b""
                    self._chunk = ChunkState.PARSE_TRAILERS

                # read and discard trailer up to the CRLF terminator
                if self._chunk == ChunkState.PARSE_TRAILERS:
                    pos = chunk.find(SEP)
                    if pos >= 0:
                        chunk = chunk[pos + len(SEP) :]
                        self._chunk = ChunkState.PARSE_MAYBE_TRAILERS
                    else:
                        self._chunk_tail = chunk
                        return False, b""

        # Read all bytes until eof
        elif self._type == ParseState.PARSE_UNTIL_EOF:
            self.payload.feed_data(chunk, len(chunk))

        return False, b""
|
942 |
+
|
943 |
+
|
944 |
+
class DeflateBuffer:
    """DeflateStream decompress stream and feed data into specified stream."""

    decompressor: Any

    def __init__(self, out: StreamReader, encoding: Optional[str]) -> None:
        # out: downstream reader that receives the decompressed bytes.
        # encoding: Content-Encoding token ("deflate", "gzip", "br", ...).
        self.out = out
        self.size = 0  # total compressed bytes accepted so far
        self.encoding = encoding
        self._started_decoding = False

        self.decompressor: Union[BrotliDecompressor, ZLibDecompressor]
        if encoding == "br":
            if not HAS_BROTLI:  # pragma: no cover
                raise ContentEncodingError(
                    "Can not decode content-encoding: brotli (br). "
                    "Please install `Brotli`"
                )
            self.decompressor = BrotliDecompressor()
        else:
            self.decompressor = ZLibDecompressor(encoding=encoding)

    def set_exception(
        self,
        exc: BaseException,
        exc_cause: BaseException = _EXC_SENTINEL,
    ) -> None:
        """Propagate *exc* (with optional cause) to the wrapped output stream."""
        set_exception(self.out, exc, exc_cause)

    def feed_data(self, chunk: bytes, size: int) -> None:
        """Decompress *chunk* and forward the result to the output stream.

        Raises ContentEncodingError when the compressed data is invalid.
        """
        if not size:
            return

        self.size += size

        # RFC1950
        # bits 0..3 = CM = 0b1000 = 8 = "deflate"
        # bits 4..7 = CINFO = 1..7 = windows size.
        if (
            not self._started_decoding
            and self.encoding == "deflate"
            and chunk[0] & 0xF != 8
        ):
            # Change the decoder to decompress incorrectly compressed data
            # Actually we should issue a warning about non-RFC-compliant data.
            self.decompressor = ZLibDecompressor(
                encoding=self.encoding, suppress_deflate_header=True
            )

        try:
            chunk = self.decompressor.decompress_sync(chunk)
        except Exception:
            raise ContentEncodingError(
                "Can not decode content-encoding: %s" % self.encoding
            )

        self._started_decoding = True

        if chunk:
            self.out.feed_data(chunk, len(chunk))

    def feed_eof(self) -> None:
        """Flush the decompressor and close the output stream.

        Raises ContentEncodingError for a truncated deflate stream.
        """
        chunk = self.decompressor.flush()

        if chunk or self.size > 0:
            self.out.feed_data(chunk, len(chunk))
            if self.encoding == "deflate" and not self.decompressor.eof:
                raise ContentEncodingError("deflate")

        self.out.feed_eof()

    def begin_http_chunk_receiving(self) -> None:
        # Delegate chunk framing notifications straight through.
        self.out.begin_http_chunk_receiving()

    def end_http_chunk_receiving(self) -> None:
        self.out.end_http_chunk_receiving()
|
1020 |
+
|
1021 |
+
|
1022 |
+
# Keep explicit references to the pure-Python implementations before the
# optional C-extension import below may rebind the public names.
HttpRequestParserPy = HttpRequestParser
HttpResponseParserPy = HttpResponseParser
RawRequestMessagePy = RawRequestMessage
RawResponseMessagePy = RawResponseMessage

try:
    if not NO_EXTENSIONS:
        # Prefer the accelerated C parser when extensions are enabled.
        from ._http_parser import (  # type: ignore[import-not-found,no-redef]
            HttpRequestParser,
            HttpResponseParser,
            RawRequestMessage,
            RawResponseMessage,
        )

        # Also expose the C variants under explicit *C names for tests.
        HttpRequestParserC = HttpRequestParser
        HttpResponseParserC = HttpResponseParser
        RawRequestMessageC = RawRequestMessage
        RawResponseMessageC = RawResponseMessage
except ImportError:  # pragma: no cover
    pass
|
llmeval-env/lib/python3.10/site-packages/aiohttp/http_websocket.py
ADDED
@@ -0,0 +1,740 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""WebSocket protocol versions 13 and 8."""
|
2 |
+
|
3 |
+
import asyncio
|
4 |
+
import functools
|
5 |
+
import json
|
6 |
+
import random
|
7 |
+
import re
|
8 |
+
import sys
|
9 |
+
import zlib
|
10 |
+
from enum import IntEnum
|
11 |
+
from struct import Struct
|
12 |
+
from typing import (
|
13 |
+
Any,
|
14 |
+
Callable,
|
15 |
+
Final,
|
16 |
+
List,
|
17 |
+
NamedTuple,
|
18 |
+
Optional,
|
19 |
+
Pattern,
|
20 |
+
Set,
|
21 |
+
Tuple,
|
22 |
+
Union,
|
23 |
+
cast,
|
24 |
+
)
|
25 |
+
|
26 |
+
from .base_protocol import BaseProtocol
|
27 |
+
from .compression_utils import ZLibCompressor, ZLibDecompressor
|
28 |
+
from .helpers import NO_EXTENSIONS, set_exception
|
29 |
+
from .streams import DataQueue
|
30 |
+
|
31 |
+
__all__ = (
|
32 |
+
"WS_CLOSED_MESSAGE",
|
33 |
+
"WS_CLOSING_MESSAGE",
|
34 |
+
"WS_KEY",
|
35 |
+
"WebSocketReader",
|
36 |
+
"WebSocketWriter",
|
37 |
+
"WSMessage",
|
38 |
+
"WebSocketError",
|
39 |
+
"WSMsgType",
|
40 |
+
"WSCloseCode",
|
41 |
+
)
|
42 |
+
|
43 |
+
|
44 |
+
class WSCloseCode(IntEnum):
    """WebSocket close status codes (RFC 6455 section 7.4 plus registered values)."""

    OK = 1000
    GOING_AWAY = 1001
    PROTOCOL_ERROR = 1002
    UNSUPPORTED_DATA = 1003
    ABNORMAL_CLOSURE = 1006
    INVALID_TEXT = 1007
    POLICY_VIOLATION = 1008
    MESSAGE_TOO_BIG = 1009
    MANDATORY_EXTENSION = 1010
    INTERNAL_ERROR = 1011
    SERVICE_RESTART = 1012
    TRY_AGAIN_LATER = 1013
    BAD_GATEWAY = 1014
|
58 |
+
|
59 |
+
|
60 |
+
# Close codes the parser accepts below 3000; anything else in that range
# is treated as a protocol error.
ALLOWED_CLOSE_CODES: Final[Set[int]] = {int(i) for i in WSCloseCode}

# For websockets, keeping latency low is extremely important as implementations
# generally expect to be able to send and receive messages quickly.  We use a
# larger chunk size than the default to reduce the number of executor calls
# since the executor is a significant source of latency and overhead when
# the chunks are small. A size of 5KiB was chosen because it is also the
# same value python-zlib-ng choose to use as the threshold to release the GIL.

WEBSOCKET_MAX_SYNC_CHUNK_SIZE = 5 * 1024
|
70 |
+
|
71 |
+
|
72 |
+
class WSMsgType(IntEnum):
    """WebSocket frame opcodes plus aiohttp-internal pseudo message types."""

    # websocket spec types
    CONTINUATION = 0x0
    TEXT = 0x1
    BINARY = 0x2
    PING = 0x9
    PONG = 0xA
    CLOSE = 0x8

    # aiohttp specific types
    CLOSING = 0x100
    CLOSED = 0x101
    ERROR = 0x102

    # Lowercase aliases kept for backwards compatibility.
    text = TEXT
    binary = BINARY
    ping = PING
    pong = PONG
    close = CLOSE
    closing = CLOSING
    closed = CLOSED
    error = ERROR
|
94 |
+
|
95 |
+
|
96 |
+
# Magic GUID appended to Sec-WebSocket-Key during the opening handshake
# (RFC 6455 section 1.3).
WS_KEY: Final[bytes] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11"


# Pre-compiled struct helpers for the 16-bit / 64-bit extended payload
# lengths and the 2-byte close code (all network byte order).
UNPACK_LEN2 = Struct("!H").unpack_from
UNPACK_LEN3 = Struct("!Q").unpack_from
UNPACK_CLOSE_CODE = Struct("!H").unpack
PACK_LEN1 = Struct("!BB").pack
PACK_LEN2 = Struct("!BBH").pack
PACK_LEN3 = Struct("!BBQ").pack
PACK_CLOSE_CODE = Struct("!H").pack
MSG_SIZE: Final[int] = 2**14
DEFAULT_LIMIT: Final[int] = 2**16
|
108 |
+
|
109 |
+
|
110 |
+
class WSMessage(NamedTuple):
    """A single message received from (or synthesized for) a WebSocket."""

    type: WSMsgType
    # To type correctly, this would need some kind of tagged union for each type.
    data: Any
    extra: Optional[str]

    def json(self, *, loads: Callable[[Any], Any] = json.loads) -> Any:
        """Return parsed JSON data.

        .. versionadded:: 0.22
        """
        return loads(self.data)
|
122 |
+
|
123 |
+
|
124 |
+
# Shared sentinel messages delivered when the connection is closed / closing.
WS_CLOSED_MESSAGE = WSMessage(WSMsgType.CLOSED, None, None)
WS_CLOSING_MESSAGE = WSMessage(WSMsgType.CLOSING, None, None)
|
126 |
+
|
127 |
+
|
128 |
+
class WebSocketError(Exception):
    """Raised when the WebSocket frame parser hits a protocol violation.

    Carries the RFC 6455 close ``code`` alongside a human-readable message;
    ``str()`` of the instance yields just the message text.
    """

    def __init__(self, code: int, message: str) -> None:
        super().__init__(code, message)
        self.code = code

    def __str__(self) -> str:
        # args holds (code, message); render only the message.
        _, message = self.args
        return cast(str, message)
|
137 |
+
|
138 |
+
|
139 |
+
class WSHandshakeError(Exception):
    """WebSocket protocol handshake error."""


# Host byte order; kept for the optional C masking extension.
native_byteorder: Final[str] = sys.byteorder
|
144 |
+
|
145 |
+
|
146 |
+
# Used by _websocket_mask_python
|
147 |
+
@functools.lru_cache
|
148 |
+
def _xor_table() -> List[bytes]:
|
149 |
+
return [bytes(a ^ b for a in range(256)) for b in range(256)]
|
150 |
+
|
151 |
+
|
152 |
+
def _websocket_mask_python(mask: bytes, data: bytearray) -> None:
|
153 |
+
"""Websocket masking function.
|
154 |
+
|
155 |
+
`mask` is a `bytes` object of length 4; `data` is a `bytearray`
|
156 |
+
object of any length. The contents of `data` are masked with `mask`,
|
157 |
+
as specified in section 5.3 of RFC 6455.
|
158 |
+
|
159 |
+
Note that this function mutates the `data` argument.
|
160 |
+
|
161 |
+
This pure-python implementation may be replaced by an optimized
|
162 |
+
version when available.
|
163 |
+
|
164 |
+
"""
|
165 |
+
assert isinstance(data, bytearray), data
|
166 |
+
assert len(mask) == 4, mask
|
167 |
+
|
168 |
+
if data:
|
169 |
+
_XOR_TABLE = _xor_table()
|
170 |
+
a, b, c, d = (_XOR_TABLE[n] for n in mask)
|
171 |
+
data[::4] = data[::4].translate(a)
|
172 |
+
data[1::4] = data[1::4].translate(b)
|
173 |
+
data[2::4] = data[2::4].translate(c)
|
174 |
+
data[3::4] = data[3::4].translate(d)
|
175 |
+
|
176 |
+
|
177 |
+
# Select the frame-masking implementation: prefer the optional Cython
# accelerator unless extensions are disabled or unavailable.
if NO_EXTENSIONS:  # pragma: no cover
    _websocket_mask = _websocket_mask_python
else:
    try:
        from ._websocket import _websocket_mask_cython  # type: ignore[import-not-found]

        _websocket_mask = _websocket_mask_cython
    except ImportError:  # pragma: no cover
        _websocket_mask = _websocket_mask_python
|
186 |
+
|
187 |
+
# Trailing 4 bytes zlib emits on a sync flush; stripped from outgoing and
# re-appended to incoming permessage-deflate payloads (RFC 7692).
_WS_DEFLATE_TRAILING: Final[bytes] = bytes([0x00, 0x00, 0xFF, 0xFF])


# Validates one permessage-deflate parameter list; groups 1/2 capture the
# no-context-takeover flags, groups 3-6 the max_window_bits parameters.
_WS_EXT_RE: Final[Pattern[str]] = re.compile(
    r"^(?:;\s*(?:"
    r"(server_no_context_takeover)|"
    r"(client_no_context_takeover)|"
    r"(server_max_window_bits(?:=(\d+))?)|"
    r"(client_max_window_bits(?:=(\d+))?)))*$"
)

# Splits a Sec-WebSocket-Extensions header into permessage-deflate offers.
_WS_EXT_RE_SPLIT: Final[Pattern[str]] = re.compile(r"permessage-deflate([^,]+)?")
|
199 |
+
|
200 |
+
|
201 |
+
def ws_ext_parse(extstr: Optional[str], isserver: bool = False) -> Tuple[int, bool]:
    """Parse a Sec-WebSocket-Extensions header for permessage-deflate.

    Returns ``(compress, notakeover)`` where *compress* is the negotiated
    window-bits value (0 means compression disabled) and *notakeover* is
    True when context takeover is disallowed for this side.

    Raises:
        WSHandshakeError: on an invalid window size (client side) or an
            unsupported deflate parameter list on the client side.
    """
    if not extstr:
        return 0, False

    compress = 0
    notakeover = False
    for ext in _WS_EXT_RE_SPLIT.finditer(extstr):
        defext = ext.group(1)
        # Return compress = 15 when get `permessage-deflate`
        if not defext:
            compress = 15
            break
        match = _WS_EXT_RE.match(defext)
        if match:
            compress = 15
            if isserver:
                # Server never fail to detect compress handshake.
                # Server does not need to send max wbit to client
                if match.group(4):
                    compress = int(match.group(4))
                    # Group3 must match if group4 matches
                    # Compress wbit 8 does not support in zlib
                    # If compress level not support,
                    # CONTINUE to next extension
                    if compress > 15 or compress < 9:
                        compress = 0
                        continue
                if match.group(1):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
            else:
                if match.group(6):
                    compress = int(match.group(6))
                    # Group5 must match if group6 matches
                    # Compress wbit 8 does not support in zlib
                    # If compress level not support,
                    # FAIL the parse progress
                    if compress > 15 or compress < 9:
                        raise WSHandshakeError("Invalid window size")
                if match.group(2):
                    notakeover = True
                # Ignore regex group 5 & 6 for client_max_window_bits
                break
        # Return Fail if client side and not match
        elif not isserver:
            raise WSHandshakeError("Extension for deflate not supported" + ext.group(1))

    return compress, notakeover
|
250 |
+
|
251 |
+
|
252 |
+
def ws_ext_gen(
    compress: int = 15, isserver: bool = False, server_notakeover: bool = False
) -> str:
    """Build the Sec-WebSocket-Extensions value offering permessage-deflate.

    ``compress`` is the server window-bits (9..15); 15 is the zlib default
    and therefore omitted from the header.  Clients additionally advertise
    ``client_max_window_bits``.  ``server_notakeover`` adds the
    ``server_no_context_takeover`` parameter.

    Raises:
        ValueError: if ``compress`` is outside the 9..15 range zlib supports.
    """
    # client_notakeover=False not used for server
    # compress wbit 8 does not support in zlib
    if not 9 <= compress <= 15:
        raise ValueError(
            "Compress wbits must between 9 and 15, zlib does not support wbits=8"
        )
    parts = ["permessage-deflate"]
    if not isserver:
        parts.append("client_max_window_bits")
    if compress < 15:
        parts.append(f"server_max_window_bits={compress}")
    if server_notakeover:
        parts.append("server_no_context_takeover")
    # if client_notakeover:
    #     parts.append('client_no_context_takeover')
    return "; ".join(parts)
|
272 |
+
|
273 |
+
|
274 |
+
class WSParserState(IntEnum):
    """States of the incremental frame parser in WebSocketReader.parse_frame."""

    READ_HEADER = 1
    READ_PAYLOAD_LENGTH = 2
    READ_PAYLOAD_MASK = 3
    READ_PAYLOAD = 4
|
280 |
+
|
281 |
+
class WebSocketReader:
    """Incremental WebSocket frame parser.

    Feeds complete messages (WSMessage) into *queue* as raw bytes arrive,
    handling fragmentation, masking, permessage-deflate decompression and
    protocol validation per RFC 6455.
    """

    def __init__(
        self, queue: DataQueue[WSMessage], max_msg_size: int, compress: bool = True
    ) -> None:
        # queue: receives parsed WSMessage objects.
        # max_msg_size: 0 disables the limit; otherwise messages at or above
        #   this size raise MESSAGE_TOO_BIG.
        # compress: whether permessage-deflate (rsv1) frames are acceptable.
        self.queue = queue
        self._max_msg_size = max_msg_size

        self._exc: Optional[BaseException] = None
        self._partial = bytearray()  # accumulated fragmented-message payload
        self._state = WSParserState.READ_HEADER

        self._opcode: Optional[int] = None  # opcode of an in-progress fragmented msg
        self._frame_fin = False
        self._frame_opcode: Optional[int] = None
        self._frame_payload = bytearray()

        self._tail = b""  # bytes not yet forming a complete parse step
        self._has_mask = False
        self._frame_mask: Optional[bytes] = None
        self._payload_length = 0
        self._payload_length_flag = 0
        self._compressed: Optional[bool] = None
        self._decompressobj: Optional[ZLibDecompressor] = None
        self._compress = compress

    def feed_eof(self) -> None:
        """Signal end of stream to the message queue."""
        self.queue.feed_eof()

    def feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        """Feed raw socket bytes; returns ``(stop, unparsed_tail)``.

        After the first parse error all further data is rejected and the
        error is propagated to the queue.
        """
        if self._exc:
            return True, data

        try:
            return self._feed_data(data)
        except Exception as exc:
            self._exc = exc
            set_exception(self.queue, exc)
            return True, b""

    def _feed_data(self, data: bytes) -> Tuple[bool, bytes]:
        # Parse as many complete frames as possible, then assemble them
        # into messages and push onto the queue.
        for fin, opcode, payload, compressed in self.parse_frame(data):
            if compressed and not self._decompressobj:
                self._decompressobj = ZLibDecompressor(suppress_deflate_header=True)
            if opcode == WSMsgType.CLOSE:
                if len(payload) >= 2:
                    close_code = UNPACK_CLOSE_CODE(payload[:2])[0]
                    if close_code < 3000 and close_code not in ALLOWED_CLOSE_CODES:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            f"Invalid close code: {close_code}",
                        )
                    try:
                        close_message = payload[2:].decode("utf-8")
                    except UnicodeDecodeError as exc:
                        raise WebSocketError(
                            WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                        ) from exc
                    msg = WSMessage(WSMsgType.CLOSE, close_code, close_message)
                elif payload:
                    # A 1-byte close payload is illegal: code needs 2 bytes.
                    raise WebSocketError(
                        WSCloseCode.PROTOCOL_ERROR,
                        f"Invalid close frame: {fin} {opcode} {payload!r}",
                    )
                else:
                    msg = WSMessage(WSMsgType.CLOSE, 0, "")

                self.queue.feed_data(msg, 0)

            elif opcode == WSMsgType.PING:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PING, payload, ""), len(payload)
                )

            elif opcode == WSMsgType.PONG:
                self.queue.feed_data(
                    WSMessage(WSMsgType.PONG, payload, ""), len(payload)
                )

            elif (
                opcode not in (WSMsgType.TEXT, WSMsgType.BINARY)
                and self._opcode is None
            ):
                raise WebSocketError(
                    WSCloseCode.PROTOCOL_ERROR, f"Unexpected opcode={opcode!r}"
                )
            else:
                # load text/binary
                if not fin:
                    # got partial frame payload
                    if opcode != WSMsgType.CONTINUATION:
                        self._opcode = opcode
                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )
                else:
                    # previous frame was non finished
                    # we should get continuation opcode
                    if self._partial:
                        if opcode != WSMsgType.CONTINUATION:
                            raise WebSocketError(
                                WSCloseCode.PROTOCOL_ERROR,
                                "The opcode in non-fin frame is expected "
                                "to be zero, got {!r}".format(opcode),
                            )

                    if opcode == WSMsgType.CONTINUATION:
                        assert self._opcode is not None
                        opcode = self._opcode
                        self._opcode = None

                    self._partial.extend(payload)
                    if self._max_msg_size and len(self._partial) >= self._max_msg_size:
                        raise WebSocketError(
                            WSCloseCode.MESSAGE_TOO_BIG,
                            "Message size {} exceeds limit {}".format(
                                len(self._partial), self._max_msg_size
                            ),
                        )

                    # Decompress process must to be done after all packets
                    # received.
                    if compressed:
                        assert self._decompressobj is not None
                        self._partial.extend(_WS_DEFLATE_TRAILING)
                        payload_merged = self._decompressobj.decompress_sync(
                            self._partial, self._max_msg_size
                        )
                        if self._decompressobj.unconsumed_tail:
                            left = len(self._decompressobj.unconsumed_tail)
                            raise WebSocketError(
                                WSCloseCode.MESSAGE_TOO_BIG,
                                "Decompressed message size {} exceeds limit {}".format(
                                    self._max_msg_size + left, self._max_msg_size
                                ),
                            )
                    else:
                        payload_merged = bytes(self._partial)

                    self._partial.clear()

                    if opcode == WSMsgType.TEXT:
                        try:
                            text = payload_merged.decode("utf-8")
                            self.queue.feed_data(
                                WSMessage(WSMsgType.TEXT, text, ""), len(text)
                            )
                        except UnicodeDecodeError as exc:
                            raise WebSocketError(
                                WSCloseCode.INVALID_TEXT, "Invalid UTF-8 text message"
                            ) from exc
                    else:
                        self.queue.feed_data(
                            WSMessage(WSMsgType.BINARY, payload_merged, ""),
                            len(payload_merged),
                        )

        return False, b""

    def parse_frame(
        self, buf: bytes
    ) -> List[Tuple[bool, Optional[int], bytearray, Optional[bool]]]:
        """Return the next frame from the socket."""
        frames = []
        if self._tail:
            # Resume with bytes left incomplete by the previous call.
            buf, self._tail = self._tail + buf, b""

        start_pos = 0
        buf_length = len(buf)

        while True:
            # read header
            if self._state == WSParserState.READ_HEADER:
                if buf_length - start_pos >= 2:
                    data = buf[start_pos : start_pos + 2]
                    start_pos += 2
                    first_byte, second_byte = data

                    fin = (first_byte >> 7) & 1
                    rsv1 = (first_byte >> 6) & 1
                    rsv2 = (first_byte >> 5) & 1
                    rsv3 = (first_byte >> 4) & 1
                    opcode = first_byte & 0xF

                    # frame-fin = %x0 ; more frames of this message follow
                    # / %x1 ; final frame of this message
                    # frame-rsv1 = %x0 ;
                    # 1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv2 = %x0 ;
                    # 1 bit, MUST be 0 unless negotiated otherwise
                    # frame-rsv3 = %x0 ;
                    # 1 bit, MUST be 0 unless negotiated otherwise
                    #
                    # Remove rsv1 from this test for deflate development
                    if rsv2 or rsv3 or (rsv1 and not self._compress):
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    if opcode > 0x7 and fin == 0:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received fragmented control frame",
                        )

                    has_mask = (second_byte >> 7) & 1
                    length = second_byte & 0x7F

                    # Control frames MUST have a payload
                    # length of 125 bytes or less
                    if opcode > 0x7 and length > 125:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Control frame payload cannot be " "larger than 125 bytes",
                        )

                    # Set compress status if last package is FIN
                    # OR set compress status if this is first fragment
                    # Raise error if not first fragment with rsv1 = 0x1
                    if self._frame_fin or self._compressed is None:
                        self._compressed = True if rsv1 else False
                    elif rsv1:
                        raise WebSocketError(
                            WSCloseCode.PROTOCOL_ERROR,
                            "Received frame with non-zero reserved bits",
                        )

                    self._frame_fin = bool(fin)
                    self._frame_opcode = opcode
                    self._has_mask = bool(has_mask)
                    self._payload_length_flag = length
                    self._state = WSParserState.READ_PAYLOAD_LENGTH
                else:
                    break

            # read payload length
            if self._state == WSParserState.READ_PAYLOAD_LENGTH:
                length = self._payload_length_flag
                if length == 126:
                    # 16-bit extended payload length follows.
                    if buf_length - start_pos >= 2:
                        data = buf[start_pos : start_pos + 2]
                        start_pos += 2
                        length = UNPACK_LEN2(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                elif length > 126:
                    # 64-bit extended payload length follows.
                    if buf_length - start_pos >= 8:
                        data = buf[start_pos : start_pos + 8]
                        start_pos += 8
                        length = UNPACK_LEN3(data)[0]
                        self._payload_length = length
                        self._state = (
                            WSParserState.READ_PAYLOAD_MASK
                            if self._has_mask
                            else WSParserState.READ_PAYLOAD
                        )
                    else:
                        break
                else:
                    self._payload_length = length
                    self._state = (
                        WSParserState.READ_PAYLOAD_MASK
                        if self._has_mask
                        else WSParserState.READ_PAYLOAD
                    )

            # read payload mask
            if self._state == WSParserState.READ_PAYLOAD_MASK:
                if buf_length - start_pos >= 4:
                    self._frame_mask = buf[start_pos : start_pos + 4]
                    start_pos += 4
                    self._state = WSParserState.READ_PAYLOAD
                else:
                    break

            if self._state == WSParserState.READ_PAYLOAD:
                length = self._payload_length
                payload = self._frame_payload

                chunk_len = buf_length - start_pos
                if length >= chunk_len:
                    # Frame continues past this buffer; take what we have.
                    self._payload_length = length - chunk_len
                    payload.extend(buf[start_pos:])
                    start_pos = buf_length
                else:
                    self._payload_length = 0
                    payload.extend(buf[start_pos : start_pos + length])
                    start_pos = start_pos + length

                if self._payload_length == 0:
                    if self._has_mask:
                        assert self._frame_mask is not None
                        _websocket_mask(self._frame_mask, payload)

                    frames.append(
                        (self._frame_fin, self._frame_opcode, payload, self._compressed)
                    )

                    self._frame_payload = bytearray()
                    self._state = WSParserState.READ_HEADER
                else:
                    break

        self._tail = buf[start_pos:]

        return frames
|
598 |
+
|
599 |
+
|
600 |
+
class WebSocketWriter:
    """Serialize and send WebSocket frames (RFC 6455) over a transport.

    Handles optional client-side masking, optional permessage-deflate
    compression (RSV1 bit), and backpressure via the protocol's drain
    helper once the amount written exceeds ``limit``.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        transport: asyncio.Transport,
        *,
        use_mask: bool = False,
        limit: int = DEFAULT_LIMIT,
        random: random.Random = random.Random(),
        compress: int = 0,
        notakeover: bool = False,
    ) -> None:
        # NOTE(review): the default Random() instance is created once and
        # shared by all writers that don't pass their own; only randrange is
        # used (mask generation), so sharing is harmless — confirm intended.
        self.protocol = protocol
        self.transport = transport
        # True for client connections: RFC 6455 requires client frames masked.
        self.use_mask = use_mask
        self.randrange = random.randrange
        # zlib window size for permessage-deflate; 0 disables compression.
        self.compress = compress
        # If True, use Z_FULL_FLUSH so no compression context carries over
        # between messages ("no context takeover").
        self.notakeover = notakeover
        self._closing = False
        # Byte threshold after which _send_frame awaits a drain.
        self._limit = limit
        self._output_size = 0
        self._compressobj: Any = None  # actually compressobj

    async def _send_frame(
        self, message: bytes, opcode: int, compress: Optional[int] = None
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        # Allow CLOSE frames through even while closing; reject everything else.
        if self._closing and not (opcode & WSMsgType.CLOSE):
            raise ConnectionResetError("Cannot write to closing transport")

        rsv = 0

        # Only compress larger packets (disabled)
        # Does small packet needs to be compressed?
        # if self.compress and opcode < 8 and len(message) > 124:
        # opcode < 8 means data frames only (control frames are never compressed).
        if (compress or self.compress) and opcode < 8:
            if compress:
                # Do not set self._compress if compressing is for this frame
                compressobj = self._make_compress_obj(compress)
            else:  # self.compress
                # Lazily create the shared (per-connection) compressor.
                if not self._compressobj:
                    self._compressobj = self._make_compress_obj(self.compress)
                compressobj = self._compressobj

            message = await compressobj.compress(message)
            # Its critical that we do not return control to the event
            # loop until we have finished sending all the compressed
            # data. Otherwise we could end up mixing compressed frames
            # if there are multiple coroutines compressing data.
            message += compressobj.flush(
                zlib.Z_FULL_FLUSH if self.notakeover else zlib.Z_SYNC_FLUSH
            )
            # permessage-deflate strips the trailing 0x00 0x00 0xff 0xff
            # emitted by a sync flush before sending.
            if message.endswith(_WS_DEFLATE_TRAILING):
                message = message[:-4]
            # Set RSV1 to signal a compressed frame.
            rsv = rsv | 0x40

        msg_length = len(message)

        use_mask = self.use_mask
        if use_mask:
            mask_bit = 0x80
        else:
            mask_bit = 0

        # Header layout per RFC 6455: 7-bit length, or 126 + 16-bit length,
        # or 127 + 64-bit length.  0x80 in the first byte is FIN.
        if msg_length < 126:
            header = PACK_LEN1(0x80 | rsv | opcode, msg_length | mask_bit)
        elif msg_length < (1 << 16):
            header = PACK_LEN2(0x80 | rsv | opcode, 126 | mask_bit, msg_length)
        else:
            header = PACK_LEN3(0x80 | rsv | opcode, 127 | mask_bit, msg_length)
        if use_mask:
            # Non-cryptographic randomness; masking exists to defeat proxy
            # cache poisoning, not for confidentiality.
            mask_int = self.randrange(0, 0xFFFFFFFF)
            mask = mask_int.to_bytes(4, "big")
            message = bytearray(message)
            _websocket_mask(mask, message)
            self._write(header + mask + message)
            self._output_size += len(header) + len(mask) + msg_length
        else:
            # For very large payloads avoid one big concat; write in two calls.
            if msg_length > MSG_SIZE:
                self._write(header)
                self._write(message)
            else:
                self._write(header + message)

            self._output_size += len(header) + msg_length

        # It is safe to return control to the event loop when using compression
        # after this point as we have already sent or buffered all the data.

        if self._output_size > self._limit:
            self._output_size = 0
            await self.protocol._drain_helper()

    def _make_compress_obj(self, compress: int) -> ZLibCompressor:
        """Build a raw-deflate compressor for the given window size."""
        return ZLibCompressor(
            level=zlib.Z_BEST_SPEED,
            # Negative wbits selects raw deflate (no zlib header/trailer).
            wbits=-compress,
            max_sync_chunk_size=WEBSOCKET_MAX_SYNC_CHUNK_SIZE,
        )

    def _write(self, data: bytes) -> None:
        """Write raw bytes to the transport, failing fast if it is closing."""
        if self.transport is None or self.transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        self.transport.write(data)

    async def pong(self, message: Union[bytes, str] = b"") -> None:
        """Send pong message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PONG)

    async def ping(self, message: Union[bytes, str] = b"") -> None:
        """Send ping message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        await self._send_frame(message, WSMsgType.PING)

    async def send(
        self,
        message: Union[str, bytes],
        binary: bool = False,
        compress: Optional[int] = None,
    ) -> None:
        """Send a frame over the websocket with message as its payload."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        if binary:
            await self._send_frame(message, WSMsgType.BINARY, compress)
        else:
            await self._send_frame(message, WSMsgType.TEXT, compress)

    async def close(self, code: int = 1000, message: Union[bytes, str] = b"") -> None:
        """Close the websocket, sending the specified code and message."""
        if isinstance(message, str):
            message = message.encode("utf-8")
        try:
            await self._send_frame(
                PACK_CLOSE_CODE(code) + message, opcode=WSMsgType.CLOSE
            )
        finally:
            # Mark closing even if the CLOSE frame failed to send, so no
            # further data frames can be written.
            self._closing = True
llmeval-env/lib/python3.10/site-packages/aiohttp/http_writer.py
ADDED
@@ -0,0 +1,198 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
"""Http related parsers and protocol."""
|
2 |
+
|
3 |
+
import asyncio
|
4 |
+
import zlib
|
5 |
+
from typing import Any, Awaitable, Callable, NamedTuple, Optional, Union # noqa
|
6 |
+
|
7 |
+
from multidict import CIMultiDict
|
8 |
+
|
9 |
+
from .abc import AbstractStreamWriter
|
10 |
+
from .base_protocol import BaseProtocol
|
11 |
+
from .compression_utils import ZLibCompressor
|
12 |
+
from .helpers import NO_EXTENSIONS
|
13 |
+
|
14 |
+
__all__ = ("StreamWriter", "HttpVersion", "HttpVersion10", "HttpVersion11")
|
15 |
+
|
16 |
+
|
17 |
+
# An immutable HTTP protocol version as a (major, minor) pair.  Being a
# NamedTuple it compares like a plain tuple: HttpVersion(1, 1) > HttpVersion(1, 0).
HttpVersion = NamedTuple("HttpVersion", [("major", int), ("minor", int)])
|
20 |
+
|
21 |
+
|
22 |
+
# The two HTTP/1.x versions the writer distinguishes between.
HttpVersion10 = HttpVersion(1, 0)
HttpVersion11 = HttpVersion(1, 1)


# Optional awaitable hooks: invoked with each body chunk / the header
# multidict just before it is written (used e.g. for request tracing).
_T_OnChunkSent = Optional[Callable[[bytes], Awaitable[None]]]
_T_OnHeadersSent = Optional[Callable[["CIMultiDict[str]"], Awaitable[None]]]
|
28 |
+
|
29 |
+
|
30 |
+
class StreamWriter(AbstractStreamWriter):
    """Write an HTTP message body to the protocol's transport.

    Supports chunked transfer encoding, optional deflate/gzip compression
    (via ZLibCompressor), and truncation to a fixed Content-Length when
    ``length`` is set.  Backpressure is applied through the protocol's
    drain helper.
    """

    def __init__(
        self,
        protocol: BaseProtocol,
        loop: asyncio.AbstractEventLoop,
        on_chunk_sent: _T_OnChunkSent = None,
        on_headers_sent: _T_OnHeadersSent = None,
    ) -> None:
        self._protocol = protocol

        self.loop = loop
        # Remaining payload bytes allowed; None means unlimited.
        self.length = None
        # True once chunked transfer encoding has been enabled.
        self.chunked = False
        # Bytes written since the last drain (reset when drained).
        self.buffer_size = 0
        # Total bytes written over the writer's lifetime.
        self.output_size = 0

        self._eof = False
        self._compress: Optional[ZLibCompressor] = None
        # NOTE(review): _drain_waiter is assigned but never read in this
        # class — presumably vestigial; confirm before removing.
        self._drain_waiter = None

        self._on_chunk_sent: _T_OnChunkSent = on_chunk_sent
        self._on_headers_sent: _T_OnHeadersSent = on_headers_sent

    @property
    def transport(self) -> Optional[asyncio.Transport]:
        # The transport lives on the protocol and may disappear on disconnect.
        return self._protocol.transport

    @property
    def protocol(self) -> BaseProtocol:
        return self._protocol

    def enable_chunking(self) -> None:
        """Switch the body to chunked transfer encoding."""
        self.chunked = True

    def enable_compression(
        self, encoding: str = "deflate", strategy: int = zlib.Z_DEFAULT_STRATEGY
    ) -> None:
        """Compress the body with the given content-coding before writing."""
        self._compress = ZLibCompressor(encoding=encoding, strategy=strategy)

    def _write(self, chunk: bytes) -> None:
        # Low-level write: update counters, then push bytes to the transport,
        # failing fast if the connection is gone or closing.
        size = len(chunk)
        self.buffer_size += size
        self.output_size += size
        transport = self.transport
        if not self._protocol.connected or transport is None or transport.is_closing():
            raise ConnectionResetError("Cannot write to closing transport")
        transport.write(chunk)

    async def write(
        self, chunk: bytes, *, drain: bool = True, LIMIT: int = 0x10000
    ) -> None:
        """Writes chunk of data to a stream.

        write_eof() indicates end of stream.
        writer can't be used after write_eof() method being called.
        write() return drain future.
        """
        if self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if isinstance(chunk, memoryview):
            if chunk.nbytes != len(chunk):
                # just reshape it
                chunk = chunk.cast("c")

        if self._compress is not None:
            chunk = await self._compress.compress(chunk)
            # The compressor may buffer everything and emit nothing yet.
            if not chunk:
                return

        if self.length is not None:
            chunk_len = len(chunk)
            if self.length >= chunk_len:
                self.length = self.length - chunk_len
            else:
                # Truncate to the declared Content-Length; drop the excess.
                chunk = chunk[: self.length]
                self.length = 0
                if not chunk:
                    return

        if chunk:
            if self.chunked:
                # Chunked framing: hex size line, payload, CRLF.
                chunk_len_pre = ("%x\r\n" % len(chunk)).encode("ascii")
                chunk = chunk_len_pre + chunk + b"\r\n"

            self._write(chunk)

            if self.buffer_size > LIMIT and drain:
                self.buffer_size = 0
                await self.drain()

    async def write_headers(
        self, status_line: str, headers: "CIMultiDict[str]"
    ) -> None:
        """Write request/response status and headers."""
        if self._on_headers_sent is not None:
            await self._on_headers_sent(headers)

        # status + headers
        buf = _serialize_headers(status_line, headers)
        self._write(buf)

    async def write_eof(self, chunk: bytes = b"") -> None:
        """Finish the body: flush compression, emit the final chunk marker."""
        # Idempotent: a second call is a no-op.
        if self._eof:
            return

        if chunk and self._on_chunk_sent is not None:
            await self._on_chunk_sent(chunk)

        if self._compress:
            if chunk:
                chunk = await self._compress.compress(chunk)

            # Flush whatever the compressor still holds.
            chunk += self._compress.flush()
            if chunk and self.chunked:
                chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                # Final data chunk followed by the zero-length terminator.
                chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
        else:
            if self.chunked:
                if chunk:
                    chunk_len = ("%x\r\n" % len(chunk)).encode("ascii")
                    chunk = chunk_len + chunk + b"\r\n0\r\n\r\n"
                else:
                    # No trailing data: just the zero-length terminator.
                    chunk = b"0\r\n\r\n"

        if chunk:
            self._write(chunk)

        await self.drain()

        self._eof = True

    async def drain(self) -> None:
        """Flush the write buffer.

        The intended use is to write

        await w.write(data)
        await w.drain()
        """
        if self._protocol.transport is not None:
            await self._protocol._drain_helper()
|
172 |
+
|
173 |
+
|
174 |
+
def _safe_header(string: str) -> str:
|
175 |
+
if "\r" in string or "\n" in string:
|
176 |
+
raise ValueError(
|
177 |
+
"Newline or carriage return detected in headers. "
|
178 |
+
"Potential header injection attack."
|
179 |
+
)
|
180 |
+
return string
|
181 |
+
|
182 |
+
|
183 |
+
def _py_serialize_headers(status_line: str, headers: "CIMultiDict[str]") -> bytes:
|
184 |
+
headers_gen = (_safe_header(k) + ": " + _safe_header(v) for k, v in headers.items())
|
185 |
+
line = status_line + "\r\n" + "\r\n".join(headers_gen) + "\r\n\r\n"
|
186 |
+
return line.encode("utf-8")
|
187 |
+
|
188 |
+
|
189 |
+
# Default to the pure-Python serializer; replaced below by the C-accelerated
# one when the extension is importable and extensions are not disabled.
_serialize_headers = _py_serialize_headers

try:
    import aiohttp._http_writer as _http_writer  # type: ignore[import-not-found]

    _c_serialize_headers = _http_writer._serialize_headers
    if not NO_EXTENSIONS:
        _serialize_headers = _c_serialize_headers
except ImportError:
    # C extension not built/available — keep the pure-Python implementation.
    pass
|